From 30d598911d78c803b4d5a7316097f53cdadfce9f Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 3 Feb 2020 19:24:45 +0100 Subject: [PATCH 001/133] removed old integrators --- pype/plugins/global/publish/integrate.py | 417 ----------------- .../publish/integrate_rendered_frames.py | 423 ------------------ 2 files changed, 840 deletions(-) delete mode 100644 pype/plugins/global/publish/integrate.py delete mode 100644 pype/plugins/global/publish/integrate_rendered_frames.py diff --git a/pype/plugins/global/publish/integrate.py b/pype/plugins/global/publish/integrate.py deleted file mode 100644 index 87b9e1a9bd..0000000000 --- a/pype/plugins/global/publish/integrate.py +++ /dev/null @@ -1,417 +0,0 @@ -import os -import logging -import shutil - -import errno -import pyblish.api -from avalon import api, io -from avalon.vendor import filelink - - -log = logging.getLogger(__name__) - - -class IntegrateAsset(pyblish.api.InstancePlugin): - """Resolve any dependency issies - - This plug-in resolves any paths which, if not updated might break - the published file. - - The order of families is important, when working with lookdev you want to - first publish the texture, update the texture paths in the nodes and then - publish the shading network. Same goes for file dependent assets. - """ - - label = "Integrate Asset" - order = pyblish.api.IntegratorOrder - families = [] - exclude_families = ["clip"] - - def process(self, instance): - if [ef for ef in self.exclude_families - if instance.data["family"] in ef]: - return - - self.register(instance) - - self.log.info("Integrating Asset in to the database ...") - if instance.data.get('transfer', True): - self.integrate(instance) - - def register(self, instance): - # Required environment variables - PROJECT = api.Session["AVALON_PROJECT"] - ASSET = instance.data.get("asset") or api.Session["AVALON_ASSET"] - LOCATION = api.Session["AVALON_LOCATION"] - - context = instance.context - # Atomicity - # - # Guarantee atomic publishes - each asset contains - # an identical set of members. - # __ - # / o - # / \ - # | o | - # \ / - # o __/ - # - assert all(result["success"] for result in context.data["results"]), ( - "Atomicity not held, aborting.") - - # Assemble - # - # | - # v - # ---> <---- - # ^ - # | - # - stagingdir = instance.data.get("stagingDir") - assert stagingdir, ("Incomplete instance \"%s\": " - "Missing reference to staging area." % instance) - - # extra check if stagingDir actually exists and is available - - self.log.debug("Establishing staging directory @ %s" % stagingdir) - - # Ensure at least one file is set up for transfer in staging dir. 
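- # (The assertions below guard against an empty or non-list
- # "files" value before any transfer bookkeeping starts.)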
- files = instance.data.get("files", []) - assert files, "Instance has no files to transfer" - assert isinstance(files, (list, tuple)), ( - "Instance 'files' must be a list, got: {0}".format(files) - ) - - project = io.find_one({"type": "project"}) - - asset = io.find_one({ - "type": "asset", - "name": ASSET, - "parent": project["_id"] - }) - - assert all([project, asset]), ("Could not find current project or " - "asset '%s'" % ASSET) - - subset = self.get_subset(asset, instance) - - # get next version - latest_version = io.find_one( - { - "type": "version", - "parent": subset["_id"] - }, - {"name": True}, - sort=[("name", -1)] - ) - - next_version = 1 - if latest_version is not None: - next_version += latest_version["name"] - - self.log.info("Verifying version from assumed destination") - - assumed_data = instance.data["assumedTemplateData"] - assumed_version = assumed_data["version"] - if assumed_version != next_version: - raise AttributeError("Assumed version 'v{0:03d}' does not match" - "next version in database " - "('v{1:03d}')".format(assumed_version, - next_version)) - - self.log.debug("Next version: v{0:03d}".format(next_version)) - - version_data = self.create_version_data(context, instance) - version = self.create_version(subset=subset, - version_number=next_version, - locations=[LOCATION], - data=version_data) - - self.log.debug("Creating version ...") - version_id = io.insert_one(version).inserted_id - - # Write to disk - # _ - # | | - # _| |_ - # ____\ / - # |\ \ / \ - # \ \ v \ - # \ \________. - # \|________| - # - root = api.registered_root() - hierarchy = "" - parents = io.find_one({ - "type": 'asset', - "name": ASSET - })['data']['parents'] - if parents and len(parents) > 0: - # hierarchy = os.path.sep.join(hierarchy) - hierarchy = os.path.join(*parents) - - template_data = {"root": root, - "project": {"name": PROJECT, - "code": project['data']['code']}, - "silo": asset['silo'], - "asset": ASSET, - "family": instance.data['family'], - "subset": subset["name"], - "version": int(version["name"]), - "hierarchy": hierarchy} - - # template_publish = project["config"]["template"]["publish"] - anatomy = instance.context.data['anatomy'] - - # Find the representations to transfer amongst the files - # Each should be a single representation (as such, a single extension) - representations = [] - destination_list = [] - if 'transfers' not in instance.data: - instance.data['transfers'] = [] - - for files in instance.data["files"]: - - # Collection - # _______ - # |______|\ - # | |\| - # | || - # | || - # | || - # |_______| - # - - if isinstance(files, list): - collection = files - # Assert that each member has identical suffix - _, ext = os.path.splitext(collection[0]) - assert all(ext == os.path.splitext(name)[1] - for name in collection), ( - "Files had varying suffixes, this is a bug" - ) - - assert not any(os.path.isabs(name) for name in collection) - - template_data["representation"] = ext[1:] - - for fname in collection: - - src = os.path.join(stagingdir, fname) - anatomy_filled = anatomy.format(template_data) - dst = anatomy_filled["publish"]["path"] - - instance.data["transfers"].append([src, dst]) - template = anatomy.templates["publish"]["path"] - - else: - # Single file - # _______ - # | |\ - # | | - # | | - # | | - # |_______| - # - fname = files - assert not os.path.isabs(fname), ( - "Given file name is a full path" - ) - _, ext = os.path.splitext(fname) - - template_data["representation"] = ext[1:] - - src = os.path.join(stagingdir, fname) - anatomy_filled = 
anatomy.format(template_data) - dst = anatomy_filled["publish"]["path"] - - instance.data["transfers"].append([src, dst]) - template = anatomy.templates["publish"]["path"] - - representation = { - "schema": "pype:representation-2.0", - "type": "representation", - "parent": version_id, - "name": ext[1:], - "data": {'path': dst, 'template': template}, - "dependencies": instance.data.get("dependencies", "").split(), - - # Imprint shortcut to context - # for performance reasons. - "context": { - "root": root, - "project": {"name": PROJECT, - "code": project['data']['code']}, - 'task': api.Session["AVALON_TASK"], - "silo": asset['silo'], - "asset": ASSET, - "family": instance.data['family'], - "subset": subset["name"], - "version": version["name"], - "hierarchy": hierarchy, - "representation": ext[1:] - } - } - - destination_list.append(dst) - instance.data['destination_list'] = destination_list - representations.append(representation) - - self.log.info("Registering {} items".format(len(representations))) - - io.insert_many(representations) - - def integrate(self, instance): - """Move the files - - Through `instance.data["transfers"]` - - Args: - instance: the instance to integrate - """ - - transfers = instance.data.get("transfers", list()) - - for src, dest in transfers: - self.log.info("Copying file .. {} -> {}".format(src, dest)) - self.copy_file(src, dest) - - # Produce hardlinked copies - # Note: hardlink can only be produced between two files on the same - # server/disk and editing one of the two will edit both files at once. - # As such it is recommended to only make hardlinks between static files - # to ensure publishes remain safe and non-edited. - hardlinks = instance.data.get("hardlinks", list()) - for src, dest in hardlinks: - self.log.info("Hardlinking file .. {} -> {}".format(src, dest)) - self.hardlink_file(src, dest) - - def copy_file(self, src, dst): - """ Copy given source to destination - - Arguments: - src (str): the source file which needs to be copied - dst (str): the destination of the sourc file - Returns: - None - """ - - dirname = os.path.dirname(dst) - try: - os.makedirs(dirname) - except OSError as e: - if e.errno == errno.EEXIST: - pass - else: - self.log.critical("An unexpected error occurred.") - raise - - shutil.copy(src, dst) - - def hardlink_file(self, src, dst): - - dirname = os.path.dirname(dst) - try: - os.makedirs(dirname) - except OSError as e: - if e.errno == errno.EEXIST: - pass - else: - self.log.critical("An unexpected error occurred.") - raise - - filelink.create(src, dst, filelink.HARDLINK) - - def get_subset(self, asset, instance): - - subset = io.find_one({ - "type": "subset", - "parent": asset["_id"], - "name": instance.data["subset"] - }) - - if subset is None: - subset_name = instance.data["subset"] - self.log.info("Subset '%s' not found, creating.." 
% subset_name) - - _id = io.insert_one({ - "schema": "avalon-core:subset-2.0", - "type": "subset", - "name": subset_name, - "data": {}, - "parent": asset["_id"] - }).inserted_id - - subset = io.find_one({"_id": _id}) - - return subset - - def create_version(self, subset, version_number, locations, data=None): - """ Copy given source to destination - - Args: - subset (dict): the registered subset of the asset - version_number (int): the version number - locations (list): the currently registered locations - - Returns: - dict: collection of data to create a version - """ - # Imprint currently registered location - version_locations = [location for location in locations if - location is not None] - - return {"schema": "avalon-core:version-2.0", - "type": "version", - "parent": subset["_id"], - "name": version_number, - "locations": version_locations, - "data": data} - - def create_version_data(self, context, instance): - """Create the data collection for the version - - Args: - context: the current context - instance: the current instance being published - - Returns: - dict: the required information with instance.data as key - """ - - families = [] - current_families = instance.data.get("families", list()) - instance_family = instance.data.get("family", None) - - if instance_family is not None: - families.append(instance_family) - families += current_families - - self.log.debug("Registered root: {}".format(api.registered_root())) - # create relative source path for DB - try: - source = instance.data['source'] - except KeyError: - source = context.data["currentFile"] - - relative_path = os.path.relpath(source, api.registered_root()) - source = os.path.join("{root}", relative_path).replace("\\", "/") - - self.log.debug("Source: {}".format(source)) - version_data = {"families": families, - "time": context.data["time"], - "author": context.data["user"], - "source": source, - "comment": context.data.get("comment"), - "machine": context.data.get("machine"), - "fps": context.data.get("fps")} - - # Include optional data if present in - optionals = [ - "frameStart", "frameEnd", "step", "handles", "sourceHashes" - ] - for key in optionals: - if key in instance.data: - version_data[key] = instance.data[key] - - return version_data diff --git a/pype/plugins/global/publish/integrate_rendered_frames.py b/pype/plugins/global/publish/integrate_rendered_frames.py deleted file mode 100644 index 5819051146..0000000000 --- a/pype/plugins/global/publish/integrate_rendered_frames.py +++ /dev/null @@ -1,423 +0,0 @@ -import os -import logging -import shutil -import clique - -import errno -import pyblish.api -from avalon import api, io - - -log = logging.getLogger(__name__) - - -class IntegrateFrames(pyblish.api.InstancePlugin): - """Resolve any dependency issies - - This plug-in resolves any paths which, if not updated might break - the published file. - - The order of families is important, when working with lookdev you want to - first publish the texture, update the texture paths in the nodes and then - publish the shading network. Same goes for file dependent assets. 
- """ - - label = "Integrate Frames" - order = pyblish.api.IntegratorOrder - families = ["imagesequence"] - - family_targets = [".frames", ".local", ".review", "imagesequence", "render", "source"] - exclude_families = ["clip"] - - def process(self, instance): - if [ef for ef in self.exclude_families - if instance.data["family"] in ef]: - return - - families = [f for f in instance.data["families"] - for search in self.family_targets - if search in f] - - if not families: - return - - self.register(instance) - - # self.log.info("Integrating Asset in to the database ...") - # self.log.info("instance.data: {}".format(instance.data)) - if instance.data.get('transfer', True): - self.integrate(instance) - - def register(self, instance): - - # Required environment variables - PROJECT = api.Session["AVALON_PROJECT"] - ASSET = instance.data.get("asset") or api.Session["AVALON_ASSET"] - LOCATION = api.Session["AVALON_LOCATION"] - - context = instance.context - # Atomicity - # - # Guarantee atomic publishes - each asset contains - # an identical set of members. - # __ - # / o - # / \ - # | o | - # \ / - # o __/ - # - assert all(result["success"] for result in context.data["results"]), ( - "Atomicity not held, aborting.") - - # Assemble - # - # | - # v - # ---> <---- - # ^ - # | - # - stagingdir = instance.data.get("stagingDir") - assert stagingdir, ("Incomplete instance \"%s\": " - "Missing reference to staging area." % instance) - - # extra check if stagingDir actually exists and is available - - self.log.debug("Establishing staging directory @ %s" % stagingdir) - - project = io.find_one({"type": "project"}) - - asset = io.find_one({ - "type": "asset", - "name": ASSET, - "parent": project["_id"] - }) - - assert all([project, asset]), ("Could not find current project or " - "asset '%s'" % ASSET) - - subset = self.get_subset(asset, instance) - - # get next version - latest_version = io.find_one( - { - "type": "version", - "parent": subset["_id"] - }, - {"name": True}, - sort=[("name", -1)] - ) - - next_version = 1 - if latest_version is not None: - next_version += latest_version["name"] - - self.log.info("Verifying version from assumed destination") - - assumed_data = instance.data["assumedTemplateData"] - assumed_version = assumed_data["version"] - if assumed_version != next_version: - raise AttributeError("Assumed version 'v{0:03d}' does not match" - "next version in database " - "('v{1:03d}')".format(assumed_version, - next_version)) - - if instance.data.get('version'): - next_version = int(instance.data.get('version')) - - self.log.debug("Next version: v{0:03d}".format(next_version)) - - version_data = self.create_version_data(context, instance) - version = self.create_version(subset=subset, - version_number=next_version, - locations=[LOCATION], - data=version_data) - - self.log.debug("Creating version ...") - version_id = io.insert_one(version).inserted_id - - # Write to disk - # _ - # | | - # _| |_ - # ____\ / - # |\ \ / \ - # \ \ v \ - # \ \________. 
- # \|________| - # - root = api.registered_root() - hierarchy = "" - parents = io.find_one({"type": 'asset', "name": ASSET})[ - 'data']['parents'] - if parents and len(parents) > 0: - # hierarchy = os.path.sep.join(hierarchy) - hierarchy = os.path.join(*parents) - - template_data = {"root": root, - "project": {"name": PROJECT, - "code": project['data']['code']}, - "silo": asset.get('silo'), - "task": api.Session["AVALON_TASK"], - "asset": ASSET, - "family": instance.data['family'], - "subset": subset["name"], - "version": int(version["name"]), - "hierarchy": hierarchy} - - # template_publish = project["config"]["template"]["publish"] - anatomy = instance.context.data['anatomy'] - - # Find the representations to transfer amongst the files - # Each should be a single representation (as such, a single extension) - representations = [] - destination_list = [] - - if 'transfers' not in instance.data: - instance.data['transfers'] = [] - - for files in instance.data["files"]: - # Collection - # _______ - # |______|\ - # | |\| - # | || - # | || - # | || - # |_______| - # - if isinstance(files, list): - - src_collections, remainder = clique.assemble(files) - src_collection = src_collections[0] - # Assert that each member has identical suffix - src_head = src_collection.format("{head}") - src_tail = ext = src_collection.format("{tail}") - - test_dest_files = list() - for i in [1, 2]: - template_data["representation"] = src_tail[1:] - template_data["frame"] = src_collection.format( - "{padding}") % i - anatomy_filled = anatomy.format(template_data) - test_dest_files.append(anatomy_filled["render"]["path"]) - - dst_collections, remainder = clique.assemble(test_dest_files) - dst_collection = dst_collections[0] - dst_head = dst_collection.format("{head}") - dst_tail = dst_collection.format("{tail}") - - for i in src_collection.indexes: - src_padding = src_collection.format("{padding}") % i - src_file_name = "{0}{1}{2}".format( - src_head, src_padding, src_tail) - dst_padding = dst_collection.format("{padding}") % i - dst = "{0}{1}{2}".format(dst_head, dst_padding, dst_tail) - - src = os.path.join(stagingdir, src_file_name) - instance.data["transfers"].append([src, dst]) - - else: - # Single file - # _______ - # | |\ - # | | - # | | - # | | - # |_______| - # - - template_data.pop("frame", None) - - fname = files - - self.log.info("fname: {}".format(fname)) - - assert not os.path.isabs(fname), ( - "Given file name is a full path" - ) - _, ext = os.path.splitext(fname) - - template_data["representation"] = ext[1:] - - src = os.path.join(stagingdir, fname) - - anatomy_filled = anatomy.format(template_data) - dst = anatomy_filled["render"]["path"] - - instance.data["transfers"].append([src, dst]) - - if ext[1:] not in ["jpeg", "jpg", "mov", "mp4", "wav"]: - template_data["frame"] = "#" * int(anatomy_filled["render"]["padding"]) - - anatomy_filled = anatomy.format(template_data) - path_to_save = anatomy_filled["render"]["path"] - template = anatomy.templates["render"]["path"] - - self.log.debug("path_to_save: {}".format(path_to_save)) - - representation = { - "schema": "pype:representation-2.0", - "type": "representation", - "parent": version_id, - "name": ext[1:], - "data": {'path': path_to_save, 'template': template}, - "dependencies": instance.data.get("dependencies", "").split(), - - # Imprint shortcut to context - # for performance reasons. 
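- # (Duplicating these fields on the representation lets
- # consumers resolve publish paths without extra database
- # lookups.)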
- "context": { - "root": root, - "project": { - "name": PROJECT, - "code": project['data']['code'] - }, - "task": api.Session["AVALON_TASK"], - "silo": asset['silo'], - "asset": ASSET, - "family": instance.data['family'], - "subset": subset["name"], - "version": int(version["name"]), - "hierarchy": hierarchy, - "representation": ext[1:] - } - } - - destination_list.append(dst) - instance.data['destination_list'] = destination_list - representations.append(representation) - - self.log.info("Registering {} items".format(len(representations))) - io.insert_many(representations) - - def integrate(self, instance): - """Move the files - - Through `instance.data["transfers"]` - - Args: - instance: the instance to integrate - """ - - transfers = instance.data["transfers"] - - for src, dest in transfers: - src = os.path.normpath(src) - dest = os.path.normpath(dest) - if src in dest: - continue - - self.log.info("Copying file .. {} -> {}".format(src, dest)) - self.copy_file(src, dest) - - def copy_file(self, src, dst): - """ Copy given source to destination - - Arguments: - src (str): the source file which needs to be copied - dst (str): the destination of the sourc file - Returns: - None - """ - - dirname = os.path.dirname(dst) - try: - os.makedirs(dirname) - except OSError as e: - if e.errno == errno.EEXIST: - pass - else: - self.log.critical("An unexpected error occurred.") - raise - - shutil.copy(src, dst) - - def get_subset(self, asset, instance): - - subset = io.find_one({ - "type": "subset", - "parent": asset["_id"], - "name": instance.data["subset"] - }) - - if subset is None: - subset_name = instance.data["subset"] - self.log.info("Subset '%s' not found, creating.." % subset_name) - - _id = io.insert_one({ - "schema": "pype:subset-2.0", - "type": "subset", - "name": subset_name, - "data": {}, - "parent": asset["_id"] - }).inserted_id - - subset = io.find_one({"_id": _id}) - - return subset - - def create_version(self, subset, version_number, locations, data=None): - """ Copy given source to destination - - Args: - subset (dict): the registered subset of the asset - version_number (int): the version number - locations (list): the currently registered locations - - Returns: - dict: collection of data to create a version - """ - # Imprint currently registered location - version_locations = [location for location in locations if - location is not None] - - return {"schema": "pype:version-2.0", - "type": "version", - "parent": subset["_id"], - "name": version_number, - "locations": version_locations, - "data": data} - - def create_version_data(self, context, instance): - """Create the data collection for the version - - Args: - context: the current context - instance: the current instance being published - - Returns: - dict: the required information with instance.data as key - """ - - families = [] - current_families = instance.data.get("families", list()) - instance_family = instance.data.get("family", None) - - if instance_family is not None: - families.append(instance_family) - families += current_families - - try: - source = instance.data['source'] - except KeyError: - source = context.data["currentFile"] - - relative_path = os.path.relpath(source, api.registered_root()) - source = os.path.join("{root}", relative_path).replace("\\", "/") - - version_data = {"families": families, - "time": context.data["time"], - "author": context.data["user"], - "source": source, - "comment": context.data.get("comment")} - - # Include optional data if present in - optionals = ["frameStart", "frameEnd", 
"step", - "handles", "colorspace", "fps", "outputDir"] - - for key in optionals: - if key in instance.data: - version_data[key] = instance.data.get(key, None) - - return version_data From 9bcdf7f72a96b9839aee1e9c49acb75475b55cc8 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 3 Feb 2020 19:25:33 +0100 Subject: [PATCH 002/133] added avalon entities collector --- .../global/publish/collect_avalon_entities.py | 46 +++++++++++++++++++ 1 file changed, 46 insertions(+) create mode 100644 pype/plugins/global/publish/collect_avalon_entities.py diff --git a/pype/plugins/global/publish/collect_avalon_entities.py b/pype/plugins/global/publish/collect_avalon_entities.py new file mode 100644 index 0000000000..c256dffd52 --- /dev/null +++ b/pype/plugins/global/publish/collect_avalon_entities.py @@ -0,0 +1,46 @@ +"""Collect Anatomy and global anatomy data. + +Requires: + session -> AVALON_PROJECT, AVALON_ASSET + +Provides: + context -> projectEntity - project entity from database + context -> assetEntity - asset entity from database +""" + +from avalon import io, api +import pyblish.api + + +class CollectAvalonEntities(pyblish.api.ContextPlugin): + """Collect Anatomy into Context""" + + order = pyblish.api.CollectorOrder + label = "Collect Avalon Entities" + + def process(self, context): + project_name = api.Session["AVALON_PROJECT"] + asset_name = api.Session["AVALON_ASSET"] + + project_entity = io.find_one({ + "type": "project", + "name": project_name + }) + assert project_entity, ( + "Project '{0}' was not found." + ).format(project_name) + self.log.debug("Collected Project entity \"{}\"".format(project_entity)) + + asset_entity = io.find_one({ + "type": "asset", + "name": asset_name, + "parent": project_entity["_id"] + }) + assert asset_entity, ( + "No asset found by the name '{0}' in project '{1}'" + ).format(asset_name, project_name) + + self.log.debug("Collected Asset entity \"{}\"".format(asset_entity)) + + context.data["projectEntity"] = project_entity + context.data["assetEntity"] = asset_entity From a2d75afe7a8e78fd2481c18a095aa96b9382a9e7 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 3 Feb 2020 19:25:57 +0100 Subject: [PATCH 003/133] collect anatomy also collect global anatomy data --- .../plugins/global/publish/collect_anatomy.py | 67 +++++++++++++++++-- 1 file changed, 62 insertions(+), 5 deletions(-) diff --git a/pype/plugins/global/publish/collect_anatomy.py b/pype/plugins/global/publish/collect_anatomy.py index 9412209850..0831c16d32 100644 --- a/pype/plugins/global/publish/collect_anatomy.py +++ b/pype/plugins/global/publish/collect_anatomy.py @@ -1,10 +1,24 @@ -""" +"""Collect Anatomy and global anatomy data. 
+ Requires: - None + session -> AVALON_TASK + projectEntity, assetEntity -> collect_avalon_entities *(pyblish.api.CollectorOrder) + username -> collect_pype_user *(pyblish.api.CollectorOrder + 0.001) + datetimeData -> collect_datetime_data *(pyblish.api.CollectorOrder) + +Optional: + comment -> collect_comment *(pyblish.api.CollectorOrder) + intent -> collected in pyblish-lite + Provides: context -> anatomy (pypeapp.Anatomy) + context -> anatomyData """ +import os +import json + +from avalon import io, api, lib from pypeapp import Anatomy import pyblish.api @@ -12,9 +26,52 @@ import pyblish.api class CollectAnatomy(pyblish.api.ContextPlugin): """Collect Anatomy into Context""" - order = pyblish.api.CollectorOrder + order = pyblish.api.CollectorOrder + 0.002 label = "Collect Anatomy" def process(self, context): - context.data['anatomy'] = Anatomy() - self.log.info("Anatomy templates collected...") + root_path = api.registered_root() + task_name = api.Session["AVALON_TASK"] + + project_entity = context.data["projectEntity"] + asset_entity = context.data["assetEntity"] + + project_name = project_entity["name"] + + context.data["anatomy"] = Anatomy(project_name) + self.log.info( + "Anatomy object collected for project \"{}\".".format(project_name) + ) + + hierarchy_items = asset_entity["data"]["parents"] + hierarchy = "" + if hierarchy_items: + hierarchy = os.path.join(*hierarchy_items) + + context_data = { + "root": root_path, + "project": { + "name": project_name, + "code": project_entity["data"].get("code") + }, + "asset": asset_entity["name"], + "hierarchy": hierarchy.replace("\\", "/"), + "task": task_name, + + "username": context.data["user"] + } + + avalon_app_name = os.environ.get("AVALON_APP_NAME") + if avalon_app_name: + application_def = lib.get_application(avalon_app_name) + app_dir = application_def.get("application_dir") + if app_dir: + context_data["app"] = app_dir + + datetime_data = context.data.get("datetimeData") or {} + context_data.update(datetime_data) + + context.data["anatomyData"] = context_data + + self.log.info("Global anatomy Data collected") + self.log.debug(json.dumps(context_data, indent=4)) From 54f76e7f7f9ec884bdbbe915a5088e7aaf8e3e10 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 3 Feb 2020 19:26:19 +0100 Subject: [PATCH 004/133] collect templates replaced with collect instance anatomy data --- .../publish/collect_instance_anatomy_data.py | 119 ++++++++++++++++++ .../global/publish/collect_templates.py | 117 ----------------- 2 files changed, 119 insertions(+), 117 deletions(-) create mode 100644 pype/plugins/global/publish/collect_instance_anatomy_data.py delete mode 100644 pype/plugins/global/publish/collect_templates.py diff --git a/pype/plugins/global/publish/collect_instance_anatomy_data.py b/pype/plugins/global/publish/collect_instance_anatomy_data.py new file mode 100644 index 0000000000..a1a9278d2a --- /dev/null +++ b/pype/plugins/global/publish/collect_instance_anatomy_data.py @@ -0,0 +1,119 @@ +""" +Requires: + context -> anatomyData + context -> projectEntity + context -> assetEntity + instance -> asset + instance -> subset + instance -> family + +Optional: + instance -> resolutionWidth + instance -> resolutionHeight + instance -> fps + +Provides: + instance -> anatomyData +""" + +import copy +import json + +from avalon import io +import pyblish.api + + +class CollectInstanceAnatomyData(pyblish.api.InstancePlugin): + """Fill templates with data needed for publish""" + + order = pyblish.api.CollectorOrder + 0.1 + label = "Collect instance 
anatomy data" + hosts = ["maya", "nuke", "standalonepublisher"] + + def process(self, instance): + # get all the stuff from the database + anatomy_data = copy.deepcopy(instance.context.data["anatomyData"]) + project_entity = instance.context.data["projectEntity"] + context_asset_entity = instance.context.data["assetEntity"] + + asset_name = instance.data["asset"] + # Check if asset name is the same as what is in context + # - they may be different, e.g. in NukeStudio + if context_asset_entity["name"] == asset_name: + asset_entity = context_asset_entity + + else: + asset_entity = io.find_one({ + "type": "asset", + "name": asset_name, + "parent": project_entity["_id"] + }) + + instance.context.data["assetEntity"] = asset_entity + instance.context.data["projectEntity"] = project_entity + + subset_name = instance.data["subset"] + subset_entity = io.find_one({ + "type": "subset", + "name": subset_name, + "parent": asset_entity["_id"] + }) + + version_number = instance.data.get("version") + if version_number is None: + version_number = instance.context.data.get("version") + + latest_version = None + if subset_entity is None: + self.log.debug("Subset entity does not exist yet.") + else: + version_entity = io.find_one( + { + "type": "version", + "parent": subset_entity["_id"] + }, + sort=[("name", -1)] + ) + if version_entity: + latest_version = version_entity["name"] + + if version_number is None: + # TODO we should be able to change this version by studio + # preferences (like start with version number `0`) + version_number = 1 + if latest_version is not None: + version_number += int(latest_version) + + # Version should not be collected since may be instance + anatomy_data.update({ + "asset": asset_entity["name"], + "family": instance.data["family"], + "subset": subset_name, + "version": version_number + }) + + resolution_width = instance.data.get("resolutionWidth") + if resolution_width: + anatomy_data["resolution_width"] = resolution_width + + resolution_height = instance.data.get("resolutionHeight") + if resolution_height: + anatomy_data["resolution_height"] = resolution_height + + fps = instance.data.get("fps") + if resolution_height: + anatomy_data["fps"] = fps + + instance.data["anatomyData"] = anatomy_data + instance.data["latestVersion"] = latest_version + # TODO check if template is used anywhere + # instance.data["template"] = template + + # TODO we should move this to any Validator + # # We take the parent folder of representation 'filepath' + # instance.data["assumedDestination"] = os.path.dirname( + # (anatomy.format(template_data))["publish"]["path"] + # ) + + self.log.info("Instance anatomy Data collected") + self.log.debug(json.dumps(anatomy_data, indent=4)) diff --git a/pype/plugins/global/publish/collect_templates.py b/pype/plugins/global/publish/collect_templates.py deleted file mode 100644 index f065b3c246..0000000000 --- a/pype/plugins/global/publish/collect_templates.py +++ /dev/null @@ -1,117 +0,0 @@ -""" -Requires: - session -> AVALON_PROJECT - context -> anatomy (pypeapp.Anatomy) - instance -> subset - instance -> asset - instance -> family - -Provides: - instance -> template - instance -> assumedTemplateData - instance -> assumedDestination -""" - -import os - -from avalon import io, api -import pyblish.api - - -class CollectTemplates(pyblish.api.InstancePlugin): - """Fill templates with data needed for publish""" - - order = pyblish.api.CollectorOrder + 0.1 - label = "Collect and fill Templates" - hosts = ["maya", "nuke", "standalonepublisher"] - - def process(self, 
instance): - # get all the stuff from the database - subset_name = instance.data["subset"] - asset_name = instance.data["asset"] - project_name = api.Session["AVALON_PROJECT"] - - project = io.find_one( - { - "type": "project", - "name": project_name - }, - projection={"config": True, "data": True} - ) - - template = project["config"]["template"]["publish"] - anatomy = instance.context.data['anatomy'] - - asset = io.find_one({ - "type": "asset", - "name": asset_name, - "parent": project["_id"] - }) - - assert asset, ("No asset found by the name '{}' " - "in project '{}'".format(asset_name, project_name)) - silo = asset.get('silo') - - subset = io.find_one({ - "type": "subset", - "name": subset_name, - "parent": asset["_id"] - }) - - # assume there is no version yet, we start at `1` - version = None - version_number = 1 - if subset is not None: - version = io.find_one( - { - "type": "version", - "parent": subset["_id"] - }, - sort=[("name", -1)] - ) - - # if there is a subset there ought to be version - if version is not None: - version_number += int(version["name"]) - - hierarchy = asset['data']['parents'] - if hierarchy: - # hierarchy = os.path.sep.join(hierarchy) - hierarchy = os.path.join(*hierarchy) - - template_data = {"root": api.Session["AVALON_PROJECTS"], - "project": {"name": project_name, - "code": project['data']['code']}, - "silo": silo, - "family": instance.data['family'], - "asset": asset_name, - "subset": subset_name, - "version": version_number, - "hierarchy": hierarchy.replace("\\", "/"), - "representation": "TEMP"} - - # Add datetime data to template data - datetime_data = instance.context.data.get("datetimeData") or {} - template_data.update(datetime_data) - - resolution_width = instance.data.get("resolutionWidth") - resolution_height = instance.data.get("resolutionHeight") - fps = instance.data.get("fps") - - if resolution_width: - template_data["resolution_width"] = resolution_width - if resolution_width: - template_data["resolution_height"] = resolution_height - if resolution_width: - template_data["fps"] = fps - - instance.data["template"] = template - instance.data["assumedTemplateData"] = template_data - - # We take the parent folder of representation 'filepath' - instance.data["assumedDestination"] = os.path.dirname( - (anatomy.format(template_data))["publish"]["path"] - ) - self.log.info("Assumed Destination has been created...") - self.log.debug("__ assumedTemplateData: `{}`".format(instance.data["assumedTemplateData"])) - self.log.debug("__ template: `{}`".format(instance.data["template"])) From 1515f47f0fad2700efaa69022ac682456b7e4c50 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 3 Feb 2020 19:26:33 +0100 Subject: [PATCH 005/133] extract burnin uses anatomyData --- pype/plugins/global/publish/extract_burnin.py | 16 +++------------- 1 file changed, 3 insertions(+), 13 deletions(-) diff --git a/pype/plugins/global/publish/extract_burnin.py b/pype/plugins/global/publish/extract_burnin.py index e50ba891d2..b95c15f340 100644 --- a/pype/plugins/global/publish/extract_burnin.py +++ b/pype/plugins/global/publish/extract_burnin.py @@ -32,21 +32,15 @@ class ExtractBurnin(pype.api.Extractor): frame_end = int(instance.data.get("frameEnd") or 1) duration = frame_end - frame_start + 1 - prep_data = { - "username": instance.context.data['user'], - "asset": os.environ['AVALON_ASSET'], - "task": os.environ['AVALON_TASK'], + prep_data = copy.deepcopy(instance.data["anatomyData"]) + prep_data.update({ "frame_start": frame_start, "frame_end": frame_end, "duration": 
duration, "version": int(version), "comment": instance.context.data.get("comment", ""), "intent": instance.context.data.get("intent", "") - } - - # Add datetime data to preparation data - datetime_data = instance.context.data.get("datetimeData") or {} - prep_data.update(datetime_data) + }) slate_frame_start = frame_start slate_frame_end = frame_end @@ -64,10 +58,6 @@ class ExtractBurnin(pype.api.Extractor): "slate_duration": slate_duration }) - # Update data with template data - template_data = instance.data.get("assumedTemplateData") or {} - prep_data.update(template_data) - # get anatomy project anatomy = instance.context.data['anatomy'] From e4b23553dffd42aad2ee07b5e207787b1b52c4f8 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 3 Feb 2020 19:26:55 +0100 Subject: [PATCH 006/133] integrate assumed destinatoin replaced with collect resources path --- .../global/publish/collect_resources_path.py | 132 ++++++++++++++++ .../publish/integrate_assumed_destination.py | 147 ------------------ 2 files changed, 132 insertions(+), 147 deletions(-) create mode 100644 pype/plugins/global/publish/collect_resources_path.py delete mode 100644 pype/plugins/global/publish/integrate_assumed_destination.py diff --git a/pype/plugins/global/publish/collect_resources_path.py b/pype/plugins/global/publish/collect_resources_path.py new file mode 100644 index 0000000000..52e926e09c --- /dev/null +++ b/pype/plugins/global/publish/collect_resources_path.py @@ -0,0 +1,132 @@ +import os +import copy + +import pyblish.api +from avalon import io + + +class IntegrateResourcesPath(pyblish.api.InstancePlugin): + """Generate the assumed destination path where the file will be stored""" + + label = "Integrate Prepare Resource" + order = pyblish.api.IntegratorOrder - 0.05 + families = ["clip", "projectfile", "plate"] + + def process(self, instance): + project_entity = instance.context["projectEntity"] + asset_entity = instance.context["assetEntity"] + + template_data = copy.deepcopy(instance.data["anatomyData"]) + + asset_name = instance.data["asset"] + if asset_name != asset_entity["name"]: + asset_entity = io.find_one({ + "type": "asset", + "name": asset_name, + "parent": project_entity["_id"] + }) + assert asset_entity, ( + "No asset found by the name '{}' in project '{}'".format( + asset_name, project_entity["name"] + ) + ) + + instance.data["assetEntity"] = asset_entity + + template_data["name"] = asset_entity["name"] + silo_name = asset_entity.get("silo") + if silo_name: + template_data["silo"] = silo_name + + parents = asset_entity["data"].get("parents") or [] + hierarchy = "/".join(parents) + template_data["hierarchy"] = hierarchy + + subset_name = instance.data["subset"] + self.log.info(subset_name) + + subset = io.find_one({ + "type": "subset", + "name": subset_name, + "parent": asset_entity["_id"] + }) + + # assume there is no version yet, we start at `1` + version = None + version_number = 1 + if subset is not None: + version = io.find_one( + { + "type": "version", + "parent": subset["_id"] + }, + sort=[("name", -1)] + ) + + # if there is a subset there ought to be version + if version is not None: + version_number += version["name"] + + if instance.data.get('version'): + version_number = int(instance.data.get('version')) + + anatomy = instance.context.data["anatomy"] + padding = int(anatomy.templates['render']['padding']) + + template_data.update({ + "subset": subset_name, + "frame": ('#' * padding), + "version": version_number, + "representation": "TEMP" + }) + + anatomy_filled = 
anatomy.format(template_data) + + template_names = ["publish"] + for repre in instance.data["representations"]: + template_name = repre.get("anatomy_template") + if template_name and template_name not in template_names: + template_names.append(template_name) + + resources = instance.data.get("resources", list()) + transfers = instance.data.get("transfers", list()) + + for template_name in template_names: + mock_template = anatomy_filled[template_name]["path"] + + # For now assume resources end up in a "resources" folder in the + # published folder + mock_destination = os.path.join( + os.path.dirname(mock_template), "resources" + ) + + # Clean the path + mock_destination = os.path.abspath( + os.path.normpath(mock_destination) + ).replace("\\", "/") + + # Define resource destination and transfers + for resource in resources: + # Add destination to the resource + source_filename = os.path.basename( + resource["source"]).replace("\\", "/") + destination = os.path.join(mock_destination, source_filename) + + # Force forward slashes to fix issue with software unable + # to work correctly with backslashes in specific scenarios + # (e.g. escape characters in PLN-151 V-Ray UDIM) + destination = destination.replace("\\", "/") + + resource['destination'] = destination + + # Collect transfers for the individual files of the resource + # e.g. all individual files of a cache or UDIM textures. + files = resource['files'] + for fsrc in files: + fname = os.path.basename(fsrc) + fdest = os.path.join( + mock_destination, fname).replace("\\", "/") + transfers.append([fsrc, fdest]) + + instance.data["resources"] = resources + instance.data["transfers"] = transfers diff --git a/pype/plugins/global/publish/integrate_assumed_destination.py b/pype/plugins/global/publish/integrate_assumed_destination.py deleted file mode 100644 index d090e2711a..0000000000 --- a/pype/plugins/global/publish/integrate_assumed_destination.py +++ /dev/null @@ -1,147 +0,0 @@ -import pyblish.api -import os - -from avalon import io, api - - -class IntegrateAssumedDestination(pyblish.api.InstancePlugin): - """Generate the assumed destination path where the file will be stored""" - - label = "Integrate Assumed Destination" - order = pyblish.api.IntegratorOrder - 0.05 - families = ["clip", "projectfile", "plate"] - - def process(self, instance): - - anatomy = instance.context.data['anatomy'] - - self.create_destination_template(instance, anatomy) - - template_data = instance.data["assumedTemplateData"] - # self.log.info(anatomy.templates) - anatomy_filled = anatomy.format(template_data) - - # self.log.info(anatomy_filled) - mock_template = anatomy_filled["publish"]["path"] - - # For now assume resources end up in a "resources" folder in the - # published folder - mock_destination = os.path.join(os.path.dirname(mock_template), - "resources") - - # Clean the path - mock_destination = os.path.abspath( - os.path.normpath(mock_destination)).replace("\\", "/") - - # Define resource destination and transfers - resources = instance.data.get("resources", list()) - transfers = instance.data.get("transfers", list()) - for resource in resources: - - # Add destination to the resource - source_filename = os.path.basename( - resource["source"]).replace("\\", "/") - destination = os.path.join(mock_destination, source_filename) - - # Force forward slashes to fix issue with software unable - # to work correctly with backslashes in specific scenarios - # (e.g. 
escape characters in PLN-151 V-Ray UDIM) - destination = destination.replace("\\", "/") - - resource['destination'] = destination - - # Collect transfers for the individual files of the resource - # e.g. all individual files of a cache or UDIM textures. - files = resource['files'] - for fsrc in files: - fname = os.path.basename(fsrc) - fdest = os.path.join( - mock_destination, fname).replace("\\", "/") - transfers.append([fsrc, fdest]) - - instance.data["resources"] = resources - instance.data["transfers"] = transfers - - def create_destination_template(self, instance, anatomy): - """Create a filepath based on the current data available - - Example template: - {root}/{project}/{asset}/publish/{subset}/v{version:0>3}/ - {subset}.{representation} - Args: - instance: the instance to publish - - Returns: - file path (str) - """ - - # get all the stuff from the database - subset_name = instance.data["subset"] - self.log.info(subset_name) - asset_name = instance.data["asset"] - project_name = api.Session["AVALON_PROJECT"] - a_template = anatomy.templates - - project = io.find_one( - {"type": "project", "name": project_name}, - projection={"config": True, "data": True} - ) - - template = a_template['publish']['path'] - # anatomy = instance.context.data['anatomy'] - - asset = io.find_one({ - "type": "asset", - "name": asset_name, - "parent": project["_id"] - }) - - assert asset, ("No asset found by the name '{}' " - "in project '{}'".format(asset_name, project_name)) - - subset = io.find_one({ - "type": "subset", - "name": subset_name, - "parent": asset["_id"] - }) - - # assume there is no version yet, we start at `1` - version = None - version_number = 1 - if subset is not None: - version = io.find_one( - { - "type": "version", - "parent": subset["_id"] - }, - sort=[("name", -1)] - ) - - # if there is a subset there ought to be version - if version is not None: - version_number += version["name"] - - if instance.data.get('version'): - version_number = int(instance.data.get('version')) - - padding = int(a_template['render']['padding']) - - hierarchy = asset['data']['parents'] - if hierarchy: - # hierarchy = os.path.sep.join(hierarchy) - hierarchy = "/".join(hierarchy) - - template_data = {"root": api.Session["AVALON_PROJECTS"], - "project": {"name": project_name, - "code": project['data']['code']}, - "family": instance.data['family'], - "asset": asset_name, - "subset": subset_name, - "frame": ('#' * padding), - "version": version_number, - "hierarchy": hierarchy, - "representation": "TEMP"} - - instance.data["assumedTemplateData"] = template_data - self.log.info(template_data) - instance.data["template"] = template From f6992a3d44532fac0d11a87c5d7cdfe0a0db715d Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 3 Feb 2020 19:27:20 +0100 Subject: [PATCH 007/133] collector for project data was removed --- .../global/publish/collect_project_data.py | 24 ------------------- .../nukestudio/publish/collect_clips.py | 2 +- 2 files changed, 1 insertion(+), 25 deletions(-) delete mode 100644 pype/plugins/global/publish/collect_project_data.py diff --git a/pype/plugins/global/publish/collect_project_data.py b/pype/plugins/global/publish/collect_project_data.py deleted file mode 100644 index acdbc2c41f..0000000000 --- a/pype/plugins/global/publish/collect_project_data.py +++ /dev/null @@ -1,24 +0,0 @@ -""" -Requires: - None - -Provides: - context -> projectData -""" - -import pyblish.api -import pype.api as pype - - -class CollectProjectData(pyblish.api.ContextPlugin): - """Collecting project data from 
avalon db""" - - label = "Collect Project Data" - order = pyblish.api.CollectorOrder - 0.1 - hosts = ["nukestudio"] - - def process(self, context): - # get project data from avalon db - context.data["projectData"] = pype.get_project()["data"] - - return diff --git a/pype/plugins/nukestudio/publish/collect_clips.py b/pype/plugins/nukestudio/publish/collect_clips.py index 3759d50f6a..82053b6811 100644 --- a/pype/plugins/nukestudio/publish/collect_clips.py +++ b/pype/plugins/nukestudio/publish/collect_clips.py @@ -17,7 +17,7 @@ class CollectClips(api.ContextPlugin): self.log.debug("Created `assetsShared` in context") context.data["assetsShared"] = dict() - projectdata = context.data["projectData"] + projectdata = context.data["projectEntity"]["data"] version = context.data.get("version", "001") sequence = context.data.get("activeSequence") selection = context.data.get("selection") From 5177b891ac5b1b1be0f19c621630be169b08741d Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 3 Feb 2020 19:27:42 +0100 Subject: [PATCH 008/133] extract yeti rig and extract look uses anatomyData --- pype/plugins/maya/publish/extract_look.py | 69 +++++-------------- pype/plugins/maya/publish/extract_yeti_rig.py | 10 +-- 2 files changed, 23 insertions(+), 56 deletions(-) diff --git a/pype/plugins/maya/publish/extract_look.py b/pype/plugins/maya/publish/extract_look.py index fa6ecd72c3..4000011520 100644 --- a/pype/plugins/maya/publish/extract_look.py +++ b/pype/plugins/maya/publish/extract_look.py @@ -1,6 +1,7 @@ import os import sys import json +import copy import tempfile import contextlib import subprocess @@ -333,7 +334,7 @@ class ExtractLook(pype.api.Extractor): anatomy = instance.context.data["anatomy"] - self.create_destination_template(instance, anatomy) + destination_dir = self.create_destination_template(instance, anatomy) # Compute destination location basename, ext = os.path.splitext(os.path.basename(filepath)) @@ -343,7 +344,7 @@ class ExtractLook(pype.api.Extractor): ext = ".tx" return os.path.join( - instance.data["assumedDestination"], "resources", basename + ext + destination_dir, "resources", basename + ext ) def _process_texture(self, filepath, do_maketx, staging, linearise, force): @@ -421,38 +422,17 @@ class ExtractLook(pype.api.Extractor): file path (str) """ - # get all the stuff from the database + asset_entity = instance.context["assetEntity"] + + template_data = copy.deepcopy(instance.data["anatomyData"]) + subset_name = instance.data["subset"] self.log.info(subset_name) - asset_name = instance.data["asset"] - project_name = api.Session["AVALON_PROJECT"] - a_template = anatomy.templates - - project = io.find_one( - { - "type": "project", - "name": project_name - }, - projection={"config": True, "data": True} - ) - - template = a_template["publish"]["path"] - # anatomy = instance.context.data['anatomy'] - - asset = io.find_one({ - "type": "asset", - "name": asset_name, - "parent": project["_id"] - }) - - assert asset, ("No asset found by the name '{}' " - "in project '{}'").format(asset_name, project_name) - silo = asset.get("silo") subset = io.find_one({ "type": "subset", "name": subset_name, - "parent": asset["_id"] + "parent": asset_entity["_id"] }) # assume there is no version yet, we start at `1` @@ -471,33 +451,18 @@ class ExtractLook(pype.api.Extractor): if version is not None: version_number += version["name"] - if instance.data.get("version"): - version_number = int(instance.data.get("version")) + if instance.data.get('version'): + version_number = 
int(instance.data.get('version')) - padding = int(a_template["render"]["padding"]) + anatomy = instance.context.data["anatomy"] + padding = int(anatomy.templates['render']['padding']) - hierarchy = asset["data"]["parents"] - if hierarchy: - # hierarchy = os.path.sep.join(hierarchy) - hierarchy = "/".join(hierarchy) - - template_data = { - "root": api.Session["AVALON_PROJECTS"], - "project": {"name": project_name, "code": project["data"]["code"]}, - "silo": silo, - "family": instance.data["family"], - "asset": asset_name, + template_data.update({ "subset": subset_name, "frame": ("#" * padding), "version": version_number, - "hierarchy": hierarchy, - "representation": "TEMP", - } + "representation": "TEMP" + }) + anatomy_filled = anatomy.format(template_data) - instance.data["assumedTemplateData"] = template_data - self.log.info(template_data) - instance.data["template"] = template - # We take the parent folder of representation 'filepath' - instance.data["assumedDestination"] = os.path.dirname( - anatomy.format(template_data)["publish"]["path"] - ) + return os.path.dirname(anatomy_filled["publish"]["path"]) diff --git a/pype/plugins/maya/publish/extract_yeti_rig.py b/pype/plugins/maya/publish/extract_yeti_rig.py index 892bc0bea6..d390a1365a 100644 --- a/pype/plugins/maya/publish/extract_yeti_rig.py +++ b/pype/plugins/maya/publish/extract_yeti_rig.py @@ -1,6 +1,7 @@ import os import json import contextlib +import copy from maya import cmds @@ -111,11 +112,12 @@ class ExtractYetiRig(pype.api.Extractor): self.log.info("Writing metadata file") # Create assumed destination folder for imageSearchPath - assumed_temp_data = instance.data["assumedTemplateData"] - template = instance.data["template"] - template_formatted = template.format(**assumed_temp_data) + template_data = copy.deepcopy(instance.data["anatomyData"]) - destination_folder = os.path.dirname(template_formatted) + anatomy = instance.context["anatomy"] + filled = anatomy.format(template_data) + + destination_folder = os.path.dir(filled["publish"]["path"]) image_search_path = os.path.join(destination_folder, "resources") image_search_path = os.path.normpath(image_search_path) From f1486a9cd42820684d03c317442a0919b597bfef Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 3 Feb 2020 19:27:59 +0100 Subject: [PATCH 009/133] integrate_new uses anatomyData --- pype/plugins/global/publish/integrate_new.py | 80 ++++++++------------ 1 file changed, 30 insertions(+), 50 deletions(-) diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py index 7d95534897..c6bc1ffbab 100644 --- a/pype/plugins/global/publish/integrate_new.py +++ b/pype/plugins/global/publish/integrate_new.py @@ -2,6 +2,7 @@ import os from os.path import getsize import logging import sys +import copy import clique import errno import pyblish.api @@ -100,12 +101,14 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): def register(self, instance): # Required environment variables - PROJECT = api.Session["AVALON_PROJECT"] - ASSET = instance.data.get("asset") or api.Session["AVALON_ASSET"] - TASK = instance.data.get("task") or api.Session["AVALON_TASK"] - LOCATION = api.Session["AVALON_LOCATION"] + anatomy_data = instance.data["anatomyData"] + asset_entity = instance.data["assetEntity"] + avalon_location = api.Session["AVALON_LOCATION"] + + io.install() context = instance.context + # Atomicity # # Guarantee atomic publishes - each asset contains @@ -140,35 +143,27 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): # stagingdir = 
instance.data.get("stagingDir") if not stagingdir: - self.log.info('''{} is missing reference to staging - directory Will try to get it from - representation'''.format(instance)) + self.log.info(( + "{0} is missing reference to staging directory." + " Will try to get it from representation." + ).format(instance)) - # extra check if stagingDir actually exists and is available - - self.log.debug("Establishing staging directory @ %s" % stagingdir) + else: + self.log.debug( + "Establishing staging directory @ {0}".format(stagingdir) + ) # Ensure at least one file is set up for transfer in staging dir. - repres = instance.data.get("representations", None) + repres = instance.data.get("representations") assert repres, "Instance has no files to transfer" assert isinstance(repres, (list, tuple)), ( - "Instance 'files' must be a list, got: {0}".format(repres) + "Instance 'files' must be a list, got: {0} {1}".format( + str(type(repres)), str(repres) + ) ) - # FIXME: io is not initialized at this point for shell host - io.install() - project = io.find_one({"type": "project"}) - - asset = io.find_one({ - "type": "asset", - "name": ASSET, - "parent": project["_id"] - }) - - assert all([project, asset]), ("Could not find current project or " - "asset '%s'" % ASSET) - - subset = self.get_subset(asset, instance) + intent = context.data.get("intent") + subset = self.get_subset(asset_entity, instance) # get next version latest_version = io.find_one( @@ -229,16 +224,6 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): # \ \________. # \|________| # - root = api.registered_root() - hierarchy = "" - parents = io.find_one({ - "type": 'asset', - "name": ASSET - })['data']['parents'] - if parents and len(parents) > 0: - # hierarchy = os.path.sep.join(hierarchy) - hierarchy = os.path.join(*parents) - anatomy = instance.context.data['anatomy'] # Find the representations to transfer amongst the files @@ -261,20 +246,15 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): # |_______| # # create template data for Anatomy - template_data = {"root": root, - "project": {"name": PROJECT, - "code": project['data']['code']}, - "silo": asset.get('silo'), - "task": TASK, - "asset": ASSET, - "family": instance.data['family'], - "subset": subset["name"], - "version": int(version["name"]), - "hierarchy": hierarchy} + template_data = copy.deepcopy(anatomy_data) + # TODO cleanup this code, should be already in anatomyData + template_data.update({ + "subset": subset["name"], + "version": int(version["name"]) + }) - # Add datetime data to template data - datetime_data = context.data.get("datetimeData") or {} - template_data.update(datetime_data) + if intent is not None: + template_data["intent"] = intent resolution_width = repre.get("resolutionWidth") resolution_height = repre.get("resolutionHeight") @@ -292,6 +272,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): stagingdir = repre['stagingDir'] if repre.get('anatomy_template'): template_name = repre['anatomy_template'] + template = os.path.normpath( anatomy.templates[template_name]["path"]) @@ -322,7 +303,6 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): template_filled = anatomy_filled[template_name]["path"] if repre_context is None: repre_context = template_filled.used_values - test_dest_files.append( os.path.normpath(template_filled) ) From 91d51f145844aed301c3f7a721e807e0dfb154a7 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 3 Feb 2020 19:28:33 +0100 Subject: [PATCH 010/133] removed deprecated validate templates --- 
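Note: the removed validator only smoke-tested anatomy.format() with
hard-coded sample data; the collectors added earlier in this series
exercise the same path with real publish data. A minimal equivalent
check (a sketch only, assuming the "anatomy" and "anatomyData" keys
collected above) could look like:

    # Hypothetical smoke test, not part of this series:
    anatomy = context.data["anatomy"]
    for instance in context:
        filled = anatomy.format(instance.data["anatomyData"])
        assert filled["publish"]["path"], "publish template did not fill"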
.../global/publish/validate_templates.py | 43 ------------------- 1 file changed, 43 deletions(-) delete mode 100644 pype/plugins/global/publish/validate_templates.py diff --git a/pype/plugins/global/publish/validate_templates.py b/pype/plugins/global/publish/validate_templates.py deleted file mode 100644 index f24f6b1a2e..0000000000 --- a/pype/plugins/global/publish/validate_templates.py +++ /dev/null @@ -1,43 +0,0 @@ -import pyblish.api -import os - - -class ValidateTemplates(pyblish.api.ContextPlugin): - """Check if all templates were filled""" - - label = "Validate Templates" - order = pyblish.api.ValidatorOrder - 0.1 - hosts = ["maya", "houdini", "nuke"] - - def process(self, context): - - anatomy = context.data["anatomy"] - if not anatomy: - raise RuntimeError("Did not find anatomy") - else: - data = { - "root": os.environ["PYPE_STUDIO_PROJECTS_PATH"], - "project": {"name": "D001_projectsx", - "code": "prjX"}, - "ext": "exr", - "version": 3, - "task": "animation", - "asset": "sh001", - "app": "maya", - "hierarchy": "ep101/sq01/sh010"} - - anatomy_filled = anatomy.format(data) - self.log.info(anatomy_filled) - - data = {"root": os.environ["PYPE_STUDIO_PROJECTS_PATH"], - "project": {"name": "D001_projectsy", - "code": "prjY"}, - "ext": "abc", - "version": 1, - "task": "lookdev", - "asset": "bob", - "app": "maya", - "hierarchy": "ep101/sq01/bob"} - - anatomy_filled = context.data["anatomy"].format(data) - self.log.info(anatomy_filled["work"]["folder"]) From 670f660a9724a83691913ffbfece7b9ae22cd414 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 4 Feb 2020 10:59:30 +0100 Subject: [PATCH 011/133] changed collect instance anatomy data order to 0.49 --- pype/plugins/global/publish/collect_instance_anatomy_data.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pype/plugins/global/publish/collect_instance_anatomy_data.py b/pype/plugins/global/publish/collect_instance_anatomy_data.py index a1a9278d2a..76ab8dc3f6 100644 --- a/pype/plugins/global/publish/collect_instance_anatomy_data.py +++ b/pype/plugins/global/publish/collect_instance_anatomy_data.py @@ -26,7 +26,7 @@ import pyblish.api class CollectInstanceAnatomyData(pyblish.api.InstancePlugin): """Fill templates with data needed for publish""" - order = pyblish.api.CollectorOrder + 0.1 + order = pyblish.api.CollectorOrder + 0.49 label = "Collect instance anatomy data" hosts = ["maya", "nuke", "standalonepublisher"] From 1a04dca10a7c2af07d779bc1339c941eb5ebf44d Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 4 Feb 2020 11:00:38 +0100 Subject: [PATCH 012/133] asset entity may not exist so collecting instance anatomy data was changed to not crash --- .../publish/collect_instance_anatomy_data.py | 46 ++++++++++--------- 1 file changed, 24 insertions(+), 22 deletions(-) diff --git a/pype/plugins/global/publish/collect_instance_anatomy_data.py b/pype/plugins/global/publish/collect_instance_anatomy_data.py index 76ab8dc3f6..ecef9d10f2 100644 --- a/pype/plugins/global/publish/collect_instance_anatomy_data.py +++ b/pype/plugins/global/publish/collect_instance_anatomy_data.py @@ -53,40 +53,42 @@ class CollectInstanceAnatomyData(pyblish.api.InstancePlugin): instance.context.data["projectEntity"] = project_entity subset_name = instance.data["subset"] - subset_entity = io.find_one({ - "type": "subset", - "name": subset_name, - "parent": asset_entity["_id"] - }) - version_number = instance.data.get("version") - if version_number is None: - version_number = instance.context.data.get("version") - latest_version = None - if 
subset_entity is None:
-            self.log.debug("Subset entity does not exist yet.")
-        else:
-            version_entity = io.find_one(
-                {
-                    "type": "version",
-                    "parent": subset_entity["_id"]
-                },
-                sort=[("name", -1)]
-            )
-            if version_entity:
-                latest_version = version_entity["name"]
+        if asset_entity:
+            subset_entity = io.find_one({
+                "type": "subset",
+                "name": subset_name,
+                "parent": asset_entity["_id"]
+            })
+
+
+            if subset_entity is None:
+                self.log.debug("Subset entity does not exist yet.")
+            else:
+                version_entity = io.find_one(
+                    {
+                        "type": "version",
+                        "parent": subset_entity["_id"]
+                    },
+                    sort=[("name", -1)]
+                )
+                if version_entity:
+                    latest_version = version_entity["name"]
+
+
+        # If version is not specified for instance or context
         if version_number is None:
             # TODO we should be able to change this version by studio
             # preferences (like start with version number `0`)
             version_number = 1
+            # use latest version (+1) if already any exist
             if latest_version is not None:
                 version_number += int(latest_version)

         # Version should not be collected since may be instance
         anatomy_data.update({
-            "asset": asset_entity["name"],
+            "asset": asset_name,
             "family": instance.data["family"],
             "subset": subset_name,
             "version": version_number

From a14b05ccd1b5f75d4ccde0349de1096ec0425592 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Tue, 4 Feb 2020 11:00:48 +0100
Subject: [PATCH 013/133] removed comments
---
 .../global/publish/collect_instance_anatomy_data.py | 8 --------
 1 file changed, 8 deletions(-)

diff --git a/pype/plugins/global/publish/collect_instance_anatomy_data.py b/pype/plugins/global/publish/collect_instance_anatomy_data.py
index ecef9d10f2..838fb1a113 100644
--- a/pype/plugins/global/publish/collect_instance_anatomy_data.py
+++ b/pype/plugins/global/publish/collect_instance_anatomy_data.py
@@ -108,14 +108,6 @@ class CollectInstanceAnatomyData(pyblish.api.InstancePlugin):
         instance.data["anatomyData"] = anatomy_data
         instance.data["latestVersion"] = latest_version

-        # TODO check if template is used anywhere
-        # instance.data["template"] = template
-
-        # TODO we should move this to any Validator
-        # # We take the parent folder of representation 'filepath'
-        # instance.data["assumedDestination"] = os.path.dirname(
-        #     (anatomy.format(template_data))["publish"]["path"]
-        # )
         self.log.info("Instance anatomy Data collected")
         self.log.debug(json.dumps(anatomy_data, indent=4))

From f70f307cc40d95bfa5181e27ef7384108e9fb10b Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Tue, 4 Feb 2020 15:16:35 +0100
Subject: [PATCH 014/133] removed misleading comments
---
 pype/plugins/global/publish/integrate_new.py | 52 --------------------
 1 file changed, 52 deletions(-)

diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py
index c6bc1ffbab..c3a03324aa 100644
--- a/pype/plugins/global/publish/integrate_new.py
+++ b/pype/plugins/global/publish/integrate_new.py
@@ -109,38 +109,6 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):

         context = instance.context

-        # Atomicity
-        #
-        # Guarantee atomic publishes - each asset contains
-        # an identical set of members.
- # __ - # / o - # / \ - # | o | - # \ / - # o __/ - # - # for result in context.data["results"]: - # if not result["success"]: - # self.log.debug(result) - # exc_type, exc_value, exc_traceback = result["error_info"] - # extracted_traceback = traceback.extract_tb(exc_traceback)[-1] - # self.log.debug( - # "Error at line {}: \"{}\"".format( - # extracted_traceback[1], result["error"] - # ) - # ) - # assert all(result["success"] for result in context.data["results"]),( - # "Atomicity not held, aborting.") - - # Assemble - # - # | - # v - # ---> <---- - # ^ - # | - # stagingdir = instance.data.get("stagingDir") if not stagingdir: self.log.info(( @@ -214,16 +182,6 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): version_id = existing_version['_id'] instance.data['version'] = version['name'] - # Write to disk - # _ - # | | - # _| |_ - # ____\ / - # |\ \ / \ - # \ \ v \ - # \ \________. - # \|________| - # anatomy = instance.context.data['anatomy'] # Find the representations to transfer amongst the files @@ -235,16 +193,6 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): instance.data['transfers'] = [] for idx, repre in enumerate(instance.data["representations"]): - - # Collection - # _______ - # |______|\ - # | |\| - # | || - # | || - # | || - # |_______| - # # create template data for Anatomy template_data = copy.deepcopy(anatomy_data) # TODO cleanup this code, should be already in anatomyData From 6ef1a7e17605233bc8ea0dc25e3912d0d0a9dc9d Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 4 Feb 2020 15:17:15 +0100 Subject: [PATCH 015/133] formatting --- pype/plugins/global/publish/integrate_new.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py index c3a03324aa..b71b5fb298 100644 --- a/pype/plugins/global/publish/integrate_new.py +++ b/pype/plugins/global/publish/integrate_new.py @@ -159,10 +159,12 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): if version_data_instance: version_data.update(version_data_instance) - version = self.create_version(subset=subset, - version_number=next_version, - locations=[LOCATION], - data=version_data) + version = self.create_version( + subset=subset, + version_number=next_version, + locations=[avalon_location], + data=version_data + ) self.log.debug("Creating version ...") existing_version = io.find_one({ From 1dcdac7ae051cb359fb481a812027a553e4c79e1 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 4 Feb 2020 15:18:11 +0100 Subject: [PATCH 016/133] asset_entity check moved back since nukestudio instances may have not set value --- pype/plugins/global/publish/integrate_new.py | 26 +++++++++++++++++++- 1 file changed, 25 insertions(+), 1 deletion(-) diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py index b71b5fb298..774a54ea7c 100644 --- a/pype/plugins/global/publish/integrate_new.py +++ b/pype/plugins/global/publish/integrate_new.py @@ -102,13 +102,37 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): def register(self, instance): # Required environment variables anatomy_data = instance.data["anatomyData"] - asset_entity = instance.data["assetEntity"] avalon_location = api.Session["AVALON_LOCATION"] io.install() context = instance.context + project_entity = instance.data["projectEntity"] + + asset_name = instance.data["asset"] + asset_entity = instance.data.get("assetEntity") + if not asset_entity: + asset_entity = io.find_one({ + "type": "asset", + "name": 
asset_name,
+                "parent": project_entity["_id"]
+            })
+
+            assert asset_entity, (
+                "No asset found by the name \"{0}\" in project \"{1}\""
+            ).format(asset_name, project_entity["name"])
+
+            instance.data["assetEntity"] = asset_entity
+
+        # update anatomy data with asset specific keys
+        # - name should already be set
+        hierarchy = ""
+        parents = asset_entity["data"]["parents"]
+        if parents:
+            hierarchy = "/".join(parents)
+        anatomy_data["hierarchy"] = hierarchy
+
         stagingdir = instance.data.get("stagingDir")
         if not stagingdir:
             self.log.info((

From e23cc33de7ad1e9156f1a659a0619e2fc2609f68 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Tue, 4 Feb 2020 15:18:59 +0100
Subject: [PATCH 017/133] latest version is not queried before checking if
 instance already has version set in data
---
 pype/plugins/global/publish/integrate_new.py | 33 +++++++++++---------
 1 file changed, 19 insertions(+), 14 deletions(-)

diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py
index 774a54ea7c..24162c4cf1 100644
--- a/pype/plugins/global/publish/integrate_new.py
+++ b/pype/plugins/global/publish/integrate_new.py
@@ -157,22 +157,27 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
         intent = context.data.get("intent")
         subset = self.get_subset(asset_entity, instance)

-        # get next version
-        latest_version = io.find_one(
-            {
-                "type": "version",
-                "parent": subset["_id"]
-            },
-            {"name": True},
-            sort=[("name", -1)]
-        )
+        # TODO iLLiCiT use "latestVersion" from `instance.data`
+        # and store version in anatomyData instance collector
+        # instead of query again
+        instance_version = instance.data.get('version')
+        if instance_version is not None:
+            next_version = int(instance_version)

-        next_version = 1
-        if latest_version is not None:
-            next_version += latest_version["name"]
+        else:
+            # get next version
+            latest_version = io.find_one(
+                {
+                    "type": "version",
+                    "parent": subset["_id"]
+                },
+                {"name": True},
+                sort=[("name", -1)]
+            )

-        if instance.data.get('version'):
-            next_version = int(instance.data.get('version'))
+            next_version = 1
+            if latest_version is not None:
+                next_version += int(latest_version["name"])

         self.log.debug("Next version: v{0:03d}".format(next_version))

From 29c6768da935380dd499834857b896c64d2b05f5 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Tue, 4 Feb 2020 15:19:19 +0100
Subject: [PATCH 018/133] intent added to anatomy data
---
 pype/plugins/global/publish/integrate_new.py | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py
index 24162c4cf1..093a9e354c 100644
--- a/pype/plugins/global/publish/integrate_new.py
+++ b/pype/plugins/global/publish/integrate_new.py
@@ -154,7 +154,6 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
             )
         )

-        intent = context.data.get("intent")
         subset = self.get_subset(asset_entity, instance)

         # TODO iLLiCiT use "latestVersion" from `instance.data`
@@ -213,6 +212,10 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
             version_id = existing_version['_id']
         instance.data['version'] = version['name']

+        intent = context.data.get("intent")
+        if intent is not None:
+            anatomy_data["intent"] = intent
+
         anatomy = instance.context.data['anatomy']

         # Find the representations to transfer amongst the files

From 3a5ab92687bbf7cc89ade7ec453997d5189e0f64 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Tue, 4 Feb 2020 15:19:44 +0100
Subject: [PATCH 019/133] removed subset and version anatomy update since they
 are already set for whole instance
---
 pype/plugins/global/publish/integrate_new.py | 6 ------
 1 file changed, 6 deletions(-)

diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py
index 093a9e354c..fc7cbf4afa 100644
--- a/pype/plugins/global/publish/integrate_new.py
+++ b/pype/plugins/global/publish/integrate_new.py
@@ -229,12 +229,6 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
         for idx, repre in enumerate(instance.data["representations"]):
             # create template data for Anatomy
             template_data = copy.deepcopy(anatomy_data)
-            # TODO cleanup this code, should be already in anatomyData
-            template_data.update({
-                "subset": subset["name"],
-                "version": int(version["name"])
-            })
-
             if intent is not None:
                 template_data["intent"] = intent

From 9113fb1c7f72b1e1ad7a0e32ac16fcb26cd67139 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Tue, 4 Feb 2020 15:20:18 +0100
Subject: [PATCH 020/133] added check if index_frame_start exists
---
 pype/plugins/global/publish/integrate_new.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py
index fc7cbf4afa..6d85e29732 100644
--- a/pype/plugins/global/publish/integrate_new.py
+++ b/pype/plugins/global/publish/integrate_new.py
@@ -298,7 +298,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
                 index_frame_start = int(repre.get("frameStart"))

             # exception for slate workflow
-            if "slate" in instance.data["families"]:
+            if index_frame_start and "slate" in instance.data["families"]:
                 index_frame_start -= 1

             dst_padding_exp = src_padding_exp

From fde457d445c18d2f87591017df23e3915b8e55b4 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Tue, 4 Feb 2020 15:21:27 +0100
Subject: [PATCH 021/133] intent added to version data
---
 pype/plugins/global/publish/integrate_new.py | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py
index 6d85e29732..5dba744346 100644
--- a/pype/plugins/global/publish/integrate_new.py
+++ b/pype/plugins/global/publish/integrate_new.py
@@ -601,6 +601,10 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
                     "fps": context.data.get(
                         "fps", instance.data.get("fps"))}

+        intent = context.data.get("intent")
+        if intent is not None:
+            version_data["intent"] = intent
+
         # Include optional data if present in
         optionals = [
             "frameStart", "frameEnd", "step", "handles",

From df512a5a4a17d9d8b14ceba2bc60a908eccbfe5e Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Tue, 4 Feb 2020 15:21:52 +0100
Subject: [PATCH 022/133] formatting changes
---
 pype/plugins/global/publish/integrate_new.py | 5 ++---
 1 file changed, 2 insertions(+), 3 deletions(-)

diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py
index 5dba744346..1ff1dfe520 100644
--- a/pype/plugins/global/publish/integrate_new.py
+++ b/pype/plugins/global/publish/integrate_new.py
@@ -331,7 +331,6 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
             if not dst_start_frame:
                 dst_start_frame = dst_padding

-
             dst = "{0}{1}{2}".format(
                 dst_head,
                 dst_start_frame,
@@ -503,14 +502,14 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
             filelink.create(src, dst, filelink.HARDLINK)

     def get_subset(self, asset, instance):
+        subset_name = instance.data["subset"]
         subset = io.find_one({
             "type": "subset",
             "parent": asset["_id"],
-            "name": instance.data["subset"]
+            "name": subset_name
         })

         if subset is None:
-            subset_name = instance.data["subset"]
self.log.info("Subset '%s' not found, creating.." % subset_name)
             self.log.debug("families.  %s" % instance.data.get('families'))
             self.log.debug(

From 6bd8706579b5b1c19ae0ea0c3859e53fdda02013 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Tue, 4 Feb 2020 15:22:06 +0100
Subject: [PATCH 023/133] added few TODOs
---
 pype/plugins/global/publish/integrate_new.py | 5 +++++
 1 file changed, 5 insertions(+)

diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py
index 1ff1dfe520..15165f4217 100644
--- a/pype/plugins/global/publish/integrate_new.py
+++ b/pype/plugins/global/publish/integrate_new.py
@@ -203,6 +203,8 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
         if existing_version is None:
             version_id = io.insert_one(version).inserted_id
         else:
+            # TODO query by _id and
+            # remove old version and representations but keep their ids
             io.update_many({
                 'type': 'version',
                 'parent': subset["_id"],
@@ -304,6 +306,9 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
             dst_padding_exp = src_padding_exp
             dst_start_frame = None
             for i in src_collection.indexes:
+                # TODO 1.) do not count padding in each index iteration
+                # 2.) do not count dst_padding from src_padding before
+                #   index_frame_start check
                 src_padding = src_padding_exp % i

                 src_file_name = "{0}{1}{2}".format(

From 178fed2ae22893670dcfff056c13f44ed64c925b Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Tue, 4 Feb 2020 15:32:12 +0100
Subject: [PATCH 024/133] updated instance input/output docstring
---
 pype/plugins/global/publish/collect_instance_anatomy_data.py | 5 +++++
 1 file changed, 5 insertions(+)

diff --git a/pype/plugins/global/publish/collect_instance_anatomy_data.py b/pype/plugins/global/publish/collect_instance_anatomy_data.py
index 838fb1a113..404480b30b 100644
--- a/pype/plugins/global/publish/collect_instance_anatomy_data.py
+++ b/pype/plugins/global/publish/collect_instance_anatomy_data.py
@@ -8,12 +8,17 @@ Requires:
     instance -> family

 Optional:
+    instance -> version
     instance -> resolutionWidth
     instance -> resolutionHeight
     instance -> fps

 Provides:
+    instance -> projectEntity
+    instance -> assetEntity
     instance -> anatomyData
+    instance -> version
+    instance -> latestVersion
 """

 import copy

From ed8b56b6de17330054b2c9469ea63133a1ed5a36 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Tue, 4 Feb 2020 15:32:54 +0100
Subject: [PATCH 025/133] fixed per key instance.data value assignment
---
 .../global/publish/collect_instance_anatomy_data.py | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/pype/plugins/global/publish/collect_instance_anatomy_data.py b/pype/plugins/global/publish/collect_instance_anatomy_data.py
index 404480b30b..8a98b6cbb2 100644
--- a/pype/plugins/global/publish/collect_instance_anatomy_data.py
+++ b/pype/plugins/global/publish/collect_instance_anatomy_data.py
@@ -54,9 +54,6 @@ class CollectInstanceAnatomyData(pyblish.api.InstancePlugin):
             "parent": project_entity["_id"]
         })

-        instance.context.data["assetEntity"] = asset_entity
-        instance.context.data["projectEntity"] = project_entity
-
         subset_name = instance.data["subset"]
         version_number = instance.data.get("version")
         latest_version = None
@@ -68,7 +65,6 @@ class CollectInstanceAnatomyData(pyblish.api.InstancePlugin):
                 "parent": asset_entity["_id"]
             })

-
             if subset_entity is None:
                 self.log.debug("Subset entity does not exist yet.")
             else:
@@ -84,7 +80,7 @@ class CollectInstanceAnatomyData(pyblish.api.InstancePlugin):

         # If version is not specified for instance or context
         if version_number is None:
-            #
TODO we should be able to change this version by studio + # TODO we should be able to change default version by studio # preferences (like start with version number `0`) version_number = 1 # use latest version (+1) if already any exist @@ -111,8 +107,12 @@ class CollectInstanceAnatomyData(pyblish.api.InstancePlugin): if resolution_height: anatomy_data["fps"] = fps + instance.data["projectEntity"] = project_entity + instance.data["assetEntity"] = asset_entity instance.data["anatomyData"] = anatomy_data instance.data["latestVersion"] = latest_version + # TODO should be version number set here? + instance.data["version"] = version_number self.log.info("Instance anatomy Data collected") self.log.debug(json.dumps(anatomy_data, indent=4)) From 876ff064b6b6c1a941888e43758196525b49872c Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 4 Feb 2020 16:11:29 +0100 Subject: [PATCH 026/133] reduced collect resources path because of already collected data in instance anatomy data --- .../global/publish/collect_resources_path.py | 62 +------------------ 1 file changed, 3 insertions(+), 59 deletions(-) diff --git a/pype/plugins/global/publish/collect_resources_path.py b/pype/plugins/global/publish/collect_resources_path.py index 52e926e09c..de78874cd6 100644 --- a/pype/plugins/global/publish/collect_resources_path.py +++ b/pype/plugins/global/publish/collect_resources_path.py @@ -13,70 +13,14 @@ class IntegrateResourcesPath(pyblish.api.InstancePlugin): families = ["clip", "projectfile", "plate"] def process(self, instance): - project_entity = instance.context["projectEntity"] - asset_entity = instance.context["assetEntity"] - template_data = copy.deepcopy(instance.data["anatomyData"]) - asset_name = instance.data["asset"] - if asset_name != asset_entity["name"]: - asset_entity = io.find_one({ - "type": "asset", - "name": asset_name, - "parent": project_entity["_id"] - }) - assert asset_entity, ( - "No asset found by the name '{}' in project '{}'".format( - asset_name, project_entity["name"] - ) - ) - - instance.data["assetEntity"] = asset_entity - - template_data["name"] = asset_entity["name"] - silo_name = asset_entity.get("silo") - if silo_name: - template_data["silo"] = silo_name - - parents = asset_entity["data"].get("parents") or [] - hierarchy = "/".join(parents) - template_data["hierarchy"] = hierarchy - - subset_name = instance.data["subset"] - self.log.info(subset_name) - - subset = io.find_one({ - "type": "subset", - "name": subset_name, - "parent": asset_entity["_id"] - }) - - # assume there is no version yet, we start at `1` - version = None - version_number = 1 - if subset is not None: - version = io.find_one( - { - "type": "version", - "parent": subset["_id"] - }, - sort=[("name", -1)] - ) - - # if there is a subset there ought to be version - if version is not None: - version_number += version["name"] - - if instance.data.get('version'): - version_number = int(instance.data.get('version')) - anatomy = instance.context.data["anatomy"] - padding = int(anatomy.templates['render']['padding']) + padding = int(anatomy.templates["render"]["padding"]) + # add possible representation specific key to anatomy data template_data.update({ - "subset": subset_name, - "frame": ('#' * padding), - "version": version_number, + "frame": ("#" * padding), "representation": "TEMP" }) From 3fdfcec29bf6b62023fe34a8b1d1b01fe2198edf Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 4 Feb 2020 16:17:18 +0100 Subject: [PATCH 027/133] version_number is used from instance.data in integrate_new --- 
pype/plugins/global/publish/integrate_new.py | 32 +++----------------- 1 file changed, 5 insertions(+), 27 deletions(-) diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py index 15165f4217..aff92ea308 100644 --- a/pype/plugins/global/publish/integrate_new.py +++ b/pype/plugins/global/publish/integrate_new.py @@ -156,40 +156,18 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): subset = self.get_subset(asset_entity, instance) - # TODO iLLiCiT use "latestVersion" from `instance.data` - # and store version in anatomyData instance collector - # instead of query again - instance_version = instance.data.get('version') - if instance_version is not None: - next_version = int(instance_version) - - else: - # get next version - latest_version = io.find_one( - { - "type": "version", - "parent": subset["_id"] - }, - {"name": True}, - sort=[("name", -1)] - ) - - next_version = 1 - if latest_version is not None: - next_version += int(latest_version["name"]) - - self.log.debug("Next version: v{0:03d}".format(next_version)) + version_number = instance.data["version"] + self.log.debug("Next version: v{0:03d}".format(version_number)) version_data = self.create_version_data(context, instance) version_data_instance = instance.data.get('versionData') - if version_data_instance: version_data.update(version_data_instance) version = self.create_version( subset=subset, - version_number=next_version, + version_number=version_number, locations=[avalon_location], data=version_data ) @@ -198,7 +176,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): existing_version = io.find_one({ 'type': 'version', 'parent': subset["_id"], - 'name': next_version + 'name': version_number }) if existing_version is None: version_id = io.insert_one(version).inserted_id @@ -208,7 +186,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): io.update_many({ 'type': 'version', 'parent': subset["_id"], - 'name': next_version + 'name': version_number }, {'$set': version} ) version_id = existing_version['_id'] From ebdc7c3700f17f636573fa45e4ad500f261200f9 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 4 Feb 2020 16:23:03 +0100 Subject: [PATCH 028/133] added few todos --- pype/plugins/global/publish/collect_resources_path.py | 5 +++-- pype/plugins/global/publish/integrate_new.py | 5 ++++- 2 files changed, 7 insertions(+), 3 deletions(-) diff --git a/pype/plugins/global/publish/collect_resources_path.py b/pype/plugins/global/publish/collect_resources_path.py index de78874cd6..fe152584b6 100644 --- a/pype/plugins/global/publish/collect_resources_path.py +++ b/pype/plugins/global/publish/collect_resources_path.py @@ -16,11 +16,12 @@ class IntegrateResourcesPath(pyblish.api.InstancePlugin): template_data = copy.deepcopy(instance.data["anatomyData"]) anatomy = instance.context.data["anatomy"] - padding = int(anatomy.templates["render"]["padding"]) + frame_padding = int(anatomy.templates["render"]["padding"]) # add possible representation specific key to anatomy data + # TODO ability to set host specific "frame" value template_data.update({ - "frame": ("#" * padding), + "frame": ("#" * frame_padding), "representation": "TEMP" }) diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py index aff92ea308..570a093cdc 100644 --- a/pype/plugins/global/publish/integrate_new.py +++ b/pype/plugins/global/publish/integrate_new.py @@ -102,7 +102,6 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): def register(self, instance): # Required 
environment variables
         anatomy_data = instance.data["anatomyData"]
-        avalon_location = api.Session["AVALON_LOCATION"]

         io.install()

@@ -165,6 +164,10 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
         if version_data_instance:
             version_data.update(version_data_instance)

+        # TODO remove avalon_location (shall we?)
+        avalon_location = api.Session["AVALON_LOCATION"]
+        # TODO rename method from `create_version` to
+        # `prepare_version` or similar...
         version = self.create_version(
             subset=subset,
             version_number=version_number,

From 66466bc24bf3ba98f1a715dfbd0fe9352ba6a65c Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Tue, 4 Feb 2020 18:07:17 +0100
Subject: [PATCH 029/133] collect resources path uses anatomy publish.folder
 key with backwards compatibility
---
 .../global/publish/collect_resources_path.py | 95 ++++++++-----------
 1 file changed, 39 insertions(+), 56 deletions(-)

diff --git a/pype/plugins/global/publish/collect_resources_path.py b/pype/plugins/global/publish/collect_resources_path.py
index fe152584b6..9fc8c576f5 100644
--- a/pype/plugins/global/publish/collect_resources_path.py
+++ b/pype/plugins/global/publish/collect_resources_path.py
@@ -1,77 +1,60 @@
+"""
+Requires:
+    context -> anatomy
+    context -> anatomyData
+
+Provides:
+    instance -> publishDir
+    instance -> resourcesDir
+"""
+
 import os
 import copy

 import pyblish.api
-from avalon import io
+from avalon import api


-class IntegrateResourcesPath(pyblish.api.InstancePlugin):
-    """Generate the assumed destination path where the file will be stored"""
+class CollectResourcesPath(pyblish.api.InstancePlugin):
+    """Generate directory path where the files and resources will be stored"""

-    label = "Integrate Prepare Resource"
-    order = pyblish.api.IntegratorOrder - 0.05
-    families = ["clip", "projectfile", "plate"]
+    label = "Collect Resources Path"
+    order = pyblish.api.CollectorOrder + 0.995

     def process(self, instance):
+        anatomy = instance.context.data["anatomy"]
+
         template_data = copy.deepcopy(instance.data["anatomyData"])

-        anatomy = instance.context.data["anatomy"]
-        frame_padding = int(anatomy.templates["render"]["padding"])
-
-        # add possible representation specific key to anatomy data
-        # TODO ability to set host specific "frame" value
+        # This is for cases of Deprecated anatomy without `folder`
+        # TODO remove when all clients have solved this issue
         template_data.update({
-            "frame": ("#" * frame_padding),
+            "frame": "FRAME_TEMP",
             "representation": "TEMP"
         })

         anatomy_filled = anatomy.format(template_data)

-        template_names = ["publish"]
-        for repre in instance.data["representations"]:
-            template_name = repre.get("anatomy_template")
-            if template_name and template_name not in template_names:
-                template_names.append(template_name)
+        if "folder" in anatomy.templates["publish"]:
+            publish_folder = anatomy_filled["publish"]["folder"]
+        else:
+            # solve deprecated situation when `folder` key is not underneath
+            # `publish` anatomy
+            project_name = api.Session["AVALON_PROJECT"]
+            self.log.warning((
+                "Deprecation warning: Anatomy does not have set `folder`"
+                " key underneath `publish` (in global or for project `{}`)."
+            ).format(project_name))

-        resources = instance.data.get("resources", list())
-        transfers = instance.data.get("transfers", list())
+            file_path = anatomy_filled["publish"]["path"]
+            # Directory
+            publish_folder = os.path.dirname(file_path)

-        for template_name in template_names:
-            mock_template = anatomy_filled[template_name]["path"]
+        publish_folder = os.path.normpath(publish_folder)
+        resources_folder = os.path.join(publish_folder, "resources")

-            # For now assume resources end up in a "resources" folder in the
-            # published folder
-            mock_destination = os.path.join(
-                os.path.dirname(mock_template), "resources"
-            )
+        instance.data["publishDir"] = publish_folder
+        instance.data["resourcesDir"] = resources_folder

-            # Clean the path
-            mock_destination = os.path.abspath(
-                os.path.normpath(mock_destination)
-            ).replace("\\", "/")
+        self.log.debug("publishDir: \"{}\"".format(publish_folder))
+        self.log.debug("resourcesDir: \"{}\"".format(resources_folder))

-            # Define resource destination and transfers
-            for resource in resources:
-                # Add destination to the resource
-                source_filename = os.path.basename(
-                    resource["source"]).replace("\\", "/")
-                destination = os.path.join(mock_destination, source_filename)
-
-                # Force forward slashes to fix issue with software unable
-                # to work correctly with backslashes in specific scenarios
-                # (e.g. escape characters in PLN-151 V-Ray UDIM)
-                destination = destination.replace("\\", "/")
-
-                resource['destination'] = destination
-
-                # Collect transfers for the individual files of the resource
-                # e.g. all individual files of a cache or UDIM textures.
-                files = resource['files']
-                for fsrc in files:
-                    fname = os.path.basename(fsrc)
-                    fdest = os.path.join(
-                        mock_destination, fname).replace("\\", "/")
-                    transfers.append([fsrc, fdest])
-
-        instance.data["resources"] = resources
-        instance.data["transfers"] = transfers

From 6f26d0160ce62817843d935b10ba2a937e715a38 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Tue, 4 Feb 2020 18:11:32 +0100
Subject: [PATCH 030/133] integrated assumed destination was moved back with
 name integrate resources path
---
 .../publish/integrate_resources_path.py | 49 +++++++++++++++++++
 1 file changed, 49 insertions(+)
 create mode 100644 pype/plugins/global/publish/integrate_resources_path.py

diff --git a/pype/plugins/global/publish/integrate_resources_path.py b/pype/plugins/global/publish/integrate_resources_path.py
new file mode 100644
index 0000000000..56dc0e5ef7
--- /dev/null
+++ b/pype/plugins/global/publish/integrate_resources_path.py
@@ -0,0 +1,49 @@
+import os
+import pyblish.api
+
+
+class IntegrateResourcesPath(pyblish.api.InstancePlugin):
+    """Generate directory path where the files and resources will be stored"""
+
+    label = "Integrate Resources Path"
+    order = pyblish.api.IntegratorOrder - 0.05
+    families = ["clip", "projectfile", "plate"]
+
+    def process(self, instance):
+        resources = instance.data.get("resources") or []
+        transfers = instance.data.get("transfers") or []
+
+        if not resources and not transfers:
+            self.log.debug(
+                "Instance does not have `resources` and `transfers`"
+            )
+            return
+
+        resources_folder = instance.data["resourcesDir"]
+
+        # Define resource destination and transfers
+        for resource in resources:
+            # Add destination to the resource
+            source_filename = os.path.basename(
+                resource["source"]).replace("\\", "/")
+            destination = os.path.join(resources_folder, source_filename)
+
+            # Force forward slashes to fix issue with software unable
+            # to work correctly with backslashes in specific scenarios
+            #
(e.g. escape characters in PLN-151 V-Ray UDIM) + destination = destination.replace("\\", "/") + + resource['destination'] = destination + + # Collect transfers for the individual files of the resource + # e.g. all individual files of a cache or UDIM textures. + files = resource['files'] + for fsrc in files: + fname = os.path.basename(fsrc) + fdest = os.path.join( + resources_folder, fname + ).replace("\\", "/") + transfers.append([fsrc, fdest]) + + instance.data["resources"] = resources + instance.data["transfers"] = transfers From fcffa08177efd97ffc08bbf3520eb2be6a8d02f6 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 4 Feb 2020 18:27:03 +0100 Subject: [PATCH 031/133] extract look uses `resourcesDir` instead of computing itself --- pype/plugins/maya/publish/extract_look.py | 63 +---------------------- 1 file changed, 2 insertions(+), 61 deletions(-) diff --git a/pype/plugins/maya/publish/extract_look.py b/pype/plugins/maya/publish/extract_look.py index 4000011520..58196433aa 100644 --- a/pype/plugins/maya/publish/extract_look.py +++ b/pype/plugins/maya/publish/extract_look.py @@ -331,10 +331,9 @@ class ExtractLook(pype.api.Extractor): maya_path)) def resource_destination(self, instance, filepath, do_maketx): - anatomy = instance.context.data["anatomy"] - destination_dir = self.create_destination_template(instance, anatomy) + resources_dir = instance.data["resourcesDir"] # Compute destination location basename, ext = os.path.splitext(os.path.basename(filepath)) @@ -344,7 +343,7 @@ class ExtractLook(pype.api.Extractor): ext = ".tx" return os.path.join( - destination_dir, "resources", basename + ext + resources_dir, basename + ext ) def _process_texture(self, filepath, do_maketx, staging, linearise, force): @@ -408,61 +407,3 @@ class ExtractLook(pype.api.Extractor): return converted, COPY, texture_hash return filepath, COPY, texture_hash - - def create_destination_template(self, instance, anatomy): - """Create a filepath based on the current data available - - Example template: - {root}/{project}/{silo}/{asset}/publish/{subset}/v{version:0>3}/ - {subset}.{representation} - Args: - instance: the instance to publish - - Returns: - file path (str) - """ - - asset_entity = instance.context["assetEntity"] - - template_data = copy.deepcopy(instance.data["anatomyData"]) - - subset_name = instance.data["subset"] - self.log.info(subset_name) - - subset = io.find_one({ - "type": "subset", - "name": subset_name, - "parent": asset_entity["_id"] - }) - - # assume there is no version yet, we start at `1` - version = None - version_number = 1 - if subset is not None: - version = io.find_one( - { - "type": "version", - "parent": subset["_id"] - }, - sort=[("name", -1)] - ) - - # if there is a subset there ought to be version - if version is not None: - version_number += version["name"] - - if instance.data.get('version'): - version_number = int(instance.data.get('version')) - - anatomy = instance.context.data["anatomy"] - padding = int(anatomy.templates['render']['padding']) - - template_data.update({ - "subset": subset_name, - "frame": ("#" * padding), - "version": version_number, - "representation": "TEMP" - }) - anatomy_filled = anatomy.format(template_data) - - return os.path.dirname(anatomy_filled["publish"]["path"]) From e92537d34a9c63b7cf09f1b4a46f11c30d76e90d Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 4 Feb 2020 18:27:34 +0100 Subject: [PATCH 032/133] extract effects can compute resources dir with anatomyData (need changes) --- .../nukestudio/publish/extract_effects.py | 182 
+++++++++--------- 1 file changed, 96 insertions(+), 86 deletions(-) diff --git a/pype/plugins/nukestudio/publish/extract_effects.py b/pype/plugins/nukestudio/publish/extract_effects.py index 15d2a80a55..9e43bee1c8 100644 --- a/pype/plugins/nukestudio/publish/extract_effects.py +++ b/pype/plugins/nukestudio/publish/extract_effects.py @@ -2,10 +2,12 @@ import os import json import re +import copy import pyblish.api import tempfile from avalon import io, api + class ExtractVideoTracksLuts(pyblish.api.InstancePlugin): """Collect video tracks effects into context.""" @@ -71,9 +73,11 @@ class ExtractVideoTracksLuts(pyblish.api.InstancePlugin): ) data["source"] = data["sourcePath"] + # WARNING instance should not be created in Extractor! # create new instance instance = instance.context.create_instance(**data) - + # TODO replace line below with `instance.data["resourcesDir"]` + # when instance is created during collection part dst_dir = self.resource_destination_dir(instance) # change paths in effects to files @@ -141,103 +145,109 @@ class ExtractVideoTracksLuts(pyblish.api.InstancePlugin): return (v, dst) def resource_destination_dir(self, instance): - anatomy = instance.context.data['anatomy'] - self.create_destination_template(instance, anatomy) + # WARNING this is from `collect_instance_anatomy_data.py` + anatomy_data = copy.deepcopy(instance.context.data["anatomyData"]) + project_entity = instance.context.data["projectEntity"] + context_asset_entity = instance.context.data["assetEntity"] - return os.path.join( - instance.data["assumedDestination"], - "resources" - ) - - def create_destination_template(self, instance, anatomy): - """Create a filepath based on the current data available - - Example template: - {root}/{project}/{silo}/{asset}/publish/{subset}/v{version:0>3}/ - {subset}.{representation} - Args: - instance: the instance to publish - - Returns: - file path (str) - """ - - # get all the stuff from the database - subset_name = instance.data["subset"] - self.log.info(subset_name) asset_name = instance.data["asset"] - project_name = api.Session["AVALON_PROJECT"] - a_template = anatomy.templates + if context_asset_entity["name"] == asset_name: + asset_entity = context_asset_entity - project = io.find_one( - { - "type": "project", - "name": project_name - }, - projection={"config": True, "data": True} - ) + else: + asset_entity = io.find_one({ + "type": "asset", + "name": asset_name, + "parent": project_entity["_id"] + }) - template = a_template['publish']['path'] - # anatomy = instance.context.data['anatomy'] + subset_name = instance.data["subset"] + version_number = instance.data.get("version") + latest_version = None - asset = io.find_one({ - "type": "asset", - "name": asset_name, - "parent": project["_id"] + if asset_entity: + subset_entity = io.find_one({ + "type": "subset", + "name": subset_name, + "parent": asset_entity["_id"] + }) + + if subset_entity is None: + self.log.debug("Subset entity does not exist yet.") + else: + version_entity = io.find_one( + { + "type": "version", + "parent": subset_entity["_id"] + }, + sort=[("name", -1)] + ) + if version_entity: + latest_version = version_entity["name"] + + if version_number is None: + version_number = 1 + if latest_version is not None: + version_number += int(latest_version) + + anatomy_data.update({ + "asset": asset_name, + "family": instance.data["family"], + "subset": subset_name, + "version": version_number }) - assert asset, ("No asset found by the name '{}' " - "in project '{}'".format(asset_name, project_name)) - silo = 
asset.get('silo')
+        resolution_width = instance.data.get("resolutionWidth")
+        if resolution_width:
+            anatomy_data["resolution_width"] = resolution_width

-        subset = io.find_one({
-            "type": "subset",
-            "name": subset_name,
-            "parent": asset["_id"]
+        resolution_height = instance.data.get("resolutionHeight")
+        if resolution_height:
+            anatomy_data["resolution_height"] = resolution_height
+
+        fps = instance.data.get("fps")
+        if fps:
+            anatomy_data["fps"] = fps
+
+        instance.data["projectEntity"] = project_entity
+        instance.data["assetEntity"] = asset_entity
+        instance.data["anatomyData"] = anatomy_data
+        instance.data["latestVersion"] = latest_version
+        instance.data["version"] = version_number
+
+        # WARNING this is from `collect_resources_path.py`
+        anatomy = instance.context.data["anatomy"]
+
+        template_data = copy.deepcopy(instance.data["anatomyData"])
+
+        # This is for cases of Deprecated anatomy without `folder`
+        # TODO remove when all clients have solved this issue
+        template_data.update({
+            "frame": "FRAME_TEMP",
+            "representation": "TEMP"
         })

-        # assume there is no version yet, we start at `1`
-        version = None
-        version_number = 1
-        if subset is not None:
-            version = io.find_one(
-                {
-                    "type": "version",
-                    "parent": subset["_id"]
-                },
-                sort=[("name", -1)]
-            )
+        anatomy_filled = anatomy.format(template_data)

-            # if there is a subset there ought to be version
-            if version is not None:
-                version_number += version["name"]
+        if "folder" in anatomy.templates["publish"]:
+            publish_folder = anatomy_filled["publish"]["folder"]
+        else:
+            # solve deprecated situation when `folder` key is not underneath
+            # `publish` anatomy
+            project_name = api.Session["AVALON_PROJECT"]
+            self.log.warning((
+                "Deprecation warning: Anatomy does not have set `folder`"
+                " key underneath `publish` (in global or for project `{}`)."
+ ).format(project_name)) - if instance.data.get('version'): - version_number = int(instance.data.get('version')) + file_path = anatomy_filled["publish"]["path"] + # Directory + publish_folder = os.path.dirname(file_path) - padding = int(a_template['render']['padding']) + publish_folder = os.path.normpath(publish_folder) + resources_folder = os.path.join(publish_folder, "resources") - hierarchy = asset['data']['parents'] - if hierarchy: - # hierarchy = os.path.sep.join(hierarchy) - hierarchy = "/".join(hierarchy) + instance.data["publishDir"] = publish_folder + instance.data["resourcesDir"] = resources_folder - template_data = {"root": api.Session["AVALON_PROJECTS"], - "project": {"name": project_name, - "code": project['data']['code']}, - "silo": silo, - "family": instance.data['family'], - "asset": asset_name, - "subset": subset_name, - "frame": ('#' * padding), - "version": version_number, - "hierarchy": hierarchy, - "representation": "TEMP"} - - instance.data["assumedTemplateData"] = template_data - self.log.info(template_data) - instance.data["template"] = template - # We take the parent folder of representation 'filepath' - instance.data["assumedDestination"] = os.path.dirname( - anatomy.format(template_data)["publish"]["path"] - ) + return resources_folder From f6e6220869a53f7411b55e03468761a9f5f7c323 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 4 Feb 2020 18:54:58 +0100 Subject: [PATCH 033/133] extract yeti rig uses resourcesDir --- pype/plugins/maya/publish/extract_yeti_rig.py | 12 +----------- 1 file changed, 1 insertion(+), 11 deletions(-) diff --git a/pype/plugins/maya/publish/extract_yeti_rig.py b/pype/plugins/maya/publish/extract_yeti_rig.py index d390a1365a..70a509564f 100644 --- a/pype/plugins/maya/publish/extract_yeti_rig.py +++ b/pype/plugins/maya/publish/extract_yeti_rig.py @@ -1,7 +1,6 @@ import os import json import contextlib -import copy from maya import cmds @@ -111,16 +110,7 @@ class ExtractYetiRig(pype.api.Extractor): self.log.info("Writing metadata file") - # Create assumed destination folder for imageSearchPath - template_data = copy.deepcopy(instance.data["anatomyData"]) - - anatomy = instance.context["anatomy"] - filled = anatomy.format(template_data) - - destination_folder = os.path.dir(filled["publish"]["path"]) - - image_search_path = os.path.join(destination_folder, "resources") - image_search_path = os.path.normpath(image_search_path) + image_search_path = resources_dir = instance.data["resourcesDir"] settings = instance.data.get("rigsettings", None) if settings: From f6ae5b2213b6ee21f2e27f8a2a347a669259cc12 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 5 Feb 2020 09:59:25 +0100 Subject: [PATCH 034/133] task name is also checked --- .../global/publish/collect_instance_anatomy_data.py | 12 +++++++++--- pype/plugins/global/publish/integrate_new.py | 4 ++++ 2 files changed, 13 insertions(+), 3 deletions(-) diff --git a/pype/plugins/global/publish/collect_instance_anatomy_data.py b/pype/plugins/global/publish/collect_instance_anatomy_data.py index 8a98b6cbb2..9c6a8b08f2 100644 --- a/pype/plugins/global/publish/collect_instance_anatomy_data.py +++ b/pype/plugins/global/publish/collect_instance_anatomy_data.py @@ -87,13 +87,19 @@ class CollectInstanceAnatomyData(pyblish.api.InstancePlugin): if latest_version is not None: version_number += int(latest_version) - # Version should not be collected since may be instance - anatomy_data.update({ + anatomy_updates = { "asset": asset_name, "family": instance.data["family"], "subset": subset_name, "version": 
version_number - }) + } + + task_name = instance.data.get("task") + if task_name: + anatomy_updates["task"] = task_name + + # Version should not be collected since may be instance + anatomy_data.update(anatomy_updates) resolution_width = instance.data.get("resolutionWidth") if resolution_width: diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py index 570a093cdc..d27582bb71 100644 --- a/pype/plugins/global/publish/integrate_new.py +++ b/pype/plugins/global/publish/integrate_new.py @@ -132,6 +132,10 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): hierarchy = "/".join(parents) anatomy_data["hierarchy"] = hierarchy + task_name = instance.data.get("task") + if task_name: + anatomy_data["task"] = task_name + stagingdir = instance.data.get("stagingDir") if not stagingdir: self.log.info(( From 75b603d845fe44d6ba5f39268137ca0f6128763e Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 6 Feb 2020 10:46:03 +0100 Subject: [PATCH 035/133] removed add datetime and add frame numbers --- pype/scripts/otio_burnin.py | 38 ------------------------------------- 1 file changed, 38 deletions(-) diff --git a/pype/scripts/otio_burnin.py b/pype/scripts/otio_burnin.py index f128352974..aca848dcfa 100644 --- a/pype/scripts/otio_burnin.py +++ b/pype/scripts/otio_burnin.py @@ -132,44 +132,6 @@ class ModifiedBurnins(ffmpeg_burnins.Burnins): options = ffmpeg_burnins.TextOptions(**self.options_init) self._add_burnin(text, align, options, ffmpeg_burnins.DRAWTEXT) - def add_datetime(self, date_format, align, options=None): - """ - Adding date text to a filter. Using pythons datetime module. - - :param str date_format: format of date (e.g. `%d.%m.%Y`) - :param enum align: alignment, must use provided enum flags - :param dict options: recommended to use TextOptions - """ - if not options: - options = ffmpeg_burnins.TextOptions(**self.options_init) - today = datetime.datetime.today() - text = today.strftime(date_format) - self._add_burnin(text, align, options, ffmpeg_burnins.DRAWTEXT) - - def add_frame_numbers( - self, align, options=None, start_frame=None, text=None - ): - """ - Convenience method to create the frame number expression. - - :param enum align: alignment, must use provided enum flags - :param dict options: recommended to use FrameNumberOptions - """ - if not options: - options = ffmpeg_burnins.FrameNumberOptions(**self.options_init) - if start_frame: - options['frame_offset'] = start_frame - - expr = r'%%{eif\:n+%d\:d}' % options['frame_offset'] - _text = str(int(self.end_frame + options['frame_offset'])) - if text and isinstance(text, str): - text = r"{}".format(text) - expr = text.replace("{current_frame}", expr) - text = text.replace("{current_frame}", _text) - - options['expression'] = expr - self._add_burnin(text, align, options, ffmpeg_burnins.DRAWTEXT) - def add_timecode(self, align, options=None, start_frame=None): """ Convenience method to create the frame number expression. 
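NOTE: The patches that follow (036-049) rework `otio_burnin.py` so that a burnin preset maps an alignment key to a plain text value, with optional `{current_frame}` and `{timecode}` formatting keys resolved from the entered data. For orientation, here is a minimal usage sketch of the reworked module. It is not part of the patch series: the import path and codec values are assumptions, while the `burnins_from_data` signature follows PATCH 049 and the preset/data keys follow the docstrings in PATCH 039 and 047.

    # Minimal sketch, not part of the patches. Presets themselves are loaded
    # by the module from studio configuration (mechanism not shown here).
    # Example preset shape after the rework (alignment key -> plain text):
    # {
    #     "options": {...},                      # look options (font, size, ...)
    #     "burnins": {
    #         "top_left": "{shot}",              # static text formatted from data
    #         "bottom_left": "TC: {timecode}",   # needs frame_start / frame_start_tc
    #         "bottom_right": "{current_frame}"  # needs frame_start
    #     }
    # }
    from pype.scripts.otio_burnin import burnins_from_data  # import path assumed

    burnins_from_data(
        "/path/to/input.mov",
        "/path/to/output.mov",
        {
            "frame_start": 1001,   # first frame of the media
            "frame_start_tc": 1,   # timecode should start at frame 1
            "shot": "sh0010"
        },
        codec_data=["-codec:v", "prores_ks"],  # optional ffmpeg args, assumed values
        overwrite=True
    )

Any formatting key missing from the entered data is replaced with "N/A" (MISSING_KEY_VALUE) rather than failing, per PATCH 038 and 048.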
From 8c75c74cdec4c08225723f9f8328046999588735 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Thu, 6 Feb 2020 10:49:59 +0100
Subject: [PATCH 036/133] print command before run
---
 pype/scripts/otio_burnin.py | 10 +++++++---
 1 file changed, 7 insertions(+), 3 deletions(-)

diff --git a/pype/scripts/otio_burnin.py b/pype/scripts/otio_burnin.py
index aca848dcfa..b9d10ca23a 100644
--- a/pype/scripts/otio_burnin.py
+++ b/pype/scripts/otio_burnin.py
@@ -226,9 +226,13 @@ class ModifiedBurnins(ffmpeg_burnins.Burnins):

         is_sequence = "%" in output

-        command = self.command(output=output,
-                               args=args,
-                               overwrite=overwrite)
+        command = self.command(
+            output=output,
+            args=args,
+            overwrite=overwrite
+        )
+        print(command)
+
         proc = Popen(command, shell=True)
         proc.communicate()
         if proc.returncode != 0:

From 8e86f6e37a0b6fb4ce794e67372b65d49f85b813 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Thu, 6 Feb 2020 10:50:18 +0100
Subject: [PATCH 037/133] implemented custom drawtext and timecode constants
---
 pype/scripts/otio_burnin.py | 9 +++++++++
 1 file changed, 9 insertions(+)

diff --git a/pype/scripts/otio_burnin.py b/pype/scripts/otio_burnin.py
index b9d10ca23a..00d63939e7 100644
--- a/pype/scripts/otio_burnin.py
+++ b/pype/scripts/otio_burnin.py
@@ -27,6 +27,15 @@ FFPROBE = (
     '{} -v quiet -print_format json -show_format -show_streams %(source)s'
 ).format(os.path.normpath(ffmpeg_path + "ffprobe"))

+DRAWTEXT = (
+    "drawtext=text=\\'%(text)s\\':x=%(x)s:y=%(y)s:fontcolor="
+    "%(color)s@%(opacity).1f:fontsize=%(size)d:fontfile='%(font)s'"
+)
+TIMECODE = (
+    "drawtext=text=\\'%(text)s\\':timecode=\\'%(timecode)s\\'"
+    ":timecode_rate=%(fps).2f:x=%(x)s:y=%(y)s:fontcolor="
+    "%(color)s@%(opacity).1f:fontsize=%(size)d:fontfile='%(font)s'"
+)

 def _streams(source):
     """Reimplemented from otio burnins to be able use full path to ffprobe
     :param str source: source media file

From b73fcc6730e7b3367988a7aa636f363b60d82204 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Thu, 6 Feb 2020 10:50:51 +0100
Subject: [PATCH 038/133] added constants for easier handling of entered keys
---
 pype/scripts/otio_burnin.py | 5 +++++
 1 file changed, 5 insertions(+)

diff --git a/pype/scripts/otio_burnin.py b/pype/scripts/otio_burnin.py
index 00d63939e7..e0df769db4 100644
--- a/pype/scripts/otio_burnin.py
+++ b/pype/scripts/otio_burnin.py
@@ -37,6 +37,11 @@ TIMECODE = (
     "%(color)s@%(opacity).1f:fontsize=%(size)d:fontfile='%(font)s'"
 )

+MISSING_KEY_VALUE = "N/A"
+CURRENT_FRAME_KEY = "{current_frame}"
+TIME_CODE_KEY = "{timecode}"
+
+
 def _streams(source):
     """Reimplemented from otio burnins to be able use full path to ffprobe
     :param str source: source media file

From 4f862acfb8af3b12315008bd1e95b773fd58fc56 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Thu, 6 Feb 2020 10:51:09 +0100
Subject: [PATCH 039/133] modified docstring
---
 pype/scripts/otio_burnin.py | 28 ++++++++--------------------
 1 file changed, 8 insertions(+), 20 deletions(-)

diff --git a/pype/scripts/otio_burnin.py b/pype/scripts/otio_burnin.py
index e0df769db4..79565af22a 100644
--- a/pype/scripts/otio_burnin.py
+++ b/pype/scripts/otio_burnin.py
@@ -307,34 +307,22 @@ def burnins_from_data(input_path, codec_data, output_path, data, overwrite=True)
     - each key of "burnins" represents Alignment, there are 6 possibilities:
         TOP_LEFT        TOP_CENTERED        TOP_RIGHT
         BOTTOM_LEFT     BOTTOM_CENTERED     BOTTOM_RIGHT
+    - value must be string with text you want to burn-in
+    - text may contain specific formatting keys (explained below)

     Requirement of *data* keys is based on presets.
-    - "start_frame" - is required when "timecode" or "frame_numbers" function is used
-    - "start_frame_tc" - when "timecode" should start with different frame
+    - "frame_start" - is required when "timecode" or "current_frame" is in keys
+    - "frame_start_tc" - when "timecode" should start with different frame
     - *keys for static text*

     EXAMPLE:
     preset = {
         "options": {*OPTIONS FOR LOOK*},
         "burnins": {
-            "TOP_LEFT": {
-                "function": "text",
-                "text": "static_text"
-            },
-            "TOP_RIGHT": {
-                "function": "text",
-                "text": "{shot}"
-            },
-            "BOTTOM_LEFT": {
-                "function": "timecode"
-            },
-            "BOTTOM_RIGHT": {
-                "function": "frame_numbers"
-            }
+            "TOP_LEFT": "static_text",
+            "TOP_RIGHT": "{shot}",
+            "BOTTOM_LEFT": "TC: {timecode}",
+            "BOTTOM_RIGHT": "{frame_start}{current_frame}"
         }
     }

From d263cc3bfd0029b788d8ce4ff1bea405765bf3ef Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Thu, 6 Feb 2020 10:51:37 +0100
Subject: [PATCH 040/133] data variable in __main__ was renamed to in_data to
 not be overridden during processing
---
 pype/scripts/otio_burnin.py | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/pype/scripts/otio_burnin.py b/pype/scripts/otio_burnin.py
index 79565af22a..9564982980 100644
--- a/pype/scripts/otio_burnin.py
+++ b/pype/scripts/otio_burnin.py
@@ -439,10 +439,10 @@ def burnins_from_data(input_path, codec_data, output_path, data, overwrite=True)
 if __name__ == '__main__':
     import sys
     import json
-    data = json.loads(sys.argv[-1])
+    in_data = json.loads(sys.argv[-1])
     burnins_from_data(
-        data['input'],
-        data['codec'],
-        data['output'],
-        data['burnin_data']
+        in_data['input'],
+        in_data['codec'],
+        in_data['output'],
+        in_data['burnin_data']
     )

From be088579be01d7d5db473133d7c49f245aeec10c Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Thu, 6 Feb 2020 10:52:26 +0100
Subject: [PATCH 041/133] alignment check lowercases the string (it is
 possible to use `top_left` in presets)
---
 pype/scripts/otio_burnin.py | 13 +++++++------
 1 file changed, 7 insertions(+), 6 deletions(-)

diff --git a/pype/scripts/otio_burnin.py b/pype/scripts/otio_burnin.py
index 9564982980..85e72245cd 100644
--- a/pype/scripts/otio_burnin.py
+++ b/pype/scripts/otio_burnin.py
@@ -359,17 +359,18 @@ def burnins_from_data(input_path, codec_data, output_path, data, overwrite=True)
     for align_text, preset in presets.get('burnins', {}).items():
         align = None
-        if align_text == 'TOP_LEFT':
+        align_text = align_text.strip().lower()
+        if align_text == "top_left":
             align = ModifiedBurnins.TOP_LEFT
-        elif align_text == 'TOP_CENTERED':
+        elif align_text == "top_centered":
             align = ModifiedBurnins.TOP_CENTERED
-        elif align_text == 'TOP_RIGHT':
+        elif align_text == "top_right":
             align = ModifiedBurnins.TOP_RIGHT
-        elif align_text == 'BOTTOM_LEFT':
+        elif align_text == "bottom_left":
             align = ModifiedBurnins.BOTTOM_LEFT
-        elif align_text == 'BOTTOM_CENTERED':
+        elif align_text == "bottom_centered":
             align = ModifiedBurnins.BOTTOM_CENTERED
-        elif align_text == 'BOTTOM_RIGHT':
+        elif align_text == "bottom_right":
             align = ModifiedBurnins.BOTTOM_RIGHT

         bi_func = preset.get('function')

From ca19b5d6798ef3535544b36340cea82a26ba7ff5 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Thu, 6 Feb 2020 10:56:51 +0100
Subject: [PATCH 042/133] add_text can accept frame_start argument
---
 pype/scripts/otio_burnin.py | 8 ++++++--
 1 file changed, 6 insertions(+), 2 deletions(-)

diff --git a/pype/scripts/otio_burnin.py b/pype/scripts/otio_burnin.py
index 85e72245cd..d913baa5e2 100644
--- a/pype/scripts/otio_burnin.py
+++ b/pype/scripts/otio_burnin.py
@@ -134,17 +134,21 @@ class ModifiedBurnins(ffmpeg_burnins.Burnins):
         if options_init:
             self.options_init.update(options_init)

-    def add_text(self, text, align, options=None):
+    def add_text(self, text, align, frame_start=None, options=None):
         """
         Adding static text to a filter.

         :param str text: text to apply to the drawtext
         :param enum align: alignment, must use provided enum flags
+        :param int frame_start: starting frame for burnins
         :param dict options: recommended to use TextOptions
         """
         if not options:
             options = ffmpeg_burnins.TextOptions(**self.options_init)
-        self._add_burnin(text, align, options, ffmpeg_burnins.DRAWTEXT)
+
+        options = options.copy()
+        if frame_start:
+            options["frame_offset"] = frame_start

From 5d5d3eec92d892ddae1845cbabada0847c739471 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Thu, 6 Feb 2020 10:57:11 +0100
Subject: [PATCH 043/133] add_text uses custom DRAWTEXT ffmpeg string
---
 pype/scripts/otio_burnin.py | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/pype/scripts/otio_burnin.py b/pype/scripts/otio_burnin.py
index d913baa5e2..be4ec3e57d 100644
--- a/pype/scripts/otio_burnin.py
+++ b/pype/scripts/otio_burnin.py
@@ -150,6 +150,8 @@ class ModifiedBurnins(ffmpeg_burnins.Burnins):
         if frame_start:
             options["frame_offset"] = frame_start

+        self._add_burnin(text, align, options, DRAWTEXT)
+
     def add_timecode(self, align, options=None, start_frame=None):
         """
         Convenience method to create the frame number expression.

From 96d3e51d9200cf04e4b63705a727d381c48a286e Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Thu, 6 Feb 2020 11:00:29 +0100
Subject: [PATCH 044/133] add timecode allows adding text and uses custom
 TIMECODE ffmpeg string
---
 pype/scripts/otio_burnin.py | 39 +++++++++++++++++++++++--------------
 1 file changed, 24 insertions(+), 15 deletions(-)

diff --git a/pype/scripts/otio_burnin.py b/pype/scripts/otio_burnin.py
index be4ec3e57d..67b85f9ba4 100644
--- a/pype/scripts/otio_burnin.py
+++ b/pype/scripts/otio_burnin.py
@@ -140,7 +140,7 @@ class ModifiedBurnins(ffmpeg_burnins.Burnins):

         :param str text: text to apply to the drawtext
         :param enum align: alignment, must use provided enum flags
-        :param int frame_start: starting frame for burnins
+        :param int frame_start: starting frame for burnins current frame
         :param dict options: recommended to use TextOptions
         """
         if not options:
@@ -152,32 +152,41 @@ class ModifiedBurnins(ffmpeg_burnins.Burnins):

         self._add_burnin(text, align, options, DRAWTEXT)

-    def add_timecode(self, align, options=None, start_frame=None):
+    def add_timecode(
+        self, align, frame_start=None, frame_start_tc=None, text=None,
+        options=None
+    ):
         """
         Convenience method to create the frame number expression.
        :param enum align: alignment, must use provided enum flags
+        :param int frame_start: starting frame for burnins current frame
+        :param int frame_start_tc: starting frame for burnins timecode
+        :param str text: text that will be before timecode
         :param dict options: recommended to use TimeCodeOptions
         """
         if not options:
             options = ffmpeg_burnins.TimeCodeOptions(**self.options_init)
-        if start_frame:
-            options['frame_offset'] = start_frame

-        timecode = ffmpeg_burnins._frames_to_timecode(
-            options['frame_offset'],
+        options = options.copy()
+        if frame_start:
+            options["frame_offset"] = frame_start
+
+        if not frame_start_tc:
+            frame_start_tc = options["frame_offset"]
+
+        if not text:
+            text = ""
+
+        if not options.get("fps"):
+            options["fps"] = self.frame_rate
+
+        options["timecode"] = ffmpeg_burnins._frames_to_timecode(
+            frame_start_tc,
             self.frame_rate
         )

-        options = options.copy()
-        if not options.get('fps'):
-            options['fps'] = self.frame_rate
-
-        self._add_burnin(
-            timecode.replace(':', r'\:'),
-            align,
-            options,
-            ffmpeg_burnins.TIMECODE
-        )
+        self._add_burnin(text, align, options, TIMECODE)

     def _add_burnin(self, text, align, options, draw):
         """

From defe60e5566ec8a251802636430843650a9115d4 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Thu, 6 Feb 2020 11:01:37 +0100
Subject: [PATCH 045/133] add burnin does not use expression but only text
---
 pype/scripts/otio_burnin.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pype/scripts/otio_burnin.py b/pype/scripts/otio_burnin.py
index 67b85f9ba4..39bf963342 100644
--- a/pype/scripts/otio_burnin.py
+++ b/pype/scripts/otio_burnin.py
@@ -197,7 +197,7 @@ class ModifiedBurnins(ffmpeg_burnins.Burnins):
         """
         resolution = self.resolution
         data = {
-            'text': options.get('expression') or text,
+            'text': text,
             'color': options['font_color'],
             'size': options['font_size']
         }

From ca2279e710dcf15e8545c3a904027508d9989435 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Thu, 6 Feb 2020 11:02:01 +0100
Subject: [PATCH 046/133] _drawtext must count text sizes including timecode
 text
---
 pype/scripts/otio_burnin.py | 6 +++++-
 1 file changed, 5 insertions(+), 1 deletion(-)

diff --git a/pype/scripts/otio_burnin.py b/pype/scripts/otio_burnin.py
index 39bf963342..4c1301becf 100644
--- a/pype/scripts/otio_burnin.py
+++ b/pype/scripts/otio_burnin.py
@@ -201,8 +201,12 @@ class ModifiedBurnins(ffmpeg_burnins.Burnins):
             'color': options['font_color'],
             'size': options['font_size']
         }
+        timecode_text = options.get("timecode") or ""
+        text_for_size = text + timecode_text
         data.update(options)
-        data.update(ffmpeg_burnins._drawtext(align, resolution, text, options))
+        data.update(
+            ffmpeg_burnins._drawtext(align, resolution, text_for_size, options)
+        )
         if 'font' in data and ffmpeg_burnins._is_windows():
             data['font'] = data['font'].replace(os.sep, r'\\' + os.sep)
             data['font'] = data['font'].replace(':', r'\:')

From 39e785aefb6e4a48b5a8ea215a06070c11c2f425 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Thu, 6 Feb 2020 11:02:14 +0100
Subject: [PATCH 047/133] docstring changes
---
 pype/scripts/otio_burnin.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/pype/scripts/otio_burnin.py b/pype/scripts/otio_burnin.py
index 4c1301becf..73de2f2827 100644
--- a/pype/scripts/otio_burnin.py
+++ b/pype/scripts/otio_burnin.py
@@ -347,14 +347,14 @@ def burnins_from_data(input_path, codec_data, output_path, data, overwrite=True)
     For this preset we'll need at least this data:
     data = {
-        "start_frame": 1001,
+        "frame_start": 1001,
         "shot": "sh0010"
     }

    When Timecode
should start from 1 then data need: data = { - "start_frame": 1001, - "start_frame_tc": 1, + "frame_start": 1001, + "frame_start_tc": 1, "shot": "sh0010" } ''' From 9a8c3b56a22cf333909e8dad8fc064a9164c0d1e Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 6 Feb 2020 11:02:59 +0100 Subject: [PATCH 048/133] changed data processing to not use functions but only text --- pype/scripts/otio_burnin.py | 99 +++++++++++++++++-------------------- 1 file changed, 45 insertions(+), 54 deletions(-) diff --git a/pype/scripts/otio_burnin.py b/pype/scripts/otio_burnin.py index 73de2f2827..e7464cdc7c 100644 --- a/pype/scripts/otio_burnin.py +++ b/pype/scripts/otio_burnin.py @@ -368,15 +368,27 @@ def burnins_from_data(input_path, codec_data, output_path, data, overwrite=True) stream = burnin._streams[0] if "resolution_width" not in data: - data["resolution_width"] = stream.get("width", "Unknown") + data["resolution_width"] = stream.get("width", MISSING_KEY_VALUE) if "resolution_height" not in data: - data["resolution_height"] = stream.get("height", "Unknown") + data["resolution_height"] = stream.get("height", MISSING_KEY_VALUE) if "fps" not in data: data["fps"] = get_fps(stream.get("r_frame_rate", "0/0")) - for align_text, preset in presets.get('burnins', {}).items(): + # Check frame start and add expression if is available + if frame_start is not None: + data[CURRENT_FRAME_KEY] = r'%%{eif\:n+%d\:d}' % frame_start + + if frame_start_tc is not None: + data[TIME_CODE_KEY[1:-1]] = TIME_CODE_KEY + + for align_text, value in presets.get('burnins', {}).items(): + if not value: + continue + + has_timecode = TIME_CODE_KEY in value + align = None align_text = align_text.strip().lower() if align_text == "top_left": @@ -392,65 +404,44 @@ def burnins_from_data(input_path, codec_data, output_path, data, overwrite=True) elif align_text == "bottom_right": align = ModifiedBurnins.BOTTOM_RIGHT - bi_func = preset.get('function') - if not bi_func: - log.error( - 'Missing function for burnin!' - 'Burnins are not created!' + # Replace with missing key value if frame_start_tc is not set + if frame_start_tc is None and has_timecode: + has_timecode = False + log.warning( + "`frame_start` and `frame_start_tc`" + " are not set in entered data." ) - return + value = value.replace(TIME_CODE_KEY, MISSING_KEY_VALUE) - if ( - bi_func in ['frame_numbers', 'timecode'] and - frame_start is None - ): - log.error( - 'start_frame is not set in entered data!' - 'Burnins are not created!' - ) - return + key_pattern = re.compile(r"(\{.*?[^{0]*\})") - if bi_func == 'frame_numbers': - current_frame_identifier = "{current_frame}" - text = preset.get('text') or current_frame_identifier + missing_keys = [] + for group in key_pattern.findall(value): + try: + group.format(**data) + except (TypeError, KeyError): + missing_keys.append(group) - if current_frame_identifier not in text: - log.warning(( - 'Text for Frame numbers don\'t have ' - '`{current_frame}` key in text!' 
-                ))
+        missing_keys = list(set(missing_keys))
+        for key in missing_keys:
+            value = value.replace(key, MISSING_KEY_VALUE)
 
-            text_items = []
-            split_items = text.split(current_frame_identifier)
-            for item in split_items:
-                text_items.append(item.format(**data))
+        # Handle timecode differently
+        if has_timecode:
+            args = [align, frame_start, frame_start_tc]
+            if not value.startswith(TIME_CODE_KEY):
+                value_items = value.split(TIME_CODE_KEY)
+                text = value_items[0].format(**data)
+                args.append(value_items[0])
 
-            text = "{current_frame}".join(text_items)
+            burnin.add_timecode(*args)
+            continue
 
-            burnin.add_frame_numbers(align, start_frame=frame_start, text=text)
+        text = value.format(**data)
+        burnin.add_text(text, align, frame_start)
 
-        elif bi_func == 'timecode':
-            burnin.add_timecode(align, start_frame=frame_start_tc)
-
-        elif bi_func == 'text':
-            if not preset.get('text'):
-                log.error('Text is not set for text function burnin!')
-                return
-            text = preset['text'].format(**data)
-            burnin.add_text(text, align)
-
-        elif bi_func == "datetime":
-            date_format = preset["format"]
-            burnin.add_datetime(date_format, align)
-
-        else:
-            log.error(
-                'Unknown function for burnins {}'.format(bi_func)
-            )
-            return
-
-    codec_args = ''
-    if codec_data is not []:
+    codec_args = ""
+    if codec_data:
         codec_args = " ".join(codec_data)
 
     burnin.render(output_path, args=codec_args, overwrite=overwrite, **data)

From 1033f779d1a72d33365ec197b398a6f41cf478f9 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Thu, 6 Feb 2020 11:03:58 +0100
Subject: [PATCH 049/133] codec moved to optional args because it is optional

---
 pype/scripts/otio_burnin.py | 8 +++++---
 1 file changed, 5 insertions(+), 3 deletions(-)

diff --git a/pype/scripts/otio_burnin.py b/pype/scripts/otio_burnin.py
index 73de2f2827..e7464cdc7c 100644
--- a/pype/scripts/otio_burnin.py
+++ b/pype/scripts/otio_burnin.py
@@ -302,7 +302,9 @@ def example(input_path, output_path):
     burnin.render(output_path, overwrite=True)
 
 
-def burnins_from_data(input_path, codec_data, output_path, data, overwrite=True):
+def burnins_from_data(
+    input_path, output_path, data, codec_data=None, overwrite=True
+):
     '''
     This method adds burnins to video/image file based on presets setting.
     Extension of output MUST be same as input. (mov -> mov, avi -> avi,...)
@@ -453,7 +455,7 @@ if __name__ == '__main__':
     in_data = json.loads(sys.argv[-1])
     burnins_from_data(
         in_data['input'],
-        in_data['codec'],
         in_data['output'],
-        in_data['burnin_data']
+        in_data['burnin_data'],
+        in_data['codec']
     )

From f19235f91e4492331f04df281049d8984716fcdd Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Thu, 6 Feb 2020 11:04:08 +0100
Subject: [PATCH 050/133] added forgotten import

---
 pype/scripts/otio_burnin.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/pype/scripts/otio_burnin.py b/pype/scripts/otio_burnin.py
index bc45e45f82..8a95542c04 100644
--- a/pype/scripts/otio_burnin.py
+++ b/pype/scripts/otio_burnin.py
@@ -1,4 +1,5 @@
 import os
+import re
 import datetime
 import subprocess
 import json

From 6be774b1f8716471e28beb2659f3d27750df6f4e Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Thu, 6 Feb 2020 11:04:43 +0100
Subject: [PATCH 051/133] removed imports from __main__

---
 pype/scripts/otio_burnin.py | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/pype/scripts/otio_burnin.py b/pype/scripts/otio_burnin.py
index 8a95542c04..6c1e19690b 100644
--- a/pype/scripts/otio_burnin.py
+++ b/pype/scripts/otio_burnin.py
@@ -1,4 +1,5 @@
 import os
+import sys
 import re
 import datetime
 import subprocess
 import json
@@ -451,8 +452,6 @@ def burnins_from_data(
 
 
 if __name__ == '__main__':
-    import sys
-    import json
     in_data = json.loads(sys.argv[-1])
     burnins_from_data(
         in_data['input'],

From a2d07a89a9fa19b007c0565459df4973bbf1710d Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Thu, 6 Feb 2020 11:06:31 +0100
Subject: [PATCH 052/133] removed deprecated method usage in example

---
 pype/scripts/otio_burnin.py | 4 ----
 1 file changed, 4 deletions(-)

diff --git a/pype/scripts/otio_burnin.py b/pype/scripts/otio_burnin.py
index 6c1e19690b..590939df56 100644
--- a/pype/scripts/otio_burnin.py
+++ b/pype/scripts/otio_burnin.py
@@ -296,10 +296,6 @@ def example(input_path, output_path):
     burnin.add_text('My Text', ModifiedBurnins.TOP_CENTERED)
     # Datetime
     burnin.add_text('%d-%m-%y', ModifiedBurnins.TOP_RIGHT)
-    # Frame number
-    burnin.add_frame_numbers(ModifiedBurnins.TOP_RIGHT, start_frame=start_frame)
-    # Timecode
-    burnin.add_timecode(ModifiedBurnins.TOP_LEFT, start_frame=start_frame)
     # Start render (overwrite output file if exist)
     burnin.render(output_path, overwrite=True)

From ae387d09778607ec56b12c2d9d75a9e74740786a Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Fri, 7 Feb 2020 11:39:22 +0100
Subject: [PATCH 053/133] added subprocess for status

---
 pype/ftrack/ftrack_server/sub_event_info.py | 411 ++++++++++++++++++++
 1 file changed, 411 insertions(+)
 create mode 100644 pype/ftrack/ftrack_server/sub_event_info.py

diff --git a/pype/ftrack/ftrack_server/sub_event_info.py b/pype/ftrack/ftrack_server/sub_event_info.py
new file mode 100644
index 0000000000..d63b6acadd
--- /dev/null
+++ b/pype/ftrack/ftrack_server/sub_event_info.py
@@ -0,0 +1,411 @@
+import os
+import sys
+import copy
+import signal
+import socket
+import uuid
+from datetime import datetime
+
+import ftrack_api
+from ftrack_server import FtrackServer
+from pype.ftrack.ftrack_server.lib import (
+    SocketSession, SocketBaseEventHub,
+    TOPIC_STATUS_SERVER, TOPIC_STATUS_SERVER_RESULT
+)
+from pypeapp import Logger
+
+log = Logger().get_logger("Event storer")
+log.info(os.environ.get("FTRACK_EVENT_SUB_ID"))
+
+
+class ObjectFactory:
+    session = None
+    sock = None
+    subprocess_id = os.environ["FTRACK_EVENT_SUB_ID"]
+    status_factory = None
+
+
+def trigger_status_info(status_id=None, status=None):
+    if not status and not status_id:
+
log.warning( + "`status_id` or `status` must be specified to trigger action." + ) + return + + if not status: + status = ObjectFactory.status_factory[status_id] + + if not status: + return + + new_event_data = copy.deepcopy(action_data) + new_event_data.update({ + "selection": [] + }) + new_event_data["subprocess_id"] = ObjectFactory.subprocess_id + new_event_data["status_id"] = status.id + + new_event = ftrack_api.event.base.Event( + topic="ftrack.action.launch", + data=new_event_data, + source=status.source + ) + ObjectFactory.session.event_hub.publish(new_event) + + +action_identifier = ( + "event.server.status" + ObjectFactory.subprocess_id +) + +# TODO add IP adress to label +# TODO add icon +action_data = { + "label": "Pype Admin", + "variant": "Event server Status", + "description": "Get Infromation about event server", + "actionIdentifier": action_identifier, + "icon": None +} + + +class Status: + default_item = { + "type": "label", + "value": "Information not allowed." + } + note_item = { + "type": "label", + "value": "Hit `submit` to refresh data." + } + splitter_item = { + "type": "label", + "value": "---" + } + + def __init__(self, source_info, parent): + self.id = str(uuid.uuid1()) + self.created = datetime.now() + self.parent = parent + + self.source = source_info + + self.main_process = None + self.storer = None + self.processor = None + + def add_result(self, source, data): + if source.lower() == "storer": + self.storer = data + + elif source.lower() == "processor": + self.processor = data + + else: + self.main_process = data + + def filled(self): + # WARNING DEBUG PART!!!! + return True + return ( + self.main_process is not None and + self.storer is not None and + self.processor is not None + ) + + def get_items_from_dict(self, in_dict): + items = [] + for key, value in in_dict.items(): + items.append({ + "type": "label", + "value": "##{}".format(key) + }) + items.append({ + "type": "label", + "value": value + }) + return items + + def bool_items(self): + items = [] + name_labels = { + "shutdown_main": "Shutdown main process", + "reset_storer": "Reset storer", + "reset_processor": "Reset processor" + } + for name, label in name_labels.items(): + items.append({ + "type": "boolean", + "value": False, + "label": label, + "name": name + }) + return items + + def items(self): + items = [] + items.append(self.note_item) + + items.append({"type": "label", "value": "Main process"}) + if not self.main_process: + items.append(self.default_item) + else: + items.extend( + self.get_items_from_dict(self.main_process) + ) + + items.append(self.splitter_item) + items.append({"type": "label", "value": "Storer process"}) + if not self.storer: + items.append(self.default_item) + else: + items.extend( + self.get_items_from_dict(self.storer) + ) + + items.append(self.splitter_item) + items.append({"type": "label", "value": "Processor process"}) + if not self.processor: + items.append(self.default_item) + else: + items.extend( + self.get_items_from_dict(self.processor) + ) + + items.append(self.splitter_item) + items.extend(self.bool_items()) + + return items + + @property + def is_overtime(self): + time_delta = (datetime.now() - self.created).total_seconds() + return time_delta >= self.parent.max_delta_seconds + + +class StatusFactory: + max_delta_seconds = 30 + + def __init__(self): + self.statuses = {} + + def __getitem__(self, key): + return self.statuses.get(key) + + def create_status(self, source_info): + new_status = Status(source_info, self) + self.statuses[new_status.id] = new_status + 
return new_status + + def process_result(self, event): + subprocess_id = event["data"].get("subprocess_id") + if subprocess_id != ObjectFactory.subprocess_id: + return + + status_id = event["data"].get("status_id") + status = self.statuses[status_id] + if not status: + return + + source = event["data"]["source"] + data = event["data"]["status_info"] + + status.add_result(source, data) + if status.filled(): + trigger_status_info(status=status) + + +def server_activity_validate_user(event): + """Validate user permissions to show server info.""" + session = ObjectFactory.session + + username = event["source"].get("user", {}).get("username") + if not username: + return False + + user_ent = session.query( + "User where username = \"{}\"".format(username) + ).first() + if not user_ent: + return False + + role_list = ["Pypeclub", "Administrator"] + for role in user_ent["user_security_roles"]: + if role["security_role"]["name"] in role_list: + return True + return False + + +def server_activity_discover(event): + """Discover action in actions menu conditions.""" + session = ObjectFactory.session + if session is None: + return + + if not server_activity_validate_user(event): + return + + return {"items": [action_data]} + + +def handle_filled_event(event): + subprocess_id = event["data"].get("subprocess_id") + if subprocess_id != ObjectFactory.subprocess_id: + return None + + status_id = event["data"].get("status_id") + status = ObjectFactory.status_factory[status_id] + if not status: + return None + + values = event.get("values") + if values: + log.info(values) + + title = "Event server - Status" + + event_data = copy.deepcopy(event["data"]) + event_data.update({ + "type": "widget", + "items": status.items(), + "title": title + }) + + ObjectFactory.session.event_hub.publish( + ftrack_api.event.base.Event( + topic="ftrack.action.trigger-user-interface", + data=event_data + ), + on_error='ignore' + ) + + +def server_activity(event): + session = ObjectFactory.session + if session is None: + msg = "Session is not set. Can't trigger Reset action." + log.warning(msg) + return { + "success": False, + "message": msg + } + + valid = server_activity_validate_user(event) + if not valid: + return { + "success": False, + "message": "You don't have permissions to see Event server status!" 
+ } + + subprocess_id = event["data"].get("subprocess_id") + if subprocess_id is not None: + return handle_filled_event(event) + + status = ObjectFactory.status_factory.create_status(event["source"]) + + event_data = { + "status_id": status.id, + "subprocess_id": ObjectFactory.subprocess_id + } + session.event_hub.publish( + ftrack_api.event.base.Event( + topic=TOPIC_STATUS_SERVER, + data=event_data + ), + on_error="ignore" + ) + + return { + "success": True, + "message": "Collecting information (this may take > 20s)" + } + + +def register(session): + '''Registers the event, subscribing the discover and launch topics.''' + session.event_hub.subscribe( + "topic=ftrack.action.discover", + server_activity_discover + ) + + status_launch_subscription = ( + "topic=ftrack.action.launch and data.actionIdentifier={}" + ).format(action_identifier) + + session.event_hub.subscribe( + status_launch_subscription, + server_activity + ) + + session.event_hub.subscribe( + "topic={}".format(TOPIC_STATUS_SERVER_RESULT), + ObjectFactory.status_factory.process_result + ) + + +def main(args): + port = int(args[-1]) + + # Create a TCP/IP socket + sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + + # Connect the socket to the port where the server is listening + server_address = ("localhost", port) + log.debug("Storer connected to {} port {}".format(*server_address)) + sock.connect(server_address) + sock.sendall(b"CreatedStatus") + # store socket connection object + ObjectFactory.sock = sock + ObjectFactory.status_factory = StatusFactory() + + _returncode = 0 + try: + session = SocketSession( + auto_connect_event_hub=True, sock=sock, Eventhub=SocketBaseEventHub + ) + ObjectFactory.session = session + register(session) + server = FtrackServer("event") + log.debug("Launched Ftrack Event storer") + server.run_server(session, load_files=False) + + except Exception: + _returncode = 1 + log.error("ServerInfo subprocess crashed", exc_info=True) + + finally: + log.debug("Ending. Closing socket.") + sock.close() + return _returncode + + +if __name__ == "__main__": + # Register interupt signal + def signal_handler(sig, frame): + print("You pressed Ctrl+C. 
Process ended.") + sys.exit(0) + + signal.signal(signal.SIGINT, signal_handler) + signal.signal(signal.SIGTERM, signal_handler) + + sys.exit(main(sys.argv)) + + +example_action_event = { + 'data': { + 'selection': [], + 'description': 'Test action2', + 'variant': None, + 'label': 'Test action2', + 'actionIdentifier': 'test.action2.3ceffe5e9acf40f8aa80603adebd0d06', + 'values': {}, + 'icon': None, + }, + 'topic': 'ftrack.action.launch', + 'sent': None, + 'source': { + 'id': 'eb67d186301c4cbbab73c1aee9b7c55d', + 'user': {'username': 'jakub.trllo', 'id': '2a8ae090-cbd3-11e8-a87a-0a580aa00121'} + }, + 'target': '', + 'in_reply_to_event': None +} From c937964dc8c80b54b95d5059670f845a83f4ca82 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 7 Feb 2020 12:13:22 +0100 Subject: [PATCH 054/133] added subprocess to event server cli --- pype/ftrack/ftrack_server/event_server_cli.py | 46 +++++++++++++++++++ 1 file changed, 46 insertions(+) diff --git a/pype/ftrack/ftrack_server/event_server_cli.py b/pype/ftrack/ftrack_server/event_server_cli.py index b09b0bc84e..b2c540e993 100644 --- a/pype/ftrack/ftrack_server/event_server_cli.py +++ b/pype/ftrack/ftrack_server/event_server_cli.py @@ -7,6 +7,7 @@ import socket import argparse import atexit import time +import uuid import ftrack_api from pype.ftrack.lib import credentials @@ -175,6 +176,7 @@ def main_loop(ftrack_url): otherwise thread will be killed. """ + os.environ["FTRACK_EVENT_SUB_ID"] = str(uuid.uuid1()) # Get mongo hostname and port for testing mongo connection mongo_list = ftrack_events_mongo_settings() mongo_hostname = mongo_list[0] @@ -202,6 +204,13 @@ def main_loop(ftrack_url): processor_last_failed = datetime.datetime.now() processor_failed_count = 0 + statuser_name = "StorerThread" + statuser_port = 10021 + statuser_path = "{}/sub_event_info.py".format(file_path) + statuser_thread = None + statuser_last_failed = datetime.datetime.now() + statuser_failed_count = 0 + ftrack_accessible = False mongo_accessible = False @@ -336,6 +345,43 @@ def main_loop(ftrack_url): processor_failed_count = 0 processor_last_failed = _processor_last_failed + if statuser_thread is None: + if statuser_failed_count < max_fail_count: + statuser_thread = socket_thread.SocketThread( + statuser_name, statuser_port, statuser_path + ) + statuser_thread.start() + + elif statuser_failed_count == max_fail_count: + print(( + "Statuser failed {}times in row" + " I'll try to run again {}s later" + ).format(str(max_fail_count), str(wait_time_after_max_fail))) + statuser_failed_count += 1 + + elif (( + datetime.datetime.now() - statuser_last_failed + ).seconds > wait_time_after_max_fail): + statuser_failed_count = 0 + + # If thread failed test Ftrack and Mongo connection + elif not statuser_thread.isAlive(): + statuser_thread.join() + statuser_thread = None + ftrack_accessible = False + mongo_accessible = False + + _processor_last_failed = datetime.datetime.now() + delta_time = ( + _processor_last_failed - statuser_last_failed + ).seconds + + if delta_time < min_fail_seconds: + statuser_failed_count += 1 + else: + statuser_failed_count = 0 + statuser_last_failed = _processor_last_failed + time.sleep(1) From fa60c87c3e0f9e9261dd9b9e5c8b4188c50e0b4f Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 7 Feb 2020 18:28:29 +0100 Subject: [PATCH 055/133] created base EventHub that can set callbacks on heartbeat and set message for sockets on heartbeat --- pype/ftrack/ftrack_server/lib.py | 20 +++++++++++++++----- 1 file changed, 15 insertions(+), 5 deletions(-) diff --git 
a/pype/ftrack/ftrack_server/lib.py b/pype/ftrack/ftrack_server/lib.py index fefba580e0..2617b63614 100644 --- a/pype/ftrack/ftrack_server/lib.py +++ b/pype/ftrack/ftrack_server/lib.py @@ -123,20 +123,30 @@ def check_ftrack_url(url, log_errors=True): return url -class StorerEventHub(ftrack_api.event.hub.EventHub): +class SocketBaseEventHub(ftrack_api.event.hub.EventHub): + + hearbeat_msg = b"hearbeat" + heartbeat_callbacks = [] + def __init__(self, *args, **kwargs): self.sock = kwargs.pop("sock") - super(StorerEventHub, self).__init__(*args, **kwargs) + super(SocketBaseEventHub, self).__init__(*args, **kwargs) def _handle_packet(self, code, packet_identifier, path, data): """Override `_handle_packet` which extend heartbeat""" code_name = self._code_name_mapping[code] if code_name == "heartbeat": # Reply with heartbeat. - self.sock.sendall(b"storer") - return self._send_packet(self._code_name_mapping['heartbeat']) + for callback in self.heartbeat_callbacks: + callback() + + self.sock.sendall(self.hearbeat_msg) + return self._send_packet(self._code_name_mapping["heartbeat"]) + + return super(SocketBaseEventHub, self)._handle_packet( + code, packet_identifier, path, data + ) - elif code_name == "connect": event = ftrack_api.event.base.Event( topic="pype.storer.started", data={}, From 24022c583651f16d70b210e340472be523c447d8 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 7 Feb 2020 18:28:44 +0100 Subject: [PATCH 056/133] Status event hub implemented --- pype/ftrack/ftrack_server/lib.py | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) diff --git a/pype/ftrack/ftrack_server/lib.py b/pype/ftrack/ftrack_server/lib.py index 2617b63614..71ce6861a4 100644 --- a/pype/ftrack/ftrack_server/lib.py +++ b/pype/ftrack/ftrack_server/lib.py @@ -147,6 +147,25 @@ class SocketBaseEventHub(ftrack_api.event.hub.EventHub): code, packet_identifier, path, data ) + +class StatusEventHub(SocketBaseEventHub): + def _handle_packet(self, code, packet_identifier, path, data): + """Override `_handle_packet` which extend heartbeat""" + code_name = self._code_name_mapping[code] + if code_name == "connect": + event = ftrack_api.event.base.Event( + topic="pype.status.started", + data={}, + source={ + "id": self.id, + "user": {"username": self._api_user} + } + ) + self._event_queue.put(event) + + return super(StatusEventHub, self)._handle_packet( + code, packet_identifier, path, data + ) event = ftrack_api.event.base.Event( topic="pype.storer.started", data={}, From a97c73258e349291ae8f0899f37ac7ec9a8c13b5 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 7 Feb 2020 18:29:01 +0100 Subject: [PATCH 057/133] removed user event hub --- pype/ftrack/ftrack_server/lib.py | 29 -------------------- pype/ftrack/ftrack_server/sub_user_server.py | 4 +-- 2 files changed, 2 insertions(+), 31 deletions(-) diff --git a/pype/ftrack/ftrack_server/lib.py b/pype/ftrack/ftrack_server/lib.py index 71ce6861a4..57c5b7d5dc 100644 --- a/pype/ftrack/ftrack_server/lib.py +++ b/pype/ftrack/ftrack_server/lib.py @@ -296,35 +296,6 @@ class ProcessEventHub(ftrack_api.event.hub.EventHub): return super()._handle_packet(code, packet_identifier, path, data) -class UserEventHub(ftrack_api.event.hub.EventHub): - def __init__(self, *args, **kwargs): - self.sock = kwargs.pop("sock") - super(UserEventHub, self).__init__(*args, **kwargs) - - def _handle_packet(self, code, packet_identifier, path, data): - """Override `_handle_packet` which extend heartbeat""" - code_name = self._code_name_mapping[code] - if code_name == "heartbeat": - # Reply with 
heartbeat. - self.sock.sendall(b"hearbeat") - return self._send_packet(self._code_name_mapping['heartbeat']) - - elif code_name == "connect": - event = ftrack_api.event.base.Event( - topic="pype.storer.started", - data={}, - source={ - "id": self.id, - "user": {"username": self._api_user} - } - ) - self._event_queue.put(event) - - return super(UserEventHub, self)._handle_packet( - code, packet_identifier, path, data - ) - - class SocketSession(ftrack_api.session.Session): '''An isolated session for interaction with an ftrack server.''' def __init__( diff --git a/pype/ftrack/ftrack_server/sub_user_server.py b/pype/ftrack/ftrack_server/sub_user_server.py index f0d39447a8..8c1497a562 100644 --- a/pype/ftrack/ftrack_server/sub_user_server.py +++ b/pype/ftrack/ftrack_server/sub_user_server.py @@ -5,7 +5,7 @@ import socket import traceback from ftrack_server import FtrackServer -from pype.ftrack.ftrack_server.lib import SocketSession, UserEventHub +from pype.ftrack.ftrack_server.lib import SocketSession, SocketBaseEventHub from pypeapp import Logger @@ -28,7 +28,7 @@ def main(args): try: session = SocketSession( - auto_connect_event_hub=True, sock=sock, Eventhub=UserEventHub + auto_connect_event_hub=True, sock=sock, Eventhub=SocketBaseEventHub ) server = FtrackServer("action") log.debug("Launched User Ftrack Server") From 526f9282d1e4136b44eab6e5505b1adf23e4af5b Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 7 Feb 2020 18:29:24 +0100 Subject: [PATCH 058/133] storer and processor eventhubs are modified --- pype/ftrack/ftrack_server/lib.py | 18 +++++++++++++----- 1 file changed, 13 insertions(+), 5 deletions(-) diff --git a/pype/ftrack/ftrack_server/lib.py b/pype/ftrack/ftrack_server/lib.py index 57c5b7d5dc..478bede6ef 100644 --- a/pype/ftrack/ftrack_server/lib.py +++ b/pype/ftrack/ftrack_server/lib.py @@ -166,6 +166,16 @@ class StatusEventHub(SocketBaseEventHub): return super(StatusEventHub, self)._handle_packet( code, packet_identifier, path, data ) + + +class StorerEventHub(SocketBaseEventHub): + + hearbeat_msg = b"storer" + + def _handle_packet(self, code, packet_identifier, path, data): + """Override `_handle_packet` which extend heartbeat""" + code_name = self._code_name_mapping[code] + if code_name == "connect": event = ftrack_api.event.base.Event( topic="pype.storer.started", data={}, @@ -181,7 +191,9 @@ class StatusEventHub(SocketBaseEventHub): ) -class ProcessEventHub(ftrack_api.event.hub.EventHub): +class ProcessEventHub(SocketBaseEventHub): + + hearbeat_msg = b"processor" url, database, table_name = get_ftrack_event_mongo_info() is_table_created = False @@ -193,7 +205,6 @@ class ProcessEventHub(ftrack_api.event.hub.EventHub): database_name=self.database, table_name=self.table_name ) - self.sock = kwargs.pop("sock") super(ProcessEventHub, self).__init__(*args, **kwargs) def prepare_dbcon(self): @@ -289,9 +300,6 @@ class ProcessEventHub(ftrack_api.event.hub.EventHub): code_name = self._code_name_mapping[code] if code_name == "event": return - if code_name == "heartbeat": - self.sock.sendall(b"processor") - return self._send_packet(self._code_name_mapping["heartbeat"]) return super()._handle_packet(code, packet_identifier, path, data) From 4fd403bf54a167ea6d0621554b0a9b6768ca2bfb Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 7 Feb 2020 18:29:38 +0100 Subject: [PATCH 059/133] added constants with topics to lib --- pype/ftrack/ftrack_server/lib.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/pype/ftrack/ftrack_server/lib.py b/pype/ftrack/ftrack_server/lib.py index 
478bede6ef..e623cab8fb 100644
--- a/pype/ftrack/ftrack_server/lib.py
+++ b/pype/ftrack/ftrack_server/lib.py
@@ -28,6 +28,10 @@ from pypeapp import Logger
 from pype.ftrack.lib.custom_db_connector import DbConnector
 
 
+TOPIC_STATUS_SERVER = "pype.event.server.status"
+TOPIC_STATUS_SERVER_RESULT = "pype.event.server.status.result"
+
+
 def ftrack_events_mongo_settings():
     host = None
     port = None

From 37de60577809c2ace929f7dab880a95ddc0ed0c2 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Fri, 7 Feb 2020 18:30:07 +0100
Subject: [PATCH 060/133] socket thread can pass additional arguments to the
 executed script and unused -port arg was removed

---
 pype/ftrack/ftrack_server/socket_thread.py | 10 ++++++++--
 1 file changed, 8 insertions(+), 2 deletions(-)

diff --git a/pype/ftrack/ftrack_server/socket_thread.py b/pype/ftrack/ftrack_server/socket_thread.py
index 8e217870ba..cb073d83a0 100644
--- a/pype/ftrack/ftrack_server/socket_thread.py
+++ b/pype/ftrack/ftrack_server/socket_thread.py
@@ -12,13 +12,14 @@ class SocketThread(threading.Thread):
 
     MAX_TIMEOUT = 35
 
-    def __init__(self, name, port, filepath):
+    def __init__(self, name, port, filepath, additional_args=[]):
         super(SocketThread, self).__init__()
         self.log = Logger().get_logger("SocketThread", "Event Thread")
         self.setName(name)
         self.name = name
         self.port = port
         self.filepath = filepath
+        self.additional_args = additional_args
         self.sock = None
         self.subproc = None
         self.connection = None
@@ -53,7 +54,12 @@ class SocketThread(threading.Thread):
         )
 
         self.subproc = subprocess.Popen(
-            [sys.executable, self.filepath, "-port", str(self.port)]
+            [
+                sys.executable,
+                self.filepath,
+                *self.additional_args,
+                str(self.port)
+            ]
         )
 
         # Listen for incoming connections

From 05929f2b02929b9652411e4f0b53d324f3a67b76 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Fri, 7 Feb 2020 18:31:24 +0100
Subject: [PATCH 061/133] status gets subprocess data only if it is missing
 (no need to collect the same data for each action launch)

---
 pype/ftrack/ftrack_server/sub_event_info.py | 426 +++++++++-----------
 1 file changed, 197 insertions(+), 229 deletions(-)

diff --git a/pype/ftrack/ftrack_server/sub_event_info.py b/pype/ftrack/ftrack_server/sub_event_info.py
index d63b6acadd..5a38c992f5 100644
--- a/pype/ftrack/ftrack_server/sub_event_info.py
+++ b/pype/ftrack/ftrack_server/sub_event_info.py
@@ -1,137 +1,189 @@
 import os
 import sys
-import copy
+import json
 import signal
 import socket
-import uuid
-from datetime import datetime
+import datetime
 
 import ftrack_api
 from ftrack_server import FtrackServer
 from pype.ftrack.ftrack_server.lib import (
-    SocketSession, SocketBaseEventHub,
+    SocketSession, StatusEventHub,
     TOPIC_STATUS_SERVER, TOPIC_STATUS_SERVER_RESULT
 )
 from pypeapp import Logger
 
 log = Logger().get_logger("Event storer")
-log.info(os.environ.get("FTRACK_EVENT_SUB_ID"))
-
-
-class ObjectFactory:
-    session = None
-    sock = None
-    subprocess_id = os.environ["FTRACK_EVENT_SUB_ID"]
-    status_factory = None
-
-
-def trigger_status_info(status_id=None, status=None):
-    if not status and not status_id:
-        log.warning(
-
- ) - return - - if not status: - status = ObjectFactory.status_factory[status_id] - - if not status: - return - - new_event_data = copy.deepcopy(action_data) - new_event_data.update({ - "selection": [] - }) - new_event_data["subprocess_id"] = ObjectFactory.subprocess_id - new_event_data["status_id"] = status.id - - new_event = ftrack_api.event.base.Event( - topic="ftrack.action.launch", - data=new_event_data, - source=status.source - ) - ObjectFactory.session.event_hub.publish(new_event) - - action_identifier = ( - "event.server.status" + ObjectFactory.subprocess_id + "event.server.status" + os.environ["FTRACK_EVENT_SUB_ID"] ) - -# TODO add IP adress to label -# TODO add icon action_data = { "label": "Pype Admin", - "variant": "Event server Status", + "variant": "- Event server Status", "description": "Get Infromation about event server", "actionIdentifier": action_identifier, "icon": None } +class ObjectFactory: + session = None + status_factory = None + + class Status: default_item = { "type": "label", - "value": "Information not allowed." + "value": "Process info is not available at this moment." } + + def __init__(self, name, label, parent): + self.name = name + self.label = label or name + self.parent = parent + + self.info = None + self.last_update = None + + def update(self, info): + self.last_update = datetime.datetime.now() + self.info = info + + def get_delta_string(self, delta): + days, hours, minutes = ( + delta.days, delta.seconds // 3600, delta.seconds // 60 % 60 + ) + delta_items = [ + "{}d".format(days), + "{}h".format(hours), + "{}m".format(minutes) + ] + if not days: + delta_items.pop(0) + if not hours: + delta_items.pop(0) + delta_items.append("{}s".format(delta.seconds % 60)) + if not minutes: + delta_items.pop(0) + + return " ".join(delta_items) + + def get_items(self): + items = [] + last_update = "N/A" + if self.last_update: + delta = datetime.datetime.now() - self.last_update + last_update = "{} ago".format( + self.get_delta_string(delta) + ) + + last_update = "Updated: {}".format(last_update) + items.append({ + "type": "label", + "value": "#{}".format(self.label) + }) + items.append({ + "type": "label", + "value": "##{}".format(last_update) + }) + + if not self.info: + if self.info is None: + trigger_info_get() + items.append(self.default_item) + return items + + info = {} + for key, value in self.info.items(): + if key not in ["created_at:", "created_at"]: + info[key] = value + continue + + datetime_value = datetime.datetime.strptime( + value, "%Y.%m.%d %H:%M:%S" + ) + delta = datetime.datetime.now() - datetime_value + + running_for = self.get_delta_string(delta) + info["Started at"] = "{} [running: {}]".format(value, running_for) + + for key, value in info.items(): + items.append({ + "type": "label", + "value": "{}: {}".format(key, value) + }) + + return items + + +class StatusFactory: + note_item = { "type": "label", - "value": "Hit `submit` to refresh data." + "value": ( + "NOTE: Hit `submit` and uncheck all" + " checkers to refresh data." 
+ ) } splitter_item = { "type": "label", "value": "---" } - def __init__(self, source_info, parent): - self.id = str(uuid.uuid1()) - self.created = datetime.now() - self.parent = parent + def __init__(self, statuses={}): + self.statuses = [] + for status in statuses.items(): + self.create_status(*status) - self.source = source_info + def __getitem__(self, key): + return self.get(key) - self.main_process = None - self.storer = None - self.processor = None + def get(self, key, default=None): + for status in self.statuses: + if status.name == key: + return status + return default - def add_result(self, source, data): - if source.lower() == "storer": - self.storer = data - - elif source.lower() == "processor": - self.processor = data - - else: - self.main_process = data - - def filled(self): - # WARNING DEBUG PART!!!! + def is_filled(self): + for status in self.statuses: + if status.info is None: + return False return True - return ( - self.main_process is not None and - self.storer is not None and - self.processor is not None - ) - def get_items_from_dict(self, in_dict): - items = [] - for key, value in in_dict.items(): - items.append({ - "type": "label", - "value": "##{}".format(key) - }) - items.append({ - "type": "label", - "value": value - }) - return items + def create_status(self, name, label): + new_status = Status(name, label, self) + self.statuses.append(new_status) + + def process_event_result(self, event): + subprocess_id = event["data"].get("subprocess_id") + if subprocess_id != os.environ["FTRACK_EVENT_SUB_ID"]: + return + + source = event["data"]["source"] + data = event["data"]["status_info"] + for status in self.statuses: + if status.name == source: + status.update(data) + break def bool_items(self): items = [] - name_labels = { - "shutdown_main": "Shutdown main process", - "reset_storer": "Reset storer", - "reset_processor": "Reset processor" - } + items.append({ + "type": "label", + "value": "#Restart process" + }) + items.append({ + "type": "label", + "value": ( + "WARNING: Main process may not restart" + " if does not run as a service!" 
+ ) + }) + + name_labels = {} + for status in self.statuses: + name_labels[status.name] = status.label + for name, label in name_labels.items(): items.append({ "type": "boolean", @@ -144,75 +196,14 @@ class Status: def items(self): items = [] items.append(self.note_item) - - items.append({"type": "label", "value": "Main process"}) - if not self.main_process: - items.append(self.default_item) - else: - items.extend( - self.get_items_from_dict(self.main_process) - ) - - items.append(self.splitter_item) - items.append({"type": "label", "value": "Storer process"}) - if not self.storer: - items.append(self.default_item) - else: - items.extend( - self.get_items_from_dict(self.storer) - ) - - items.append(self.splitter_item) - items.append({"type": "label", "value": "Processor process"}) - if not self.processor: - items.append(self.default_item) - else: - items.extend( - self.get_items_from_dict(self.processor) - ) - - items.append(self.splitter_item) items.extend(self.bool_items()) + for status in self.statuses: + items.append(self.splitter_item) + items.extend(status.get_items()) + return items - @property - def is_overtime(self): - time_delta = (datetime.now() - self.created).total_seconds() - return time_delta >= self.parent.max_delta_seconds - - -class StatusFactory: - max_delta_seconds = 30 - - def __init__(self): - self.statuses = {} - - def __getitem__(self, key): - return self.statuses.get(key) - - def create_status(self, source_info): - new_status = Status(source_info, self) - self.statuses[new_status.id] = new_status - return new_status - - def process_result(self, event): - subprocess_id = event["data"].get("subprocess_id") - if subprocess_id != ObjectFactory.subprocess_id: - return - - status_id = event["data"].get("status_id") - status = self.statuses[status_id] - if not status: - return - - source = event["data"]["source"] - data = event["data"]["status_info"] - - status.add_result(source, data) - if status.filled(): - trigger_status_info(status=status) - def server_activity_validate_user(event): """Validate user permissions to show server info.""" @@ -247,38 +238,6 @@ def server_activity_discover(event): return {"items": [action_data]} -def handle_filled_event(event): - subprocess_id = event["data"].get("subprocess_id") - if subprocess_id != ObjectFactory.subprocess_id: - return None - - status_id = event["data"].get("status_id") - status = ObjectFactory.status_factory[status_id] - if not status: - return None - - values = event.get("values") - if values: - log.info(values) - - title = "Event server - Status" - - event_data = copy.deepcopy(event["data"]) - event_data.update({ - "type": "widget", - "items": status.items(), - "title": title - }) - - ObjectFactory.session.event_hub.publish( - ftrack_api.event.base.Event( - topic="ftrack.action.trigger-user-interface", - data=event_data - ), - on_error='ignore' - ) - - def server_activity(event): session = ObjectFactory.session if session is None: @@ -289,35 +248,47 @@ def server_activity(event): "message": msg } - valid = server_activity_validate_user(event) - if not valid: + if not server_activity_validate_user(event): return { "success": False, "message": "You don't have permissions to see Event server status!" 
} - subprocess_id = event["data"].get("subprocess_id") - if subprocess_id is not None: - return handle_filled_event(event) + values = event["data"].get("values") or {} + is_checked = False + for value in values.values(): + if value: + is_checked = True + break - status = ObjectFactory.status_factory.create_status(event["source"]) + if not is_checked: + return { + "items": ObjectFactory.status_factory.items(), + "title": "Server current status" + } - event_data = { - "status_id": status.id, - "subprocess_id": ObjectFactory.subprocess_id - } + +def trigger_info_get(): + session = ObjectFactory.session session.event_hub.publish( ftrack_api.event.base.Event( topic=TOPIC_STATUS_SERVER, - data=event_data + data={"subprocess_id": os.environ["FTRACK_EVENT_SUB_ID"]} ), on_error="ignore" ) - return { - "success": True, - "message": "Collecting information (this may take > 20s)" - } + +def on_start(event): + session = ObjectFactory.session + source_id = event.get("source", {}).get("id") + if not source_id or source_id != session.event_hub.id: + return + + if session is None: + log.warning("Session is not set. Can't trigger Sync to avalon action.") + return True + trigger_info_get() def register(session): @@ -326,6 +297,7 @@ def register(session): "topic=ftrack.action.discover", server_activity_discover ) + session.event_hub.subscribe("topic=pype.status.started", on_start) status_launch_subscription = ( "topic=ftrack.action.launch and data.actionIdentifier={}" @@ -338,34 +310,51 @@ def register(session): session.event_hub.subscribe( "topic={}".format(TOPIC_STATUS_SERVER_RESULT), - ObjectFactory.status_factory.process_result + ObjectFactory.status_factory.process_event_result ) +def heartbeat(): + if ObjectFactory.status_factory.is_filled(): + return + + trigger_info_get() + + def main(args): port = int(args[-1]) + server_info = json.loads(args[-2]) # Create a TCP/IP socket sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) # Connect the socket to the port where the server is listening server_address = ("localhost", port) - log.debug("Storer connected to {} port {}".format(*server_address)) + log.debug("Statuser connected to {} port {}".format(*server_address)) sock.connect(server_address) sock.sendall(b"CreatedStatus") # store socket connection object ObjectFactory.sock = sock - ObjectFactory.status_factory = StatusFactory() + statuse_names = { + "main": "Main process", + "storer": "Storer", + "processor": "Processor" + } + + ObjectFactory.status_factory = StatusFactory(statuse_names) + ObjectFactory.status_factory["main"].update(server_info) _returncode = 0 try: session = SocketSession( - auto_connect_event_hub=True, sock=sock, Eventhub=SocketBaseEventHub + auto_connect_event_hub=True, sock=sock, Eventhub=StatusEventHub ) ObjectFactory.session = session + session.event_hub.heartbeat_callbacks.append(heartbeat) register(session) server = FtrackServer("event") - log.debug("Launched Ftrack Event storer") + log.debug("Launched Ftrack Event statuser") + server.run_server(session, load_files=False) except Exception: @@ -388,24 +377,3 @@ if __name__ == "__main__": signal.signal(signal.SIGTERM, signal_handler) sys.exit(main(sys.argv)) - - -example_action_event = { - 'data': { - 'selection': [], - 'description': 'Test action2', - 'variant': None, - 'label': 'Test action2', - 'actionIdentifier': 'test.action2.3ceffe5e9acf40f8aa80603adebd0d06', - 'values': {}, - 'icon': None, - }, - 'topic': 'ftrack.action.launch', - 'sent': None, - 'source': { - 'id': 'eb67d186301c4cbbab73c1aee9b7c55d', - 'user': 
{'username': 'jakub.trllo', 'id': '2a8ae090-cbd3-11e8-a87a-0a580aa00121'}
-    },
-    'target': '',
-    'in_reply_to_event': None
-}

From 1b1a78cb6ed79be18fcf89bd340c4e09528fda56 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Fri, 7 Feb 2020 18:31:47 +0100
Subject: [PATCH 062/133] processor subprocess can send status information on
 ask event

---
 .../ftrack_server/sub_event_processor.py      | 51 ++++++++++++++++++-
 1 file changed, 50 insertions(+), 1 deletion(-)

diff --git a/pype/ftrack/ftrack_server/sub_event_processor.py b/pype/ftrack/ftrack_server/sub_event_processor.py
index 9c971ca916..2a3ad3e76d 100644
--- a/pype/ftrack/ftrack_server/sub_event_processor.py
+++ b/pype/ftrack/ftrack_server/sub_event_processor.py
@@ -1,13 +1,59 @@
+import os
 import sys
 import signal
 import socket
+import datetime
 
 from ftrack_server import FtrackServer
-from pype.ftrack.ftrack_server.lib import SocketSession, ProcessEventHub
+from pype.ftrack.ftrack_server.lib import (
+    SocketSession, ProcessEventHub, TOPIC_STATUS_SERVER
+)
+import ftrack_api
 from pypeapp import Logger
 
 log = Logger().get_logger("Event processor")
 
+subprocess_started = datetime.datetime.now()
+
+
+class SessionFactory:
+    session = None
+
+
+def send_status(event):
+    subprocess_id = event["data"].get("subprocess_id")
+    if not subprocess_id:
+        return
+
+    if subprocess_id != os.environ["FTRACK_EVENT_SUB_ID"]:
+        return
+
+    session = SessionFactory.session
+    if not session:
+        return
+
+    new_event_data = {
+        "subprocess_id": subprocess_id,
+        "source": "processor",
+        "status_info": {
+            "created_at": subprocess_started.strftime("%Y.%m.%d %H:%M:%S")
+        }
+    }
+
+    new_event = ftrack_api.event.base.Event(
+        topic="pype.event.server.status.result",
+        data=new_event_data
+    )
+
+    session.event_hub.publish(new_event)
+
+
+def register(session):
+    '''Registers the event, subscribing the discover and launch topics.'''
+    session.event_hub.subscribe(
+        "topic={}".format(TOPIC_STATUS_SERVER), send_status
+    )
+
 
 def main(args):
     port = int(args[-1])
@@ -24,6 +70,9 @@ def main(args):
         session = SocketSession(
             auto_connect_event_hub=True, sock=sock, Eventhub=ProcessEventHub
         )
+        register(session)
+        SessionFactory.session = session
+
         server = FtrackServer("event")
         log.debug("Launched Ftrack Event processor")
         server.run_server(session)

From 2ff7b87956651c3343d195b56f0f871aaa4afee1 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Fri, 7 Feb 2020 18:32:02 +0100
Subject: [PATCH 063/133] storer can send status information on ask

---
 pype/ftrack/ftrack_server/sub_event_storer.py | 36 +++++++++++++++++--
 1 file changed, 34 insertions(+), 2 deletions(-)

diff --git a/pype/ftrack/ftrack_server/sub_event_storer.py b/pype/ftrack/ftrack_server/sub_event_storer.py
index dfe8e21654..b4b9b8a7ab 100644
--- a/pype/ftrack/ftrack_server/sub_event_storer.py
+++ b/pype/ftrack/ftrack_server/sub_event_storer.py
@@ -8,14 +8,15 @@ import pymongo
 import ftrack_api
 from ftrack_server import FtrackServer
 from pype.ftrack.ftrack_server.lib import (
+    SocketSession, StorerEventHub,
     get_ftrack_event_mongo_info,
-    SocketSession,
-    StorerEventHub
+    TOPIC_STATUS_SERVER, TOPIC_STATUS_SERVER_RESULT
 )
 from pype.ftrack.lib.custom_db_connector import DbConnector
 from pypeapp import Logger
 
 log = Logger().get_logger("Event storer")
+subprocess_started = datetime.datetime.now()
 
 
 class SessionFactory:
@@ -138,11 +139,42 @@ def trigger_sync(event):
     )
 
 
+def send_status(event):
+    session = SessionFactory.session
+    if not session:
+        return
+
+    subprocess_id = event["data"].get("subprocess_id")
+    if not subprocess_id:
+        return
+
+
if subprocess_id != os.environ["FTRACK_EVENT_SUB_ID"]:
+        return
+
+    new_event_data = {
+        "subprocess_id": os.environ["FTRACK_EVENT_SUB_ID"],
+        "source": "storer",
+        "status_info": {
+            "created_at": subprocess_started.strftime("%Y.%m.%d %H:%M:%S")
+        }
+    }
+
+    new_event = ftrack_api.event.base.Event(
+        topic=TOPIC_STATUS_SERVER_RESULT,
+        data=new_event_data
+    )
+
+    session.event_hub.publish(new_event)
+
+
 def register(session):
     '''Registers the event, subscribing the discover and launch topics.'''
     install_db()
     session.event_hub.subscribe("topic=*", launch)
     session.event_hub.subscribe("topic=pype.storer.started", trigger_sync)
+    session.event_hub.subscribe(
+        "topic={}".format(TOPIC_STATUS_SERVER), send_status
+    )
 
 
 def main(args):

From 5433daf7b065eb7c16720009170b3400a5ee0fd5 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Fri, 7 Feb 2020 18:32:40 +0100
Subject: [PATCH 064/133] event server cli sends its information on status
 subprocess startup

---
 pype/ftrack/ftrack_server/event_server_cli.py | 19 ++++++++++++++++++-
 1 file changed, 18 insertions(+), 1 deletion(-)

diff --git a/pype/ftrack/ftrack_server/event_server_cli.py b/pype/ftrack/ftrack_server/event_server_cli.py
index b2c540e993..2dadb5da25 100644
--- a/pype/ftrack/ftrack_server/event_server_cli.py
+++ b/pype/ftrack/ftrack_server/event_server_cli.py
@@ -4,7 +4,10 @@ import signal
 import datetime
 import subprocess
 import socket
+import json
+import platform
 import argparse
+import getpass
 import atexit
 import time
 import uuid
@@ -233,6 +236,16 @@ def main_loop(ftrack_url):
     atexit.register(
         on_exit, processor_thread=processor_thread, storer_thread=storer_thread
     )
+
+    system_name, pc_name = platform.uname()[:2]
+    host_name = socket.gethostname()
+    main_info = {
+        "created_at": datetime.datetime.now().strftime("%Y.%m.%d %H:%M:%S"),
+        "Username": getpass.getuser(),
+        "Host Name": host_name,
+        "Host IP": socket.gethostbyname(host_name)
+    }
+    main_info_str = json.dumps(main_info)
     # Main loop
     while True:
         # Check if accessible Ftrack and Mongo url

From fa60c87c3e0f9e9261dd9b9e5c8b4188c50e0b4f Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Fri, 7 Feb 2020 19:53:16 +0100
Subject: [PATCH 065/133] restarting is working, need to add communication
 between main process and status process

---
 pype/ftrack/ftrack_server/event_server_cli.py | 99 +++++++++++--------
 pype/ftrack/ftrack_server/socket_thread.py    | 56 ++++++++++-
 pype/ftrack/ftrack_server/sub_event_info.py   | 35 +++++++
 3 files changed, 148 insertions(+), 42 deletions(-)

diff --git a/pype/ftrack/ftrack_server/event_server_cli.py b/pype/ftrack/ftrack_server/event_server_cli.py
index
2dadb5da25..19e889f77d 100644 --- a/pype/ftrack/ftrack_server/event_server_cli.py +++ b/pype/ftrack/ftrack_server/event_server_cli.py @@ -222,7 +222,7 @@ def main_loop(ftrack_url): # stop threads on exit # TODO check if works and args have thread objects! - def on_exit(processor_thread, storer_thread): + def on_exit(processor_thread, storer_thread, statuser_thread): if processor_thread is not None: processor_thread.stop() processor_thread.join() @@ -233,8 +233,16 @@ def main_loop(ftrack_url): storer_thread.join() storer_thread = None + if statuser_thread is not None: + statuser_thread.stop() + statuser_thread.join() + statuser_thread = None + atexit.register( - on_exit, processor_thread=processor_thread, storer_thread=storer_thread + on_exit, + processor_thread=processor_thread, + storer_thread=storer_thread, + statuser_thread=statuser_thread ) system_name, pc_name = platform.uname()[:2] @@ -283,6 +291,51 @@ def main_loop(ftrack_url): printed_ftrack_error = False printed_mongo_error = False + # ====== STATUSER ======= + if statuser_thread is None: + if statuser_failed_count < max_fail_count: + statuser_thread = socket_thread.StatusSocketThread( + statuser_name, statuser_port, statuser_path, + [main_info_str] + ) + statuser_thread.start() + + elif statuser_failed_count == max_fail_count: + print(( + "Statuser failed {}times in row" + " I'll try to run again {}s later" + ).format(str(max_fail_count), str(wait_time_after_max_fail))) + statuser_failed_count += 1 + + elif (( + datetime.datetime.now() - statuser_last_failed + ).seconds > wait_time_after_max_fail): + statuser_failed_count = 0 + + # If thread failed test Ftrack and Mongo connection + elif not statuser_thread.isAlive(): + statuser_thread.join() + statuser_thread = None + ftrack_accessible = False + mongo_accessible = False + + _processor_last_failed = datetime.datetime.now() + delta_time = ( + _processor_last_failed - statuser_last_failed + ).seconds + + if delta_time < min_fail_seconds: + statuser_failed_count += 1 + else: + statuser_failed_count = 0 + statuser_last_failed = _processor_last_failed + + elif statuser_thread.stop_subprocess: + print("Main process was stopped by action") + on_exit(processor_thread, storer_thread, statuser_thread) + os.kill(os.getpid(), signal.SIGTERM) + return 1 + # ====== STORER ======= # Run backup thread which does not requeire mongo to work if storer_thread is None: @@ -291,6 +344,7 @@ def main_loop(ftrack_url): storer_name, storer_port, storer_path ) storer_thread.start() + elif storer_failed_count == max_fail_count: print(( "Storer failed {}times I'll try to run again {}s later" @@ -360,44 +414,9 @@ def main_loop(ftrack_url): processor_failed_count = 0 processor_last_failed = _processor_last_failed - # ====== STATUSER ======= - if statuser_thread is None: - if statuser_failed_count < max_fail_count: - statuser_thread = socket_thread.SocketThread( - statuser_name, statuser_port, statuser_path, - [main_info_str] - ) - statuser_thread.start() - - elif statuser_failed_count == max_fail_count: - print(( - "Statuser failed {}times in row" - " I'll try to run again {}s later" - ).format(str(max_fail_count), str(wait_time_after_max_fail))) - statuser_failed_count += 1 - - elif (( - datetime.datetime.now() - statuser_last_failed - ).seconds > wait_time_after_max_fail): - statuser_failed_count = 0 - - # If thread failed test Ftrack and Mongo connection - elif not statuser_thread.isAlive(): - statuser_thread.join() - statuser_thread = None - ftrack_accessible = False - mongo_accessible = False - - 
_processor_last_failed = datetime.datetime.now() - delta_time = ( - _processor_last_failed - statuser_last_failed - ).seconds - - if delta_time < min_fail_seconds: - statuser_failed_count += 1 - else: - statuser_failed_count = 0 - statuser_last_failed = _processor_last_failed + if statuser_thread is not None: + statuser_thread.set_process("storer", storer_thread) + statuser_thread.set_process("processor", processor_thread) time.sleep(1) diff --git a/pype/ftrack/ftrack_server/socket_thread.py b/pype/ftrack/ftrack_server/socket_thread.py index cb073d83a0..cbe4f9dd8b 100644 --- a/pype/ftrack/ftrack_server/socket_thread.py +++ b/pype/ftrack/ftrack_server/socket_thread.py @@ -3,6 +3,7 @@ import sys import time import socket import threading +import traceback import subprocess from pypeapp import Logger @@ -14,12 +15,13 @@ class SocketThread(threading.Thread): def __init__(self, name, port, filepath, additional_args=[]): super(SocketThread, self).__init__() - self.log = Logger().get_logger("SocketThread", "Event Thread") + self.log = Logger().get_logger(self.__class__.__name__) self.setName(name) self.name = name self.port = port self.filepath = filepath self.additional_args = additional_args + self.sock = None self.subproc = None self.connection = None @@ -59,7 +61,8 @@ class SocketThread(threading.Thread): self.filepath, *self.additional_args, str(self.port) - ] + ], + stdin=subprocess.PIPE ) # Listen for incoming connections @@ -133,3 +136,52 @@ class SocketThread(threading.Thread): if data == b"MongoError": self.mongo_error = True connection.sendall(data) + + +class StatusSocketThread(SocketThread): + process_name_mapping = { + b"RestartS": "storer", + b"RestartP": "processor", + b"RestartM": "main" + } + + def __init__(self, *args, **kwargs): + self.process_threads = {} + self.stop_subprocess = False + super(StatusSocketThread, self).__init__(*args, **kwargs) + + def set_process(self, process_name, thread): + try: + if not self.subproc: + self.process_threads[process_name] = None + return + + if ( + process_name in self.process_threads and + self.process_threads[process_name] == thread + ): + return + + self.process_threads[process_name] = thread + self.subproc.stdin.write( + str.encode("reset:{}".format(process_name)) + ) + self.subproc.stdin.flush() + + except Exception: + print("Could not set thread in StatusSocketThread") + traceback.print_exception(*sys.exc_info()) + + def _handle_data(self, connection, data): + if not data: + return + + process_name = self.process_name_mapping.get(data) + if process_name: + if process_name == "main": + self.stop_subprocess = True + else: + subp = self.process_threads.get(process_name) + if subp: + subp.stop() + connection.sendall(data) diff --git a/pype/ftrack/ftrack_server/sub_event_info.py b/pype/ftrack/ftrack_server/sub_event_info.py index 5a38c992f5..a0c2564e10 100644 --- a/pype/ftrack/ftrack_server/sub_event_info.py +++ b/pype/ftrack/ftrack_server/sub_event_info.py @@ -1,6 +1,8 @@ import os import sys import json +import time +import threading import signal import socket import datetime @@ -29,6 +31,7 @@ action_data = { class ObjectFactory: session = None status_factory = None + checker_thread = None class Status: @@ -267,6 +270,17 @@ def server_activity(event): "title": "Server current status" } + session = ObjectFactory.session + if values["main"]: + session.event_hub.sock.sendall(b"RestartM") + return + + if values["storer"]: + session.event_hub.sock.sendall(b"RestartS") + + if values["processor"]: + session.event_hub.sock.sendall(b"RestartP") 
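+    # The byte commands above are matched against
+    # StatusSocketThread.process_name_mapping in the main process:
+    # b"RestartM" stops the whole event server, while b"RestartS" and
+    # b"RestartP" stop the storer and processor threads so the main
+    # loop starts them again.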
+ def trigger_info_get(): session = ObjectFactory.session @@ -367,13 +381,34 @@ def main(args): return _returncode +class OutputChecker(threading.Thread): + read_input = True + + def run(self): + while self.read_input: + line = sys.stdin.readlines() + log.info(str(line)) + # for line in sys.stdin.readlines(): + # log.info(str(line)) + log.info("alive-end") + time.sleep(0.5) + + def stop(self): + self.read_input = False + + if __name__ == "__main__": # Register interupt signal def signal_handler(sig, frame): print("You pressed Ctrl+C. Process ended.") + ObjectFactory.checker_thread.stop() sys.exit(0) signal.signal(signal.SIGINT, signal_handler) signal.signal(signal.SIGTERM, signal_handler) + checker_thread = OutputChecker() + ObjectFactory.checker_thread = checker_thread + checker_thread.start() + sys.exit(main(sys.argv)) From 10853e1ade753801109009d0497b389533419316 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Sat, 8 Feb 2020 11:26:43 +0100 Subject: [PATCH 066/133] process information are refreshed by main process now --- pype/ftrack/ftrack_server/socket_thread.py | 2 +- pype/ftrack/ftrack_server/sub_event_info.py | 39 ++++++++++++++------- 2 files changed, 27 insertions(+), 14 deletions(-) diff --git a/pype/ftrack/ftrack_server/socket_thread.py b/pype/ftrack/ftrack_server/socket_thread.py index cbe4f9dd8b..942965f9e2 100644 --- a/pype/ftrack/ftrack_server/socket_thread.py +++ b/pype/ftrack/ftrack_server/socket_thread.py @@ -164,7 +164,7 @@ class StatusSocketThread(SocketThread): self.process_threads[process_name] = thread self.subproc.stdin.write( - str.encode("reset:{}".format(process_name)) + str.encode("reset:{}\r\n".format(process_name)) ) self.subproc.stdin.flush() diff --git a/pype/ftrack/ftrack_server/sub_event_info.py b/pype/ftrack/ftrack_server/sub_event_info.py index a0c2564e10..4c94513eae 100644 --- a/pype/ftrack/ftrack_server/sub_event_info.py +++ b/pype/ftrack/ftrack_server/sub_event_info.py @@ -32,6 +32,7 @@ class ObjectFactory: session = None status_factory = None checker_thread = None + last_trigger = None class Status: @@ -124,8 +125,8 @@ class StatusFactory: note_item = { "type": "label", "value": ( - "NOTE: Hit `submit` and uncheck all" - " checkers to refresh data." + "HINT: To refresh data uncheck" + " all checkboxes and hit `Submit` button." ) } splitter_item = { @@ -164,9 +165,13 @@ class StatusFactory: source = event["data"]["source"] data = event["data"]["status_info"] + + self.update_status_info(source, data) + + def update_status_info(self, process_name, info): for status in self.statuses: - if status.name == source: - status.update(data) + if status.name == process_name: + status.update(info) break def bool_items(self): @@ -178,7 +183,7 @@ class StatusFactory: items.append({ "type": "label", "value": ( - "WARNING: Main process may not restart" + "WARNING: Main process may shut down when checked" " if does not run as a service!" 
             )
         })
@@ -283,6 +288,11 @@ def server_activity(event):
 
 
 def trigger_info_get():
+    if ObjectFactory.last_trigger:
+        delta = datetime.datetime.now() - ObjectFactory.last_trigger
+        if delta.seconds < 5:
+            return
+
     session = ObjectFactory.session
     session.event_hub.publish(
         ftrack_api.event.base.Event(
@@ -352,8 +362,8 @@ def main(args):
 
     statuse_names = {
         "main": "Main process",
-        "storer": "Storer",
-        "processor": "Processor"
+        "storer": "Event Storer",
+        "processor": "Event Processor"
     }
     ObjectFactory.status_factory = StatusFactory(statuse_names)
@@ -386,12 +396,15 @@ class OutputChecker(threading.Thread):
 
     def run(self):
         while self.read_input:
-            line = sys.stdin.readlines()
-            log.info(str(line))
-            # for line in sys.stdin.readlines():
-            #     log.info(str(line))
-            log.info("alive-end")
-            time.sleep(0.5)
+            for line in sys.stdin:
+                line = line.rstrip().lower()
+                if not line.startswith("reset:"):
+                    continue
+                process_name = line.replace("reset:", "")
+
+                ObjectFactory.status_factory.update_status_info(
+                    process_name, None
+                )
 
     def stop(self):
         self.read_input = False

From 49f9dbf4183f057ab2f0ad16fe4b0909de55eef1 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Sat, 8 Feb 2020 11:28:33 +0100
Subject: [PATCH 067/133] renamed sub_event_info to sub_event_status

---
 pype/ftrack/ftrack_server/event_server_cli.py                | 2 +-
 .../ftrack_server/{sub_event_info.py => sub_event_status.py} | 0
 2 files changed, 1 insertion(+), 1 deletion(-)
 rename pype/ftrack/ftrack_server/{sub_event_info.py => sub_event_status.py} (100%)

diff --git a/pype/ftrack/ftrack_server/event_server_cli.py b/pype/ftrack/ftrack_server/event_server_cli.py
index 19e889f77d..90c7c566fc 100644
--- a/pype/ftrack/ftrack_server/event_server_cli.py
+++ b/pype/ftrack/ftrack_server/event_server_cli.py
@@ -209,7 +209,7 @@ def main_loop(ftrack_url):
 
     statuser_name = "StorerThread"
     statuser_port = 10021
-    statuser_path = "{}/sub_event_info.py".format(file_path)
+    statuser_path = "{}/sub_event_status.py".format(file_path)
     statuser_thread = None
     statuser_last_failed = datetime.datetime.now()
     statuser_failed_count = 0
diff --git a/pype/ftrack/ftrack_server/sub_event_info.py b/pype/ftrack/ftrack_server/sub_event_status.py
similarity index 100%
rename from pype/ftrack/ftrack_server/sub_event_info.py
rename to pype/ftrack/ftrack_server/sub_event_status.py

From e9c4ec7fee46b87a067efc9a7566a09f071a4ea3 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Sat, 8 Feb 2020 11:30:38 +0100
Subject: [PATCH 068/133] label has IP address of server

---
 pype/ftrack/ftrack_server/sub_event_status.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/pype/ftrack/ftrack_server/sub_event_status.py b/pype/ftrack/ftrack_server/sub_event_status.py
index 4c94513eae..8dc176a091 100644
--- a/pype/ftrack/ftrack_server/sub_event_status.py
+++ b/pype/ftrack/ftrack_server/sub_event_status.py
@@ -1,7 +1,6 @@
 import os
 import sys
 import json
-import time
 import threading
 import signal
 import socket
@@ -19,9 +18,10 @@ log = Logger().get_logger("Event storer")
 action_identifier = (
     "event.server.status" + os.environ["FTRACK_EVENT_SUB_ID"]
 )
+host_ip = socket.gethostbyname(socket.gethostname())
 action_data = {
     "label": "Pype Admin",
-    "variant": "- Event server Status",
+    "variant": "- Event server Status ({})".format(host_ip),
     "description": "Get Infromation about event server",
     "actionIdentifier": action_identifier,
     "icon": None

From 4e85279771711e794330d414537381be9025a4b6 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Sat, 8 Feb 2020 12:01:04 +0100
Subject: [PATCH 069/133] added icon
 to status action

---
 pype/ftrack/ftrack_server/sub_event_status.py | 13 +++++++++++--
 1 file changed, 11 insertions(+), 2 deletions(-)

diff --git a/pype/ftrack/ftrack_server/sub_event_status.py b/pype/ftrack/ftrack_server/sub_event_status.py
index 8dc176a091..1a15a1f28d 100644
--- a/pype/ftrack/ftrack_server/sub_event_status.py
+++ b/pype/ftrack/ftrack_server/sub_event_status.py
@@ -12,7 +12,7 @@ from pype.ftrack.ftrack_server.lib import (
     SocketSession, StatusEventHub,
     TOPIC_STATUS_SERVER, TOPIC_STATUS_SERVER_RESULT
 )
-from pypeapp import Logger
+from pypeapp import Logger, config
 
 log = Logger().get_logger("Event storer")
 action_identifier = (
@@ -24,7 +24,16 @@ action_data = {
     "variant": "- Event server Status ({})".format(host_ip),
     "description": "Get Infromation about event server",
     "actionIdentifier": action_identifier,
-    "icon": None
+    "icon": "{}/ftrack/action_icons/PypeAdmin.svg".format(
+        os.environ.get(
+            "PYPE_STATICS_SERVER",
+            "http://localhost:{}".format(
+                config.get_presets().get("services", {}).get(
+                    "rest_api", {}
+                ).get("default_port", 8021)
+            )
+        )
+    )
 }

From 350d1ca7d038c8cc75619915262fb2a7fdff4be0 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Mon, 10 Feb 2020 16:41:58 +0100
Subject: [PATCH 070/133] implemented action that removes files from disk for particular asset versions except the latest `x` versions

---
 .../actions/action_delete_old_versions.py     | 481 ++++++++++++++++++
 1 file changed, 481 insertions(+)
 create mode 100644 pype/ftrack/actions/action_delete_old_versions.py

diff --git a/pype/ftrack/actions/action_delete_old_versions.py b/pype/ftrack/actions/action_delete_old_versions.py
new file mode 100644
index 0000000000..126c9a5e24
--- /dev/null
+++ b/pype/ftrack/actions/action_delete_old_versions.py
@@ -0,0 +1,481 @@
+import os
+import collections
+import uuid
+
+import clique
+from pymongo import UpdateOne
+
+from pype.ftrack import BaseAction
+from pype.ftrack.lib.io_nonsingleton import DbConnector
+
+import avalon.pipeline
+
+
+class DeleteOldVersions(BaseAction):
+
+    identifier = "delete.old.versions"
+    label = "Pype Admin"
+    variant = "- Delete old versions"
+    description = (
+        "Delete files from older publishes so project can be"
+        " archived with only latest versions."
+    )
+
+    dbcon = DbConnector()
+
+    inteface_title = "Choose your preferences"
+    splitter_item = {"type": "label", "value": "---"}
+    sequence_splitter = "__sequence_splitter__"
+
+    def discover(self, session, entities, event):
+        ''' Validation '''
+        selection = event["data"].get("selection") or []
+        for entity in selection:
+            entity_type = (entity.get("entityType") or "").lower()
+            if entity_type == "assetversion":
+                return True
+        return False
+
+    def interface(self, session, entities, event):
+        items = []
+        root = os.environ.get("AVALON_PROJECTS")
+        if not root:
+            msg = "Root path to projects is not set."
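+            # Surface the problem through the action's own interface items
+            # so the user sees why nothing can be deleted.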
+ items.append({ + "type": "label", + "value": "ERROR: {}".format(msg) + }) + self.show_interface( + items=items, title=self.inteface_title, event=event + ) + return { + "success": False, + "message": msg + } + + elif not os.path.exists(root): + msg = "Root path does not exists \"{}\".".format(str(root)) + items.append({ + "type": "label", + "value": "ERROR: {}".format(msg) + }) + self.show_interface( + items=items, title=self.inteface_title, event=event + ) + return { + "success": False, + "message": msg + } + + values = event["data"].get("values") + if values: + versions_count = int(values["last_versions_count"]) + if versions_count >= 1: + return + items.append({ + "type": "label", + "value": ( + "# You have to keep at least 1 version!" + ) + }) + + items.append({ + "type": "label", + "value": ( + "WARNING: This will remove published files of older" + " versions from disk so we don't recommend use" + " this action on \"live\" project." + ) + }) + + items.append(self.splitter_item) + + # How many versions to keep + items.append({ + "type": "label", + "value": "## Choose how many versions you want to keep:" + }) + items.append({ + "type": "label", + "value": ( + "NOTE: We do recommend to keep 2" + " versions (even if default is 1)." + ) + }) + items.append({ + "type": "number", + "name": "last_versions_count", + "label": "Versions", + "value": 1 + }) + + items.append(self.splitter_item) + + items.append({ + "type": "label", + "value": ( + "## Remove publish folder even if there" + " are other than published files:" + ) + }) + items.append({ + "type": "label", + "value": ( + "WARNING: This may remove more than you want." + ) + }) + items.append({ + "type": "boolean", + "name": "force_delete_publish_folder", + "label": "Are you sure?", + "value": True + }) + + return { + "items": items, + "title": self.inteface_title + } + + def launch(self, session, entities, event): + values = event["data"].get("values") + if not values: + return + + versions_count = int(values["last_versions_count"]) + force_to_remove = values["force_delete_publish_folder"] + + _val1 = "OFF" + if force_to_remove: + _val1 = "ON" + + _val3 = "s" + if versions_count == 1: + _val3 = "" + + self.log.debug(( + "Process started. Force to delete publish folder is set to [{0}]" + " and will keep {1} latest version{2}." 
+ ).format(_val1, versions_count, _val3)) + + self.dbcon.install() + + project = None + avalon_asset_names = [] + asset_versions_by_parent_id = collections.defaultdict(list) + subset_names_by_asset_name = collections.defaultdict(list) + + for entity in entities: + parent_ent = entity["asset"]["parent"] + parent_ftrack_id = parent_ent["id"] + parent_name = parent_ent["name"] + + if parent_name not in avalon_asset_names: + avalon_asset_names.append(parent_name) + + # Group asset versions by parent entity + asset_versions_by_parent_id[parent_ftrack_id].append(entity) + + # Get project + if project is None: + project = parent_ent["project"] + + # Collect subset names per asset + subset_name = entity["asset"]["name"] + subset_names_by_asset_name[parent_name].append(subset_name) + + # Set Mongo collection + project_name = project["full_name"] + self.dbcon.Session["AVALON_PROJECT"] = project_name + self.log.debug("Project is set to {}".format(project_name)) + + # Get Assets from avalon database + assets = list(self.dbcon.find({ + "type": "asset", + "name": {"$in": avalon_asset_names} + })) + asset_id_to_name_map = { + asset["_id"]: asset["name"] for asset in assets + } + asset_ids = list(asset_id_to_name_map.keys()) + + self.log.debug("Collected assets ({})".format(len(asset_ids))) + + # Get Subsets + subsets = list(self.dbcon.find({ + "type": "subset", + "parent": {"$in": asset_ids} + })) + subsets_by_id = {} + subset_ids = [] + for subset in subsets: + asset_id = subset["parent"] + asset_name = asset_id_to_name_map[asset_id] + available_subsets = subset_names_by_asset_name[asset_name] + + if subset["name"] not in available_subsets: + continue + + subset_ids.append(subset["_id"]) + subsets_by_id[subset["_id"]] = subset + + self.log.debug("Collected subsets ({})".format(len(subset_ids))) + + # Get Versions + versions = list(self.dbcon.find({ + "type": "version", + "parent": {"$in": subset_ids} + })) + + versions_by_parent = collections.defaultdict(list) + for ent in versions: + versions_by_parent[ent["parent"]].append(ent) + + def sort_func(ent): + return int(ent["name"]) + + last_versions_by_parent = collections.defaultdict(list) + all_last_versions = [] + for parent_id, _versions in versions_by_parent.items(): + for idx, version in enumerate( + sorted(_versions, key=sort_func, reverse=True) + ): + if idx >= versions_count: + break + last_versions_by_parent[parent_id].append(version) + all_last_versions.append(version) + + self.log.debug("Collected versions ({})".format(len(versions))) + + # Filter latest versions + for version in all_last_versions: + versions.remove(version) + + # Filter already deleted versions + versions_to_pop = [] + for version in versions: + version_tags = version["data"].get("tags") + if version_tags and "deleted" in version_tags: + versions_to_pop.append(version) + + for version in versions_to_pop: + subset = subsets_by_id[version["parent"]] + asset_id = subset["parent"] + asset_name = asset_id_to_name_map[asset_id] + msg = "Asset: \"{}\" | Subset: \"{}\" | Version: \"{}\"".format( + asset_name, subset["name"], version["name"] + ) + self.log.warning(( + "Skipping version. Already tagged as `deleted`. < {} >" + ).format(msg)) + versions.remove(version) + + version_ids = [ent["_id"] for ent in versions] + + self.log.debug( + "Filtered versions to delete ({})".format(len(version_ids)) + ) + + if not version_ids: + msg = "Skipping processing. Nothing to delete." 
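+            # Every candidate version was either kept as one of the
+            # latest `x` versions or already carried the "deleted" tag.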
+ self.log.debug(msg) + return { + "success": True, + "message": msg + } + + repres = list(self.dbcon.find({ + "type": "representation", + "parent": {"$in": version_ids} + })) + + self.log.debug( + "Collected representations to remove ({})".format(len(repres)) + ) + + dir_paths = {} + file_paths_by_dir = collections.defaultdict(list) + for repre in repres: + file_path, seq_path = self.path_from_represenation(repre) + if file_path is None: + self.log.warning(( + "Could not format path for represenation \"{}\"" + ).format(str(repre))) + continue + + dir_path = os.path.dirname(file_path) + dir_id = None + for _dir_id, _dir_path in dir_paths.items(): + if _dir_path == dir_path: + dir_id = _dir_id + break + + if dir_id is None: + dir_id = uuid.uuid4() + dir_paths[dir_id] = dir_path + + file_paths_by_dir[dir_id].append([file_path, seq_path]) + + dir_ids_to_pop = [] + for dir_id, dir_path in dir_paths.items(): + if os.path.exists(dir_path): + continue + + dir_ids_to_pop.append(dir_id) + + # Pop dirs from both dictionaries + for dir_id in dir_ids_to_pop: + dir_paths.pop(dir_id) + paths = file_paths_by_dir.pop(dir_id) + # TODO report of missing directories? + paths_msg = ", ".join([ + "'{}'".format(path[0].replace("\\", "/")) for path in paths + ]) + self.log.warning(( + "Folder does not exist. Deleting it's files skipped: {}" + ).format(paths_msg)) + + if force_to_remove: + self.delete_whole_dir_paths(dir_paths.values()) + else: + self.delete_only_repre_files(dir_paths, file_paths_by_dir) + + mongo_changes_bulk = [] + for version in versions: + orig_version_tags = version["data"].get("tags") or [] + version_tags = [tag for tag in orig_version_tags] + if "deleted" not in version_tags: + version_tags.append("deleted") + + if version_tags == orig_version_tags: + continue + + filter = {"_id": version["_id"]} + update_data = {"$set": {"data.tags": version_tags}} + mongo_changes_bulk.append(UpdateOne(filter, update_data)) + + if mongo_changes_bulk: + self.dbcon.bulk_write(mongo_changes_bulk) + + self.dbcon.uninstall() + + return True + + def delete_whole_dir_paths(self, dir_paths): + for dir_path in dir_paths: + # Delete all files and fodlers in dir path + for root, dirs, files in os.walk(dir_path, topdown=False): + for name in files: + os.remove(os.path.join(root, name)) + + for name in dirs: + os.rmdir(os.path.join(root, name)) + + # Delete even the folder and it's parents folders if they are empty + while True: + if not os.path.exists(dir_path): + dir_path = os.path.dirname(dir_path) + continue + + if len(os.listdir(dir_path)) != 0: + break + + os.rmdir(os.path.join(dir_path)) + + def delete_only_repre_files(self, dir_paths, file_paths): + for dir_id, dir_path in dir_paths.items(): + dir_files = os.listdir(dir_path) + collections, remainders = clique.assemble(dir_files) + for file_path, seq_path in file_paths[dir_id]: + file_path_base = os.path.split(file_path)[1] + # Just remove file if `frame` key was not in context or + # filled path is in remainders (single file sequence) + if not seq_path or file_path_base in remainders: + if not os.path.exists(file_path): + self.log.warning( + "File was not found: {}".format(file_path) + ) + continue + os.remove(file_path) + self.log.debug("Removed file: {}".format(file_path)) + remainders.remove(file_path_base) + continue + + seq_path_base = os.path.split(seq_path)[1] + head, tail = seq_path_base.split(self.sequence_splitter) + + final_col = None + for collection in collections: + if head != collection.head or tail != collection.tail: + continue + final_col = 
collection
+                    break
+
+            if final_col is not None:
+                # Fill full path to head
+                final_col.head = os.path.join(dir_path, final_col.head)
+                for _file_path in final_col:
+                    if os.path.exists(_file_path):
+                        os.remove(_file_path)
+                _seq_path = final_col.format("{head}{padding}{tail}")
+                self.log.debug("Removed files: {}".format(_seq_path))
+                collections.remove(final_col)
+
+            elif os.path.exists(file_path):
+                os.remove(file_path)
+                self.log.debug("Removed file: {}".format(file_path))
+
+            else:
+                self.log.warning(
+                    "File was not found: {}".format(file_path)
+                )
+
+        # Delete as many parent folders as possible
+        for dir_path in dir_paths.values():
+            while True:
+                if not os.path.exists(dir_path):
+                    dir_path = os.path.dirname(dir_path)
+                    continue
+
+                if len(os.listdir(dir_path)) != 0:
+                    break
+
+                self.log.debug("Removed folder: {}".format(dir_path))
+                os.rmdir(dir_path)
+
+    def path_from_represenation(self, representation):
+        try:
+            template = representation["data"]["template"]
+
+        except KeyError:
+            return (None, None)
+
+        root = os.environ["AVALON_PROJECTS"]
+        if not root:
+            return (None, None)
+
+        sequence_path = None
+        try:
+            context = representation["context"]
+            context["root"] = root
+            path = avalon.pipeline.format_template_with_optional_keys(
+                context, template
+            )
+            if "frame" in context:
+                context["frame"] = self.sequence_splitter
+                sequence_path = os.path.normpath(
+                    avalon.pipeline.format_template_with_optional_keys(
+                        context, template
+                    )
+                )
+
+        except KeyError:
+            # Template references unavailable data
+            return (None, None)
+
+        return (os.path.normpath(path), sequence_path)
+
+
+def register(session, plugins_presets={}):
+    '''Register plugin. Called when used as a plugin.'''
+
+    DeleteOldVersions(session, plugins_presets).register()

From 765ec59d7b47238bfc0579c3d2baaf14880f8a7e Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Mon, 10 Feb 2020 17:07:56 +0100
Subject: [PATCH 071/133] added roles and icon to action

---
 pype/ftrack/actions/action_delete_old_versions.py | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/pype/ftrack/actions/action_delete_old_versions.py b/pype/ftrack/actions/action_delete_old_versions.py
index 126c9a5e24..46f3e60d77 100644
--- a/pype/ftrack/actions/action_delete_old_versions.py
+++ b/pype/ftrack/actions/action_delete_old_versions.py
@@ -20,6 +20,10 @@ class DeleteOldVersions(BaseAction):
         "Delete files from older publishes so project can be"
         " archived with only latest versions."
     )
+    role_list = ["Pypeclub", "Project Manager", "Administrator"]
+    icon = '{}/ftrack/action_icons/PypeAdmin.svg'.format(
+        os.environ.get('PYPE_STATICS_SERVER', '')
+    )
 
     dbcon = DbConnector()

From c0584eded70c2d63ab4be82484089e263bf15988 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Tue, 11 Feb 2020 18:49:40 +0100
Subject: [PATCH 072/133] integrate new removes old representations when republishing a version and assigns their previous IDs to the new ones

---
 pype/plugins/global/publish/integrate_new.py | 24 +++++++++++++++++++-
 1 file changed, 23 insertions(+), 1 deletion(-)

diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py
index 7d95534897..4499445e6e 100644
--- a/pype/plugins/global/publish/integrate_new.py
+++ b/pype/plugins/global/publish/integrate_new.py
@@ -207,6 +207,7 @@ class
IntegrateAssetNew(pyblish.api.InstancePlugin): 'parent': subset["_id"], 'name': next_version }) - existing_repres = None + if existing_version is None: version_id = io.insert_one(version).inserted_id else: + # Update version data io.update_many({ 'type': 'version', 'parent': subset["_id"], 'name': next_version - }, {'$set': version} - ) + }, { + '$set': version + }) version_id = existing_version['_id'] - existing_repres = {repre["name"]: repre for repre in io.find({ + + # Find representations of existing version and archive them + current_repres = list(io.find({ "type": "representation", "parent": version_id - })} + })) + bulk_writes = [] + for repre in current_repres: + # Representation must change type, + # `_id` must be stored to other key and replaced with new + # - that is because new representations should have same ID + repre_id = repre["_id"] + bulk_writes.append(DeleteOne({"_id": repre_id})) + + repre["orig_id"] = repre_id + repre["_id"] = io.ObjectId() + repre["type"] = "archived_representation" + bulk_writes.append(InsertOne(repre)) + + # bulk updates + if bulk_writes: + io._database[io.Session["AVALON_PROJECT"]].bulk_write( + bulk_writes + ) + + existing_repres = list(io.find({ + "parent": version_id, + "type": "archived_representation" + })) instance.data['version'] = version['name'] From 7f49ed9fb3e353ce0be37c41d70a3da45d368ebb Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 12 Feb 2020 11:17:27 +0100 Subject: [PATCH 074/133] check of existing representations was updated --- pype/plugins/global/publish/integrate_new.py | 16 +++++++++++----- 1 file changed, 11 insertions(+), 5 deletions(-) diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py index c8e6a0188e..b5b6b10aa2 100644 --- a/pype/plugins/global/publish/integrate_new.py +++ b/pype/plugins/global/publish/integrate_new.py @@ -411,7 +411,6 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): if not dst_start_frame: dst_start_frame = dst_padding - dst = "{0}{1}{2}".format( dst_head, dst_start_frame, @@ -457,10 +456,17 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): repre_name = repre['name'] new_repre_names.append(repre_name) - # Use previous - if existing_repres and repre_name in existing_repres: - repre_id = existing_repres[repre_name]["_id"] - else: + + # Use previous representation's id if there are any + repre_id = None + for _repre in existing_repres: + # NOTE should we check lowered names? 
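+                # Matching is by exact representation name; a hit reuses
+                # the archived document's original id, so anything that
+                # linked to the representation keeps resolving after the
+                # republish.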
+ if repre_name == _repre["name"]: + repre_id = _repre["orig_id"] + break + + # Create new id if existing representations does not match + if repre_id is None: repre_id = io.ObjectId() representation = { From 26f53789f3f7cbdfdac3f1f09ddf0a2d6f7566dc Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 12 Feb 2020 15:42:51 +0100 Subject: [PATCH 075/133] fix(nks): filter out audio trackitems on effect collect --- pype/plugins/nukestudio/publish/collect_clips.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/pype/plugins/nukestudio/publish/collect_clips.py b/pype/plugins/nukestudio/publish/collect_clips.py index 4525b4947f..48e0cb66db 100644 --- a/pype/plugins/nukestudio/publish/collect_clips.py +++ b/pype/plugins/nukestudio/publish/collect_clips.py @@ -1,7 +1,7 @@ import os from pyblish import api - +import hiero import nuke class CollectClips(api.ContextPlugin): @@ -48,7 +48,9 @@ class CollectClips(api.ContextPlugin): track = item.parent() source = item.source().mediaSource() source_path = source.firstpath() - effects = [f for f in item.linkedItems() if f.isEnabled()] + effects = [f for f in item.linkedItems() + if f.isEnabled() + if isinstance(f, hiero.core.EffectTrackItem)] # If source is *.nk its a comp effect and we need to fetch the # write node output. This should be improved by parsing the script From 0cd57430f946badfdb1e06cc9580be7d81f0f6b6 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 12 Feb 2020 15:43:31 +0100 Subject: [PATCH 076/133] fix(nks): removing optionals --- pype/plugins/nukestudio/publish/extract_audio.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/pype/plugins/nukestudio/publish/extract_audio.py b/pype/plugins/nukestudio/publish/extract_audio.py index 315ba6784d..2c4afc8412 100644 --- a/pype/plugins/nukestudio/publish/extract_audio.py +++ b/pype/plugins/nukestudio/publish/extract_audio.py @@ -10,8 +10,6 @@ class ExtractAudioFile(pype.api.Extractor): hosts = ["nukestudio"] families = ["clip", "audio"] match = api.Intersection - optional = True - active = False def process(self, instance): import os From f46ca740f53b4472f46c1f955389d5d2d3aaff32 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 12 Feb 2020 15:43:59 +0100 Subject: [PATCH 077/133] feat(nks): adding debug log --- pype/plugins/nukestudio/publish/collect_plates.py | 1 + 1 file changed, 1 insertion(+) diff --git a/pype/plugins/nukestudio/publish/collect_plates.py b/pype/plugins/nukestudio/publish/collect_plates.py index b98eccce7f..75eb5bb043 100644 --- a/pype/plugins/nukestudio/publish/collect_plates.py +++ b/pype/plugins/nukestudio/publish/collect_plates.py @@ -146,6 +146,7 @@ class CollectPlatesData(api.InstancePlugin): head, padding = os.path.splitext(basename) ext = ext[1:] padding = padding[1:] + self.log.debug("_ padding: `{}`".format(padding)) # head, padding, ext = source_file.split('.') source_first_frame = int(padding) padding = len(padding) From 5d8e2dc37fc618304268f49291a38b69740dec82 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 12 Feb 2020 18:00:59 +0100 Subject: [PATCH 078/133] fix(nk): swap `write` family for `render2d` --- pype/plugins/global/load/open_file.py | 2 +- .../global/publish/collect_filesequences.py | 12 +-- pype/plugins/global/publish/extract_jpeg.py | 93 ++++++++++--------- 3 files changed, 53 insertions(+), 54 deletions(-) diff --git a/pype/plugins/global/load/open_file.py b/pype/plugins/global/load/open_file.py index 9425eaab04..b496311e0c 100644 --- a/pype/plugins/global/load/open_file.py +++ 
b/pype/plugins/global/load/open_file.py @@ -18,7 +18,7 @@ def open(filepath): class Openfile(api.Loader): """Open Image Sequence with system default""" - families = ["write"] + families = ["render2d"] representations = ["*"] label = "Open" diff --git a/pype/plugins/global/publish/collect_filesequences.py b/pype/plugins/global/publish/collect_filesequences.py index 6c06229304..8b42606e4a 100644 --- a/pype/plugins/global/publish/collect_filesequences.py +++ b/pype/plugins/global/publish/collect_filesequences.py @@ -211,12 +211,10 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin): # Get family from the data families = data.get("families", ["render"]) - if "render" not in families: - families.append("render") if "ftrack" not in families: families.append("ftrack") - if "write" in instance_family: - families.append("write") + if families_data and "render2d" in families_data: + families.append("render2d") if families_data and "slate" in families_data: families.append("slate") @@ -334,7 +332,7 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin): "stagingDir": root, "anatomy_template": "render", "fps": fps, - "tags": ["review"] if not baked_mov_path else [], + "tags": ["review"] if not baked_mov_path else ["thumb-nuke"], } instance.data["representations"].append( representation) @@ -388,8 +386,8 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin): # If no start or end frame provided, get it from collection indices = list(collection.indexes) - start = data.get("frameStart", indices[0]) - end = data.get("frameEnd", indices[-1]) + start = int(data.get("frameStart", indices[0])) + end = int(data.get("frameEnd", indices[-1])) ext = list(collection)[0].split(".")[-1] diff --git a/pype/plugins/global/publish/extract_jpeg.py b/pype/plugins/global/publish/extract_jpeg.py index 4978649ba2..7c0820ea28 100644 --- a/pype/plugins/global/publish/extract_jpeg.py +++ b/pype/plugins/global/publish/extract_jpeg.py @@ -19,7 +19,7 @@ class ExtractJpegEXR(pyblish.api.InstancePlugin): label = "Extract Jpeg EXR" hosts = ["shell"] order = pyblish.api.ExtractorOrder - families = ["imagesequence", "render", "write", "source"] + families = ["imagesequence", "render", "render2d", "source"] enabled = False def process(self, instance): @@ -41,62 +41,63 @@ class ExtractJpegEXR(pyblish.api.InstancePlugin): for repre in representations: self.log.debug(repre) - if 'review' not in repre['tags']: - return + if 'review' in repre['tags'] or "thumb-nuke" in repre['tags']: + if not isinstance(repre['files'], list): + return - input_file = repre['files'][0] + input_file = repre['files'][0] - # input_file = ( - # collections[0].format('{head}{padding}{tail}') % start - # ) - full_input_path = os.path.join(stagingdir, input_file) - self.log.info("input {}".format(full_input_path)) + # input_file = ( + # collections[0].format('{head}{padding}{tail}') % start + # ) + full_input_path = os.path.join(stagingdir, input_file) + self.log.info("input {}".format(full_input_path)) - filename = os.path.splitext(input_file)[0] - if not filename.endswith('.'): - filename += "." - jpeg_file = filename + "jpg" - full_output_path = os.path.join(stagingdir, jpeg_file) + filename = os.path.splitext(input_file)[0] + if not filename.endswith('.'): + filename += "." 
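+            # e.g. "shot.0001.exr" -> "shot.0001." -> "shot.0001.jpg"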
+ jpeg_file = filename + "jpg" + full_output_path = os.path.join(stagingdir, jpeg_file) - self.log.info("output {}".format(full_output_path)) + self.log.info("output {}".format(full_output_path)) - config_data = instance.context.data['output_repre_config'] + config_data = instance.context.data['output_repre_config'] - proj_name = os.environ.get('AVALON_PROJECT', '__default__') - profile = config_data.get(proj_name, config_data['__default__']) + proj_name = os.environ.get('AVALON_PROJECT', '__default__') + profile = config_data.get(proj_name, config_data['__default__']) - jpeg_items = [] - jpeg_items.append( - os.path.join(os.environ.get("FFMPEG_PATH"), "ffmpeg")) - # override file if already exists - jpeg_items.append("-y") - # use same input args like with mov - jpeg_items.extend(profile.get('input', [])) - # input file - jpeg_items.append("-i {}".format(full_input_path)) - # output file - jpeg_items.append(full_output_path) + jpeg_items = [] + jpeg_items.append( + os.path.join(os.environ.get("FFMPEG_PATH"), "ffmpeg")) + # override file if already exists + jpeg_items.append("-y") + # use same input args like with mov + jpeg_items.extend(profile.get('input', [])) + # input file + jpeg_items.append("-i {}".format(full_input_path)) + # output file + jpeg_items.append(full_output_path) - subprocess_jpeg = " ".join(jpeg_items) + subprocess_jpeg = " ".join(jpeg_items) - # run subprocess - self.log.debug("{}".format(subprocess_jpeg)) - pype.api.subprocess(subprocess_jpeg) + # run subprocess + self.log.debug("{}".format(subprocess_jpeg)) + pype.api.subprocess(subprocess_jpeg) - if "representations" not in instance.data: - instance.data["representations"] = [] + if "representations" not in instance.data: + instance.data["representations"] = [] - representation = { - 'name': 'thumbnail', - 'ext': 'jpg', - 'files': jpeg_file, - "stagingDir": stagingdir, - "thumbnail": True, - "tags": ['thumbnail'] - } + representation = { + 'name': 'thumbnail', + 'ext': 'jpg', + 'files': jpeg_file, + "stagingDir": stagingdir, + "thumbnail": True, + "tags": ['thumbnail'] + } - # adding representation - self.log.debug("Adding: {}".format(representation)) - representations_new.append(representation) + # adding representation + self.log.debug("Adding: {}".format(representation)) + representations_new.append(representation) instance.data["representations"] = representations_new From 00e77d690d6e8c42999f0ec154c71f84b9dbe52b Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 13 Feb 2020 11:36:55 +0100 Subject: [PATCH 079/133] added notelabellink to ignored entity types --- pype/ftrack/events/event_sync_to_avalon.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pype/ftrack/events/event_sync_to_avalon.py b/pype/ftrack/events/event_sync_to_avalon.py index 49ac50c1db..708ae707e9 100644 --- a/pype/ftrack/events/event_sync_to_avalon.py +++ b/pype/ftrack/events/event_sync_to_avalon.py @@ -28,7 +28,7 @@ class SyncToAvalonEvent(BaseEvent): ignore_entTypes = [ "socialfeed", "socialnotification", "note", "assetversion", "job", "user", "reviewsessionobject", "timer", - "timelog", "auth_userrole", "appointment" + "timelog", "auth_userrole", "appointment", "notelabellink" ] ignore_ent_types = ["Milestone"] ignore_keys = ["statusid", "thumbid"] From 2ff72b5aeea0ce4c83e27b84f7da017733f7b489 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 13 Feb 2020 11:37:13 +0100 Subject: [PATCH 080/133] small cleanup in code --- pype/ftrack/events/event_sync_to_avalon.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) 
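The reworked ExtractJpegEXR above assembles its ffmpeg call as a single space-joined string, which breaks on paths containing spaces. Below is a minimal sketch of the same single-frame thumbnail extraction using an argument list instead; it assumes only that FFMPEG_PATH points to the directory holding the ffmpeg binary, and `extract_thumbnail` is an illustrative name, not part of the patch:

    import os
    import subprocess

    def extract_thumbnail(staging_dir, first_frame_name):
        """Render the first frame of a published sequence to a JPEG."""
        src = os.path.join(staging_dir, first_frame_name)
        base, _ext = os.path.splitext(first_frame_name)
        if not base.endswith("."):
            base += "."
        dst = os.path.join(staging_dir, base + "jpg")

        ffmpeg = os.path.join(os.environ.get("FFMPEG_PATH", ""), "ffmpeg")
        # "-y" overwrites an existing thumbnail; passing a list avoids
        # the shell quoting issues of a joined command string.
        subprocess.check_call([ffmpeg, "-y", "-i", src, dst])
        return dst
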
diff --git a/pype/ftrack/events/event_sync_to_avalon.py b/pype/ftrack/events/event_sync_to_avalon.py index 708ae707e9..643a3d793e 100644 --- a/pype/ftrack/events/event_sync_to_avalon.py +++ b/pype/ftrack/events/event_sync_to_avalon.py @@ -573,8 +573,7 @@ class SyncToAvalonEvent(BaseEvent): if auto_sync is not True: return True - debug_msg = "" - debug_msg += "Updated: {}".format(len(updated)) + debug_msg = "Updated: {}".format(len(updated)) debug_action_map = { "add": "Created", "remove": "Removed", From 5b1f33350b2d2b6d6d02d19919435aa73ef35c9a Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 13 Feb 2020 11:37:43 +0100 Subject: [PATCH 081/133] added another bug report message when configuration id is not for specific entity --- pype/ftrack/events/event_sync_to_avalon.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/pype/ftrack/events/event_sync_to_avalon.py b/pype/ftrack/events/event_sync_to_avalon.py index 643a3d793e..c646756788 100644 --- a/pype/ftrack/events/event_sync_to_avalon.py +++ b/pype/ftrack/events/event_sync_to_avalon.py @@ -1544,6 +1544,14 @@ class SyncToAvalonEvent(BaseEvent): entity_type_conf_ids[entity_type] = configuration_id break + if not configuration_id: + self.log.warning( + "BUG REPORT: Missing configuration for `{} < {} >`".format( + entity_type, ent_info["entityType"] + ) + ) + continue + _entity_key = collections.OrderedDict({ "configuration_id": configuration_id, "entity_id": ftrack_id From bbe3ce3781cd4ff4fc5181e9237cc984c29c6836 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 13 Feb 2020 11:38:44 +0100 Subject: [PATCH 082/133] added debug logs for specific reason at this moment, to avoid ignoring entity types but to find out which we are using --- pype/ftrack/events/event_sync_to_avalon.py | 33 ++++++++++++++++++++++ 1 file changed, 33 insertions(+) diff --git a/pype/ftrack/events/event_sync_to_avalon.py b/pype/ftrack/events/event_sync_to_avalon.py index c646756788..345bc5b925 100644 --- a/pype/ftrack/events/event_sync_to_avalon.py +++ b/pype/ftrack/events/event_sync_to_avalon.py @@ -3,6 +3,7 @@ import collections import copy import queue import time +import datetime import atexit import traceback @@ -51,9 +52,36 @@ class SyncToAvalonEvent(BaseEvent): def __init__(self, session, plugins_presets={}): '''Expects a ftrack_api.Session instance''' + # Debug settings + # - time expiration in seconds + self.debug_print_time_expiration = 5 * 60 + # - store current time + self.debug_print_time = datetime.datetime.now() + # - store synchronize entity types to be able to use + # only entityTypes in interest instead of filtering by ignored + self.debug_sync_types = collections.defaultdict(list) + + # Set processing session to not use global self.set_process_session(session) super().__init__(session, plugins_presets) + def debug_logs(self): + """This is debug method for printing small debugs messages. 
""" + now_datetime = datetime.datetime.now() + delta = now_datetime - self.debug_print_time + if delta.total_seconds() < self.debug_print_time_expiration: + return + + self.debug_print_time = now_datetime + known_types_items = [] + for entityType, entity_type in self.debug_sync_types.items(): + known_types_items.append("{} <{}>".format(entity_type, entityType)) + + known_entityTypes = ", ".join(known_types_items) + self.log.debug( + "DEBUG MESSAGE: Known entityTypes {}".format(known_entityTypes) + ) + @property def cur_project(self): if self._cur_project is None: @@ -484,6 +512,9 @@ class SyncToAvalonEvent(BaseEvent): if not entity_type or entity_type in self.ignore_ent_types: continue + if entity_type not in self.debug_sync_types[entityType]: + self.debug_sync_types[entityType].append(entity_type) + action = ent_info["action"] ftrack_id = ent_info["entityId"] if isinstance(ftrack_id, list): @@ -633,6 +664,8 @@ class SyncToAvalonEvent(BaseEvent): self.ftrack_added = entities_by_action["add"] self.ftrack_updated = updated + self.debug_logs() + self.log.debug("Synchronization begins") try: time_1 = time.time() From 9fec5fa0e3f997e85d16ae5b83f3771c828a2de8 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 13 Feb 2020 12:08:06 +0100 Subject: [PATCH 083/133] fixed messages --- pype/ftrack/events/event_sync_to_avalon.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/pype/ftrack/events/event_sync_to_avalon.py b/pype/ftrack/events/event_sync_to_avalon.py index 345bc5b925..53de588bcc 100644 --- a/pype/ftrack/events/event_sync_to_avalon.py +++ b/pype/ftrack/events/event_sync_to_avalon.py @@ -75,11 +75,14 @@ class SyncToAvalonEvent(BaseEvent): self.debug_print_time = now_datetime known_types_items = [] for entityType, entity_type in self.debug_sync_types.items(): - known_types_items.append("{} <{}>".format(entity_type, entityType)) + ent_types_msg = ", ".join(entity_type) + known_types_items.append( + "<{}> ({})".format(entityType, ent_types_msg) + ) known_entityTypes = ", ".join(known_types_items) self.log.debug( - "DEBUG MESSAGE: Known entityTypes {}".format(known_entityTypes) + "DEBUG MESSAGE: Known types {}".format(known_entityTypes) ) @property @@ -1603,7 +1606,7 @@ class SyncToAvalonEvent(BaseEvent): try: # Commit changes of mongo_id to empty string self.process_session.commit() - self.log.debug("Commititng unsetting") + self.log.debug("Committing unsetting") except Exception: self.process_session.rollback() # TODO logging From f12bb0f8597bcbad0862b5eac50963d225e1284b Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 13 Feb 2020 12:08:26 +0100 Subject: [PATCH 084/133] fixed prints when credentials to event server are not valid --- pype/ftrack/ftrack_server/event_server_cli.py | 17 +++++++++++++---- 1 file changed, 13 insertions(+), 4 deletions(-) diff --git a/pype/ftrack/ftrack_server/event_server_cli.py b/pype/ftrack/ftrack_server/event_server_cli.py index b09b0bc84e..cae037f2d9 100644 --- a/pype/ftrack/ftrack_server/event_server_cli.py +++ b/pype/ftrack/ftrack_server/event_server_cli.py @@ -63,10 +63,19 @@ def validate_credentials(url, user, api): ) session.close() except Exception as e: - print( - 'ERROR: Can\'t log into Ftrack with used credentials:' - ' Ftrack server: "{}" // Username: {} // API key: {}' - ).format(url, user, api) + print("Can't log into Ftrack with used credentials:") + ftrack_cred = { + "Ftrack server": str(url), + "Username": str(user), + "API key": str(api) + } + item_lens = [len(key) + 1 for key in ftrack_cred.keys()] + justify_len = 
max(*item_lens) + for key, value in ftrack_cred.items(): + print("{} {}".format( + (key + ":").ljust(justify_len, " "), + value + )) return False print('DEBUG: Credentials Username: "{}", API key: "{}" are valid.'.format( From aea05e2fe912c1a46d60625dc57d0f73ec009165 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 13 Feb 2020 12:09:12 +0100 Subject: [PATCH 085/133] fixed error message formatting --- pype/ftrack/lib/ftrack_base_handler.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pype/ftrack/lib/ftrack_base_handler.py b/pype/ftrack/lib/ftrack_base_handler.py index 8329505ffb..f11cb020e9 100644 --- a/pype/ftrack/lib/ftrack_base_handler.py +++ b/pype/ftrack/lib/ftrack_base_handler.py @@ -49,7 +49,7 @@ class BaseHandler(object): ).format( str(type(session)), str(ftrack_api.session.Session), - str(session_processor.ProcessSession) + str(SocketSession) )) self._session = session From 75bff66ce21e88cd43f165f2355286882b0f4bf3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Fri, 14 Feb 2020 10:40:22 +0000 Subject: [PATCH 086/133] submit_publish_job.py edited online with Bitbucket --- pype/plugins/global/publish/submit_publish_job.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/pype/plugins/global/publish/submit_publish_job.py b/pype/plugins/global/publish/submit_publish_job.py index a9fa8febd4..792fc05a38 100644 --- a/pype/plugins/global/publish/submit_publish_job.py +++ b/pype/plugins/global/publish/submit_publish_job.py @@ -166,6 +166,8 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): "PYPE_STUDIO_PROJECTS_PATH", "PYPE_STUDIO_PROJECTS_MOUNT" ] + + deadline_pool = "" def _submit_deadline_post_job(self, instance, job): """ @@ -201,7 +203,8 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): "JobDependency0": job["_id"], "UserName": job["Props"]["User"], "Comment": instance.context.data.get("comment", ""), - "Priority": job["Props"]["Pri"] + "Priority": job["Props"]["Pri"], + "Pool": self.deadline_pool }, "PluginInfo": { "Version": "3.6", From f6b91ed589f94da3c9e3989d1ce04b2aaa405122 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 14 Feb 2020 17:06:50 +0100 Subject: [PATCH 087/133] credentials functions are not private and are ready to store credentials by host and user --- pype/ftrack/lib/credentials.py | 163 ++++++++++++++++++++++----------- 1 file changed, 109 insertions(+), 54 deletions(-) diff --git a/pype/ftrack/lib/credentials.py b/pype/ftrack/lib/credentials.py index 7e305942f2..16b1fb25fb 100644 --- a/pype/ftrack/lib/credentials.py +++ b/pype/ftrack/lib/credentials.py @@ -2,85 +2,140 @@ import os import json import ftrack_api import appdirs +import getpass +try: + from urllib.parse import urlparse +except ImportError: + from urlparse import urlparse -config_path = os.path.normpath(appdirs.user_data_dir('pype-app', 'pype')) -action_file_name = 'ftrack_cred.json' -event_file_name = 'ftrack_event_cred.json' -action_fpath = os.path.join(config_path, action_file_name) -event_fpath = os.path.join(config_path, event_file_name) -folders = set([os.path.dirname(action_fpath), os.path.dirname(event_fpath)]) +CONFIG_PATH = os.path.normpath(appdirs.user_data_dir("pype-app", "pype")) +CREDENTIALS_FILE_NAME = "ftrack_cred.json" +CREDENTIALS_PATH = os.path.join(CONFIG_PATH, CREDENTIALS_FILE_NAME) +CREDENTIALS_FOLDER = os.path.dirname(CREDENTIALS_PATH) -for folder in folders: - if not os.path.isdir(folder): - os.makedirs(folder) +if not os.path.isdir(CREDENTIALS_FOLDER): + 
os.makedirs(CREDENTIALS_FOLDER) + +USER_GETTER = None -def _get_credentials(event=False): - if event: - fpath = event_fpath - else: - fpath = action_fpath +def get_ftrack_hostname(ftrack_server=None): + if not ftrack_server: + ftrack_server = os.environ["FTRACK_SERVER"] + if "//" not in ftrack_server: + ftrack_server = "//" + ftrack_server + + return urlparse(ftrack_server).hostname + + +def get_user(): + if USER_GETTER: + return USER_GETTER() + return getpass.getuser() + + +def get_credentials(ftrack_server=None, user=None): credentials = {} - try: - file = open(fpath, 'r') - credentials = json.load(file) - except Exception: - file = open(fpath, 'w') + if not os.path.exists(CREDENTIALS_PATH): + with open(CREDENTIALS_PATH, "w") as file: + file.write(json.dumps(credentials)) + file.close() + return credentials - file.close() + with open(CREDENTIALS_PATH, "r") as file: + content = file.read() + + hostname = get_ftrack_hostname(ftrack_server) + if not user: + user = get_user() + + content_json = json.loads(content or "{}") + credentials = content_json.get(hostname, {}).get(user) or {} return credentials -def _save_credentials(username, apiKey, event=False, auto_connect=None): - data = { - 'username': username, - 'apiKey': apiKey +def save_credentials(ft_user, ft_api_key, ftrack_server=None, user=None): + hostname = get_ftrack_hostname(ftrack_server) + if not user: + user = get_user() + + with open(CREDENTIALS_PATH, "r") as file: + content = file.read() + + content_json = json.loads(content or "{}") + if hostname not in content_json: + content_json[hostname] = {} + + content_json[hostname][user] = { + "username": ft_user, + "api_key": ft_api_key } - if event: - fpath = event_fpath - if auto_connect is None: - cred = _get_credentials(True) - auto_connect = cred.get('auto_connect', False) - data['auto_connect'] = auto_connect - else: - fpath = action_fpath + # Deprecated keys + if "username" in content_json: + content_json.pop("username") + if "apiKey" in content_json: + content_json.pop("apiKey") - file = open(fpath, 'w') - file.write(json.dumps(data)) - file.close() + with open(CREDENTIALS_PATH, "w") as file: + file.write(json.dumps(content_json, indent=4)) -def _clear_credentials(event=False): - if event: - fpath = event_fpath - else: - fpath = action_fpath - open(fpath, 'w').close() - _set_env(None, None) +def clear_credentials(ft_user=None, ftrack_server=None, user=None): + if not ft_user: + ft_user = os.environ.get("FTRACK_API_USER") + + if not ft_user: + return + + hostname = get_ftrack_hostname(ftrack_server) + if not user: + user = get_user() + + with open(CREDENTIALS_PATH, "r") as file: + content = file.read() + + content_json = json.loads(content or "{}") + if hostname not in content_json: + content_json[hostname] = {} + + content_json[hostname].pop(user, None) + + with open(CREDENTIALS_PATH, "w") as file: + file.write(json.dumps(content_json)) -def _set_env(username, apiKey): - if not username: - username = '' - if not apiKey: - apiKey = '' - os.environ['FTRACK_API_USER'] = username - os.environ['FTRACK_API_KEY'] = apiKey +def set_env(ft_user=None, ft_api_key=None): + os.environ["FTRACK_API_USER"] = ft_user or "" + os.environ["FTRACK_API_KEY"] = ft_api_key or "" -def _check_credentials(username=None, apiKey=None): +def get_env_credentials(): + return ( + os.environ.get("FTRACK_API_USER"), + os.environ.get("FTRACK_API_KEY") + ) - if username and apiKey: - _set_env(username, apiKey) + +def check_credentials(ft_user, ft_api_key, ftrack_server=None): + if not ftrack_server: + 
ftrack_server = os.environ["FTRACK_SERVER"] + + if not ft_user or not ft_api_key: + return False try: - session = ftrack_api.Session() + session = ftrack_api.Session( + server_url=ftrack_server, + api_key=ft_api_key, + api_user=ft_user + ) session.close() - except Exception as e: + + except Exception: return False return True From 0272d38c7eb98bb68341b1762b93f5da4571b695 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 14 Feb 2020 17:09:07 +0100 Subject: [PATCH 088/133] lib init do not import all credentials functions but only credentials module --- pype/ftrack/lib/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pype/ftrack/lib/__init__.py b/pype/ftrack/lib/__init__.py index eabfdf0d7d..9da3b819b3 100644 --- a/pype/ftrack/lib/__init__.py +++ b/pype/ftrack/lib/__init__.py @@ -1,5 +1,5 @@ from . import avalon_sync -from .credentials import * +from . import credentials from .ftrack_app_handler import * from .ftrack_event_handler import * from .ftrack_action_handler import * From 79245bcd00283fb8e424ce438e836af0b17eba70 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 14 Feb 2020 17:10:50 +0100 Subject: [PATCH 089/133] user module can execute callbacks on username change --- pype/user/user_module.py | 16 +++++++++++++++- pype/user/widget_user.py | 2 +- 2 files changed, 16 insertions(+), 2 deletions(-) diff --git a/pype/user/user_module.py b/pype/user/user_module.py index d70885b211..a43866f471 100644 --- a/pype/user/user_module.py +++ b/pype/user/user_module.py @@ -19,8 +19,8 @@ class UserModule: log = pype.Logger().get_logger("UserModule", "user") def __init__(self, main_parent=None, parent=None): + self._callbacks_on_user_change = [] self.cred = {} - self.cred_path = os.path.normpath(os.path.join( self.cred_folder_path, self.cred_filename )) @@ -28,6 +28,9 @@ class UserModule: self.load_credentials() + def register_callback_on_user_change(self, callback): + self._callbacks_on_user_change.append(callback) + def tray_start(self): """Store credentials to env and preset them to widget""" username = "" @@ -95,6 +98,17 @@ class UserModule: )) return self.save_credentials(getpass.getuser()) + def change_credentials(self, username): + self.save_credentials(username) + for callback in self._callbacks_on_user_change: + try: + callback() + except Exception: + self.log.warning( + "Failed to execute callback \"{}\".".format(str(callback)), + exc_info=True + ) + def save_credentials(self, username): """Save credentials to JSON file, env and widget""" if username is None: diff --git a/pype/user/widget_user.py b/pype/user/widget_user.py index 7ca12ec4d4..27faa857f5 100644 --- a/pype/user/widget_user.py +++ b/pype/user/widget_user.py @@ -77,7 +77,7 @@ class UserWidget(QtWidgets.QWidget): def click_save(self): # all what should happen - validations and saving into appsdir username = self.input_username.text() - self.module.save_credentials(username) + self.module.change_credentials(username) self._close_widget() def closeEvent(self, event): From ce5ad584dd405597272c0b592998cce7e9953ef8 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 14 Feb 2020 17:11:06 +0100 Subject: [PATCH 090/133] user module has get_user method to get currently set user --- pype/user/user_module.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/pype/user/user_module.py b/pype/user/user_module.py index a43866f471..46ceb0031f 100644 --- a/pype/user/user_module.py +++ b/pype/user/user_module.py @@ -40,6 +40,9 @@ class UserModule: os.environ[self.env_name] = username 
self.widget_login.set_user(username) + def get_user(self): + return self.cred.get("username") or getpass.getuser() + def process_modules(self, modules): """ Gives ability to connect with imported modules from TrayManager. From 908a89f4ca2a0af681021f9fb8c86c7fd4723a93 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 14 Feb 2020 17:11:44 +0100 Subject: [PATCH 091/133] ftrack module uses new credentials functions and has ability to change user on User module's user change --- pype/ftrack/tray/ftrack_module.py | 57 ++++++++++++++++++------------- pype/ftrack/tray/login_dialog.py | 12 +++---- 2 files changed, 40 insertions(+), 29 deletions(-) diff --git a/pype/ftrack/tray/ftrack_module.py b/pype/ftrack/tray/ftrack_module.py index 250872f239..5811209a02 100644 --- a/pype/ftrack/tray/ftrack_module.py +++ b/pype/ftrack/tray/ftrack_module.py @@ -34,29 +34,28 @@ class FtrackModule: def validate(self): validation = False - cred = credentials._get_credentials() - try: - if 'username' in cred and 'apiKey' in cred: - validation = credentials._check_credentials( - cred['username'], - cred['apiKey'] - ) - if validation is False: - self.show_login_widget() - else: - self.show_login_widget() - - except Exception as e: - log.error("We are unable to connect to Ftrack: {0}".format(e)) - - validation = credentials._check_credentials() - if validation is True: + cred = credentials.get_credentials() + ft_user = cred.get("username") + ft_api_key = cred.get("api_key") + validation = credentials.check_credentials(ft_user, ft_api_key) + if validation: + credentials.set_env(ft_user, ft_api_key) log.info("Connected to Ftrack successfully") self.loginChange() - else: - log.warning("Please sign in to Ftrack") - self.bool_logged = False - self.set_menu_visibility() + + return validation + + if not validation and ft_user and ft_api_key: + log.warning( + "Current Ftrack credentials are not valid. 
{}: {} - {}".format( + str(os.environ.get("FTRACK_SERVER")), ft_user, ft_api_key + ) + ) + + log.info("Please sign in to Ftrack") + self.bool_logged = False + self.show_login_widget() + self.set_menu_visibility() return validation @@ -67,7 +66,7 @@ class FtrackModule: self.start_action_server() def logout(self): - credentials._clear_credentials() + credentials.clear_credentials() self.stop_action_server() log.info("Logged out of Ftrack") @@ -307,11 +306,23 @@ class FtrackModule: except Exception as e: log.error("During Killing Timer event server: {0}".format(e)) + def changed_user(self): + self.stop_action_server() + credentials.set_env() + self.validate() + def process_modules(self, modules): if 'TimersManager' in modules: self.timer_manager = modules['TimersManager'] self.timer_manager.add_module(self) + if "UserModule" in modules: + credentials.USER_GETTER = modules["UserModule"].get_user + modules["UserModule"].register_callback_on_user_change( + self.changed_user + ) + + def start_timer_manager(self, data): if self.thread_timer is not None: self.thread_timer.ftrack_start_timer(data) @@ -336,7 +347,7 @@ class FtrackEventsThread(QtCore.QThread): def __init__(self, parent): super(FtrackEventsThread, self).__init__() - cred = credentials._get_credentials() + cred = credentials.get_credentials() self.username = cred['username'] self.user = None self.last_task = None diff --git a/pype/ftrack/tray/login_dialog.py b/pype/ftrack/tray/login_dialog.py index 4dcbec5ab3..5f3777f93e 100644 --- a/pype/ftrack/tray/login_dialog.py +++ b/pype/ftrack/tray/login_dialog.py @@ -204,11 +204,11 @@ class Login_Dialog_ui(QtWidgets.QWidget): self.setError("{0} {1}".format(msg, " and ".join(missing))) return - verification = credentials._check_credentials(username, apiKey) + verification = credentials.check_credentials(username, apiKey) if verification: - credentials._save_credentials(username, apiKey, self.is_event) - credentials._set_env(username, apiKey) + credentials.save_credentials(username, apiKey, self.is_event) + credentials.set_env(username, apiKey) if self.parent is not None: self.parent.loginChange() self._close_widget() @@ -304,11 +304,11 @@ class Login_Dialog_ui(QtWidgets.QWidget): self._login_server_thread.start(url) return - verification = credentials._check_credentials(username, apiKey) + verification = credentials.check_credentials(username, apiKey) if verification is True: - credentials._save_credentials(username, apiKey, self.is_event) - credentials._set_env(username, apiKey) + credentials.save_credentials(username, apiKey, self.is_event) + credentials.set_env(username, apiKey) if self.parent is not None: self.parent.loginChange() self._close_widget() From a7c4dffb42c78a096655efa50e6164e579584636 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 14 Feb 2020 17:12:05 +0100 Subject: [PATCH 092/133] event server cli also uses new credentials functions --- pype/ftrack/ftrack_server/event_server_cli.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pype/ftrack/ftrack_server/event_server_cli.py b/pype/ftrack/ftrack_server/event_server_cli.py index b09b0bc84e..d889b6be23 100644 --- a/pype/ftrack/ftrack_server/event_server_cli.py +++ b/pype/ftrack/ftrack_server/event_server_cli.py @@ -446,9 +446,9 @@ def main(argv): event_paths = kwargs.ftrackeventpaths if not kwargs.noloadcred: - cred = credentials._get_credentials(True) + cred = credentials.get_credentials(ftrack_url) username = cred.get('username') - api_key = cred.get('apiKey') + api_key = cred.get('api_key') if 
kwargs.ftrackuser: username = kwargs.ftrackuser @@ -482,7 +482,7 @@ def main(argv): return 1 if kwargs.storecred: - credentials._save_credentials(username, api_key, True) + credentials.save_credentials(username, api_key, ftrack_url) # Set Ftrack environments os.environ["FTRACK_SERVER"] = ftrack_url From 4de7478d9dcf176eb349a52a20eddac76e0424e8 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 14 Feb 2020 17:19:13 +0100 Subject: [PATCH 093/133] again remove collect templates --- .../global/publish/collect_templates.py | 119 ------------------ 1 file changed, 119 deletions(-) delete mode 100644 pype/plugins/global/publish/collect_templates.py diff --git a/pype/plugins/global/publish/collect_templates.py b/pype/plugins/global/publish/collect_templates.py deleted file mode 100644 index 3104b5b705..0000000000 --- a/pype/plugins/global/publish/collect_templates.py +++ /dev/null @@ -1,119 +0,0 @@ -""" -Requires: - session -> AVALON_PROJECT - context -> anatomy (pypeapp.Anatomy) - instance -> subset - instance -> asset - instance -> family - -Provides: - instance -> template - instance -> assumedTemplateData - instance -> assumedDestination -""" - -import os - -from avalon import io, api -import pyblish.api - - -class CollectTemplates(pyblish.api.InstancePlugin): - """Fill templates with data needed for publish""" - - order = pyblish.api.CollectorOrder + 0.1 - label = "Collect and fill Templates" - hosts = ["maya", "nuke", "standalonepublisher"] - - def process(self, instance): - # get all the stuff from the database - subset_name = instance.data["subset"] - asset_name = instance.data["asset"] - project_name = api.Session["AVALON_PROJECT"] - - project = io.find_one( - { - "type": "project", - "name": project_name - }, - projection={"config": True, "data": True} - ) - - template = project["config"]["template"]["publish"] - anatomy = instance.context.data['anatomy'] - - asset = io.find_one({ - "type": "asset", - "name": asset_name, - "parent": project["_id"] - }) - - assert asset, ("No asset found by the name '{}' " - "in project '{}'".format(asset_name, project_name)) - silo = asset.get('silo') - - subset = io.find_one({ - "type": "subset", - "name": subset_name, - "parent": asset["_id"] - }) - - # assume there is no version yet, we start at `1` - version = None - version_number = 1 - if subset is not None: - version = io.find_one( - { - "type": "version", - "parent": subset["_id"] - }, - sort=[("name", -1)] - ) - - # if there is a subset there ought to be version - if version is not None: - version_number += int(version["name"]) - - hierarchy = asset['data']['parents'] - if hierarchy: - # hierarchy = os.path.sep.join(hierarchy) - hierarchy = os.path.join(*hierarchy) - else: - hierarchy = "" - - template_data = {"root": api.Session["AVALON_PROJECTS"], - "project": {"name": project_name, - "code": project['data']['code']}, - "silo": silo, - "family": instance.data['family'], - "asset": asset_name, - "subset": subset_name, - "version": version_number, - "hierarchy": hierarchy.replace("\\", "/"), - "representation": "TEMP"} - - # Add datetime data to template data - datetime_data = instance.context.data.get("datetimeData") or {} - template_data.update(datetime_data) - - resolution_width = instance.data.get("resolutionWidth") - resolution_height = instance.data.get("resolutionHeight") - fps = instance.data.get("fps") - - if resolution_width: - template_data["resolution_width"] = resolution_width - if resolution_width: - template_data["resolution_height"] = resolution_height - if resolution_width: - 
template_data["fps"] = fps - - instance.data["template"] = template - instance.data["assumedTemplateData"] = template_data - - # We take the parent folder of representation 'filepath' - instance.data["assumedDestination"] = os.path.dirname( - (anatomy.format(template_data))["publish"]["path"] - ) - self.log.info("Assumed Destination has been created...") - self.log.debug("__ assumedTemplateData: `{}`".format(instance.data["assumedTemplateData"])) - self.log.debug("__ template: `{}`".format(instance.data["template"])) From 0618b7a85ff9767ac6f5d4eaf3f58bd72f2b433c Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 14 Feb 2020 17:20:04 +0100 Subject: [PATCH 094/133] fix order --- pype/plugins/global/publish/collect_resources_path.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pype/plugins/global/publish/collect_resources_path.py b/pype/plugins/global/publish/collect_resources_path.py index 9fc8c576f5..734d1f84e4 100644 --- a/pype/plugins/global/publish/collect_resources_path.py +++ b/pype/plugins/global/publish/collect_resources_path.py @@ -19,7 +19,7 @@ class CollectResourcesPath(pyblish.api.InstancePlugin): """Generate directory path where the files and resources will be stored""" label = "Collect Resources Path" - order = pyblish.api.CollectorOrder + 0.995 + order = pyblish.api.CollectorOrder + 0.495 def process(self, instance): anatomy = instance.context.data["anatomy"] From a7ca458e4ee1550859fee03f84592aea9615947e Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 14 Feb 2020 17:24:56 +0100 Subject: [PATCH 095/133] collect scene has publish set to True by default --- pype/plugins/maya/publish/collect_scene.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pype/plugins/maya/publish/collect_scene.py b/pype/plugins/maya/publish/collect_scene.py index f2fbb4d623..089019f2d3 100644 --- a/pype/plugins/maya/publish/collect_scene.py +++ b/pype/plugins/maya/publish/collect_scene.py @@ -35,7 +35,7 @@ class CollectMayaScene(pyblish.api.ContextPlugin): "subset": subset, "asset": os.getenv("AVALON_ASSET", None), "label": subset, - "publish": False, + "publish": True, "family": 'workfile', "families": ['workfile'], "setMembers": [current_file] From 377513f01f77c49d656f152157a1245e63e3bab6 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 14 Feb 2020 17:25:25 +0100 Subject: [PATCH 096/133] removed locations from version --- pype/plugins/global/publish/integrate_new.py | 10 +--------- 1 file changed, 1 insertion(+), 9 deletions(-) diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py index d27582bb71..8735f8fed7 100644 --- a/pype/plugins/global/publish/integrate_new.py +++ b/pype/plugins/global/publish/integrate_new.py @@ -168,14 +168,11 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): if version_data_instance: version_data.update(version_data_instance) - # TODO remove avalon_location (shall we?) - avalon_location = api.Session["AVALON_LOCATION"] # TODO rename method from `create_version` to # `prepare_version` or similar... 
version = self.create_version( subset=subset, version_number=version_number, - locations=[avalon_location], data=version_data ) @@ -528,26 +525,21 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): return subset - def create_version(self, subset, version_number, locations, data=None): + def create_version(self, subset, version_number, data=None): """ Copy given source to destination Args: subset (dict): the registered subset of the asset version_number (int): the version number - locations (list): the currently registered locations Returns: dict: collection of data to create a version """ - # Imprint currently registered location - version_locations = [location for location in locations if - location is not None] return {"schema": "pype:version-3.0", "type": "version", "parent": subset["_id"], "name": version_number, - "locations": version_locations, "data": data} def create_version_data(self, context, instance): From 3d1e231a0db9f075eb7b6157cb99665f285e34e1 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 14 Feb 2020 17:41:24 +0100 Subject: [PATCH 097/133] added job to store thumbnails to avalon action --- .../action_store_thumbnails_to_avalon.py | 52 +++++++++++++++---- 1 file changed, 42 insertions(+), 10 deletions(-) diff --git a/pype/ftrack/actions/action_store_thumbnails_to_avalon.py b/pype/ftrack/actions/action_store_thumbnails_to_avalon.py index d63d3a6ae3..ce0dfeb244 100644 --- a/pype/ftrack/actions/action_store_thumbnails_to_avalon.py +++ b/pype/ftrack/actions/action_store_thumbnails_to_avalon.py @@ -1,6 +1,7 @@ import os import requests import errno +import json from bson.objectid import ObjectId from pype.ftrack import BaseAction @@ -41,13 +42,30 @@ class StoreThumbnailsToAvalon(BaseAction): # DEBUG LINE # root_path = r"C:\Users\jakub.trllo\Desktop\Tests\ftrack_thumbnails" + user = session.query( + "User where username is '{0}'".format(session.api_user) + ).one() + action_job = session.create("Job", { + "user": user, + "status": "running", + "data": json.dumps({ + "description": "Storing thumbnails to avalon." + }) + }) + session.commit() + thumbnail_roots = os.environ.get(self.thumbnail_key) if not thumbnail_roots: + msg = "`{}` environment is not set".format(self.thumbnail_key) + + action_job["status"] = "failed" + session.commit() + + self.log.warning(msg) + return { "success": False, - "message": "`{}` environment is not set".format( - self.thumbnail_key - ) + "message": msg } existing_thumbnail_root = None @@ -57,11 +75,18 @@ class StoreThumbnailsToAvalon(BaseAction): break if existing_thumbnail_root is None: + msg = ( + "Can't access paths, set in `{}` ({})" + ).format(self.thumbnail_key, thumbnail_roots) + + action_job["status"] = "failed" + session.commit() + + self.log.warning(msg) + return { "success": False, - "message": ( - "Can't access paths, set in `{}` ({})" - ).format(self.thumbnail_key, thumbnail_roots) + "message": msg } project = get_project_from_entity(entities[0]) @@ -71,6 +96,9 @@ class StoreThumbnailsToAvalon(BaseAction): if "publish" not in anatomy.templates: msg = "Anatomy does not have set publish key!" + action_job["status"] = "failed" + session.commit() + self.log.warning(msg) return { @@ -84,6 +112,9 @@ class StoreThumbnailsToAvalon(BaseAction): " template in Antomy for project \"{}\"" ).format(project_name) + action_job["status"] = "failed" + session.commit() + self.log.warning(msg) return { @@ -127,6 +158,9 @@ class StoreThumbnailsToAvalon(BaseAction): " can offer. 
{}" ).format(submsg) + action_job["status"] = "failed" + session.commit() + self.log.warning(msg) return { @@ -256,10 +290,8 @@ class StoreThumbnailsToAvalon(BaseAction): {"$set": {"data.thumbnail_id": thumbnail_id}} ) - self.db_con.update_one( - {"_id": avalon_asset["_id"]}, - {"$set": {"data.thumbnail_id": thumbnail_id}} - ) + action_job["status"] = "done" + session.commit() return True From 5290f6dd58de1abf78be75ab54c949c84972ae83 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 14 Feb 2020 18:09:03 +0100 Subject: [PATCH 098/133] fix arguments appending --- pype/scripts/otio_burnin.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pype/scripts/otio_burnin.py b/pype/scripts/otio_burnin.py index 590939df56..fd3c51816a 100644 --- a/pype/scripts/otio_burnin.py +++ b/pype/scripts/otio_burnin.py @@ -432,7 +432,7 @@ def burnins_from_data( if not value.startswith(TIME_CODE_KEY): value_items = value.split(TIME_CODE_KEY) text = value_items[0].format(**data) - args.append(value_items[0]) + args.append(text) burnin.add_timecode(*args) continue From feb2037c0259dba1fa5b130dd66da8655571ec6d Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 14 Feb 2020 18:09:42 +0100 Subject: [PATCH 099/133] excahnge timecode and text keys in arguments --- pype/scripts/otio_burnin.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pype/scripts/otio_burnin.py b/pype/scripts/otio_burnin.py index fd3c51816a..1d00a08521 100644 --- a/pype/scripts/otio_burnin.py +++ b/pype/scripts/otio_burnin.py @@ -34,7 +34,7 @@ DRAWTEXT = ( "%(color)s@%(opacity).1f:fontsize=%(size)d:fontfile='%(font)s'" ) TIMECODE = ( - "drawtext=text=\\'%(text)s\\':timecode=\\'%(timecode)s\\'" + "drawtext=timecode=\\'%(timecode)s\\':text=\\'%(text)s\\'" ":timecode_rate=%(fps).2f:x=%(x)s:y=%(y)s:fontcolor=" "%(color)s@%(opacity).1f:fontsize=%(size)d:fontfile='%(font)s'" ) From 773fbf106a89f6d901addf543dda849d86f8ae1d Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 14 Feb 2020 18:23:37 +0100 Subject: [PATCH 100/133] ftrack server won't raise exception if there are any event handlers to register --- pype/ftrack/ftrack_server/ftrack_server.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/pype/ftrack/ftrack_server/ftrack_server.py b/pype/ftrack/ftrack_server/ftrack_server.py index eebc3f6ec4..8464203c1d 100644 --- a/pype/ftrack/ftrack_server/ftrack_server.py +++ b/pype/ftrack/ftrack_server/ftrack_server.py @@ -100,9 +100,9 @@ class FtrackServer: log.warning(msg, exc_info=e) if len(register_functions_dict) < 1: - raise Exception(( - "There are no events with register function." 
- " Registered paths: \"{}\"" + log.warning(( + "There are no events with `register` function" + " in registered paths: \"{}\"" ).format("| ".join(paths))) # Load presets for setting plugins @@ -122,7 +122,7 @@ class FtrackServer: else: register(self.session, plugins_presets=plugins_presets) - if function_counter%7 == 0: + if function_counter % 7 == 0: time.sleep(0.1) function_counter += 1 except Exception as exc: From d1372fa25fb2c3fd5c2ccdbc101db73aaf8c74bf Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Sat, 15 Feb 2020 11:36:27 +0100 Subject: [PATCH 101/133] reversed logic of extract review conditions which don't have else statement and maked bigger indentation --- pype/plugins/global/publish/extract_review.py | 632 +++++++++--------- 1 file changed, 320 insertions(+), 312 deletions(-) diff --git a/pype/plugins/global/publish/extract_review.py b/pype/plugins/global/publish/extract_review.py index 2e79d86c38..4d63e2c641 100644 --- a/pype/plugins/global/publish/extract_review.py +++ b/pype/plugins/global/publish/extract_review.py @@ -43,320 +43,328 @@ class ExtractReview(pyblish.api.InstancePlugin): # filter out mov and img sequences representations_new = representations[:] for repre in representations: - if repre['ext'] in self.ext_filter: - tags = repre.get("tags", []) - - if "thumbnail" in tags: - continue - - self.log.info("Try repre: {}".format(repre)) - - if "review" in tags: - staging_dir = repre["stagingDir"] - - # iterating preset output profiles - for name, profile in output_profiles.items(): - repre_new = repre.copy() - ext = profile.get("ext", None) - p_tags = profile.get('tags', []) - self.log.info("p_tags: `{}`".format(p_tags)) - - # adding control for presets to be sequence - # or single file - is_sequence = ("sequence" in p_tags) and (ext in ( - "png", "jpg", "jpeg")) - - self.log.debug("Profile name: {}".format(name)) - - if not ext: - ext = "mov" - self.log.warning( - str("`ext` attribute not in output " - "profile. 
Setting to default ext: `mov`")) - - self.log.debug( - "instance.families: {}".format( - instance.data['families'])) - self.log.debug( - "profile.families: {}".format(profile['families'])) - - if any(item in instance.data['families'] for item in profile['families']): - if isinstance(repre["files"], list): - collections, remainder = clique.assemble( - repre["files"]) - - full_input_path = os.path.join( - staging_dir, collections[0].format( - '{head}{padding}{tail}') - ) - - filename = collections[0].format('{head}') - if filename.endswith('.'): - filename = filename[:-1] - else: - full_input_path = os.path.join( - staging_dir, repre["files"]) - filename = repre["files"].split(".")[0] - - repr_file = filename + "_{0}.{1}".format(name, ext) - full_output_path = os.path.join( - staging_dir, repr_file) - - if is_sequence: - filename_base = filename + "_{0}".format(name) - repr_file = filename_base + ".%08d.{0}".format( - ext) - repre_new["sequence_file"] = repr_file - full_output_path = os.path.join( - staging_dir, filename_base, repr_file) - - self.log.info("input {}".format(full_input_path)) - self.log.info("output {}".format(full_output_path)) - - new_tags = [x for x in tags if x != "delete"] - - # add families - [instance.data["families"].append(t) - for t in p_tags - if t not in instance.data["families"]] - - # add to - [new_tags.append(t) for t in p_tags - if t not in new_tags] - - self.log.info("new_tags: `{}`".format(new_tags)) - - input_args = [] - - # overrides output file - input_args.append("-y") - - # preset's input data - input_args.extend(profile.get('input', [])) - - # necessary input data - # adds start arg only if image sequence - if isinstance(repre["files"], list): - input_args.append( - "-start_number {0} -framerate {1}".format( - start_frame, fps)) - - input_args.append("-i {}".format(full_input_path)) - - for audio in instance.data.get("audio", []): - offset_frames = ( - instance.data.get("startFrameReview") - - audio["offset"] - ) - offset_seconds = offset_frames / fps - - if offset_seconds > 0: - input_args.append("-ss") - else: - input_args.append("-itsoffset") - - input_args.append(str(abs(offset_seconds))) - - input_args.extend( - ["-i", audio["filename"]] - ) - - # Need to merge audio if there are more - # than 1 input. 
- if len(instance.data["audio"]) > 1: - input_args.extend( - [ - "-filter_complex", - "amerge", - "-ac", - "2" - ] - ) - - output_args = [] - codec_args = profile.get('codec', []) - output_args.extend(codec_args) - # preset's output data - output_args.extend(profile.get('output', [])) - - # defining image ratios - resolution_ratio = float(resolution_width / ( - resolution_height * pixel_aspect)) - delivery_ratio = float(to_width) / float(to_height) - self.log.debug(resolution_ratio) - self.log.debug(delivery_ratio) - - # get scale factor - scale_factor = to_height / ( - resolution_height * pixel_aspect) - self.log.debug(scale_factor) - - # letter_box - lb = profile.get('letter_box', 0) - if lb != 0: - ffmpet_width = to_width - ffmpet_height = to_height - if "reformat" not in p_tags: - lb /= pixel_aspect - if resolution_ratio != delivery_ratio: - ffmpet_width = resolution_width - ffmpet_height = int( - resolution_height * pixel_aspect) - else: - if resolution_ratio != delivery_ratio: - lb /= scale_factor - else: - lb /= pixel_aspect - - output_args.append(str( - "-filter:v scale={0}x{1}:flags=lanczos," - "setsar=1,drawbox=0:0:iw:" - "round((ih-(iw*(1/{2})))/2):t=fill:" - "c=black,drawbox=0:ih-round((ih-(iw*(" - "1/{2})))/2):iw:round((ih-(iw*(1/{2})))" - "/2):t=fill:c=black").format( - ffmpet_width, ffmpet_height, lb)) - - # In case audio is longer than video. - output_args.append("-shortest") - - # output filename - output_args.append(full_output_path) - - self.log.debug( - "__ pixel_aspect: `{}`".format(pixel_aspect)) - self.log.debug( - "__ resolution_width: `{}`".format( - resolution_width)) - self.log.debug( - "__ resolution_height: `{}`".format( - resolution_height)) - - # scaling none square pixels and 1920 width - if "reformat" in p_tags: - if resolution_ratio < delivery_ratio: - self.log.debug("lower then delivery") - width_scale = int(to_width * scale_factor) - width_half_pad = int(( - to_width - width_scale)/2) - height_scale = to_height - height_half_pad = 0 - else: - self.log.debug("heigher then delivery") - width_scale = to_width - width_half_pad = 0 - scale_factor = float(to_width) / float( - resolution_width) - self.log.debug(scale_factor) - height_scale = int( - resolution_height * scale_factor) - height_half_pad = int( - (to_height - height_scale)/2) - - self.log.debug( - "__ width_scale: `{}`".format(width_scale)) - self.log.debug( - "__ width_half_pad: `{}`".format( - width_half_pad)) - self.log.debug( - "__ height_scale: `{}`".format( - height_scale)) - self.log.debug( - "__ height_half_pad: `{}`".format( - height_half_pad)) - - scaling_arg = str( - "scale={0}x{1}:flags=lanczos," - "pad={2}:{3}:{4}:{5}:black,setsar=1" - ).format(width_scale, height_scale, - to_width, to_height, - width_half_pad, - height_half_pad - ) - - vf_back = self.add_video_filter_args( - output_args, scaling_arg) - # add it to output_args - output_args.insert(0, vf_back) - - # baking lut file application - lut_path = instance.data.get("lutPath") - if lut_path and ("bake-lut" in p_tags): - # removing Gama info as it is all baked in lut - gamma = next((g for g in input_args - if "-gamma" in g), None) - if gamma: - input_args.remove(gamma) - - # create lut argument - lut_arg = "lut3d=file='{}'".format( - lut_path.replace( - "\\", "/").replace(":/", "\\:/") - ) - lut_arg += ",colormatrix=bt601:bt709" - - vf_back = self.add_video_filter_args( - output_args, lut_arg) - # add it to output_args - output_args.insert(0, vf_back) - self.log.info("Added Lut to ffmpeg command") - self.log.debug( - "_ 
output_args: `{}`".format(output_args)) - - if is_sequence: - stg_dir = os.path.dirname(full_output_path) - - if not os.path.exists(stg_dir): - self.log.debug( - "creating dir: {}".format(stg_dir)) - os.mkdir(stg_dir) - - mov_args = [ - os.path.join( - os.environ.get( - "FFMPEG_PATH", - ""), "ffmpeg"), - " ".join(input_args), - " ".join(output_args) - ] - subprcs_cmd = " ".join(mov_args) - - # run subprocess - self.log.debug("Executing: {}".format(subprcs_cmd)) - output = pype.api.subprocess(subprcs_cmd) - self.log.debug("Output: {}".format(output)) - - # create representation data - repre_new.update({ - 'name': name, - 'ext': ext, - 'files': repr_file, - "tags": new_tags, - "outputName": name, - "codec": codec_args, - "_profile": profile, - "resolutionHeight": resolution_height, - "resolutionWidth": resolution_width, - }) - if is_sequence: - repre_new.update({ - "stagingDir": stg_dir, - "files": os.listdir(stg_dir) - }) - - if repre_new.get('preview'): - repre_new.pop("preview") - if repre_new.get('thumbnail'): - repre_new.pop("thumbnail") - - # adding representation - self.log.debug("Adding: {}".format(repre_new)) - representations_new.append(repre_new) - else: - continue - else: + if repre['ext'] not in self.ext_filter: continue + tags = repre.get("tags", []) + + if "thumbnail" in tags: + continue + + self.log.info("Try repre: {}".format(repre)) + + if "review" not in tags: + continue + + staging_dir = repre["stagingDir"] + + # iterating preset output profiles + for name, profile in output_profiles.items(): + repre_new = repre.copy() + ext = profile.get("ext", None) + p_tags = profile.get('tags', []) + self.log.info("p_tags: `{}`".format(p_tags)) + + # adding control for presets to be sequence + # or single file + is_sequence = ("sequence" in p_tags) and (ext in ( + "png", "jpg", "jpeg")) + + self.log.debug("Profile name: {}".format(name)) + + if not ext: + ext = "mov" + self.log.warning( + str("`ext` attribute not in output " + "profile. 
Setting to default ext: `mov`")) + + self.log.debug( + "instance.families: {}".format( + instance.data['families'])) + self.log.debug( + "profile.families: {}".format(profile['families'])) + + profile_family_check = False + for _family in profile['families']: + if _family in instance.data['families']: + profile_family_check = True + break + + if not profile_family_check: + continue + + if isinstance(repre["files"], list): + collections, remainder = clique.assemble( + repre["files"]) + + full_input_path = os.path.join( + staging_dir, collections[0].format( + '{head}{padding}{tail}') + ) + + filename = collections[0].format('{head}') + if filename.endswith('.'): + filename = filename[:-1] + else: + full_input_path = os.path.join( + staging_dir, repre["files"]) + filename = repre["files"].split(".")[0] + + repr_file = filename + "_{0}.{1}".format(name, ext) + full_output_path = os.path.join( + staging_dir, repr_file) + + if is_sequence: + filename_base = filename + "_{0}".format(name) + repr_file = filename_base + ".%08d.{0}".format( + ext) + repre_new["sequence_file"] = repr_file + full_output_path = os.path.join( + staging_dir, filename_base, repr_file) + + self.log.info("input {}".format(full_input_path)) + self.log.info("output {}".format(full_output_path)) + + new_tags = [x for x in tags if x != "delete"] + + # add families + [instance.data["families"].append(t) + for t in p_tags + if t not in instance.data["families"]] + + # add to + [new_tags.append(t) for t in p_tags + if t not in new_tags] + + self.log.info("new_tags: `{}`".format(new_tags)) + + input_args = [] + + # overrides output file + input_args.append("-y") + + # preset's input data + input_args.extend(profile.get('input', [])) + + # necessary input data + # adds start arg only if image sequence + if isinstance(repre["files"], list): + input_args.append( + "-start_number {0} -framerate {1}".format( + start_frame, fps)) + + input_args.append("-i {}".format(full_input_path)) + + for audio in instance.data.get("audio", []): + offset_frames = ( + instance.data.get("startFrameReview") - + audio["offset"] + ) + offset_seconds = offset_frames / fps + + if offset_seconds > 0: + input_args.append("-ss") + else: + input_args.append("-itsoffset") + + input_args.append(str(abs(offset_seconds))) + + input_args.extend( + ["-i", audio["filename"]] + ) + + # Need to merge audio if there are more + # than 1 input. 
+ if len(instance.data["audio"]) > 1: + input_args.extend( + [ + "-filter_complex", + "amerge", + "-ac", + "2" + ] + ) + + output_args = [] + codec_args = profile.get('codec', []) + output_args.extend(codec_args) + # preset's output data + output_args.extend(profile.get('output', [])) + + # defining image ratios + resolution_ratio = float(resolution_width / ( + resolution_height * pixel_aspect)) + delivery_ratio = float(to_width) / float(to_height) + self.log.debug(resolution_ratio) + self.log.debug(delivery_ratio) + + # get scale factor + scale_factor = to_height / ( + resolution_height * pixel_aspect) + self.log.debug(scale_factor) + + # letter_box + lb = profile.get('letter_box', 0) + if lb != 0: + ffmpet_width = to_width + ffmpet_height = to_height + if "reformat" not in p_tags: + lb /= pixel_aspect + if resolution_ratio != delivery_ratio: + ffmpet_width = resolution_width + ffmpet_height = int( + resolution_height * pixel_aspect) + else: + if resolution_ratio != delivery_ratio: + lb /= scale_factor + else: + lb /= pixel_aspect + + output_args.append(str( + "-filter:v scale={0}x{1}:flags=lanczos," + "setsar=1,drawbox=0:0:iw:" + "round((ih-(iw*(1/{2})))/2):t=fill:" + "c=black,drawbox=0:ih-round((ih-(iw*(" + "1/{2})))/2):iw:round((ih-(iw*(1/{2})))" + "/2):t=fill:c=black").format( + ffmpet_width, ffmpet_height, lb)) + + # In case audio is longer than video. + output_args.append("-shortest") + + # output filename + output_args.append(full_output_path) + + self.log.debug( + "__ pixel_aspect: `{}`".format(pixel_aspect)) + self.log.debug( + "__ resolution_width: `{}`".format( + resolution_width)) + self.log.debug( + "__ resolution_height: `{}`".format( + resolution_height)) + + # scaling none square pixels and 1920 width + if "reformat" in p_tags: + if resolution_ratio < delivery_ratio: + self.log.debug("lower then delivery") + width_scale = int(to_width * scale_factor) + width_half_pad = int(( + to_width - width_scale)/2) + height_scale = to_height + height_half_pad = 0 + else: + self.log.debug("heigher then delivery") + width_scale = to_width + width_half_pad = 0 + scale_factor = float(to_width) / float( + resolution_width) + self.log.debug(scale_factor) + height_scale = int( + resolution_height * scale_factor) + height_half_pad = int( + (to_height - height_scale)/2) + + self.log.debug( + "__ width_scale: `{}`".format(width_scale)) + self.log.debug( + "__ width_half_pad: `{}`".format( + width_half_pad)) + self.log.debug( + "__ height_scale: `{}`".format( + height_scale)) + self.log.debug( + "__ height_half_pad: `{}`".format( + height_half_pad)) + + scaling_arg = str( + "scale={0}x{1}:flags=lanczos," + "pad={2}:{3}:{4}:{5}:black,setsar=1" + ).format(width_scale, height_scale, + to_width, to_height, + width_half_pad, + height_half_pad + ) + + vf_back = self.add_video_filter_args( + output_args, scaling_arg) + # add it to output_args + output_args.insert(0, vf_back) + + # baking lut file application + lut_path = instance.data.get("lutPath") + if lut_path and ("bake-lut" in p_tags): + # removing Gama info as it is all baked in lut + gamma = next((g for g in input_args + if "-gamma" in g), None) + if gamma: + input_args.remove(gamma) + + # create lut argument + lut_arg = "lut3d=file='{}'".format( + lut_path.replace( + "\\", "/").replace(":/", "\\:/") + ) + lut_arg += ",colormatrix=bt601:bt709" + + vf_back = self.add_video_filter_args( + output_args, lut_arg) + # add it to output_args + output_args.insert(0, vf_back) + self.log.info("Added Lut to ffmpeg command") + self.log.debug( + "_ 
output_args: `{}`".format(output_args)) + + if is_sequence: + stg_dir = os.path.dirname(full_output_path) + + if not os.path.exists(stg_dir): + self.log.debug( + "creating dir: {}".format(stg_dir)) + os.mkdir(stg_dir) + + mov_args = [ + os.path.join( + os.environ.get( + "FFMPEG_PATH", + ""), "ffmpeg"), + " ".join(input_args), + " ".join(output_args) + ] + subprcs_cmd = " ".join(mov_args) + + # run subprocess + self.log.debug("Executing: {}".format(subprcs_cmd)) + output = pype.api.subprocess(subprcs_cmd) + self.log.debug("Output: {}".format(output)) + + # create representation data + repre_new.update({ + 'name': name, + 'ext': ext, + 'files': repr_file, + "tags": new_tags, + "outputName": name, + "codec": codec_args, + "_profile": profile, + "resolutionHeight": resolution_height, + "resolutionWidth": resolution_width, + }) + if is_sequence: + repre_new.update({ + "stagingDir": stg_dir, + "files": os.listdir(stg_dir) + }) + + if repre_new.get('preview'): + repre_new.pop("preview") + if repre_new.get('thumbnail'): + repre_new.pop("thumbnail") + + # adding representation + self.log.debug("Adding: {}".format(repre_new)) + representations_new.append(repre_new) + for repre in representations_new: if "delete" in repre.get("tags", []): representations_new.remove(repre) From 5f5a80818c20e26deeded4f616d477a479999ee8 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Sun, 16 Feb 2020 20:07:42 +0100 Subject: [PATCH 102/133] fix(global): fixing version collection --- pype/plugins/nuke/publish/collect_writes.py | 15 +++------------ 1 file changed, 3 insertions(+), 12 deletions(-) diff --git a/pype/plugins/nuke/publish/collect_writes.py b/pype/plugins/nuke/publish/collect_writes.py index bf1c6a4b66..c29f676ef7 100644 --- a/pype/plugins/nuke/publish/collect_writes.py +++ b/pype/plugins/nuke/publish/collect_writes.py @@ -52,9 +52,9 @@ class CollectNukeWrites(pyblish.api.InstancePlugin): output_dir = os.path.dirname(path) self.log.debug('output dir: {}'.format(output_dir)) - # get version to instance for integration - instance.data['version'] = instance.context.data.get( - "version", pype.get_version_from_path(nuke.root().name())) + # # get version to instance for integration + # instance.data['version'] = instance.context.data.get( + # "version", pype.get_version_from_path(nuke.root().name())) self.log.debug('Write Version: %s' % instance.data('version')) @@ -92,16 +92,7 @@ class CollectNukeWrites(pyblish.api.InstancePlugin): # Add version data to instance version_data = { - "handles": handle_start, - "handleStart": handle_start, - "handleEnd": handle_end, - "frameStart": first_frame + handle_start, - "frameEnd": last_frame - handle_end, - "version": int(instance.data['version']), "colorspace": node["colorspace"].value(), - "families": ["render"], - "subset": instance.data["subset"], - "fps": instance.context.data["fps"] } instance.data["family"] = "write" From e8499b43ff4cf6a0b9a15b502fbf164474ca0e49 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Sun, 16 Feb 2020 20:08:18 +0100 Subject: [PATCH 103/133] fix(global): wrong version format print --- pype/plugins/global/publish/integrate_new.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py index d27582bb71..bb65a02bce 100644 --- a/pype/plugins/global/publish/integrate_new.py +++ b/pype/plugins/global/publish/integrate_new.py @@ -160,7 +160,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): subset = self.get_subset(asset_entity, instance) 
version_number = instance.data["version"] - self.log.debug("Next version: v{0:03d}".format(version_number)) + self.log.debug("Next version: v{}".format(version_number)) version_data = self.create_version_data(context, instance) From 957ca8ecd2f03097e4c1d48dff955d49b4150825 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 17 Feb 2020 14:02:59 +0100 Subject: [PATCH 104/133] fix current frame key --- pype/scripts/otio_burnin.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pype/scripts/otio_burnin.py b/pype/scripts/otio_burnin.py index 1d00a08521..e34f7235e4 100644 --- a/pype/scripts/otio_burnin.py +++ b/pype/scripts/otio_burnin.py @@ -378,7 +378,7 @@ def burnins_from_data( # Check frame start and add expression if is available if frame_start is not None: - data[CURRENT_FRAME_KEY] = r'%%{eif\:n+%d\:d}' % frame_start + data[CURRENT_FRAME_KEY[1:-1]] = r'%%{eif\:n+%d\:d}' % frame_start if frame_start_tc is not None: data[TIME_CODE_KEY[1:-1]] = TIME_CODE_KEY From b657af153f7d9af72ea73327ebbef4a5e8a333eb Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 17 Feb 2020 15:25:33 +0100 Subject: [PATCH 105/133] fix(global): removing unnecessary host argument --- pype/plugins/global/publish/collect_anatomy.py | 2 +- pype/plugins/global/publish/collect_instance_anatomy_data.py | 1 - 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/pype/plugins/global/publish/collect_anatomy.py b/pype/plugins/global/publish/collect_anatomy.py index 0831c16d32..ae83e39513 100644 --- a/pype/plugins/global/publish/collect_anatomy.py +++ b/pype/plugins/global/publish/collect_anatomy.py @@ -18,7 +18,7 @@ Provides: import os import json -from avalon import io, api, lib +from avalon import api, lib from pypeapp import Anatomy import pyblish.api diff --git a/pype/plugins/global/publish/collect_instance_anatomy_data.py b/pype/plugins/global/publish/collect_instance_anatomy_data.py index 9c6a8b08f2..825c48dcf4 100644 --- a/pype/plugins/global/publish/collect_instance_anatomy_data.py +++ b/pype/plugins/global/publish/collect_instance_anatomy_data.py @@ -33,7 +33,6 @@ class CollectInstanceAnatomyData(pyblish.api.InstancePlugin): order = pyblish.api.CollectorOrder + 0.49 label = "Collect instance anatomy data" - hosts = ["maya", "nuke", "standalonepublisher"] def process(self, instance): # get all the stuff from the database From 3922529058d43a631a7269ba4006707edd68c150 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 17 Feb 2020 15:54:44 +0100 Subject: [PATCH 106/133] escape colon and comma in texts --- pype/scripts/otio_burnin.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/pype/scripts/otio_burnin.py b/pype/scripts/otio_burnin.py index e34f7235e4..c61ea66d2d 100644 --- a/pype/scripts/otio_burnin.py +++ b/pype/scripts/otio_burnin.py @@ -199,7 +199,11 @@ class ModifiedBurnins(ffmpeg_burnins.Burnins): """ resolution = self.resolution data = { - 'text': text, + 'text': ( + text + .replace(",", r"\,") + .replace(':', r'\:') + ), 'color': options['font_color'], 'size': options['font_size'] } From d9ffc411a4d65559e436e7d220b8023c8eba5dc6 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 17 Feb 2020 16:48:36 +0100 Subject: [PATCH 107/133] integrate new's version override is ready to handle "append" method per instance --- pype/plugins/global/publish/integrate_new.py | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py index b5b6b10aa2..2e2094dfc8 100644 
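# Side note on the otio_burnin fixes above (PATCH 104/106): ffmpeg's
# drawtext treats ':' and ',' as option separators, so burned-in text
# must escape them, and the running frame number is an `eif` expression
# offset by the clip's first frame. A minimal sketch, independent of
# the otio_burnin module itself:

def escape_drawtext(text):
    """Escape characters that would end a drawtext option value."""
    return text.replace(",", r"\,").replace(":", r"\:")

def current_frame_expression(frame_start):
    """Build the drawtext expression for a running frame counter."""
    # "%%" collapses to "%" during later %-formatting; `n` is the frame
    # index and `eif` prints the evaluated expression as an integer.
    return r"%%{eif\:n+%d\:d}" % frame_start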
--- a/pype/plugins/global/publish/integrate_new.py +++ b/pype/plugins/global/publish/integrate_new.py @@ -204,6 +204,9 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): data=version_data) self.log.debug("Creating version ...") + + new_repre_names_low = [_repre["name"].lower() for _repre in repres] + existing_version = io.find_one({ 'type': 'version', 'parent': subset["_id"], @@ -213,6 +216,10 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): if existing_version is None: version_id = io.insert_one(version).inserted_id else: + # Check if instance have set `append` mode which cause that + # only replicated representations are set to archive + append_repres = instance.data.get("append", False) + # Update version data io.update_many({ 'type': 'version', @@ -230,6 +237,10 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): })) bulk_writes = [] for repre in current_repres: + if append_repres: + # archive only duplicated representations + if repre["name"].lower() not in new_repre_names_low: + continue # Representation must change type, # `_id` must be stored to other key and replaced with new # - that is because new representations should have same ID @@ -284,7 +295,6 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): if 'transfers' not in instance.data: instance.data['transfers'] = [] - new_repre_names = [] for idx, repre in enumerate(instance.data["representations"]): # Collection @@ -454,9 +464,6 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): continue repre_context[key] = template_data[key] - repre_name = repre['name'] - new_repre_names.append(repre_name) - # Use previous representation's id if there are any repre_id = None for _repre in existing_repres: From 06f9187119ff8b15074717c83c224b96e287e7e7 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 17 Feb 2020 16:52:20 +0100 Subject: [PATCH 108/133] thumbnail is also stored to asset in store thumbnails action --- pype/ftrack/actions/action_store_thumbnails_to_avalon.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/pype/ftrack/actions/action_store_thumbnails_to_avalon.py b/pype/ftrack/actions/action_store_thumbnails_to_avalon.py index ce0dfeb244..21ac6666d5 100644 --- a/pype/ftrack/actions/action_store_thumbnails_to_avalon.py +++ b/pype/ftrack/actions/action_store_thumbnails_to_avalon.py @@ -290,6 +290,11 @@ class StoreThumbnailsToAvalon(BaseAction): {"$set": {"data.thumbnail_id": thumbnail_id}} ) + self.db_con.update_one( +- {"_id": avalon_asset["_id"]}, +- {"$set": {"data.thumbnail_id": thumbnail_id}} +- ) + action_job["status"] = "done" session.commit() From 4256eccc2b797d1e8af4d800e11a14c78222c669 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 17 Feb 2020 17:05:08 +0100 Subject: [PATCH 109/133] fixed few merge issues --- pype/plugins/global/publish/integrate_new.py | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py index 9de29cd387..8d41aa7907 100644 --- a/pype/plugins/global/publish/integrate_new.py +++ b/pype/plugins/global/publish/integrate_new.py @@ -196,6 +196,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): append_repres = instance.data.get("append", False) # Update version data + # TODO query by _id and io.update_many({ 'type': 'version', 'parent': subset["_id"], @@ -322,7 +323,9 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): index_frame_start = None if repre.get("frameStart"): - frame_start_padding = anatomy.templates["render"]["padding"] + 
frame_start_padding = (
+                anatomy.templates["render"]["padding"]
+            )
             index_frame_start = int(repre.get("frameStart"))

             # exception for slate workflow
@@ -407,9 +410,10 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):

             # Use previous representation's id if there are any
             repre_id = None
+            repre_name_low = repre["name"].lower()
             for _repre in existing_repres:
                 # NOTE should we check lowered names?
-                if repre_name == _repre["name"]:
+                if repre_name_low == _repre["name"]:
                     repre_id = _repre["orig_id"]
                     break

@@ -435,7 +439,9 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
             representation["context"]["output"] = repre['outputName']

             if sequence_repre and repre.get("frameStart"):
-                representation['context']['frame'] = src_padding_exp % int(repre.get("frameStart"))
+                representation['context']['frame'] = (
+                    src_padding_exp % int(repre.get("frameStart"))
+                )

             self.log.debug("__ representation: {}".format(representation))
             destination_list.append(dst)

From d3823aecd1c36fa876142d7775fe5f47cbf913eb Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Mon, 17 Feb 2020 17:34:59 +0100
Subject: [PATCH 110/133] fixed skipping jpeg extraction

---
 pype/plugins/global/publish/extract_jpeg.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pype/plugins/global/publish/extract_jpeg.py b/pype/plugins/global/publish/extract_jpeg.py
index 7c0820ea28..28d16198cd 100644
--- a/pype/plugins/global/publish/extract_jpeg.py
+++ b/pype/plugins/global/publish/extract_jpeg.py
@@ -43,7 +43,7 @@ class ExtractJpegEXR(pyblish.api.InstancePlugin):
             self.log.debug(repre)
             if 'review' in repre['tags'] or "thumb-nuke" in repre['tags']:
                 if not isinstance(repre['files'], list):
-                    return
+                    continue

                 input_file = repre['files'][0]

From 1a1e73649866a77e932075b901fe8edccf2e29ca Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Mon, 17 Feb 2020 17:36:31 +0100
Subject: [PATCH 111/133] moved indentation by changing validation condition
 logic

---
 pype/plugins/global/publish/extract_jpeg.py | 95 +++++++++++----------
 1 file changed, 49 insertions(+), 46 deletions(-)

diff --git a/pype/plugins/global/publish/extract_jpeg.py b/pype/plugins/global/publish/extract_jpeg.py
index 28d16198cd..abd20bb9ea 100644
--- a/pype/plugins/global/publish/extract_jpeg.py
+++ b/pype/plugins/global/publish/extract_jpeg.py
@@ -41,63 +41,66 @@ class ExtractJpegEXR(pyblish.api.InstancePlugin):

         for repre in representations:
             self.log.debug(repre)
-            if 'review' in repre['tags'] or "thumb-nuke" in repre['tags']:
-                if not isinstance(repre['files'], list):
-                    continue
+            valid = 'review' in repre['tags'] or "thumb-nuke" in repre['tags']
+            if not valid:
+                continue

-                input_file = repre['files'][0]
+            if not isinstance(repre['files'], list):
+                continue

-                # input_file = (
-                #     collections[0].format('{head}{padding}{tail}') % start
-                # )
-                full_input_path = os.path.join(stagingdir, input_file)
-                self.log.info("input {}".format(full_input_path))
+            input_file = repre['files'][0]

-                filename = os.path.splitext(input_file)[0]
-                if not filename.endswith('.'):
-                    filename += "." 
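# Why PATCH 110 above matters: inside `for repre in representations:` a
# bare `return` ends the whole plugin on the first single-file
# representation, while `continue` only skips that one. A runnable
# distillation of the fix, with made-up data shapes:

def first_files(representations):
    """Collect the first file of every sequence representation."""
    result = []
    for repre in representations:
        if not isinstance(repre["files"], list):
            # `return` here (the old code) would silently drop every
            # representation that follows; `continue` skips just this one.
            continue
        result.append(repre["files"][0])
    return result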
+ jpeg_file = filename + "jpg" + full_output_path = os.path.join(stagingdir, jpeg_file) - config_data = instance.context.data['output_repre_config'] + self.log.info("output {}".format(full_output_path)) - proj_name = os.environ.get('AVALON_PROJECT', '__default__') - profile = config_data.get(proj_name, config_data['__default__']) + config_data = instance.context.data['output_repre_config'] - jpeg_items = [] - jpeg_items.append( - os.path.join(os.environ.get("FFMPEG_PATH"), "ffmpeg")) - # override file if already exists - jpeg_items.append("-y") - # use same input args like with mov - jpeg_items.extend(profile.get('input', [])) - # input file - jpeg_items.append("-i {}".format(full_input_path)) - # output file - jpeg_items.append(full_output_path) + proj_name = os.environ.get('AVALON_PROJECT', '__default__') + profile = config_data.get(proj_name, config_data['__default__']) - subprocess_jpeg = " ".join(jpeg_items) + jpeg_items = [] + jpeg_items.append( + os.path.join(os.environ.get("FFMPEG_PATH"), "ffmpeg")) + # override file if already exists + jpeg_items.append("-y") + # use same input args like with mov + jpeg_items.extend(profile.get('input', [])) + # input file + jpeg_items.append("-i {}".format(full_input_path)) + # output file + jpeg_items.append(full_output_path) - # run subprocess - self.log.debug("{}".format(subprocess_jpeg)) - pype.api.subprocess(subprocess_jpeg) + subprocess_jpeg = " ".join(jpeg_items) - if "representations" not in instance.data: - instance.data["representations"] = [] + # run subprocess + self.log.debug("{}".format(subprocess_jpeg)) + pype.api.subprocess(subprocess_jpeg) - representation = { - 'name': 'thumbnail', - 'ext': 'jpg', - 'files': jpeg_file, - "stagingDir": stagingdir, - "thumbnail": True, - "tags": ['thumbnail'] - } + if "representations" not in instance.data: + instance.data["representations"] = [] - # adding representation - self.log.debug("Adding: {}".format(representation)) - representations_new.append(representation) + representation = { + 'name': 'thumbnail', + 'ext': 'jpg', + 'files': jpeg_file, + "stagingDir": stagingdir, + "thumbnail": True, + "tags": ['thumbnail'] + } + + # adding representation + self.log.debug("Adding: {}".format(representation)) + representations_new.append(representation) instance.data["representations"] = representations_new From 3fe6a13f0c2c97adf34bf6aea042970fdf90e572 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 17 Feb 2020 17:40:49 +0100 Subject: [PATCH 112/133] sync actions ignore milestones --- pype/ftrack/lib/avalon_sync.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/pype/ftrack/lib/avalon_sync.py b/pype/ftrack/lib/avalon_sync.py index f08dc73c19..f5b4c4b8c3 100644 --- a/pype/ftrack/lib/avalon_sync.py +++ b/pype/ftrack/lib/avalon_sync.py @@ -236,6 +236,7 @@ class SyncEntitiesFactory: " from TypedContext where project_id is \"{}\"" ) ignore_custom_attr_key = "avalon_ignore_sync" + ignore_entity_types = ["milestone"] report_splitter = {"type": "label", "value": "---"} @@ -366,7 +367,10 @@ class SyncEntitiesFactory: parent_id = entity["parent_id"] entity_type = entity.entity_type entity_type_low = entity_type.lower() - if entity_type_low == "task": + if entity_type_low in self.ignore_entity_types: + continue + + elif entity_type_low == "task": entities_dict[parent_id]["tasks"].append(entity["name"]) continue From 871870c603119c4920dd15f4995e27425898ceb8 Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Mon, 17 Feb 2020 18:34:01 +0100 Subject: [PATCH 113/133] fix remaining dashes --- 
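# For reference, the extract_jpeg rewrite above boils the ffmpeg call
# down to a flat argument list joined into one command string. A minimal
# sketch of that assembly; the profile options and paths are assumed:

import os

def build_jpeg_cmd(full_input_path, full_output_path, input_args=None):
    """Build a single-frame ffmpeg command like the plugin above does."""
    ffmpeg = os.path.join(os.environ.get("FFMPEG_PATH", ""), "ffmpeg")
    args = [ffmpeg, "-y"]              # overwrite output if it exists
    args.extend(input_args or [])      # preset/profile input options
    args.append("-i {}".format(full_input_path))
    args.append(full_output_path)
    return " ".join(args)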
pype/ftrack/actions/action_store_thumbnails_to_avalon.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pype/ftrack/actions/action_store_thumbnails_to_avalon.py b/pype/ftrack/actions/action_store_thumbnails_to_avalon.py index 21ac6666d5..7adc36f4b5 100644 --- a/pype/ftrack/actions/action_store_thumbnails_to_avalon.py +++ b/pype/ftrack/actions/action_store_thumbnails_to_avalon.py @@ -291,9 +291,9 @@ class StoreThumbnailsToAvalon(BaseAction): ) self.db_con.update_one( -- {"_id": avalon_asset["_id"]}, -- {"$set": {"data.thumbnail_id": thumbnail_id}} -- ) + {"_id": avalon_asset["_id"]}, + {"$set": {"data.thumbnail_id": thumbnail_id}} + ) action_job["status"] = "done" session.commit() From 07dbb2533d3a091c27a4a40a237b46377509f9b4 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 17 Feb 2020 21:07:26 +0100 Subject: [PATCH 114/133] fix(nks): missing family if `review` applied --- pype/plugins/nukestudio/publish/collect_audio.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pype/plugins/nukestudio/publish/collect_audio.py b/pype/plugins/nukestudio/publish/collect_audio.py index 61419b1ad9..e141f50488 100644 --- a/pype/plugins/nukestudio/publish/collect_audio.py +++ b/pype/plugins/nukestudio/publish/collect_audio.py @@ -15,7 +15,7 @@ class CollectAudio(api.InstancePlugin): order = api.CollectorOrder + 0.1025 label = "Collect Audio" hosts = ["nukestudio"] - families = ["clip"] + families = ["clip", "plate"] def process(self, instance): # Exclude non-tagged instances. From bc4447a971e48ee375801146ff391c66c1e1c681 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 17 Feb 2020 21:45:15 +0100 Subject: [PATCH 115/133] fix(nks): tags collection for plates and audio wrong subset name --- .../nukestudio/publish/collect_audio.py | 20 ++++++++++--------- .../nukestudio/publish/collect_plates.py | 11 ++++------ 2 files changed, 15 insertions(+), 16 deletions(-) diff --git a/pype/plugins/nukestudio/publish/collect_audio.py b/pype/plugins/nukestudio/publish/collect_audio.py index e141f50488..727d7da795 100644 --- a/pype/plugins/nukestudio/publish/collect_audio.py +++ b/pype/plugins/nukestudio/publish/collect_audio.py @@ -1,5 +1,5 @@ from pyblish import api - +import os class CollectAudio(api.InstancePlugin): """Collect audio from tags. @@ -12,17 +12,19 @@ class CollectAudio(api.InstancePlugin): """ # Run just before CollectSubsets - order = api.CollectorOrder + 0.1025 + order = api.CollectorOrder + 0.1021 label = "Collect Audio" hosts = ["nukestudio"] - families = ["clip", "plate"] + families = ["clip"] def process(self, instance): # Exclude non-tagged instances. 
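# (Aside: the subset rename in this patch reduces to reading a suffix
# from the tag metadata, defaulting to "Main", and prefixing the family.
# With assumed tag data:
#
#     tag_data = {"tag.family": "audio", "tag.subset": "dialogue"}
#     subset = tag_data.get("tag.subset", "Main")
#     assert "audio" + subset.title() == "audioDialogue"
# )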
tagged = False for tag in instance.data["tags"]: - family = dict(tag["metadata"]).get("tag.family", "") + tag_data = dict(tag["metadata"]) + family = tag_data.get("tag.family", "") if family.lower() == "audio": + subset = tag_data.get("tag.subset", "Main") tagged = True if not tagged: @@ -40,14 +42,14 @@ class CollectAudio(api.InstancePlugin): data["family"] = "audio" data["families"] = ["ftrack"] - subset = "" - for tag in instance.data["tags"]: - tag_data = dict(tag["metadata"]) - if "tag.subset" in tag_data: - subset = tag_data["tag.subset"] data["subset"] = "audio" + subset.title() data["source"] = data["sourcePath"] + data["label"] = "{} - {} - ({})".format( + data['asset'], data["subset"], os.path.splitext(data["sourcePath"])[ + 1] + ) + self.log.debug("Creating instance with data: {}".format(data)) instance.context.create_instance(**data) diff --git a/pype/plugins/nukestudio/publish/collect_plates.py b/pype/plugins/nukestudio/publish/collect_plates.py index 75eb5bb043..e0ecbaf302 100644 --- a/pype/plugins/nukestudio/publish/collect_plates.py +++ b/pype/plugins/nukestudio/publish/collect_plates.py @@ -23,8 +23,10 @@ class CollectPlates(api.InstancePlugin): # Exclude non-tagged instances. tagged = False for tag in instance.data["tags"]: - family = dict(tag["metadata"]).get("tag.family", "") + tag_data = dict(tag["metadata"]) + family = tag_data.get("tag.family", "") if family.lower() == "plate": + subset = tag_data.get("tag.subset", "Main") tagged = True break @@ -43,12 +45,7 @@ class CollectPlates(api.InstancePlugin): data["family"] = family.lower() data["families"] = ["ftrack"] + instance.data["families"][1:] data["source"] = data["sourcePath"] - - subset = "" - for tag in instance.data["tags"]: - tag_data = dict(tag["metadata"]) - if "tag.subset" in tag_data: - subset = tag_data["tag.subset"] + data["subset"] = data["family"] + subset.title() data["name"] = data["subset"] + "_" + data["asset"] From f3fc92881ad360017a03d905a55d1485256ce9e5 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 17 Feb 2020 21:47:43 +0100 Subject: [PATCH 116/133] fix(nks): not correct way of collecting frame start - should not be offset-ed by handle start --- pype/plugins/nukestudio/publish/collect_tag_framestart.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/pype/plugins/nukestudio/publish/collect_tag_framestart.py b/pype/plugins/nukestudio/publish/collect_tag_framestart.py index 1342d996ab..993aa99a3e 100644 --- a/pype/plugins/nukestudio/publish/collect_tag_framestart.py +++ b/pype/plugins/nukestudio/publish/collect_tag_framestart.py @@ -30,9 +30,12 @@ class CollectClipTagFrameStart(api.InstancePlugin): except ValueError: if "source" in t_value: source_first = instance.data["sourceFirst"] + if source_first == 0: + source_first = 1 + self.log.info("Start frame on `{0}`".format(source_first)) source_in = instance.data["sourceIn"] - handle_start = instance.data["handleStart"] - start_frame = (source_first + source_in) - handle_start + self.log.info("Start frame on `{0}`".format(source_in)) + start_frame = source_first + source_in instance.data["startingFrame"] = start_frame self.log.info("Start frame on `{0}` set to `{1}`".format( From 188881a0a5873ee7984b610f64599eebb57d9ac0 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 18 Feb 2020 00:02:50 +0100 Subject: [PATCH 117/133] fix(nks): review family to `plate` and plates cleanup --- .../nukestudio/publish/collect_plates.py | 17 ++++++++++------- .../nukestudio/publish/collect_reviews.py | 2 +- 2 files changed, 11 
insertions(+), 8 deletions(-) diff --git a/pype/plugins/nukestudio/publish/collect_plates.py b/pype/plugins/nukestudio/publish/collect_plates.py index e0ecbaf302..acdc5193ae 100644 --- a/pype/plugins/nukestudio/publish/collect_plates.py +++ b/pype/plugins/nukestudio/publish/collect_plates.py @@ -14,7 +14,7 @@ class CollectPlates(api.InstancePlugin): """ # Run just before CollectSubsets - order = api.CollectorOrder + 0.1025 + order = api.CollectorOrder + 0.1021 label = "Collect Plates" hosts = ["nukestudio"] families = ["clip"] @@ -36,24 +36,27 @@ class CollectPlates(api.InstancePlugin): "\"plate\"".format(instance) ) return + self.log.debug("__ subset: `{}`".format(instance.data["subset"])) + # if "audio" in instance.data["subset"]: + # return # Collect data. data = {} for key, value in instance.data.iteritems(): data[key] = value + self.log.debug("__ family: `{}`".format(family)) + self.log.debug("__ subset: `{}`".format(subset)) + data["family"] = family.lower() data["families"] = ["ftrack"] + instance.data["families"][1:] data["source"] = data["sourcePath"] - - data["subset"] = data["family"] + subset.title() - + data["subset"] = family + subset.title() data["name"] = data["subset"] + "_" + data["asset"] data["label"] = "{} - {} - ({})".format( - data['asset'], data["subset"], os.path.splitext(data["sourcePath"])[ - 1] - ) + data['asset'], data["subset"], os.path.splitext( + data["sourcePath"])[1]) if "review" in instance.data["families"]: data["label"] += " - review" diff --git a/pype/plugins/nukestudio/publish/collect_reviews.py b/pype/plugins/nukestudio/publish/collect_reviews.py index f223e5ca65..af8fd4a0e7 100644 --- a/pype/plugins/nukestudio/publish/collect_reviews.py +++ b/pype/plugins/nukestudio/publish/collect_reviews.py @@ -16,7 +16,7 @@ class CollectReviews(api.InstancePlugin): order = api.CollectorOrder + 0.1022 label = "Collect Reviews" hosts = ["nukestudio"] - families = ["clip"] + families = ["plate"] def process(self, instance): # Exclude non-tagged instances. 
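For reference, the plate naming that these nukestudio patches converge on
can be reduced to a few lines; the sample values below are assumed, not
taken from a real project:

    import os

    def plate_names(asset, family, subset_suffix, source_path):
        subset = family + subset_suffix.title()      # e.g. "plateMain"
        name = subset + "_" + asset
        label = "{} - {} - ({})".format(
            asset, subset, os.path.splitext(source_path)[1]
        )
        return subset, name, label

    # plate_names("sh010", "plate", "main", "/shots/sh010_plate.mov")
    # -> ("plateMain", "plateMain_sh010", "sh010 - plateMain - (.mov)")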
From 9526e0b5a3c8f4a386333ab62cd60384570f4a83 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 18 Feb 2020 13:08:16 +0100 Subject: [PATCH 118/133] implemented event that can change task status on first asset version creation --- .../events/event_first_version_status.py | 175 ++++++++++++++++++ 1 file changed, 175 insertions(+) create mode 100644 pype/ftrack/events/event_first_version_status.py diff --git a/pype/ftrack/events/event_first_version_status.py b/pype/ftrack/events/event_first_version_status.py new file mode 100644 index 0000000000..ac0e94c3ae --- /dev/null +++ b/pype/ftrack/events/event_first_version_status.py @@ -0,0 +1,175 @@ +from pype.ftrack import BaseEvent + + +class FirstVersionStatus(BaseEvent): + + # WARNING Priority MUST be higher + # than handler in `event_version_to_task_statuses.py` + priority = 200 + + first_run = True + keys_enum = ["task", "task_type"] + # This should be set with presets + task_status_map = [] + + # EXAMPLE of `task_status_map` + __example_status_map__ = [{ + # `key` specify where to look for name (is enumerator of `keys_enum`) + "key": "task", + # speicification of name + "name": "compositing", + # Status to set to the task + "status": "Blocking" + }] + + def launch(self, session, event): + """Set task's status for first created Asset Version.""" + + if not self.task_status_map: + return + + if self.first_run: + self.first_run = False + valid_task_status_map = [] + for item in self.task_status_map: + key = (item.get("key") or "").lower() + name = (item.get("name") or "").lower() + status = (item.get("status") or "").lower() + if not (key and name and status): + self.log.warning(( + "Invalid item in Task -> Status mapping. {}" + ).format(str(item))) + continue + + if key not in self.keys_enum: + expected_msg = "" + last_key_idx = len(self.keys_enum) - 1 + for idx, key in enumerate(self.keys_enum): + if idx == 0: + joining_part = "`{}`" + elif idx == last_key_idx: + joining_part = "or `{}`" + else: + joining_part = ", `{}`" + expected_msg += joining_part.format(key) + + self.log.warning(( + "Invalid key `{}`. Expected: {}." 
+ ).format(key, expected_msg)) + continue + + valid_task_status_map.append({ + "key": key, + "name": name, + "status": status + }) + self.task_status_map = valid_task_status_map + + entities_info = self.filter_event_ents(event) + if not entities_info: + return + + entity_ids = [] + for entity_info in entities_info: + entity_ids.append(entity_info["entityId"]) + + joined_entity_ids = ",".join( + ["\"{}\"".format(entity_id) for entity_id in entity_ids] + ) + asset_verisons = session.query( + "AssetVersion where id in ({})".format(joined_entity_ids) + ).all() + + statuses_per_type_id = {} + + project_schema = None + for asset_verison in asset_verisons: + task_entity = asset_verison["task"] + found_item = None + for item in self.task_status_map: + if ( + item["key"] == "task" and + task_entity["name"].lower() != item["name"] + ): + continue + + elif ( + item["key"] == "task_type" and + task_entity["type"]["name"].lower() != item["name"] + ): + continue + + found_item = item + break + + if not found_item: + continue + + if project_schema is None: + project_schema = task_entity["project"]["project_schema"] + + # Get all available statuses for Task + type_id = task_entity["type_id"] + if type_id not in statuses_per_type_id: + statuses = project_schema.get_statuses( + "Task", task_entity["type_id"] + ) + + # map lowered status name with it's object + statuses_per_type_id[type_id] = { + status["name"].lower(): status for status in statuses + } + + statuses_by_low_name = statuses_per_type_id[type_id] + new_status = statuses_by_low_name.get(found_item["status"]) + if not new_status: + continue + + ent_path = "/".join([ent["name"] for ent in task_entity["link"]]) + + try: + task_entity["status"] = new_status + session.commit() + self.log.debug("[ {} ] Status updated to [ {} ]".format( + ent_path, new_status['name'] + )) + + except Exception: + session.rollback() + self.log.warning( + "[ {} ] Status couldn't be set.".format(ent_path), + exc_info=True + ) + + def filter_event_ents(self, event): + filtered_ents = [] + for entity in event["data"].get("entities", []): + # Care only about add actions + if entity["action"] != "add": + continue + + # Filter AssetVersions + if entity["entityType"] != "assetversion": + continue + + entity_changes = entity.get("changes") or {} + + # Check if version of Asset Version is `1` + version_num = entity_changes.get("version", {}).get("new") + if version_num != 1: + continue + + # Skip in Asset Version don't have task + task_id = entity_changes.get("taskid", {}).get("new") + if not task_id: + continue + + filtered_ents.append(entity) + + return filtered_ents + + +def register(session, plugins_presets): + '''Register plugin. 
Called when used as an plugin.''' + + FirstVersionStatus(session, plugins_presets).register() From e57fecdeb34ac6dfb41d7cc75e019a32b40c5981 Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Tue, 18 Feb 2020 13:08:25 +0100 Subject: [PATCH 119/133] making sure that options from capture.json are applied correctly this will need some refactoring though --- pype/maya/lib.py | 25 ++++++++++++++++++------- 1 file changed, 18 insertions(+), 7 deletions(-) diff --git a/pype/maya/lib.py b/pype/maya/lib.py index ec39b3556e..dafc281903 100644 --- a/pype/maya/lib.py +++ b/pype/maya/lib.py @@ -2176,18 +2176,29 @@ def load_capture_preset(path=None, data=None): 4: 'nolights'} for key in preset[id]: if key == 'high_quality': - temp_options2['multiSampleEnable'] = True - temp_options2['multiSampleCount'] = 8 - temp_options2['textureMaxResolution'] = 1024 - temp_options2['enableTextureMaxRes'] = True + if preset[id][key] == True: + temp_options2['multiSampleEnable'] = True + temp_options2['multiSampleCount'] = 4 + temp_options2['textureMaxResolution'] = 1024 + temp_options2['enableTextureMaxRes'] = True + temp_options2['textureMaxResMode'] = 1 + else: + temp_options2['multiSampleEnable'] = False + temp_options2['multiSampleCount'] = 4 + temp_options2['textureMaxResolution'] = 512 + temp_options2['enableTextureMaxRes'] = True + temp_options2['textureMaxResMode'] = 0 + + if key == 'ssaoEnable': + if preset[id][key] == True: + temp_options2['ssaoEnable'] = True + else: + temp_options2['ssaoEnable'] = False if key == 'alphaCut': temp_options2['transparencyAlgorithm'] = 5 temp_options2['transparencyQuality'] = 1 - if key == 'ssaoEnable': - temp_options2['ssaoEnable'] = True - if key == 'headsUpDisplay': temp_options['headsUpDisplay'] = True From 5a38ba950c728fc6718ae4e9ad39712b7a329119 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 18 Feb 2020 13:14:22 +0100 Subject: [PATCH 120/133] validation of presets happens after registration --- .../events/event_first_version_status.py | 78 ++++++++++--------- 1 file changed, 40 insertions(+), 38 deletions(-) diff --git a/pype/ftrack/events/event_first_version_status.py b/pype/ftrack/events/event_first_version_status.py index ac0e94c3ae..59956697b6 100644 --- a/pype/ftrack/events/event_first_version_status.py +++ b/pype/ftrack/events/event_first_version_status.py @@ -7,7 +7,6 @@ class FirstVersionStatus(BaseEvent): # than handler in `event_version_to_task_statuses.py` priority = 200 - first_run = True keys_enum = ["task", "task_type"] # This should be set with presets task_status_map = [] @@ -22,49 +21,52 @@ class FirstVersionStatus(BaseEvent): "status": "Blocking" }] + def register(self, *args, **kwargs): + result = super(FirstVersionStatus, self).register(*args, **kwargs) + + valid_task_status_map = [] + for item in self.task_status_map: + key = (item.get("key") or "").lower() + name = (item.get("name") or "").lower() + status = (item.get("status") or "").lower() + if not (key and name and status): + self.log.warning(( + "Invalid item in Task -> Status mapping. {}" + ).format(str(item))) + continue + + if key not in self.keys_enum: + expected_msg = "" + last_key_idx = len(self.keys_enum) - 1 + for idx, key in enumerate(self.keys_enum): + if idx == 0: + joining_part = "`{}`" + elif idx == last_key_idx: + joining_part = "or `{}`" + else: + joining_part = ", `{}`" + expected_msg += joining_part.format(key) + + self.log.warning(( + "Invalid key `{}`. Expected: {}." 
+ ).format(key, expected_msg)) + continue + + valid_task_status_map.append({ + "key": key, + "name": name, + "status": status + }) + self.task_status_map = valid_task_status_map + + return result + def launch(self, session, event): """Set task's status for first created Asset Version.""" if not self.task_status_map: return - if self.first_run: - self.first_run = False - valid_task_status_map = [] - for item in self.task_status_map: - key = (item.get("key") or "").lower() - name = (item.get("name") or "").lower() - status = (item.get("status") or "").lower() - if not (key and name and status): - self.log.warning(( - "Invalid item in Task -> Status mapping. {}" - ).format(str(item))) - continue - - if key not in self.keys_enum: - expected_msg = "" - last_key_idx = len(self.keys_enum) - 1 - for idx, key in enumerate(self.keys_enum): - if idx == 0: - joining_part = "`{}`" - elif idx == last_key_idx: - joining_part = "or `{}`" - else: - joining_part = ", `{}`" - expected_msg += joining_part.format(key) - - self.log.warning(( - "Invalid key `{}`. Expected: {}." - ).format(key, expected_msg)) - continue - - valid_task_status_map.append({ - "key": key, - "name": name, - "status": status - }) - self.task_status_map = valid_task_status_map - entities_info = self.filter_event_ents(event) if not entities_info: return From 96dce267e8c21bbbde068f50f657a881fbcd88bc Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 18 Feb 2020 13:17:37 +0100 Subject: [PATCH 121/133] default value of `key` in status mapping is `task` to look after task's name --- pype/ftrack/events/event_first_version_status.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/pype/ftrack/events/event_first_version_status.py b/pype/ftrack/events/event_first_version_status.py index 59956697b6..c147692dc4 100644 --- a/pype/ftrack/events/event_first_version_status.py +++ b/pype/ftrack/events/event_first_version_status.py @@ -14,6 +14,7 @@ class FirstVersionStatus(BaseEvent): # EXAMPLE of `task_status_map` __example_status_map__ = [{ # `key` specify where to look for name (is enumerator of `keys_enum`) + # By default is set to "task" "key": "task", # speicification of name "name": "compositing", @@ -26,7 +27,7 @@ class FirstVersionStatus(BaseEvent): valid_task_status_map = [] for item in self.task_status_map: - key = (item.get("key") or "").lower() + key = (item.get("key") or "task").lower() name = (item.get("name") or "").lower() status = (item.get("status") or "").lower() if not (key and name and status): From cc6d70f8498a364c5ce643efd683e1427ca46179 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 18 Feb 2020 13:56:48 +0100 Subject: [PATCH 122/133] integrate ftrack note adds intent at the beginning of comment (if is set) --- pype/plugins/ftrack/publish/integrate_ftrack_note.py | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/pype/plugins/ftrack/publish/integrate_ftrack_note.py b/pype/plugins/ftrack/publish/integrate_ftrack_note.py index f7fb5addbb..87016684ed 100644 --- a/pype/plugins/ftrack/publish/integrate_ftrack_note.py +++ b/pype/plugins/ftrack/publish/integrate_ftrack_note.py @@ -18,7 +18,17 @@ class IntegrateFtrackNote(pyblish.api.InstancePlugin): self.log.info("Comment is not set.") return - self.log.debug("Comment is set to {}".format(comment)) + self.log.debug("Comment is set to `{}`".format(comment)) + + intent = instance.context.data.get("intent") + if intent: + msg = "Intent is set to `{}` and was added to comment.".format( + intent + ) + comment = "{}: {}".format(intent, 
comment)
+        else:
+            msg = "Intent is not set."
+        self.log.debug(msg)
 
         asset_versions_key = "ftrackIntegratedAssetVersions"
         asset_versions = instance.data.get(asset_versions_key)

From a3ad40e34a729f79902cf653beece81d0617b270 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Tue, 18 Feb 2020 14:05:02 +0100
Subject: [PATCH 123/133] added possibility to set note with intent template
 through presets

---
 pype/plugins/ftrack/publish/integrate_ftrack_note.py | 7 ++++++-
 1 file changed, 6 insertions(+), 1 deletion(-)

diff --git a/pype/plugins/ftrack/publish/integrate_ftrack_note.py b/pype/plugins/ftrack/publish/integrate_ftrack_note.py
index 87016684ed..bab7d1ecf5 100644
--- a/pype/plugins/ftrack/publish/integrate_ftrack_note.py
+++ b/pype/plugins/ftrack/publish/integrate_ftrack_note.py
@@ -10,6 +10,8 @@ class IntegrateFtrackNote(pyblish.api.InstancePlugin):
     order = pyblish.api.IntegratorOrder + 0.4999
     label = "Integrate Ftrack note"
     families = ["ftrack"]
+    # Can be set in presets (Allows only `intent` and `comment` keys)
+    note_with_intent_template = "{intent}: {comment}"
     optional = True
 
     def process(self, instance):
@@ -25,7 +27,10 @@ class IntegrateFtrackNote(pyblish.api.InstancePlugin):
             msg = "Intent is set to `{}` and was added to comment.".format(
                 intent
             )
-            comment = "{}: {}".format(intent, comment)
+            comment = note_with_intent_template.format(**{
+                "intent": intent,
+                "comment": comment
+            })
         else:
             msg = "Intent is not set."
         self.log.debug(msg)

From 7296e86475077d014085be296e7747b7e01fbb06 Mon Sep 17 00:00:00 2001
From: Milan Kolar
Date: Tue, 18 Feb 2020 14:49:11 +0100
Subject: [PATCH 124/133] fix forgotten .values() call

---
 pype/plugins/global/publish/integrate_new.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py
index 8d41aa7907..a2343ce8a9 100644
--- a/pype/plugins/global/publish/integrate_new.py
+++ b/pype/plugins/global/publish/integrate_new.py
@@ -453,7 +453,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
         # Remove old representations if there are any (before insertion of new)
         if existing_repres:
             repre_ids_to_remove = []
-            for repre in existing_repres.values():
+            for repre in existing_repres:
                 repre_ids_to_remove.append(repre["_id"])
 
             io.delete_many({"_id": {"$in": repre_ids_to_remove}})

From 79ad22b5fbbd49afaab132a16b270ab35bc0748e Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Tue, 18 Feb 2020 15:15:23 +0100
Subject: [PATCH 125/133] added a few logs to event

---
 pype/ftrack/events/event_first_version_status.py | 12 ++++++++++--
 1 file changed, 10 insertions(+), 2 deletions(-)

diff --git a/pype/ftrack/events/event_first_version_status.py b/pype/ftrack/events/event_first_version_status.py
index c147692dc4..2447a20c3e 100644
--- a/pype/ftrack/events/event_first_version_status.py
+++ b/pype/ftrack/events/event_first_version_status.py
@@ -58,7 +58,12 @@ class FirstVersionStatus(BaseEvent):
                 "name": name,
                 "status": status
             })
+
         self.task_status_map = valid_task_status_map
+        if not self.task_status_map:
+            self.log.warning((
+                "Event handler `{}` doesn't have presets set."
+ ).format(self.__class__.__name__)) return result @@ -123,13 +128,16 @@ class FirstVersionStatus(BaseEvent): status["name"].lower(): status for status in statuses } + ent_path = "/".join([ent["name"] for ent in task_entity["link"]]) + statuses_by_low_name = statuses_per_type_id[type_id] new_status = statuses_by_low_name.get(found_item["status"]) if not new_status: + self.log.warning("Status `{}` was not found for `{}`.".format( + found_item["status"], ent_path + )) continue - ent_path = "/".join([ent["name"] for ent in task_entity["link"]]) - try: task_entity["status"] = new_status session.commit() From 79db4af6254ddf606fbb429106568bbe6f01d097 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 18 Feb 2020 15:52:45 +0100 Subject: [PATCH 126/133] task is not changed on task but asset version --- .../events/event_first_version_status.py | 38 ++++++++++--------- 1 file changed, 20 insertions(+), 18 deletions(-) diff --git a/pype/ftrack/events/event_first_version_status.py b/pype/ftrack/events/event_first_version_status.py index 2447a20c3e..2e2b98ad5f 100644 --- a/pype/ftrack/events/event_first_version_status.py +++ b/pype/ftrack/events/event_first_version_status.py @@ -18,7 +18,7 @@ class FirstVersionStatus(BaseEvent): "key": "task", # speicification of name "name": "compositing", - # Status to set to the task + # Status to set to the asset version "status": "Blocking" }] @@ -84,15 +84,15 @@ class FirstVersionStatus(BaseEvent): joined_entity_ids = ",".join( ["\"{}\"".format(entity_id) for entity_id in entity_ids] ) - asset_verisons = session.query( + asset_versions = session.query( "AssetVersion where id in ({})".format(joined_entity_ids) ).all() - statuses_per_type_id = {} + asset_version_statuses = None project_schema = None - for asset_verison in asset_verisons: - task_entity = asset_verison["task"] + for asset_version in asset_versions: + task_entity = asset_version["task"] found_item = None for item in self.task_status_map: if ( @@ -117,29 +117,31 @@ class FirstVersionStatus(BaseEvent): project_schema = task_entity["project"]["project_schema"] # Get all available statuses for Task - type_id = task_entity["type_id"] - if type_id not in statuses_per_type_id: - statuses = project_schema.get_statuses( - "Task", task_entity["type_id"] - ) + if asset_version_statuses is None: + statuses = project_schema.get_statuses("AssetVersion") # map lowered status name with it's object - statuses_per_type_id[type_id] = { + asset_version_statuses = { status["name"].lower(): status for status in statuses } - ent_path = "/".join([ent["name"] for ent in task_entity["link"]]) + ent_path = "/".join( + [ent["name"] for ent in task_entity["link"]] + + [ + str(asset_version["asset"]["name"]), + str(asset_version["version"]) + ] + ) - statuses_by_low_name = statuses_per_type_id[type_id] - new_status = statuses_by_low_name.get(found_item["status"]) + new_status = asset_version_statuses.get(found_item["status"]) if not new_status: - self.log.warning("Status `{}` was not found for `{}`.".format( - found_item["status"], ent_path - )) + self.log.warning( + "AssetVersion doesn't have status `{}`." 
+                    .format(found_item["status"])
+                )
                 continue
 
             try:
-                task_entity["status"] = new_status
+                asset_version["status"] = new_status
                 session.commit()
                 self.log.debug("[ {} ] Status updated to [ {} ]".format(
                     ent_path, new_status['name']
                 ))

From d98cb1c2c49a62dfa82350f26168a6f887617454 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Tue, 18 Feb 2020 16:20:11 +0100
Subject: [PATCH 127/133] fix template access

---
 pype/plugins/ftrack/publish/integrate_ftrack_note.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pype/plugins/ftrack/publish/integrate_ftrack_note.py b/pype/plugins/ftrack/publish/integrate_ftrack_note.py
index bab7d1ecf5..38f7486322 100644
--- a/pype/plugins/ftrack/publish/integrate_ftrack_note.py
+++ b/pype/plugins/ftrack/publish/integrate_ftrack_note.py
@@ -27,7 +27,7 @@ class IntegrateFtrackNote(pyblish.api.InstancePlugin):
             msg = "Intent is set to `{}` and was added to comment.".format(
                 intent
             )
-            comment = note_with_intent_template.format(**{
+            comment = self.note_with_intent_template.format(**{
                 "intent": intent,
                 "comment": comment
             })

From f59f8b142696f32748296953887af683e2d9aaad Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Tue, 18 Feb 2020 17:15:14 +0100
Subject: [PATCH 128/133] added possibility to add labels to notes

---
 .../ftrack/publish/integrate_ftrack_note.py   | 24 ++++++++++++++++---
 1 file changed, 21 insertions(+), 3 deletions(-)

diff --git a/pype/plugins/ftrack/publish/integrate_ftrack_note.py b/pype/plugins/ftrack/publish/integrate_ftrack_note.py
index 38f7486322..2621ca96ab 100644
--- a/pype/plugins/ftrack/publish/integrate_ftrack_note.py
+++ b/pype/plugins/ftrack/publish/integrate_ftrack_note.py
@@ -10,10 +10,14 @@ class IntegrateFtrackNote(pyblish.api.InstancePlugin):
     order = pyblish.api.IntegratorOrder + 0.4999
     label = "Integrate Ftrack note"
     families = ["ftrack"]
-    # Can be set in presets (Allows only `intent` and `comment` keys)
-    note_with_intent_template = "{intent}: {comment}"
     optional = True
 
+    # Can be set in presets:
+    # - Allows only `intent` and `comment` keys
+    note_with_intent_template = "{intent}: {comment}"
+    # - note label must exist in Ftrack
+    note_labels = []
+
     def process(self, instance):
         comment = (instance.context.data.get("comment") or "").strip()
         if not comment:
@@ -52,8 +56,22 @@ class IntegrateFtrackNote(pyblish.api.InstancePlugin):
             )
         )
 
+        labels = []
+        if self.note_labels:
+            all_labels = session.query("NoteLabel").all()
+            labels_by_low_name = {lab["name"].lower(): lab for lab in all_labels}
+            for _label in self.note_labels:
+                label = labels_by_low_name.get(_label.lower())
+                if not label:
+                    self.log.warning(
+                        "Note Label `{}` was not found.".format(_label)
+                    )
+                    continue
+
+                labels.append(label)
+
         for asset_version in asset_versions:
-            asset_version.create_note(comment, author=user)
+            asset_version.create_note(comment, author=user, labels=labels)
 
         try:
            session.commit()

From 60de315ddfb269385fccc48f590828fe3027c2b4 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Tue, 18 Feb 2020 18:43:28 +0100
Subject: [PATCH 129/133] fixed class name

---
 pype/ftrack/actions/action_delete_old_versions.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pype/ftrack/actions/action_delete_old_versions.py b/pype/ftrack/actions/action_delete_old_versions.py
index 46f3e60d77..e418a21e53 100644
--- a/pype/ftrack/actions/action_delete_old_versions.py
+++ b/pype/ftrack/actions/action_delete_old_versions.py
@@ -482,4 +482,4 @@ class DeleteOldVersions(BaseAction):
 def register(session, plugins_presets={}):
     '''Register plugin. Called when used as a plugin.'''
 
-    PrepareForArchivation(session, plugins_presets).register()
+    DeleteOldVersions(session, plugins_presets).register()

From d12fe99d1827591747b5f58a279b073e74fe82b3 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Tue, 18 Feb 2020 18:43:41 +0100
Subject: [PATCH 130/133] default version number is 2

---
 pype/ftrack/actions/action_delete_old_versions.py | 5 ++---
 1 file changed, 2 insertions(+), 3 deletions(-)

diff --git a/pype/ftrack/actions/action_delete_old_versions.py b/pype/ftrack/actions/action_delete_old_versions.py
index e418a21e53..a546f380a4 100644
--- a/pype/ftrack/actions/action_delete_old_versions.py
+++ b/pype/ftrack/actions/action_delete_old_versions.py
@@ -102,15 +102,14 @@ class DeleteOldVersions(BaseAction):
         items.append({
             "type": "label",
             "value": (
-                "NOTE: We do recommend to keep 2"
-                " versions (even if default is 1)."
+                "NOTE: We do recommend to keep 2 versions."
             )
         })
         items.append({
             "type": "number",
             "name": "last_versions_count",
             "label": "Versions",
-            "value": 1
+            "value": 2
         })
 
         items.append(self.splitter_item)

From 7692de229b5d0b337eeb93ac269531785f0ad4dd Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Tue, 18 Feb 2020 18:44:06 +0100
Subject: [PATCH 131/133] remove publish folder is set to False by default and
 changed `you` to `You`

---
 pype/ftrack/actions/action_delete_old_versions.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/pype/ftrack/actions/action_delete_old_versions.py b/pype/ftrack/actions/action_delete_old_versions.py
index a546f380a4..bbc5dc4b73 100644
--- a/pype/ftrack/actions/action_delete_old_versions.py
+++ b/pype/ftrack/actions/action_delete_old_versions.py
@@ -130,8 +130,8 @@ class DeleteOldVersions(BaseAction):
         items.append({
             "type": "boolean",
             "name": "force_delete_publish_folder",
-            "label": "Are you sure?",
-            "value": True
+            "label": "Are You sure?",
+            "value": False
         })
 
         return {

From dd83b585335557f854fea94a0b7a4eea451c7702 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Tue, 18 Feb 2020 18:52:37 +0100
Subject: [PATCH 132/133] changed filter variable to update_query

---
 pype/ftrack/actions/action_delete_old_versions.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/pype/ftrack/actions/action_delete_old_versions.py b/pype/ftrack/actions/action_delete_old_versions.py
index bbc5dc4b73..c566198522 100644
--- a/pype/ftrack/actions/action_delete_old_versions.py
+++ b/pype/ftrack/actions/action_delete_old_versions.py
@@ -352,9 +352,9 @@ class DeleteOldVersions(BaseAction):
             if version_tags == orig_version_tags:
                 continue
 
-            filter = {"_id": version["_id"]}
+            update_query = {"_id": version["_id"]}
             update_data = {"$set": {"data.tags": version_tags}}
-            mongo_changes_bulk.append(UpdateOne(filter, update_data))
+            mongo_changes_bulk.append(UpdateOne(update_query, update_data))
 
         if mongo_changes_bulk:
             self.dbcon.bulk_write(mongo_changes_bulk)

From da4c9b5a4f7338cc2bc5a322b73377cc44a21a28 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Tue, 18 Feb 2020 18:55:43 +0100
Subject: [PATCH 133/133] changed elif to if condition

---
 pype/ftrack/actions/action_delete_old_versions.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pype/ftrack/actions/action_delete_old_versions.py b/pype/ftrack/actions/action_delete_old_versions.py
index c566198522..bec21dae96 100644
--- a/pype/ftrack/actions/action_delete_old_versions.py
+++ b/pype/ftrack/actions/action_delete_old_versions.py
@@ -57,7 +57,7 @@ class DeleteOldVersions(BaseAction):
                 "message": msg
             }
 
-        elif not os.path.exists(root):
+        if not os.path.exists(root):
             msg = "Root path does not exists \"{}\".".format(str(root))
             items.append({
                 "type": "label",
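Most of the churn in this series sits in two preset-driven mechanisms: the `task_status_map` of the first-version event handler (patches 118, 120, 121, 125, 126) and the `note_with_intent_template` of the note integrator (patches 122, 123, 127, 128). The standalone sketch below mirrors only the validation and matching rules of the event handler so they can be read in one place. The helper names `validate_status_map` and `find_status_for_task`, as well as the sample preset values, are illustrative assumptions and not pype API; the real handler additionally queries a live ftrack session and commits the status change on the AssetVersion.

# Standalone sketch of the `task_status_map` preset handling from
# `event_first_version_status.py`. Helper names and sample presets
# are assumptions for illustration only.

KEYS_ENUM = ["task", "task_type"]


def validate_status_map(task_status_map):
    """Drop mapping items without a usable key/name/status, as `register` does."""
    valid_items = []
    for item in task_status_map:
        # `key` defaults to "task" since patch 121
        key = (item.get("key") or "task").lower()
        name = (item.get("name") or "").lower()
        status = (item.get("status") or "").lower()
        if not (name and status):
            print("Invalid item in Task -> Status mapping. {}".format(item))
            continue

        if key not in KEYS_ENUM:
            expected = " or ".join("`{}`".format(k) for k in KEYS_ENUM)
            print("Invalid key `{}`. Expected: {}.".format(key, expected))
            continue

        valid_items.append({"key": key, "name": name, "status": status})
    return valid_items


def find_status_for_task(task_status_map, task_name, task_type_name):
    """Return the mapped status name for the first matching item, or None."""
    for item in task_status_map:
        # Items keyed by "task" match the task's name,
        # items keyed by "task_type" match the task type's name.
        if item["key"] == "task" and task_name.lower() != item["name"]:
            continue
        if item["key"] == "task_type" and task_type_name.lower() != item["name"]:
            continue
        return item["status"]
    return None


if __name__ == "__main__":
    presets = validate_status_map([
        {"name": "compositing", "status": "Blocking"},
        {"key": "task_type", "name": "animation", "status": "In Progress"},
        {"key": "unknown", "name": "modeling", "status": "Ready"}
    ])
    # First AssetVersion published from a task named "Compositing"
    # resolves to "blocking" (lowercased by the validation step).
    print(find_status_for_task(presets, "Compositing", "2D"))

Run with Python, this prints a warning for the item with the unknown key and then `blocking`; the real handler resolves that name case-insensitively against the project schema's AssetVersion statuses before committing (patch 126). On the note side, the default template `"{intent}: {comment}"` simply renders an intent of `WIP` and a comment of `first pass` as the note text `WIP: first pass`.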