From 30d598911d78c803b4d5a7316097f53cdadfce9f Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 3 Feb 2020 19:24:45 +0100 Subject: [PATCH 001/107] removed old integrators --- pype/plugins/global/publish/integrate.py | 417 ----------------- .../publish/integrate_rendered_frames.py | 423 ------------------ 2 files changed, 840 deletions(-) delete mode 100644 pype/plugins/global/publish/integrate.py delete mode 100644 pype/plugins/global/publish/integrate_rendered_frames.py diff --git a/pype/plugins/global/publish/integrate.py b/pype/plugins/global/publish/integrate.py deleted file mode 100644 index 87b9e1a9bd..0000000000 --- a/pype/plugins/global/publish/integrate.py +++ /dev/null @@ -1,417 +0,0 @@ -import os -import logging -import shutil - -import errno -import pyblish.api -from avalon import api, io -from avalon.vendor import filelink - - -log = logging.getLogger(__name__) - - -class IntegrateAsset(pyblish.api.InstancePlugin): - """Resolve any dependency issies - - This plug-in resolves any paths which, if not updated might break - the published file. - - The order of families is important, when working with lookdev you want to - first publish the texture, update the texture paths in the nodes and then - publish the shading network. Same goes for file dependent assets. - """ - - label = "Integrate Asset" - order = pyblish.api.IntegratorOrder - families = [] - exclude_families = ["clip"] - - def process(self, instance): - if [ef for ef in self.exclude_families - if instance.data["family"] in ef]: - return - - self.register(instance) - - self.log.info("Integrating Asset in to the database ...") - if instance.data.get('transfer', True): - self.integrate(instance) - - def register(self, instance): - # Required environment variables - PROJECT = api.Session["AVALON_PROJECT"] - ASSET = instance.data.get("asset") or api.Session["AVALON_ASSET"] - LOCATION = api.Session["AVALON_LOCATION"] - - context = instance.context - # Atomicity - # - # Guarantee atomic publishes - each asset contains - # an identical set of members. - # __ - # / o - # / \ - # | o | - # \ / - # o __/ - # - assert all(result["success"] for result in context.data["results"]), ( - "Atomicity not held, aborting.") - - # Assemble - # - # | - # v - # ---> <---- - # ^ - # | - # - stagingdir = instance.data.get("stagingDir") - assert stagingdir, ("Incomplete instance \"%s\": " - "Missing reference to staging area." % instance) - - # extra check if stagingDir actually exists and is available - - self.log.debug("Establishing staging directory @ %s" % stagingdir) - - # Ensure at least one file is set up for transfer in staging dir. 
- files = instance.data.get("files", []) - assert files, "Instance has no files to transfer" - assert isinstance(files, (list, tuple)), ( - "Instance 'files' must be a list, got: {0}".format(files) - ) - - project = io.find_one({"type": "project"}) - - asset = io.find_one({ - "type": "asset", - "name": ASSET, - "parent": project["_id"] - }) - - assert all([project, asset]), ("Could not find current project or " - "asset '%s'" % ASSET) - - subset = self.get_subset(asset, instance) - - # get next version - latest_version = io.find_one( - { - "type": "version", - "parent": subset["_id"] - }, - {"name": True}, - sort=[("name", -1)] - ) - - next_version = 1 - if latest_version is not None: - next_version += latest_version["name"] - - self.log.info("Verifying version from assumed destination") - - assumed_data = instance.data["assumedTemplateData"] - assumed_version = assumed_data["version"] - if assumed_version != next_version: - raise AttributeError("Assumed version 'v{0:03d}' does not match" - "next version in database " - "('v{1:03d}')".format(assumed_version, - next_version)) - - self.log.debug("Next version: v{0:03d}".format(next_version)) - - version_data = self.create_version_data(context, instance) - version = self.create_version(subset=subset, - version_number=next_version, - locations=[LOCATION], - data=version_data) - - self.log.debug("Creating version ...") - version_id = io.insert_one(version).inserted_id - - # Write to disk - # _ - # | | - # _| |_ - # ____\ / - # |\ \ / \ - # \ \ v \ - # \ \________. - # \|________| - # - root = api.registered_root() - hierarchy = "" - parents = io.find_one({ - "type": 'asset', - "name": ASSET - })['data']['parents'] - if parents and len(parents) > 0: - # hierarchy = os.path.sep.join(hierarchy) - hierarchy = os.path.join(*parents) - - template_data = {"root": root, - "project": {"name": PROJECT, - "code": project['data']['code']}, - "silo": asset['silo'], - "asset": ASSET, - "family": instance.data['family'], - "subset": subset["name"], - "version": int(version["name"]), - "hierarchy": hierarchy} - - # template_publish = project["config"]["template"]["publish"] - anatomy = instance.context.data['anatomy'] - - # Find the representations to transfer amongst the files - # Each should be a single representation (as such, a single extension) - representations = [] - destination_list = [] - if 'transfers' not in instance.data: - instance.data['transfers'] = [] - - for files in instance.data["files"]: - - # Collection - # _______ - # |______|\ - # | |\| - # | || - # | || - # | || - # |_______| - # - - if isinstance(files, list): - collection = files - # Assert that each member has identical suffix - _, ext = os.path.splitext(collection[0]) - assert all(ext == os.path.splitext(name)[1] - for name in collection), ( - "Files had varying suffixes, this is a bug" - ) - - assert not any(os.path.isabs(name) for name in collection) - - template_data["representation"] = ext[1:] - - for fname in collection: - - src = os.path.join(stagingdir, fname) - anatomy_filled = anatomy.format(template_data) - dst = anatomy_filled["publish"]["path"] - - instance.data["transfers"].append([src, dst]) - template = anatomy.templates["publish"]["path"] - - else: - # Single file - # _______ - # | |\ - # | | - # | | - # | | - # |_______| - # - fname = files - assert not os.path.isabs(fname), ( - "Given file name is a full path" - ) - _, ext = os.path.splitext(fname) - - template_data["representation"] = ext[1:] - - src = os.path.join(stagingdir, fname) - anatomy_filled = 
anatomy.format(template_data) - dst = anatomy_filled["publish"]["path"] - - instance.data["transfers"].append([src, dst]) - template = anatomy.templates["publish"]["path"] - - representation = { - "schema": "pype:representation-2.0", - "type": "representation", - "parent": version_id, - "name": ext[1:], - "data": {'path': dst, 'template': template}, - "dependencies": instance.data.get("dependencies", "").split(), - - # Imprint shortcut to context - # for performance reasons. - "context": { - "root": root, - "project": {"name": PROJECT, - "code": project['data']['code']}, - 'task': api.Session["AVALON_TASK"], - "silo": asset['silo'], - "asset": ASSET, - "family": instance.data['family'], - "subset": subset["name"], - "version": version["name"], - "hierarchy": hierarchy, - "representation": ext[1:] - } - } - - destination_list.append(dst) - instance.data['destination_list'] = destination_list - representations.append(representation) - - self.log.info("Registering {} items".format(len(representations))) - - io.insert_many(representations) - - def integrate(self, instance): - """Move the files - - Through `instance.data["transfers"]` - - Args: - instance: the instance to integrate - """ - - transfers = instance.data.get("transfers", list()) - - for src, dest in transfers: - self.log.info("Copying file .. {} -> {}".format(src, dest)) - self.copy_file(src, dest) - - # Produce hardlinked copies - # Note: hardlink can only be produced between two files on the same - # server/disk and editing one of the two will edit both files at once. - # As such it is recommended to only make hardlinks between static files - # to ensure publishes remain safe and non-edited. - hardlinks = instance.data.get("hardlinks", list()) - for src, dest in hardlinks: - self.log.info("Hardlinking file .. {} -> {}".format(src, dest)) - self.hardlink_file(src, dest) - - def copy_file(self, src, dst): - """ Copy given source to destination - - Arguments: - src (str): the source file which needs to be copied - dst (str): the destination of the sourc file - Returns: - None - """ - - dirname = os.path.dirname(dst) - try: - os.makedirs(dirname) - except OSError as e: - if e.errno == errno.EEXIST: - pass - else: - self.log.critical("An unexpected error occurred.") - raise - - shutil.copy(src, dst) - - def hardlink_file(self, src, dst): - - dirname = os.path.dirname(dst) - try: - os.makedirs(dirname) - except OSError as e: - if e.errno == errno.EEXIST: - pass - else: - self.log.critical("An unexpected error occurred.") - raise - - filelink.create(src, dst, filelink.HARDLINK) - - def get_subset(self, asset, instance): - - subset = io.find_one({ - "type": "subset", - "parent": asset["_id"], - "name": instance.data["subset"] - }) - - if subset is None: - subset_name = instance.data["subset"] - self.log.info("Subset '%s' not found, creating.." 
% subset_name) - - _id = io.insert_one({ - "schema": "avalon-core:subset-2.0", - "type": "subset", - "name": subset_name, - "data": {}, - "parent": asset["_id"] - }).inserted_id - - subset = io.find_one({"_id": _id}) - - return subset - - def create_version(self, subset, version_number, locations, data=None): - """ Copy given source to destination - - Args: - subset (dict): the registered subset of the asset - version_number (int): the version number - locations (list): the currently registered locations - - Returns: - dict: collection of data to create a version - """ - # Imprint currently registered location - version_locations = [location for location in locations if - location is not None] - - return {"schema": "avalon-core:version-2.0", - "type": "version", - "parent": subset["_id"], - "name": version_number, - "locations": version_locations, - "data": data} - - def create_version_data(self, context, instance): - """Create the data collection for the version - - Args: - context: the current context - instance: the current instance being published - - Returns: - dict: the required information with instance.data as key - """ - - families = [] - current_families = instance.data.get("families", list()) - instance_family = instance.data.get("family", None) - - if instance_family is not None: - families.append(instance_family) - families += current_families - - self.log.debug("Registered root: {}".format(api.registered_root())) - # create relative source path for DB - try: - source = instance.data['source'] - except KeyError: - source = context.data["currentFile"] - - relative_path = os.path.relpath(source, api.registered_root()) - source = os.path.join("{root}", relative_path).replace("\\", "/") - - self.log.debug("Source: {}".format(source)) - version_data = {"families": families, - "time": context.data["time"], - "author": context.data["user"], - "source": source, - "comment": context.data.get("comment"), - "machine": context.data.get("machine"), - "fps": context.data.get("fps")} - - # Include optional data if present in - optionals = [ - "frameStart", "frameEnd", "step", "handles", "sourceHashes" - ] - for key in optionals: - if key in instance.data: - version_data[key] = instance.data[key] - - return version_data diff --git a/pype/plugins/global/publish/integrate_rendered_frames.py b/pype/plugins/global/publish/integrate_rendered_frames.py deleted file mode 100644 index 5819051146..0000000000 --- a/pype/plugins/global/publish/integrate_rendered_frames.py +++ /dev/null @@ -1,423 +0,0 @@ -import os -import logging -import shutil -import clique - -import errno -import pyblish.api -from avalon import api, io - - -log = logging.getLogger(__name__) - - -class IntegrateFrames(pyblish.api.InstancePlugin): - """Resolve any dependency issies - - This plug-in resolves any paths which, if not updated might break - the published file. - - The order of families is important, when working with lookdev you want to - first publish the texture, update the texture paths in the nodes and then - publish the shading network. Same goes for file dependent assets. 
- """ - - label = "Integrate Frames" - order = pyblish.api.IntegratorOrder - families = ["imagesequence"] - - family_targets = [".frames", ".local", ".review", "imagesequence", "render", "source"] - exclude_families = ["clip"] - - def process(self, instance): - if [ef for ef in self.exclude_families - if instance.data["family"] in ef]: - return - - families = [f for f in instance.data["families"] - for search in self.family_targets - if search in f] - - if not families: - return - - self.register(instance) - - # self.log.info("Integrating Asset in to the database ...") - # self.log.info("instance.data: {}".format(instance.data)) - if instance.data.get('transfer', True): - self.integrate(instance) - - def register(self, instance): - - # Required environment variables - PROJECT = api.Session["AVALON_PROJECT"] - ASSET = instance.data.get("asset") or api.Session["AVALON_ASSET"] - LOCATION = api.Session["AVALON_LOCATION"] - - context = instance.context - # Atomicity - # - # Guarantee atomic publishes - each asset contains - # an identical set of members. - # __ - # / o - # / \ - # | o | - # \ / - # o __/ - # - assert all(result["success"] for result in context.data["results"]), ( - "Atomicity not held, aborting.") - - # Assemble - # - # | - # v - # ---> <---- - # ^ - # | - # - stagingdir = instance.data.get("stagingDir") - assert stagingdir, ("Incomplete instance \"%s\": " - "Missing reference to staging area." % instance) - - # extra check if stagingDir actually exists and is available - - self.log.debug("Establishing staging directory @ %s" % stagingdir) - - project = io.find_one({"type": "project"}) - - asset = io.find_one({ - "type": "asset", - "name": ASSET, - "parent": project["_id"] - }) - - assert all([project, asset]), ("Could not find current project or " - "asset '%s'" % ASSET) - - subset = self.get_subset(asset, instance) - - # get next version - latest_version = io.find_one( - { - "type": "version", - "parent": subset["_id"] - }, - {"name": True}, - sort=[("name", -1)] - ) - - next_version = 1 - if latest_version is not None: - next_version += latest_version["name"] - - self.log.info("Verifying version from assumed destination") - - assumed_data = instance.data["assumedTemplateData"] - assumed_version = assumed_data["version"] - if assumed_version != next_version: - raise AttributeError("Assumed version 'v{0:03d}' does not match" - "next version in database " - "('v{1:03d}')".format(assumed_version, - next_version)) - - if instance.data.get('version'): - next_version = int(instance.data.get('version')) - - self.log.debug("Next version: v{0:03d}".format(next_version)) - - version_data = self.create_version_data(context, instance) - version = self.create_version(subset=subset, - version_number=next_version, - locations=[LOCATION], - data=version_data) - - self.log.debug("Creating version ...") - version_id = io.insert_one(version).inserted_id - - # Write to disk - # _ - # | | - # _| |_ - # ____\ / - # |\ \ / \ - # \ \ v \ - # \ \________. 
- # \|________| - # - root = api.registered_root() - hierarchy = "" - parents = io.find_one({"type": 'asset', "name": ASSET})[ - 'data']['parents'] - if parents and len(parents) > 0: - # hierarchy = os.path.sep.join(hierarchy) - hierarchy = os.path.join(*parents) - - template_data = {"root": root, - "project": {"name": PROJECT, - "code": project['data']['code']}, - "silo": asset.get('silo'), - "task": api.Session["AVALON_TASK"], - "asset": ASSET, - "family": instance.data['family'], - "subset": subset["name"], - "version": int(version["name"]), - "hierarchy": hierarchy} - - # template_publish = project["config"]["template"]["publish"] - anatomy = instance.context.data['anatomy'] - - # Find the representations to transfer amongst the files - # Each should be a single representation (as such, a single extension) - representations = [] - destination_list = [] - - if 'transfers' not in instance.data: - instance.data['transfers'] = [] - - for files in instance.data["files"]: - # Collection - # _______ - # |______|\ - # | |\| - # | || - # | || - # | || - # |_______| - # - if isinstance(files, list): - - src_collections, remainder = clique.assemble(files) - src_collection = src_collections[0] - # Assert that each member has identical suffix - src_head = src_collection.format("{head}") - src_tail = ext = src_collection.format("{tail}") - - test_dest_files = list() - for i in [1, 2]: - template_data["representation"] = src_tail[1:] - template_data["frame"] = src_collection.format( - "{padding}") % i - anatomy_filled = anatomy.format(template_data) - test_dest_files.append(anatomy_filled["render"]["path"]) - - dst_collections, remainder = clique.assemble(test_dest_files) - dst_collection = dst_collections[0] - dst_head = dst_collection.format("{head}") - dst_tail = dst_collection.format("{tail}") - - for i in src_collection.indexes: - src_padding = src_collection.format("{padding}") % i - src_file_name = "{0}{1}{2}".format( - src_head, src_padding, src_tail) - dst_padding = dst_collection.format("{padding}") % i - dst = "{0}{1}{2}".format(dst_head, dst_padding, dst_tail) - - src = os.path.join(stagingdir, src_file_name) - instance.data["transfers"].append([src, dst]) - - else: - # Single file - # _______ - # | |\ - # | | - # | | - # | | - # |_______| - # - - template_data.pop("frame", None) - - fname = files - - self.log.info("fname: {}".format(fname)) - - assert not os.path.isabs(fname), ( - "Given file name is a full path" - ) - _, ext = os.path.splitext(fname) - - template_data["representation"] = ext[1:] - - src = os.path.join(stagingdir, fname) - - anatomy_filled = anatomy.format(template_data) - dst = anatomy_filled["render"]["path"] - - instance.data["transfers"].append([src, dst]) - - if ext[1:] not in ["jpeg", "jpg", "mov", "mp4", "wav"]: - template_data["frame"] = "#" * int(anatomy_filled["render"]["padding"]) - - anatomy_filled = anatomy.format(template_data) - path_to_save = anatomy_filled["render"]["path"] - template = anatomy.templates["render"]["path"] - - self.log.debug("path_to_save: {}".format(path_to_save)) - - representation = { - "schema": "pype:representation-2.0", - "type": "representation", - "parent": version_id, - "name": ext[1:], - "data": {'path': path_to_save, 'template': template}, - "dependencies": instance.data.get("dependencies", "").split(), - - # Imprint shortcut to context - # for performance reasons. 
- "context": { - "root": root, - "project": { - "name": PROJECT, - "code": project['data']['code'] - }, - "task": api.Session["AVALON_TASK"], - "silo": asset['silo'], - "asset": ASSET, - "family": instance.data['family'], - "subset": subset["name"], - "version": int(version["name"]), - "hierarchy": hierarchy, - "representation": ext[1:] - } - } - - destination_list.append(dst) - instance.data['destination_list'] = destination_list - representations.append(representation) - - self.log.info("Registering {} items".format(len(representations))) - io.insert_many(representations) - - def integrate(self, instance): - """Move the files - - Through `instance.data["transfers"]` - - Args: - instance: the instance to integrate - """ - - transfers = instance.data["transfers"] - - for src, dest in transfers: - src = os.path.normpath(src) - dest = os.path.normpath(dest) - if src in dest: - continue - - self.log.info("Copying file .. {} -> {}".format(src, dest)) - self.copy_file(src, dest) - - def copy_file(self, src, dst): - """ Copy given source to destination - - Arguments: - src (str): the source file which needs to be copied - dst (str): the destination of the sourc file - Returns: - None - """ - - dirname = os.path.dirname(dst) - try: - os.makedirs(dirname) - except OSError as e: - if e.errno == errno.EEXIST: - pass - else: - self.log.critical("An unexpected error occurred.") - raise - - shutil.copy(src, dst) - - def get_subset(self, asset, instance): - - subset = io.find_one({ - "type": "subset", - "parent": asset["_id"], - "name": instance.data["subset"] - }) - - if subset is None: - subset_name = instance.data["subset"] - self.log.info("Subset '%s' not found, creating.." % subset_name) - - _id = io.insert_one({ - "schema": "pype:subset-2.0", - "type": "subset", - "name": subset_name, - "data": {}, - "parent": asset["_id"] - }).inserted_id - - subset = io.find_one({"_id": _id}) - - return subset - - def create_version(self, subset, version_number, locations, data=None): - """ Copy given source to destination - - Args: - subset (dict): the registered subset of the asset - version_number (int): the version number - locations (list): the currently registered locations - - Returns: - dict: collection of data to create a version - """ - # Imprint currently registered location - version_locations = [location for location in locations if - location is not None] - - return {"schema": "pype:version-2.0", - "type": "version", - "parent": subset["_id"], - "name": version_number, - "locations": version_locations, - "data": data} - - def create_version_data(self, context, instance): - """Create the data collection for the version - - Args: - context: the current context - instance: the current instance being published - - Returns: - dict: the required information with instance.data as key - """ - - families = [] - current_families = instance.data.get("families", list()) - instance_family = instance.data.get("family", None) - - if instance_family is not None: - families.append(instance_family) - families += current_families - - try: - source = instance.data['source'] - except KeyError: - source = context.data["currentFile"] - - relative_path = os.path.relpath(source, api.registered_root()) - source = os.path.join("{root}", relative_path).replace("\\", "/") - - version_data = {"families": families, - "time": context.data["time"], - "author": context.data["user"], - "source": source, - "comment": context.data.get("comment")} - - # Include optional data if present in - optionals = ["frameStart", "frameEnd", 
"step", - "handles", "colorspace", "fps", "outputDir"] - - for key in optionals: - if key in instance.data: - version_data[key] = instance.data.get(key, None) - - return version_data From 9bcdf7f72a96b9839aee1e9c49acb75475b55cc8 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 3 Feb 2020 19:25:33 +0100 Subject: [PATCH 002/107] added avalon entities collector --- .../global/publish/collect_avalon_entities.py | 46 +++++++++++++++++++ 1 file changed, 46 insertions(+) create mode 100644 pype/plugins/global/publish/collect_avalon_entities.py diff --git a/pype/plugins/global/publish/collect_avalon_entities.py b/pype/plugins/global/publish/collect_avalon_entities.py new file mode 100644 index 0000000000..c256dffd52 --- /dev/null +++ b/pype/plugins/global/publish/collect_avalon_entities.py @@ -0,0 +1,46 @@ +"""Collect Anatomy and global anatomy data. + +Requires: + session -> AVALON_PROJECT, AVALON_ASSET + +Provides: + context -> projectEntity - project entity from database + context -> assetEntity - asset entity from database +""" + +from avalon import io, api +import pyblish.api + + +class CollectAvalonEntities(pyblish.api.ContextPlugin): + """Collect Anatomy into Context""" + + order = pyblish.api.CollectorOrder + label = "Collect Avalon Entities" + + def process(self, context): + project_name = api.Session["AVALON_PROJECT"] + asset_name = api.Session["AVALON_ASSET"] + + project_entity = io.find_one({ + "type": "project", + "name": project_name + }) + assert project_entity, ( + "Project '{0}' was not found." + ).format(project_name) + self.log.debug("Collected Project entity \"{}\"".format(project_entity)) + + asset_entity = io.find_one({ + "type": "asset", + "name": asset_name, + "parent": project_entity["_id"] + }) + assert asset_entity, ( + "No asset found by the name '{0}' in project '{1}'" + ).format(asset_name, project_name) + + self.log.debug("Collected Asset entity \"{}\"".format(asset_entity)) + + context.data["projectEntity"] = project_entity + context.data["assetEntity"] = asset_entity From a2d75afe7a8e78fd2481c18a095aa96b9382a9e7 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 3 Feb 2020 19:25:57 +0100 Subject: [PATCH 003/107] collect anatomy also collect global anatomy data --- .../plugins/global/publish/collect_anatomy.py | 67 +++++++++++++++++-- 1 file changed, 62 insertions(+), 5 deletions(-) diff --git a/pype/plugins/global/publish/collect_anatomy.py b/pype/plugins/global/publish/collect_anatomy.py index 9412209850..0831c16d32 100644 --- a/pype/plugins/global/publish/collect_anatomy.py +++ b/pype/plugins/global/publish/collect_anatomy.py @@ -1,10 +1,24 @@ -""" +"""Collect Anatomy and global anatomy data. 
+ Requires: - None + session -> AVALON_TASK + projectEntity, assetEntity -> collect_avalon_entities *(pyblish.api.CollectorOrder) + username -> collect_pype_user *(pyblish.api.CollectorOrder + 0.001) + datetimeData -> collect_datetime_data *(pyblish.api.CollectorOrder) + +Optional: + comment -> collect_comment *(pyblish.api.CollectorOrder) + intent -> collected in pyblish-lite + Provides: context -> anatomy (pypeapp.Anatomy) + context -> anatomyData """ +import os +import json + +from avalon import io, api, lib from pypeapp import Anatomy import pyblish.api @@ -12,9 +26,52 @@ import pyblish.api class CollectAnatomy(pyblish.api.ContextPlugin): """Collect Anatomy into Context""" - order = pyblish.api.CollectorOrder + order = pyblish.api.CollectorOrder + 0.002 label = "Collect Anatomy" def process(self, context): - context.data['anatomy'] = Anatomy() - self.log.info("Anatomy templates collected...") + root_path = api.registered_root() + task_name = api.Session["AVALON_TASK"] + + project_entity = context.data["projectEntity"] + asset_entity = context.data["assetEntity"] + + project_name = project_entity["name"] + + context.data["anatomy"] = Anatomy(project_name) + self.log.info( + "Anatomy object collected for project \"{}\".".format(project_name) + ) + + hierarchy_items = asset_entity["data"]["parents"] + hierarchy = "" + if hierarchy_items: + hierarchy = os.path.join(*hierarchy_items) + + context_data = { + "root": root_path, + "project": { + "name": project_name, + "code": project_entity["data"].get("code") + }, + "asset": asset_entity["name"], + "hierarchy": hierarchy.replace("\\", "/"), + "task": task_name, + + "username": context.data["user"] + } + + avalon_app_name = os.environ.get("AVALON_APP_NAME") + if avalon_app_name: + application_def = lib.get_application(avalon_app_name) + app_dir = application_def.get("application_dir") + if app_dir: + context_data["app"] = app_dir + + datetime_data = context.data.get("datetimeData") or {} + context_data.update(datetime_data) + + context.data["anatomyData"] = context_data + + self.log.info("Global anatomy Data collected") + self.log.debug(json.dumps(context_data, indent=4)) From 54f76e7f7f9ec884bdbbe915a5088e7aaf8e3e10 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 3 Feb 2020 19:26:19 +0100 Subject: [PATCH 004/107] collect templates replaced with collect instance anatomy data --- .../publish/collect_instance_anatomy_data.py | 119 ++++++++++++++++++ .../global/publish/collect_templates.py | 117 ----------------- 2 files changed, 119 insertions(+), 117 deletions(-) create mode 100644 pype/plugins/global/publish/collect_instance_anatomy_data.py delete mode 100644 pype/plugins/global/publish/collect_templates.py diff --git a/pype/plugins/global/publish/collect_instance_anatomy_data.py b/pype/plugins/global/publish/collect_instance_anatomy_data.py new file mode 100644 index 0000000000..a1a9278d2a --- /dev/null +++ b/pype/plugins/global/publish/collect_instance_anatomy_data.py @@ -0,0 +1,119 @@ +""" +Requires: + context -> anatomyData + context -> projectEntity + context -> assetEntity + instance -> asset + instance -> subset + instance -> family + +Optional: + instance -> resolutionWidth + instance -> resolutionHeight + instance -> fps + +Provides: + instance -> anatomyData +""" + +import copy +import json + +from avalon import io +import pyblish.api + + +class CollectInstanceAnatomyData(pyblish.api.InstancePlugin): + """Fill templates with data needed for publish""" + + order = pyblish.api.CollectorOrder + 0.1 + label = "Collect instance 
anatomy data" + hosts = ["maya", "nuke", "standalonepublisher"] + + def process(self, instance): + # get all the stuff from the database + anatomy_data = copy.deepcopy(instance.context.data["anatomyData"]) + project_entity = instance.context.data["projectEntity"] + context_asset_entity = instance.context.data["assetEntity"] + + asset_name = instance.data["asset"] + # Check if asset name is the same as what is in context + # - they may be different, e.g. in NukeStudio + if context_asset_entity["name"] == asset_name: + asset_entity = context_asset_entity + + else: + asset_entity = io.find_one({ + "type": "asset", + "name": asset_name, + "parent": project_entity["_id"] + }) + + instance.context.data["assetEntity"] = asset_entity + instance.context.data["projectEntity"] = project_entity + + subset_name = instance.data["subset"] + subset_entity = io.find_one({ + "type": "subset", + "name": subset_name, + "parent": asset_entity["_id"] + }) + + version_number = instance.data.get("version") + if version_number is None: + version_number = instance.context.data.get("version") + + latest_version = None + if subset_entity is None: + self.log.debug("Subset entity does not exist yet.") + else: + version_entity = io.find_one( + { + "type": "version", + "parent": subset_entity["_id"] + }, + sort=[("name", -1)] + ) + if version_entity: + latest_version = version_entity["name"] + + if version_number is None: + # TODO we should be able to change this version by studio + # preferences (like start with version number `0`) + version_number = 1 + if latest_version is not None: + version_number += int(latest_version) + + # Version should not be collected since may be instance + anatomy_data.update({ + "asset": asset_entity["name"], + "family": instance.data["family"], + "subset": subset_name, + "version": version_number + }) + + resolution_width = instance.data.get("resolutionWidth") + if resolution_width: + anatomy_data["resolution_width"] = resolution_width + + resolution_height = instance.data.get("resolutionHeight") + if resolution_height: + anatomy_data["resolution_height"] = resolution_height + + fps = instance.data.get("fps") + if resolution_height: + anatomy_data["fps"] = fps + + instance.data["anatomyData"] = anatomy_data + instance.data["latestVersion"] = latest_version + # TODO check if template is used anywhere + # instance.data["template"] = template + + # TODO we should move this to any Validator + # # We take the parent folder of representation 'filepath' + # instance.data["assumedDestination"] = os.path.dirname( + # (anatomy.format(template_data))["publish"]["path"] + # ) + + self.log.info("Instance anatomy Data collected") + self.log.debug(json.dumps(anatomy_data, indent=4)) diff --git a/pype/plugins/global/publish/collect_templates.py b/pype/plugins/global/publish/collect_templates.py deleted file mode 100644 index f065b3c246..0000000000 --- a/pype/plugins/global/publish/collect_templates.py +++ /dev/null @@ -1,117 +0,0 @@ -""" -Requires: - session -> AVALON_PROJECT - context -> anatomy (pypeapp.Anatomy) - instance -> subset - instance -> asset - instance -> family - -Provides: - instance -> template - instance -> assumedTemplateData - instance -> assumedDestination -""" - -import os - -from avalon import io, api -import pyblish.api - - -class CollectTemplates(pyblish.api.InstancePlugin): - """Fill templates with data needed for publish""" - - order = pyblish.api.CollectorOrder + 0.1 - label = "Collect and fill Templates" - hosts = ["maya", "nuke", "standalonepublisher"] - - def process(self, 
instance): - # get all the stuff from the database - subset_name = instance.data["subset"] - asset_name = instance.data["asset"] - project_name = api.Session["AVALON_PROJECT"] - - project = io.find_one( - { - "type": "project", - "name": project_name - }, - projection={"config": True, "data": True} - ) - - template = project["config"]["template"]["publish"] - anatomy = instance.context.data['anatomy'] - - asset = io.find_one({ - "type": "asset", - "name": asset_name, - "parent": project["_id"] - }) - - assert asset, ("No asset found by the name '{}' " - "in project '{}'".format(asset_name, project_name)) - silo = asset.get('silo') - - subset = io.find_one({ - "type": "subset", - "name": subset_name, - "parent": asset["_id"] - }) - - # assume there is no version yet, we start at `1` - version = None - version_number = 1 - if subset is not None: - version = io.find_one( - { - "type": "version", - "parent": subset["_id"] - }, - sort=[("name", -1)] - ) - - # if there is a subset there ought to be version - if version is not None: - version_number += int(version["name"]) - - hierarchy = asset['data']['parents'] - if hierarchy: - # hierarchy = os.path.sep.join(hierarchy) - hierarchy = os.path.join(*hierarchy) - - template_data = {"root": api.Session["AVALON_PROJECTS"], - "project": {"name": project_name, - "code": project['data']['code']}, - "silo": silo, - "family": instance.data['family'], - "asset": asset_name, - "subset": subset_name, - "version": version_number, - "hierarchy": hierarchy.replace("\\", "/"), - "representation": "TEMP"} - - # Add datetime data to template data - datetime_data = instance.context.data.get("datetimeData") or {} - template_data.update(datetime_data) - - resolution_width = instance.data.get("resolutionWidth") - resolution_height = instance.data.get("resolutionHeight") - fps = instance.data.get("fps") - - if resolution_width: - template_data["resolution_width"] = resolution_width - if resolution_width: - template_data["resolution_height"] = resolution_height - if resolution_width: - template_data["fps"] = fps - - instance.data["template"] = template - instance.data["assumedTemplateData"] = template_data - - # We take the parent folder of representation 'filepath' - instance.data["assumedDestination"] = os.path.dirname( - (anatomy.format(template_data))["publish"]["path"] - ) - self.log.info("Assumed Destination has been created...") - self.log.debug("__ assumedTemplateData: `{}`".format(instance.data["assumedTemplateData"])) - self.log.debug("__ template: `{}`".format(instance.data["template"])) From 1515f47f0fad2700efaa69022ac682456b7e4c50 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 3 Feb 2020 19:26:33 +0100 Subject: [PATCH 005/107] extract burnin uses anatomyData --- pype/plugins/global/publish/extract_burnin.py | 16 +++------------- 1 file changed, 3 insertions(+), 13 deletions(-) diff --git a/pype/plugins/global/publish/extract_burnin.py b/pype/plugins/global/publish/extract_burnin.py index e50ba891d2..b95c15f340 100644 --- a/pype/plugins/global/publish/extract_burnin.py +++ b/pype/plugins/global/publish/extract_burnin.py @@ -32,21 +32,15 @@ class ExtractBurnin(pype.api.Extractor): frame_end = int(instance.data.get("frameEnd") or 1) duration = frame_end - frame_start + 1 - prep_data = { - "username": instance.context.data['user'], - "asset": os.environ['AVALON_ASSET'], - "task": os.environ['AVALON_TASK'], + prep_data = copy.deepcopy(instance.data["anatomyData"]) + prep_data.update({ "frame_start": frame_start, "frame_end": frame_end, "duration": 
duration,
             "version": int(version),
             "comment": instance.context.data.get("comment", ""),
             "intent": instance.context.data.get("intent", "")
-        }
-
-        # Add datetime data to preparation data
-        datetime_data = instance.context.data.get("datetimeData") or {}
-        prep_data.update(datetime_data)
+        })

         slate_frame_start = frame_start
         slate_frame_end = frame_end
@@ -64,10 +58,6 @@
             "slate_duration": slate_duration
         })

-        # Update data with template data
-        template_data = instance.data.get("assumedTemplateData") or {}
-        prep_data.update(template_data)
-
         # get anatomy project
         anatomy = instance.context.data['anatomy']

From e4b23553dffd42aad2ee07b5e207787b1b52c4f8 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Mon, 3 Feb 2020 19:26:55 +0100
Subject: [PATCH 006/107] integrate assumed destination replaced with collect
 resources path

---
 .../global/publish/collect_resources_path.py  | 132 ++++++++++++++++
 .../publish/integrate_assumed_destination.py  | 147 ------------------
 2 files changed, 132 insertions(+), 147 deletions(-)
 create mode 100644 pype/plugins/global/publish/collect_resources_path.py
 delete mode 100644 pype/plugins/global/publish/integrate_assumed_destination.py

diff --git a/pype/plugins/global/publish/collect_resources_path.py b/pype/plugins/global/publish/collect_resources_path.py
new file mode 100644
index 0000000000..52e926e09c
--- /dev/null
+++ b/pype/plugins/global/publish/collect_resources_path.py
@@ -0,0 +1,132 @@
+import os
+import copy
+
+import pyblish.api
+from avalon import io
+
+
+class IntegrateResourcesPath(pyblish.api.InstancePlugin):
+    """Generate the assumed destination path where the file will be stored"""
+
+    label = "Integrate Prepare Resource"
+    order = pyblish.api.IntegratorOrder - 0.05
+    families = ["clip", "projectfile", "plate"]
+
+    def process(self, instance):
+        project_entity = instance.context.data["projectEntity"]
+        asset_entity = instance.context.data["assetEntity"]
+
+        template_data = copy.deepcopy(instance.data["anatomyData"])
+
+        asset_name = instance.data["asset"]
+        if asset_name != asset_entity["name"]:
+            asset_entity = io.find_one({
+                "type": "asset",
+                "name": asset_name,
+                "parent": project_entity["_id"]
+            })
+            assert asset_entity, (
+                "No asset found by the name '{}' in project '{}'".format(
+                    asset_name, project_entity["name"]
+                )
+            )
+
+            instance.data["assetEntity"] = asset_entity
+
+        template_data["name"] = asset_entity["name"]
+        silo_name = asset_entity.get("silo")
+        if silo_name:
+            template_data["silo"] = silo_name
+
+        parents = asset_entity["data"].get("parents") or []
+        hierarchy = "/".join(parents)
+        template_data["hierarchy"] = hierarchy
+
+        subset_name = instance.data["subset"]
+        self.log.info(subset_name)
+
+        subset = io.find_one({
+            "type": "subset",
+            "name": subset_name,
+            "parent": asset_entity["_id"]
+        })
+
+        # assume there is no version yet, we start at `1`
+        version = None
+        version_number = 1
+        if subset is not None:
+            version = io.find_one(
+                {
+                    "type": "version",
+                    "parent": subset["_id"]
+                },
+                sort=[("name", -1)]
+            )
+
+        # if there is a subset there ought to be version
+        if version is not None:
+            version_number += version["name"]
+
+        if instance.data.get('version'):
+            version_number = int(instance.data.get('version'))
+
+        anatomy = instance.context.data["anatomy"]
+        padding = int(anatomy.templates['render']['padding'])
+
+        template_data.update({
+            "subset": subset_name,
+            "frame": ('#' * padding),
+            "version": version_number,
+            "representation": "TEMP"
+        })
+
+        anatomy_filled = 
anatomy.format(template_data) + + template_names = ["publish"] + for repre in instance.data["representations"]: + template_name = repre.get("anatomy_template") + if template_name and template_name not in template_names: + template_names.append(template_name) + + resources = instance.data.get("resources", list()) + transfers = instance.data.get("transfers", list()) + + for template_name in template_names: + mock_template = anatomy_filled[template_name]["path"] + + # For now assume resources end up in a "resources" folder in the + # published folder + mock_destination = os.path.join( + os.path.dirname(mock_template), "resources" + ) + + # Clean the path + mock_destination = os.path.abspath( + os.path.normpath(mock_destination) + ).replace("\\", "/") + + # Define resource destination and transfers + for resource in resources: + # Add destination to the resource + source_filename = os.path.basename( + resource["source"]).replace("\\", "/") + destination = os.path.join(mock_destination, source_filename) + + # Force forward slashes to fix issue with software unable + # to work correctly with backslashes in specific scenarios + # (e.g. escape characters in PLN-151 V-Ray UDIM) + destination = destination.replace("\\", "/") + + resource['destination'] = destination + + # Collect transfers for the individual files of the resource + # e.g. all individual files of a cache or UDIM textures. + files = resource['files'] + for fsrc in files: + fname = os.path.basename(fsrc) + fdest = os.path.join( + mock_destination, fname).replace("\\", "/") + transfers.append([fsrc, fdest]) + + instance.data["resources"] = resources + instance.data["transfers"] = transfers diff --git a/pype/plugins/global/publish/integrate_assumed_destination.py b/pype/plugins/global/publish/integrate_assumed_destination.py deleted file mode 100644 index d090e2711a..0000000000 --- a/pype/plugins/global/publish/integrate_assumed_destination.py +++ /dev/null @@ -1,147 +0,0 @@ -import pyblish.api -import os - -from avalon import io, api - - -class IntegrateAssumedDestination(pyblish.api.InstancePlugin): - """Generate the assumed destination path where the file will be stored""" - - label = "Integrate Assumed Destination" - order = pyblish.api.IntegratorOrder - 0.05 - families = ["clip", "projectfile", "plate"] - - def process(self, instance): - - anatomy = instance.context.data['anatomy'] - - self.create_destination_template(instance, anatomy) - - template_data = instance.data["assumedTemplateData"] - # self.log.info(anatomy.templates) - anatomy_filled = anatomy.format(template_data) - - # self.log.info(anatomy_filled) - mock_template = anatomy_filled["publish"]["path"] - - # For now assume resources end up in a "resources" folder in the - # published folder - mock_destination = os.path.join(os.path.dirname(mock_template), - "resources") - - # Clean the path - mock_destination = os.path.abspath( - os.path.normpath(mock_destination)).replace("\\", "/") - - # Define resource destination and transfers - resources = instance.data.get("resources", list()) - transfers = instance.data.get("transfers", list()) - for resource in resources: - - # Add destination to the resource - source_filename = os.path.basename( - resource["source"]).replace("\\", "/") - destination = os.path.join(mock_destination, source_filename) - - # Force forward slashes to fix issue with software unable - # to work correctly with backslashes in specific scenarios - # (e.g. 
escape characters in PLN-151 V-Ray UDIM) - destination = destination.replace("\\", "/") - - resource['destination'] = destination - - # Collect transfers for the individual files of the resource - # e.g. all individual files of a cache or UDIM textures. - files = resource['files'] - for fsrc in files: - fname = os.path.basename(fsrc) - fdest = os.path.join( - mock_destination, fname).replace("\\", "/") - transfers.append([fsrc, fdest]) - - instance.data["resources"] = resources - instance.data["transfers"] = transfers - - def create_destination_template(self, instance, anatomy): - """Create a filepath based on the current data available - - Example template: - {root}/{project}/{asset}/publish/{subset}/v{version:0>3}/ - {subset}.{representation} - Args: - instance: the instance to publish - - Returns: - file path (str) - """ - - # get all the stuff from the database - subset_name = instance.data["subset"] - self.log.info(subset_name) - asset_name = instance.data["asset"] - project_name = api.Session["AVALON_PROJECT"] - a_template = anatomy.templates - - project = io.find_one( - {"type": "project", "name": project_name}, - projection={"config": True, "data": True} - ) - - template = a_template['publish']['path'] - # anatomy = instance.context.data['anatomy'] - - asset = io.find_one({ - "type": "asset", - "name": asset_name, - "parent": project["_id"] - }) - - assert asset, ("No asset found by the name '{}' " - "in project '{}'".format(asset_name, project_name)) - - subset = io.find_one({ - "type": "subset", - "name": subset_name, - "parent": asset["_id"] - }) - - # assume there is no version yet, we start at `1` - version = None - version_number = 1 - if subset is not None: - version = io.find_one( - { - "type": "version", - "parent": subset["_id"] - }, - sort=[("name", -1)] - ) - - # if there is a subset there ought to be version - if version is not None: - version_number += version["name"] - - if instance.data.get('version'): - version_number = int(instance.data.get('version')) - - padding = int(a_template['render']['padding']) - - hierarchy = asset['data']['parents'] - if hierarchy: - # hierarchy = os.path.sep.join(hierarchy) - hierarchy = "/".join(hierarchy) - - template_data = {"root": api.Session["AVALON_PROJECTS"], - "project": {"name": project_name, - "code": project['data']['code']}, - "family": instance.data['family'], - "asset": asset_name, - "subset": subset_name, - "frame": ('#' * padding), - "version": version_number, - "hierarchy": hierarchy, - "representation": "TEMP"} - - instance.data["assumedTemplateData"] = template_data - self.log.info(template_data) - instance.data["template"] = template From f6992a3d44532fac0d11a87c5d7cdfe0a0db715d Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 3 Feb 2020 19:27:20 +0100 Subject: [PATCH 007/107] collector for project data was removed --- .../global/publish/collect_project_data.py | 24 ------------------- .../nukestudio/publish/collect_clips.py | 2 +- 2 files changed, 1 insertion(+), 25 deletions(-) delete mode 100644 pype/plugins/global/publish/collect_project_data.py diff --git a/pype/plugins/global/publish/collect_project_data.py b/pype/plugins/global/publish/collect_project_data.py deleted file mode 100644 index acdbc2c41f..0000000000 --- a/pype/plugins/global/publish/collect_project_data.py +++ /dev/null @@ -1,24 +0,0 @@ -""" -Requires: - None - -Provides: - context -> projectData -""" - -import pyblish.api -import pype.api as pype - - -class CollectProjectData(pyblish.api.ContextPlugin): - """Collecting project data from 
avalon db"""
-
-    label = "Collect Project Data"
-    order = pyblish.api.CollectorOrder - 0.1
-    hosts = ["nukestudio"]
-
-    def process(self, context):
-        # get project data from avalon db
-        context.data["projectData"] = pype.get_project()["data"]
-
-        return
diff --git a/pype/plugins/nukestudio/publish/collect_clips.py b/pype/plugins/nukestudio/publish/collect_clips.py
index 3759d50f6a..82053b6811 100644
--- a/pype/plugins/nukestudio/publish/collect_clips.py
+++ b/pype/plugins/nukestudio/publish/collect_clips.py
@@ -17,7 +17,7 @@ class CollectClips(api.ContextPlugin):
         self.log.debug("Created `assetsShared` in context")
         context.data["assetsShared"] = dict()

-        projectdata = context.data["projectData"]
+        projectdata = context.data["projectEntity"]["data"]
         version = context.data.get("version", "001")
         sequence = context.data.get("activeSequence")
         selection = context.data.get("selection")

From 5177b891ac5b1b1be0f19c621630be169b08741d Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Mon, 3 Feb 2020 19:27:42 +0100
Subject: [PATCH 008/107] extract yeti rig and extract look use anatomyData

---
 pype/plugins/maya/publish/extract_look.py     | 69 +++++--------------
 pype/plugins/maya/publish/extract_yeti_rig.py | 10 +--
 2 files changed, 23 insertions(+), 56 deletions(-)

diff --git a/pype/plugins/maya/publish/extract_look.py b/pype/plugins/maya/publish/extract_look.py
index fa6ecd72c3..4000011520 100644
--- a/pype/plugins/maya/publish/extract_look.py
+++ b/pype/plugins/maya/publish/extract_look.py
@@ -1,6 +1,7 @@
 import os
 import sys
 import json
+import copy
 import tempfile
 import contextlib
 import subprocess
@@ -333,7 +334,7 @@ class ExtractLook(pype.api.Extractor):

         anatomy = instance.context.data["anatomy"]

-        self.create_destination_template(instance, anatomy)
+        destination_dir = self.create_destination_template(instance, anatomy)

         # Compute destination location
         basename, ext = os.path.splitext(os.path.basename(filepath))
@@ -343,7 +344,7 @@
             ext = ".tx"

         return os.path.join(
-            instance.data["assumedDestination"], "resources", basename + ext
+            destination_dir, "resources", basename + ext
         )

     def _process_texture(self, filepath, do_maketx, staging, linearise, force):
@@ -421,38 +422,17 @@
             file path (str)
         """

-        # get all the stuff from the database
+        asset_entity = instance.context.data["assetEntity"]
+
+        template_data = copy.deepcopy(instance.data["anatomyData"])
+
         subset_name = instance.data["subset"]
         self.log.info(subset_name)
-        asset_name = instance.data["asset"]
-        project_name = api.Session["AVALON_PROJECT"]
-        a_template = anatomy.templates
-
-        project = io.find_one(
-            {
-                "type": "project",
-                "name": project_name
-            },
-            projection={"config": True, "data": True}
-        )
-
-        template = a_template["publish"]["path"]
-        # anatomy = instance.context.data['anatomy']
-
-        asset = io.find_one({
-            "type": "asset",
-            "name": asset_name,
-            "parent": project["_id"]
-        })
-
-        assert asset, ("No asset found by the name '{}' "
-                       "in project '{}'").format(asset_name, project_name)
-        silo = asset.get("silo")

         subset = io.find_one({
             "type": "subset",
             "name": subset_name,
             "parent": asset_entity["_id"]
         })

         # assume there is no version yet, we start at `1`
         version = None
         version_number = 1
         if subset is not None:
             version = io.find_one(
                 {
                     "type": "version",
                     "parent": subset["_id"]
                 },
                 sort=[("name", -1)]
             )

             # if there is a subset there ought to be version
             if version is not None:
                 version_number += version["name"]

-        if instance.data.get("version"):
-            version_number = int(instance.data.get("version"))
+        if instance.data.get('version'):
+            version_number = int(instance.data.get('version'))

-        padding = int(a_template["render"]["padding"])
+        anatomy = instance.context.data["anatomy"]
+        padding = int(anatomy.templates['render']['padding'])

-        hierarchy = asset["data"]["parents"]
-        if hierarchy:
-            # hierarchy = os.path.sep.join(hierarchy)
-            hierarchy = "/".join(hierarchy)
-
-        template_data = {
-            "root": api.Session["AVALON_PROJECTS"],
-            "project": {"name": project_name, "code": project["data"]["code"]},
-            "silo": silo,
-            "family": instance.data["family"],
-            "asset": asset_name,
+        template_data.update({
             "subset": subset_name,
             "frame": ("#" * padding),
             "version": version_number,
-            "hierarchy": hierarchy,
-            "representation": "TEMP",
-        }
+            "representation": "TEMP"
+        })
+        anatomy_filled = anatomy.format(template_data)

-        instance.data["assumedTemplateData"] = template_data
-        self.log.info(template_data)
-        instance.data["template"] = template
-        # We take the parent folder of representation 'filepath'
-        instance.data["assumedDestination"] = os.path.dirname(
-            anatomy.format(template_data)["publish"]["path"]
-        )
+        return os.path.dirname(anatomy_filled["publish"]["path"])
diff --git a/pype/plugins/maya/publish/extract_yeti_rig.py b/pype/plugins/maya/publish/extract_yeti_rig.py
index 892bc0bea6..d390a1365a 100644
--- a/pype/plugins/maya/publish/extract_yeti_rig.py
+++ b/pype/plugins/maya/publish/extract_yeti_rig.py
@@ -1,6 +1,7 @@
 import os
 import json
 import contextlib
+import copy

 from maya import cmds
@@ -111,11 +112,12 @@ class ExtractYetiRig(pype.api.Extractor):
         self.log.info("Writing metadata file")

         # Create assumed destination folder for imageSearchPath
-        assumed_temp_data = instance.data["assumedTemplateData"]
-        template = instance.data["template"]
-        template_formatted = template.format(**assumed_temp_data)
+        template_data = copy.deepcopy(instance.data["anatomyData"])

-        destination_folder = os.path.dirname(template_formatted)
+        anatomy = instance.context.data["anatomy"]
+        filled = anatomy.format(template_data)
+
+        destination_folder = os.path.dirname(filled["publish"]["path"])
         image_search_path = os.path.join(destination_folder, "resources")
         image_search_path = os.path.normpath(image_search_path)

From f1486a9cd42820684d03c317442a0919b597bfef Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Mon, 3 Feb 2020 19:27:59 +0100
Subject: [PATCH 009/107] integrate_new uses anatomyData

---
 pype/plugins/global/publish/integrate_new.py | 80 ++++++++------------
 1 file changed, 30 insertions(+), 50 deletions(-)

diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py
index 7d95534897..c6bc1ffbab 100644
--- a/pype/plugins/global/publish/integrate_new.py
+++ b/pype/plugins/global/publish/integrate_new.py
@@ -2,6 +2,7 @@ import os
 from os.path import getsize
 import logging
 import sys
+import copy
 import clique
 import errno
 import pyblish.api
@@ -100,12 +101,14 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):

     def register(self, instance):
         # Required environment variables
-        PROJECT = api.Session["AVALON_PROJECT"]
-        ASSET = instance.data.get("asset") or api.Session["AVALON_ASSET"]
-        TASK = instance.data.get("task") or api.Session["AVALON_TASK"]
-        LOCATION = api.Session["AVALON_LOCATION"]
+        anatomy_data = instance.data["anatomyData"]
+        asset_entity = instance.data["assetEntity"]
+        avalon_location = api.Session["AVALON_LOCATION"]
+
+        io.install()

         context = instance.context
+
         # Atomicity
         #
         # Guarantee atomic publishes - each asset contains
         # an identical set of members.
         #  __
         # /     o
         # /       \
         # |       o |
         # \       /
         #  o __/
         #
-        assert all(result["success"] for result in context.data["results"]), (
-            "Atomicity not held, aborting.")
-
-        # Assemble
-        #
-        #   |
-        #   v
-        #  ---> <----
-        #     ^
-        #     |
-
         stagingdir = 
instance.data.get("stagingDir") if not stagingdir: - self.log.info('''{} is missing reference to staging - directory Will try to get it from - representation'''.format(instance)) + self.log.info(( + "{0} is missing reference to staging directory." + " Will try to get it from representation." + ).format(instance)) - # extra check if stagingDir actually exists and is available - - self.log.debug("Establishing staging directory @ %s" % stagingdir) + else: + self.log.debug( + "Establishing staging directory @ {0}".format(stagingdir) + ) # Ensure at least one file is set up for transfer in staging dir. - repres = instance.data.get("representations", None) + repres = instance.data.get("representations") assert repres, "Instance has no files to transfer" assert isinstance(repres, (list, tuple)), ( - "Instance 'files' must be a list, got: {0}".format(repres) + "Instance 'files' must be a list, got: {0} {1}".format( + str(type(repres)), str(repres) + ) ) - # FIXME: io is not initialized at this point for shell host - io.install() - project = io.find_one({"type": "project"}) - - asset = io.find_one({ - "type": "asset", - "name": ASSET, - "parent": project["_id"] - }) - - assert all([project, asset]), ("Could not find current project or " - "asset '%s'" % ASSET) - - subset = self.get_subset(asset, instance) + intent = context.data.get("intent") + subset = self.get_subset(asset_entity, instance) # get next version latest_version = io.find_one( @@ -229,16 +224,6 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): # \ \________. # \|________| # - root = api.registered_root() - hierarchy = "" - parents = io.find_one({ - "type": 'asset', - "name": ASSET - })['data']['parents'] - if parents and len(parents) > 0: - # hierarchy = os.path.sep.join(hierarchy) - hierarchy = os.path.join(*parents) - anatomy = instance.context.data['anatomy'] # Find the representations to transfer amongst the files @@ -261,20 +246,15 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): # |_______| # # create template data for Anatomy - template_data = {"root": root, - "project": {"name": PROJECT, - "code": project['data']['code']}, - "silo": asset.get('silo'), - "task": TASK, - "asset": ASSET, - "family": instance.data['family'], - "subset": subset["name"], - "version": int(version["name"]), - "hierarchy": hierarchy} + template_data = copy.deepcopy(anatomy_data) + # TODO cleanup this code, should be already in anatomyData + template_data.update({ + "subset": subset["name"], + "version": int(version["name"]) + }) - # Add datetime data to template data - datetime_data = context.data.get("datetimeData") or {} - template_data.update(datetime_data) + if intent is not None: + template_data["intent"] = intent resolution_width = repre.get("resolutionWidth") resolution_height = repre.get("resolutionHeight") @@ -292,6 +272,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): stagingdir = repre['stagingDir'] if repre.get('anatomy_template'): template_name = repre['anatomy_template'] + template = os.path.normpath( anatomy.templates[template_name]["path"]) @@ -322,7 +303,6 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): template_filled = anatomy_filled[template_name]["path"] if repre_context is None: repre_context = template_filled.used_values - test_dest_files.append( os.path.normpath(template_filled) ) From 91d51f145844aed301c3f7a721e807e0dfb154a7 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 3 Feb 2020 19:28:33 +0100 Subject: [PATCH 010/107] removed deprecated validate templates --- 
.../global/publish/validate_templates.py | 43 ------------------- 1 file changed, 43 deletions(-) delete mode 100644 pype/plugins/global/publish/validate_templates.py diff --git a/pype/plugins/global/publish/validate_templates.py b/pype/plugins/global/publish/validate_templates.py deleted file mode 100644 index f24f6b1a2e..0000000000 --- a/pype/plugins/global/publish/validate_templates.py +++ /dev/null @@ -1,43 +0,0 @@ -import pyblish.api -import os - - -class ValidateTemplates(pyblish.api.ContextPlugin): - """Check if all templates were filled""" - - label = "Validate Templates" - order = pyblish.api.ValidatorOrder - 0.1 - hosts = ["maya", "houdini", "nuke"] - - def process(self, context): - - anatomy = context.data["anatomy"] - if not anatomy: - raise RuntimeError("Did not find anatomy") - else: - data = { - "root": os.environ["PYPE_STUDIO_PROJECTS_PATH"], - "project": {"name": "D001_projectsx", - "code": "prjX"}, - "ext": "exr", - "version": 3, - "task": "animation", - "asset": "sh001", - "app": "maya", - "hierarchy": "ep101/sq01/sh010"} - - anatomy_filled = anatomy.format(data) - self.log.info(anatomy_filled) - - data = {"root": os.environ["PYPE_STUDIO_PROJECTS_PATH"], - "project": {"name": "D001_projectsy", - "code": "prjY"}, - "ext": "abc", - "version": 1, - "task": "lookdev", - "asset": "bob", - "app": "maya", - "hierarchy": "ep101/sq01/bob"} - - anatomy_filled = context.data["anatomy"].format(data) - self.log.info(anatomy_filled["work"]["folder"]) From 670f660a9724a83691913ffbfece7b9ae22cd414 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 4 Feb 2020 10:59:30 +0100 Subject: [PATCH 011/107] changed collect instance anatomy data order to 0.49 --- pype/plugins/global/publish/collect_instance_anatomy_data.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pype/plugins/global/publish/collect_instance_anatomy_data.py b/pype/plugins/global/publish/collect_instance_anatomy_data.py index a1a9278d2a..76ab8dc3f6 100644 --- a/pype/plugins/global/publish/collect_instance_anatomy_data.py +++ b/pype/plugins/global/publish/collect_instance_anatomy_data.py @@ -26,7 +26,7 @@ import pyblish.api class CollectInstanceAnatomyData(pyblish.api.InstancePlugin): """Fill templates with data needed for publish""" - order = pyblish.api.CollectorOrder + 0.1 + order = pyblish.api.CollectorOrder + 0.49 label = "Collect instance anatomy data" hosts = ["maya", "nuke", "standalonepublisher"] From 1a04dca10a7c2af07d779bc1339c941eb5ebf44d Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 4 Feb 2020 11:00:38 +0100 Subject: [PATCH 012/107] asset entity may not exist so collecting instance anatomy data was changed to not crash --- .../publish/collect_instance_anatomy_data.py | 46 ++++++++++--------- 1 file changed, 24 insertions(+), 22 deletions(-) diff --git a/pype/plugins/global/publish/collect_instance_anatomy_data.py b/pype/plugins/global/publish/collect_instance_anatomy_data.py index 76ab8dc3f6..ecef9d10f2 100644 --- a/pype/plugins/global/publish/collect_instance_anatomy_data.py +++ b/pype/plugins/global/publish/collect_instance_anatomy_data.py @@ -53,40 +53,42 @@ class CollectInstanceAnatomyData(pyblish.api.InstancePlugin): instance.context.data["projectEntity"] = project_entity subset_name = instance.data["subset"] - subset_entity = io.find_one({ - "type": "subset", - "name": subset_name, - "parent": asset_entity["_id"] - }) - version_number = instance.data.get("version") - if version_number is None: - version_number = instance.context.data.get("version") - latest_version = None - if 
subset_entity is None: - self.log.debug("Subset entity does not exist yet.") - else: - version_entity = io.find_one( - { - "type": "version", - "parent": subset_entity["_id"] - }, - sort=[("name", -1)] - ) - if version_entity: - latest_version = version_entity["name"] + if asset_entity: + subset_entity = io.find_one({ + "type": "subset", + "name": subset_name, + "parent": asset_entity["_id"] + }) + + + if subset_entity is None: + self.log.debug("Subset entity does not exist yet.") + else: + version_entity = io.find_one( + { + "type": "version", + "parent": subset_entity["_id"] + }, + sort=[("name", -1)] + ) + if version_entity: + latest_version = version_entity["name"] + + + # If version is not specified for instance or context if version_number is None: # TODO we should be able to change this version by studio # preferences (like start with version number `0`) version_number = 1 + # use latest version (+1) if any already exist if latest_version is not None: version_number += int(latest_version) # Version should not be collected since may be instance anatomy_data.update({ - "asset": asset_entity["name"], + "asset": asset_name, "family": instance.data["family"], "subset": subset_name, "version": version_number From a14b05ccd1b5f75d4ccde0349de1096ec0425592 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 4 Feb 2020 11:00:48 +0100 Subject: [PATCH 013/107] removed comments --- .../global/publish/collect_instance_anatomy_data.py | 8 -------- 1 file changed, 8 deletions(-) diff --git a/pype/plugins/global/publish/collect_instance_anatomy_data.py b/pype/plugins/global/publish/collect_instance_anatomy_data.py index ecef9d10f2..838fb1a113 100644 --- a/pype/plugins/global/publish/collect_instance_anatomy_data.py +++ b/pype/plugins/global/publish/collect_instance_anatomy_data.py @@ -108,14 +108,6 @@ class CollectInstanceAnatomyData(pyblish.api.InstancePlugin): instance.data["anatomyData"] = anatomy_data instance.data["latestVersion"] = latest_version - # TODO check if template is used anywhere - # instance.data["template"] = template - - # TODO we should move this to any Validator - # # We take the parent folder of representation 'filepath' - # instance.data["assumedDestination"] = os.path.dirname( - # (anatomy.format(template_data))["publish"]["path"] - # ) self.log.info("Instance anatomy Data collected") self.log.debug(json.dumps(anatomy_data, indent=4)) From f70f307cc40d95bfa5181e27ef7384108e9fb10b Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 4 Feb 2020 15:16:35 +0100 Subject: [PATCH 014/107] removed misleading comments --- pype/plugins/global/publish/integrate_new.py | 52 -------------------- 1 file changed, 52 deletions(-) diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py index c6bc1ffbab..c3a03324aa 100644 --- a/pype/plugins/global/publish/integrate_new.py +++ b/pype/plugins/global/publish/integrate_new.py @@ -109,38 +109,6 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): context = instance.context - # Atomicity - # - # Guarantee atomic publishes - each asset contains - # an identical set of members.
- # __ - # / o - # / \ - # | o | - # \ / - # o __/ - # - # for result in context.data["results"]: - # if not result["success"]: - # self.log.debug(result) - # exc_type, exc_value, exc_traceback = result["error_info"] - # extracted_traceback = traceback.extract_tb(exc_traceback)[-1] - # self.log.debug( - # "Error at line {}: \"{}\"".format( - # extracted_traceback[1], result["error"] - # ) - # ) - # assert all(result["success"] for result in context.data["results"]),( - # "Atomicity not held, aborting.") - - # Assemble - # - # | - # v - # ---> <---- - # ^ - # | - # stagingdir = instance.data.get("stagingDir") if not stagingdir: self.log.info(( @@ -214,16 +182,6 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): version_id = existing_version['_id'] instance.data['version'] = version['name'] - # Write to disk - # _ - # | | - # _| |_ - # ____\ / - # |\ \ / \ - # \ \ v \ - # \ \________. - # \|________| - # anatomy = instance.context.data['anatomy'] # Find the representations to transfer amongst the files @@ -235,16 +193,6 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): instance.data['transfers'] = [] for idx, repre in enumerate(instance.data["representations"]): - - # Collection - # _______ - # |______|\ - # | |\| - # | || - # | || - # | || - # |_______| - # # create template data for Anatomy template_data = copy.deepcopy(anatomy_data) # TODO cleanup this code, should be already in anatomyData From 6ef1a7e17605233bc8ea0dc25e3912d0d0a9dc9d Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 4 Feb 2020 15:17:15 +0100 Subject: [PATCH 015/107] formatting --- pype/plugins/global/publish/integrate_new.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py index c3a03324aa..b71b5fb298 100644 --- a/pype/plugins/global/publish/integrate_new.py +++ b/pype/plugins/global/publish/integrate_new.py @@ -159,10 +159,12 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): if version_data_instance: version_data.update(version_data_instance) - version = self.create_version(subset=subset, - version_number=next_version, - locations=[LOCATION], - data=version_data) + version = self.create_version( + subset=subset, + version_number=next_version, + locations=[avalon_location], + data=version_data + ) self.log.debug("Creating version ...") existing_version = io.find_one({ From e23cc33de7ad1e9156f1a659a0619e2fc2609f68 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 4 Feb 2020 15:18:11 +0100 Subject: [PATCH 016/107] asset_entity check moved back since nukestudio instances may not have value set --- pype/plugins/global/publish/integrate_new.py | 26 +++++++++++++++++++- 1 file changed, 25 insertions(+), 1 deletion(-) diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py index b71b5fb298..774a54ea7c 100644 --- a/pype/plugins/global/publish/integrate_new.py +++ b/pype/plugins/global/publish/integrate_new.py @@ -102,13 +102,37 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): def register(self, instance): # Required environment variables anatomy_data = instance.data["anatomyData"] - asset_entity = instance.data["assetEntity"] avalon_location = api.Session["AVALON_LOCATION"] io.install() context = instance.context + project_entity = instance.data["projectEntity"] + + asset_name = instance.data["asset"] + asset_entity = instance.data.get("assetEntity") + if not asset_entity: + asset_entity = io.find_one({ + "type": "asset", + "name":
asset_name, + "parent": project_entity["_id"] + }) + + assert asset_entity, ( + "No asset found by the name \"{0}\" in project \"{1}\"" + ).format(asset_name, project_entity["name"]) + + instance.data["assetEntity"] = asset_entity + + # update anatomy data with asset specific keys + # - name should already been set + hierarchy = "" + parents = asset_entity["data"]["parents"] + if parents: + hierarchy = "/".join(parents) + anatomy_data["hierarchy"] = hierarchy + stagingdir = instance.data.get("stagingDir") if not stagingdir: self.log.info(( From e23cc33de7ad1e9156f1a659a0619e2fc2609f68 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 4 Feb 2020 15:18:59 +0100 Subject: [PATCH 017/107] latest version is not queried before check if instance have already version set in data --- pype/plugins/global/publish/integrate_new.py | 33 +++++++++++--------- 1 file changed, 19 insertions(+), 14 deletions(-) diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py index 774a54ea7c..24162c4cf1 100644 --- a/pype/plugins/global/publish/integrate_new.py +++ b/pype/plugins/global/publish/integrate_new.py @@ -157,22 +157,27 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): intent = context.data.get("intent") subset = self.get_subset(asset_entity, instance) - # get next version - latest_version = io.find_one( - { - "type": "version", - "parent": subset["_id"] - }, - {"name": True}, - sort=[("name", -1)] - ) + # TODO iLLiCiT use "latestVersion" from `instance.data` + # and store version in anatomyData instance collector + # instead of query again + instance_version = instance.data.get('version') + if instance_version is not None: + next_version = int(instance_version) - next_version = 1 - if latest_version is not None: - next_version += latest_version["name"] + else: + # get next version + latest_version = io.find_one( + { + "type": "version", + "parent": subset["_id"] + }, + {"name": True}, + sort=[("name", -1)] + ) - if instance.data.get('version'): - next_version = int(instance.data.get('version')) + next_version = 1 + if latest_version is not None: + next_version += int(latest_version["name"]) self.log.debug("Next version: v{0:03d}".format(next_version)) From 29c6768da935380dd499834857b896c64d2b05f5 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 4 Feb 2020 15:19:19 +0100 Subject: [PATCH 018/107] intent added to anatomy data --- pype/plugins/global/publish/integrate_new.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py index 24162c4cf1..093a9e354c 100644 --- a/pype/plugins/global/publish/integrate_new.py +++ b/pype/plugins/global/publish/integrate_new.py @@ -154,7 +154,6 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): ) ) - intent = context.data.get("intent") subset = self.get_subset(asset_entity, instance) # TODO iLLiCiT use "latestVersion" from `instance.data` @@ -213,6 +212,10 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): version_id = existing_version['_id'] instance.data['version'] = version['name'] + intent = context.data.get("intent") + if intent is not None: + anatomy_data["intent"] = intent + anatomy = instance.context.data['anatomy'] # Find the representations to transfer amongst the files From 3a5ab92687bbf7cc89ade7ec453997d5189e0f64 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 4 Feb 2020 15:19:44 +0100 Subject: [PATCH 019/107] removed subset and version anatomy update since they are already set for 
whole instance --- pype/plugins/global/publish/integrate_new.py | 6 ------ 1 file changed, 6 deletions(-) diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py index 093a9e354c..fc7cbf4afa 100644 --- a/pype/plugins/global/publish/integrate_new.py +++ b/pype/plugins/global/publish/integrate_new.py @@ -229,12 +229,6 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): for idx, repre in enumerate(instance.data["representations"]): # create template data for Anatomy template_data = copy.deepcopy(anatomy_data) - # TODO cleanup this code, should be already in anatomyData - template_data.update({ - "subset": subset["name"], - "version": int(version["name"]) - }) - if intent is not None: template_data["intent"] = intent From 9113fb1c7f72b1e1ad7a0e32ac16fcb26cd67139 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 4 Feb 2020 15:20:18 +0100 Subject: [PATCH 020/107] added check if index_frame_start exists --- pype/plugins/global/publish/integrate_new.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py index fc7cbf4afa..6d85e29732 100644 --- a/pype/plugins/global/publish/integrate_new.py +++ b/pype/plugins/global/publish/integrate_new.py @@ -298,7 +298,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): index_frame_start = int(repre.get("frameStart")) # exception for slate workflow - if "slate" in instance.data["families"]: + if index_frame_start and "slate" in instance.data["families"]: index_frame_start -= 1 dst_padding_exp = src_padding_exp From fde457d445c18d2f87591017df23e3915b8e55b4 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 4 Feb 2020 15:21:27 +0100 Subject: [PATCH 021/107] intent added to version data --- pype/plugins/global/publish/integrate_new.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py index 6d85e29732..5dba744346 100644 --- a/pype/plugins/global/publish/integrate_new.py +++ b/pype/plugins/global/publish/integrate_new.py @@ -601,6 +601,10 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): "fps": context.data.get( "fps", instance.data.get("fps"))} + intent = context.data.get("intent") + if intent is not None: + version_data["intent"] = intent + # Include optional data if present in optionals = [ "frameStart", "frameEnd", "step", "handles", From df512a5a4a17d9d8b14ceba2bc60a908eccbfe5e Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 4 Feb 2020 15:21:52 +0100 Subject: [PATCH 022/107] formatting changes --- pype/plugins/global/publish/integrate_new.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py index 5dba744346..1ff1dfe520 100644 --- a/pype/plugins/global/publish/integrate_new.py +++ b/pype/plugins/global/publish/integrate_new.py @@ -331,7 +331,6 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): if not dst_start_frame: dst_start_frame = dst_padding - dst = "{0}{1}{2}".format( dst_head, dst_start_frame, @@ -503,14 +502,14 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): filelink.create(src, dst, filelink.HARDLINK) def get_subset(self, asset, instance): + subset_name = instance.data["subset"] subset = io.find_one({ "type": "subset", "parent": asset["_id"], - "name": instance.data["subset"] + "name": subset_name }) if subset is None: - subset_name = instance.data["subset"]
self.log.info("Subset '%s' not found, creating.." % subset_name) self.log.debug("families. %s" % instance.data.get('families')) self.log.debug( From 6bd8706579b5b1c19ae0ea0c3859e53fdda02013 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 4 Feb 2020 15:22:06 +0100 Subject: [PATCH 023/107] added few TODOs --- pype/plugins/global/publish/integrate_new.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py index 1ff1dfe520..15165f4217 100644 --- a/pype/plugins/global/publish/integrate_new.py +++ b/pype/plugins/global/publish/integrate_new.py @@ -203,6 +203,8 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): if existing_version is None: version_id = io.insert_one(version).inserted_id else: + # TODO query by _id and + # remove old version and representations but keep their ids io.update_many({ 'type': 'version', 'parent': subset["_id"], @@ -304,6 +306,9 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): dst_padding_exp = src_padding_exp dst_start_frame = None for i in src_collection.indexes: + # TODO 1.) do not count padding in each index iteration + # 2.) do not count dst_padding from src_padding before + # index_frame_start check src_padding = src_padding_exp % i src_file_name = "{0}{1}{2}".format( From 178fed2ae22893670dcfff056c13f44ed64c925b Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 4 Feb 2020 15:32:12 +0100 Subject: [PATCH 024/107] updated instance input/output docsting --- pype/plugins/global/publish/collect_instance_anatomy_data.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/pype/plugins/global/publish/collect_instance_anatomy_data.py b/pype/plugins/global/publish/collect_instance_anatomy_data.py index 838fb1a113..404480b30b 100644 --- a/pype/plugins/global/publish/collect_instance_anatomy_data.py +++ b/pype/plugins/global/publish/collect_instance_anatomy_data.py @@ -8,12 +8,17 @@ Requires: instance -> family Optional: + instance -> version instance -> resolutionWidth instance -> resolutionHeight instance -> fps Provides: + instance -> projectEntity + instance -> assetEntity instance -> anatomyData + instance -> version + instance -> latestVersion """ import copy From ed8b56b6de17330054b2c9469ea63133a1ed5a36 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 4 Feb 2020 15:32:54 +0100 Subject: [PATCH 025/107] fixed per key instance.data value assignment --- .../global/publish/collect_instance_anatomy_data.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/pype/plugins/global/publish/collect_instance_anatomy_data.py b/pype/plugins/global/publish/collect_instance_anatomy_data.py index 404480b30b..8a98b6cbb2 100644 --- a/pype/plugins/global/publish/collect_instance_anatomy_data.py +++ b/pype/plugins/global/publish/collect_instance_anatomy_data.py @@ -54,9 +54,6 @@ class CollectInstanceAnatomyData(pyblish.api.InstancePlugin): "parent": project_entity["_id"] }) - instance.context.data["assetEntity"] = asset_entity - instance.context.data["projectEntity"] = project_entity - subset_name = instance.data["subset"] version_number = instance.data.get("version") latest_version = None @@ -68,7 +65,6 @@ class CollectInstanceAnatomyData(pyblish.api.InstancePlugin): "parent": asset_entity["_id"] }) - if subset_entity is None: self.log.debug("Subset entity does not exist yet.") else: @@ -84,7 +80,7 @@ class CollectInstanceAnatomyData(pyblish.api.InstancePlugin): # If version is not specified for instance or context if version_number is None: - # 
TODO we should be able to change this version by studio + # TODO we should be able to change default version by studio # preferences (like start with version number `0`) version_number = 1 # use latest version (+1) if any already exist if latest_version is not None: version_number += int(latest_version) @@ -111,8 +107,12 @@ class CollectInstanceAnatomyData(pyblish.api.InstancePlugin): if resolution_height: anatomy_data["fps"] = fps + instance.data["projectEntity"] = project_entity + instance.data["assetEntity"] = asset_entity instance.data["anatomyData"] = anatomy_data instance.data["latestVersion"] = latest_version + # TODO should the version number be set here? + instance.data["version"] = version_number self.log.info("Instance anatomy Data collected") self.log.debug(json.dumps(anatomy_data, indent=4)) From 876ff064b6b6c1a941888e43758196525b49872c Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 4 Feb 2020 16:11:29 +0100 Subject: [PATCH 026/107] reduced collect resources path because of already collected data in instance anatomy data --- .../global/publish/collect_resources_path.py | 62 +------------------ 1 file changed, 3 insertions(+), 59 deletions(-) diff --git a/pype/plugins/global/publish/collect_resources_path.py b/pype/plugins/global/publish/collect_resources_path.py index 52e926e09c..de78874cd6 100644 --- a/pype/plugins/global/publish/collect_resources_path.py +++ b/pype/plugins/global/publish/collect_resources_path.py @@ -13,70 +13,14 @@ class IntegrateResourcesPath(pyblish.api.InstancePlugin): families = ["clip", "projectfile", "plate"] def process(self, instance): - project_entity = instance.context["projectEntity"] - asset_entity = instance.context["assetEntity"] - template_data = copy.deepcopy(instance.data["anatomyData"]) - asset_name = instance.data["asset"] - if asset_name != asset_entity["name"]: - asset_entity = io.find_one({ - "type": "asset", - "name": asset_name, - "parent": project_entity["_id"] - }) - assert asset_entity, ( - "No asset found by the name '{}' in project '{}'".format( - asset_name, project_entity["name"] - ) - ) - - instance.data["assetEntity"] = asset_entity - - template_data["name"] = asset_entity["name"] - silo_name = asset_entity.get("silo") - if silo_name: - template_data["silo"] = silo_name - - parents = asset_entity["data"].get("parents") or [] - hierarchy = "/".join(parents) - template_data["hierarchy"] = hierarchy - - subset_name = instance.data["subset"] - self.log.info(subset_name) - - subset = io.find_one({ - "type": "subset", - "name": subset_name, - "parent": asset_entity["_id"] - }) - - # assume there is no version yet, we start at `1` - version = None - version_number = 1 - if subset is not None: - version = io.find_one( - { - "type": "version", - "parent": subset["_id"] - }, - sort=[("name", -1)] - ) - - # if there is a subset there ought to be version - if version is not None: - version_number += version["name"] - - if instance.data.get('version'): - version_number = int(instance.data.get('version')) - anatomy = instance.context.data["anatomy"] - padding = int(anatomy.templates['render']['padding']) + padding = int(anatomy.templates["render"]["padding"]) + # add possible representation specific key to anatomy data template_data.update({ - "subset": subset_name, - "frame": ('#' * padding), - "version": version_number, + "frame": ("#" * padding), "representation": "TEMP" }) From 3fdfcec29bf6b62023fe34a8b1d1b01fe2198edf Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 4 Feb 2020 16:17:18 +0100 Subject: [PATCH 027/107] version_number is used from instance.data in integrate_new ---
pype/plugins/global/publish/integrate_new.py | 32 +++----------------- 1 file changed, 5 insertions(+), 27 deletions(-) diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py index 15165f4217..aff92ea308 100644 --- a/pype/plugins/global/publish/integrate_new.py +++ b/pype/plugins/global/publish/integrate_new.py @@ -156,40 +156,18 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): subset = self.get_subset(asset_entity, instance) - # TODO iLLiCiT use "latestVersion" from `instance.data` - # and store version in anatomyData instance collector - # instead of query again - instance_version = instance.data.get('version') - if instance_version is not None: - next_version = int(instance_version) - - else: - # get next version - latest_version = io.find_one( - { - "type": "version", - "parent": subset["_id"] - }, - {"name": True}, - sort=[("name", -1)] - ) - - next_version = 1 - if latest_version is not None: - next_version += int(latest_version["name"]) - - self.log.debug("Next version: v{0:03d}".format(next_version)) + version_number = instance.data["version"] + self.log.debug("Next version: v{0:03d}".format(version_number)) version_data = self.create_version_data(context, instance) version_data_instance = instance.data.get('versionData') - if version_data_instance: version_data.update(version_data_instance) version = self.create_version( subset=subset, - version_number=next_version, + version_number=version_number, locations=[avalon_location], data=version_data ) @@ -198,7 +176,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): existing_version = io.find_one({ 'type': 'version', 'parent': subset["_id"], - 'name': next_version + 'name': version_number }) if existing_version is None: version_id = io.insert_one(version).inserted_id @@ -208,7 +186,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): io.update_many({ 'type': 'version', 'parent': subset["_id"], - 'name': next_version + 'name': version_number }, {'$set': version} ) version_id = existing_version['_id'] From ebdc7c3700f17f636573fa45e4ad500f261200f9 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 4 Feb 2020 16:23:03 +0100 Subject: [PATCH 028/107] added few todos --- pype/plugins/global/publish/collect_resources_path.py | 5 +++-- pype/plugins/global/publish/integrate_new.py | 5 ++++- 2 files changed, 7 insertions(+), 3 deletions(-) diff --git a/pype/plugins/global/publish/collect_resources_path.py b/pype/plugins/global/publish/collect_resources_path.py index de78874cd6..fe152584b6 100644 --- a/pype/plugins/global/publish/collect_resources_path.py +++ b/pype/plugins/global/publish/collect_resources_path.py @@ -16,11 +16,12 @@ class IntegrateResourcesPath(pyblish.api.InstancePlugin): template_data = copy.deepcopy(instance.data["anatomyData"]) anatomy = instance.context.data["anatomy"] - padding = int(anatomy.templates["render"]["padding"]) + frame_padding = int(anatomy.templates["render"]["padding"]) # add possible representation specific key to anatomy data + # TODO ability to set host specific "frame" value template_data.update({ - "frame": ("#" * padding), + "frame": ("#" * frame_padding), "representation": "TEMP" }) diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py index aff92ea308..570a093cdc 100644 --- a/pype/plugins/global/publish/integrate_new.py +++ b/pype/plugins/global/publish/integrate_new.py @@ -102,7 +102,6 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): def register(self, instance): # Required 
environment variables anatomy_data = instance.data["anatomyData"] - avalon_location = api.Session["AVALON_LOCATION"] io.install() @@ -165,6 +164,10 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): if version_data_instance: version_data.update(version_data_instance) + # TODO remove avalon_location (shall we?) + avalon_location = api.Session["AVALON_LOCATION"] + # TODO rename method from `create_version` to + # `prepare_version` or similar... version = self.create_version( subset=subset, version_number=version_number, From 66466bc24bf3ba98f1a715dfbd0fe9352ba6a65c Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 4 Feb 2020 18:07:17 +0100 Subject: [PATCH 029/107] collect resources path uses anatomy publish.folder key with ability of backwards compatibility --- .../global/publish/collect_resources_path.py | 95 ++++++++----------- 1 file changed, 39 insertions(+), 56 deletions(-) diff --git a/pype/plugins/global/publish/collect_resources_path.py b/pype/plugins/global/publish/collect_resources_path.py index fe152584b6..9fc8c576f5 100644 --- a/pype/plugins/global/publish/collect_resources_path.py +++ b/pype/plugins/global/publish/collect_resources_path.py @@ -1,77 +1,60 @@ +""" +Requires: + context -> anatomy + context -> anatomyData + +Provides: + instance -> publishDir + instance -> resourcesDir +""" + import os import copy import pyblish.api -from avalon import io +from avalon import api -class IntegrateResourcesPath(pyblish.api.InstancePlugin): - """Generate the assumed destination path where the file will be stored""" +class CollectResourcesPath(pyblish.api.InstancePlugin): + """Generate directory path where the files and resources will be stored""" - label = "Integrate Prepare Resource" - order = pyblish.api.IntegratorOrder - 0.05 - families = ["clip", "projectfile", "plate"] + label = "Collect Resources Path" + order = pyblish.api.CollectorOrder + 0.995 def process(self, instance): + anatomy = instance.context.data["anatomy"] + template_data = copy.deepcopy(instance.data["anatomyData"]) - anatomy = instance.context.data["anatomy"] - frame_padding = int(anatomy.templates["render"]["padding"]) - - # add possible representation specific key to anatomy data - # TODO ability to set host specific "frame" value + # This is for cases of Deprecated anatomy without `folder` + # TODO remove when all clients have solved this issue template_data.update({ - "frame": ("#" * frame_padding), + "frame": "FRAME_TEMP", "representation": "TEMP" }) anatomy_filled = anatomy.format(template_data) - template_names = ["publish"] - for repre in instance.data["representations"]: - template_name = repre.get("anatomy_template") - if template_name and template_name not in template_names: - template_names.append(template_name) + if "folder" in anatomy.templates["publish"]: + publish_folder = anatomy_filled["publish"]["folder"] + else: + # solve deprecated situation when `folder` key is not underneath + # `publish` anatomy + project_name = api.Session["AVALON_PROJECT"] + self.log.warning(( + "Deprecation warning: Anatomy does not have set `folder`" + " key underneath `publish` (in global or for project `{}`)."
+ ).format(project_name)) - resources = instance.data.get("resources", list()) - transfers = instance.data.get("transfers", list()) + file_path = anatomy_filled["publish"]["path"] + # Directory + publish_folder = os.path.dirname(file_path) - for template_name in template_names: - mock_template = anatomy_filled[template_name]["path"] + publish_folder = os.path.normpath(publish_folder) + resources_folder = os.path.join(publish_folder, "resources") - # For now assume resources end up in a "resources" folder in the - # published folder - mock_destination = os.path.join( - os.path.dirname(mock_template), "resources" - ) + instance.data["publishDir"] = publish_folder + instance.data["resourcesDir"] = resources_folder - # Clean the path - mock_destination = os.path.abspath( - os.path.normpath(mock_destination) - ).replace("\\", "/") - - # Define resource destination and transfers - for resource in resources: - # Add destination to the resource - source_filename = os.path.basename( - resource["source"]).replace("\\", "/") - destination = os.path.join(mock_destination, source_filename) - - # Force forward slashes to fix issue with software unable - # to work correctly with backslashes in specific scenarios - # (e.g. escape characters in PLN-151 V-Ray UDIM) - destination = destination.replace("\\", "/") - - resource['destination'] = destination - - # Collect transfers for the individual files of the resource - # e.g. all individual files of a cache or UDIM textures. - files = resource['files'] - for fsrc in files: - fname = os.path.basename(fsrc) - fdest = os.path.join( - mock_destination, fname).replace("\\", "/") - transfers.append([fsrc, fdest]) - - instance.data["resources"] = resources - instance.data["transfers"] = transfers + self.log.debug("publishDir: \"{}\"".format(publish_folder)) + self.log.debug("resourcesDir: \"{}\"".format(resources_folder)) From 6f26d0160ce62817843d935b10ba2a937e715a38 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 4 Feb 2020 18:11:32 +0100 Subject: [PATCH 030/107] integrate assumed destination was moved back with name integrate resources path --- .../publish/integrate_resources_path.py | 49 +++++++++++++++++++ 1 file changed, 49 insertions(+) create mode 100644 pype/plugins/global/publish/integrate_resources_path.py diff --git a/pype/plugins/global/publish/integrate_resources_path.py b/pype/plugins/global/publish/integrate_resources_path.py new file mode 100644 index 0000000000..56dc0e5ef7 --- /dev/null +++ b/pype/plugins/global/publish/integrate_resources_path.py @@ -0,0 +1,49 @@ +import os +import pyblish.api + + +class IntegrateResourcesPath(pyblish.api.InstancePlugin): +    """Generate directory path where the files and resources will be stored""" + +    label = "Integrate Resources Path" +    order = pyblish.api.IntegratorOrder - 0.05 +    families = ["clip", "projectfile", "plate"] + +    def process(self, instance): +        resources = instance.data.get("resources") or [] +        transfers = instance.data.get("transfers") or [] + +        if not resources and not transfers: +            self.log.debug( +                "Instance does not have `resources` and `transfers`" +            ) +            return + +        resources_folder = instance.data["resourcesDir"] + +        # Define resource destination and transfers +        for resource in resources: +            # Add destination to the resource +            source_filename = os.path.basename( +                resource["source"]).replace("\\", "/") +            destination = os.path.join(resources_folder, source_filename) + +            # Force forward slashes to fix issue with software unable +            # to work correctly with backslashes in specific scenarios +            #
(e.g. escape characters in PLN-151 V-Ray UDIM) + destination = destination.replace("\\", "/") + + resource['destination'] = destination + + # Collect transfers for the individual files of the resource + # e.g. all individual files of a cache or UDIM textures. + files = resource['files'] + for fsrc in files: + fname = os.path.basename(fsrc) + fdest = os.path.join( + resources_folder, fname + ).replace("\\", "/") + transfers.append([fsrc, fdest]) + + instance.data["resources"] = resources + instance.data["transfers"] = transfers From fcffa08177efd97ffc08bbf3520eb2be6a8d02f6 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 4 Feb 2020 18:27:03 +0100 Subject: [PATCH 031/107] extract look uses `resourcesDir` instead of computing itself --- pype/plugins/maya/publish/extract_look.py | 63 +---------------------- 1 file changed, 2 insertions(+), 61 deletions(-) diff --git a/pype/plugins/maya/publish/extract_look.py b/pype/plugins/maya/publish/extract_look.py index 4000011520..58196433aa 100644 --- a/pype/plugins/maya/publish/extract_look.py +++ b/pype/plugins/maya/publish/extract_look.py @@ -331,10 +331,9 @@ class ExtractLook(pype.api.Extractor): maya_path)) def resource_destination(self, instance, filepath, do_maketx): - anatomy = instance.context.data["anatomy"] - destination_dir = self.create_destination_template(instance, anatomy) + resources_dir = instance.data["resourcesDir"] # Compute destination location basename, ext = os.path.splitext(os.path.basename(filepath)) @@ -344,7 +343,7 @@ class ExtractLook(pype.api.Extractor): ext = ".tx" return os.path.join( - destination_dir, "resources", basename + ext + resources_dir, basename + ext ) def _process_texture(self, filepath, do_maketx, staging, linearise, force): @@ -408,61 +407,3 @@ class ExtractLook(pype.api.Extractor): return converted, COPY, texture_hash return filepath, COPY, texture_hash - - def create_destination_template(self, instance, anatomy): - """Create a filepath based on the current data available - - Example template: - {root}/{project}/{silo}/{asset}/publish/{subset}/v{version:0>3}/ - {subset}.{representation} - Args: - instance: the instance to publish - - Returns: - file path (str) - """ - - asset_entity = instance.context["assetEntity"] - - template_data = copy.deepcopy(instance.data["anatomyData"]) - - subset_name = instance.data["subset"] - self.log.info(subset_name) - - subset = io.find_one({ - "type": "subset", - "name": subset_name, - "parent": asset_entity["_id"] - }) - - # assume there is no version yet, we start at `1` - version = None - version_number = 1 - if subset is not None: - version = io.find_one( - { - "type": "version", - "parent": subset["_id"] - }, - sort=[("name", -1)] - ) - - # if there is a subset there ought to be version - if version is not None: - version_number += version["name"] - - if instance.data.get('version'): - version_number = int(instance.data.get('version')) - - anatomy = instance.context.data["anatomy"] - padding = int(anatomy.templates['render']['padding']) - - template_data.update({ - "subset": subset_name, - "frame": ("#" * padding), - "version": version_number, - "representation": "TEMP" - }) - anatomy_filled = anatomy.format(template_data) - - return os.path.dirname(anatomy_filled["publish"]["path"]) From e92537d34a9c63b7cf09f1b4a46f11c30d76e90d Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 4 Feb 2020 18:27:34 +0100 Subject: [PATCH 032/107] extract effects can compute resources dir with anatomyData (need changes) --- .../nukestudio/publish/extract_effects.py | 182 
+++++++++--------- 1 file changed, 96 insertions(+), 86 deletions(-) diff --git a/pype/plugins/nukestudio/publish/extract_effects.py b/pype/plugins/nukestudio/publish/extract_effects.py index 15d2a80a55..9e43bee1c8 100644 --- a/pype/plugins/nukestudio/publish/extract_effects.py +++ b/pype/plugins/nukestudio/publish/extract_effects.py @@ -2,10 +2,12 @@ import os import json import re +import copy import pyblish.api import tempfile from avalon import io, api + class ExtractVideoTracksLuts(pyblish.api.InstancePlugin): """Collect video tracks effects into context.""" @@ -71,9 +73,11 @@ class ExtractVideoTracksLuts(pyblish.api.InstancePlugin): ) data["source"] = data["sourcePath"] + # WARNING instance should not be created in Extractor! # create new instance instance = instance.context.create_instance(**data) - + # TODO replace line below with `instance.data["resourcesDir"]` + # when instance is created during collection part dst_dir = self.resource_destination_dir(instance) # change paths in effects to files @@ -141,103 +145,109 @@ class ExtractVideoTracksLuts(pyblish.api.InstancePlugin): return (v, dst) def resource_destination_dir(self, instance): - anatomy = instance.context.data['anatomy'] - self.create_destination_template(instance, anatomy) + # WARNING this is from `collect_instance_anatomy_data.py` + anatomy_data = copy.deepcopy(instance.context.data["anatomyData"]) + project_entity = instance.context.data["projectEntity"] + context_asset_entity = instance.context.data["assetEntity"] - return os.path.join( - instance.data["assumedDestination"], - "resources" - ) - - def create_destination_template(self, instance, anatomy): - """Create a filepath based on the current data available - - Example template: - {root}/{project}/{silo}/{asset}/publish/{subset}/v{version:0>3}/ - {subset}.{representation} - Args: - instance: the instance to publish - - Returns: - file path (str) - """ - - # get all the stuff from the database - subset_name = instance.data["subset"] - self.log.info(subset_name) asset_name = instance.data["asset"] - project_name = api.Session["AVALON_PROJECT"] - a_template = anatomy.templates + if context_asset_entity["name"] == asset_name: + asset_entity = context_asset_entity - project = io.find_one( - { - "type": "project", - "name": project_name - }, - projection={"config": True, "data": True} - ) + else: + asset_entity = io.find_one({ + "type": "asset", + "name": asset_name, + "parent": project_entity["_id"] + }) - template = a_template['publish']['path'] - # anatomy = instance.context.data['anatomy'] + subset_name = instance.data["subset"] + version_number = instance.data.get("version") + latest_version = None - asset = io.find_one({ - "type": "asset", - "name": asset_name, - "parent": project["_id"] + if asset_entity: + subset_entity = io.find_one({ + "type": "subset", + "name": subset_name, + "parent": asset_entity["_id"] + }) + + if subset_entity is None: + self.log.debug("Subset entity does not exist yet.") + else: + version_entity = io.find_one( + { + "type": "version", + "parent": subset_entity["_id"] + }, + sort=[("name", -1)] + ) + if version_entity: + latest_version = version_entity["name"] + + if version_number is None: + version_number = 1 + if latest_version is not None: + version_number += int(latest_version) + + anatomy_data.update({ + "asset": asset_name, + "family": instance.data["family"], + "subset": subset_name, + "version": version_number }) - assert asset, ("No asset found by the name '{}' " - "in project '{}'".format(asset_name, project_name)) - silo = 
asset.get('silo') + resolution_width = instance.data.get("resolutionWidth") + if resolution_width: + anatomy_data["resolution_width"] = resolution_width - subset = io.find_one({ - "type": "subset", - "name": subset_name, - "parent": asset["_id"] + resolution_height = instance.data.get("resolutionHeight") + if resolution_height: + anatomy_data["resolution_height"] = resolution_height + + fps = instance.data.get("fps") + if fps: + anatomy_data["fps"] = fps + + instance.data["projectEntity"] = project_entity + instance.data["assetEntity"] = asset_entity + instance.data["anatomyData"] = anatomy_data + instance.data["latestVersion"] = latest_version + instance.data["version"] = version_number + + # WARNING this is from `collect_resources_path.py` + anatomy = instance.context.data["anatomy"] + + template_data = copy.deepcopy(instance.data["anatomyData"]) + + # This is for cases of Deprecated anatomy without `folder` + # TODO remove when all clients have solved this issue + template_data.update({ + "frame": "FRAME_TEMP", + "representation": "TEMP" }) - # assume there is no version yet, we start at `1` - version = None - version_number = 1 - if subset is not None: - version = io.find_one( - { - "type": "version", - "parent": subset["_id"] - }, - sort=[("name", -1)] - ) + anatomy_filled = anatomy.format(template_data) - # if there is a subset there ought to be version - if version is not None: - version_number += version["name"] + if "folder" in anatomy.templates["publish"]: + publish_folder = anatomy_filled["publish"]["folder"] + else: + # solve deprecated situation when `folder` key is not underneath + # `publish` anatomy + project_name = api.Session["AVALON_PROJECT"] + self.log.warning(( + "Deprecation warning: Anatomy does not have set `folder`" + " key underneath `publish` (in global or for project `{}`)."
+ ).format(project_name)) - if instance.data.get('version'): - version_number = int(instance.data.get('version')) + file_path = anatomy_filled["publish"]["path"] + # Directory + publish_folder = os.path.dirname(file_path) - padding = int(a_template['render']['padding']) + publish_folder = os.path.normpath(publish_folder) + resources_folder = os.path.join(publish_folder, "resources") - hierarchy = asset['data']['parents'] - if hierarchy: - # hierarchy = os.path.sep.join(hierarchy) - hierarchy = "/".join(hierarchy) + instance.data["publishDir"] = publish_folder + instance.data["resourcesDir"] = resources_folder - template_data = {"root": api.Session["AVALON_PROJECTS"], - "project": {"name": project_name, - "code": project['data']['code']}, - "silo": silo, - "family": instance.data['family'], - "asset": asset_name, - "subset": subset_name, - "frame": ('#' * padding), - "version": version_number, - "hierarchy": hierarchy, - "representation": "TEMP"} - - instance.data["assumedTemplateData"] = template_data - self.log.info(template_data) - instance.data["template"] = template - # We take the parent folder of representation 'filepath' - instance.data["assumedDestination"] = os.path.dirname( - anatomy.format(template_data)["publish"]["path"] - ) + return resources_folder From f6e6220869a53f7411b55e03468761a9f5f7c323 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 4 Feb 2020 18:54:58 +0100 Subject: [PATCH 033/107] extract yeti rig uses resourcesDir --- pype/plugins/maya/publish/extract_yeti_rig.py | 12 +----------- 1 file changed, 1 insertion(+), 11 deletions(-) diff --git a/pype/plugins/maya/publish/extract_yeti_rig.py b/pype/plugins/maya/publish/extract_yeti_rig.py index d390a1365a..70a509564f 100644 --- a/pype/plugins/maya/publish/extract_yeti_rig.py +++ b/pype/plugins/maya/publish/extract_yeti_rig.py @@ -1,7 +1,6 @@ import os import json import contextlib -import copy from maya import cmds @@ -111,16 +110,7 @@ class ExtractYetiRig(pype.api.Extractor): self.log.info("Writing metadata file") - # Create assumed destination folder for imageSearchPath - template_data = copy.deepcopy(instance.data["anatomyData"]) - - anatomy = instance.context["anatomy"] - filled = anatomy.format(template_data) - - destination_folder = os.path.dir(filled["publish"]["path"]) - - image_search_path = os.path.join(destination_folder, "resources") - image_search_path = os.path.normpath(image_search_path) + image_search_path = resources_dir = instance.data["resourcesDir"] settings = instance.data.get("rigsettings", None) if settings: From f6ae5b2213b6ee21f2e27f8a2a347a669259cc12 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 5 Feb 2020 09:59:25 +0100 Subject: [PATCH 034/107] task name is also checked --- .../global/publish/collect_instance_anatomy_data.py | 12 +++++++++--- pype/plugins/global/publish/integrate_new.py | 4 ++++ 2 files changed, 13 insertions(+), 3 deletions(-) diff --git a/pype/plugins/global/publish/collect_instance_anatomy_data.py b/pype/plugins/global/publish/collect_instance_anatomy_data.py index 8a98b6cbb2..9c6a8b08f2 100644 --- a/pype/plugins/global/publish/collect_instance_anatomy_data.py +++ b/pype/plugins/global/publish/collect_instance_anatomy_data.py @@ -87,13 +87,19 @@ class CollectInstanceAnatomyData(pyblish.api.InstancePlugin): if latest_version is not None: version_number += int(latest_version) - # Version should not be collected since may be instance - anatomy_data.update({ + anatomy_updates = { "asset": asset_name, "family": instance.data["family"], "subset": subset_name, "version": 
version_number - }) + } + + task_name = instance.data.get("task") + if task_name: + anatomy_updates["task"] = task_name + + # Version should not be collected since may be instance + anatomy_data.update(anatomy_updates) resolution_width = instance.data.get("resolutionWidth") if resolution_width: diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py index 570a093cdc..d27582bb71 100644 --- a/pype/plugins/global/publish/integrate_new.py +++ b/pype/plugins/global/publish/integrate_new.py @@ -132,6 +132,10 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): hierarchy = "/".join(parents) anatomy_data["hierarchy"] = hierarchy + task_name = instance.data.get("task") + if task_name: + anatomy_data["task"] = task_name + stagingdir = instance.data.get("stagingDir") if not stagingdir: self.log.info(( From 75b603d845fe44d6ba5f39268137ca0f6128763e Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 6 Feb 2020 10:46:03 +0100 Subject: [PATCH 035/107] removed add datetime and add frame numbers --- pype/scripts/otio_burnin.py | 38 ------------------------------------- 1 file changed, 38 deletions(-) diff --git a/pype/scripts/otio_burnin.py b/pype/scripts/otio_burnin.py index f128352974..aca848dcfa 100644 --- a/pype/scripts/otio_burnin.py +++ b/pype/scripts/otio_burnin.py @@ -132,44 +132,6 @@ class ModifiedBurnins(ffmpeg_burnins.Burnins): options = ffmpeg_burnins.TextOptions(**self.options_init) self._add_burnin(text, align, options, ffmpeg_burnins.DRAWTEXT) - def add_datetime(self, date_format, align, options=None): - """ - Adding date text to a filter. Using pythons datetime module. - - :param str date_format: format of date (e.g. `%d.%m.%Y`) - :param enum align: alignment, must use provided enum flags - :param dict options: recommended to use TextOptions - """ - if not options: - options = ffmpeg_burnins.TextOptions(**self.options_init) - today = datetime.datetime.today() - text = today.strftime(date_format) - self._add_burnin(text, align, options, ffmpeg_burnins.DRAWTEXT) - - def add_frame_numbers( - self, align, options=None, start_frame=None, text=None - ): - """ - Convenience method to create the frame number expression. - - :param enum align: alignment, must use provided enum flags - :param dict options: recommended to use FrameNumberOptions - """ - if not options: - options = ffmpeg_burnins.FrameNumberOptions(**self.options_init) - if start_frame: - options['frame_offset'] = start_frame - - expr = r'%%{eif\:n+%d\:d}' % options['frame_offset'] - _text = str(int(self.end_frame + options['frame_offset'])) - if text and isinstance(text, str): - text = r"{}".format(text) - expr = text.replace("{current_frame}", expr) - text = text.replace("{current_frame}", _text) - - options['expression'] = expr - self._add_burnin(text, align, options, ffmpeg_burnins.DRAWTEXT) - def add_timecode(self, align, options=None, start_frame=None): """ Convenience method to create the frame number expression.
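[Note on the burnin rework that the following commits implement: the per-function presets ("text", "timecode", "frame_numbers") are replaced with plain text templates. Every alignment key maps to a string, `{current_frame}` and `{timecode}` are expanded through the CURRENT_FRAME_KEY and TIME_CODE_KEY constants, and keys missing from the data are replaced with MISSING_KEY_VALUE ("N/A"). A minimal sketch of the new-style input, assuming illustrative preset values and data keys taken from the docstring and processing code in the commits below:

    # Hypothetical preset/data pair for the reworked burnins_from_data();
    # alignment keys may also be lowercase after the case-insensitive check.
    presets = {
        "burnins": {
            "top_left": "{shot}",               # any key present in `data`
            "bottom_left": "TC: {timecode}",    # uses frame_start / frame_start_tc
            "bottom_right": "{current_frame}",  # requires frame_start
        }
    }
    data = {
        "frame_start": 1001,     # required by {current_frame} and {timecode}
        "frame_start_tc": 1,     # optional separate start for {timecode}
        "shot": "sh0010",
    }
]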
From 8c75c74cdec4c08225723f9f8328046999588735 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 6 Feb 2020 10:49:59 +0100 Subject: [PATCH 036/107] print command before run --- pype/scripts/otio_burnin.py | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/pype/scripts/otio_burnin.py b/pype/scripts/otio_burnin.py index aca848dcfa..b9d10ca23a 100644 --- a/pype/scripts/otio_burnin.py +++ b/pype/scripts/otio_burnin.py @@ -226,9 +226,13 @@ class ModifiedBurnins(ffmpeg_burnins.Burnins): is_sequence = "%" in output - command = self.command(output=output, - args=args, - overwrite=overwrite) + command = self.command( + output=output, + args=args, + overwrite=overwrite + ) + print(command) + proc = Popen(command, shell=True) proc.communicate() if proc.returncode != 0: From 8e86f6e37a0b6fb4ce794e67372b65d49f85b813 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 6 Feb 2020 10:50:18 +0100 Subject: [PATCH 037/107] implemented custom drawtext and timecode constants --- pype/scripts/otio_burnin.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/pype/scripts/otio_burnin.py b/pype/scripts/otio_burnin.py index b9d10ca23a..00d63939e7 100644 --- a/pype/scripts/otio_burnin.py +++ b/pype/scripts/otio_burnin.py @@ -27,6 +27,15 @@ FFPROBE = ( '{} -v quiet -print_format json -show_format -show_streams %(source)s' ).format(os.path.normpath(ffmpeg_path + "ffprobe")) +DRAWTEXT = ( + "drawtext=text=\\'%(text)s\\':x=%(x)s:y=%(y)s:fontcolor=" + "%(color)s@%(opacity).1f:fontsize=%(size)d:fontfile='%(font)s'" +) +TIMECODE = ( + "drawtext=text=\\'%(text)s\\':timecode=\\'%(timecode)s\\'" + ":timecode_rate=%(fps).2f:x=%(x)s:y=%(y)s:fontcolor=" + "%(color)s@%(opacity).1f:fontsize=%(size)d:fontfile='%(font)s'" +) def _streams(source): """Reimplemented from otio burnins to be able use full path to ffprobe :param str source: source media file """ From 4f862acfb8af3b12315008bd1e95b773fd58fc56 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 6 Feb 2020 10:50:51 +0100 Subject: [PATCH 038/107] added constants for easier handling of entered keys --- pype/scripts/otio_burnin.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/pype/scripts/otio_burnin.py b/pype/scripts/otio_burnin.py index 00d63939e7..e0df769db4 100644 --- a/pype/scripts/otio_burnin.py +++ b/pype/scripts/otio_burnin.py @@ -37,6 +37,11 @@ TIMECODE = ( "%(color)s@%(opacity).1f:fontsize=%(size)d:fontfile='%(font)s'" ) +MISSING_KEY_VALUE = "N/A" +CURRENT_FRAME_KEY = "{current_frame}" +TIME_CODE_KEY = "{timecode}" + + def _streams(source): """Reimplemented from otio burnins to be able use full path to ffprobe :param str source: source media file """ From b73fcc6730e7b3367988a7aa636f363b60d82204 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 6 Feb 2020 10:51:09 +0100 Subject: [PATCH 039/107] modified docstring --- pype/scripts/otio_burnin.py | 28 ++++++++--------------------- 1 file changed, 8 insertions(+), 20 deletions(-) diff --git a/pype/scripts/otio_burnin.py b/pype/scripts/otio_burnin.py index e0df769db4..79565af22a 100644 --- a/pype/scripts/otio_burnin.py +++ b/pype/scripts/otio_burnin.py @@ -307,34 +307,22 @@ def burnins_from_data(input_path, codec_data, output_path, data, overwrite=True) - each key of "burnins" represents Alignment, there are 6 possibilities: TOP_LEFT TOP_CENTERED TOP_RIGHT BOTTOM_LEFT BOTTOM_CENTERED BOTTOM_RIGHT - - value for each key is dict which should contain "function" which says - what kind of burnin is that: - "text", "timecode" or "frame_numbers" - - "text" key with content is also required when "text" function is used + - value
must be a string with text you want to burn-in + - text may contain specific formatting keys (explained below) Requirement of *data* keys is based on presets. - - "start_frame" - is required when "timecode" or "frame_numbers" function is used - - "start_frame_tc" - when "timecode" should start with different frame + - "frame_start" - is required when "timecode" or "current_frame" is in keys + - "frame_start_tc" - when "timecode" should start with different frame - *keys for static text* EXAMPLE: preset = { "options": {*OPTIONS FOR LOOK*}, "burnins": { - "TOP_LEFT": { - "function": "text", - "text": "static_text" - }, - "TOP_RIGHT": { - "function": "text", - "text": "{shot}" - }, - "BOTTOM_LEFT": { - "function": "timecode" - }, - "BOTTOM_RIGHT": { - "function": "frame_numbers" - } + "TOP_LEFT": "static_text", + "TOP_RIGHT": "{shot}", + "BOTTOM_LEFT": "TC: {timecode}", + "BOTTOM_RIGHT": "{frame_start}{current_frame}" } } From d263cc3bfd0029b788d8ce4ff1bea405765bf3ef Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 6 Feb 2020 10:51:37 +0100 Subject: [PATCH 040/107] data variable in __main__ was renamed to in_data to not be overridden during processing --- pype/scripts/otio_burnin.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/pype/scripts/otio_burnin.py b/pype/scripts/otio_burnin.py index 79565af22a..9564982980 100644 --- a/pype/scripts/otio_burnin.py +++ b/pype/scripts/otio_burnin.py @@ -439,10 +439,10 @@ def burnins_from_data(input_path, codec_data, output_path, data, overwrite=True) if __name__ == '__main__': import sys import json - data = json.loads(sys.argv[-1]) + in_data = json.loads(sys.argv[-1]) burnins_from_data( - data['input'], - data['codec'], - data['output'], - data['burnin_data'] + in_data['input'], + in_data['codec'], + in_data['output'], + in_data['burnin_data'] ) From be088579be01d7d5db473133d7c49f245aeec10c Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 6 Feb 2020 10:52:26 +0100 Subject: [PATCH 041/107] alignment check lowercases string (it is possible to use `top_left` in presets) --- pype/scripts/otio_burnin.py | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/pype/scripts/otio_burnin.py b/pype/scripts/otio_burnin.py index 9564982980..85e72245cd 100644 --- a/pype/scripts/otio_burnin.py +++ b/pype/scripts/otio_burnin.py @@ -359,17 +359,18 @@ def burnins_from_data(input_path, codec_data, output_path, data, overwrite=True) for align_text, preset in presets.get('burnins', {}).items(): align = None - if align_text == 'TOP_LEFT': + align_text = align_text.strip().lower() + if align_text == "top_left": align = ModifiedBurnins.TOP_LEFT - elif align_text == 'TOP_CENTERED': + elif align_text == "top_centered": align = ModifiedBurnins.TOP_CENTERED - elif align_text == 'TOP_RIGHT': + elif align_text == "top_right": align = ModifiedBurnins.TOP_RIGHT - elif align_text == 'BOTTOM_LEFT': + elif align_text == "bottom_left": align = ModifiedBurnins.BOTTOM_LEFT - elif align_text == 'BOTTOM_CENTERED': + elif align_text == "bottom_centered": align = ModifiedBurnins.BOTTOM_CENTERED - elif align_text == 'BOTTOM_RIGHT': + elif align_text == "bottom_right": align = ModifiedBurnins.BOTTOM_RIGHT bi_func = preset.get('function') From ca19b5d6798ef3535544b36340cea82a26ba7ff5 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 6 Feb 2020 10:56:51 +0100 Subject: [PATCH 042/107] add_text can accept frame_start argument --- pype/scripts/otio_burnin.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git
a/pype/scripts/otio_burnin.py b/pype/scripts/otio_burnin.py index 85e72245cd..d913baa5e2 100644 --- a/pype/scripts/otio_burnin.py +++ b/pype/scripts/otio_burnin.py @@ -134,17 +134,21 @@ class ModifiedBurnins(ffmpeg_burnins.Burnins): if options_init: self.options_init.update(options_init) - def add_text(self, text, align, options=None): + def add_text(self, text, align, frame_start=None, options=None): """ Adding static text to a filter. :param str text: text to apply to the drawtext :param enum align: alignment, must use provided enum flags + :param int frame_start: starting frame for burnins :param dict options: recommended to use TextOptions """ if not options: options = ffmpeg_burnins.TextOptions(**self.options_init) - self._add_burnin(text, align, options, ffmpeg_burnins.DRAWTEXT) + + options = options.copy() + if frame_start: + options["frame_offset"] = frame_start def add_timecode(self, align, options=None, start_frame=None): """ From 5d5d3eec92d892ddae1845cbabada0847c739471 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 6 Feb 2020 10:57:11 +0100 Subject: [PATCH 043/107] add_text use custom DRAWTEXT ffmpeg string --- pype/scripts/otio_burnin.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/pype/scripts/otio_burnin.py b/pype/scripts/otio_burnin.py index d913baa5e2..be4ec3e57d 100644 --- a/pype/scripts/otio_burnin.py +++ b/pype/scripts/otio_burnin.py @@ -150,6 +150,8 @@ class ModifiedBurnins(ffmpeg_burnins.Burnins): if frame_start: options["frame_offset"] = frame_start + self._add_burnin(text, align, options, DRAWTEXT) + def add_timecode(self, align, options=None, start_frame=None): """ Convenience method to create the frame number expression. From 96d3e51d9200cf04e4b63705a727d381c48a286e Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 6 Feb 2020 11:00:29 +0100 Subject: [PATCH 044/107] add timecode allows to add text and use custom TIMECODE ffmpeg string --- pype/scripts/otio_burnin.py | 39 +++++++++++++++++++++++-------------- 1 file changed, 24 insertions(+), 15 deletions(-) diff --git a/pype/scripts/otio_burnin.py b/pype/scripts/otio_burnin.py index be4ec3e57d..67b85f9ba4 100644 --- a/pype/scripts/otio_burnin.py +++ b/pype/scripts/otio_burnin.py @@ -140,7 +140,7 @@ class ModifiedBurnins(ffmpeg_burnins.Burnins): :param str text: text to apply to the drawtext :param enum align: alignment, must use provided enum flags - :param int frame_start: starting frame for burnins + :param int frame_start: starting frame for burnins current frame :param dict options: recommended to use TextOptions """ if not options: @@ -152,32 +152,41 @@ class ModifiedBurnins(ffmpeg_burnins.Burnins): self._add_burnin(text, align, options, DRAWTEXT) - def add_timecode(self, align, options=None, start_frame=None): + def add_timecode( + self, align, frame_start=None, frame_start_tc=None, text=None, + options=None + ): """ Convenience method to create the frame number expression. 
:param enum align: alignment, must use provided enum flags + :param int frame_start: starting frame for burnins current frame + :param int frame_start_tc: starting frame for burnins timecode + :param str text: text that will be before timecode :param dict options: recommended to use TimeCodeOptions """ if not options: options = ffmpeg_burnins.TimeCodeOptions(**self.options_init) - if start_frame: - options['frame_offset'] = start_frame - timecode = ffmpeg_burnins._frames_to_timecode( - options['frame_offset'], + options = options.copy() + if frame_start: + options["frame_offset"] = frame_start + + if not frame_start_tc: + frame_start_tc = options["frame_offset"] + + if not text: + text = "" + + if not options.get("fps"): + options["fps"] = self.frame_rate + + options["timecode"] = ffmpeg_burnins._frames_to_timecode( + frame_start_tc, self.frame_rate ) - options = options.copy() - if not options.get('fps'): - options['fps'] = self.frame_rate - self._add_burnin( - timecode.replace(':', r'\:'), - align, - options, - ffmpeg_burnins.TIMECODE - ) + self._add_burnin(text, align, options, TIMECODE) def _add_burnin(self, text, align, options, draw): """ From defe60e5566ec8a251802636430843650a9115d4 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 6 Feb 2020 11:01:37 +0100 Subject: [PATCH 045/107] add burnin does not use expression but only text --- pype/scripts/otio_burnin.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pype/scripts/otio_burnin.py b/pype/scripts/otio_burnin.py index 67b85f9ba4..39bf963342 100644 --- a/pype/scripts/otio_burnin.py +++ b/pype/scripts/otio_burnin.py @@ -197,7 +197,7 @@ class ModifiedBurnins(ffmpeg_burnins.Burnins): """ resolution = self.resolution data = { - 'text': options.get('expression') or text, + 'text': text, 'color': options['font_color'], 'size': options['font_size'] } From ca2279e710dcf15e8545c3a904027508d9989435 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 6 Feb 2020 11:02:01 +0100 Subject: [PATCH 046/107] _drawtext must count text sizes with timecode text --- pype/scripts/otio_burnin.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/pype/scripts/otio_burnin.py b/pype/scripts/otio_burnin.py index 39bf963342..4c1301becf 100644 --- a/pype/scripts/otio_burnin.py +++ b/pype/scripts/otio_burnin.py @@ -201,8 +201,12 @@ class ModifiedBurnins(ffmpeg_burnins.Burnins): 'color': options['font_color'], 'size': options['font_size'] } + timecode_text = options.get("timecode") or "" + text_for_size = text + timecode_text data.update(options) - data.update(ffmpeg_burnins._drawtext(align, resolution, text, options)) + data.update( + ffmpeg_burnins._drawtext(align, resolution, text_for_size, options) + ) if 'font' in data and ffmpeg_burnins._is_windows(): data['font'] = data['font'].replace(os.sep, r'\\' + os.sep) data['font'] = data['font'].replace(':', r'\:') From 39e785aefb6e4a48b5a8ea215a06070c11c2f425 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 6 Feb 2020 11:02:14 +0100 Subject: [PATCH 047/107] docstring changes --- pype/scripts/otio_burnin.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pype/scripts/otio_burnin.py b/pype/scripts/otio_burnin.py index 4c1301becf..73de2f2827 100644 --- a/pype/scripts/otio_burnin.py +++ b/pype/scripts/otio_burnin.py @@ -347,14 +347,14 @@ def burnins_from_data(input_path, codec_data, output_path, data, overwrite=True) For this preset we'll need at least this data: data = { - "start_frame": 1001, + "frame_start": 1001, "shot": "sh0010" } When Timecode
should start from 1 then data needs: data = { - "start_frame": 1001, - "start_frame_tc": 1, + "frame_start": 1001, + "frame_start_tc": 1, "shot": "sh0010" } ''' From 9a8c3b56a22cf333909e8dad8fc064a9164c0d1e Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 6 Feb 2020 11:02:59 +0100 Subject: [PATCH 048/107] changed data processing to not use functions but only text --- pype/scripts/otio_burnin.py | 99 +++++++++++++++++-------------------- 1 file changed, 45 insertions(+), 54 deletions(-) diff --git a/pype/scripts/otio_burnin.py b/pype/scripts/otio_burnin.py index 73de2f2827..e7464cdc7c 100644 --- a/pype/scripts/otio_burnin.py +++ b/pype/scripts/otio_burnin.py @@ -368,15 +368,27 @@ def burnins_from_data(input_path, codec_data, output_path, data, overwrite=True) stream = burnin._streams[0] if "resolution_width" not in data: - data["resolution_width"] = stream.get("width", "Unknown") + data["resolution_width"] = stream.get("width", MISSING_KEY_VALUE) if "resolution_height" not in data: - data["resolution_height"] = stream.get("height", "Unknown") + data["resolution_height"] = stream.get("height", MISSING_KEY_VALUE) if "fps" not in data: data["fps"] = get_fps(stream.get("r_frame_rate", "0/0")) - for align_text, preset in presets.get('burnins', {}).items(): + # Check frame start and add expression if is available + if frame_start is not None: + data[CURRENT_FRAME_KEY] = r'%%{eif\:n+%d\:d}' % frame_start + + if frame_start_tc is not None: + data[TIME_CODE_KEY[1:-1]] = TIME_CODE_KEY + + for align_text, value in presets.get('burnins', {}).items(): + if not value: + continue + + has_timecode = TIME_CODE_KEY in value + align = None align_text = align_text.strip().lower() if align_text == "top_left": @@ -392,65 +404,44 @@ def burnins_from_data(input_path, codec_data, output_path, data, overwrite=True) elif align_text == "bottom_right": align = ModifiedBurnins.BOTTOM_RIGHT - bi_func = preset.get('function') - if not bi_func: - log.error( - 'Missing function for burnin!' - 'Burnins are not created!' + # Replace with missing key value if frame_start_tc is not set + if frame_start_tc is None and has_timecode: + has_timecode = False + log.warning( + "`frame_start` and `frame_start_tc`" + " are not set in entered data." ) - return + value = value.replace(TIME_CODE_KEY, MISSING_KEY_VALUE) - if ( - bi_func in ['frame_numbers', 'timecode'] and - frame_start is None - ): - log.error( - 'start_frame is not set in entered data!' - 'Burnins are not created!' - ) - return + key_pattern = re.compile(r"(\{.*?[^{0]*\})") - if bi_func == 'frame_numbers': - current_frame_identifier = "{current_frame}" - text = preset.get('text') or current_frame_identifier + missing_keys = [] + for group in key_pattern.findall(value): + try: + group.format(**data) + except (TypeError, KeyError): + missing_keys.append(group) - if current_frame_identifier not in text: - log.warning(( - 'Text for Frame numbers don\'t have ' - '`{current_frame}` key in text!'
- )) + missing_keys = list(set(missing_keys)) + for key in missing_keys: + value = value.replace(key, MISSING_KEY_VALUE) - text_items = [] - split_items = text.split(current_frame_identifier) - for item in split_items: - text_items.append(item.format(**data)) + # Handle timecode differently + if has_timecode: + args = [align, frame_start, frame_start_tc] + if not value.startswith(TIME_CODE_KEY): + value_items = value.split(TIME_CODE_KEY) + text = value_items[0].format(**data) + args.append(value_items[0]) - text = "{current_frame}".join(text_items) + burnin.add_timecode(*args) + continue - burnin.add_frame_numbers(align, start_frame=frame_start, text=text) + text = value.format(**data) + burnin.add_text(text, align, frame_start) - elif bi_func == 'timecode': - burnin.add_timecode(align, start_frame=frame_start_tc) - - elif bi_func == 'text': - if not preset.get('text'): - log.error('Text is not set for text function burnin!') - return - text = preset['text'].format(**data) - burnin.add_text(text, align) - - elif bi_func == "datetime": - date_format = preset["format"] - burnin.add_datetime(date_format, align) - - else: - log.error( - 'Unknown function for burnins {}'.format(bi_func) - ) - return - - codec_args = '' - if codec_data is not []: + codec_args = "" + if codec_data: codec_args = " ".join(codec_data) burnin.render(output_path, args=codec_args, overwrite=overwrite, **data) From 1033f779d1a72d33365ec197b398a6f41cf478f9 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 6 Feb 2020 11:03:58 +0100 Subject: [PATCH 049/107] codec moved to optional args because is optional --- pype/scripts/otio_burnin.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/pype/scripts/otio_burnin.py b/pype/scripts/otio_burnin.py index e7464cdc7c..bc45e45f82 100644 --- a/pype/scripts/otio_burnin.py +++ b/pype/scripts/otio_burnin.py @@ -302,7 +302,9 @@ def example(input_path, output_path): burnin.render(output_path, overwrite=True) -def burnins_from_data(input_path, codec_data, output_path, data, overwrite=True): +def burnins_from_data( + input_path, output_path, data, codec_data=None, overwrite=True +): ''' This method adds burnins to video/image file based on presets setting. Extension of output MUST be same as input. (mov -> mov, avi -> avi,...) 
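[For context: with the reordered signature above, `codec_data` becomes a trailing optional argument. A minimal call sketch under that signature; the paths, burnin data, and codec flags are illustrative only:

    # Keys follow the renamed "frame_start"/"frame_start_tc" convention from
    # PATCH 047; codec_data is a list of ffmpeg arguments joined with spaces.
    burnins_from_data(
        "/path/to/input.mov",
        "/path/to/output.mov",
        {"frame_start": 1001, "frame_start_tc": 1, "shot": "sh0010"},
        codec_data=["-codec:v", "prores_ks"]  # optional, may be omitted
    )
]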
@@ -453,7 +455,7 @@ if __name__ == '__main__': in_data = json.loads(sys.argv[-1]) burnins_from_data( in_data['input'], - in_data['codec'], in_data['output'], - in_data['burnin_data'] + in_data['burnin_data'], + in_data['codec'] ) From f19235f91e4492331f04df281049d8984716fcdd Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 6 Feb 2020 11:04:08 +0100 Subject: [PATCH 050/107] added forgotten import --- pype/scripts/otio_burnin.py | 1 + 1 file changed, 1 insertion(+) diff --git a/pype/scripts/otio_burnin.py b/pype/scripts/otio_burnin.py index bc45e45f82..8a95542c04 100644 --- a/pype/scripts/otio_burnin.py +++ b/pype/scripts/otio_burnin.py @@ -1,4 +1,5 @@ import os +import re import datetime import subprocess import json From 6be774b1f8716471e28beb2659f3d27750df6f4e Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 6 Feb 2020 11:04:43 +0100 Subject: [PATCH 051/107] removed imports from __main__ --- pype/scripts/otio_burnin.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/pype/scripts/otio_burnin.py b/pype/scripts/otio_burnin.py index 8a95542c04..6c1e19690b 100644 --- a/pype/scripts/otio_burnin.py +++ b/pype/scripts/otio_burnin.py @@ -1,4 +1,5 @@ import os +import sys import re import datetime import subprocess @@ -451,8 +452,6 @@ def burnins_from_data( if __name__ == '__main__': - import sys - import json in_data = json.loads(sys.argv[-1]) burnins_from_data( in_data['input'], From a2d07a89a9fa19b007c0565459df4973bbf1710d Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 6 Feb 2020 11:06:31 +0100 Subject: [PATCH 052/107] removed deprecated method usage in example --- pype/scripts/otio_burnin.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/pype/scripts/otio_burnin.py b/pype/scripts/otio_burnin.py index 6c1e19690b..590939df56 100644 --- a/pype/scripts/otio_burnin.py +++ b/pype/scripts/otio_burnin.py @@ -296,10 +296,6 @@ def example(input_path, output_path): burnin.add_text('My Text', ModifiedBurnins.TOP_CENTERED) # Datetime burnin.add_text('%d-%m-%y', ModifiedBurnins.TOP_RIGHT) - # Frame number - burnin.add_frame_numbers(ModifiedBurnins.TOP_RIGHT, start_frame=start_frame) - # Timecode - burnin.add_timecode(ModifiedBurnins.TOP_LEFT, start_frame=start_frame) # Start render (overwrite output file if exist) burnin.render(output_path, overwrite=True) From ae387d09778607ec56b12c2d9d75a9e74740786a Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 7 Feb 2020 11:39:22 +0100 Subject: [PATCH 053/107] added subproces for status --- pype/ftrack/ftrack_server/sub_event_info.py | 411 ++++++++++++++++++++ 1 file changed, 411 insertions(+) create mode 100644 pype/ftrack/ftrack_server/sub_event_info.py diff --git a/pype/ftrack/ftrack_server/sub_event_info.py b/pype/ftrack/ftrack_server/sub_event_info.py new file mode 100644 index 0000000000..d63b6acadd --- /dev/null +++ b/pype/ftrack/ftrack_server/sub_event_info.py @@ -0,0 +1,411 @@ +import os +import sys +import copy +import signal +import socket +import uuid +from datetime import datetime + +import ftrack_api +from ftrack_server import FtrackServer +from pype.ftrack.ftrack_server.lib import ( + SocketSession, SocketBaseEventHub, + TOPIC_STATUS_SERVER, TOPIC_STATUS_SERVER_RESULT +) +from pypeapp import Logger + +log = Logger().get_logger("Event storer") +log.info(os.environ.get("FTRACK_EVENT_SUB_ID")) + + +class ObjectFactory: + session = None + sock = None + subprocess_id = os.environ["FTRACK_EVENT_SUB_ID"] + status_factory = None + + +def trigger_status_info(status_id=None, status=None): + if not status and not status_id: + 
log.warning( + "`status_id` or `status` must be specified to trigger action." + ) + return + + if not status: + status = ObjectFactory.status_factory[status_id] + + if not status: + return + + new_event_data = copy.deepcopy(action_data) + new_event_data.update({ + "selection": [] + }) + new_event_data["subprocess_id"] = ObjectFactory.subprocess_id + new_event_data["status_id"] = status.id + + new_event = ftrack_api.event.base.Event( + topic="ftrack.action.launch", + data=new_event_data, + source=status.source + ) + ObjectFactory.session.event_hub.publish(new_event) + + +action_identifier = ( + "event.server.status" + ObjectFactory.subprocess_id +) + +# TODO add IP adress to label +# TODO add icon +action_data = { + "label": "Pype Admin", + "variant": "Event server Status", + "description": "Get Infromation about event server", + "actionIdentifier": action_identifier, + "icon": None +} + + +class Status: + default_item = { + "type": "label", + "value": "Information not allowed." + } + note_item = { + "type": "label", + "value": "Hit `submit` to refresh data." + } + splitter_item = { + "type": "label", + "value": "---" + } + + def __init__(self, source_info, parent): + self.id = str(uuid.uuid1()) + self.created = datetime.now() + self.parent = parent + + self.source = source_info + + self.main_process = None + self.storer = None + self.processor = None + + def add_result(self, source, data): + if source.lower() == "storer": + self.storer = data + + elif source.lower() == "processor": + self.processor = data + + else: + self.main_process = data + + def filled(self): + # WARNING DEBUG PART!!!! + return True + return ( + self.main_process is not None and + self.storer is not None and + self.processor is not None + ) + + def get_items_from_dict(self, in_dict): + items = [] + for key, value in in_dict.items(): + items.append({ + "type": "label", + "value": "##{}".format(key) + }) + items.append({ + "type": "label", + "value": value + }) + return items + + def bool_items(self): + items = [] + name_labels = { + "shutdown_main": "Shutdown main process", + "reset_storer": "Reset storer", + "reset_processor": "Reset processor" + } + for name, label in name_labels.items(): + items.append({ + "type": "boolean", + "value": False, + "label": label, + "name": name + }) + return items + + def items(self): + items = [] + items.append(self.note_item) + + items.append({"type": "label", "value": "Main process"}) + if not self.main_process: + items.append(self.default_item) + else: + items.extend( + self.get_items_from_dict(self.main_process) + ) + + items.append(self.splitter_item) + items.append({"type": "label", "value": "Storer process"}) + if not self.storer: + items.append(self.default_item) + else: + items.extend( + self.get_items_from_dict(self.storer) + ) + + items.append(self.splitter_item) + items.append({"type": "label", "value": "Processor process"}) + if not self.processor: + items.append(self.default_item) + else: + items.extend( + self.get_items_from_dict(self.processor) + ) + + items.append(self.splitter_item) + items.extend(self.bool_items()) + + return items + + @property + def is_overtime(self): + time_delta = (datetime.now() - self.created).total_seconds() + return time_delta >= self.parent.max_delta_seconds + + +class StatusFactory: + max_delta_seconds = 30 + + def __init__(self): + self.statuses = {} + + def __getitem__(self, key): + return self.statuses.get(key) + + def create_status(self, source_info): + new_status = Status(source_info, self) + self.statuses[new_status.id] = new_status + 
return new_status + + def process_result(self, event): + subprocess_id = event["data"].get("subprocess_id") + if subprocess_id != ObjectFactory.subprocess_id: + return + + status_id = event["data"].get("status_id") + status = self.statuses[status_id] + if not status: + return + + source = event["data"]["source"] + data = event["data"]["status_info"] + + status.add_result(source, data) + if status.filled(): + trigger_status_info(status=status) + + +def server_activity_validate_user(event): + """Validate user permissions to show server info.""" + session = ObjectFactory.session + + username = event["source"].get("user", {}).get("username") + if not username: + return False + + user_ent = session.query( + "User where username = \"{}\"".format(username) + ).first() + if not user_ent: + return False + + role_list = ["Pypeclub", "Administrator"] + for role in user_ent["user_security_roles"]: + if role["security_role"]["name"] in role_list: + return True + return False + + +def server_activity_discover(event): + """Discover action in actions menu conditions.""" + session = ObjectFactory.session + if session is None: + return + + if not server_activity_validate_user(event): + return + + return {"items": [action_data]} + + +def handle_filled_event(event): + subprocess_id = event["data"].get("subprocess_id") + if subprocess_id != ObjectFactory.subprocess_id: + return None + + status_id = event["data"].get("status_id") + status = ObjectFactory.status_factory[status_id] + if not status: + return None + + values = event.get("values") + if values: + log.info(values) + + title = "Event server - Status" + + event_data = copy.deepcopy(event["data"]) + event_data.update({ + "type": "widget", + "items": status.items(), + "title": title + }) + + ObjectFactory.session.event_hub.publish( + ftrack_api.event.base.Event( + topic="ftrack.action.trigger-user-interface", + data=event_data + ), + on_error='ignore' + ) + + +def server_activity(event): + session = ObjectFactory.session + if session is None: + msg = "Session is not set. Can't trigger Reset action." + log.warning(msg) + return { + "success": False, + "message": msg + } + + valid = server_activity_validate_user(event) + if not valid: + return { + "success": False, + "message": "You don't have permissions to see Event server status!" 
+ } + + subprocess_id = event["data"].get("subprocess_id") + if subprocess_id is not None: + return handle_filled_event(event) + + status = ObjectFactory.status_factory.create_status(event["source"]) + + event_data = { + "status_id": status.id, + "subprocess_id": ObjectFactory.subprocess_id + } + session.event_hub.publish( + ftrack_api.event.base.Event( + topic=TOPIC_STATUS_SERVER, + data=event_data + ), + on_error="ignore" + ) + + return { + "success": True, + "message": "Collecting information (this may take > 20s)" + } + + +def register(session): + '''Registers the event, subscribing the discover and launch topics.''' + session.event_hub.subscribe( + "topic=ftrack.action.discover", + server_activity_discover + ) + + status_launch_subscription = ( + "topic=ftrack.action.launch and data.actionIdentifier={}" + ).format(action_identifier) + + session.event_hub.subscribe( + status_launch_subscription, + server_activity + ) + + session.event_hub.subscribe( + "topic={}".format(TOPIC_STATUS_SERVER_RESULT), + ObjectFactory.status_factory.process_result + ) + + +def main(args): + port = int(args[-1]) + + # Create a TCP/IP socket + sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + + # Connect the socket to the port where the server is listening + server_address = ("localhost", port) + log.debug("Storer connected to {} port {}".format(*server_address)) + sock.connect(server_address) + sock.sendall(b"CreatedStatus") + # store socket connection object + ObjectFactory.sock = sock + ObjectFactory.status_factory = StatusFactory() + + _returncode = 0 + try: + session = SocketSession( + auto_connect_event_hub=True, sock=sock, Eventhub=SocketBaseEventHub + ) + ObjectFactory.session = session + register(session) + server = FtrackServer("event") + log.debug("Launched Ftrack Event storer") + server.run_server(session, load_files=False) + + except Exception: + _returncode = 1 + log.error("ServerInfo subprocess crashed", exc_info=True) + + finally: + log.debug("Ending. Closing socket.") + sock.close() + return _returncode + + +if __name__ == "__main__": + # Register interupt signal + def signal_handler(sig, frame): + print("You pressed Ctrl+C. 
Process ended.") + sys.exit(0) + + signal.signal(signal.SIGINT, signal_handler) + signal.signal(signal.SIGTERM, signal_handler) + + sys.exit(main(sys.argv)) + + +example_action_event = { + 'data': { + 'selection': [], + 'description': 'Test action2', + 'variant': None, + 'label': 'Test action2', + 'actionIdentifier': 'test.action2.3ceffe5e9acf40f8aa80603adebd0d06', + 'values': {}, + 'icon': None, + }, + 'topic': 'ftrack.action.launch', + 'sent': None, + 'source': { + 'id': 'eb67d186301c4cbbab73c1aee9b7c55d', + 'user': {'username': 'jakub.trllo', 'id': '2a8ae090-cbd3-11e8-a87a-0a580aa00121'} + }, + 'target': '', + 'in_reply_to_event': None +} From c937964dc8c80b54b95d5059670f845a83f4ca82 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 7 Feb 2020 12:13:22 +0100 Subject: [PATCH 054/107] added subprocess to event server cli --- pype/ftrack/ftrack_server/event_server_cli.py | 46 +++++++++++++++++++ 1 file changed, 46 insertions(+) diff --git a/pype/ftrack/ftrack_server/event_server_cli.py b/pype/ftrack/ftrack_server/event_server_cli.py index b09b0bc84e..b2c540e993 100644 --- a/pype/ftrack/ftrack_server/event_server_cli.py +++ b/pype/ftrack/ftrack_server/event_server_cli.py @@ -7,6 +7,7 @@ import socket import argparse import atexit import time +import uuid import ftrack_api from pype.ftrack.lib import credentials @@ -175,6 +176,7 @@ def main_loop(ftrack_url): otherwise thread will be killed. """ + os.environ["FTRACK_EVENT_SUB_ID"] = str(uuid.uuid1()) # Get mongo hostname and port for testing mongo connection mongo_list = ftrack_events_mongo_settings() mongo_hostname = mongo_list[0] @@ -202,6 +204,13 @@ def main_loop(ftrack_url): processor_last_failed = datetime.datetime.now() processor_failed_count = 0 + statuser_name = "StorerThread" + statuser_port = 10021 + statuser_path = "{}/sub_event_info.py".format(file_path) + statuser_thread = None + statuser_last_failed = datetime.datetime.now() + statuser_failed_count = 0 + ftrack_accessible = False mongo_accessible = False @@ -336,6 +345,43 @@ def main_loop(ftrack_url): processor_failed_count = 0 processor_last_failed = _processor_last_failed + if statuser_thread is None: + if statuser_failed_count < max_fail_count: + statuser_thread = socket_thread.SocketThread( + statuser_name, statuser_port, statuser_path + ) + statuser_thread.start() + + elif statuser_failed_count == max_fail_count: + print(( + "Statuser failed {}times in row" + " I'll try to run again {}s later" + ).format(str(max_fail_count), str(wait_time_after_max_fail))) + statuser_failed_count += 1 + + elif (( + datetime.datetime.now() - statuser_last_failed + ).seconds > wait_time_after_max_fail): + statuser_failed_count = 0 + + # If thread failed test Ftrack and Mongo connection + elif not statuser_thread.isAlive(): + statuser_thread.join() + statuser_thread = None + ftrack_accessible = False + mongo_accessible = False + + _processor_last_failed = datetime.datetime.now() + delta_time = ( + _processor_last_failed - statuser_last_failed + ).seconds + + if delta_time < min_fail_seconds: + statuser_failed_count += 1 + else: + statuser_failed_count = 0 + statuser_last_failed = _processor_last_failed + time.sleep(1) From fa60c87c3e0f9e9261dd9b9e5c8b4188c50e0b4f Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 7 Feb 2020 18:28:29 +0100 Subject: [PATCH 055/107] created base EventHub that can set callbacks on heartbeat and set message for sockets on heartbeat --- pype/ftrack/ftrack_server/lib.py | 20 +++++++++++++++----- 1 file changed, 15 insertions(+), 5 deletions(-) diff --git 
a/pype/ftrack/ftrack_server/lib.py b/pype/ftrack/ftrack_server/lib.py index fefba580e0..2617b63614 100644 --- a/pype/ftrack/ftrack_server/lib.py +++ b/pype/ftrack/ftrack_server/lib.py @@ -123,20 +123,30 @@ def check_ftrack_url(url, log_errors=True): return url -class StorerEventHub(ftrack_api.event.hub.EventHub): +class SocketBaseEventHub(ftrack_api.event.hub.EventHub): + + hearbeat_msg = b"hearbeat" + heartbeat_callbacks = [] + def __init__(self, *args, **kwargs): self.sock = kwargs.pop("sock") - super(StorerEventHub, self).__init__(*args, **kwargs) + super(SocketBaseEventHub, self).__init__(*args, **kwargs) def _handle_packet(self, code, packet_identifier, path, data): """Override `_handle_packet` which extend heartbeat""" code_name = self._code_name_mapping[code] if code_name == "heartbeat": # Reply with heartbeat. - self.sock.sendall(b"storer") - return self._send_packet(self._code_name_mapping['heartbeat']) + for callback in self.heartbeat_callbacks: + callback() + + self.sock.sendall(self.hearbeat_msg) + return self._send_packet(self._code_name_mapping["heartbeat"]) + + return super(SocketBaseEventHub, self)._handle_packet( + code, packet_identifier, path, data + ) - elif code_name == "connect": event = ftrack_api.event.base.Event( topic="pype.storer.started", data={}, From 24022c583651f16d70b210e340472be523c447d8 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 7 Feb 2020 18:28:44 +0100 Subject: [PATCH 056/107] Status event hub implemented --- pype/ftrack/ftrack_server/lib.py | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) diff --git a/pype/ftrack/ftrack_server/lib.py b/pype/ftrack/ftrack_server/lib.py index 2617b63614..71ce6861a4 100644 --- a/pype/ftrack/ftrack_server/lib.py +++ b/pype/ftrack/ftrack_server/lib.py @@ -147,6 +147,25 @@ class SocketBaseEventHub(ftrack_api.event.hub.EventHub): code, packet_identifier, path, data ) + +class StatusEventHub(SocketBaseEventHub): + def _handle_packet(self, code, packet_identifier, path, data): + """Override `_handle_packet` which extend heartbeat""" + code_name = self._code_name_mapping[code] + if code_name == "connect": + event = ftrack_api.event.base.Event( + topic="pype.status.started", + data={}, + source={ + "id": self.id, + "user": {"username": self._api_user} + } + ) + self._event_queue.put(event) + + return super(StatusEventHub, self)._handle_packet( + code, packet_identifier, path, data + ) event = ftrack_api.event.base.Event( topic="pype.storer.started", data={}, From a97c73258e349291ae8f0899f37ac7ec9a8c13b5 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 7 Feb 2020 18:29:01 +0100 Subject: [PATCH 057/107] removed user event hub --- pype/ftrack/ftrack_server/lib.py | 29 -------------------- pype/ftrack/ftrack_server/sub_user_server.py | 4 +-- 2 files changed, 2 insertions(+), 31 deletions(-) diff --git a/pype/ftrack/ftrack_server/lib.py b/pype/ftrack/ftrack_server/lib.py index 71ce6861a4..57c5b7d5dc 100644 --- a/pype/ftrack/ftrack_server/lib.py +++ b/pype/ftrack/ftrack_server/lib.py @@ -296,35 +296,6 @@ class ProcessEventHub(ftrack_api.event.hub.EventHub): return super()._handle_packet(code, packet_identifier, path, data) -class UserEventHub(ftrack_api.event.hub.EventHub): - def __init__(self, *args, **kwargs): - self.sock = kwargs.pop("sock") - super(UserEventHub, self).__init__(*args, **kwargs) - - def _handle_packet(self, code, packet_identifier, path, data): - """Override `_handle_packet` which extend heartbeat""" - code_name = self._code_name_mapping[code] - if code_name == "heartbeat": - # Reply with 
heartbeat. - self.sock.sendall(b"hearbeat") - return self._send_packet(self._code_name_mapping['heartbeat']) - - elif code_name == "connect": - event = ftrack_api.event.base.Event( - topic="pype.storer.started", - data={}, - source={ - "id": self.id, - "user": {"username": self._api_user} - } - ) - self._event_queue.put(event) - - return super(UserEventHub, self)._handle_packet( - code, packet_identifier, path, data - ) - - class SocketSession(ftrack_api.session.Session): '''An isolated session for interaction with an ftrack server.''' def __init__( diff --git a/pype/ftrack/ftrack_server/sub_user_server.py b/pype/ftrack/ftrack_server/sub_user_server.py index f0d39447a8..8c1497a562 100644 --- a/pype/ftrack/ftrack_server/sub_user_server.py +++ b/pype/ftrack/ftrack_server/sub_user_server.py @@ -5,7 +5,7 @@ import socket import traceback from ftrack_server import FtrackServer -from pype.ftrack.ftrack_server.lib import SocketSession, UserEventHub +from pype.ftrack.ftrack_server.lib import SocketSession, SocketBaseEventHub from pypeapp import Logger @@ -28,7 +28,7 @@ def main(args): try: session = SocketSession( - auto_connect_event_hub=True, sock=sock, Eventhub=UserEventHub + auto_connect_event_hub=True, sock=sock, Eventhub=SocketBaseEventHub ) server = FtrackServer("action") log.debug("Launched User Ftrack Server") From 526f9282d1e4136b44eab6e5505b1adf23e4af5b Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 7 Feb 2020 18:29:24 +0100 Subject: [PATCH 058/107] storer and processor eventhubs are modified --- pype/ftrack/ftrack_server/lib.py | 18 +++++++++++++----- 1 file changed, 13 insertions(+), 5 deletions(-) diff --git a/pype/ftrack/ftrack_server/lib.py b/pype/ftrack/ftrack_server/lib.py index 57c5b7d5dc..478bede6ef 100644 --- a/pype/ftrack/ftrack_server/lib.py +++ b/pype/ftrack/ftrack_server/lib.py @@ -166,6 +166,16 @@ class StatusEventHub(SocketBaseEventHub): return super(StatusEventHub, self)._handle_packet( code, packet_identifier, path, data ) + + +class StorerEventHub(SocketBaseEventHub): + + hearbeat_msg = b"storer" + + def _handle_packet(self, code, packet_identifier, path, data): + """Override `_handle_packet` which extend heartbeat""" + code_name = self._code_name_mapping[code] + if code_name == "connect": event = ftrack_api.event.base.Event( topic="pype.storer.started", data={}, @@ -181,7 +191,9 @@ class StatusEventHub(SocketBaseEventHub): ) -class ProcessEventHub(ftrack_api.event.hub.EventHub): +class ProcessEventHub(SocketBaseEventHub): + + hearbeat_msg = b"processor" url, database, table_name = get_ftrack_event_mongo_info() is_table_created = False @@ -193,7 +205,6 @@ class ProcessEventHub(ftrack_api.event.hub.EventHub): database_name=self.database, table_name=self.table_name ) - self.sock = kwargs.pop("sock") super(ProcessEventHub, self).__init__(*args, **kwargs) def prepare_dbcon(self): @@ -289,9 +300,6 @@ class ProcessEventHub(ftrack_api.event.hub.EventHub): code_name = self._code_name_mapping[code] if code_name == "event": return - if code_name == "heartbeat": - self.sock.sendall(b"processor") - return self._send_packet(self._code_name_mapping["heartbeat"]) return super()._handle_packet(code, packet_identifier, path, data) From 4fd403bf54a167ea6d0621554b0a9b6768ca2bfb Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 7 Feb 2020 18:29:38 +0100 Subject: [PATCH 059/107] added constants with topics to lib --- pype/ftrack/ftrack_server/lib.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/pype/ftrack/ftrack_server/lib.py b/pype/ftrack/ftrack_server/lib.py index 
478bede6ef..e623cab8fb 100644
--- a/pype/ftrack/ftrack_server/lib.py
+++ b/pype/ftrack/ftrack_server/lib.py
@@ -28,6 +28,10 @@ from pypeapp import Logger
 from pype.ftrack.lib.custom_db_connector import DbConnector
 
 
+TOPIC_STATUS_SERVER = "pype.event.server.status"
+TOPIC_STATUS_SERVER_RESULT = "pype.event.server.status.result"
+
+
 def ftrack_events_mongo_settings():
     host = None
     port = None

From 37de60577809c2ace929f7dab880a95ddc0ed0c2 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Fri, 7 Feb 2020 18:30:07 +0100
Subject: [PATCH 060/107] socket thread can use additional execution arguments
 and unused -port arg was removed

---
 pype/ftrack/ftrack_server/socket_thread.py | 10 ++++++++--
 1 file changed, 8 insertions(+), 2 deletions(-)

diff --git a/pype/ftrack/ftrack_server/socket_thread.py b/pype/ftrack/ftrack_server/socket_thread.py
index 8e217870ba..cb073d83a0 100644
--- a/pype/ftrack/ftrack_server/socket_thread.py
+++ b/pype/ftrack/ftrack_server/socket_thread.py
@@ -12,13 +12,14 @@ class SocketThread(threading.Thread):
 
     MAX_TIMEOUT = 35
 
-    def __init__(self, name, port, filepath):
+    def __init__(self, name, port, filepath, additional_args=[]):
         super(SocketThread, self).__init__()
         self.log = Logger().get_logger("SocketThread", "Event Thread")
         self.setName(name)
         self.name = name
         self.port = port
         self.filepath = filepath
+        self.additional_args = additional_args
         self.sock = None
         self.subproc = None
         self.connection = None
@@ -53,7 +54,12 @@ class SocketThread(threading.Thread):
         )
 
         self.subproc = subprocess.Popen(
-            [sys.executable, self.filepath, "-port", str(self.port)]
+            [
+                sys.executable,
+                self.filepath,
+                *self.additional_args,
+                str(self.port)
+            ]
         )
 
         # Listen for incoming connections

From 05929f2b02929b9652411e4f0b53d324f3a67b76 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Fri, 7 Feb 2020 18:31:24 +0100
Subject: [PATCH 061/107] status gets subprocess data only if they are missing
 (why collect the same data for each action launch)

---
 pype/ftrack/ftrack_server/sub_event_info.py | 426 +++++++++----------
 1 file changed, 197 insertions(+), 229 deletions(-)

diff --git a/pype/ftrack/ftrack_server/sub_event_info.py b/pype/ftrack/ftrack_server/sub_event_info.py
index d63b6acadd..5a38c992f5 100644
--- a/pype/ftrack/ftrack_server/sub_event_info.py
+++ b/pype/ftrack/ftrack_server/sub_event_info.py
@@ -1,137 +1,189 @@
 import os
 import sys
-import copy
+import json
 import signal
 import socket
-import uuid
-from datetime import datetime
+import datetime
 
 import ftrack_api
 from ftrack_server import FtrackServer
 from pype.ftrack.ftrack_server.lib import (
-    SocketSession, SocketBaseEventHub,
+    SocketSession, StatusEventHub,
     TOPIC_STATUS_SERVER, TOPIC_STATUS_SERVER_RESULT
 )
 from pypeapp import Logger
 
 log = Logger().get_logger("Event storer")
-log.info(os.environ.get("FTRACK_EVENT_SUB_ID"))
-
-
-class ObjectFactory:
-    session = None
-    sock = None
-    subprocess_id = os.environ["FTRACK_EVENT_SUB_ID"]
-    status_factory = None
-
-
-def trigger_status_info(status_id=None, status=None):
-    if not status and not status_id:
-        log.warning(
-            "`status_id` or `status` must be specified to trigger action."
- ) - return - - if not status: - status = ObjectFactory.status_factory[status_id] - - if not status: - return - - new_event_data = copy.deepcopy(action_data) - new_event_data.update({ - "selection": [] - }) - new_event_data["subprocess_id"] = ObjectFactory.subprocess_id - new_event_data["status_id"] = status.id - - new_event = ftrack_api.event.base.Event( - topic="ftrack.action.launch", - data=new_event_data, - source=status.source - ) - ObjectFactory.session.event_hub.publish(new_event) - - action_identifier = ( - "event.server.status" + ObjectFactory.subprocess_id + "event.server.status" + os.environ["FTRACK_EVENT_SUB_ID"] ) - -# TODO add IP adress to label -# TODO add icon action_data = { "label": "Pype Admin", - "variant": "Event server Status", + "variant": "- Event server Status", "description": "Get Infromation about event server", "actionIdentifier": action_identifier, "icon": None } +class ObjectFactory: + session = None + status_factory = None + + class Status: default_item = { "type": "label", - "value": "Information not allowed." + "value": "Process info is not available at this moment." } + + def __init__(self, name, label, parent): + self.name = name + self.label = label or name + self.parent = parent + + self.info = None + self.last_update = None + + def update(self, info): + self.last_update = datetime.datetime.now() + self.info = info + + def get_delta_string(self, delta): + days, hours, minutes = ( + delta.days, delta.seconds // 3600, delta.seconds // 60 % 60 + ) + delta_items = [ + "{}d".format(days), + "{}h".format(hours), + "{}m".format(minutes) + ] + if not days: + delta_items.pop(0) + if not hours: + delta_items.pop(0) + delta_items.append("{}s".format(delta.seconds % 60)) + if not minutes: + delta_items.pop(0) + + return " ".join(delta_items) + + def get_items(self): + items = [] + last_update = "N/A" + if self.last_update: + delta = datetime.datetime.now() - self.last_update + last_update = "{} ago".format( + self.get_delta_string(delta) + ) + + last_update = "Updated: {}".format(last_update) + items.append({ + "type": "label", + "value": "#{}".format(self.label) + }) + items.append({ + "type": "label", + "value": "##{}".format(last_update) + }) + + if not self.info: + if self.info is None: + trigger_info_get() + items.append(self.default_item) + return items + + info = {} + for key, value in self.info.items(): + if key not in ["created_at:", "created_at"]: + info[key] = value + continue + + datetime_value = datetime.datetime.strptime( + value, "%Y.%m.%d %H:%M:%S" + ) + delta = datetime.datetime.now() - datetime_value + + running_for = self.get_delta_string(delta) + info["Started at"] = "{} [running: {}]".format(value, running_for) + + for key, value in info.items(): + items.append({ + "type": "label", + "value": "{}: {}".format(key, value) + }) + + return items + + +class StatusFactory: + note_item = { "type": "label", - "value": "Hit `submit` to refresh data." + "value": ( + "NOTE: Hit `submit` and uncheck all" + " checkers to refresh data." 
+ ) } splitter_item = { "type": "label", "value": "---" } - def __init__(self, source_info, parent): - self.id = str(uuid.uuid1()) - self.created = datetime.now() - self.parent = parent + def __init__(self, statuses={}): + self.statuses = [] + for status in statuses.items(): + self.create_status(*status) - self.source = source_info + def __getitem__(self, key): + return self.get(key) - self.main_process = None - self.storer = None - self.processor = None + def get(self, key, default=None): + for status in self.statuses: + if status.name == key: + return status + return default - def add_result(self, source, data): - if source.lower() == "storer": - self.storer = data - - elif source.lower() == "processor": - self.processor = data - - else: - self.main_process = data - - def filled(self): - # WARNING DEBUG PART!!!! + def is_filled(self): + for status in self.statuses: + if status.info is None: + return False return True - return ( - self.main_process is not None and - self.storer is not None and - self.processor is not None - ) - def get_items_from_dict(self, in_dict): - items = [] - for key, value in in_dict.items(): - items.append({ - "type": "label", - "value": "##{}".format(key) - }) - items.append({ - "type": "label", - "value": value - }) - return items + def create_status(self, name, label): + new_status = Status(name, label, self) + self.statuses.append(new_status) + + def process_event_result(self, event): + subprocess_id = event["data"].get("subprocess_id") + if subprocess_id != os.environ["FTRACK_EVENT_SUB_ID"]: + return + + source = event["data"]["source"] + data = event["data"]["status_info"] + for status in self.statuses: + if status.name == source: + status.update(data) + break def bool_items(self): items = [] - name_labels = { - "shutdown_main": "Shutdown main process", - "reset_storer": "Reset storer", - "reset_processor": "Reset processor" - } + items.append({ + "type": "label", + "value": "#Restart process" + }) + items.append({ + "type": "label", + "value": ( + "WARNING: Main process may not restart" + " if does not run as a service!" 
+ ) + }) + + name_labels = {} + for status in self.statuses: + name_labels[status.name] = status.label + for name, label in name_labels.items(): items.append({ "type": "boolean", @@ -144,75 +196,14 @@ class Status: def items(self): items = [] items.append(self.note_item) - - items.append({"type": "label", "value": "Main process"}) - if not self.main_process: - items.append(self.default_item) - else: - items.extend( - self.get_items_from_dict(self.main_process) - ) - - items.append(self.splitter_item) - items.append({"type": "label", "value": "Storer process"}) - if not self.storer: - items.append(self.default_item) - else: - items.extend( - self.get_items_from_dict(self.storer) - ) - - items.append(self.splitter_item) - items.append({"type": "label", "value": "Processor process"}) - if not self.processor: - items.append(self.default_item) - else: - items.extend( - self.get_items_from_dict(self.processor) - ) - - items.append(self.splitter_item) items.extend(self.bool_items()) + for status in self.statuses: + items.append(self.splitter_item) + items.extend(status.get_items()) + return items - @property - def is_overtime(self): - time_delta = (datetime.now() - self.created).total_seconds() - return time_delta >= self.parent.max_delta_seconds - - -class StatusFactory: - max_delta_seconds = 30 - - def __init__(self): - self.statuses = {} - - def __getitem__(self, key): - return self.statuses.get(key) - - def create_status(self, source_info): - new_status = Status(source_info, self) - self.statuses[new_status.id] = new_status - return new_status - - def process_result(self, event): - subprocess_id = event["data"].get("subprocess_id") - if subprocess_id != ObjectFactory.subprocess_id: - return - - status_id = event["data"].get("status_id") - status = self.statuses[status_id] - if not status: - return - - source = event["data"]["source"] - data = event["data"]["status_info"] - - status.add_result(source, data) - if status.filled(): - trigger_status_info(status=status) - def server_activity_validate_user(event): """Validate user permissions to show server info.""" @@ -247,38 +238,6 @@ def server_activity_discover(event): return {"items": [action_data]} -def handle_filled_event(event): - subprocess_id = event["data"].get("subprocess_id") - if subprocess_id != ObjectFactory.subprocess_id: - return None - - status_id = event["data"].get("status_id") - status = ObjectFactory.status_factory[status_id] - if not status: - return None - - values = event.get("values") - if values: - log.info(values) - - title = "Event server - Status" - - event_data = copy.deepcopy(event["data"]) - event_data.update({ - "type": "widget", - "items": status.items(), - "title": title - }) - - ObjectFactory.session.event_hub.publish( - ftrack_api.event.base.Event( - topic="ftrack.action.trigger-user-interface", - data=event_data - ), - on_error='ignore' - ) - - def server_activity(event): session = ObjectFactory.session if session is None: @@ -289,35 +248,47 @@ def server_activity(event): "message": msg } - valid = server_activity_validate_user(event) - if not valid: + if not server_activity_validate_user(event): return { "success": False, "message": "You don't have permissions to see Event server status!" 
} - subprocess_id = event["data"].get("subprocess_id") - if subprocess_id is not None: - return handle_filled_event(event) + values = event["data"].get("values") or {} + is_checked = False + for value in values.values(): + if value: + is_checked = True + break - status = ObjectFactory.status_factory.create_status(event["source"]) + if not is_checked: + return { + "items": ObjectFactory.status_factory.items(), + "title": "Server current status" + } - event_data = { - "status_id": status.id, - "subprocess_id": ObjectFactory.subprocess_id - } + +def trigger_info_get(): + session = ObjectFactory.session session.event_hub.publish( ftrack_api.event.base.Event( topic=TOPIC_STATUS_SERVER, - data=event_data + data={"subprocess_id": os.environ["FTRACK_EVENT_SUB_ID"]} ), on_error="ignore" ) - return { - "success": True, - "message": "Collecting information (this may take > 20s)" - } + +def on_start(event): + session = ObjectFactory.session + source_id = event.get("source", {}).get("id") + if not source_id or source_id != session.event_hub.id: + return + + if session is None: + log.warning("Session is not set. Can't trigger Sync to avalon action.") + return True + trigger_info_get() def register(session): @@ -326,6 +297,7 @@ def register(session): "topic=ftrack.action.discover", server_activity_discover ) + session.event_hub.subscribe("topic=pype.status.started", on_start) status_launch_subscription = ( "topic=ftrack.action.launch and data.actionIdentifier={}" @@ -338,34 +310,51 @@ def register(session): session.event_hub.subscribe( "topic={}".format(TOPIC_STATUS_SERVER_RESULT), - ObjectFactory.status_factory.process_result + ObjectFactory.status_factory.process_event_result ) +def heartbeat(): + if ObjectFactory.status_factory.is_filled(): + return + + trigger_info_get() + + def main(args): port = int(args[-1]) + server_info = json.loads(args[-2]) # Create a TCP/IP socket sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) # Connect the socket to the port where the server is listening server_address = ("localhost", port) - log.debug("Storer connected to {} port {}".format(*server_address)) + log.debug("Statuser connected to {} port {}".format(*server_address)) sock.connect(server_address) sock.sendall(b"CreatedStatus") # store socket connection object ObjectFactory.sock = sock - ObjectFactory.status_factory = StatusFactory() + statuse_names = { + "main": "Main process", + "storer": "Storer", + "processor": "Processor" + } + + ObjectFactory.status_factory = StatusFactory(statuse_names) + ObjectFactory.status_factory["main"].update(server_info) _returncode = 0 try: session = SocketSession( - auto_connect_event_hub=True, sock=sock, Eventhub=SocketBaseEventHub + auto_connect_event_hub=True, sock=sock, Eventhub=StatusEventHub ) ObjectFactory.session = session + session.event_hub.heartbeat_callbacks.append(heartbeat) register(session) server = FtrackServer("event") - log.debug("Launched Ftrack Event storer") + log.debug("Launched Ftrack Event statuser") + server.run_server(session, load_files=False) except Exception: @@ -388,24 +377,3 @@ if __name__ == "__main__": signal.signal(signal.SIGTERM, signal_handler) sys.exit(main(sys.argv)) - - -example_action_event = { - 'data': { - 'selection': [], - 'description': 'Test action2', - 'variant': None, - 'label': 'Test action2', - 'actionIdentifier': 'test.action2.3ceffe5e9acf40f8aa80603adebd0d06', - 'values': {}, - 'icon': None, - }, - 'topic': 'ftrack.action.launch', - 'sent': None, - 'source': { - 'id': 'eb67d186301c4cbbab73c1aee9b7c55d', - 'user': 
{'username': 'jakub.trllo', 'id': '2a8ae090-cbd3-11e8-a87a-0a580aa00121'} - }, - 'target': '', - 'in_reply_to_event': None -} From 1b1a78cb6ed79be18fcf89bd340c4e09528fda56 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 7 Feb 2020 18:31:47 +0100 Subject: [PATCH 062/107] processor suprocess can send status information on ask event --- .../ftrack_server/sub_event_processor.py | 51 ++++++++++++++++++- 1 file changed, 50 insertions(+), 1 deletion(-) diff --git a/pype/ftrack/ftrack_server/sub_event_processor.py b/pype/ftrack/ftrack_server/sub_event_processor.py index 9c971ca916..2a3ad3e76d 100644 --- a/pype/ftrack/ftrack_server/sub_event_processor.py +++ b/pype/ftrack/ftrack_server/sub_event_processor.py @@ -1,13 +1,59 @@ +import os import sys import signal import socket +import datetime from ftrack_server import FtrackServer -from pype.ftrack.ftrack_server.lib import SocketSession, ProcessEventHub +from pype.ftrack.ftrack_server.lib import ( + SocketSession, ProcessEventHub, TOPIC_STATUS_SERVER +) +import ftrack_api from pypeapp import Logger log = Logger().get_logger("Event processor") +subprocess_started = datetime.datetime.now() + + +class SessionFactory: + session = None + + +def send_status(event): + subprocess_id = event["data"].get("subprocess_id") + if not subprocess_id: + return + + if subprocess_id != os.environ["FTRACK_EVENT_SUB_ID"]: + return + + session = SessionFactory.session + if not session: + return + + new_event_data = { + "subprocess_id": subprocess_id, + "source": "processor", + "status_info": { + "created_at": subprocess_started.strftime("%Y.%m.%d %H:%M:%S") + } + } + + new_event = ftrack_api.event.base.Event( + topic="pype.event.server.status.result", + data=new_event_data + ) + + session.event_hub.publish(new_event) + + +def register(session): + '''Registers the event, subscribing the discover and launch topics.''' + session.event_hub.subscribe( + "topic={}".format(TOPIC_STATUS_SERVER), send_status + ) + def main(args): port = int(args[-1]) @@ -24,6 +70,9 @@ def main(args): session = SocketSession( auto_connect_event_hub=True, sock=sock, Eventhub=ProcessEventHub ) + register(session) + SessionFactory.session = session + server = FtrackServer("event") log.debug("Launched Ftrack Event processor") server.run_server(session) From 2ff7b87956651c3343d195b56f0f871aaa4afee1 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 7 Feb 2020 18:32:02 +0100 Subject: [PATCH 063/107] storer can send status information on ask --- pype/ftrack/ftrack_server/sub_event_storer.py | 36 +++++++++++++++++-- 1 file changed, 34 insertions(+), 2 deletions(-) diff --git a/pype/ftrack/ftrack_server/sub_event_storer.py b/pype/ftrack/ftrack_server/sub_event_storer.py index dfe8e21654..b4b9b8a7ab 100644 --- a/pype/ftrack/ftrack_server/sub_event_storer.py +++ b/pype/ftrack/ftrack_server/sub_event_storer.py @@ -8,14 +8,15 @@ import pymongo import ftrack_api from ftrack_server import FtrackServer from pype.ftrack.ftrack_server.lib import ( + SocketSession, StorerEventHub, get_ftrack_event_mongo_info, - SocketSession, - StorerEventHub + TOPIC_STATUS_SERVER, TOPIC_STATUS_SERVER_RESULT ) from pype.ftrack.lib.custom_db_connector import DbConnector from pypeapp import Logger log = Logger().get_logger("Event storer") +subprocess_started = datetime.datetime.now() class SessionFactory: @@ -138,11 +139,42 @@ def trigger_sync(event): ) +def send_status(event): + session = SessionFactory.session + if not session: + return + + subprocess_id = event["data"].get("subprocess_id") + if not subprocess_id: + return + + 
if subprocess_id != os.environ["FTRACK_EVENT_SUB_ID"]:
+        return
+
+    new_event_data = {
+        "subprocess_id": os.environ["FTRACK_EVENT_SUB_ID"],
+        "source": "storer",
+        "status_info": {
+            "created_at": subprocess_started.strftime("%Y.%m.%d %H:%M:%S")
+        }
+    }
+
+    new_event = ftrack_api.event.base.Event(
+        topic=TOPIC_STATUS_SERVER_RESULT,
+        data=new_event_data
+    )
+
+    session.event_hub.publish(new_event)
+
+
 def register(session):
     '''Registers the event, subscribing the discover and launch topics.'''
     install_db()
     session.event_hub.subscribe("topic=*", launch)
     session.event_hub.subscribe("topic=pype.storer.started", trigger_sync)
+    session.event_hub.subscribe(
+        "topic={}".format(TOPIC_STATUS_SERVER), send_status
+    )
 
 
 def main(args):

From 5433daf7b065eb7c16720009170b3400a5ee0fd5 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Fri, 7 Feb 2020 18:32:40 +0100
Subject: [PATCH 064/107] event server cli sends its information on status
 subprocess startup

---
 pype/ftrack/ftrack_server/event_server_cli.py | 19 ++++++++++++++++++-
 1 file changed, 18 insertions(+), 1 deletion(-)

diff --git a/pype/ftrack/ftrack_server/event_server_cli.py b/pype/ftrack/ftrack_server/event_server_cli.py
index b2c540e993..2dadb5da25 100644
--- a/pype/ftrack/ftrack_server/event_server_cli.py
+++ b/pype/ftrack/ftrack_server/event_server_cli.py
@@ -4,7 +4,10 @@ import signal
 import datetime
 import subprocess
 import socket
+import json
+import platform
 import argparse
+import getpass
 import atexit
 import time
 import uuid
@@ -233,6 +236,16 @@ def main_loop(ftrack_url):
     atexit.register(
         on_exit, processor_thread=processor_thread, storer_thread=storer_thread
     )
+
+    system_name, pc_name = platform.uname()[:2]
+    host_name = socket.gethostname()
+    main_info = {
+        "created_at": datetime.datetime.now().strftime("%Y.%m.%d %H:%M:%S"),
+        "Username": getpass.getuser(),
+        "Host Name": host_name,
+        "Host IP": socket.gethostbyname(host_name)
+    }
+    main_info_str = json.dumps(main_info)
     # Main loop
     while True:
         # Check if accessible Ftrack and Mongo url

From 2f85cdf0be4ed0b54481013ebc57c201dad9f444 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Fri, 7 Feb 2020 19:53:16 +0100
Subject: [PATCH 065/107] restarting is working, need to add communication
 between main process and status process

---
 pype/ftrack/ftrack_server/event_server_cli.py | 99 +++++++++++--------
 pype/ftrack/ftrack_server/socket_thread.py    | 56 ++++++++++-
 pype/ftrack/ftrack_server/sub_event_info.py   | 35 +++++++
 3 files changed, 148 insertions(+), 42 deletions(-)

diff --git a/pype/ftrack/ftrack_server/event_server_cli.py b/pype/ftrack/ftrack_server/event_server_cli.py
index
2dadb5da25..19e889f77d 100644 --- a/pype/ftrack/ftrack_server/event_server_cli.py +++ b/pype/ftrack/ftrack_server/event_server_cli.py @@ -222,7 +222,7 @@ def main_loop(ftrack_url): # stop threads on exit # TODO check if works and args have thread objects! - def on_exit(processor_thread, storer_thread): + def on_exit(processor_thread, storer_thread, statuser_thread): if processor_thread is not None: processor_thread.stop() processor_thread.join() @@ -233,8 +233,16 @@ def main_loop(ftrack_url): storer_thread.join() storer_thread = None + if statuser_thread is not None: + statuser_thread.stop() + statuser_thread.join() + statuser_thread = None + atexit.register( - on_exit, processor_thread=processor_thread, storer_thread=storer_thread + on_exit, + processor_thread=processor_thread, + storer_thread=storer_thread, + statuser_thread=statuser_thread ) system_name, pc_name = platform.uname()[:2] @@ -283,6 +291,51 @@ def main_loop(ftrack_url): printed_ftrack_error = False printed_mongo_error = False + # ====== STATUSER ======= + if statuser_thread is None: + if statuser_failed_count < max_fail_count: + statuser_thread = socket_thread.StatusSocketThread( + statuser_name, statuser_port, statuser_path, + [main_info_str] + ) + statuser_thread.start() + + elif statuser_failed_count == max_fail_count: + print(( + "Statuser failed {}times in row" + " I'll try to run again {}s later" + ).format(str(max_fail_count), str(wait_time_after_max_fail))) + statuser_failed_count += 1 + + elif (( + datetime.datetime.now() - statuser_last_failed + ).seconds > wait_time_after_max_fail): + statuser_failed_count = 0 + + # If thread failed test Ftrack and Mongo connection + elif not statuser_thread.isAlive(): + statuser_thread.join() + statuser_thread = None + ftrack_accessible = False + mongo_accessible = False + + _processor_last_failed = datetime.datetime.now() + delta_time = ( + _processor_last_failed - statuser_last_failed + ).seconds + + if delta_time < min_fail_seconds: + statuser_failed_count += 1 + else: + statuser_failed_count = 0 + statuser_last_failed = _processor_last_failed + + elif statuser_thread.stop_subprocess: + print("Main process was stopped by action") + on_exit(processor_thread, storer_thread, statuser_thread) + os.kill(os.getpid(), signal.SIGTERM) + return 1 + # ====== STORER ======= # Run backup thread which does not requeire mongo to work if storer_thread is None: @@ -291,6 +344,7 @@ def main_loop(ftrack_url): storer_name, storer_port, storer_path ) storer_thread.start() + elif storer_failed_count == max_fail_count: print(( "Storer failed {}times I'll try to run again {}s later" @@ -360,44 +414,9 @@ def main_loop(ftrack_url): processor_failed_count = 0 processor_last_failed = _processor_last_failed - # ====== STATUSER ======= - if statuser_thread is None: - if statuser_failed_count < max_fail_count: - statuser_thread = socket_thread.SocketThread( - statuser_name, statuser_port, statuser_path, - [main_info_str] - ) - statuser_thread.start() - - elif statuser_failed_count == max_fail_count: - print(( - "Statuser failed {}times in row" - " I'll try to run again {}s later" - ).format(str(max_fail_count), str(wait_time_after_max_fail))) - statuser_failed_count += 1 - - elif (( - datetime.datetime.now() - statuser_last_failed - ).seconds > wait_time_after_max_fail): - statuser_failed_count = 0 - - # If thread failed test Ftrack and Mongo connection - elif not statuser_thread.isAlive(): - statuser_thread.join() - statuser_thread = None - ftrack_accessible = False - mongo_accessible = False - - 
_processor_last_failed = datetime.datetime.now() - delta_time = ( - _processor_last_failed - statuser_last_failed - ).seconds - - if delta_time < min_fail_seconds: - statuser_failed_count += 1 - else: - statuser_failed_count = 0 - statuser_last_failed = _processor_last_failed + if statuser_thread is not None: + statuser_thread.set_process("storer", storer_thread) + statuser_thread.set_process("processor", processor_thread) time.sleep(1) diff --git a/pype/ftrack/ftrack_server/socket_thread.py b/pype/ftrack/ftrack_server/socket_thread.py index cb073d83a0..cbe4f9dd8b 100644 --- a/pype/ftrack/ftrack_server/socket_thread.py +++ b/pype/ftrack/ftrack_server/socket_thread.py @@ -3,6 +3,7 @@ import sys import time import socket import threading +import traceback import subprocess from pypeapp import Logger @@ -14,12 +15,13 @@ class SocketThread(threading.Thread): def __init__(self, name, port, filepath, additional_args=[]): super(SocketThread, self).__init__() - self.log = Logger().get_logger("SocketThread", "Event Thread") + self.log = Logger().get_logger(self.__class__.__name__) self.setName(name) self.name = name self.port = port self.filepath = filepath self.additional_args = additional_args + self.sock = None self.subproc = None self.connection = None @@ -59,7 +61,8 @@ class SocketThread(threading.Thread): self.filepath, *self.additional_args, str(self.port) - ] + ], + stdin=subprocess.PIPE ) # Listen for incoming connections @@ -133,3 +136,52 @@ class SocketThread(threading.Thread): if data == b"MongoError": self.mongo_error = True connection.sendall(data) + + +class StatusSocketThread(SocketThread): + process_name_mapping = { + b"RestartS": "storer", + b"RestartP": "processor", + b"RestartM": "main" + } + + def __init__(self, *args, **kwargs): + self.process_threads = {} + self.stop_subprocess = False + super(StatusSocketThread, self).__init__(*args, **kwargs) + + def set_process(self, process_name, thread): + try: + if not self.subproc: + self.process_threads[process_name] = None + return + + if ( + process_name in self.process_threads and + self.process_threads[process_name] == thread + ): + return + + self.process_threads[process_name] = thread + self.subproc.stdin.write( + str.encode("reset:{}".format(process_name)) + ) + self.subproc.stdin.flush() + + except Exception: + print("Could not set thread in StatusSocketThread") + traceback.print_exception(*sys.exc_info()) + + def _handle_data(self, connection, data): + if not data: + return + + process_name = self.process_name_mapping.get(data) + if process_name: + if process_name == "main": + self.stop_subprocess = True + else: + subp = self.process_threads.get(process_name) + if subp: + subp.stop() + connection.sendall(data) diff --git a/pype/ftrack/ftrack_server/sub_event_info.py b/pype/ftrack/ftrack_server/sub_event_info.py index 5a38c992f5..a0c2564e10 100644 --- a/pype/ftrack/ftrack_server/sub_event_info.py +++ b/pype/ftrack/ftrack_server/sub_event_info.py @@ -1,6 +1,8 @@ import os import sys import json +import time +import threading import signal import socket import datetime @@ -29,6 +31,7 @@ action_data = { class ObjectFactory: session = None status_factory = None + checker_thread = None class Status: @@ -267,6 +270,17 @@ def server_activity(event): "title": "Server current status" } + session = ObjectFactory.session + if values["main"]: + session.event_hub.sock.sendall(b"RestartM") + return + + if values["storer"]: + session.event_hub.sock.sendall(b"RestartS") + + if values["processor"]: + session.event_hub.sock.sendall(b"RestartP") 
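[For context: the three byte tokens sent above make up the whole restart protocol between the status subprocess and the main process. A sketch of the mapping, mirroring `process_name_mapping` in `StatusSocketThread` from this same patch:

    # Token written to the status socket -> thread the parent acts on.
    RESTART_TOKENS = {
        b"RestartM": "main",       # parent stops its whole loop and exits
                                   # (see the `stop_subprocess` handling above)
        b"RestartS": "storer",     # parent stops the storer thread; the main
        b"RestartP": "processor",  # loop then respawns the dead subprocess
    }
]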
+ def trigger_info_get(): session = ObjectFactory.session @@ -367,13 +381,34 @@ def main(args): return _returncode +class OutputChecker(threading.Thread): + read_input = True + + def run(self): + while self.read_input: + line = sys.stdin.readlines() + log.info(str(line)) + # for line in sys.stdin.readlines(): + # log.info(str(line)) + log.info("alive-end") + time.sleep(0.5) + + def stop(self): + self.read_input = False + + if __name__ == "__main__": # Register interupt signal def signal_handler(sig, frame): print("You pressed Ctrl+C. Process ended.") + ObjectFactory.checker_thread.stop() sys.exit(0) signal.signal(signal.SIGINT, signal_handler) signal.signal(signal.SIGTERM, signal_handler) + checker_thread = OutputChecker() + ObjectFactory.checker_thread = checker_thread + checker_thread.start() + sys.exit(main(sys.argv)) From 10853e1ade753801109009d0497b389533419316 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Sat, 8 Feb 2020 11:26:43 +0100 Subject: [PATCH 066/107] process information are refreshed by main process now --- pype/ftrack/ftrack_server/socket_thread.py | 2 +- pype/ftrack/ftrack_server/sub_event_info.py | 39 ++++++++++++++------- 2 files changed, 27 insertions(+), 14 deletions(-) diff --git a/pype/ftrack/ftrack_server/socket_thread.py b/pype/ftrack/ftrack_server/socket_thread.py index cbe4f9dd8b..942965f9e2 100644 --- a/pype/ftrack/ftrack_server/socket_thread.py +++ b/pype/ftrack/ftrack_server/socket_thread.py @@ -164,7 +164,7 @@ class StatusSocketThread(SocketThread): self.process_threads[process_name] = thread self.subproc.stdin.write( - str.encode("reset:{}".format(process_name)) + str.encode("reset:{}\r\n".format(process_name)) ) self.subproc.stdin.flush() diff --git a/pype/ftrack/ftrack_server/sub_event_info.py b/pype/ftrack/ftrack_server/sub_event_info.py index a0c2564e10..4c94513eae 100644 --- a/pype/ftrack/ftrack_server/sub_event_info.py +++ b/pype/ftrack/ftrack_server/sub_event_info.py @@ -32,6 +32,7 @@ class ObjectFactory: session = None status_factory = None checker_thread = None + last_trigger = None class Status: @@ -124,8 +125,8 @@ class StatusFactory: note_item = { "type": "label", "value": ( - "NOTE: Hit `submit` and uncheck all" - " checkers to refresh data." + "HINT: To refresh data uncheck" + " all checkboxes and hit `Submit` button." ) } splitter_item = { @@ -164,9 +165,13 @@ class StatusFactory: source = event["data"]["source"] data = event["data"]["status_info"] + + self.update_status_info(source, data) + + def update_status_info(self, process_name, info): for status in self.statuses: - if status.name == source: - status.update(data) + if status.name == process_name: + status.update(info) break def bool_items(self): @@ -178,7 +183,7 @@ class StatusFactory: items.append({ "type": "label", "value": ( - "WARNING: Main process may not restart" + "WARNING: Main process may shut down when checked" " if does not run as a service!" 
                )
            })
@@ -283,6 +288,11 @@ def server_activity(event):

 def trigger_info_get():
+    if ObjectFactory.last_trigger:
+        delta = datetime.datetime.now() - ObjectFactory.last_trigger
+        if delta.seconds() < 5:
+            return
+
     session = ObjectFactory.session
     session.event_hub.publish(
         ftrack_api.event.base.Event(
@@ -352,8 +362,8 @@ def main(args):

     statuse_names = {
         "main": "Main process",
-        "storer": "Storer",
-        "processor": "Processor"
+        "storer": "Event Storer",
+        "processor": "Event Processor"
     }
     ObjectFactory.status_factory = StatusFactory(statuse_names)
@@ -386,12 +396,15 @@ class OutputChecker(threading.Thread):

     def run(self):
         while self.read_input:
-            line = sys.stdin.readlines()
-            log.info(str(line))
-            # for line in sys.stdin.readlines():
-            #     log.info(str(line))
-            log.info("alive-end")
-            time.sleep(0.5)
+            for line in sys.stdin:
+                line = line.rstrip().lower()
+                if not line.startswith("reset:"):
+                    continue
+                process_name = line.replace("reset:", "")
+
+                ObjectFactory.status_factory.update_status_info(
+                    process_name, None
+                )

     def stop(self):
         self.read_input = False

From 49f9dbf4183f057ab2f0ad16fe4b0909de55eef1 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Sat, 8 Feb 2020 11:28:33 +0100
Subject: [PATCH 067/107] renamed sub_event_info to sub_event_status

---
 pype/ftrack/ftrack_server/event_server_cli.py                | 2 +-
 .../ftrack_server/{sub_event_info.py => sub_event_status.py} | 0
 2 files changed, 1 insertion(+), 1 deletion(-)
 rename pype/ftrack/ftrack_server/{sub_event_info.py => sub_event_status.py} (100%)

diff --git a/pype/ftrack/ftrack_server/event_server_cli.py b/pype/ftrack/ftrack_server/event_server_cli.py
index 19e889f77d..90c7c566fc 100644
--- a/pype/ftrack/ftrack_server/event_server_cli.py
+++ b/pype/ftrack/ftrack_server/event_server_cli.py
@@ -209,7 +209,7 @@ def main_loop(ftrack_url):

     statuser_name = "StorerThread"
     statuser_port = 10021
-    statuser_path = "{}/sub_event_info.py".format(file_path)
+    statuser_path = "{}/sub_event_status.py".format(file_path)
     statuser_thread = None
     statuser_last_failed = datetime.datetime.now()
     statuser_failed_count = 0
diff --git a/pype/ftrack/ftrack_server/sub_event_info.py b/pype/ftrack/ftrack_server/sub_event_status.py
similarity index 100%
rename from pype/ftrack/ftrack_server/sub_event_info.py
rename to pype/ftrack/ftrack_server/sub_event_status.py

From e9c4ec7fee46b87a067efc9a7566a09f071a4ea3 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Sat, 8 Feb 2020 11:30:38 +0100
Subject: [PATCH 068/107] label has IP address of server

---
 pype/ftrack/ftrack_server/sub_event_status.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/pype/ftrack/ftrack_server/sub_event_status.py b/pype/ftrack/ftrack_server/sub_event_status.py
index 4c94513eae..8dc176a091 100644
--- a/pype/ftrack/ftrack_server/sub_event_status.py
+++ b/pype/ftrack/ftrack_server/sub_event_status.py
@@ -1,7 +1,6 @@
 import os
 import sys
 import json
-import time
 import threading
 import signal
 import socket
 import datetime
@@ -19,9 +18,10 @@ log = Logger().get_logger("Event storer")
 action_identifier = (
     "event.server.status" + os.environ["FTRACK_EVENT_SUB_ID"]
 )
+host_ip = socket.gethostbyname(socket.gethostname())
 action_data = {
     "label": "Pype Admin",
-    "variant": "- Event server Status",
+    "variant": "- Event server Status ({})".format(host_ip),
     "description": "Get Infromation about event server",
     "actionIdentifier": action_identifier,
     "icon": None

From 4e85279771711e794330d414537381be9025a4b6 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Sat, 8 Feb 2020 12:01:04 +0100
Subject: [PATCH 069/107] added icon to status action

---
 pype/ftrack/ftrack_server/sub_event_status.py | 13 +++++++++++--
 1 file changed, 11 insertions(+), 2 deletions(-)

diff --git a/pype/ftrack/ftrack_server/sub_event_status.py b/pype/ftrack/ftrack_server/sub_event_status.py
index 8dc176a091..1a15a1f28d 100644
--- a/pype/ftrack/ftrack_server/sub_event_status.py
+++ b/pype/ftrack/ftrack_server/sub_event_status.py
@@ -12,7 +12,7 @@ from pype.ftrack.ftrack_server.lib import (
     SocketSession, StatusEventHub,
     TOPIC_STATUS_SERVER, TOPIC_STATUS_SERVER_RESULT
 )
-from pypeapp import Logger
+from pypeapp import Logger, config

 log = Logger().get_logger("Event storer")
 action_identifier = (
@@ -24,7 +24,16 @@ action_data = {
     "variant": "- Event server Status ({})".format(host_ip),
     "description": "Get Infromation about event server",
     "actionIdentifier": action_identifier,
-    "icon": None
+    "icon": "{}/ftrack/action_icons/PypeAdmin.svg".format(
+        os.environ.get(
+            "PYPE_STATICS_SERVER",
+            "http://localhost:{}".format(
+                config.get_presets().get("services", {}).get(
+                    "rest_api", {}
+                ).get("default_port", 8021)
+            )
+        )
+    )
 }

From 52b41212064898fac697cf60dbf6a4f957f60072 Mon Sep 17 00:00:00 2001
From: Jakub Jezek
Date: Tue, 11 Feb 2020 14:20:31 +0100
Subject: [PATCH 070/107] fix(nk): was causing trouble on linux workstations

---
 pype/nuke/lib.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/pype/nuke/lib.py b/pype/nuke/lib.py
index a7f1b64eec..c64dc0b828 100644
--- a/pype/nuke/lib.py
+++ b/pype/nuke/lib.py
@@ -196,7 +196,7 @@ def format_anatomy(data):
         "root": api.Session["AVALON_PROJECTS"],
         "subset": data["avalon"]["subset"],
         "asset": data["avalon"]["asset"],
-        "task": api.Session["AVALON_TASK"].lower(),
+        "task": api.Session["AVALON_TASK"],
         "family": data["avalon"]["family"],
         "project": {"name": project_document["name"],
                     "code": project_document["data"].get("code", '')},
@@ -1070,7 +1070,7 @@ class BuildWorkfile(WorkfileSettings):
             "project": {"name": self._project["name"],
                         "code": self._project["data"].get("code", '')},
             "asset": self._asset or os.environ["AVALON_ASSET"],
-            "task": kwargs.get("task") or api.Session["AVALON_TASK"].lower(),
+            "task": kwargs.get("task") or api.Session["AVALON_TASK"],
            "hierarchy": kwargs.get("hierarchy") or pype.get_hierarchy(),
             "version": kwargs.get("version", {}).get("name", 1),
             "user": getpass.getuser(),

From 0b1451db3f770a245c9606770e9465ab184909ed Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Wed, 12 Feb 2020 13:49:34 +0100
Subject: [PATCH 071/107] fixed variable naming

---
 pype/ftrack/events/event_sync_to_avalon.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pype/ftrack/events/event_sync_to_avalon.py b/pype/ftrack/events/event_sync_to_avalon.py
index eef24a186d..49ac50c1db 100644
--- a/pype/ftrack/events/event_sync_to_avalon.py
+++ b/pype/ftrack/events/event_sync_to_avalon.py
@@ -1643,7 +1643,7 @@ class SyncToAvalonEvent(BaseEvent):
                     new_name, "task", schema_patterns=self.regex_schemas
                 )
                 if not passed_regex:
-                    self.regex_failed.append(ent_infos["entityId"])
+                    self.regex_failed.append(ent_info["entityId"])
                     continue

                 if new_name not in self.task_changes_by_avalon_id[mongo_id]:

From e6ba0dea0884e5477aca517a30b259a992ef44ee Mon Sep 17 00:00:00 2001
From: Jakub Jezek
Date: Wed, 12 Feb 2020 15:35:05 +0100
Subject: [PATCH 072/107] fix(nk): didn't create backdrop due to string
 problem

---
 pype/plugins/nuke/create/create_backdrop.py | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/pype/plugins/nuke/create/create_backdrop.py b/pype/plugins/nuke/create/create_backdrop.py
index 2016c66095..8609117a0d 100644
--- a/pype/plugins/nuke/create/create_backdrop.py
+++ b/pype/plugins/nuke/create/create_backdrop.py
@@ -2,6 +2,7 @@ from avalon.nuke.pipeline import Creator
 from avalon.nuke import lib as anlib
 import nuke

+
 class CreateBackdrop(Creator):
     """Add Publishable Backdrop"""

@@ -35,8 +36,8 @@ class CreateBackdrop(Creator):
             return instance

         else:
-            msg = "Please select nodes you "
-            "wish to add to a container"
+            msg = str("Please select nodes you "
+                      "wish to add to a container")
             self.log.error(msg)
             nuke.message(msg)
             return

From 91aaa4058335d0bbb7d21f6a202f8c36f287dc79 Mon Sep 17 00:00:00 2001
From: Jakub Jezek
Date: Wed, 12 Feb 2020 15:35:42 +0100
Subject: [PATCH 073/107] fix(nk): unrelated code in script

---
 pype/plugins/nuke/load/load_backdrop.py | 71 ------------------------
 1 file changed, 71 deletions(-)

diff --git a/pype/plugins/nuke/load/load_backdrop.py b/pype/plugins/nuke/load/load_backdrop.py
index 07a6724771..04cff311d1 100644
--- a/pype/plugins/nuke/load/load_backdrop.py
+++ b/pype/plugins/nuke/load/load_backdrop.py
@@ -240,77 +240,6 @@ class LoadBackdropNodes(api.Loader):

         return update_container(GN, data_imprint)

-    def connect_active_viewer(self, group_node):
-        """
-        Finds Active viewer and
-        place the node under it, also adds
-        name of group into Input Process of the viewer
-
-        Arguments:
-            group_node (nuke node): nuke group node object
-
-        """
-        group_node_name = group_node["name"].value()
-
-        viewer = [n for n in nuke.allNodes() if "Viewer1" in n["name"].value()]
-        if len(viewer) > 0:
-            viewer = viewer[0]
-        else:
-            if not (len(nodes) < 2):
-                msg = "Please create Viewer node before you "
-                "run this action again"
-                self.log.error(msg)
-                nuke.message(msg)
-                return None
-
-        # get coordinates of Viewer1
-        xpos = viewer["xpos"].value()
-        ypos = viewer["ypos"].value()
-
-        ypos += 150
-
-        viewer["ypos"].setValue(ypos)
-
-        # set coordinates to group node
-        group_node["xpos"].setValue(xpos)
-        group_node["ypos"].setValue(ypos + 50)
-
-        # add group node name to Viewer Input Process
-        viewer["input_process_node"].setValue(group_node_name)
-
-        # put backdrop under
-        pnlib.create_backdrop(label="Input Process", layer=2,
-                              nodes=[viewer, group_node], color="0x7c7faaff")
-
-        return True
-
-    def get_item(self, data, trackIndex, subTrackIndex):
-        return {key: val for key, val in data.items()
-                if subTrackIndex == val["subTrackIndex"]
-                if trackIndex == val["trackIndex"]}
-
-    def byteify(self, input):
-        """
-        Converts unicode strings to strings
-        It goes trought all dictionary
-
-        Arguments:
-            input (dict/str): input
-
-        Returns:
-            dict: with fixed values and keys
-
-        """
-
-        if isinstance(input, dict):
-            return {self.byteify(key): self.byteify(value)
-                    for key, value in input.iteritems()}
-        elif isinstance(input, list):
-            return [self.byteify(element) for element in input]
-        elif isinstance(input, unicode):
-            return input.encode('utf-8')
-        else:
-            return input

     def switch(self, container, representation):
         self.update(container, representation)

From 4ace0b2d7ccccb71c0bd2a500f944e4849435028 Mon Sep 17 00:00:00 2001
From: Jakub Jezek
Date: Wed, 12 Feb 2020 15:36:14 +0100
Subject: [PATCH 074/107] fix(nk): version check

---
 pype/plugins/nuke/publish/collect_backdrop.py | 6 +++++-
 1 file changed, 5 insertions(+), 1 deletion(-)

diff --git a/pype/plugins/nuke/publish/collect_backdrop.py b/pype/plugins/nuke/publish/collect_backdrop.py
index d98a20aee0..10729b217b 100644
--- a/pype/plugins/nuke/publish/collect_backdrop.py
+++ b/pype/plugins/nuke/publish/collect_backdrop.py
@@ -58,7 +58,11 @@ class CollectBackdrops(pyblish.api.InstancePlugin):
         last_frame = int(nuke.root()["last_frame"].getValue())

         # get version
-        version = pype.get_version_from_path(nuke.root().name())
+        version = instance.context.data.get('version')
+
+        if not version:
+            raise RuntimeError("Script name has no version in the name.")
+
         instance.data['version'] = version

         # Add version data to instance

From a3af0be8cf319448efbaac2df98ec2ce26f86cd5 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Wed, 12 Feb 2020 16:39:45 +0100
Subject: [PATCH 075/107] ftrack lib has lib file with get_project_from_entity
 and get_avalon_entities_for_assetversion

---
 pype/ftrack/lib/__init__.py |   5 ++
 pype/ftrack/lib/lib.py      | 135 ++++++++++++++++++++++++++++++++++++
 2 files changed, 140 insertions(+)
 create mode 100644 pype/ftrack/lib/lib.py

diff --git a/pype/ftrack/lib/__init__.py b/pype/ftrack/lib/__init__.py
index 9af9ded943..eabfdf0d7d 100644
--- a/pype/ftrack/lib/__init__.py
+++ b/pype/ftrack/lib/__init__.py
@@ -4,3 +4,8 @@ from .ftrack_app_handler import *
 from .ftrack_event_handler import *
 from .ftrack_action_handler import *
 from .ftrack_base_handler import *
+
+from .lib import (
+    get_project_from_entity,
+    get_avalon_entities_for_assetversion
+)
diff --git a/pype/ftrack/lib/lib.py b/pype/ftrack/lib/lib.py
new file mode 100644
index 0000000000..aee297fc7e
--- /dev/null
+++ b/pype/ftrack/lib/lib.py
@@ -0,0 +1,135 @@
+from bson.objectid import ObjectId
+
+from .avalon_sync import CustAttrIdKey
+import avalon.io
+
+
+def get_project_from_entity(entity):
+    # TODO add more entities
+    ent_type_lowered = entity.entity_type.lower()
+    if ent_type_lowered == "project":
+        return entity
+
+    elif ent_type_lowered == "assetversion":
+        return entity["asset"]["parent"]["project"]
+
+    elif "project" in entity:
+        return entity["project"]
+
+    return None
+
+
+def get_avalon_entities_for_assetversion(asset_version, db_con=None):
+    output = {
+        "success": True,
+        "message": None,
+        "project": None,
+        "project_name": None,
+        "asset": None,
+        "asset_name": None,
+        "asset_path": None,
+        "subset": None,
+        "subset_name": None,
+        "version": None,
+        "version_name": None,
+        "representations": None
+    }
+
+    if db_con is None:
+        db_con = avalon.io
+    db_con.install()
+
+    ft_asset = asset_version["asset"]
+    subset_name = ft_asset["name"]
+    version = asset_version["version"]
+    parent = ft_asset["parent"]
+    ent_path = "/".join(
+        [ent["name"] for ent in parent["link"]]
+    )
+    project = get_project_from_entity(asset_version)
+    project_name = project["full_name"]
+
+    output["project_name"] = project_name
+    output["asset_name"] = parent["name"]
+    output["asset_path"] = ent_path
+    output["subset_name"] = subset_name
+    output["version_name"] = version
+
+    db_con.Session["AVALON_PROJECT"] = project_name
+
+    avalon_project = db_con.find_one({"type": "project"})
+    output["project"] = avalon_project
+
+    if not avalon_project:
+        output["success"] = False
+        output["message"] = "Project not synchronized to avalon `{}`".format(
+            project_name
+        )
+        return output
+
+    asset_ent = None
+    asset_mongo_id = parent["custom_attributes"].get(CustAttrIdKey)
+    if asset_mongo_id:
+        try:
+            asset_mongo_id = ObjectId(asset_mongo_id)
+            asset_ent = db_con.find_one({
+                "type": "asset",
+                "_id": asset_mongo_id
+            })
+        except Exception:
+            pass
+
+    if not asset_ent:
+        asset_ent = db_con.find_one({
+            "type": "asset",
+            "data.ftrackId": parent["id"]
+        })
+
+    output["asset"] = asset_ent
+
+    if not asset_ent:
+        output["success"] = False
+        output["message"] = "Not synchronized entity to avalon `{}`".format(
+            ent_path
+        )
+        return output
+
+    asset_mongo_id = asset_ent["_id"]
+
+    subset_ent = db_con.find_one({
+        "type": "subset",
+        "parent": asset_mongo_id,
+        "name": subset_name
+    })
+
+    output["subset"] = subset_ent
+
+    if not subset_ent:
+        output["success"] = False
+        output["message"] = (
+            "Subset `{}` does not exist under Asset `{}`"
+        ).format(subset_name, ent_path)
+        return output
+
+    version_ent = db_con.find_one({
+        "type": "version",
+        "name": version,
+        "parent": subset_ent["_id"]
+    })
+
+    output["version"] = version_ent
+
+    if not version_ent:
+        output["success"] = False
+        output["message"] = (
+            "Version `{}` does not exist under Subset `{}` | Asset `{}`"
+        ).format(version, subset_name, ent_path)
+        return output
+
+    repre_ents = list(db_con.find({
+        "type": "representation",
+        "parent": version_ent["_id"]
+    }))
+
+    output["representations"] = repre_ents
+    return output

From b69fd842b118107c87bf6f08f3b7eb17510dafa1 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Wed, 12 Feb 2020 16:40:07 +0100
Subject: [PATCH 076/107] added action for storing thumbnails to avalon
 entities

---
 .../action_store_thumbnails_to_avalon.py      | 308 ++++++++++++++++++
 1 file changed, 308 insertions(+)
 create mode 100644 pype/ftrack/actions/action_store_thumbnails_to_avalon.py

diff --git a/pype/ftrack/actions/action_store_thumbnails_to_avalon.py b/pype/ftrack/actions/action_store_thumbnails_to_avalon.py
new file mode 100644
index 0000000000..ff97534656
--- /dev/null
+++ b/pype/ftrack/actions/action_store_thumbnails_to_avalon.py
@@ -0,0 +1,308 @@
+import os
+import requests
+import errno
+
+from bson.objectid import ObjectId
+from pype.ftrack import BaseAction
+from pype.ftrack.lib import (
+    get_project_from_entity,
+    get_avalon_entities_for_assetversion
+)
+from pypeapp import Anatomy
+from pype.ftrack.lib.io_nonsingleton import DbConnector
+
+
+class StoreThumbnailsToAvalon(BaseAction):
+    # Action identifier
+    identifier = "store.thubmnail.to.avalon"
+    # Action label
+    label = "Pype Admin"
+    # Action variant
+    variant = "- Store Thumbnails to avalon"
+    # Action description
+    description = 'Test action'
+    # roles that are allowed to register this action
+    role_list = ["Pypeclub", "Administrator", "Project Manager"]
+
+    icon = '{}/ftrack/action_icons/PypeAdmin.svg'.format(
+        os.environ.get('PYPE_STATICS_SERVER', '')
+    )
+
+    thumbnail_key = "AVALON_THUMBNAIL_ROOT"
+    db_con = DbConnector()
+
+    def discover(self, session, entities, event):
+        for entity in entities:
+            if entity.entity_type.lower() == "assetversion":
+                return True
+        return False
+
+    def launch(self, session, entities, event):
+        # DEBUG LINE
+        # root_path = r"C:\Users\jakub.trllo\Desktop\Tests\ftrack_thumbnails"
+
+        thumbnail_roots = os.environ.get(self.thumbnail_key)
+        if not thumbnail_roots:
+            return {
+                "success": False,
+                "message": "`{}` environment is not set".format(
+                    self.thumbnail_key
+                )
+            }
+
+        existing_thumbnail_root = None
+        for path in thumbnail_roots.split(os.pathsep):
+            if os.path.exists(path):
+                existing_thumbnail_root = path
+                break
+
+        if existing_thumbnail_root is None:
+            return {
+                "success": False,
+                "message": (
+                    "Can't access paths, set in `{}` ({})"
+                ).format(self.thumbnail_key, thumbnail_roots)
+            }
+
+        project = get_project_from_entity(entities[0])
+        project_name = project["full_name"]
+        anatomy = Anatomy(project_name)
+
+        if "publish" not in anatomy.templates:
+            msg = "Anatomy does not have set publish key!"
+
+            self.log.warning(msg)
+
+            return {
+                "success": False,
+                "message": msg
+            }
+
+        if "thumbnail" not in anatomy.templates["publish"]:
+            msg = (
+                "There is not set \"thumbnail\""
+                " template in Antomy for project \"{}\""
+            ).format(project_name)

+            self.log.warning(msg)
+
+            return {
+                "success": False,
+                "message": msg
+            }
+
+        example_template_data = {
+            "_id": "ID",
+            "thumbnail_root": "THUBMNAIL_ROOT",
+            "thumbnail_type": "THUMBNAIL_TYPE",
+            "ext": ".EXT",
+            "project": {
+                "name": "PROJECT_NAME",
+                "code": "PROJECT_CODE"
+            },
+            "asset": "ASSET_NAME",
+            "subset": "SUBSET_NAME",
+            "version": "VERSION_NAME",
+            "hierarchy": "HIERARCHY"
+        }
+        tmp_filled = anatomy.format_all(example_template_data)
+        thumbnail_result = tmp_filled["publish"]["thumbnail"]
+        if not thumbnail_result.solved:
+            missing_keys = thumbnail_result.missing_keys
+            invalid_types = thumbnail_result.invalid_types
+            submsg = ""
+            if missing_keys:
+                submsg += "Missing keys: {}".format(", ".join(
+                    ["\"{}\"".format(key) for key in missing_keys]
+                ))
+
+            if invalid_types:
+                items = []
+                for key, value in invalid_types.items():
+                    items.append("{}{}".format(str(key), str(value)))
+                submsg += "Invalid types: {}".format(", ".join(items))
+
+            msg = (
+                "Thumbnail Anatomy template expects more keys than action"
+                " can offer. {}"
+            ).format(submsg)
+
+            self.log.warning(msg)
+
+            return {
+                "success": False,
+                "message": msg
+            }
+
+        thumbnail_template = anatomy.templates["publish"]["thumbnail"]
+
+        self.db_con.install()
+
+        for entity in entities:
+            # Skip if entity is not AssetVersion (never should happend, but..)
+            if entity.entity_type.lower() != "assetversion":
+                continue
+
+            # Skip if AssetVersion don't have thumbnail
+            thumbnail_ent = entity["thumbnail"]
+            if thumbnail_ent is None:
+                self.log.debug((
+                    "Skipping. AssetVersion don't "
+                    "have set thumbnail. {}"
+                ).format(entity["id"]))
+                continue
+
+            avalon_ents_result = get_avalon_entities_for_assetversion(
+                entity, self.db_con
+            )
+            version_full_path = (
+                "Asset: \"{project_name}/{asset_path}\""
+                " | Subset: \"{subset_name}\""
+                " | Version: \"{version_name}\""
+            ).format(**avalon_ents_result)
+
+            version = avalon_ents_result["version"]
+            if not version:
+                self.log.warning((
+                    "AssetVersion does not have version in avalon. {}"
+                ).format(version_full_path))
+                continue
+
+            thumbnail_id = version["data"].get("thumbnail_id")
+            if thumbnail_id:
+                self.log.info((
+                    "AssetVersion skipped, already has thubmanil set. {}"
+                ).format(version_full_path))
+                continue
+
+            # Get thumbnail extension
+            file_ext = thumbnail_ent["file_type"]
+            if not file_ext.startswith("."):
+                file_ext = ".{}".format(file_ext)
+
+            avalon_project = avalon_ents_result["project"]
+            avalon_asset = avalon_ents_result["asset"]
+            hierarchy = ""
+            parents = avalon_asset["data"].get("parents") or []
+            if parents:
+                hierarchy = "/".join(parents)
+
+            # Prepare anatomy template fill data
+            # 1. Create new id for thumbnail entity
+            thumbnail_id = ObjectId()
+
+            template_data = {
+                "_id": str(thumbnail_id),
+                "thumbnail_root": existing_thumbnail_root,
+                "thumbnail_type": "thumbnail",
+                "ext": file_ext,
+                "project": {
+                    "name": avalon_project["name"],
+                    "code": avalon_project["data"].get("code")
+                },
+                "asset": avalon_ents_result["asset_name"],
+                "subset": avalon_ents_result["subset_name"],
+                "version": avalon_ents_result["version_name"],
+                "hierarchy": hierarchy
+            }
+
+            anatomy_filled = anatomy.format(template_data)
+            thumbnail_path = anatomy_filled["publish"]["thumbnail"]
+            thumbnail_path = thumbnail_path.replace("..", ".")
+            thumbnail_path = os.path.normpath(thumbnail_path)
+
+            downloaded = False
+            for loc in (thumbnail_ent.get("component_locations") or []):
+                res_id = loc.get("resource_identifier")
+                if not res_id:
+                    continue
+
+                thubmnail_url = self.get_thumbnail_url(res_id)
+                if self.download_file(thubmnail_url, thumbnail_path):
+                    downloaded = True
+                    break
+
+            if not downloaded:
+                self.log.warning(
+                    "Could not download thumbnail for {}".format(
+                        version_full_path
+                    )
+                )
+                continue
+
+            # Clean template data from keys that are dynamic
+            template_data.pop("_id")
+            template_data.pop("thumbnail_root")
+
+            thumbnail_entity = {
+                "_id": thumbnail_id,
+                "type": "thumbnail",
+                "schema": "pype:thumbnail-1.0",
+                "data": {
+                    "template": thumbnail_template,
+                    "template_data": template_data
+                }
+            }
+
+            # Create thumbnail entity
+            self.db_con.insert_one(thumbnail_entity)
+            self.log.debug(
+                "Creating entity in database {}".format(str(thumbnail_entity))
+            )
+
+            # Set thumbnail id for version
+            self.db_con.update_one(
+                {"_id": version["_id"]},
+                {"$set": {"data.thumbnail_id": thumbnail_id}}
+            )
+
+        return True
+
+    def get_thumbnail_url(self, resource_identifier, size=None):
+        # TODO use ftrack_api method rather (find way how to use it)
+        url_string = (
+            u'{url}/component/thumbnail?id={id}&username={username}'
+            u'&apiKey={apiKey}'
+        )
+        url = url_string.format(
+            url=self.session.server_url,
+            id=resource_identifier,
+            username=self.session.api_user,
+            apiKey=self.session.api_key
+        )
+        if size:
+            url += u'&size={0}'.format(size)
+
+        return url
+
+    def download_file(self, source_url, dst_file_path):
+        dir_path = os.path.dirname(dst_file_path)
+        try:
+            os.makedirs(dir_path)
+        except OSError as exc:
+            if exc.errno != errno.EEXIST:
+                self.log.warning(
+                    "Could not create folder: \"{}\"".format(dir_path)
+                )
+                return False
+
+        self.log.debug(
+            "Downloading file \"{}\" -> \"{}\"".format(
+                source_url, dst_file_path
+            )
+        )
+        file_open = open(dst_file_path, "wb")
+        try:
+            file_open.write(requests.get(source_url).content)
+        except Exception:
+            self.log.warning(
+                "Download of image `{}` failed.".format(source_url)
+            )
+            return False
+        finally:
+            file_open.close()
+        return True
+
+
+def register(session, plugins_presets={}):
+    StoreThumbnailsToAvalon(session, plugins_presets).register()

From 256cc85d86f819ebb05c2d7e949ae11ff2d44944 Mon Sep 17 00:00:00 2001
From: Jakub Jezek
Date: Wed, 12 Feb 2020 16:41:31 +0100
Subject: [PATCH 077/107] fix(nk): removing deprecated code

---
 pype/lib.py | 67 ++---------------------------------------------------
 1 file changed, 2 insertions(+), 65 deletions(-)

diff --git a/pype/lib.py b/pype/lib.py
index f26395d930..2235efa2f4 100644
--- a/pype/lib.py
+++ b/pype/lib.py
@@ -361,23 +361,7 @@ def _get_host_name():

 def get_asset(asset_name=None):
-    entity_data_keys_from_project_when_miss = [
-        "frameStart", "frameEnd", "handleStart", "handleEnd", "fps",
-        "resolutionWidth", "resolutionHeight"
-    ]
-
-    entity_keys_from_project_when_miss = []
-
-    alternatives = {
-        "handleStart": "handles",
-        "handleEnd": "handles"
-    }
-
-    defaults = {
-        "handleStart": 0,
-        "handleEnd": 0
-    }
-
+    """ Returning asset document from database """
     if not asset_name:
         asset_name = avalon.api.Session["AVALON_ASSET"]

@@ -385,57 +369,10 @@ def get_asset(asset_name=None):
         "name": asset_name,
         "type": "asset"
     })
+
     if not asset_document:
         raise TypeError("Entity \"{}\" was not found in DB".format(asset_name))

-    project_document = io.find_one({"type": "project"})
-
-    for key in entity_data_keys_from_project_when_miss:
-        if asset_document["data"].get(key):
-            continue
-
-        value = project_document["data"].get(key)
-        if value is not None or key not in alternatives:
-            asset_document["data"][key] = value
-            continue
-
-        alt_key = alternatives[key]
-        value = asset_document["data"].get(alt_key)
-        if value is not None:
-            asset_document["data"][key] = value
-            continue
-
-        value = project_document["data"].get(alt_key)
-        if value:
-            asset_document["data"][key] = value
-            continue
-
-        if key in defaults:
-            asset_document["data"][key] = defaults[key]
-
-    for key in entity_keys_from_project_when_miss:
-        if asset_document.get(key):
-            continue
-
-        value = project_document.get(key)
-        if value is not None or key not in alternatives:
-            asset_document[key] = value
-            continue
-
-        alt_key = alternatives[key]
-        value = asset_document.get(alt_key)
-        if value:
-            asset_document[key] = value
-            continue
-
-        value = project_document.get(alt_key)
-        if value:
-            asset_document[key] = value
-            continue
-
-        if key in defaults:
-            asset_document[key] = defaults[key]
-
     return asset_document

From 9e13ac98ec34ac376a8f2e81f10722255b4597ab Mon Sep 17 00:00:00 2001
From: Jakub Jezek
Date: Wed, 12 Feb 2020 16:42:13 +0100
Subject: [PATCH 078/107] fix(nuke): cleanup and adding debug log

---
 pype/nuke/lib.py | 7 ++-----
 1 file changed, 2 insertions(+), 5 deletions(-)

diff --git a/pype/nuke/lib.py b/pype/nuke/lib.py
index c64dc0b828..6eb4da951c 100644
--- a/pype/nuke/lib.py
+++ b/pype/nuke/lib.py
@@ -519,11 +519,6 @@ class WorkfileSettings(object):
             self.data = kwargs

     def get_nodes(self, nodes=None, nodes_filter=None):
-        # filter out only dictionaries for node creation
-        #
-        # print("\n\n")
-        # pprint(self._nodes)
-        #
         if not isinstance(nodes, list) and not isinstance(nodes_filter, list):
             return [n for n in nuke.allNodes()]
@@ -791,6 +786,8 @@ class WorkfileSettings(object):
             return

         data = self._asset_entity["data"]
+        log.debug("__ asset data: `{}`".format(data))
+
         missing_cols = []
         check_cols = ["fps", "frameStart", "frameEnd", "handleStart",
                       "handleEnd"]

From e19f04ec8590f9aca24700b7a455a681e680ff8e Mon Sep 17 00:00:00 2001
From: Jakub Jezek
Date: Wed, 12 Feb 2020 17:14:34 +0100
Subject: [PATCH 079/107] fix(nk): multi-line string needed to be added to
 str()

---
 pype/plugins/nuke/load/load_gizmo_ip.py | 4 ++--
 pype/plugins/nuke/load/load_luts_ip.py  | 4 ++--
 2 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/pype/plugins/nuke/load/load_gizmo_ip.py b/pype/plugins/nuke/load/load_gizmo_ip.py
index 23d7ef2f4a..5fecbc4c5c 100644
--- a/pype/plugins/nuke/load/load_gizmo_ip.py
+++ b/pype/plugins/nuke/load/load_gizmo_ip.py
@@ -176,8 +176,8 @@ class LoadGizmoInputProcess(api.Loader):
         if len(viewer) > 0:
             viewer = viewer[0]
         else:
-            msg = "Please create Viewer node before you "
-            "run this action again"
+            msg = str("Please create Viewer node before you "
+                      "run this action again")
             self.log.error(msg)
             nuke.message(msg)
             return None
diff --git a/pype/plugins/nuke/load/load_luts_ip.py b/pype/plugins/nuke/load/load_luts_ip.py
index 2b38a9ff08..41cc6c1a43 100644
--- a/pype/plugins/nuke/load/load_luts_ip.py
+++ b/pype/plugins/nuke/load/load_luts_ip.py
@@ -276,8 +276,8 @@ class LoadLutsInputProcess(api.Loader):
         if len(viewer) > 0:
             viewer = viewer[0]
         else:
-            msg = "Please create Viewer node before you "
-            "run this action again"
+            msg = str("Please create Viewer node before you "
+                      "run this action again")
             self.log.error(msg)
             nuke.message(msg)
             return None

From bf35ee99f6e4c623d131aa7cc729e2f67f76e109 Mon Sep 17 00:00:00 2001
From: Milan Kolar
Date: Wed, 12 Feb 2020 17:45:54 +0100
Subject: [PATCH 080/107] add thumbnail to asset as well

---
 pype/ftrack/actions/action_store_thumbnails_to_avalon.py | 5 +++++
 1 file changed, 5 insertions(+)

diff --git a/pype/ftrack/actions/action_store_thumbnails_to_avalon.py b/pype/ftrack/actions/action_store_thumbnails_to_avalon.py
index ff97534656..d63d3a6ae3 100644
--- a/pype/ftrack/actions/action_store_thumbnails_to_avalon.py
+++ b/pype/ftrack/actions/action_store_thumbnails_to_avalon.py
@@ -256,6 +256,11 @@ class StoreThumbnailsToAvalon(BaseAction):
                 {"$set": {"data.thumbnail_id": thumbnail_id}}
             )

+            self.db_con.update_one(
+                {"_id": avalon_asset["_id"]},
+                {"$set": {"data.thumbnail_id": thumbnail_id}}
+            )
+
         return True

From 5d8e2dc37fc618304268f49291a38b69740dec82 Mon Sep 17 00:00:00 2001
From: Jakub Jezek
Date: Wed, 12 Feb 2020 18:00:59 +0100
Subject: [PATCH 081/107] fix(nk): swap `write` family for `render2d`

---
 pype/plugins/global/load/open_file.py         |  2 +-
 .../global/publish/collect_filesequences.py   | 12 +--
 pype/plugins/global/publish/extract_jpeg.py   | 93 ++++++++++---------
 3 files changed, 53 insertions(+), 54 deletions(-)

diff --git a/pype/plugins/global/load/open_file.py b/pype/plugins/global/load/open_file.py
index 9425eaab04..b496311e0c 100644
--- a/pype/plugins/global/load/open_file.py
+++ b/pype/plugins/global/load/open_file.py
@@ -18,7 +18,7 @@ def open(filepath):
 class Openfile(api.Loader):
     """Open Image Sequence with system default"""

-    families = ["write"]
+    families = ["render2d"]
     representations = ["*"]

     label = "Open"
diff --git a/pype/plugins/global/publish/collect_filesequences.py b/pype/plugins/global/publish/collect_filesequences.py
index 6c06229304..8b42606e4a 100644
--- a/pype/plugins/global/publish/collect_filesequences.py
+++ b/pype/plugins/global/publish/collect_filesequences.py
@@ -211,12 +211,10 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin):

                 # Get family from the data
                 families = data.get("families", ["render"])
-                if "render" not in families:
-                    families.append("render")
                 if "ftrack" not in families:
                     families.append("ftrack")
-                if "write" in instance_family:
-                    families.append("write")
+                if families_data and "render2d" in families_data:
+                    families.append("render2d")
                 if families_data and "slate" in families_data:
                     families.append("slate")
@@ -334,7 +332,7 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin):
                     "stagingDir": root,
                     "anatomy_template": "render",
                     "fps": fps,
-                    "tags": ["review"] if not baked_mov_path else [],
+                    "tags": ["review"] if not baked_mov_path else ["thumb-nuke"],
                 }
                 instance.data["representations"].append(
                     representation)
@@ -388,8 +386,8 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin):

             # If no start or end frame provided, get it from collection
             indices = list(collection.indexes)
-            start = data.get("frameStart", indices[0])
-            end = data.get("frameEnd", indices[-1])
+            start = int(data.get("frameStart", indices[0]))
+            end = int(data.get("frameEnd", indices[-1]))

             ext = list(collection)[0].split(".")[-1]

diff --git a/pype/plugins/global/publish/extract_jpeg.py b/pype/plugins/global/publish/extract_jpeg.py
index 4978649ba2..7c0820ea28 100644
--- a/pype/plugins/global/publish/extract_jpeg.py
+++ b/pype/plugins/global/publish/extract_jpeg.py
@@ -19,7 +19,7 @@ class ExtractJpegEXR(pyblish.api.InstancePlugin):
     label = "Extract Jpeg EXR"
     hosts = ["shell"]
     order = pyblish.api.ExtractorOrder
-    families = ["imagesequence", "render", "write", "source"]
+    families = ["imagesequence", "render", "render2d", "source"]
     enabled = False

     def process(self, instance):
@@ -41,62 +41,63 @@ class ExtractJpegEXR(pyblish.api.InstancePlugin):
         for repre in representations:
             self.log.debug(repre)
-            if 'review' not in repre['tags']:
-                return
+            if 'review' in repre['tags'] or "thumb-nuke" in repre['tags']:
+                if not isinstance(repre['files'], list):
+                    return

-            input_file = repre['files'][0]
+                input_file = repre['files'][0]

-            # input_file = (
-            #     collections[0].format('{head}{padding}{tail}') % start
-            # )
-            full_input_path = os.path.join(stagingdir, input_file)
-            self.log.info("input {}".format(full_input_path))
+                # input_file = (
+                #     collections[0].format('{head}{padding}{tail}') % start
+                # )
+                full_input_path = os.path.join(stagingdir, input_file)
+                self.log.info("input {}".format(full_input_path))

-            filename = os.path.splitext(input_file)[0]
-            if not filename.endswith('.'):
-                filename += "."
-            jpeg_file = filename + "jpg"
-            full_output_path = os.path.join(stagingdir, jpeg_file)
+                filename = os.path.splitext(input_file)[0]
+                if not filename.endswith('.'):
+                    filename += "."
+                jpeg_file = filename + "jpg"
+                full_output_path = os.path.join(stagingdir, jpeg_file)

-            self.log.info("output {}".format(full_output_path))
+                self.log.info("output {}".format(full_output_path))

-            config_data = instance.context.data['output_repre_config']
+                config_data = instance.context.data['output_repre_config']

-            proj_name = os.environ.get('AVALON_PROJECT', '__default__')
-            profile = config_data.get(proj_name, config_data['__default__'])
+                proj_name = os.environ.get('AVALON_PROJECT', '__default__')
+                profile = config_data.get(proj_name, config_data['__default__'])

-            jpeg_items = []
-            jpeg_items.append(
-                os.path.join(os.environ.get("FFMPEG_PATH"), "ffmpeg"))
-            # override file if already exists
-            jpeg_items.append("-y")
-            # use same input args like with mov
-            jpeg_items.extend(profile.get('input', []))
-            # input file
-            jpeg_items.append("-i {}".format(full_input_path))
-            # output file
-            jpeg_items.append(full_output_path)
+                jpeg_items = []
+                jpeg_items.append(
+                    os.path.join(os.environ.get("FFMPEG_PATH"), "ffmpeg"))
+                # override file if already exists
+                jpeg_items.append("-y")
+                # use same input args like with mov
+                jpeg_items.extend(profile.get('input', []))
+                # input file
+                jpeg_items.append("-i {}".format(full_input_path))
+                # output file
+                jpeg_items.append(full_output_path)

-            subprocess_jpeg = " ".join(jpeg_items)
+                subprocess_jpeg = " ".join(jpeg_items)

-            # run subprocess
-            self.log.debug("{}".format(subprocess_jpeg))
-            pype.api.subprocess(subprocess_jpeg)
+                # run subprocess
+                self.log.debug("{}".format(subprocess_jpeg))
+                pype.api.subprocess(subprocess_jpeg)

-            if "representations" not in instance.data:
-                instance.data["representations"] = []
+                if "representations" not in instance.data:
+                    instance.data["representations"] = []

-            representation = {
-                'name': 'thumbnail',
-                'ext': 'jpg',
-                'files': jpeg_file,
-                "stagingDir": stagingdir,
-                "thumbnail": True,
-                "tags": ['thumbnail']
-            }
+                representation = {
+                    'name': 'thumbnail',
+                    'ext': 'jpg',
+                    'files': jpeg_file,
+                    "stagingDir": stagingdir,
+                    "thumbnail": True,
+                    "tags": ['thumbnail']
+                }

-            # adding representation
-            self.log.debug("Adding: {}".format(representation))
-            representations_new.append(representation)
+                # adding representation
+                self.log.debug("Adding: {}".format(representation))
+                representations_new.append(representation)

         instance.data["representations"] = representations_new

From 00e77d690d6e8c42999f0ec154c71f84b9dbe52b Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Thu, 13 Feb 2020 11:36:55 +0100
Subject: [PATCH 082/107] added notelabellink to ignored entity types

---
 pype/ftrack/events/event_sync_to_avalon.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pype/ftrack/events/event_sync_to_avalon.py b/pype/ftrack/events/event_sync_to_avalon.py
index 49ac50c1db..708ae707e9 100644
--- a/pype/ftrack/events/event_sync_to_avalon.py
+++ b/pype/ftrack/events/event_sync_to_avalon.py
@@ -28,7 +28,7 @@ class SyncToAvalonEvent(BaseEvent):

     ignore_entTypes = [
         "socialfeed", "socialnotification", "note",
         "assetversion", "job", "user", "reviewsessionobject", "timer",
-        "timelog", "auth_userrole", "appointment"
+        "timelog", "auth_userrole", "appointment", "notelabellink"
     ]
     ignore_ent_types = ["Milestone"]
     ignore_keys = ["statusid", "thumbid"]

From 2ff72b5aeea0ce4c83e27b84f7da017733f7b489 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Thu, 13 Feb 2020 11:37:13 +0100
Subject: [PATCH 083/107] small cleanup in code

---
 pype/ftrack/events/event_sync_to_avalon.py | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/pype/ftrack/events/event_sync_to_avalon.py b/pype/ftrack/events/event_sync_to_avalon.py
index 708ae707e9..643a3d793e 100644
--- a/pype/ftrack/events/event_sync_to_avalon.py
+++ b/pype/ftrack/events/event_sync_to_avalon.py
@@ -573,8 +573,7 @@ class SyncToAvalonEvent(BaseEvent):
         if auto_sync is not True:
             return True

-        debug_msg = ""
-        debug_msg += "Updated: {}".format(len(updated))
+        debug_msg = "Updated: {}".format(len(updated))
         debug_action_map = {
             "add": "Created",
             "remove": "Removed",

From 5b1f33350b2d2b6d6d02d19919435aa73ef35c9a Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Thu, 13 Feb 2020 11:37:43 +0100
Subject: [PATCH 084/107] added another bug report message when configuration
 id is not for specific entity

---
 pype/ftrack/events/event_sync_to_avalon.py | 8 ++++++++
 1 file changed, 8 insertions(+)

diff --git a/pype/ftrack/events/event_sync_to_avalon.py b/pype/ftrack/events/event_sync_to_avalon.py
index 643a3d793e..c646756788 100644
--- a/pype/ftrack/events/event_sync_to_avalon.py
+++ b/pype/ftrack/events/event_sync_to_avalon.py
@@ -1544,6 +1544,14 @@ class SyncToAvalonEvent(BaseEvent):
                     entity_type_conf_ids[entity_type] = configuration_id
                     break

+            if not configuration_id:
+                self.log.warning(
+                    "BUG REPORT: Missing configuration for `{} < {} >`".format(
+                        entity_type, ent_info["entityType"]
+                    )
+                )
+                continue
+
             _entity_key = collections.OrderedDict({
                 "configuration_id": configuration_id,
                 "entity_id": ftrack_id

From bbe3ce3781cd4ff4fc5181e9237cc984c29c6836 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Thu, 13 Feb 2020 11:38:44 +0100
Subject: [PATCH 085/107] added temporary debug logs to find out which entity
 types we actually use instead of ignoring them

---
 pype/ftrack/events/event_sync_to_avalon.py | 33 ++++++++++++++++++++++
 1 file changed, 33 insertions(+)
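The diff below accumulates seen entity types per entityType and logs the collected mapping at most once every five minutes rather than on every event. The same throttling pattern as a standalone, runnable sketch (the class and names here are illustrative, not code from the repository):

import collections
import datetime

class ThrottledTypeLog:
    # Collect values per key and print a summary at most once per window.
    def __init__(self, window_seconds=300):
        self.window_seconds = window_seconds
        self.last_print = datetime.datetime.now()
        self.seen = collections.defaultdict(list)

    def record(self, key, value):
        # Remember each distinct value observed for the key.
        if value not in self.seen[key]:
            self.seen[key].append(value)

    def maybe_print(self):
        # Do nothing until the window since the last print has elapsed.
        now = datetime.datetime.now()
        if (now - self.last_print).total_seconds() < self.window_seconds:
            return
        self.last_print = now
        items = [
            "<{}> ({})".format(key, ", ".join(values))
            for key, values in self.seen.items()
        ]
        print("Known types {}".format(", ".join(items)))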
diff --git a/pype/ftrack/events/event_sync_to_avalon.py b/pype/ftrack/events/event_sync_to_avalon.py
index c646756788..345bc5b925 100644
--- a/pype/ftrack/events/event_sync_to_avalon.py
+++ b/pype/ftrack/events/event_sync_to_avalon.py
@@ -3,6 +3,7 @@
 import collections
 import copy
 import queue
 import time
+import datetime
 import atexit
 import traceback
@@ -51,9 +52,36 @@ class SyncToAvalonEvent(BaseEvent):

     def __init__(self, session, plugins_presets={}):
         '''Expects a ftrack_api.Session instance'''
+        # Debug settings
+        # - time expiration in seconds
+        self.debug_print_time_expiration = 5 * 60
+        # - store current time
+        self.debug_print_time = datetime.datetime.now()
+        # - store synchronize entity types to be able to use
+        #   only entityTypes in interest instead of filtering by ignored
+        self.debug_sync_types = collections.defaultdict(list)
+
+        # Set processing session to not use global
         self.set_process_session(session)
         super().__init__(session, plugins_presets)

+    def debug_logs(self):
+        """This is debug method for printing small debugs messages. """
+        now_datetime = datetime.datetime.now()
+        delta = now_datetime - self.debug_print_time
+        if delta.total_seconds() < self.debug_print_time_expiration:
+            return
+
+        self.debug_print_time = now_datetime
+        known_types_items = []
+        for entityType, entity_type in self.debug_sync_types.items():
+            known_types_items.append("{} <{}>".format(entity_type, entityType))
+
+        known_entityTypes = ", ".join(known_types_items)
+        self.log.debug(
+            "DEBUG MESSAGE: Known entityTypes {}".format(known_entityTypes)
+        )
+
     @property
     def cur_project(self):
         if self._cur_project is None:
@@ -484,6 +512,9 @@ class SyncToAvalonEvent(BaseEvent):
             if not entity_type or entity_type in self.ignore_ent_types:
                 continue

+            if entity_type not in self.debug_sync_types[entityType]:
+                self.debug_sync_types[entityType].append(entity_type)
+
             action = ent_info["action"]
             ftrack_id = ent_info["entityId"]
             if isinstance(ftrack_id, list):
@@ -633,6 +664,8 @@ class SyncToAvalonEvent(BaseEvent):
         self.ftrack_added = entities_by_action["add"]
         self.ftrack_updated = updated

+        self.debug_logs()
+
         self.log.debug("Synchronization begins")
         try:
             time_1 = time.time()

From 9fec5fa0e3f997e85d16ae5b83f3771c828a2de8 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Thu, 13 Feb 2020 12:08:06 +0100
Subject: [PATCH 086/107] fixed messages

---
 pype/ftrack/events/event_sync_to_avalon.py | 9 ++++++---
 1 file changed, 6 insertions(+), 3 deletions(-)

diff --git a/pype/ftrack/events/event_sync_to_avalon.py b/pype/ftrack/events/event_sync_to_avalon.py
index 345bc5b925..53de588bcc 100644
--- a/pype/ftrack/events/event_sync_to_avalon.py
+++ b/pype/ftrack/events/event_sync_to_avalon.py
@@ -75,11 +75,14 @@ class SyncToAvalonEvent(BaseEvent):
         self.debug_print_time = now_datetime
         known_types_items = []
         for entityType, entity_type in self.debug_sync_types.items():
-            known_types_items.append("{} <{}>".format(entity_type, entityType))
+            ent_types_msg = ", ".join(entity_type)
+            known_types_items.append(
+                "<{}> ({})".format(entityType, ent_types_msg)
+            )

         known_entityTypes = ", ".join(known_types_items)
         self.log.debug(
-            "DEBUG MESSAGE: Known entityTypes {}".format(known_entityTypes)
+            "DEBUG MESSAGE: Known types {}".format(known_entityTypes)
         )
@@ -1603,7 +1606,7 @@ class SyncToAvalonEvent(BaseEvent):
         try:
             # Commit changes of mongo_id to empty string
             self.process_session.commit()
-            self.log.debug("Commititng unsetting")
+            self.log.debug("Committing unsetting")
         except Exception:
             self.process_session.rollback()
             # TODO logging
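Note on the next patch: the error path it replaces called .format() on the return value of print(), which is None, so the handler raised AttributeError instead of printing anything. A minimal illustration of the difference (the URL value is hypothetical):

url = "https://studio.ftrackapp.com"  # hypothetical value

# Correct: format the message first, then print it.
print('Ftrack server: "{}"'.format(url))

# Broken: print(...) returns None and None has no .format(),
# so this raises AttributeError instead of reporting the error.
print('Ftrack server: "{}"').format(url)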
From f12bb0f8597bcbad0862b5eac50963d225e1284b Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Thu, 13 Feb 2020 12:08:26 +0100
Subject: [PATCH 087/107] fixed prints when credentials to event server are
 not valid

---
 pype/ftrack/ftrack_server/event_server_cli.py | 17 +++++++++++++----
 1 file changed, 13 insertions(+), 4 deletions(-)

diff --git a/pype/ftrack/ftrack_server/event_server_cli.py b/pype/ftrack/ftrack_server/event_server_cli.py
index b09b0bc84e..cae037f2d9 100644
--- a/pype/ftrack/ftrack_server/event_server_cli.py
+++ b/pype/ftrack/ftrack_server/event_server_cli.py
@@ -63,10 +63,19 @@ def validate_credentials(url, user, api):
         )
         session.close()
     except Exception as e:
-        print(
-            'ERROR: Can\'t log into Ftrack with used credentials:'
-            ' Ftrack server: "{}" // Username: {} // API key: {}'
-        ).format(url, user, api)
+        print("Can't log into Ftrack with used credentials:")
+        ftrack_cred = {
+            "Ftrack server": str(url),
+            "Username": str(user),
+            "API key": str(api)
+        }
+        item_lens = [len(key) + 1 for key in ftrack_cred.keys()]
+        justify_len = max(*item_lens)
+        for key, value in ftrack_cred.items():
+            print("{} {}".format(
+                (key + ":").ljust(justify_len, " "),
+                value
+            ))
         return False

     print('DEBUG: Credentials Username: "{}", API key: "{}" are valid.'.format(

From aea05e2fe912c1a46d60625dc57d0f73ec009165 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Thu, 13 Feb 2020 12:09:12 +0100
Subject: [PATCH 088/107] fixed error message formatting

---
 pype/ftrack/lib/ftrack_base_handler.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pype/ftrack/lib/ftrack_base_handler.py b/pype/ftrack/lib/ftrack_base_handler.py
index 8329505ffb..f11cb020e9 100644
--- a/pype/ftrack/lib/ftrack_base_handler.py
+++ b/pype/ftrack/lib/ftrack_base_handler.py
@@ -49,7 +49,7 @@ class BaseHandler(object):
             ).format(
                 str(type(session)),
                 str(ftrack_api.session.Session),
-                str(session_processor.ProcessSession)
+                str(SocketSession)
             ))
         self._session = session

From 75bff66ce21e88cd43f165f2355286882b0f4bf3 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Ond=C5=99ej=20Samohel?=
Date: Fri, 14 Feb 2020 10:40:22 +0000
Subject: [PATCH 089/107] submit_publish_job.py edited online with Bitbucket

---
 pype/plugins/global/publish/submit_publish_job.py | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/pype/plugins/global/publish/submit_publish_job.py b/pype/plugins/global/publish/submit_publish_job.py
index a9fa8febd4..792fc05a38 100644
--- a/pype/plugins/global/publish/submit_publish_job.py
+++ b/pype/plugins/global/publish/submit_publish_job.py
@@ -166,6 +166,8 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
         "PYPE_STUDIO_PROJECTS_PATH",
         "PYPE_STUDIO_PROJECTS_MOUNT"
     ]
+
+    deadline_pool = ""

     def _submit_deadline_post_job(self, instance, job):
         """
@@ -201,7 +203,8 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
             "JobDependency0": job["_id"],
             "UserName": job["Props"]["User"],
             "Comment": instance.context.data.get("comment", ""),
-            "Priority": job["Props"]["Pri"]
+            "Priority": job["Props"]["Pri"],
+            "Pool": self.deadline_pool
         },
         "PluginInfo": {
             "Version": "3.6",

From f6b91ed589f94da3c9e3989d1ce04b2aaa405122 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Fri, 14 Feb 2020 17:06:50 +0100
Subject: [PATCH 090/107] credentials functions are not private and are ready
 to store credentials by host and user

---
 pype/ftrack/lib/credentials.py | 163 ++++++++++++++++++++++-----------
 1 file changed, 109 insertions(+), 54 deletions(-)

diff --git a/pype/ftrack/lib/credentials.py b/pype/ftrack/lib/credentials.py
index 7e305942f2..16b1fb25fb 100644
--- a/pype/ftrack/lib/credentials.py
+++ b/pype/ftrack/lib/credentials.py
@@ -2,85 +2,140 @@
 import os
 import json
 import ftrack_api
 import appdirs
+import getpass
+try:
+    from urllib.parse import urlparse
+except ImportError:
+    from urlparse import urlparse

-config_path = os.path.normpath(appdirs.user_data_dir('pype-app', 'pype'))
-action_file_name = 'ftrack_cred.json'
-event_file_name = 'ftrack_event_cred.json'
-action_fpath = os.path.join(config_path, action_file_name)
-event_fpath = os.path.join(config_path, event_file_name)
-folders = set([os.path.dirname(action_fpath), os.path.dirname(event_fpath)])
+CONFIG_PATH = os.path.normpath(appdirs.user_data_dir("pype-app", "pype"))
+CREDENTIALS_FILE_NAME = "ftrack_cred.json"
+CREDENTIALS_PATH = os.path.join(CONFIG_PATH, CREDENTIALS_FILE_NAME)
+CREDENTIALS_FOLDER = os.path.dirname(CREDENTIALS_PATH)

-for folder in folders:
-    if not os.path.isdir(folder):
-        os.makedirs(folder)
+if not os.path.isdir(CREDENTIALS_FOLDER):
+    os.makedirs(CREDENTIALS_FOLDER)
+
+USER_GETTER = None


-def _get_credentials(event=False):
-    if event:
-        fpath = event_fpath
-    else:
-        fpath = action_fpath
+def get_ftrack_hostname(ftrack_server=None):
+    if not ftrack_server:
+        ftrack_server = os.environ["FTRACK_SERVER"]
+
+    if "//" not in ftrack_server:
+        ftrack_server = "//" + ftrack_server
+
+    return urlparse(ftrack_server).hostname
+
+
+def get_user():
+    if USER_GETTER:
+        return USER_GETTER()
+    return getpass.getuser()
+
+
+def get_credentials(ftrack_server=None, user=None):
     credentials = {}
-    try:
-        file = open(fpath, 'r')
-        credentials = json.load(file)
-    except Exception:
-        file = open(fpath, 'w')
+    if not os.path.exists(CREDENTIALS_PATH):
+        with open(CREDENTIALS_PATH, "w") as file:
+            file.write(json.dumps(credentials))
+        file.close()
+        return credentials

-    file.close()
+    with open(CREDENTIALS_PATH, "r") as file:
+        content = file.read()
+
+    hostname = get_ftrack_hostname(ftrack_server)
+    if not user:
+        user = get_user()
+
+    content_json = json.loads(content or "{}")
+    credentials = content_json.get(hostname, {}).get(user) or {}

     return credentials


-def _save_credentials(username, apiKey, event=False, auto_connect=None):
-    data = {
-        'username': username,
-        'apiKey': apiKey
+def save_credentials(ft_user, ft_api_key, ftrack_server=None, user=None):
+    hostname = get_ftrack_hostname(ftrack_server)
+    if not user:
+        user = get_user()
+
+    with open(CREDENTIALS_PATH, "r") as file:
+        content = file.read()
+
+    content_json = json.loads(content or "{}")
+    if hostname not in content_json:
+        content_json[hostname] = {}
+
+    content_json[hostname][user] = {
+        "username": ft_user,
+        "api_key": ft_api_key
     }

-    if event:
-        fpath = event_fpath
-        if auto_connect is None:
-            cred = _get_credentials(True)
-            auto_connect = cred.get('auto_connect', False)
-        data['auto_connect'] = auto_connect
-    else:
-        fpath = action_fpath
+    # Deprecated keys
+    if "username" in content_json:
+        content_json.pop("username")
+    if "apiKey" in content_json:
+        content_json.pop("apiKey")

-    file = open(fpath, 'w')
-    file.write(json.dumps(data))
-    file.close()
+    with open(CREDENTIALS_PATH, "w") as file:
+        file.write(json.dumps(content_json, indent=4))


-def _clear_credentials(event=False):
-    if event:
-        fpath = event_fpath
-    else:
-        fpath = action_fpath
-    open(fpath, 'w').close()
-    _set_env(None, None)
+def clear_credentials(ft_user=None, ftrack_server=None, user=None):
+    if not ft_user:
+        ft_user = os.environ.get("FTRACK_API_USER")
+
+    if not ft_user:
+        return
+
+    hostname = get_ftrack_hostname(ftrack_server)
+    if not user:
+        user = get_user()
+
+    with open(CREDENTIALS_PATH, "r") as file:
+        content = file.read()
+
+    content_json = json.loads(content or "{}")
+    if hostname not in content_json:
+        content_json[hostname] = {}
+
+    content_json[hostname].pop(user, None)
+
+    with open(CREDENTIALS_PATH, "w") as file:
+        file.write(json.dumps(content_json))


-def _set_env(username, apiKey):
-    if not username:
-        username = ''
-    if not apiKey:
-        apiKey = ''
-    os.environ['FTRACK_API_USER'] = username
-    os.environ['FTRACK_API_KEY'] = apiKey
+def set_env(ft_user=None, ft_api_key=None):
+    os.environ["FTRACK_API_USER"] = ft_user or ""
+    os.environ["FTRACK_API_KEY"] = ft_api_key or ""


-def _check_credentials(username=None, apiKey=None):
-
-    if username and apiKey:
-        _set_env(username, apiKey)
+def get_env_credentials():
+    return (
+        os.environ.get("FTRACK_API_USER"),
+        os.environ.get("FTRACK_API_KEY")
+    )
+
+
+def check_credentials(ft_user, ft_api_key, ftrack_server=None):
+    if not ftrack_server:
+        ftrack_server = os.environ["FTRACK_SERVER"]
+
+    if not ft_user or not ft_api_key:
+        return False

     try:
-        session = ftrack_api.Session()
+        session = ftrack_api.Session(
+            server_url=ftrack_server,
+            api_key=ft_api_key,
+            api_user=ft_user
+        )
         session.close()
-    except Exception as e:
+
+    except Exception:
         return False

     return True

From 0272d38c7eb98bb68341b1762b93f5da4571b695 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Fri, 14 Feb 2020 17:09:07 +0100
Subject: [PATCH 091/107] lib init does not import all credentials functions,
 only the credentials module

---
 pype/ftrack/lib/__init__.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pype/ftrack/lib/__init__.py b/pype/ftrack/lib/__init__.py
index eabfdf0d7d..9da3b819b3 100644
--- a/pype/ftrack/lib/__init__.py
+++ b/pype/ftrack/lib/__init__.py
@@ -1,5 +1,5 @@
 from . import avalon_sync
-from .credentials import *
+from . import credentials
 from .ftrack_app_handler import *
 from .ftrack_event_handler import *
 from .ftrack_action_handler import *

From 79245bcd00283fb8e424ce438e836af0b17eba70 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Fri, 14 Feb 2020 17:10:50 +0100
Subject: [PATCH 092/107] user module can execute callbacks on username change

---
 pype/user/user_module.py | 16 +++++++++++++++-
 pype/user/widget_user.py |  2 +-
 2 files changed, 16 insertions(+), 2 deletions(-)

diff --git a/pype/user/user_module.py b/pype/user/user_module.py
index d70885b211..a43866f471 100644
--- a/pype/user/user_module.py
+++ b/pype/user/user_module.py
@@ -19,8 +19,8 @@ class UserModule:
     log = pype.Logger().get_logger("UserModule", "user")

     def __init__(self, main_parent=None, parent=None):
+        self._callbacks_on_user_change = []
        self.cred = {}
-
         self.cred_path = os.path.normpath(os.path.join(
             self.cred_folder_path, self.cred_filename
         ))
@@ -28,6 +28,9 @@ class UserModule:

         self.load_credentials()

+    def register_callback_on_user_change(self, callback):
+        self._callbacks_on_user_change.append(callback)
+
     def tray_start(self):
         """Store credentials to env and preset them to widget"""
         username = ""
@@ -95,6 +98,17 @@ class UserModule:
             ))
         return self.save_credentials(getpass.getuser())

+    def change_credentials(self, username):
+        self.save_credentials(username)
+        for callback in self._callbacks_on_user_change:
+            try:
+                callback()
+            except Exception:
+                self.log.warning(
+                    "Failed to execute callback \"{}\".".format(str(callback)),
+                    exc_info=True
+                )
+
     def save_credentials(self, username):
         """Save credentials to JSON file, env and widget"""
         if username is None:
diff --git a/pype/user/widget_user.py b/pype/user/widget_user.py
index 7ca12ec4d4..27faa857f5 100644
--- a/pype/user/widget_user.py
+++ b/pype/user/widget_user.py
@@ -77,7 +77,7 @@ class UserWidget(QtWidgets.QWidget):
     def click_save(self):
         # all what should happen - validations and saving into appsdir
         username = self.input_username.text()
-        self.module.save_credentials(username)
+        self.module.change_credentials(username)
         self._close_widget()

     def closeEvent(self, event):

From ce5ad584dd405597272c0b592998cce7e9953ef8 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Fri, 14 Feb 2020 17:11:06 +0100
Subject: [PATCH 093/107] user module has get_user method to get currently set
 user

---
 pype/user/user_module.py | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/pype/user/user_module.py b/pype/user/user_module.py
index a43866f471..46ceb0031f 100644
--- a/pype/user/user_module.py
+++ b/pype/user/user_module.py
@@ -40,6 +40,9 @@ class UserModule:
             os.environ[self.env_name] = username
         self.widget_login.set_user(username)

+    def get_user(self):
+        return self.cred.get("username") or getpass.getuser()
+
     def process_modules(self, modules):
         """
         Gives ability to connect with imported modules from TrayManager.
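Patches 092-094 wire a small observer pattern: tray modules register callbacks on the user module and get notified when the username changes. A sketch of how a consuming module would hook in (the consumer class is hypothetical; only register_callback_on_user_change and get_user come from the diffs above):

class ExampleConsumer:
    # Hypothetical tray module that reacts to username changes.
    def changed_user(self):
        # e.g. drop cached sessions and re-validate credentials
        print("username changed, refreshing state")

    def process_modules(self, modules):
        user_module = modules.get("UserModule")
        if user_module is not None:
            user_module.register_callback_on_user_change(self.changed_user)
            print("current user:", user_module.get_user())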
From 908a89f4ca2a0af681021f9fb8c86c7fd4723a93 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Fri, 14 Feb 2020 17:11:44 +0100
Subject: [PATCH 094/107] ftrack module uses new credentials functions and has
 ability to change user on User module's user change

---
 pype/ftrack/tray/ftrack_module.py | 57 ++++++++++++++++++-------------
 pype/ftrack/tray/login_dialog.py  | 12 +++----
 2 files changed, 40 insertions(+), 29 deletions(-)

diff --git a/pype/ftrack/tray/ftrack_module.py b/pype/ftrack/tray/ftrack_module.py
index 250872f239..5811209a02 100644
--- a/pype/ftrack/tray/ftrack_module.py
+++ b/pype/ftrack/tray/ftrack_module.py
@@ -34,29 +34,28 @@ class FtrackModule:

     def validate(self):
         validation = False
-        cred = credentials._get_credentials()
-        try:
-            if 'username' in cred and 'apiKey' in cred:
-                validation = credentials._check_credentials(
-                    cred['username'],
-                    cred['apiKey']
-                )
-                if validation is False:
-                    self.show_login_widget()
-            else:
-                self.show_login_widget()
-
-        except Exception as e:
-            log.error("We are unable to connect to Ftrack: {0}".format(e))
-
-        validation = credentials._check_credentials()
-        if validation is True:
+        cred = credentials.get_credentials()
+        ft_user = cred.get("username")
+        ft_api_key = cred.get("api_key")
+        validation = credentials.check_credentials(ft_user, ft_api_key)
+        if validation:
+            credentials.set_env(ft_user, ft_api_key)
             log.info("Connected to Ftrack successfully")
             self.loginChange()
-        else:
-            log.warning("Please sign in to Ftrack")
-            self.bool_logged = False
-            self.set_menu_visibility()
+
+            return validation
+
+        if not validation and ft_user and ft_api_key:
+            log.warning(
+                "Current Ftrack credentials are not valid. {}: {} - {}".format(
+                    str(os.environ.get("FTRACK_SERVER")), ft_user, ft_api_key
+                )
+            )
+
+        log.info("Please sign in to Ftrack")
+        self.bool_logged = False
+        self.show_login_widget()
+        self.set_menu_visibility()

         return validation
@@ -67,7 +66,7 @@ def login(self):
             self.start_action_server()

     def logout(self):
-        credentials._clear_credentials()
+        credentials.clear_credentials()
         self.stop_action_server()

         log.info("Logged out of Ftrack")
@@ -307,11 +306,23 @@ class FtrackModule:
         except Exception as e:
             log.error("During Killing Timer event server: {0}".format(e))

+    def changed_user(self):
+        self.stop_action_server()
+        credentials.set_env()
+        self.validate()
+
     def process_modules(self, modules):
         if 'TimersManager' in modules:
             self.timer_manager = modules['TimersManager']
             self.timer_manager.add_module(self)

+        if "UserModule" in modules:
+            credentials.USER_GETTER = modules["UserModule"].get_user
+            modules["UserModule"].register_callback_on_user_change(
+                self.changed_user
+            )
+
+
     def start_timer_manager(self, data):
         if self.thread_timer is not None:
             self.thread_timer.ftrack_start_timer(data)
@@ -336,7 +347,7 @@ class FtrackEventsThread(QtCore.QThread):

     def __init__(self, parent):
         super(FtrackEventsThread, self).__init__()
-        cred = credentials._get_credentials()
+        cred = credentials.get_credentials()
         self.username = cred['username']
         self.user = None
         self.last_task = None
diff --git a/pype/ftrack/tray/login_dialog.py b/pype/ftrack/tray/login_dialog.py
index 4dcbec5ab3..5f3777f93e 100644
--- a/pype/ftrack/tray/login_dialog.py
+++ b/pype/ftrack/tray/login_dialog.py
@@ -204,11 +204,11 @@ class Login_Dialog_ui(QtWidgets.QWidget):
             self.setError("{0} {1}".format(msg, " and ".join(missing)))
             return

-        verification = credentials._check_credentials(username, apiKey)
+        verification = credentials.check_credentials(username, apiKey)

        if verification:
-            credentials._save_credentials(username, apiKey, self.is_event)
-            credentials._set_env(username, apiKey)
+            credentials.save_credentials(username, apiKey, self.is_event)
+            credentials.set_env(username, apiKey)
             if self.parent is not None:
                 self.parent.loginChange()
             self._close_widget()
@@ -304,11 +304,11 @@ class Login_Dialog_ui(QtWidgets.QWidget):
             self._login_server_thread.start(url)
             return

-        verification = credentials._check_credentials(username, apiKey)
+        verification = credentials.check_credentials(username, apiKey)

         if verification is True:
-            credentials._save_credentials(username, apiKey, self.is_event)
-            credentials._set_env(username, apiKey)
+            credentials.save_credentials(username, apiKey, self.is_event)
+            credentials.set_env(username, apiKey)
             if self.parent is not None:
                 self.parent.loginChange()
             self._close_widget()

From a7c4dffb42c78a096655efa50e6164e579584636 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Fri, 14 Feb 2020 17:12:05 +0100
Subject: [PATCH 095/107] event server cli also uses new credentials functions

---
 pype/ftrack/ftrack_server/event_server_cli.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/pype/ftrack/ftrack_server/event_server_cli.py b/pype/ftrack/ftrack_server/event_server_cli.py
index b09b0bc84e..d889b6be23 100644
--- a/pype/ftrack/ftrack_server/event_server_cli.py
+++ b/pype/ftrack/ftrack_server/event_server_cli.py
@@ -446,9 +446,9 @@ def main(argv):
     event_paths = kwargs.ftrackeventpaths

     if not kwargs.noloadcred:
-        cred = credentials._get_credentials(True)
+        cred = credentials.get_credentials(ftrack_url)
         username = cred.get('username')
-        api_key = cred.get('apiKey')
+        api_key = cred.get('api_key')

     if kwargs.ftrackuser:
         username = kwargs.ftrackuser
@@ -482,7 +482,7 @@ def main(argv):
         return 1

     if kwargs.storecred:
-        credentials._save_credentials(username, api_key, True)
+        credentials.save_credentials(username, api_key, ftrack_url)

     # Set Ftrack environments
     os.environ["FTRACK_SERVER"] = ftrack_url

From 4de7478d9dcf176eb349a52a20eddac76e0424e8 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Fri, 14 Feb 2020 17:19:13 +0100
Subject: [PATCH 096/107] again remove collect templates

---
 .../global/publish/collect_templates.py      | 119 ------------------
 1 file changed, 119 deletions(-)
 delete mode 100644 pype/plugins/global/publish/collect_templates.py

diff --git a/pype/plugins/global/publish/collect_templates.py b/pype/plugins/global/publish/collect_templates.py
deleted file mode 100644
index 3104b5b705..0000000000
--- a/pype/plugins/global/publish/collect_templates.py
+++ /dev/null
@@ -1,119 +0,0 @@
-"""
-Requires:
-    session -> AVALON_PROJECT
-    context -> anatomy (pypeapp.Anatomy)
-    instance -> subset
-    instance -> asset
-    instance -> family
-
-Provides:
-    instance -> template
-    instance -> assumedTemplateData
-    instance -> assumedDestination
-"""
-
-import os
-
-from avalon import io, api
-import pyblish.api
-
-
-class CollectTemplates(pyblish.api.InstancePlugin):
-    """Fill templates with data needed for publish"""
-
-    order = pyblish.api.CollectorOrder + 0.1
-    label = "Collect and fill Templates"
-    hosts = ["maya", "nuke", "standalonepublisher"]
-
-    def process(self, instance):
-        # get all the stuff from the database
-        subset_name = instance.data["subset"]
-        asset_name = instance.data["asset"]
-        project_name = api.Session["AVALON_PROJECT"]
-
-        project = io.find_one(
-            {
-                "type": "project",
-                "name": project_name
-            },
-            projection={"config": True, "data": True}
-        )
-
-        template = project["config"]["template"]["publish"]
-        anatomy = instance.context.data['anatomy']
-
-        asset = io.find_one({
-            "type": "asset",
-            "name": asset_name,
-            "parent": project["_id"]
-        })
-
-        assert asset, ("No asset found by the name '{}' "
-                       "in project '{}'".format(asset_name, project_name))
-        silo = asset.get('silo')
-
-        subset = io.find_one({
-            "type": "subset",
-            "name": subset_name,
-            "parent": asset["_id"]
-        })
-
-        # assume there is no version yet, we start at `1`
-        version = None
-        version_number = 1
-        if subset is not None:
-            version = io.find_one(
-                {
-                    "type": "version",
-                    "parent": subset["_id"]
-                },
-                sort=[("name", -1)]
-            )
-
-        # if there is a subset there ought to be version
-        if version is not None:
-            version_number += int(version["name"])
-
-        hierarchy = asset['data']['parents']
-        if hierarchy:
-            # hierarchy = os.path.sep.join(hierarchy)
-            hierarchy = os.path.join(*hierarchy)
-        else:
-            hierarchy = ""
-
-        template_data = {"root": api.Session["AVALON_PROJECTS"],
-                         "project": {"name": project_name,
-                                     "code": project['data']['code']},
-                         "silo": silo,
-                         "family": instance.data['family'],
-                         "asset": asset_name,
-                         "subset": subset_name,
-                         "version": version_number,
-                         "hierarchy": hierarchy.replace("\\", "/"),
-                         "representation": "TEMP"}
-
-        # Add datetime data to template data
-        datetime_data = instance.context.data.get("datetimeData") or {}
-        template_data.update(datetime_data)
-
-        resolution_width = instance.data.get("resolutionWidth")
-        resolution_height = instance.data.get("resolutionHeight")
-        fps = instance.data.get("fps")
-
-        if resolution_width:
-            template_data["resolution_width"] = resolution_width
-        if resolution_width:
-            template_data["resolution_height"] = resolution_height
-        if resolution_width:
-            template_data["fps"] = fps
-
-        instance.data["template"] = template
-        instance.data["assumedTemplateData"] = template_data
-
-        # We take the parent folder of representation 'filepath'
-        instance.data["assumedDestination"] = os.path.dirname(
-            (anatomy.format(template_data))["publish"]["path"]
-        )
-        self.log.info("Assumed Destination has been created...")
-        self.log.debug("__ assumedTemplateData: `{}`".format(instance.data["assumedTemplateData"]))
-        self.log.debug("__ template: `{}`".format(instance.data["template"]))

From 0618b7a85ff9767ac6f5d4eaf3f58bd72f2b433c Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Fri, 14 Feb 2020 17:20:04 +0100
Subject: [PATCH 097/107] fix order

---
 pype/plugins/global/publish/collect_resources_path.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pype/plugins/global/publish/collect_resources_path.py b/pype/plugins/global/publish/collect_resources_path.py
index 9fc8c576f5..734d1f84e4 100644
--- a/pype/plugins/global/publish/collect_resources_path.py
+++ b/pype/plugins/global/publish/collect_resources_path.py
@@ -19,7 +19,7 @@ class CollectResourcesPath(pyblish.api.InstancePlugin):
     """Generate directory path where the files and resources will be stored"""

     label = "Collect Resources Path"
-    order = pyblish.api.CollectorOrder + 0.995
+    order = pyblish.api.CollectorOrder + 0.495

     def process(self, instance):
         anatomy = instance.context.data["anatomy"]

From a7ca458e4ee1550859fee03f84592aea9615947e Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Fri, 14 Feb 2020 17:24:56 +0100
Subject: [PATCH 098/107] collect scene has publish set to True by default

---
 pype/plugins/maya/publish/collect_scene.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pype/plugins/maya/publish/collect_scene.py b/pype/plugins/maya/publish/collect_scene.py
index f2fbb4d623..089019f2d3 100644
---
a/pype/plugins/maya/publish/collect_scene.py +++ b/pype/plugins/maya/publish/collect_scene.py @@ -35,7 +35,7 @@ class CollectMayaScene(pyblish.api.ContextPlugin): "subset": subset, "asset": os.getenv("AVALON_ASSET", None), "label": subset, - "publish": False, + "publish": True, "family": 'workfile', "families": ['workfile'], "setMembers": [current_file] From 377513f01f77c49d656f152157a1245e63e3bab6 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 14 Feb 2020 17:25:25 +0100 Subject: [PATCH 099/107] removed locations from version --- pype/plugins/global/publish/integrate_new.py | 10 +--------- 1 file changed, 1 insertion(+), 9 deletions(-) diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py index d27582bb71..8735f8fed7 100644 --- a/pype/plugins/global/publish/integrate_new.py +++ b/pype/plugins/global/publish/integrate_new.py @@ -168,14 +168,11 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): if version_data_instance: version_data.update(version_data_instance) - # TODO remove avalon_location (shall we?) - avalon_location = api.Session["AVALON_LOCATION"] # TODO rename method from `create_version` to # `prepare_version` or similar... version = self.create_version( subset=subset, version_number=version_number, - locations=[avalon_location], data=version_data ) @@ -528,26 +525,21 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): return subset - def create_version(self, subset, version_number, locations, data=None): + def create_version(self, subset, version_number, data=None): """ Copy given source to destination Args: subset (dict): the registered subset of the asset version_number (int): the version number - locations (list): the currently registered locations Returns: dict: collection of data to create a version """ - # Imprint currently registered location - version_locations = [location for location in locations if - location is not None] return {"schema": "pype:version-3.0", "type": "version", "parent": subset["_id"], "name": version_number, - "locations": version_locations, "data": data} def create_version_data(self, context, instance): From 3d1e231a0db9f075eb7b6157cb99665f285e34e1 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 14 Feb 2020 17:41:24 +0100 Subject: [PATCH 100/107] added job to store thumbnails to avalon action --- .../action_store_thumbnails_to_avalon.py | 52 +++++++++++++++---- 1 file changed, 42 insertions(+), 10 deletions(-) diff --git a/pype/ftrack/actions/action_store_thumbnails_to_avalon.py b/pype/ftrack/actions/action_store_thumbnails_to_avalon.py index d63d3a6ae3..ce0dfeb244 100644 --- a/pype/ftrack/actions/action_store_thumbnails_to_avalon.py +++ b/pype/ftrack/actions/action_store_thumbnails_to_avalon.py @@ -1,6 +1,7 @@ import os import requests import errno +import json from bson.objectid import ObjectId from pype.ftrack import BaseAction @@ -41,13 +42,30 @@ class StoreThumbnailsToAvalon(BaseAction): # DEBUG LINE # root_path = r"C:\Users\jakub.trllo\Desktop\Tests\ftrack_thumbnails" + user = session.query( + "User where username is '{0}'".format(session.api_user) + ).one() + action_job = session.create("Job", { + "user": user, + "status": "running", + "data": json.dumps({ + "description": "Storing thumbnails to avalon." 
+            })
+        })
+        session.commit()
+
         thumbnail_roots = os.environ.get(self.thumbnail_key)
         if not thumbnail_roots:
+            msg = "`{}` environment is not set".format(self.thumbnail_key)
+
+            action_job["status"] = "failed"
+            session.commit()
+
+            self.log.warning(msg)
+
             return {
                 "success": False,
-                "message": "`{}` environment is not set".format(
-                    self.thumbnail_key
-                )
+                "message": msg
             }
 
         existing_thumbnail_root = None
@@ -57,11 +75,18 @@ class StoreThumbnailsToAvalon(BaseAction):
                 break
 
         if existing_thumbnail_root is None:
+            msg = (
+                "Can't access paths, set in `{}` ({})"
+            ).format(self.thumbnail_key, thumbnail_roots)
+
+            action_job["status"] = "failed"
+            session.commit()
+
+            self.log.warning(msg)
+
             return {
                 "success": False,
-                "message": (
-                    "Can't access paths, set in `{}` ({})"
-                ).format(self.thumbnail_key, thumbnail_roots)
+                "message": msg
             }
 
         project = get_project_from_entity(entities[0])
@@ -71,6 +96,9 @@ class StoreThumbnailsToAvalon(BaseAction):
 
         if "publish" not in anatomy.templates:
             msg = "Anatomy does not have set publish key!"
 
+            action_job["status"] = "failed"
+            session.commit()
+
             self.log.warning(msg)
 
             return {
@@ -84,6 +112,9 @@ class StoreThumbnailsToAvalon(BaseAction):
                 " template in Anatomy for project \"{}\""
             ).format(project_name)
 
+            action_job["status"] = "failed"
+            session.commit()
+
             self.log.warning(msg)
 
             return {
@@ -127,6 +158,9 @@ class StoreThumbnailsToAvalon(BaseAction):
                 " can offer. {}"
             ).format(submsg)
 
+            action_job["status"] = "failed"
+            session.commit()
+
             self.log.warning(msg)
 
             return {
@@ -256,10 +290,8 @@ class StoreThumbnailsToAvalon(BaseAction):
             {"$set": {"data.thumbnail_id": thumbnail_id}}
         )
 
-        self.db_con.update_one(
-            {"_id": avalon_asset["_id"]},
-            {"$set": {"data.thumbnail_id": thumbnail_id}}
-        )
+        action_job["status"] = "done"
+        session.commit()
 
         return True
 
From 5290f6dd58de1abf78be75ab54c949c84972ae83 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Fri, 14 Feb 2020 18:09:03 +0100
Subject: [PATCH 101/107] fix arguments appending

---
 pype/scripts/otio_burnin.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pype/scripts/otio_burnin.py b/pype/scripts/otio_burnin.py
index 590939df56..fd3c51816a 100644
--- a/pype/scripts/otio_burnin.py
+++ b/pype/scripts/otio_burnin.py
@@ -432,7 +432,7 @@ def burnins_from_data(
             if not value.startswith(TIME_CODE_KEY):
                 value_items = value.split(TIME_CODE_KEY)
                 text = value_items[0].format(**data)
-                args.append(value_items[0])
+                args.append(text)
 
             burnin.add_timecode(*args)
             continue

From feb2037c0259dba1fa5b130dd66da8655571ec6d Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Fri, 14 Feb 2020 18:09:42 +0100
Subject: [PATCH 102/107] exchange timecode and text keys in arguments

---
 pype/scripts/otio_burnin.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pype/scripts/otio_burnin.py b/pype/scripts/otio_burnin.py
index fd3c51816a..1d00a08521 100644
--- a/pype/scripts/otio_burnin.py
+++ b/pype/scripts/otio_burnin.py
@@ -34,7 +34,7 @@ DRAWTEXT = (
     "%(color)s@%(opacity).1f:fontsize=%(size)d:fontfile='%(font)s'"
 )
 TIMECODE = (
-    "drawtext=text=\\'%(text)s\\':timecode=\\'%(timecode)s\\'"
+    "drawtext=timecode=\\'%(timecode)s\\':text=\\'%(text)s\\'"
     ":timecode_rate=%(fps).2f:x=%(x)s:y=%(y)s:fontcolor="
     "%(color)s@%(opacity).1f:fontsize=%(size)d:fontfile='%(font)s'"
 )

From 773fbf106a89f6d901addf543dda849d86f8ae1d Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Fri, 14 Feb 2020 18:23:37 +0100
Subject: [PATCH 103/107] ftrack server won't raise exception if there are no
 event handlers to register

---
pype/ftrack/ftrack_server/ftrack_server.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/pype/ftrack/ftrack_server/ftrack_server.py b/pype/ftrack/ftrack_server/ftrack_server.py index eebc3f6ec4..8464203c1d 100644 --- a/pype/ftrack/ftrack_server/ftrack_server.py +++ b/pype/ftrack/ftrack_server/ftrack_server.py @@ -100,9 +100,9 @@ class FtrackServer: log.warning(msg, exc_info=e) if len(register_functions_dict) < 1: - raise Exception(( - "There are no events with register function." - " Registered paths: \"{}\"" + log.warning(( + "There are no events with `register` function" + " in registered paths: \"{}\"" ).format("| ".join(paths))) # Load presets for setting plugins @@ -122,7 +122,7 @@ class FtrackServer: else: register(self.session, plugins_presets=plugins_presets) - if function_counter%7 == 0: + if function_counter % 7 == 0: time.sleep(0.1) function_counter += 1 except Exception as exc: From 5f5a80818c20e26deeded4f616d477a479999ee8 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Sun, 16 Feb 2020 20:07:42 +0100 Subject: [PATCH 104/107] fix(global): fixing version collection --- pype/plugins/nuke/publish/collect_writes.py | 15 +++------------ 1 file changed, 3 insertions(+), 12 deletions(-) diff --git a/pype/plugins/nuke/publish/collect_writes.py b/pype/plugins/nuke/publish/collect_writes.py index bf1c6a4b66..c29f676ef7 100644 --- a/pype/plugins/nuke/publish/collect_writes.py +++ b/pype/plugins/nuke/publish/collect_writes.py @@ -52,9 +52,9 @@ class CollectNukeWrites(pyblish.api.InstancePlugin): output_dir = os.path.dirname(path) self.log.debug('output dir: {}'.format(output_dir)) - # get version to instance for integration - instance.data['version'] = instance.context.data.get( - "version", pype.get_version_from_path(nuke.root().name())) + # # get version to instance for integration + # instance.data['version'] = instance.context.data.get( + # "version", pype.get_version_from_path(nuke.root().name())) self.log.debug('Write Version: %s' % instance.data('version')) @@ -92,16 +92,7 @@ class CollectNukeWrites(pyblish.api.InstancePlugin): # Add version data to instance version_data = { - "handles": handle_start, - "handleStart": handle_start, - "handleEnd": handle_end, - "frameStart": first_frame + handle_start, - "frameEnd": last_frame - handle_end, - "version": int(instance.data['version']), "colorspace": node["colorspace"].value(), - "families": ["render"], - "subset": instance.data["subset"], - "fps": instance.context.data["fps"] } instance.data["family"] = "write" From e8499b43ff4cf6a0b9a15b502fbf164474ca0e49 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Sun, 16 Feb 2020 20:08:18 +0100 Subject: [PATCH 105/107] fix(global): wrong version format print --- pype/plugins/global/publish/integrate_new.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py index d27582bb71..bb65a02bce 100644 --- a/pype/plugins/global/publish/integrate_new.py +++ b/pype/plugins/global/publish/integrate_new.py @@ -160,7 +160,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): subset = self.get_subset(asset_entity, instance) version_number = instance.data["version"] - self.log.debug("Next version: v{0:03d}".format(version_number)) + self.log.debug("Next version: v{}".format(version_number)) version_data = self.create_version_data(context, instance) From 957ca8ecd2f03097e4c1d48dff955d49b4150825 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 17 Feb 2020 14:02:59 +0100 
Subject: [PATCH 106/107] fix current frame key --- pype/scripts/otio_burnin.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pype/scripts/otio_burnin.py b/pype/scripts/otio_burnin.py index 1d00a08521..e34f7235e4 100644 --- a/pype/scripts/otio_burnin.py +++ b/pype/scripts/otio_burnin.py @@ -378,7 +378,7 @@ def burnins_from_data( # Check frame start and add expression if is available if frame_start is not None: - data[CURRENT_FRAME_KEY] = r'%%{eif\:n+%d\:d}' % frame_start + data[CURRENT_FRAME_KEY[1:-1]] = r'%%{eif\:n+%d\:d}' % frame_start if frame_start_tc is not None: data[TIME_CODE_KEY[1:-1]] = TIME_CODE_KEY From 3922529058d43a631a7269ba4006707edd68c150 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 17 Feb 2020 15:54:44 +0100 Subject: [PATCH 107/107] escape colon and comma in texts --- pype/scripts/otio_burnin.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/pype/scripts/otio_burnin.py b/pype/scripts/otio_burnin.py index e34f7235e4..c61ea66d2d 100644 --- a/pype/scripts/otio_burnin.py +++ b/pype/scripts/otio_burnin.py @@ -199,7 +199,11 @@ class ModifiedBurnins(ffmpeg_burnins.Burnins): """ resolution = self.resolution data = { - 'text': text, + 'text': ( + text + .replace(",", r"\,") + .replace(':', r'\:') + ), 'color': options['font_color'], 'size': options['font_size'] }
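The public credential helpers introduced across PATCH 092-095 (`get_credentials`, `check_credentials`, `set_env`, `save_credentials`, `clear_credentials`) compose into the auto-login flow of `FtrackModule.validate()` in PATCH 094. A hedged, standalone sketch of that flow; the `credentials` import path and the `show_login_widget` callable are assumptions, everything else mirrors the diff:

    from pype.ftrack import credentials  # import path assumed

    def try_auto_login(show_login_widget):
        """Illustrative re-implementation of the validate() flow."""
        cred = credentials.get_credentials()
        ft_user = cred.get("username")
        ft_api_key = cred.get("api_key")
        if credentials.check_credentials(ft_user, ft_api_key):
            # Valid pair: export it to the environment for ftrack_api.
            credentials.set_env(ft_user, ft_api_key)
            return True
        # Missing or stale credentials: ask the user to sign in.
        show_login_widget()
        return False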
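PATCH 100 wraps the thumbnail action in an ftrack `Job` entity so users can see progress and failures in the web UI: the job is created as "running", flipped to "failed" on every early return, and set to "done" at the end. A condensed sketch of that pattern, using only the `Job` fields that appear in the diff (`user`, `status`, `data`); the `work` callable is a hypothetical stand-in for the action body, and the try/except framing is an assumption rather than part of the commit:

    import json

    def run_with_job(session, work):
        """Create a job, run `work`, and record the outcome."""
        user = session.query(
            "User where username is '{0}'".format(session.api_user)
        ).one()
        job = session.create("Job", {
            "user": user,
            "status": "running",
            "data": json.dumps({"description": "Storing thumbnails to avalon."})
        })
        session.commit()
        try:
            work()
        except Exception:
            job["status"] = "failed"
            session.commit()
            raise
        job["status"] = "done"
        session.commit()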
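PATCH 101, 102, 106 and 107 all harden the ffmpeg `drawtext` strings in `otio_burnin.py`. The underlying rules come from ffmpeg's filter grammar: `:` separates options inside a filter and `,` separates chained filters, so both must be escaped in user-supplied text, while the current-frame readout uses the `%{eif:expr:format}` text expansion offset by the first frame, as in PATCH 106. A self-contained sketch of both rules, independent of pype:

    def escape_drawtext(text):
        # Mirror PATCH 107: escape the option separator (colon) and the
        # filter separator (comma) so the text cannot break the filter.
        return text.replace(",", r"\,").replace(":", r"\:")

    frame_start = 1001
    # Mirror PATCH 106: print the running frame number, offset so the
    # first rendered frame reads 1001 ('n' is the 0-based frame index,
    # 'd' formats the expression result as a decimal integer).
    current_frame = r"%%{eif\:n+%d\:d}" % frame_start

    filter_str = "drawtext=text='{} frame {}':x=10:y=10:fontsize=24".format(
        escape_drawtext("shot_010, take: 2"), current_frame
    )
    print(filter_str)

Running this prints drawtext=text='shot_010\, take\: 2 frame %{eif\:n+1001\:d}':x=10:y=10:fontsize=24, which ffmpeg parses as a single drawtext filter instead of splitting on the embedded comma and colons.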