From 52e88951f4ced27620c6f34a220bc5ce0c622d17 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 21 Feb 2024 11:53:41 +0100 Subject: [PATCH 01/67] Traypublisher CSV ingest ayon conversion kickof Traypublisher CSV ingest ayon conversion kickoff Added functionality for ingesting CSV files into projects. Includes commands to ingest CSV data, publish the content, and create instances based on the CSV data. --- client/ayon_core/hosts/traypublisher/addon.py | 61 +- .../hosts/traypublisher/csv_publish.py | 76 ++ .../plugins/create/create_csv_ingest.py | 686 ++++++++++++++++++ .../collect_csv_ingest_instance_data.py | 36 + .../plugins/publish/extract_csv_file.py | 31 + .../publish/validate_existing_version.py | 1 + .../plugins/publish/validate_frame_ranges.py | 2 + client/ayon_core/plugins/publish/integrate.py | 3 +- 8 files changed, 894 insertions(+), 2 deletions(-) create mode 100644 client/ayon_core/hosts/traypublisher/csv_publish.py create mode 100644 client/ayon_core/hosts/traypublisher/plugins/create/create_csv_ingest.py create mode 100644 client/ayon_core/hosts/traypublisher/plugins/publish/collect_csv_ingest_instance_data.py create mode 100644 client/ayon_core/hosts/traypublisher/plugins/publish/extract_csv_file.py diff --git a/client/ayon_core/hosts/traypublisher/addon.py b/client/ayon_core/hosts/traypublisher/addon.py index 70bdfe9a64..f3884aedfe 100644 --- a/client/ayon_core/hosts/traypublisher/addon.py +++ b/client/ayon_core/hosts/traypublisher/addon.py @@ -1,6 +1,6 @@ import os -from ayon_core.lib import get_ayon_launcher_args +from pathlib import Path from ayon_core.lib.execute import run_detached_process from ayon_core.addon import ( click_wrap, @@ -57,3 +57,62 @@ def launch(): from ayon_core.tools import traypublisher traypublisher.main() + + +@cli_main.command() +@click_wrap.option( + "--csv-filepath", + help="Full path to CSV file with data", + type=str, + required=True +) +@click_wrap.option( + "--project-name", + help="Project name in which the context will be used", + type=str, + required=True +) +@click_wrap.option( + "--asset-name", + help="Asset name in which the context will be used", + type=str, + required=True +) +@click_wrap.option( + "--task-name", + help="Task name under Asset in which the context will be used", + type=str, + required=False +) +@click_wrap.option( + "--ignore-validators", + help="Option to ignore validators", + type=bool, + is_flag=True, + required=False +) +def ingestcsv( + csv_filepath, + project_name, + asset_name, + task_name, + ignore_validators +): + """Ingest CSV file into project. + + This command will ingest CSV file into project. CSV file must be in + specific format. See documentation for more information. 
+ """ + from .csv_publish import csvpublish + + # use Path to check if csv_filepath exists + if not Path(csv_filepath).exists(): + raise FileNotFoundError(f"File {csv_filepath} does not exist.") + + csvpublish( + csv_filepath, + project_name, + asset_name, + task_name, + ignore_validators + ) diff --git a/client/ayon_core/hosts/traypublisher/csv_publish.py b/client/ayon_core/hosts/traypublisher/csv_publish.py new file mode 100644 index 0000000000..f8eed2f2c5 --- /dev/null +++ b/client/ayon_core/hosts/traypublisher/csv_publish.py @@ -0,0 +1,76 @@ +import os + +import pyblish.api +import pyblish.util + +from ayon_core.client import get_asset_by_name +from ayon_core.lib.attribute_definitions import FileDefItem +from ayon_core.pipeline import install_host +from ayon_core.pipeline.create import CreateContext + +from ayon_core.hosts.traypublisher.api import TrayPublisherHost + + +def csvpublish( + csv_filepath, + project_name, + asset_name, + task_name=None, + ignore_validators=False +): + """Publish CSV file. + + Args: + csv_filepath (str): Path to CSV file. + project_name (str): Project name. + asset_name (str): Asset name. + task_name (Optional[str]): Task name. + ignore_validators (Optional[bool]): Option to ignore validators. + """ + + # initialization of host + host = TrayPublisherHost() + install_host(host) + + # setting host context into project + host.set_project_name(project_name) + + # add asset context to environment + # TODO: perhaps this can be done in a better way? + os.environ.update({ + "AVALON_PROJECT": project_name, + "AVALON_ASSET": asset_name, + "AVALON_TASK": task_name or "" + }) + + # form precreate data with field values + file_field = FileDefItem.from_paths([csv_filepath], False).pop().to_dict() + precreate_data = { + "csv_filepath_data": file_field, + } + + # create context initialization + create_context = CreateContext(host, headless=True) + asset_doc = get_asset_by_name( + project_name, + asset_name + ) + + create_context.create( + "io.openpype.creators.traypublisher.csv_ingest", + "Main", + asset_doc=asset_doc, + task_name=task_name, + pre_create_data=precreate_data, + ) + + # publishing context initialization + pyblish_context = pyblish.api.Context() + pyblish_context.data["create_context"] = create_context + + # redefine targets (skip 'local' to disable validators) + if ignore_validators: + targets = ["default", "ingest"] + + # publishing + pyblish.util.publish(context=pyblish_context, targets=targets) diff --git a/client/ayon_core/hosts/traypublisher/plugins/create/create_csv_ingest.py b/client/ayon_core/hosts/traypublisher/plugins/create/create_csv_ingest.py new file mode 100644 index 0000000000..aa986657dc --- /dev/null +++ b/client/ayon_core/hosts/traypublisher/plugins/create/create_csv_ingest.py @@ -0,0 +1,686 @@ +import os +import re +import csv +import clique +from copy import deepcopy, copy + +from ayon_core.client import get_asset_by_name +from ayon_core.pipeline.create import get_subset_name +from ayon_core.pipeline import CreatedInstance +from ayon_core.lib import FileDef, BoolDef +from ayon_core.lib.transcoding import ( + VIDEO_EXTENSIONS, IMAGE_EXTENSIONS +) + +from ayon_core.hosts.traypublisher.api.plugin import ( + TrayPublishCreator +) + + +class IngestCSV(TrayPublishCreator): + """CSV ingest creator class""" + + icon = "fa.file" + + label = "CSV Ingest" + family = "csv_ingest_file" + identifier = "io.ayon_core.creators.traypublisher.csv_ingest" + + default_variants = ["Main"] + + description = "Ingest products' data from CSV file" + 
detailed_description = """ +Ingest products' data from CSV file following column and representation +configuration in project settings. +""" + + # Position in the list of creators. + order = 10 + + # settings for this creator + columns_config = {} + representations_config = {} + + + def create(self, subset_name, instance_data, pre_create_data): + """Create an product from each row found in the CSV. + + Args: + subset_name (str): The subset name. + instance_data (dict): The instance data. + pre_create_data (dict): + """ + + csv_filepath_data = pre_create_data.get("csv_filepath_data", {}) + + folder = csv_filepath_data.get("directory", "") + if not os.path.exists(folder): + raise FileNotFoundError( + f"Directory '{folder}' does not exist." + ) + filename = csv_filepath_data.get("filenames", []) + self._process_csv_file(subset_name, instance_data, folder, filename[0]) + + def _process_csv_file( + self, subset_name, instance_data, staging_dir, filename): + """Process CSV file. + + Args: + subset_name (str): The subset name. + instance_data (dict): The instance data. + staging_dir (str): The staging directory. + filename (str): The filename. + """ + + # create new instance from the csv file via self function + self._pass_data_to_csv_instance( + instance_data, + staging_dir, + filename + ) + + csv_instance = CreatedInstance( + self.family, subset_name, instance_data, self + ) + self._store_new_instance(csv_instance) + + csv_instance["csvFileData"] = { + "filename": filename, + "staging_dir": staging_dir, + } + + # from special function get all data from csv file and convert them + # to new instances + csv_data_for_instances = self._get_data_from_csv( + staging_dir, filename) + + # create instances from csv data via self function + self._create_instances_from_csv_data( + csv_data_for_instances, staging_dir + ) + + def _create_instances_from_csv_data( + self, + csv_data_for_instances, + staging_dir + ): + """Create instances from csv data""" + + for asset_name, _data in csv_data_for_instances.items(): + asset_doc = _data["asset_doc"] + products = _data["products"] + + for instance_name, product_data in products.items(): + # get important instance variables + task_name = product_data["task_name"] + variant = product_data["variant"] + product_type = product_data["product_type"] + version = product_data["version"] + + # create subset/product name + product_name = get_subset_name( + product_type, + variant, + task_name, + asset_doc, + ) + + # make sure frame start/end is inherited from csv columns + # expected frame range data are handles excluded + for _, repre_data in product_data["representations"].items(): # noqa: E501 + frame_start = repre_data["frameStart"] + frame_end = repre_data["frameEnd"] + handle_start = repre_data["handleStart"] + handle_end = repre_data["handleEnd"] + fps = repre_data["fps"] + break + + # try to find any version comment in representation data + version_comment = next( + iter( + repre_data["comment"] + for _, repre_data in product_data["representations"].items() # noqa: E501 + if repre_data["comment"] + ), + None + ) + + # try to find any slate switch in representation data + slate_exists = any( + repre_data["slate"] + for _, repre_data in product_data["representations"].items() # noqa: E501 + ) + + # get representations from product data + representations = product_data["representations"] + label = f"{asset_name}_{product_name}_v{version:>03}" + + families = ["csv_ingest"] + if slate_exists: + # adding slate to families mainly for loaders to be able + # to filter out 
slates + families.append("slate") + + # make product data + product_data = { + "name": instance_name, + "asset": asset_name, + "families": families, + "label": label, + "task": task_name, + "variant": variant, + "source": "csv", + "frameStart": frame_start, + "frameEnd": frame_end, + "handleStart": handle_start, + "handleEnd": handle_end, + "fps": fps, + "version": version, + "comment": version_comment, + } + + # create new instance + new_instance = CreatedInstance( + product_type, product_name, product_data, self + ) + self._store_new_instance(new_instance) + + if not new_instance.get("prepared_data_for_repres"): + new_instance["prepared_data_for_repres"] = [] + + base_thumbnail_repre_data = { + "name": "thumbnail", + "ext": None, + "files": None, + "stagingDir": None, + "stagingDir_persistent": True, + "tags": ["thumbnail", "delete"], + } + # need to populate all thumbnails for all representations + # so we can check if unique thumbnail per representation + # is needed + thumbnails = [ + repre_data["thumbnailPath"] + for repre_data in representations.values() + if repre_data["thumbnailPath"] + ] + multiple_thumbnails = len(set(thumbnails)) > 1 + explicit_output_name = None + thumbnails_processed = False + for filepath, repre_data in representations.items(): + # check if any review derivate tag is present + reviewable = any( + tag for tag in repre_data.get("tags", []) + # tag can be `ftrackreview` or `review` + if "review" in tag + ) + # since we need to populate multiple thumbnails as + # representation with outputName for (Ftrack instance + # integrator) pairing with reviewable video representations + if ( + thumbnails + and multiple_thumbnails + and reviewable + ): + # multiple unique thumbnails per representation needs + # grouping by outputName + # mainly used in Ftrack instance integrator + explicit_output_name = repre_data["representationName"] + relative_thumbnail_path = repre_data["thumbnailPath"] + # representation might not have thumbnail path + # so ignore this one + if not relative_thumbnail_path: + continue + thumb_dir, thumb_file = \ + self._get_refactor_thumbnail_path( + staging_dir, relative_thumbnail_path) + filename, ext = os.path.splitext(thumb_file) + thumbnail_repr_data = deepcopy( + base_thumbnail_repre_data) + thumbnail_repr_data.update({ + "name": "thumbnail_{}".format(filename), + "ext": ext[1:], + "files": thumb_file, + "stagingDir": thumb_dir, + "outputName": explicit_output_name, + }) + new_instance["prepared_data_for_repres"].append( + ("_thumbnail_", thumbnail_repr_data) + ) + elif ( + thumbnails + and not multiple_thumbnails + and not thumbnails_processed + or not reviewable + ): + if not thumbnails: + continue + # here we will use only one thumbnail for + # all representations + relative_thumbnail_path = repre_data["thumbnailPath"] + if not relative_thumbnail_path: + relative_thumbnail_path = thumbnails.pop() + thumb_dir, thumb_file = \ + self._get_refactor_thumbnail_path( + staging_dir, relative_thumbnail_path) + _, ext = os.path.splitext(thumb_file) + thumbnail_repr_data = deepcopy( + base_thumbnail_repre_data) + thumbnail_repr_data.update({ + "ext": ext[1:], + "files": thumb_file, + "stagingDir": thumb_dir + }) + new_instance["prepared_data_for_repres"].append( + ("_thumbnail_", thumbnail_repr_data) + ) + thumbnails_processed = True + + # get representation data + representation_data = self._get_representation_data( + filepath, repre_data, staging_dir, + explicit_output_name + ) + + new_instance["prepared_data_for_repres"].append( + 
(repre_data["colorspace"], representation_data) + ) + + def _get_refactor_thumbnail_path( + self, staging_dir, relative_thumbnail_path): + thumbnail_abs_path = os.path.join( + staging_dir, relative_thumbnail_path) + return os.path.split( + thumbnail_abs_path) + + def _get_representation_data( + self, filepath, repre_data, staging_dir, explicit_output_name=None + ): + """Get representation data + + Args: + filepath (str): Filepath to representation file. + repre_data (dict): Representation data from CSV file. + staging_dir (str): Staging directory. + explicit_output_name (Optional[str]): Explicit output name. + For grouping purposes with reviewable components. + Defaults to None. + """ + + # get extension of file + basename = os.path.basename(filepath) + _, extension = os.path.splitext(filepath) + + # validate filepath is having correct extension based on output + config_repre_data = self.representations_config["representations"] + repre_name = repre_data["representationName"] + if repre_name not in config_repre_data: + raise KeyError( + f"Representation '{repre_name}' not found " + "in config representation data." + ) + validate_extensions = config_repre_data[repre_name]["extensions"] + if extension not in validate_extensions: + raise TypeError( + f"File extension '{extension}' not valid for " + f"output '{validate_extensions}'." + ) + + is_sequence = (extension in IMAGE_EXTENSIONS) + # convert ### string in file name to %03d + # this is for correct frame range validation + # example: file.###.exr -> file.%03d.exr + if "#" in basename: + padding = len(basename.split("#")) - 1 + basename = basename.replace("#" * padding, f"%0{padding}d") + is_sequence = True + + # make absolute path to file + absfilepath = os.path.normpath(os.path.join(staging_dir, filepath)) + dirname = os.path.dirname(absfilepath) + + # check if dirname exists + if not os.path.isdir(dirname): + raise NotADirectoryError( + f"Directory '{dirname}' does not exist." + ) + + # collect all data from dirname + paths_for_collection = [] + for file in os.listdir(dirname): + filepath = os.path.join(dirname, file) + paths_for_collection.append(filepath) + + collections, _ = clique.assemble(paths_for_collection) + + if collections: + collections = collections[0] + else: + if is_sequence: + raise ValueError( + f"No collections found in directory '{dirname}'." 
+ ) + + frame_start = None + frame_end = None + if is_sequence: + files = [os.path.basename(file) for file in collections] + frame_start = list(collections.indexes)[0] + frame_end = list(collections.indexes)[-1] + else: + files = basename + + tags = deepcopy(repre_data["tags"]) + # if slate in repre_data is True then remove one frame from start + if repre_data["slate"]: + tags.append("has_slate") + + # get representation data + representation_data = { + "name": repre_name, + "ext": extension[1:], + "files": files, + "stagingDir": dirname, + "stagingDir_persistent": True, + "tags": tags, + } + if extension in VIDEO_EXTENSIONS: + representation_data.update({ + "fps": repre_data["fps"], + "outputName": repre_name, + }) + + if explicit_output_name: + representation_data["outputName"] = explicit_output_name + + if frame_start: + representation_data["frameStart"] = frame_start + if frame_end: + representation_data["frameEnd"] = frame_end + + return representation_data + + def _get_data_from_csv( + self, package_dir, filename + ): + """Generate instances from the csv file""" + # get current project name and code from context.data + project_name = self.create_context.get_current_project_name() + + csv_file_path = os.path.join( + package_dir, filename + ) + + # make sure csv file contains columns from following list + required_columns = [ + name for name, value in self.columns_config["columns"].items() + if value["required"] + ] + # get data from csv file + with open(csv_file_path, "r") as csv_file: + csv_reader = csv.DictReader( + csv_file, delimiter=self.columns_config["csv_delimiter"]) + + # fix fieldnames + # sometimes someone can keep extra space at the start or end of + # the column name + all_columns = [ + " ".join(column.rsplit()) for column in csv_reader.fieldnames] + # return back fixed fieldnames + csv_reader.fieldnames = all_columns + + # check if csv file contains all required columns + if any(column not in all_columns for column in required_columns): + raise KeyError( + f"Missing required columns: {required_columns}" + ) + + csv_data = {} + # get data from csv file + for row in csv_reader: + # Get required columns first + context_asset_name = self._get_row_value_with_validation( + "Folder Context", row) + task_name = self._get_row_value_with_validation( + "Task Name", row) + version = self._get_row_value_with_validation( + "Version", row) + + # Get optional columns + variant = self._get_row_value_with_validation( + "Variant", row) + product_type = self._get_row_value_with_validation( + "Product Type", row) + + pre_product_name = ( + f"{task_name}{variant}{product_type}" + f"{version}".replace(" ", "").lower() + ) + + # get representation data + filename, representation_data = \ + self._get_representation_row_data(row) + + # get all csv data into one dict and make sure there are no + # duplicates data are already validated and sorted under + # correct existing asset also check if asset exists and if + # task name is valid task in asset doc and representations + # are distributed under products following variants + if context_asset_name not in csv_data: + asset_doc = get_asset_by_name( + project_name, context_asset_name) + + # make sure asset exists + if not asset_doc: + raise ValueError( + f"Asset '{context_asset_name}' not found." + ) + # check if task name is valid task in asset doc + if task_name not in asset_doc["data"]["tasks"]: + raise ValueError( + f"Task '{task_name}' not found in asset doc." 
+ ) + + csv_data[context_asset_name] = { + "asset_doc": asset_doc, + "products": { + pre_product_name: { + "task_name": task_name, + "variant": variant, + "product_type": product_type, + "version": version, + "representations": { + filename: representation_data, + }, + } + } + } + else: + asset_doc = csv_data[context_asset_name]["asset_doc"] + csv_products = csv_data[context_asset_name]["products"] + if pre_product_name not in csv_products: + csv_products[pre_product_name] = { + "task_name": task_name, + "variant": variant, + "product_type": product_type, + "version": version, + "representations": { + filename: representation_data, + }, + } + else: + csv_representations = \ + csv_products[pre_product_name]["representations"] + if filename in csv_representations: + raise ValueError( + f"Duplicate filename '{filename}' in csv file." + ) + csv_representations[filename] = representation_data + + return csv_data + + def _get_representation_row_data(self, row_data): + """Get representation row data""" + # Get required columns first + file_path = self._get_row_value_with_validation( + "File Path", row_data) + frame_start = self._get_row_value_with_validation( + "Frame Start", row_data) + frame_end = self._get_row_value_with_validation( + "Frame End", row_data) + handle_start = self._get_row_value_with_validation( + "Handle Start", row_data) + handle_end = self._get_row_value_with_validation( + "Handle End", row_data) + fps = self._get_row_value_with_validation( + "FPS", row_data) + + # Get optional columns + thumbnail_path = self._get_row_value_with_validation( + "Thumbnail", row_data) + colorspace = self._get_row_value_with_validation( + "Colorspace", row_data) + comment = self._get_row_value_with_validation( + "Version Comment", row_data) + repre = self._get_row_value_with_validation( + "Representation", row_data) + slate_exists = self._get_row_value_with_validation( + "Slate Exists", row_data) + repre_tags = self._get_row_value_with_validation( + "Representation Tags", row_data) + + # convert tags value to list + tags_list = copy(self.representations_config["default_tags"]) + if repre_tags: + tags_list = [] + tags_delimiter = self.representations_config["tags_delimiter"] + # strip spaces from repre_tags + if tags_delimiter in repre_tags: + tags = repre_tags.split(tags_delimiter) + for _tag in tags: + tags_list.append(("".join(_tag.strip())).lower()) + else: + tags_list.append(repre_tags) + + representation_data = { + "colorspace": colorspace, + "comment": comment, + "representationName": repre, + "slate": slate_exists, + "tags": tags_list, + "thumbnailPath": thumbnail_path, + "frameStart": int(frame_start), + "frameEnd": int(frame_end), + "handleStart": int(handle_start), + "handleEnd": int(handle_end), + "fps": float(fps), + } + + return file_path, representation_data + + def _get_row_value_with_validation( + self, column_name, row_data, default_value=None + ): + """Get row value with validation""" + columns_config = self.columns_config["columns"] + # get column data from column config + column_data = columns_config.get(column_name) + if not column_data: + raise KeyError( + f"Column '{column_name}' not found in column config." + ) + + # get column value from row + column_value = row_data.get(column_name) + column_required = column_data["required"] + + # check if column value is not empty string and column is required + if column_value == "" and column_required: + raise ValueError( + f"Value in column '{column_name}' is required." 
+ ) + + # get column type + column_type = column_data["type"] + # get column validation regex + column_validation = column_data["validate"] + # get column default value + column_default = default_value or column_data["default"] + + if column_type in ["number", "decimal"] and column_default == 0: + column_default = None + + # check if column value is not empty string + if column_value == "": + # set default value if column value is empty string + column_value = column_default + + # set column value to correct type following column type + if column_type == "number" and column_value is not None: + column_value = int(column_value) + elif column_type == "decimal" and column_value is not None: + column_value = float(column_value) + elif column_type == "bool": + column_value = column_value in ["true", "True"] + + # check if column value matches validation regex + if ( + column_value is not None and + not re.match(str(column_validation), str(column_value)) + ): + raise ValueError( + f"Column '{column_name}' value '{column_value}' " + f"does not match validation regex '{column_validation}' \n" + f"Row data: {row_data} \n" + f"Column data: {column_data}" + ) + + return column_value + + def _pass_data_to_csv_instance( + self, instance_data, staging_dir, filename + ): + """Pass CSV representation file to instance data""" + + representation = { + "name": "csv", + "ext": "csv", + "files": filename, + "stagingDir": staging_dir, + "stagingDir_persistent": True, + } + + instance_data.update({ + "label": f"CSV: {filename}", + "representations": [representation], + "stagingDir": staging_dir, + "stagingDir_persistent": True, + }) + + def get_instance_attr_defs(self): + return [ + BoolDef( + "add_review_family", + default=True, + label="Review" + ) + ] + + def get_pre_create_attr_defs(self): + """Creating pre-create attributes at creator plugin. + + Returns: + list: list of attribute object instances + """ + # Use same attributes as for instance attrobites + attr_defs = [ + FileDef( + "csv_filepath_data", + folders=False, + extensions=[".csv"], + allow_sequences=False, + single_item=True, + label="CSV File", + ), + ] + return attr_defs diff --git a/client/ayon_core/hosts/traypublisher/plugins/publish/collect_csv_ingest_instance_data.py b/client/ayon_core/hosts/traypublisher/plugins/publish/collect_csv_ingest_instance_data.py new file mode 100644 index 0000000000..0da3ebed81 --- /dev/null +++ b/client/ayon_core/hosts/traypublisher/plugins/publish/collect_csv_ingest_instance_data.py @@ -0,0 +1,36 @@ +from pprint import pformat +import pyblish.api +from ayon_core.pipeline import publish + + +class CollectCSVIngestInstancesData( + pyblish.api.InstancePlugin, + publish.AYONPyblishPluginMixin, + publish.ColormanagedPyblishPluginMixin +): + """Collect CSV Ingest data from instance. + """ + + label = "Collect CSV Ingest instances data" + order = pyblish.api.CollectorOrder + 0.1 + hosts = ["traypublisher"] + families = ["csv_ingest"] + + def process(self, instance): + self.log.info(f"Collecting {instance.name}") + + # expecting [(colorspace, repre_data), ...] 
+ prepared_repres_data_items = instance.data[ + "prepared_data_for_repres"] + + for colorspace, repre_data in prepared_repres_data_items: + # only apply colorspace to those which are not marked as thumbnail + if colorspace != "_thumbnail_": + # colorspace name is passed from CSV column + self.set_representation_colorspace( + repre_data, instance.context, colorspace + ) + + instance.data["representations"].append(repre_data) + + self.log.debug(pformat(instance.data)) diff --git a/client/ayon_core/hosts/traypublisher/plugins/publish/extract_csv_file.py b/client/ayon_core/hosts/traypublisher/plugins/publish/extract_csv_file.py new file mode 100644 index 0000000000..4bdf7c0493 --- /dev/null +++ b/client/ayon_core/hosts/traypublisher/plugins/publish/extract_csv_file.py @@ -0,0 +1,31 @@ +import pyblish.api + +from ayon_core.pipeline import publish + + +class ExtractCSVFile(publish.Extractor): + """ + Extractor export CSV file + """ + + label = "Extract CSV file" + order = pyblish.api.ExtractorOrder - 0.45 + families = ["csv_ingest_file"] + hosts = ["traypublisher"] + + def process(self, instance): + + csv_file_data = instance.data["csvFileData"] + + representation_csv = { + 'name': "csv_data", + 'ext': "csv", + 'files': csv_file_data["filename"], + "stagingDir": csv_file_data["staging_dir"], + "stagingDir_persistent": True + } + + instance.data["representations"].append(representation_csv) + + self.log.info("Added CSV file representation: {}".format( + representation_csv)) diff --git a/client/ayon_core/hosts/traypublisher/plugins/publish/validate_existing_version.py b/client/ayon_core/hosts/traypublisher/plugins/publish/validate_existing_version.py index 6a85f92ce1..7a35a19a85 100644 --- a/client/ayon_core/hosts/traypublisher/plugins/publish/validate_existing_version.py +++ b/client/ayon_core/hosts/traypublisher/plugins/publish/validate_existing_version.py @@ -16,6 +16,7 @@ class ValidateExistingVersion( order = ValidateContentsOrder hosts = ["traypublisher"] + targets = ["local"] actions = [RepairAction] diff --git a/client/ayon_core/hosts/traypublisher/plugins/publish/validate_frame_ranges.py b/client/ayon_core/hosts/traypublisher/plugins/publish/validate_frame_ranges.py index cd4a98b84d..ca53a2c8ef 100644 --- a/client/ayon_core/hosts/traypublisher/plugins/publish/validate_frame_ranges.py +++ b/client/ayon_core/hosts/traypublisher/plugins/publish/validate_frame_ranges.py @@ -16,6 +16,8 @@ class ValidateFrameRange(OptionalPyblishPluginMixin, label = "Validate Frame Range" hosts = ["traypublisher"] families = ["render", "plate"] + targets = ["local"] + order = ValidateContentsOrder optional = True diff --git a/client/ayon_core/plugins/publish/integrate.py b/client/ayon_core/plugins/publish/integrate.py index a502031595..2788875c23 100644 --- a/client/ayon_core/plugins/publish/integrate.py +++ b/client/ayon_core/plugins/publish/integrate.py @@ -140,7 +140,8 @@ class IntegrateAsset(pyblish.api.InstancePlugin): "uasset", "blendScene", "yeticacheUE", - "tycache" + "tycache", + "csv_ingest_file", ] default_template_name = "publish" From 6274e5259a8b1a0daa31fe96847ead0a6e1cea74 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 22 Feb 2024 17:15:20 +0100 Subject: [PATCH 02/67] Settings models and validators for batch movie creation and CSV ingestion plugins. Update addon version to 0.1.4. 
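
For context, a minimal CSV row that the default column configuration introduced here is intended to accept could look like the following. Only the required columns are shown, the values are hypothetical placeholders rather than data from this changeset, and the comma delimiter matches the default "csv_delimiter" setting:

    File Path,Folder Context,Task Name,Version,Frame Start,Frame End,Handle Start,Handle End,FPS
    renders/sh010_comp.####.exr,sh010,compositing,1,1001,1100,0,0,24.0

Each value is expected to match the per-column regular expression and type (text, number, decimal or bool) declared in these settings; optional columns such as Variant, Product Type, Representation and Representation Tags can be appended in the same way.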
--- .../server/settings/creator_plugins.py | 295 ++++++++++++++++++ server_addon/traypublisher/server/version.py | 2 +- 2 files changed, 296 insertions(+), 1 deletion(-) diff --git a/server_addon/traypublisher/server/settings/creator_plugins.py b/server_addon/traypublisher/server/settings/creator_plugins.py index bf66d9a088..7ce241faa6 100644 --- a/server_addon/traypublisher/server/settings/creator_plugins.py +++ b/server_addon/traypublisher/server/settings/creator_plugins.py @@ -1,4 +1,7 @@ +from pydantic import validator from ayon_server.settings import BaseSettingsModel, SettingsField +from ayon_server.settings.validators import ensure_unique_names +from ayon_server.exceptions import BadRequestException class BatchMovieCreatorPlugin(BaseSettingsModel): @@ -22,11 +25,137 @@ class BatchMovieCreatorPlugin(BaseSettingsModel): ) +class ColumnItemModel(BaseSettingsModel): + """Allows to publish multiple video files in one go.
Name of matching + asset is parsed from file names ('asset.mov', 'asset_v001.mov', + 'my_asset_to_publish.mov')""" + + name: str = SettingsField( + title="Name", + default="" + ) + + type: str = SettingsField( + title="Type", + default="" + ) + + default: str = SettingsField( + title="Default", + default="" + ) + + required: bool = SettingsField( + title="Required", + default=False + ) + + validate: str = SettingsField( + title="Validate", + default="" + ) + + +class ColumnConfigModel(BaseSettingsModel): + """Allows to publish multiple video files in one go.
Name of matching + asset is parsed from file names ('asset.mov', 'asset_v001.mov', + 'my_asset_to_publish.mov')""" + + csv_delimiter: str = SettingsField( + title="CSV delimiter", + default="," + ) + + columns: list[ColumnItemModel] = SettingsField( + title="Columns", + default_factory=list + ) + + @validator("columns") + def validate_unique_outputs(cls, value): + ensure_unique_names(value) + return value + + +class RepresentationItemModel(BaseSettingsModel): + """Allows to publish multiple video files in one go.
Name of matching + asset is parsed from file names ('asset.mov', 'asset_v001.mov', + 'my_asset_to_publish.mov')""" + + name: str = SettingsField( + title="Name", + default="" + ) + + extensions: list[str] = SettingsField( + title="Extensions", + default_factory=list + ) + + @validator("extensions") + def validate_extension(cls, value): + for ext in value: + if not ext.startswith("."): + raise BadRequestException("Extension must start with '.'") + return value + + +class RepresentationConfigModel(BaseSettingsModel): + """Allows to publish multiple video files in one go.
Name of matching + asset is parsed from file names ('asset.mov', 'asset_v001.mov', + 'my_asset_to_publish.mov')""" + + tags_delimiter: str = SettingsField( + title="Tags delimiter", + default=";" + ) + + default_tags: list[str] = SettingsField( + title="Default tags", + default_factory=list + ) + + representation: list[RepresentationItemModel] = SettingsField( + title="Representation", + default_factory=list + ) + + @validator("representation") + def validate_unique_outputs(cls, value): + ensure_unique_names(value) + return value + + +class IngestCSVPluginModel(BaseSettingsModel): + """Allows to publish multiple video files in one go.
Name of matching + asset is parsed from file names ('asset.mov', 'asset_v001.mov', + 'my_asset_to_publish.mov')""" + + enabled: bool = SettingsField( + title="Enabled", + default=False + ) + + columns_config: ColumnConfigModel = SettingsField( + title="Columns config", + default_factory=ColumnConfigModel + ) + + representations_config: dict = SettingsField( + title="Representations config", + default_factory=dict + ) + + class TrayPublisherCreatePluginsModel(BaseSettingsModel): BatchMovieCreator: BatchMovieCreatorPlugin = SettingsField( title="Batch Movie Creator", default_factory=BatchMovieCreatorPlugin ) + IngestCSV: IngestCSVPluginModel = SettingsField( + title="Ingest CSV", + default_factory=IngestCSVPluginModel + ) DEFAULT_CREATORS = { @@ -41,4 +170,170 @@ DEFAULT_CREATORS = { ".mov" ] }, + "IngestCSV": { + "enabled": True, + "columns_config": { + "csv_delimiter": ",", + "columns": [ + { + "name": "File Path", + "type": "text", + "default": "", + "required": True, + "validate": "^([a-z0-9#._\\/]*)$" + }, + { + "name": "Folder Context", + "type": "text", + "default": "", + "required": True, + "validate": "^([a-zA-Z0-9_]*)$" + }, + { + "name": "Task Name", + "type": "text", + "default": "", + "required": True, + "validate": "^(.*)$" + }, + { + "name": "Version", + "type": "number", + "default": 1, + "required": True, + "validate": "^(\\d{1,3})$" + }, + { + "name": "Frame Start", + "type": "number", + "default": 0, + "required": True, + "validate": "^(\\d{1,8})$" + }, + { + "name": "Frame End", + "type": "number", + "default": 0, + "required": True, + "validate": "^(\\d{1,8})$" + }, + { + "name": "Handle Start", + "type": "number", + "default": 0, + "required": True, + "validate": "^(\\d)$" + }, + { + "name": "Handle End", + "type": "number", + "default": 0, + "required": True, + "validate": "^(\\d)$" + }, + { + "name": "FPS", + "type": "decimal", + "default": 0.0, + "required": True, + "validate": "^[0-9]*\\.[0-9]+$|^[0-9]+$" + }, + { + "name": "Thumbnail", + "type": "text", + "default": "", + "required": False, + "validate": "^([a-z0-9#._\\/]*)$" + }, + { + "name": "Colorspace", + "type": "text", + "default": "", + "required": False, + "validate": "^(.*)$" + }, + { + "name": "Version Comment", + "type": "text", + "default": "", + "required": False, + "validate": "^(.*)$" + }, + { + "name": "Representation", + "type": "text", + "default": "", + "required": False, + "validate": "^(.*)$" + }, + { + "name": "Product Type", + "type": "text", + "default": "", + "required": False, + "validate": "^(.*)$" + }, + { + "name": "Variant", + "type": "text", + "default": "", + "required": False, + "validate": "^(.*)$" + }, + { + "name": "Slate Exists", + "type": "bool", + "default": True, + "required": False, + "validate": "(True|False)" + }, + { + "name": "Representation Tags", + "type": "text", + "default": "", + "required": False, + "validate": "^(.*)$" + } + ] + }, + "representations_config": { + "tags_delimiter": ";", + "default_tags": [ + "review" + ], + "representations": [ + { + "name": "preview", + "extensions": [ + ".mp4", + ".mov" + ] + }, + { + "name": "exr", + "extensions": [ + ".exr" + ] + }, + { + "name": "edit", + "extensions": [ + ".mov" + ] + }, + { + "name": "review", + "extensions": [ + ".mov" + ] + }, + { + "name": "nuke", + "extensions": [ + ".nk" + ] + } + ] + } + } } diff --git a/server_addon/traypublisher/server/version.py b/server_addon/traypublisher/server/version.py index e57ad00718..de699158fd 100644 --- a/server_addon/traypublisher/server/version.py +++ 
b/server_addon/traypublisher/server/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring addon version.""" -__version__ = "0.1.3" +__version__ = "0.1.4" From fe0ccb2a4126bb7f632b3ace39b9f592d1ae0fe7 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 22 Feb 2024 17:15:36 +0100 Subject: [PATCH 03/67] Refactor column validation and retrieval logic. - Refactored how columns are validated and retrieved for better clarity. --- .../plugins/create/create_csv_ingest.py | 25 +++++++++++++------ 1 file changed, 18 insertions(+), 7 deletions(-) diff --git a/client/ayon_core/hosts/traypublisher/plugins/create/create_csv_ingest.py b/client/ayon_core/hosts/traypublisher/plugins/create/create_csv_ingest.py index aa986657dc..a11810f902 100644 --- a/client/ayon_core/hosts/traypublisher/plugins/create/create_csv_ingest.py +++ b/client/ayon_core/hosts/traypublisher/plugins/create/create_csv_ingest.py @@ -312,14 +312,20 @@ configuration in project settings. _, extension = os.path.splitext(filepath) # validate filepath is having correct extension based on output - config_repre_data = self.representations_config["representations"] repre_name = repre_data["representationName"] - if repre_name not in config_repre_data: + repre_config_data = None + for repre in self.representations_config["representations"]: + if repre["name"] == repre_name: + repre_config_data = repre + break + + if not repre_config_data: raise KeyError( f"Representation '{repre_name}' not found " "in config representation data." ) - validate_extensions = config_repre_data[repre_name]["extensions"] + + validate_extensions = repre_config_data["extensions"] if extension not in validate_extensions: raise TypeError( f"File extension '{extension}' not valid for " @@ -413,8 +419,8 @@ configuration in project settings. # make sure csv file contains columns from following list required_columns = [ - name for name, value in self.columns_config["columns"].items() - if value["required"] + column["name"] for column in self.columns_config["columns"] + if column["required"] ] # get data from csv file with open(csv_file_path, "r") as csv_file: @@ -582,9 +588,14 @@ configuration in project settings. self, column_name, row_data, default_value=None ): """Get row value with validation""" - columns_config = self.columns_config["columns"] + # get column data from column config - column_data = columns_config.get(column_name) + column_data = None + for column in self.columns_config["columns"]: + if column["name"] == column_name: + column_data = column + break + if not column_data: raise KeyError( f"Column '{column_name}' not found in column config." From 5595c021e244470d936059b98e44552a3b078e29 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 22 Feb 2024 22:15:56 +0100 Subject: [PATCH 04/67] Update column attribute names and validation patterns in creator plugins settings. Improve error message for file extension validation. --- .../plugins/create/create_csv_ingest.py | 4 +- .../server/settings/creator_plugins.py | 80 +++++++++---------- 2 files changed, 42 insertions(+), 42 deletions(-) diff --git a/client/ayon_core/hosts/traypublisher/plugins/create/create_csv_ingest.py b/client/ayon_core/hosts/traypublisher/plugins/create/create_csv_ingest.py index a11810f902..7c379784f7 100644 --- a/client/ayon_core/hosts/traypublisher/plugins/create/create_csv_ingest.py +++ b/client/ayon_core/hosts/traypublisher/plugins/create/create_csv_ingest.py @@ -603,7 +603,7 @@ configuration in project settings. 
# get column value from row column_value = row_data.get(column_name) - column_required = column_data["required"] + column_required = column_data["required_column"] # check if column value is not empty string and column is required if column_value == "" and column_required: @@ -614,7 +614,7 @@ configuration in project settings. # get column type column_type = column_data["type"] # get column validation regex - column_validation = column_data["validate"] + column_validation = column_data["validation_pattern"] # get column default value column_default = default_value or column_data["default"] diff --git a/server_addon/traypublisher/server/settings/creator_plugins.py b/server_addon/traypublisher/server/settings/creator_plugins.py index 7ce241faa6..9f9c31da98 100644 --- a/server_addon/traypublisher/server/settings/creator_plugins.py +++ b/server_addon/traypublisher/server/settings/creator_plugins.py @@ -45,14 +45,14 @@ class ColumnItemModel(BaseSettingsModel): default="" ) - required: bool = SettingsField( - title="Required", + required_column: bool = SettingsField( + title="Required Column", default=False ) - validate: str = SettingsField( - title="Validate", - default="" + validation_pattern: str = SettingsField( + title="Validation Regex Pattern", + default="^(.*)$" ) @@ -96,7 +96,7 @@ class RepresentationItemModel(BaseSettingsModel): def validate_extension(cls, value): for ext in value: if not ext.startswith("."): - raise BadRequestException("Extension must start with '.'") + raise BadRequestException(f"Extension must start with '.': {ext}") return value @@ -179,120 +179,120 @@ DEFAULT_CREATORS = { "name": "File Path", "type": "text", "default": "", - "required": True, - "validate": "^([a-z0-9#._\\/]*)$" + "required_column": True, + "validation_pattern": "^([a-z0-9#._\\/]*)$" }, { "name": "Folder Context", "type": "text", "default": "", - "required": True, - "validate": "^([a-zA-Z0-9_]*)$" + "required_column": True, + "validation_pattern": "^([a-zA-Z0-9_]*)$" }, { "name": "Task Name", "type": "text", "default": "", - "required": True, - "validate": "^(.*)$" + "required_column": True, + "validation_pattern": "^(.*)$" }, { "name": "Version", "type": "number", "default": 1, - "required": True, - "validate": "^(\\d{1,3})$" + "required_column": True, + "validation_pattern": "^(\\d{1,3})$" }, { "name": "Frame Start", "type": "number", "default": 0, - "required": True, - "validate": "^(\\d{1,8})$" + "required_column": True, + "validation_pattern": "^(\\d{1,8})$" }, { "name": "Frame End", "type": "number", "default": 0, - "required": True, - "validate": "^(\\d{1,8})$" + "required_column": True, + "validation_pattern": "^(\\d{1,8})$" }, { "name": "Handle Start", "type": "number", "default": 0, - "required": True, - "validate": "^(\\d)$" + "required_column": True, + "validation_pattern": "^(\\d)$" }, { "name": "Handle End", "type": "number", "default": 0, - "required": True, - "validate": "^(\\d)$" + "required_column": True, + "validation_pattern": "^(\\d)$" }, { "name": "FPS", "type": "decimal", "default": 0.0, - "required": True, - "validate": "^[0-9]*\\.[0-9]+$|^[0-9]+$" + "required_column": True, + "validation_pattern": "^[0-9]*\\.[0-9]+$|^[0-9]+$" }, { "name": "Thumbnail", "type": "text", "default": "", - "required": False, - "validate": "^([a-z0-9#._\\/]*)$" + "required_column": False, + "validation_pattern": "^([a-z0-9#._\\/]*)$" }, { "name": "Colorspace", "type": "text", "default": "", - "required": False, - "validate": "^(.*)$" + "required_column": False, + "validation_pattern": "^(.*)$" }, 
{ "name": "Version Comment", "type": "text", "default": "", - "required": False, - "validate": "^(.*)$" + "required_column": False, + "validation_pattern": "^(.*)$" }, { "name": "Representation", "type": "text", "default": "", - "required": False, - "validate": "^(.*)$" + "required_column": False, + "validation_pattern": "^(.*)$" }, { "name": "Product Type", "type": "text", "default": "", - "required": False, - "validate": "^(.*)$" + "required_column": False, + "validation_pattern": "^(.*)$" }, { "name": "Variant", "type": "text", "default": "", - "required": False, - "validate": "^(.*)$" + "required_column": False, + "validation_pattern": "^(.*)$" }, { "name": "Slate Exists", "type": "bool", "default": True, - "required": False, - "validate": "(True|False)" + "required_column": False, + "validation_pattern": "(True|False)" }, { "name": "Representation Tags", "type": "text", "default": "", - "required": False, - "validate": "^(.*)$" + "required_column": False, + "validation_pattern": "^(.*)$" } ] }, From b7bd29389240a518838fbb2124aae26da6cadea8 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 4 Mar 2024 17:19:51 +0100 Subject: [PATCH 05/67] Update addon and plugin files for Ayon Core hosts traypublisher. - Added import statement for `get_ayon_launcher_args`. - Renamed function call from `get_subset_name` to `get_product_name`. - Changed attribute name from `family` to `product_type` in class IngestCSV. - Updated comments and docstrings in the code. --- client/ayon_core/hosts/traypublisher/addon.py | 1 + .../plugins/create/create_csv_ingest.py | 7 +++---- .../server/settings/creator_plugins.py | 18 ++++++++++-------- 3 files changed, 14 insertions(+), 12 deletions(-) diff --git a/client/ayon_core/hosts/traypublisher/addon.py b/client/ayon_core/hosts/traypublisher/addon.py index f3884aedfe..ae0705cee2 100644 --- a/client/ayon_core/hosts/traypublisher/addon.py +++ b/client/ayon_core/hosts/traypublisher/addon.py @@ -1,6 +1,7 @@ import os from pathlib import Path +from ayon_core.lib import get_ayon_launcher_args from ayon_core.lib.execute import run_detached_process from ayon_core.addon import ( click_wrap, diff --git a/client/ayon_core/hosts/traypublisher/plugins/create/create_csv_ingest.py b/client/ayon_core/hosts/traypublisher/plugins/create/create_csv_ingest.py index 7c379784f7..193e439581 100644 --- a/client/ayon_core/hosts/traypublisher/plugins/create/create_csv_ingest.py +++ b/client/ayon_core/hosts/traypublisher/plugins/create/create_csv_ingest.py @@ -5,7 +5,7 @@ import clique from copy import deepcopy, copy from ayon_core.client import get_asset_by_name -from ayon_core.pipeline.create import get_subset_name +from ayon_core.pipeline.create import get_product_name from ayon_core.pipeline import CreatedInstance from ayon_core.lib import FileDef, BoolDef from ayon_core.lib.transcoding import ( @@ -23,7 +23,7 @@ class IngestCSV(TrayPublishCreator): icon = "fa.file" label = "CSV Ingest" - family = "csv_ingest_file" + product_type = "csv_ingest_file" identifier = "io.ayon_core.creators.traypublisher.csv_ingest" default_variants = ["Main"] @@ -41,7 +41,6 @@ configuration in project settings. columns_config = {} representations_config = {} - def create(self, subset_name, instance_data, pre_create_data): """Create an product from each row found in the CSV. @@ -118,7 +117,7 @@ configuration in project settings. 
version = product_data["version"] # create subset/product name - product_name = get_subset_name( + product_name = get_product_name( product_type, variant, task_name, diff --git a/server_addon/traypublisher/server/settings/creator_plugins.py b/server_addon/traypublisher/server/settings/creator_plugins.py index 9f9c31da98..82c4d37739 100644 --- a/server_addon/traypublisher/server/settings/creator_plugins.py +++ b/server_addon/traypublisher/server/settings/creator_plugins.py @@ -78,9 +78,11 @@ class ColumnConfigModel(BaseSettingsModel): class RepresentationItemModel(BaseSettingsModel): - """Allows to publish multiple video files in one go.
Name of matching - asset is parsed from file names ('asset.mov', 'asset_v001.mov', - 'my_asset_to_publish.mov')""" + """Allows to publish multiple video files in one go. + + Name of matching asset is parsed from file names + ('asset.mov', 'asset_v001.mov', 'my_asset_to_publish.mov') + """ name: str = SettingsField( title="Name", @@ -115,12 +117,12 @@ class RepresentationConfigModel(BaseSettingsModel): default_factory=list ) - representation: list[RepresentationItemModel] = SettingsField( - title="Representation", + representations: list[RepresentationItemModel] = SettingsField( + title="Representations", default_factory=list ) - @validator("representation") + @validator("representations") def validate_unique_outputs(cls, value): ensure_unique_names(value) return value @@ -141,9 +143,9 @@ class IngestCSVPluginModel(BaseSettingsModel): default_factory=ColumnConfigModel ) - representations_config: dict = SettingsField( + representations_config: RepresentationConfigModel = SettingsField( title="Representations config", - default_factory=dict + default_factory=RepresentationConfigModel ) From 102866da849ecb6e36d0acbbd1743fd6af2e5973 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jakub=20Je=C5=BEek?= Date: Tue, 5 Mar 2024 13:48:33 +0100 Subject: [PATCH 06/67] Update client/ayon_core/hosts/traypublisher/plugins/publish/collect_csv_ingest_instance_data.py Co-authored-by: Roy Nieterau --- .../plugins/publish/collect_csv_ingest_instance_data.py | 1 - 1 file changed, 1 deletion(-) diff --git a/client/ayon_core/hosts/traypublisher/plugins/publish/collect_csv_ingest_instance_data.py b/client/ayon_core/hosts/traypublisher/plugins/publish/collect_csv_ingest_instance_data.py index 0da3ebed81..1840dbb445 100644 --- a/client/ayon_core/hosts/traypublisher/plugins/publish/collect_csv_ingest_instance_data.py +++ b/client/ayon_core/hosts/traypublisher/plugins/publish/collect_csv_ingest_instance_data.py @@ -17,7 +17,6 @@ class CollectCSVIngestInstancesData( families = ["csv_ingest"] def process(self, instance): - self.log.info(f"Collecting {instance.name}") # expecting [(colorspace, repre_data), ...] 
prepared_repres_data_items = instance.data[ From 4cbeb1033aab65ecc2882e1f95e8ae909a504246 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jakub=20Je=C5=BEek?= Date: Tue, 5 Mar 2024 13:48:41 +0100 Subject: [PATCH 07/67] Update client/ayon_core/hosts/traypublisher/plugins/publish/collect_csv_ingest_instance_data.py Co-authored-by: Roy Nieterau --- .../plugins/publish/collect_csv_ingest_instance_data.py | 1 - 1 file changed, 1 deletion(-) diff --git a/client/ayon_core/hosts/traypublisher/plugins/publish/collect_csv_ingest_instance_data.py b/client/ayon_core/hosts/traypublisher/plugins/publish/collect_csv_ingest_instance_data.py index 1840dbb445..f76ef93c95 100644 --- a/client/ayon_core/hosts/traypublisher/plugins/publish/collect_csv_ingest_instance_data.py +++ b/client/ayon_core/hosts/traypublisher/plugins/publish/collect_csv_ingest_instance_data.py @@ -32,4 +32,3 @@ class CollectCSVIngestInstancesData( instance.data["representations"].append(repre_data) - self.log.debug(pformat(instance.data)) From ac4e3157bce395f953fd870317cdf426f9de85d7 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 5 Mar 2024 14:43:48 +0100 Subject: [PATCH 08/67] ayon conversion fixes --- client/ayon_core/hosts/traypublisher/csv_publish.py | 10 +--------- .../plugins/create/create_csv_ingest.py | 13 ++++++++----- 2 files changed, 9 insertions(+), 14 deletions(-) diff --git a/client/ayon_core/hosts/traypublisher/csv_publish.py b/client/ayon_core/hosts/traypublisher/csv_publish.py index f8eed2f2c5..6f2e335f89 100644 --- a/client/ayon_core/hosts/traypublisher/csv_publish.py +++ b/client/ayon_core/hosts/traypublisher/csv_publish.py @@ -35,14 +35,6 @@ def csvpublish( # setting host context into project host.set_project_name(project_name) - # add asset context to environment - # TODO: perhaps this can be done in a better way? - os.environ.update({ - "AVALON_PROJECT": project_name, - "AVALON_ASSET": asset_name, - "AVALON_TASK": task_name or "" - }) - # form precreate data with field values file_field = FileDefItem.from_paths([csv_filepath], False).pop().to_dict() precreate_data = { @@ -57,7 +49,7 @@ def csvpublish( ) create_context.create( - "io.openpype.creators.traypublisher.csv_ingest", + "io.ayon_core.creators.traypublisher.csv_ingest", "Main", asset_doc=asset_doc, task_name=task_name, diff --git a/client/ayon_core/hosts/traypublisher/plugins/create/create_csv_ingest.py b/client/ayon_core/hosts/traypublisher/plugins/create/create_csv_ingest.py index 193e439581..dd2339392f 100644 --- a/client/ayon_core/hosts/traypublisher/plugins/create/create_csv_ingest.py +++ b/client/ayon_core/hosts/traypublisher/plugins/create/create_csv_ingest.py @@ -79,7 +79,7 @@ configuration in project settings. ) csv_instance = CreatedInstance( - self.family, subset_name, instance_data, self + self.product_type, subset_name, instance_data, self ) self._store_new_instance(csv_instance) @@ -106,6 +106,7 @@ configuration in project settings. """Create instances from csv data""" for asset_name, _data in csv_data_for_instances.items(): + project_name = self.create_context.get_current_project_name() asset_doc = _data["asset_doc"] products = _data["products"] @@ -118,10 +119,12 @@ configuration in project settings. # create subset/product name product_name = get_product_name( - product_type, - variant, - task_name, + project_name, asset_doc, + task_name, + self.host_name, + product_type, + variant ) # make sure frame start/end is inherited from csv columns @@ -419,7 +422,7 @@ configuration in project settings. 
# make sure csv file contains columns from following list required_columns = [ column["name"] for column in self.columns_config["columns"] - if column["required"] + if column["required_column"] ] # get data from csv file with open(csv_file_path, "r") as csv_file: From 09effd30fb2d22c519cd16b61b82dd46413454b4 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 5 Mar 2024 16:16:52 +0100 Subject: [PATCH 09/67] Update column names and add new fields for better data representation in CSV ingest and creator plugins. --- .../plugins/create/create_csv_ingest.py | 5 +- .../server/settings/creator_plugins.py | 66 +++++++++---------- 2 files changed, 35 insertions(+), 36 deletions(-) diff --git a/client/ayon_core/hosts/traypublisher/plugins/create/create_csv_ingest.py b/client/ayon_core/hosts/traypublisher/plugins/create/create_csv_ingest.py index dd2339392f..00ff7e00ed 100644 --- a/client/ayon_core/hosts/traypublisher/plugins/create/create_csv_ingest.py +++ b/client/ayon_core/hosts/traypublisher/plugins/create/create_csv_ingest.py @@ -545,9 +545,9 @@ configuration in project settings. # Get optional columns thumbnail_path = self._get_row_value_with_validation( - "Thumbnail", row_data) + "Version Thumbnail", row_data) colorspace = self._get_row_value_with_validation( - "Colorspace", row_data) + "Representation Colorspace", row_data) comment = self._get_row_value_with_validation( "Version Comment", row_data) repre = self._get_row_value_with_validation( @@ -583,7 +583,6 @@ configuration in project settings. "handleEnd": int(handle_end), "fps": float(fps), } - return file_path, representation_data def _get_row_value_with_validation( diff --git a/server_addon/traypublisher/server/settings/creator_plugins.py b/server_addon/traypublisher/server/settings/creator_plugins.py index 82c4d37739..3a07a76e6f 100644 --- a/server_addon/traypublisher/server/settings/creator_plugins.py +++ b/server_addon/traypublisher/server/settings/creator_plugins.py @@ -198,6 +198,20 @@ DEFAULT_CREATORS = { "required_column": True, "validation_pattern": "^(.*)$" }, + { + "name": "Product Type", + "type": "text", + "default": "", + "required_column": False, + "validation_pattern": "^(.*)$" + }, + { + "name": "Variant", + "type": "text", + "default": "", + "required_column": False, + "validation_pattern": "^(.*)$" + }, { "name": "Version", "type": "number", @@ -205,6 +219,20 @@ DEFAULT_CREATORS = { "required_column": True, "validation_pattern": "^(\\d{1,3})$" }, + { + "name": "Version Comment", + "type": "text", + "default": "", + "required_column": False, + "validation_pattern": "^(.*)$" + }, + { + "name": "Version Thumbnail", + "type": "text", + "default": "", + "required_column": False, + "validation_pattern": "^([a-zA-Z0-9#._\\/]*)$" + }, { "name": "Frame Start", "type": "number", @@ -241,25 +269,11 @@ DEFAULT_CREATORS = { "validation_pattern": "^[0-9]*\\.[0-9]+$|^[0-9]+$" }, { - "name": "Thumbnail", - "type": "text", - "default": "", + "name": "Slate Exists", + "type": "bool", + "default": True, "required_column": False, - "validation_pattern": "^([a-z0-9#._\\/]*)$" - }, - { - "name": "Colorspace", - "type": "text", - "default": "", - "required_column": False, - "validation_pattern": "^(.*)$" - }, - { - "name": "Version Comment", - "type": "text", - "default": "", - "required_column": False, - "validation_pattern": "^(.*)$" + "validation_pattern": "(True|False)" }, { "name": "Representation", @@ -269,26 +283,12 @@ DEFAULT_CREATORS = { "validation_pattern": "^(.*)$" }, { - "name": "Product Type", + "name": "Representation 
Colorspace", "type": "text", "default": "", "required_column": False, "validation_pattern": "^(.*)$" }, - { - "name": "Variant", - "type": "text", - "default": "", - "required_column": False, - "validation_pattern": "^(.*)$" - }, - { - "name": "Slate Exists", - "type": "bool", - "default": True, - "required_column": False, - "validation_pattern": "(True|False)" - }, { "name": "Representation Tags", "type": "text", From 8ecb1fa3a542b49308bbfa3c35e11de4ebed5379 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 5 Mar 2024 16:17:08 +0100 Subject: [PATCH 10/67] Refactor representation data handling for thumbnails and media types. - Refactored how thumbnail and media representation data is processed. - Added logic to handle different types of representations based on colorspaces. --- .../plugins/create/create_csv_ingest.py | 40 ++++++++++++++----- .../collect_csv_ingest_instance_data.py | 21 ++++++++-- 2 files changed, 48 insertions(+), 13 deletions(-) diff --git a/client/ayon_core/hosts/traypublisher/plugins/create/create_csv_ingest.py b/client/ayon_core/hosts/traypublisher/plugins/create/create_csv_ingest.py index 00ff7e00ed..720ef5ddb3 100644 --- a/client/ayon_core/hosts/traypublisher/plugins/create/create_csv_ingest.py +++ b/client/ayon_core/hosts/traypublisher/plugins/create/create_csv_ingest.py @@ -246,15 +246,27 @@ configuration in project settings. "stagingDir": thumb_dir, "outputName": explicit_output_name, }) - new_instance["prepared_data_for_repres"].append( - ("_thumbnail_", thumbnail_repr_data) - ) + new_instance["prepared_data_for_repres"].append({ + "type": "thumbnail", + "colorspace": None, + "representation": thumbnail_repr_data, + }) + # also add thumbnailPath for ayon to integrate + if not new_instance.get("thumbnailPath"): + new_instance["thumbnailPath"] = ( + os.path.join(thumb_dir, thumb_file) + ) elif ( thumbnails and not multiple_thumbnails and not thumbnails_processed or not reviewable ): + """ + For case where we have only one thumbnail + and not reviewable medias. This needs to be processed + only once per instance. + """ if not thumbnails: continue # here we will use only one thumbnail for @@ -273,9 +285,17 @@ configuration in project settings. "files": thumb_file, "stagingDir": thumb_dir }) - new_instance["prepared_data_for_repres"].append( - ("_thumbnail_", thumbnail_repr_data) - ) + new_instance["prepared_data_for_repres"].append({ + "type": "thumbnail", + "colorspace": None, + "representation": thumbnail_repr_data, + }) + # also add thumbnailPath for ayon to integrate + if not new_instance.get("thumbnailPath"): + new_instance["thumbnailPath"] = ( + os.path.join(thumb_dir, thumb_file) + ) + thumbnails_processed = True # get representation data @@ -284,9 +304,11 @@ configuration in project settings. 
explicit_output_name ) - new_instance["prepared_data_for_repres"].append( - (repre_data["colorspace"], representation_data) - ) + new_instance["prepared_data_for_repres"].append({ + "type": "media", + "colorspace": repre_data["colorspace"], + "representation": representation_data, + }) def _get_refactor_thumbnail_path( self, staging_dir, relative_thumbnail_path): diff --git a/client/ayon_core/hosts/traypublisher/plugins/publish/collect_csv_ingest_instance_data.py b/client/ayon_core/hosts/traypublisher/plugins/publish/collect_csv_ingest_instance_data.py index f76ef93c95..33536d0854 100644 --- a/client/ayon_core/hosts/traypublisher/plugins/publish/collect_csv_ingest_instance_data.py +++ b/client/ayon_core/hosts/traypublisher/plugins/publish/collect_csv_ingest_instance_data.py @@ -22,13 +22,26 @@ class CollectCSVIngestInstancesData( prepared_repres_data_items = instance.data[ "prepared_data_for_repres"] - for colorspace, repre_data in prepared_repres_data_items: - # only apply colorspace to those which are not marked as thumbnail - if colorspace != "_thumbnail_": + for prep_repre_data in prepared_repres_data_items: + type = prep_repre_data["type"] + colorspace = prep_repre_data["colorspace"] + repre_data = prep_repre_data["representation"] + + # thumbnails should be skipped + if type == "media": # colorspace name is passed from CSV column self.set_representation_colorspace( repre_data, instance.context, colorspace ) + elif type == "media" and colorspace is None: + # TODO: implement colorspace file rules file parsing + self.log.warning( + "Colorspace is not defined in csv for following" + f" representation: {pformat(repre_data)}" + ) + pass + elif type == "thumbnail": + # thumbnails should be skipped + pass instance.data["representations"].append(repre_data) - From 8bb9070a9e3d4a775360025197076a528f5c2af4 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 5 Mar 2024 16:19:37 +0100 Subject: [PATCH 11/67] comment Improve handling of thumbnails by removing unnecessary iteration. --- .../hosts/traypublisher/plugins/create/create_csv_ingest.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/client/ayon_core/hosts/traypublisher/plugins/create/create_csv_ingest.py b/client/ayon_core/hosts/traypublisher/plugins/create/create_csv_ingest.py index 720ef5ddb3..84ab5b72a1 100644 --- a/client/ayon_core/hosts/traypublisher/plugins/create/create_csv_ingest.py +++ b/client/ayon_core/hosts/traypublisher/plugins/create/create_csv_ingest.py @@ -272,6 +272,8 @@ configuration in project settings. 
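# Aside (illustrative, not part of the patch): from this commit on, each item
# appended to instance.data["prepared_data_for_repres"] is a small dict rather
# than the previous (colorspace, repre_data) tuple, roughly:
#
#     {
#         "type": "media",              # or "thumbnail"
#         "colorspace": "ACEScg",       # made-up example; None for thumbnails
#         "representation": {"name": "exr", "ext": "exr", "files": [...]},
#     }
#
# The collector plugin then branches on "type" instead of checking for the old
# "_thumbnail_" marker string.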
# here we will use only one thumbnail for # all representations relative_thumbnail_path = repre_data["thumbnailPath"] + # popping last thumbnail from list since it is only one + # and we do not need to iterate again over it if not relative_thumbnail_path: relative_thumbnail_path = thumbnails.pop() thumb_dir, thumb_file = \ From 346db35546d67b38afcb9cdd09529720b6462c9b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jakub=20Je=C5=BEek?= Date: Mon, 11 Mar 2024 12:17:20 +0100 Subject: [PATCH 12/67] Update client/ayon_core/hosts/traypublisher/csv_publish.py Co-authored-by: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> --- client/ayon_core/hosts/traypublisher/csv_publish.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/client/ayon_core/hosts/traypublisher/csv_publish.py b/client/ayon_core/hosts/traypublisher/csv_publish.py index 6f2e335f89..c9fbbf917a 100644 --- a/client/ayon_core/hosts/traypublisher/csv_publish.py +++ b/client/ayon_core/hosts/traypublisher/csv_publish.py @@ -49,7 +49,7 @@ def csvpublish( ) create_context.create( - "io.ayon_core.creators.traypublisher.csv_ingest", + "io.ayon.creators.traypublisher.csv_ingest", "Main", asset_doc=asset_doc, task_name=task_name, From feffa4db66efa415683f3b0253de01ae773c2bd1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jakub=20Je=C5=BEek?= Date: Mon, 11 Mar 2024 12:17:27 +0100 Subject: [PATCH 13/67] Update client/ayon_core/hosts/traypublisher/plugins/create/create_csv_ingest.py Co-authored-by: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> --- .../hosts/traypublisher/plugins/create/create_csv_ingest.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/client/ayon_core/hosts/traypublisher/plugins/create/create_csv_ingest.py b/client/ayon_core/hosts/traypublisher/plugins/create/create_csv_ingest.py index 84ab5b72a1..4ec6ca302a 100644 --- a/client/ayon_core/hosts/traypublisher/plugins/create/create_csv_ingest.py +++ b/client/ayon_core/hosts/traypublisher/plugins/create/create_csv_ingest.py @@ -24,7 +24,7 @@ class IngestCSV(TrayPublishCreator): label = "CSV Ingest" product_type = "csv_ingest_file" - identifier = "io.ayon_core.creators.traypublisher.csv_ingest" + identifier = "io.ayon.creators.traypublisher.csv_ingest" default_variants = ["Main"] From 0252e8796259221b418c4e3aa6e6e951cffe2674 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jakub=20Je=C5=BEek?= Date: Mon, 11 Mar 2024 12:22:28 +0100 Subject: [PATCH 14/67] Update client/ayon_core/hosts/traypublisher/plugins/create/create_csv_ingest.py Co-authored-by: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> --- .../hosts/traypublisher/plugins/create/create_csv_ingest.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/client/ayon_core/hosts/traypublisher/plugins/create/create_csv_ingest.py b/client/ayon_core/hosts/traypublisher/plugins/create/create_csv_ingest.py index 4ec6ca302a..31d0a022c5 100644 --- a/client/ayon_core/hosts/traypublisher/plugins/create/create_csv_ingest.py +++ b/client/ayon_core/hosts/traypublisher/plugins/create/create_csv_ingest.py @@ -141,7 +141,7 @@ configuration in project settings. 
version_comment = next( iter( repre_data["comment"] - for _, repre_data in product_data["representations"].items() # noqa: E501 + for repre_data in product_data["representations"].values() # noqa: E501 if repre_data["comment"] ), None From 638c68324139fbe365d36615ef21910a83f2fab2 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 19 Mar 2024 10:19:50 +0100 Subject: [PATCH 15/67] Update CLI options and function parameters for CSV file ingestion and publishing. Refactor variable names for clarity in functions related to file handling and context creation. --- client/ayon_core/hosts/traypublisher/addon.py | 28 +++++++++---------- .../hosts/traypublisher/csv_publish.py | 16 +++++------ 2 files changed, 22 insertions(+), 22 deletions(-) diff --git a/client/ayon_core/hosts/traypublisher/addon.py b/client/ayon_core/hosts/traypublisher/addon.py index ae0705cee2..ee42784f98 100644 --- a/client/ayon_core/hosts/traypublisher/addon.py +++ b/client/ayon_core/hosts/traypublisher/addon.py @@ -62,25 +62,25 @@ def launch(): @cli_main.command() @click_wrap.option( - "--csv-filepath", + "--filepath", help="Full path to CSV file with data", type=str, required=True ) @click_wrap.option( - "--project-name", + "--project", help="Project name in which the context will be used", type=str, required=True ) @click_wrap.option( - "--asset-name", + "--folder", help="Asset name in which the context will be used", type=str, required=True ) @click_wrap.option( - "--task-name", + "--task", help="Task name under Asset in which the context will be used", type=str, required=False @@ -93,10 +93,10 @@ def launch(): required=False ) def ingestcsv( - csv_filepath, - project_name, - asset_name, - task_name, + filepath, + project, + folder, + task, ignore_validators ): """Ingest CSV file into project. @@ -107,13 +107,13 @@ def ingestcsv( from .csv_publish import csvpublish # use Path to check if csv_filepath exists - if not Path(csv_filepath).exists(): - raise FileNotFoundError(f"File {csv_filepath} does not exist.") + if not Path(filepath).exists(): + raise FileNotFoundError(f"File {filepath} does not exist.") csvpublish( - csv_filepath, - project_name, - asset_name, - task_name, + filepath, + project, + folder, + task, ignore_validators ) diff --git a/client/ayon_core/hosts/traypublisher/csv_publish.py b/client/ayon_core/hosts/traypublisher/csv_publish.py index c9fbbf917a..32c2b69371 100644 --- a/client/ayon_core/hosts/traypublisher/csv_publish.py +++ b/client/ayon_core/hosts/traypublisher/csv_publish.py @@ -3,7 +3,7 @@ import os import pyblish.api import pyblish.util -from ayon_core.client import get_asset_by_name +from ayon_api import get_folder_by_name from ayon_core.lib.attribute_definitions import FileDefItem from ayon_core.pipeline import install_host from ayon_core.pipeline.create import CreateContext @@ -12,18 +12,18 @@ from ayon_core.hosts.traypublisher.api import TrayPublisherHost def csvpublish( - csv_filepath, + filepath, project_name, - asset_name, + folder_name, task_name=None, ignore_validators=False ): """Publish CSV file. Args: - csv_filepath (str): Path to CSV file. + filepath (str): Path to CSV file. project_name (str): Project name. - asset_name (str): Asset name. + folder_name (str): Folder name. task_name (Optional[str]): Task name. ignore_validators (Optional[bool]): Option to ignore validators. 
""" @@ -36,16 +36,16 @@ def csvpublish( host.set_project_name(project_name) # form precreate data with field values - file_field = FileDefItem.from_paths([csv_filepath], False).pop().to_dict() + file_field = FileDefItem.from_paths([filepath], False).pop().to_dict() precreate_data = { "csv_filepath_data": file_field, } # create context initialization create_context = CreateContext(host, headless=True) - asset_doc = get_asset_by_name( + asset_doc = get_folder_by_name( project_name, - asset_name + folder_name=folder_name ) create_context.create( From 1c11e18314ff929f1fe1b9b7048a063552143088 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 19 Mar 2024 11:11:24 +0100 Subject: [PATCH 16/67] Update CSV ingest plugin to handle folder paths and task types. Improve error handling for missing assets and tasks. Refactor data processing for better organization and validation. --- .../plugins/create/create_csv_ingest.py | 71 +++++++++++-------- 1 file changed, 41 insertions(+), 30 deletions(-) diff --git a/client/ayon_core/hosts/traypublisher/plugins/create/create_csv_ingest.py b/client/ayon_core/hosts/traypublisher/plugins/create/create_csv_ingest.py index 31d0a022c5..9d6f04ae99 100644 --- a/client/ayon_core/hosts/traypublisher/plugins/create/create_csv_ingest.py +++ b/client/ayon_core/hosts/traypublisher/plugins/create/create_csv_ingest.py @@ -4,14 +4,14 @@ import csv import clique from copy import deepcopy, copy -from ayon_core.client import get_asset_by_name +from ayon_api import get_folder_by_path, get_task_by_name from ayon_core.pipeline.create import get_product_name from ayon_core.pipeline import CreatedInstance from ayon_core.lib import FileDef, BoolDef from ayon_core.lib.transcoding import ( VIDEO_EXTENSIONS, IMAGE_EXTENSIONS ) - +from ayon_core.pipeline.create import CreatorError from ayon_core.hosts.traypublisher.api.plugin import ( TrayPublishCreator ) @@ -54,7 +54,7 @@ configuration in project settings. folder = csv_filepath_data.get("directory", "") if not os.path.exists(folder): - raise FileNotFoundError( + raise CreatorError( f"Directory '{folder}' does not exist." ) filename = csv_filepath_data.get("filenames", []) @@ -105,14 +105,14 @@ configuration in project settings. ): """Create instances from csv data""" - for asset_name, _data in csv_data_for_instances.items(): + for folder_path, prepared_data in csv_data_for_instances.items(): project_name = self.create_context.get_current_project_name() - asset_doc = _data["asset_doc"] - products = _data["products"] + products = prepared_data["products"] for instance_name, product_data in products.items(): # get important instance variables task_name = product_data["task_name"] + task_type = product_data["task_type"] variant = product_data["variant"] product_type = product_data["product_type"] version = product_data["version"] @@ -120,8 +120,8 @@ configuration in project settings. # create subset/product name product_name = get_product_name( project_name, - asset_doc, task_name, + task_type, self.host_name, product_type, variant @@ -155,7 +155,7 @@ configuration in project settings. # get representations from product data representations = product_data["representations"] - label = f"{asset_name}_{product_name}_v{version:>03}" + label = f"{folder_path}_{product_name}_v{version:>03}" families = ["csv_ingest"] if slate_exists: @@ -166,7 +166,7 @@ configuration in project settings. 
# make product data product_data = { "name": instance_name, - "asset": asset_name, + "folderPath": folder_path, "families": families, "label": label, "task": task_name, @@ -471,8 +471,10 @@ configuration in project settings. # get data from csv file for row in csv_reader: # Get required columns first - context_asset_name = self._get_row_value_with_validation( - "Folder Context", row) + # TODO: will need to be folder path in CSV + # TODO: `context_asset_name` is now `folder_path` + folder_path = self._get_row_value_with_validation( + "Folder Path", row) task_name = self._get_row_value_with_validation( "Task Name", row) version = self._get_row_value_with_validation( @@ -493,31 +495,40 @@ configuration in project settings. filename, representation_data = \ self._get_representation_row_data(row) + # TODO: batch query of all folder paths and task names + + # get folder entity from folder path + folder_entity = get_folder_by_path( + project_name, folder_path) + + # make sure asset exists + if not folder_entity: + raise CreatorError( + f"Asset '{folder_path}' not found." + ) + + # first get all tasks on the folder entity and then find + task_entity = get_task_by_name( + project_name, folder_entity["id"], task_name) + + # check if task name is valid task in asset doc + if not task_entity: + raise CreatorError( + f"Task '{task_name}' not found in asset doc." + ) + # get all csv data into one dict and make sure there are no # duplicates data are already validated and sorted under # correct existing asset also check if asset exists and if # task name is valid task in asset doc and representations # are distributed under products following variants - if context_asset_name not in csv_data: - asset_doc = get_asset_by_name( - project_name, context_asset_name) - - # make sure asset exists - if not asset_doc: - raise ValueError( - f"Asset '{context_asset_name}' not found." - ) - # check if task name is valid task in asset doc - if task_name not in asset_doc["data"]["tasks"]: - raise ValueError( - f"Task '{task_name}' not found in asset doc." - ) - - csv_data[context_asset_name] = { - "asset_doc": asset_doc, + if folder_path not in csv_data: + csv_data[folder_path] = { + "folder_entity": folder_entity, "products": { pre_product_name: { "task_name": task_name, + "task_type": task_entity["taskType"], "variant": variant, "product_type": product_type, "version": version, @@ -528,11 +539,11 @@ configuration in project settings. } } else: - asset_doc = csv_data[context_asset_name]["asset_doc"] - csv_products = csv_data[context_asset_name]["products"] + csv_products = csv_data[folder_path]["products"] if pre_product_name not in csv_products: csv_products[pre_product_name] = { "task_name": task_name, + "task_type": task_entity["taskType"], "variant": variant, "product_type": product_type, "version": version, From 175d299ccc5b20feea95242f156f7d15f6e0b132 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 19 Mar 2024 13:12:30 +0100 Subject: [PATCH 17/67] Refactor error handling to use custom CreatorError class. - Replaced KeyError, TypeError, NotADirectoryError, ValueError with CreatorError for consistency and better error management. 
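A minimal sketch of the pattern this commit settles on (illustrative only, not
part of the patch; `_require_directory` is a made-up helper name, the import
path is the one added in the previous commit):

    import os

    from ayon_core.pipeline.create import CreatorError

    def _require_directory(dirname):
        # CreatorError surfaces a readable message in the publisher UI
        # instead of a raw Python traceback.
        if not os.path.isdir(dirname):
            raise CreatorError(f"Directory '{dirname}' does not exist.")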
--- .../plugins/create/create_csv_ingest.py | 20 +++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/client/ayon_core/hosts/traypublisher/plugins/create/create_csv_ingest.py b/client/ayon_core/hosts/traypublisher/plugins/create/create_csv_ingest.py index 9d6f04ae99..1381059fbb 100644 --- a/client/ayon_core/hosts/traypublisher/plugins/create/create_csv_ingest.py +++ b/client/ayon_core/hosts/traypublisher/plugins/create/create_csv_ingest.py @@ -346,14 +346,14 @@ configuration in project settings. break if not repre_config_data: - raise KeyError( + raise CreatorError( f"Representation '{repre_name}' not found " "in config representation data." ) validate_extensions = repre_config_data["extensions"] if extension not in validate_extensions: - raise TypeError( + raise CreatorError( f"File extension '{extension}' not valid for " f"output '{validate_extensions}'." ) @@ -373,7 +373,7 @@ configuration in project settings. # check if dirname exists if not os.path.isdir(dirname): - raise NotADirectoryError( + raise CreatorError( f"Directory '{dirname}' does not exist." ) @@ -389,7 +389,7 @@ configuration in project settings. collections = collections[0] else: if is_sequence: - raise ValueError( + raise CreatorError( f"No collections found in directory '{dirname}'." ) @@ -463,7 +463,7 @@ configuration in project settings. # check if csv file contains all required columns if any(column not in all_columns for column in required_columns): - raise KeyError( + raise CreatorError( f"Missing required columns: {required_columns}" ) @@ -555,7 +555,7 @@ configuration in project settings. csv_representations = \ csv_products[pre_product_name]["representations"] if filename in csv_representations: - raise ValueError( + raise CreatorError( f"Duplicate filename '{filename}' in csv file." ) csv_representations[filename] = representation_data @@ -633,7 +633,7 @@ configuration in project settings. break if not column_data: - raise KeyError( + raise CreatorError( f"Column '{column_name}' not found in column config." ) @@ -643,7 +643,7 @@ configuration in project settings. # check if column value is not empty string and column is required if column_value == "" and column_required: - raise ValueError( + raise CreatorError( f"Value in column '{column_name}' is required." ) @@ -675,7 +675,7 @@ configuration in project settings. column_value is not None and not re.match(str(column_validation), str(column_value)) ): - raise ValueError( + raise CreatorError( f"Column '{column_name}' value '{column_value}' " f"does not match validation regex '{column_validation}' \n" f"Row data: {row_data} \n" @@ -719,7 +719,7 @@ configuration in project settings. 
Returns: list: list of attribute object instances """ - # Use same attributes as for instance attrobites + # Use same attributes as for instance attributes attr_defs = [ FileDef( "csv_filepath_data", From 9090706252175d1cfcb38203214a376f1fd5a84b Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 19 Mar 2024 13:19:25 +0100 Subject: [PATCH 18/67] Refactor CSV file reading and data processing logic - Added import for StringIO - Refactored CSV file reading using StringIO - Improved handling of fieldnames and required columns detection --- .../plugins/create/create_csv_ingest.py | 196 +++++++++--------- 1 file changed, 102 insertions(+), 94 deletions(-) diff --git a/client/ayon_core/hosts/traypublisher/plugins/create/create_csv_ingest.py b/client/ayon_core/hosts/traypublisher/plugins/create/create_csv_ingest.py index 1381059fbb..8143e8b45b 100644 --- a/client/ayon_core/hosts/traypublisher/plugins/create/create_csv_ingest.py +++ b/client/ayon_core/hosts/traypublisher/plugins/create/create_csv_ingest.py @@ -2,6 +2,7 @@ import os import re import csv import clique +from io import StringIO from copy import deepcopy, copy from ayon_api import get_folder_by_path, get_task_by_name @@ -335,7 +336,7 @@ configuration in project settings. # get extension of file basename = os.path.basename(filepath) - _, extension = os.path.splitext(filepath) + extension = os.path.splitext(filepath)[-1].lower() # validate filepath is having correct extension based on output repre_name = repre_data["representationName"] @@ -448,100 +449,92 @@ configuration in project settings. column["name"] for column in self.columns_config["columns"] if column["required_column"] ] - # get data from csv file + + # read csv file with open(csv_file_path, "r") as csv_file: - csv_reader = csv.DictReader( - csv_file, delimiter=self.columns_config["csv_delimiter"]) + csv_content = csv_file.read() - # fix fieldnames - # sometimes someone can keep extra space at the start or end of - # the column name - all_columns = [ - " ".join(column.rsplit()) for column in csv_reader.fieldnames] - # return back fixed fieldnames - csv_reader.fieldnames = all_columns + # read csv file with DictReader + csv_reader = csv.DictReader( + StringIO(csv_content), + delimiter=self.columns_config["csv_delimiter"] + ) - # check if csv file contains all required columns - if any(column not in all_columns for column in required_columns): + # fix fieldnames + # sometimes someone can keep extra space at the start or end of + # the column name + all_columns = [ + " ".join(column.rsplit()) for column in csv_reader.fieldnames] + + # return back fixed fieldnames + csv_reader.fieldnames = all_columns + + # check if csv file contains all required columns + if any(column not in all_columns for column in required_columns): + raise CreatorError( + f"Missing required columns: {required_columns}" + ) + + csv_data = {} + # get data from csv file + for row in csv_reader: + # Get required columns first + # TODO: will need to be folder path in CSV + # TODO: `context_asset_name` is now `folder_path` + folder_path = self._get_row_value_with_validation( + "Folder Path", row) + task_name = self._get_row_value_with_validation( + "Task Name", row) + version = self._get_row_value_with_validation( + "Version", row) + + # Get optional columns + variant = self._get_row_value_with_validation( + "Variant", row) + product_type = self._get_row_value_with_validation( + "Product Type", row) + + pre_product_name = ( + f"{task_name}{variant}{product_type}" + f"{version}".replace(" ", "").lower() + ) + + # 
get representation data + filename, representation_data = \ + self._get_representation_row_data(row) + + # TODO: batch query of all folder paths and task names + + # get folder entity from folder path + folder_entity = get_folder_by_path( + project_name, folder_path) + + # make sure asset exists + if not folder_entity: raise CreatorError( - f"Missing required columns: {required_columns}" + f"Asset '{folder_path}' not found." ) - csv_data = {} - # get data from csv file - for row in csv_reader: - # Get required columns first - # TODO: will need to be folder path in CSV - # TODO: `context_asset_name` is now `folder_path` - folder_path = self._get_row_value_with_validation( - "Folder Path", row) - task_name = self._get_row_value_with_validation( - "Task Name", row) - version = self._get_row_value_with_validation( - "Version", row) + # first get all tasks on the folder entity and then find + task_entity = get_task_by_name( + project_name, folder_entity["id"], task_name) - # Get optional columns - variant = self._get_row_value_with_validation( - "Variant", row) - product_type = self._get_row_value_with_validation( - "Product Type", row) - - pre_product_name = ( - f"{task_name}{variant}{product_type}" - f"{version}".replace(" ", "").lower() + # check if task name is valid task in asset doc + if not task_entity: + raise CreatorError( + f"Task '{task_name}' not found in asset doc." ) - # get representation data - filename, representation_data = \ - self._get_representation_row_data(row) - - # TODO: batch query of all folder paths and task names - - # get folder entity from folder path - folder_entity = get_folder_by_path( - project_name, folder_path) - - # make sure asset exists - if not folder_entity: - raise CreatorError( - f"Asset '{folder_path}' not found." - ) - - # first get all tasks on the folder entity and then find - task_entity = get_task_by_name( - project_name, folder_entity["id"], task_name) - - # check if task name is valid task in asset doc - if not task_entity: - raise CreatorError( - f"Task '{task_name}' not found in asset doc." - ) - - # get all csv data into one dict and make sure there are no - # duplicates data are already validated and sorted under - # correct existing asset also check if asset exists and if - # task name is valid task in asset doc and representations - # are distributed under products following variants - if folder_path not in csv_data: - csv_data[folder_path] = { - "folder_entity": folder_entity, - "products": { - pre_product_name: { - "task_name": task_name, - "task_type": task_entity["taskType"], - "variant": variant, - "product_type": product_type, - "version": version, - "representations": { - filename: representation_data, - }, - } - } - } - else: - csv_products = csv_data[folder_path]["products"] - if pre_product_name not in csv_products: - csv_products[pre_product_name] = { + # get all csv data into one dict and make sure there are no + # duplicates data are already validated and sorted under + # correct existing asset also check if asset exists and if + # task name is valid task in asset doc and representations + # are distributed under products following variants + if folder_path not in csv_data: + csv_data[folder_path] = { + "folder_entity": folder_entity, + "products": { + pre_product_name: { "task_name": task_name, "task_type": task_entity["taskType"], "variant": variant, @@ -551,14 +544,29 @@ configuration in project settings. 
filename: representation_data, }, } - else: - csv_representations = \ - csv_products[pre_product_name]["representations"] - if filename in csv_representations: - raise CreatorError( - f"Duplicate filename '{filename}' in csv file." - ) - csv_representations[filename] = representation_data + } + } + else: + csv_products = csv_data[folder_path]["products"] + if pre_product_name not in csv_products: + csv_products[pre_product_name] = { + "task_name": task_name, + "task_type": task_entity["taskType"], + "variant": variant, + "product_type": product_type, + "version": version, + "representations": { + filename: representation_data, + }, + } + else: + csv_representations = \ + csv_products[pre_product_name]["representations"] + if filename in csv_representations: + raise CreatorError( + f"Duplicate filename '{filename}' in csv file." + ) + csv_representations[filename] = representation_data return csv_data From 6a01b8b6ac6a7f3701b6be7fdfcbdd073864c2a9 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 19 Mar 2024 13:19:35 +0100 Subject: [PATCH 19/67] Update creator plugin field name from "Folder Context" to "Folder Path" and adjust validation pattern to allow slashes. --- server_addon/traypublisher/server/settings/creator_plugins.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/server_addon/traypublisher/server/settings/creator_plugins.py b/server_addon/traypublisher/server/settings/creator_plugins.py index 3a07a76e6f..1ff14002aa 100644 --- a/server_addon/traypublisher/server/settings/creator_plugins.py +++ b/server_addon/traypublisher/server/settings/creator_plugins.py @@ -185,11 +185,11 @@ DEFAULT_CREATORS = { "validation_pattern": "^([a-z0-9#._\\/]*)$" }, { - "name": "Folder Context", + "name": "Folder Path", "type": "text", "default": "", "required_column": True, - "validation_pattern": "^([a-zA-Z0-9_]*)$" + "validation_pattern": "^([a-zA-Z0-9_\\/]*)$" }, { "name": "Task Name", From 3791731b6ac2b42583f56ccc69501f39b6372f06 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 26 Mar 2024 14:45:30 +0100 Subject: [PATCH 20/67] Refactor option names and handle folder & task entities. - Renamed "--folder" to "--folder-path" for clarity. - Updated function parameters to use "folder_path" consistently. - Added error handling for non-existent folder or task paths. 
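For orientation, a sketch of how the refactored signature is called after this
change (illustrative only; the values are made up and the absolute import path
is inferred from the file location in this repository):

    from ayon_core.hosts.traypublisher.csv_publish import csvpublish

    csvpublish(
        filepath="/ingest/delivery_20240319.csv",  # made-up CSV path
        project_name="demo_project",               # made-up project
        folder_path="/shots/sq01/sh010",           # folder path, not a bare asset name
        task_name="compositing",                   # optional
        ignore_validators=False,
    )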
--- client/ayon_core/hosts/traypublisher/addon.py | 6 ++-- .../hosts/traypublisher/csv_publish.py | 32 +++++++++++++++---- 2 files changed, 28 insertions(+), 10 deletions(-) diff --git a/client/ayon_core/hosts/traypublisher/addon.py b/client/ayon_core/hosts/traypublisher/addon.py index ee42784f98..3dd275f223 100644 --- a/client/ayon_core/hosts/traypublisher/addon.py +++ b/client/ayon_core/hosts/traypublisher/addon.py @@ -74,7 +74,7 @@ def launch(): required=True ) @click_wrap.option( - "--folder", + "--folder-path", help="Asset name in which the context will be used", type=str, required=True @@ -95,7 +95,7 @@ def launch(): def ingestcsv( filepath, project, - folder, + folder_path, task, ignore_validators ): @@ -113,7 +113,7 @@ def ingestcsv( csvpublish( filepath, project, - folder, + folder_path, task, ignore_validators ) diff --git a/client/ayon_core/hosts/traypublisher/csv_publish.py b/client/ayon_core/hosts/traypublisher/csv_publish.py index 32c2b69371..b43792a357 100644 --- a/client/ayon_core/hosts/traypublisher/csv_publish.py +++ b/client/ayon_core/hosts/traypublisher/csv_publish.py @@ -3,7 +3,7 @@ import os import pyblish.api import pyblish.util -from ayon_api import get_folder_by_name +from ayon_api import get_folder_by_path, get_task_by_name from ayon_core.lib.attribute_definitions import FileDefItem from ayon_core.pipeline import install_host from ayon_core.pipeline.create import CreateContext @@ -14,7 +14,7 @@ from ayon_core.hosts.traypublisher.api import TrayPublisherHost def csvpublish( filepath, project_name, - folder_name, + folder_path, task_name=None, ignore_validators=False ): @@ -23,7 +23,7 @@ def csvpublish( Args: filepath (str): Path to CSV file. project_name (str): Project name. - folder_name (str): Folder name. + folder_path (str): Folder path. task_name (Optional[str]): Task name. ignore_validators (Optional[bool]): Option to ignore validators. """ @@ -43,16 +43,34 @@ def csvpublish( # create context initialization create_context = CreateContext(host, headless=True) - asset_doc = get_folder_by_name( + folder_entity = get_folder_by_path( project_name, - folder_name=folder_name + folder_path=folder_path, ) + if not folder_entity: + ValueError( + f"Folder path '{folder_path}' doesn't " + f"exists at project '{project_name}'." + ) + + task_entity = get_task_by_name( + project_name, + folder_entity["id"], + task_name, + ) + + if not task_entity: + ValueError( + f"Task name '{task_name}' doesn't " + f"exists at folder '{folder_path}'." 
+ ) + create_context.create( "io.ayon.creators.traypublisher.csv_ingest", "Main", - asset_doc=asset_doc, - task_name=task_name, + folder_entity=folder_entity, + task_entity=task_entity, pre_create_data=precreate_data, ) From 49cfea34c7893cd221a2fbf2973241b353a4923a Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 27 Mar 2024 01:25:46 +0100 Subject: [PATCH 21/67] Tweak message formatting --- .../hosts/houdini/plugins/publish/validate_cop_output_node.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/client/ayon_core/hosts/houdini/plugins/publish/validate_cop_output_node.py b/client/ayon_core/hosts/houdini/plugins/publish/validate_cop_output_node.py index 95414ae7f1..8d3b248ecd 100644 --- a/client/ayon_core/hosts/houdini/plugins/publish/validate_cop_output_node.py +++ b/client/ayon_core/hosts/houdini/plugins/publish/validate_cop_output_node.py @@ -26,7 +26,7 @@ class ValidateCopOutputNode(pyblish.api.InstancePlugin): invalid = self.get_invalid(instance) if invalid: raise PublishValidationError( - ("Output node(s) `{}` are incorrect. " + ("Output node '{}' are incorrect. " "See plug-in log for details.").format(invalid), title=self.label ) From 1626182e925b1d270017a8594a2cb9246cf49c83 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 27 Mar 2024 01:28:07 +0100 Subject: [PATCH 22/67] Make singular --- .../hosts/houdini/plugins/publish/validate_cop_output_node.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/client/ayon_core/hosts/houdini/plugins/publish/validate_cop_output_node.py b/client/ayon_core/hosts/houdini/plugins/publish/validate_cop_output_node.py index 8d3b248ecd..a6a7044f77 100644 --- a/client/ayon_core/hosts/houdini/plugins/publish/validate_cop_output_node.py +++ b/client/ayon_core/hosts/houdini/plugins/publish/validate_cop_output_node.py @@ -26,7 +26,7 @@ class ValidateCopOutputNode(pyblish.api.InstancePlugin): invalid = self.get_invalid(instance) if invalid: raise PublishValidationError( - ("Output node '{}' are incorrect. " + ("Output node '{}' is incorrect. " "See plug-in log for details.").format(invalid), title=self.label ) From 9666352b6e625f8b45e577bfea9bc39f18af7585 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 27 Mar 2024 12:08:35 +0100 Subject: [PATCH 23/67] Cleanup logic --- .../plugins/publish/validate_cop_output_node.py | 13 ++----------- 1 file changed, 2 insertions(+), 11 deletions(-) diff --git a/client/ayon_core/hosts/houdini/plugins/publish/validate_cop_output_node.py b/client/ayon_core/hosts/houdini/plugins/publish/validate_cop_output_node.py index a6a7044f77..59bb8e66f1 100644 --- a/client/ayon_core/hosts/houdini/plugins/publish/validate_cop_output_node.py +++ b/client/ayon_core/hosts/houdini/plugins/publish/validate_cop_output_node.py @@ -36,18 +36,9 @@ class ValidateCopOutputNode(pyblish.api.InstancePlugin): import hou - try: - output_node = instance.data["output_node"] - except KeyError: - six.reraise( - PublishValidationError, - PublishValidationError( - "Can't determine COP output node.", - title=cls.__name__), - sys.exc_info()[2] - ) + output_node = instance.data.get("output_node") - if output_node is None: + if not output_node: node = hou.node(instance.data.get("instance_node")) cls.log.error( "COP Output node in '%s' does not exist. 
" From efcf5148bd253968b79af43d68654eef66695e81 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 27 Mar 2024 14:47:31 +0100 Subject: [PATCH 24/67] Maya: Yeti - Implement writing and loading user variables with a yeti cache --- client/ayon_core/hosts/maya/api/yeti.py | 101 ++++++++++++++++++ .../maya/plugins/load/load_yeti_cache.py | 41 +++++++ .../plugins/publish/collect_yeti_cache.py | 20 +++- 3 files changed, 161 insertions(+), 1 deletion(-) create mode 100644 client/ayon_core/hosts/maya/api/yeti.py diff --git a/client/ayon_core/hosts/maya/api/yeti.py b/client/ayon_core/hosts/maya/api/yeti.py new file mode 100644 index 0000000000..1526c3a2f3 --- /dev/null +++ b/client/ayon_core/hosts/maya/api/yeti.py @@ -0,0 +1,101 @@ +from typing import List + +from maya import cmds + + +def get_yeti_user_variables(yeti_shape_node: str) -> List[str]: + """Get user defined yeti user variables for a `pgYetiMaya` shape node. + + Arguments: + yeti_shape_node (str): The `pgYetiMaya` shape node. + + Returns: + list: Attribute names (for a vector attribute it only lists the top + parent attribute, not the attribute per axis) + """ + + attrs = cmds.listAttr(yeti_shape_node, + userDefined=True, + string=("yetiVariableV_*", + "yetiVariableF_*")) or [] + valid_attrs = [] + for attr in attrs: + attr_type = cmds.attributeQuery(attr, node=yeti_shape_node, + attributeType=True) + if attr.startswith("yetiVariableV_") and attr_type == "double3": + # vector + valid_attrs.append(attr) + elif attr.startswith("yetiVariableF_") and attr_type == "double": + valid_attrs.append(attr) + + return valid_attrs + + +def create_yeti_variable(yeti_shape_node: str, + attr_name: str, + value=None, + force_value: bool = False) -> bool: + """Get user defined yeti user variables for a `pgYetiMaya` shape node. + + Arguments: + yeti_shape_node (str): The `pgYetiMaya` shape node. + attr_name (str): The fully qualified yeti variable name, e.g. + "yetiVariableF_myfloat" or "yetiVariableV_myvector" + value (object): The value to set (must match the type of the attribute) + When value is None it will ignored and not be set. + force_value (bool): Whether to set the value if the attribute already + exists or not. + + Returns: + bool: Whether the attribute value was set or not. 
+ + """ + exists = cmds.attributeQuery(attr_name, node=yeti_shape_node, exists=True) + if not exists: + if attr_name.startswith("yetiVariableV_"): + _create_vector_yeti_user_variable(yeti_shape_node, attr_name) + if attr_name.startswith("yetiVariableF_"): + _create_float_yeti_user_variable(yeti_shape_node, attr_name) + + if value is not None and (not exists or force_value): + plug = "{}.{}".format(yeti_shape_node, attr_name) + if ( + isinstance(value, (list, tuple)) + and attr_name.startswith("yetiVariableV_") + ): + cmds.setAttr(plug, *value, type="double3") + else: + cmds.setAttr(plug, value) + + return True + return False + + +def _create_vector_yeti_user_variable(yeti_shape_node: str, attr_name: str): + if not attr_name.startswith("yetiVariableV_"): + raise ValueError("Must start with yetiVariableV_") + cmds.addAttr(yeti_shape_node, + longName=attr_name, + attributeType="double3", + cachedInternally=True, + keyable=True) + for axis in "XYZ": + cmds.addAttr(yeti_shape_node, + longName="{}{}".format(attr_name, axis), + attributeType="double", + parent=attr_name, + cachedInternally=True, + keyable=True) + + +def _create_float_yeti_user_variable(yeti_node: str, attr_name: str): + if not attr_name.startswith("yetiVariableF_"): + raise ValueError("Must start with yetiVariableF_") + + cmds.addAttr(yeti_node, + longName=attr_name, + attributeType="double", + cachedInternally=True, + softMinValue=0, + softMaxValue=100, + keyable=True) diff --git a/client/ayon_core/hosts/maya/plugins/load/load_yeti_cache.py b/client/ayon_core/hosts/maya/plugins/load/load_yeti_cache.py index a5cd04b0f4..06f74e5107 100644 --- a/client/ayon_core/hosts/maya/plugins/load/load_yeti_cache.py +++ b/client/ayon_core/hosts/maya/plugins/load/load_yeti_cache.py @@ -12,6 +12,7 @@ from ayon_core.pipeline import ( get_representation_path ) from ayon_core.hosts.maya.api import lib +from ayon_core.hosts.maya.api.yeti import create_yeti_variable from ayon_core.hosts.maya.api.pipeline import containerise from ayon_core.hosts.maya.api.plugin import get_load_color_for_product_type @@ -23,8 +24,19 @@ SKIP_UPDATE_ATTRS = { "viewportDensity", "viewportWidth", "viewportLength", + "renderDensity", + "renderWidth", + "renderLength", + "increaseRenderBounds" } +SKIP_ATTR_MESSAGE = ( + "Skipping updating %s.%s to %s because it " + "is considered a local overridable attribute. " + "Either set manually or the load the cache " + "anew." 
+) + def set_attribute(node, attr, value): """Wrapper of set attribute which ignores None values""" @@ -209,9 +221,31 @@ class YetiCacheLoader(load.LoaderPlugin): for attr, value in node_settings["attrs"].items(): if attr in SKIP_UPDATE_ATTRS: + self.log.info( + SKIP_ATTR_MESSAGE, yeti_node, attr, value + ) continue set_attribute(attr, value, yeti_node) + # Set up user defined attributes + user_variables = node_settings.get("user_variables", {}) + for attr, value in user_variables.items(): + was_value_set = create_yeti_variable( + yeti_shape_node=yeti_node, + attr_name=attr, + value=value, + # We do not want to update the + # value if it already exists so + # that any local overrides that + # may have been applied still + # persist + force_value=False + ) + if not was_value_set: + self.log.info( + SKIP_ATTR_MESSAGE, yeti_node, attr, value + ) + cmds.setAttr("{}.representation".format(container_node), repre_entity["id"], typ="string") @@ -332,6 +366,13 @@ class YetiCacheLoader(load.LoaderPlugin): for attr, value in attributes.items(): set_attribute(attr, value, yeti_node) + # Set up user defined attributes + user_variables = node_settings.get("user_variables", {}) + for attr, value in user_variables.items(): + create_yeti_variable(yeti_shape_node=yeti_node, + attr_name=attr, + value=value) + # Connect to the time node cmds.connectAttr("time1.outTime", "%s.currentTime" % yeti_node) diff --git a/client/ayon_core/hosts/maya/plugins/publish/collect_yeti_cache.py b/client/ayon_core/hosts/maya/plugins/publish/collect_yeti_cache.py index 067a7bc532..e1755e4212 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/collect_yeti_cache.py +++ b/client/ayon_core/hosts/maya/plugins/publish/collect_yeti_cache.py @@ -3,6 +3,7 @@ from maya import cmds import pyblish.api from ayon_core.hosts.maya.api import lib +from ayon_core.hosts.maya.api.yeti import get_yeti_user_variables SETTINGS = { @@ -34,7 +35,7 @@ class CollectYetiCache(pyblish.api.InstancePlugin): - "increaseRenderBounds" - "imageSearchPath" - Other information is the name of the transform and it's Colorbleed ID + Other information is the name of the transform and its `cbId` """ order = pyblish.api.CollectorOrder + 0.45 @@ -54,6 +55,16 @@ class CollectYetiCache(pyblish.api.InstancePlugin): # Get specific node attributes attr_data = {} for attr in SETTINGS: + # Ignore non-existing attributes with a warning, e.g. 
cbId + # if they have not been generated yet + if not cmds.attributeQuery(attr, node=shape, exists=True): + self.log.warning( + "Attribute '{}' not found on Yeti node: {}".format( + attr, shape + ) + ) + continue + current = cmds.getAttr("%s.%s" % (shape, attr)) # change None to empty string as Maya doesn't support # NoneType in attributes @@ -61,6 +72,12 @@ class CollectYetiCache(pyblish.api.InstancePlugin): current = "" attr_data[attr] = current + # Get user variable attributes + user_variable_attrs = { + attr: lib.get_attribute("{}.{}".format(shape, attr)) + for attr in get_yeti_user_variables(shape) + } + # Get transform data parent = cmds.listRelatives(shape, parent=True)[0] transform_data = {"name": parent, "cbId": lib.get_id(parent)} @@ -70,6 +87,7 @@ class CollectYetiCache(pyblish.api.InstancePlugin): "name": shape, "cbId": lib.get_id(shape), "attrs": attr_data, + "user_variables": user_variable_attrs } settings["nodes"].append(shape_data) From bf18daafe5c5d3128158b536cbd796fe856f81fa Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Fri, 29 Mar 2024 17:42:00 +0100 Subject: [PATCH 25/67] Maya - Allow loading a published workfile as template --- .../maya/plugins/load/load_as_template.py | 39 +++++++++++++++++++ .../workfile/workfile_template_builder.py | 19 ++++----- 2 files changed, 49 insertions(+), 9 deletions(-) create mode 100644 client/ayon_core/hosts/maya/plugins/load/load_as_template.py diff --git a/client/ayon_core/hosts/maya/plugins/load/load_as_template.py b/client/ayon_core/hosts/maya/plugins/load/load_as_template.py new file mode 100644 index 0000000000..a251f1c52e --- /dev/null +++ b/client/ayon_core/hosts/maya/plugins/load/load_as_template.py @@ -0,0 +1,39 @@ +from openpype.lib import ( + BoolDef +) +from openpype.pipeline import ( + load, + registered_host +) +from openpype.hosts.maya.api.workfile_template_builder import ( + MayaTemplateBuilder +) + + +class LoadAsTemplate(load.LoaderPlugin): + """Load workfile as a template """ + + families = ["workfile"] + label = "Load as template" + representations = ["ma", "mb"] + icon = "wrench" + color = "#775555" + order = 10 + + options = [ + BoolDef("keep_placeholders", + label="Keep Placeholders", + default=False), + BoolDef("create_first_version", + label="Create First Version", + default=False), + ] + + def load(self, context, name, namespace, data): + keep_placeholders = data.get("keep_placeholders", False) + create_first_version = data.get("create_first_version", False) + path = self.filepath_from_context(context) + builder = MayaTemplateBuilder(registered_host()) + builder.build_template(template_path=path, + keep_placeholders=keep_placeholders, + create_first_version=create_first_version) diff --git a/client/ayon_core/pipeline/workfile/workfile_template_builder.py b/client/ayon_core/pipeline/workfile/workfile_template_builder.py index 5e63ba444a..53f4bf8c32 100644 --- a/client/ayon_core/pipeline/workfile/workfile_template_builder.py +++ b/client/ayon_core/pipeline/workfile/workfile_template_builder.py @@ -498,15 +498,16 @@ class AbstractTemplateBuilder(object): process if version is created """ - template_preset = self.get_template_preset() - - if template_path is None: - template_path = template_preset["path"] - - if keep_placeholders is None: - keep_placeholders = template_preset["keep_placeholder"] - if create_first_version is None: - create_first_version = template_preset["create_first_version"] + if any(value is None for value in [template_path, + keep_placeholders, + create_first_version]): + template_preset = 
self.get_template_preset() + if template_path is None: + template_path = template_preset["path"] + if keep_placeholders is None: + keep_placeholders = template_preset["keep_placeholder"] + if create_first_version is None: + create_first_version = template_preset["create_first_version"] # check if first version is created created_version_workfile = False From bd2527ebe6cb1a856ae0e4882668d12f0739c56f Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Fri, 29 Mar 2024 17:46:23 +0100 Subject: [PATCH 26/67] Improve type hints --- .../pipeline/workfile/workfile_template_builder.py | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/client/ayon_core/pipeline/workfile/workfile_template_builder.py b/client/ayon_core/pipeline/workfile/workfile_template_builder.py index 53f4bf8c32..7faa67af04 100644 --- a/client/ayon_core/pipeline/workfile/workfile_template_builder.py +++ b/client/ayon_core/pipeline/workfile/workfile_template_builder.py @@ -16,6 +16,7 @@ import re import collections import copy from abc import ABCMeta, abstractmethod +from typing import TypedDict import six from ayon_api import ( @@ -52,6 +53,14 @@ from ayon_core.pipeline.create import ( _NOT_SET = object() +class TemplatePresetDict(TypedDict): + """Dictionary with `path`, `keep_placeholder` and `create_first_version` + settings from the template preset for current context.""" + path: str + keep_placeholder: bool + create_first_version: bool + + class TemplateNotFound(Exception): """Exception raised when template does not exist.""" pass @@ -773,7 +782,9 @@ class AbstractTemplateBuilder(object): - 'project_settings/{host name}/templated_workfile_build/profiles' Returns: - str: Path to a template file with placeholders. + TemplatePresetDict: Dictionary with `path`, `keep_placeholder` and + `create_first_version` settings from the template preset + for current context. Raises: TemplateProfileNotFound: When profiles are not filled. 
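# Aside (illustrative, not part of the patch): how the preset dict documented
# above is consumed. The imports mirror the new LoadAsTemplate loader; the
# example values are made up.
from ayon_core.pipeline import registered_host
from ayon_core.hosts.maya.api.workfile_template_builder import (
    MayaTemplateBuilder
)

builder = MayaTemplateBuilder(registered_host())
preset = builder.get_template_preset()
# e.g. {"path": "/templates/shot.ma", "keep_placeholder": True,
#       "create_first_version": False}
builder.build_template(
    template_path=preset["path"],
    keep_placeholders=preset["keep_placeholder"],
    create_first_version=preset["create_first_version"],
)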
From ed68f16b720c05d1efef74de43eee428773a2c41 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Fri, 29 Mar 2024 17:51:38 +0100 Subject: [PATCH 27/67] Fix refactor --- .../ayon_core/hosts/maya/plugins/load/load_as_template.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/client/ayon_core/hosts/maya/plugins/load/load_as_template.py b/client/ayon_core/hosts/maya/plugins/load/load_as_template.py index a251f1c52e..5c546a1837 100644 --- a/client/ayon_core/hosts/maya/plugins/load/load_as_template.py +++ b/client/ayon_core/hosts/maya/plugins/load/load_as_template.py @@ -1,11 +1,11 @@ -from openpype.lib import ( +from ayon_core.lib import ( BoolDef ) -from openpype.pipeline import ( +from ayon_core.pipeline import ( load, registered_host ) -from openpype.hosts.maya.api.workfile_template_builder import ( +from ayon_core.hosts.maya.api.workfile_template_builder import ( MayaTemplateBuilder ) @@ -13,7 +13,7 @@ from openpype.hosts.maya.api.workfile_template_builder import ( class LoadAsTemplate(load.LoaderPlugin): """Load workfile as a template """ - families = ["workfile"] + product_types = {"workfile"} label = "Load as template" representations = ["ma", "mb"] icon = "wrench" From 04f57187040d5c8698b4c90cda3dbbfae050d9f5 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Fri, 29 Mar 2024 17:55:56 +0100 Subject: [PATCH 28/67] Bugfix: refactor `family` -> `product_type` --- client/ayon_core/pipeline/workfile/workfile_template_builder.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/client/ayon_core/pipeline/workfile/workfile_template_builder.py b/client/ayon_core/pipeline/workfile/workfile_template_builder.py index 7faa67af04..fb357d8b9b 100644 --- a/client/ayon_core/pipeline/workfile/workfile_template_builder.py +++ b/client/ayon_core/pipeline/workfile/workfile_template_builder.py @@ -1480,7 +1480,7 @@ class PlaceholderLoadMixin(object): product_name_regex = None if product_name_regex_value: product_name_regex = re.compile(product_name_regex_value) - product_type = placeholder.data["family"] + product_type = placeholder.data["product_type"] builder_type = placeholder.data["builder_type"] folder_ids = [] From eb0de8685408c95f63f8e4d9e15b1c0be926779e Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 2 Apr 2024 14:43:37 +0200 Subject: [PATCH 29/67] Remove `TypedDict` to support Py 3.7 (e.g. 
Maya 2022) --- .../pipeline/workfile/workfile_template_builder.py | 13 ++----------- 1 file changed, 2 insertions(+), 11 deletions(-) diff --git a/client/ayon_core/pipeline/workfile/workfile_template_builder.py b/client/ayon_core/pipeline/workfile/workfile_template_builder.py index fb357d8b9b..cd63198317 100644 --- a/client/ayon_core/pipeline/workfile/workfile_template_builder.py +++ b/client/ayon_core/pipeline/workfile/workfile_template_builder.py @@ -16,7 +16,6 @@ import re import collections import copy from abc import ABCMeta, abstractmethod -from typing import TypedDict import six from ayon_api import ( @@ -53,14 +52,6 @@ from ayon_core.pipeline.create import ( _NOT_SET = object() -class TemplatePresetDict(TypedDict): - """Dictionary with `path`, `keep_placeholder` and `create_first_version` - settings from the template preset for current context.""" - path: str - keep_placeholder: bool - create_first_version: bool - - class TemplateNotFound(Exception): """Exception raised when template does not exist.""" pass @@ -782,14 +773,14 @@ class AbstractTemplateBuilder(object): - 'project_settings/{host name}/templated_workfile_build/profiles' Returns: - TemplatePresetDict: Dictionary with `path`, `keep_placeholder` and + dict: Dictionary with `path`, `keep_placeholder` and `create_first_version` settings from the template preset for current context. Raises: TemplateProfileNotFound: When profiles are not filled. TemplateLoadFailed: Profile was found but path is not set. - TemplateNotFound: Path was set but file does not exists. + TemplateNotFound: Path was set but file does not exist. """ host_name = self.host_name From ab408bd177972e1d49778fa5da1c59c7fcba04ec Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 3 Apr 2024 16:16:06 +0200 Subject: [PATCH 30/67] Update client/ayon_core/pipeline/workfile/workfile_template_builder.py Co-authored-by: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> --- .../ayon_core/pipeline/workfile/workfile_template_builder.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/client/ayon_core/pipeline/workfile/workfile_template_builder.py b/client/ayon_core/pipeline/workfile/workfile_template_builder.py index cd63198317..22c732a07a 100644 --- a/client/ayon_core/pipeline/workfile/workfile_template_builder.py +++ b/client/ayon_core/pipeline/workfile/workfile_template_builder.py @@ -1471,7 +1471,9 @@ class PlaceholderLoadMixin(object): product_name_regex = None if product_name_regex_value: product_name_regex = re.compile(product_name_regex_value) - product_type = placeholder.data["product_type"] + product_type = placeholder.data.get("product_type") + if product_type is None: + product_type = placeholder.data["family"] builder_type = placeholder.data["builder_type"] folder_ids = [] From 75dbba65bf02b0e3d16fc7ecb04363f940807d72 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 3 Apr 2024 16:16:17 +0200 Subject: [PATCH 31/67] Update client/ayon_core/pipeline/workfile/workfile_template_builder.py Co-authored-by: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> --- .../pipeline/workfile/workfile_template_builder.py | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/client/ayon_core/pipeline/workfile/workfile_template_builder.py b/client/ayon_core/pipeline/workfile/workfile_template_builder.py index 22c732a07a..ceac5405c5 100644 --- a/client/ayon_core/pipeline/workfile/workfile_template_builder.py +++ b/client/ayon_core/pipeline/workfile/workfile_template_builder.py @@ -498,9 +498,14 @@ class 
AbstractTemplateBuilder(object): process if version is created """ - if any(value is None for value in [template_path, - keep_placeholders, - create_first_version]): + if any( + value is None + for value in [ + template_path, + keep_placeholders, + create_first_version, + ] + ): template_preset = self.get_template_preset() if template_path is None: template_path = template_preset["path"] From bbcee5fd6b352e6ef309e00199139b02dbbe014b Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 4 Apr 2024 17:18:12 +0200 Subject: [PATCH 32/67] Cleanup code + add description to report, also remove `title=cls.label` because that is the default behavior --- .../plugins/publish/validate_cop_output_node.py | 17 +++++++---------- 1 file changed, 7 insertions(+), 10 deletions(-) diff --git a/client/ayon_core/hosts/houdini/plugins/publish/validate_cop_output_node.py b/client/ayon_core/hosts/houdini/plugins/publish/validate_cop_output_node.py index 5796eef1b2..16e72491cc 100644 --- a/client/ayon_core/hosts/houdini/plugins/publish/validate_cop_output_node.py +++ b/client/ayon_core/hosts/houdini/plugins/publish/validate_cop_output_node.py @@ -1,7 +1,6 @@ # -*- coding: utf-8 -*- -import sys +import hou import pyblish.api -import six from ayon_core.pipeline import PublishValidationError @@ -33,9 +32,6 @@ class ValidateCopOutputNode(pyblish.api.InstancePlugin): @classmethod def get_invalid(cls, instance): - - import hou - output_node = instance.data.get("output_node") if not output_node: @@ -62,8 +58,9 @@ class ValidateCopOutputNode(pyblish.api.InstancePlugin): # the isinstance check above should be stricter than this category if output_node.type().category().name() != "Cop2": raise PublishValidationError( - ( - "Output node {} is not of category Cop2." - " This is a bug..." - ).format(output_node.path()), - title=cls.label) + f"Output node {output_node.path()} is not of category Cop2.", + description=( + "### Invalid COP output node\n\n" + "The output node path for the instance must be set to a " + "valid COP node path. See the log for more details." + )) From df201386f52721803aa6280da236886881ee6887 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 9 Apr 2024 16:04:25 +0200 Subject: [PATCH 33/67] Tweak validation --- .../publish/validate_cop_output_node.py | 26 ++++++++++--------- 1 file changed, 14 insertions(+), 12 deletions(-) diff --git a/client/ayon_core/hosts/houdini/plugins/publish/validate_cop_output_node.py b/client/ayon_core/hosts/houdini/plugins/publish/validate_cop_output_node.py index 16e72491cc..91bd36018a 100644 --- a/client/ayon_core/hosts/houdini/plugins/publish/validate_cop_output_node.py +++ b/client/ayon_core/hosts/houdini/plugins/publish/validate_cop_output_node.py @@ -25,9 +25,14 @@ class ValidateCopOutputNode(pyblish.api.InstancePlugin): invalid = self.get_invalid(instance) if invalid: raise PublishValidationError( - ("Output node '{}' is incorrect. " - "See plug-in log for details.").format(invalid), - title=self.label + "Output node '{}' is incorrect. " + "See plug-in log for details.".format(invalid), + title=self.label, + description=( + "### Invalid COP output node\n\n" + "The output node path for the instance must be set to a " + "valid COP node path.\n\nSee the log for more details." + ) ) @classmethod @@ -48,8 +53,8 @@ class ValidateCopOutputNode(pyblish.api.InstancePlugin): cls.log.error( "Output node %s is not a COP node. 
" "COP Path must point to a COP node, " - "instead found category type: %s" - % (output_node.path(), output_node.type().category().name()) + "instead found category type: %s", + output_node.path(), output_node.type().category().name() ) return [output_node.path()] @@ -57,10 +62,7 @@ class ValidateCopOutputNode(pyblish.api.InstancePlugin): # is Cop2 to avoid potential edge case scenarios even though # the isinstance check above should be stricter than this category if output_node.type().category().name() != "Cop2": - raise PublishValidationError( - f"Output node {output_node.path()} is not of category Cop2.", - description=( - "### Invalid COP output node\n\n" - "The output node path for the instance must be set to a " - "valid COP node path. See the log for more details." - )) + cls.log.error( + "Output node %s is not of category Cop2.", output_node.path() + ) + return [output_node.path()] From 6f8ab66eb2684b36326a563bc7c91d792ce88484 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 10 Apr 2024 09:32:22 +0200 Subject: [PATCH 34/67] Update client/ayon_core/hosts/maya/plugins/load/load_as_template.py Co-authored-by: Toke Jepsen --- client/ayon_core/hosts/maya/plugins/load/load_as_template.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/client/ayon_core/hosts/maya/plugins/load/load_as_template.py b/client/ayon_core/hosts/maya/plugins/load/load_as_template.py index 5c546a1837..f696d369e3 100644 --- a/client/ayon_core/hosts/maya/plugins/load/load_as_template.py +++ b/client/ayon_core/hosts/maya/plugins/load/load_as_template.py @@ -13,7 +13,7 @@ from ayon_core.hosts.maya.api.workfile_template_builder import ( class LoadAsTemplate(load.LoaderPlugin): """Load workfile as a template """ - product_types = {"workfile"} + product_types = {"workfile", "mayaScene"} label = "Load as template" representations = ["ma", "mb"] icon = "wrench" From ddf90da4fdc2284b3ef05eed265d730fad4db23b Mon Sep 17 00:00:00 2001 From: Toke Stuart Jepsen Date: Wed, 10 Apr 2024 10:13:59 +0100 Subject: [PATCH 35/67] Working version --- client/ayon_core/hosts/hiero/api/events.py | 4 + client/ayon_core/hosts/hiero/api/lib.py | 82 +++++++++++++++---- client/ayon_core/hosts/hiero/api/tags.py | 2 +- client/ayon_core/hosts/nuke/api/lib.py | 75 +++++++++-------- client/ayon_core/hosts/nuke/api/plugin.py | 1 - server_addon/hiero/server/settings/imageio.py | 18 ++-- server_addon/hiero/server/version.py | 2 +- server_addon/nuke/server/settings/imageio.py | 39 +++++++-- server_addon/nuke/server/version.py | 2 +- 9 files changed, 158 insertions(+), 67 deletions(-) diff --git a/client/ayon_core/hosts/hiero/api/events.py b/client/ayon_core/hosts/hiero/api/events.py index 304605e24e..663004abd2 100644 --- a/client/ayon_core/hosts/hiero/api/events.py +++ b/client/ayon_core/hosts/hiero/api/events.py @@ -8,6 +8,7 @@ from .lib import ( sync_avalon_data_to_workfile, launch_workfiles_app, before_project_save, + apply_colorspace_project ) from .tags import add_tags_to_workfile from .menu import update_menu_task_label @@ -44,6 +45,8 @@ def afterNewProjectCreated(event): # reset workfiles startup not to open any more in session os.environ["WORKFILES_STARTUP"] = "0" + apply_colorspace_project() + def beforeProjectLoad(event): log.info("before project load event...") @@ -122,6 +125,7 @@ def register_hiero_events(): except RuntimeError: pass + def register_events(): """ Adding all callbacks. 
diff --git a/client/ayon_core/hosts/hiero/api/lib.py b/client/ayon_core/hosts/hiero/api/lib.py index 8682ff7780..aaf99546c7 100644 --- a/client/ayon_core/hosts/hiero/api/lib.py +++ b/client/ayon_core/hosts/hiero/api/lib.py @@ -11,7 +11,6 @@ import warnings import json import ast import secrets -import shutil import hiero from qtpy import QtWidgets, QtCore @@ -36,9 +35,6 @@ from .constants import ( DEFAULT_SEQUENCE_NAME, DEFAULT_BIN_NAME ) -from ayon_core.pipeline.colorspace import ( - get_imageio_config -) class _CTX: @@ -105,9 +101,9 @@ def flatten(list_): def get_current_project(remove_untitled=False): - projects = flatten(hiero.core.projects()) + projects = hiero.core.projects() if not remove_untitled: - return next(iter(projects)) + return projects[0] # if remove_untitled for proj in projects: @@ -1050,18 +1046,68 @@ def _set_hrox_project_knobs(doc, **knobs): def apply_colorspace_project(): - project_name = get_current_project_name() - # get path the the active projects - project = get_current_project(remove_untitled=True) - current_file = project.path() - - # close the active project - project.close() + """Apply colorspaces from settings. + Due to not being able to set the project settings through the Python API, + we need to do use some dubious code to find the widgets and set them. It is + possible to set the project settings without traversing through the widgets + but it involves reading the hrox files from disk with XML, so no in-memory + support. See https://community.foundry.com/discuss/topic/137771/change-a-project-s-default-color-transform-with-python # noqa + for more details. + """ # get presets for hiero + project_name = get_current_project_name() imageio = get_project_settings(project_name)["hiero"]["imageio"] presets = imageio.get("workfile") + # Open Project Settings UI. + for act in hiero.ui.registeredActions(): + if act.objectName() == "foundry.project.settings": + act.trigger() + + # Find widgets from their sibling label. + labels = { + "Working Space:": "workingSpace", + "Viewer:": "viewerLut", + "Thumbnails:": "thumbnailLut", + "Monitor Out:": "monitorOutLut", + "8 Bit Files:": "eightBitLut", + "16 Bit Files:": "sixteenBitLut", + "Log Files:": "logLut", + "Floating Point Files:": "floatLut" + } + widgets = {x: None for x in labels.values()} + + def _recursive_children(widget, labels, widgets): + children = widget.children() + for count, child in enumerate(children): + if isinstance(child, QtWidgets.QLabel): + if child.text() in labels.keys(): + widgets[labels[child.text()]] = children[count + 1] + _recursive_children(child, labels, widgets) + + app = QtWidgets.QApplication.instance() + title = "Project Settings" + for widget in app.topLevelWidgets(): + if isinstance(widget, QtWidgets.QMainWindow): + if widget.windowTitle() != title: + continue + _recursive_children(widget, labels, widgets) + widget.close() + + msg = "Setting value \"{}\" is not a valid option for \"{}\"" + for key, widget in widgets.items(): + options = [widget.itemText(i) for i in range(widget.count())] + setting_value = presets[key] + assert setting_value in options, msg.format(setting_value, key) + widget.setCurrentText(presets[key]) + + # This code block is for setting up project colorspaces for files on disk. + # Due to not having Python API access to set the project settings, the + # Foundry recommended way is to modify the hrox files on disk with XML. 
See + # this forum thread for more details; + # https://community.foundry.com/discuss/topic/137771/change-a-project-s-default-color-transform-with-python # noqa + ''' # backward compatibility layer # TODO: remove this after some time config_data = get_imageio_config( @@ -1074,6 +1120,13 @@ def apply_colorspace_project(): "ocioConfigName": "custom" }) + # get path the the active projects + project = get_current_project() + current_file = project.path() + + msg = "The project needs to be saved to disk to apply colorspace settings." + assert current_file, msg + # save the workfile as subversion "comment:_colorspaceChange" split_current_file = os.path.splitext(current_file) copy_current_file = current_file @@ -1116,6 +1169,7 @@ def apply_colorspace_project(): # open the file as current project hiero.core.openProject(copy_current_file) + ''' def apply_colorspace_clips(): @@ -1125,10 +1179,8 @@ def apply_colorspace_clips(): # get presets for hiero imageio = get_project_settings(project_name)["hiero"]["imageio"] - from pprint import pprint presets = imageio.get("regexInputs", {}).get("inputs", {}) - pprint(presets) for clip in clips: clip_media_source_path = clip.mediaSource().firstpath() clip_name = clip.name() diff --git a/client/ayon_core/hosts/hiero/api/tags.py b/client/ayon_core/hosts/hiero/api/tags.py index 5abfee75d0..d4acb23493 100644 --- a/client/ayon_core/hosts/hiero/api/tags.py +++ b/client/ayon_core/hosts/hiero/api/tags.py @@ -144,7 +144,7 @@ def add_tags_to_workfile(): # Get project task types. project_name = get_current_project_name() project_entity = ayon_api.get_project(project_name) - task_types = project_entity["taskType"] + task_types = project_entity["taskTypes"] nks_pres_tags["[Tasks]"] = {} log.debug("__ tasks: {}".format(task_types)) for task_type in task_types: diff --git a/client/ayon_core/hosts/nuke/api/lib.py b/client/ayon_core/hosts/nuke/api/lib.py index 78cbe85097..e3505a16f2 100644 --- a/client/ayon_core/hosts/nuke/api/lib.py +++ b/client/ayon_core/hosts/nuke/api/lib.py @@ -1495,18 +1495,28 @@ class WorkfileSettings(object): filter_knobs = [ "viewerProcess", - "wipe_position" + "wipe_position", + "monitorOutOutputTransform" ] + display, viewer = get_viewer_config_from_string( + viewer_dict["viewerProcess"] + ) + viewer_process = create_viewer_profile_string( + viewer, display, path_like=False + ) + display, viewer = get_viewer_config_from_string( + viewer_dict["output_transform"] + ) + output_transform = create_viewer_profile_string( + viewer, display, path_like=False + ) erased_viewers = [] for v in nuke.allNodes(filter="Viewer"): # set viewProcess to preset from settings - v["viewerProcess"].setValue( - str(viewer_dict["viewerProcess"]) - ) + v["viewerProcess"].setValue(viewer_process) - if str(viewer_dict["viewerProcess"]) \ - not in v["viewerProcess"].value(): + if viewer_process not in v["viewerProcess"].value(): copy_inputs = v.dependencies() copy_knobs = {k: v[k].value() for k in v.knobs() if k not in filter_knobs} @@ -1524,11 +1534,11 @@ class WorkfileSettings(object): # set copied knobs for k, v in copy_knobs.items(): - print(k, v) nv[k].setValue(v) # set viewerProcess - nv["viewerProcess"].setValue(str(viewer_dict["viewerProcess"])) + nv["viewerProcess"].setValue(viewer_process) + nv["monitorOutOutputTransform"].setValue(output_transform) if erased_viewers: log.warning( @@ -1547,7 +1557,6 @@ class WorkfileSettings(object): host_name="nuke" ) - viewer_process_settings = imageio_host["viewer"]["viewerProcess"] workfile_settings = imageio_host["workfile"] 
color_management = workfile_settings["color_management"] native_ocio_config = workfile_settings["native_ocio_config"] @@ -1574,29 +1583,6 @@ class WorkfileSettings(object): residual_path )) - # get monitor lut from settings respecting Nuke version differences - monitor_lut = workfile_settings["thumbnail_space"] - monitor_lut_data = self._get_monitor_settings( - viewer_process_settings, monitor_lut - ) - monitor_lut_data["workingSpaceLUT"] = ( - workfile_settings["working_space"] - ) - - # then set the rest - for knob, value_ in monitor_lut_data.items(): - # skip unfilled ocio config path - # it will be dict in value - if isinstance(value_, dict): - continue - # skip empty values - if not value_: - continue - if self._root_node[knob].value() not in value_: - self._root_node[knob].setValue(str(value_)) - log.debug("nuke.root()['{}'] changed to: {}".format( - knob, value_)) - # set ocio config path if config_data: config_path = config_data["path"].replace("\\", "/") @@ -1611,6 +1597,31 @@ class WorkfileSettings(object): if correct_settings: self._set_ocio_config_path_to_workfile(config_data) + # get monitor lut from settings respecting Nuke version differences + monitor_lut_data = self._get_monitor_settings( + workfile_settings["monitor_out_lut"], + workfile_settings["monitor_lut"] + ) + monitor_lut_data.update({ + "workingSpaceLUT": workfile_settings["working_space"], + "int8Lut": workfile_settings["int_8_lut"], + "int16Lut": workfile_settings["int_16_lut"], + "logLut": workfile_settings["log_lut"], + "floatLut": workfile_settings["float_lut"] + }) + + # then set the rest + for knob, value_ in monitor_lut_data.items(): + # skip unfilled ocio config path + # it will be dict in value + if isinstance(value_, dict): + continue + # skip empty values + if not value_: + continue + self._root_node[knob].setValue(str(value_)) + log.debug("nuke.root()['{}'] changed to: {}".format(knob, value_)) + def _get_monitor_settings(self, viewer_lut, monitor_lut): """ Get monitor settings from viewer and monitor lut diff --git a/client/ayon_core/hosts/nuke/api/plugin.py b/client/ayon_core/hosts/nuke/api/plugin.py index 6aa098c558..56c30a8ff5 100644 --- a/client/ayon_core/hosts/nuke/api/plugin.py +++ b/client/ayon_core/hosts/nuke/api/plugin.py @@ -1151,7 +1151,6 @@ def _remove_old_knobs(node): "OpenpypeDataGroup", "OpenpypeDataGroup_End", "deadlinePriority", "deadlineChunkSize", "deadlineConcurrentTasks", "Deadline" ] - print(node.name()) # remove all old knobs for knob in node.allKnobs(): diff --git a/server_addon/hiero/server/settings/imageio.py b/server_addon/hiero/server/settings/imageio.py index f2bc71ac33..9e15e15597 100644 --- a/server_addon/hiero/server/settings/imageio.py +++ b/server_addon/hiero/server/settings/imageio.py @@ -149,15 +149,15 @@ class ImageIOSettings(BaseSettingsModel): DEFAULT_IMAGEIO_SETTINGS = { "workfile": { - "ocioConfigName": "nuke-default", - "workingSpace": "linear", - "viewerLut": "sRGB", - "eightBitLut": "sRGB", - "sixteenBitLut": "sRGB", - "logLut": "Cineon", - "floatLut": "linear", - "thumbnailLut": "sRGB", - "monitorOutLut": "sRGB" + "ocioConfigName": "aces_1.2", + "workingSpace": "role_scene_linear", + "viewerLut": "ACES/sRGB", + "eightBitLut": "role_matte_paint", + "sixteenBitLut": "role_texture_paint", + "logLut": "role_compositing_log", + "floatLut": "role_scene_linear", + "thumbnailLut": "ACES/sRGB", + "monitorOutLut": "ACES/sRGB" }, "regexInputs": { "inputs": [ diff --git a/server_addon/hiero/server/version.py b/server_addon/hiero/server/version.py index 
b3f4756216..ae7362549b 100644 --- a/server_addon/hiero/server/version.py +++ b/server_addon/hiero/server/version.py @@ -1 +1 @@ -__version__ = "0.1.2" +__version__ = "0.1.3" diff --git a/server_addon/nuke/server/settings/imageio.py b/server_addon/nuke/server/settings/imageio.py index 1b84457133..9cdb0bf1d7 100644 --- a/server_addon/nuke/server/settings/imageio.py +++ b/server_addon/nuke/server/settings/imageio.py @@ -97,8 +97,23 @@ class WorkfileColorspaceSettings(BaseSettingsModel): working_space: str = SettingsField( title="Working Space" ) - thumbnail_space: str = SettingsField( - title="Thumbnail Space" + monitor_lut: str = SettingsField( + title="Thumbnails" + ) + monitor_out_lut: str = SettingsField( + title="Monitor Out" + ) + int_8_lut: str = SettingsField( + title="8-bit Files" + ) + int_16_lut: str = SettingsField( + title="16-bit Files" + ) + log_lut: str = SettingsField( + title="Log Files" + ) + float_lut: str = SettingsField( + title="Float Files" ) @@ -120,6 +135,9 @@ class ViewProcessModel(BaseSettingsModel): viewerProcess: str = SettingsField( title="Viewer Process Name" ) + output_transform: str = SettingsField( + title="Output Transform" + ) class ImageIOConfigModel(BaseSettingsModel): @@ -214,16 +232,23 @@ class ImageIOSettings(BaseSettingsModel): DEFAULT_IMAGEIO_SETTINGS = { "viewer": { - "viewerProcess": "sRGB (default)" + "viewerProcess": "ACES/sRGB", + "output_transform": "ACES/sRGB" }, "baking": { - "viewerProcess": "rec709 (default)" + "viewerProcess": "ACES/Rec.709", + "output_transform": "ACES/Rec.709" }, "workfile": { "color_management": "OCIO", - "native_ocio_config": "nuke-default", - "working_space": "scene_linear", - "thumbnail_space": "sRGB (default)", + "native_ocio_config": "aces_1.2", + "working_space": "role_scene_linear", + "monitor_lut": "ACES/sRGB", + "monitor_out_lut": "ACES/sRGB", + "int_8_lut": "role_matte_paint", + "int_16_lut": "role_texture_paint", + "log_lut": "role_compositing_log", + "float_lut": "role_scene_linear" }, "nodes": { "required_nodes": [ diff --git a/server_addon/nuke/server/version.py b/server_addon/nuke/server/version.py index 569b1212f7..0c5c30071a 100644 --- a/server_addon/nuke/server/version.py +++ b/server_addon/nuke/server/version.py @@ -1 +1 @@ -__version__ = "0.1.10" +__version__ = "0.1.11" From e75f44f6c1fe200720e6af12f31a8488b825df94 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 15 Apr 2024 15:06:56 +0200 Subject: [PATCH 36/67] Bugfix: Parent to world only if not already at world - support root level placeholders --- client/ayon_core/hosts/maya/api/workfile_template_builder.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/client/ayon_core/hosts/maya/api/workfile_template_builder.py b/client/ayon_core/hosts/maya/api/workfile_template_builder.py index 75386d7e64..ddf19125e3 100644 --- a/client/ayon_core/hosts/maya/api/workfile_template_builder.py +++ b/client/ayon_core/hosts/maya/api/workfile_template_builder.py @@ -331,7 +331,8 @@ class MayaPlaceholderLoadPlugin(PlaceholderPlugin, PlaceholderLoadMixin): if scene_parent: cmds.parent(node, scene_parent) else: - cmds.parent(node, world=True) + if cmds.listRelatives(node, parent=True): + cmds.parent(node, world=True) holding_sets = cmds.listSets(object=placeholder.scene_identifier) if not holding_sets: From 5b7511ed84ac348f3a0065b8e77fb2be9e44cdc9 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 17 Apr 2024 23:28:54 +0200 Subject: [PATCH 37/67] Workfile Templates: Implement registering and discovering of `PlaceholderPlugin` --- 
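In short, this patch replaces the per-host `get_workfile_build_placeholder_plugins` hook with path registration plus file-based discovery of `PlaceholderPlugin` classes. A minimal sketch of that pattern follows; the host name, directory constant and plugin class below are hypothetical, while `register_template_placeholder_plugin_path` and the `PlaceholderPlugin` base are the helpers added or used in this patch, and the exact discovery mechanics live in workfile_template_builder.py.

# Sketch only: "examplehost" and ExamplePlaceholderPlugin are made-up names.
import os

from ayon_core.pipeline import register_template_placeholder_plugin_path
from ayon_core.pipeline.workfile.workfile_template_builder import (
    PlaceholderPlugin,
)

# In the host integration's install(), point discovery at a
# "plugins/template" directory instead of overriding
# get_workfile_build_placeholder_plugins() on the host class.
TEMPLATE_PLUGINS_PATH = os.path.join(
    os.path.dirname(__file__), "plugins", "template"
)
register_template_placeholder_plugin_path(TEMPLATE_PLUGINS_PATH)


# A module inside that directory defines the plugin; the template
# builder is expected to discover it from the registered path.
class ExamplePlaceholderPlugin(PlaceholderPlugin):
    identifier = "examplehost.load"
    label = "Example host load"

    def create_placeholder(self, placeholder_data):
        """Create a placeholder node in the scene (host specific)."""
        raise NotImplementedError

    def collect_placeholders(self):
        """Return placeholder items found in the current scene."""
        return []

    def populate_placeholder(self, placeholder):
        """Replace the placeholder with loaded or created content."""
        raise NotImplementedError

The AfterEffects, Maya and Nuke diffs below follow exactly this layout: each host registers a plugins/template directory in install() and moves its load/create placeholder plugin classes into standalone modules there.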
.../hosts/aftereffects/api/pipeline.py | 13 +- .../api/workfile_template_builder.py | 104 +-- .../plugins/template/create_placeholder.py | 49 + .../plugins/template/load_placeholder.py | 60 ++ client/ayon_core/hosts/maya/api/pipeline.py | 12 +- .../maya/api/workfile_template_builder.py | 258 +----- .../maya/plugins/template/load_placeholder.py | 264 ++++++ client/ayon_core/hosts/nuke/api/pipeline.py | 15 +- .../nuke/api/workfile_template_builder.py | 853 +----------------- .../plugins/template/create_placeholder.py | 428 +++++++++ .../nuke/plugins/template/load_placeholder.py | 455 ++++++++++ client/ayon_core/pipeline/__init__.py | 16 + .../ayon_core/pipeline/workfile/__init__.py | 15 + .../workfile/workfile_template_builder.py | 33 +- 14 files changed, 1342 insertions(+), 1233 deletions(-) create mode 100644 client/ayon_core/hosts/aftereffects/plugins/template/create_placeholder.py create mode 100644 client/ayon_core/hosts/aftereffects/plugins/template/load_placeholder.py create mode 100644 client/ayon_core/hosts/maya/plugins/template/load_placeholder.py create mode 100644 client/ayon_core/hosts/nuke/plugins/template/create_placeholder.py create mode 100644 client/ayon_core/hosts/nuke/plugins/template/load_placeholder.py diff --git a/client/ayon_core/hosts/aftereffects/api/pipeline.py b/client/ayon_core/hosts/aftereffects/api/pipeline.py index 105fee64b9..214986a2fc 100644 --- a/client/ayon_core/hosts/aftereffects/api/pipeline.py +++ b/client/ayon_core/hosts/aftereffects/api/pipeline.py @@ -8,14 +8,11 @@ from ayon_core.lib import Logger, register_event_callback from ayon_core.pipeline import ( register_loader_plugin_path, register_creator_plugin_path, + register_template_placeholder_plugin_path, AVALON_CONTAINER_ID, AVALON_INSTANCE_ID, AYON_INSTANCE_ID, ) -from ayon_core.hosts.aftereffects.api.workfile_template_builder import ( - AEPlaceholderLoadPlugin, - AEPlaceholderCreatePlugin -) from ayon_core.pipeline.load import any_outdated_containers import ayon_core.hosts.aftereffects @@ -40,6 +37,7 @@ PLUGINS_DIR = os.path.join(HOST_DIR, "plugins") PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish") LOAD_PATH = os.path.join(PLUGINS_DIR, "load") CREATE_PATH = os.path.join(PLUGINS_DIR, "create") +TEMPLATE_PLUGINS_PATH = os.path.join(PLUGINS_DIR, "template") class AfterEffectsHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost): @@ -76,6 +74,7 @@ class AfterEffectsHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost): register_loader_plugin_path(LOAD_PATH) register_creator_plugin_path(CREATE_PATH) + register_template_placeholder_plugin_path(TEMPLATE_PLUGINS_PATH) register_event_callback("application.launched", application_launch) @@ -118,12 +117,6 @@ class AfterEffectsHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost): item["id"] = "publish_context" self.stub.imprint(item["id"], item) - def get_workfile_build_placeholder_plugins(self): - return [ - AEPlaceholderLoadPlugin, - AEPlaceholderCreatePlugin - ] - # created instances section def list_instances(self): """List all created instances from current workfile which diff --git a/client/ayon_core/hosts/aftereffects/api/workfile_template_builder.py b/client/ayon_core/hosts/aftereffects/api/workfile_template_builder.py index aa2f36e8aa..99d5bbb938 100644 --- a/client/ayon_core/hosts/aftereffects/api/workfile_template_builder.py +++ b/client/ayon_core/hosts/aftereffects/api/workfile_template_builder.py @@ -1,6 +1,7 @@ import os.path import uuid import shutil +from abc import abstractmethod from ayon_core.pipeline import registered_host from 
ayon_core.tools.workfile_template_build import ( @@ -9,13 +10,9 @@ from ayon_core.tools.workfile_template_build import ( from ayon_core.pipeline.workfile.workfile_template_builder import ( AbstractTemplateBuilder, PlaceholderPlugin, - LoadPlaceholderItem, - CreatePlaceholderItem, - PlaceholderLoadMixin, - PlaceholderCreateMixin + PlaceholderItem ) from ayon_core.hosts.aftereffects.api import get_stub -from ayon_core.hosts.aftereffects.api.lib import set_settings PLACEHOLDER_SET = "PLACEHOLDERS_SET" PLACEHOLDER_ID = "openpype.placeholder" @@ -51,6 +48,10 @@ class AETemplateBuilder(AbstractTemplateBuilder): class AEPlaceholderPlugin(PlaceholderPlugin): """Contains generic methods for all PlaceholderPlugins.""" + @abstractmethod + def _create_placeholder_item(self, item_data: dict) -> PlaceholderItem: + pass + def collect_placeholders(self): """Collect info from file metadata about created placeholders. @@ -63,17 +64,7 @@ class AEPlaceholderPlugin(PlaceholderPlugin): if item.get("plugin_identifier") != self.identifier: continue - if isinstance(self, AEPlaceholderLoadPlugin): - item = LoadPlaceholderItem(item["uuid"], - item["data"], - self) - elif isinstance(self, AEPlaceholderCreatePlugin): - item = CreatePlaceholderItem(item["uuid"], - item["data"], - self) - else: - raise NotImplementedError(f"Not implemented for {type(self)}") - + item = self._create_placeholder_item(item) output.append(item) return output @@ -135,87 +126,6 @@ class AEPlaceholderPlugin(PlaceholderPlugin): stub.imprint(item_id, container_data) -class AEPlaceholderCreatePlugin(AEPlaceholderPlugin, PlaceholderCreateMixin): - """Adds Create placeholder. - - This adds composition and runs Create - """ - identifier = "aftereffects.create" - label = "AfterEffects create" - - def create_placeholder(self, placeholder_data): - stub = get_stub() - name = "CREATEPLACEHOLDER" - item_id = stub.add_item(name, "COMP") - - self._imprint_item(item_id, name, placeholder_data, stub) - - def populate_placeholder(self, placeholder): - """Replace 'placeholder' with publishable instance. - - Renames prepared composition name, creates publishable instance, sets - frame/duration settings according to DB. - """ - pre_create_data = {"use_selection": True} - item_id, item = self._get_item(placeholder) - get_stub().select_items([item_id]) - self.populate_create_placeholder(placeholder, pre_create_data) - - # apply settings for populated composition - item_id, metadata_item = self._get_item(placeholder) - set_settings(True, True, [item_id]) - - def get_placeholder_options(self, options=None): - return self.get_create_plugin_options(options) - - -class AEPlaceholderLoadPlugin(AEPlaceholderPlugin, PlaceholderLoadMixin): - identifier = "aftereffects.load" - label = "AfterEffects load" - - def create_placeholder(self, placeholder_data): - """Creates AE's Placeholder item in Project items list. - - Sets dummy resolution/duration/fps settings, will be replaced when - populated. - """ - stub = get_stub() - name = "LOADERPLACEHOLDER" - item_id = stub.add_placeholder(name, 1920, 1060, 25, 10) - - self._imprint_item(item_id, name, placeholder_data, stub) - - def populate_placeholder(self, placeholder): - """Use Openpype Loader from `placeholder` to create new FootageItems - - New FootageItems are created, files are imported. 
- """ - self.populate_load_placeholder(placeholder) - errors = placeholder.get_errors() - stub = get_stub() - if errors: - stub.print_msg("\n".join(errors)) - else: - if not placeholder.data["keep_placeholder"]: - metadata = stub.get_metadata() - for item in metadata: - if not item.get("is_placeholder"): - continue - scene_identifier = item.get("uuid") - if (scene_identifier and - scene_identifier == placeholder.scene_identifier): - stub.delete_item(item["members"][0]) - stub.remove_instance(placeholder.scene_identifier, metadata) - - def get_placeholder_options(self, options=None): - return self.get_load_plugin_options(options) - - def load_succeed(self, placeholder, container): - placeholder_item_id, _ = self._get_item(placeholder) - item_id = container.id - get_stub().add_item_instead_placeholder(placeholder_item_id, item_id) - - def build_workfile_template(*args, **kwargs): builder = AETemplateBuilder(registered_host()) builder.build_template(*args, **kwargs) diff --git a/client/ayon_core/hosts/aftereffects/plugins/template/create_placeholder.py b/client/ayon_core/hosts/aftereffects/plugins/template/create_placeholder.py new file mode 100644 index 0000000000..c7927f176f --- /dev/null +++ b/client/ayon_core/hosts/aftereffects/plugins/template/create_placeholder.py @@ -0,0 +1,49 @@ +from ayon_core.pipeline.workfile.workfile_template_builder import ( + CreatePlaceholderItem, + PlaceholderCreateMixin +) +from ayon_core.hosts.aftereffects.api import get_stub +from ayon_core.hosts.aftereffects.api.lib import set_settings +import ayon_core.hosts.aftereffects.api.workfile_template_builder as wtb + + +class AEPlaceholderCreatePlugin(wtb.AEPlaceholderPlugin, + PlaceholderCreateMixin): + """Adds Create placeholder. + + This adds composition and runs Create + """ + identifier = "aftereffects.create" + label = "AfterEffects create" + + def _create_placeholder_item(self, item_data) -> CreatePlaceholderItem: + return CreatePlaceholderItem( + scene_identifier=item_data["uuid"], + data=item_data["data"], + plugin=self + ) + + def create_placeholder(self, placeholder_data): + stub = get_stub() + name = "CREATEPLACEHOLDER" + item_id = stub.add_item(name, "COMP") + + self._imprint_item(item_id, name, placeholder_data, stub) + + def populate_placeholder(self, placeholder): + """Replace 'placeholder' with publishable instance. + + Renames prepared composition name, creates publishable instance, sets + frame/duration settings according to DB. 
+ """ + pre_create_data = {"use_selection": True} + item_id, item = self._get_item(placeholder) + get_stub().select_items([item_id]) + self.populate_create_placeholder(placeholder, pre_create_data) + + # apply settings for populated composition + item_id, metadata_item = self._get_item(placeholder) + set_settings(True, True, [item_id]) + + def get_placeholder_options(self, options=None): + return self.get_create_plugin_options(options) diff --git a/client/ayon_core/hosts/aftereffects/plugins/template/load_placeholder.py b/client/ayon_core/hosts/aftereffects/plugins/template/load_placeholder.py new file mode 100644 index 0000000000..7f7e4f49ce --- /dev/null +++ b/client/ayon_core/hosts/aftereffects/plugins/template/load_placeholder.py @@ -0,0 +1,60 @@ +from ayon_core.pipeline.workfile.workfile_template_builder import ( + LoadPlaceholderItem, + PlaceholderLoadMixin +) +from ayon_core.hosts.aftereffects.api import get_stub +import ayon_core.hosts.aftereffects.api.workfile_template_builder as wtb + + +class AEPlaceholderLoadPlugin(wtb.AEPlaceholderPlugin, PlaceholderLoadMixin): + identifier = "aftereffects.load" + label = "AfterEffects load" + + def _create_placeholder_item(self, item_data) -> LoadPlaceholderItem: + return LoadPlaceholderItem( + scene_identifier=item_data["uuid"], + data=item_data["data"], + plugin=self + ) + + def create_placeholder(self, placeholder_data): + """Creates AE's Placeholder item in Project items list. + + Sets dummy resolution/duration/fps settings, will be replaced when + populated. + """ + stub = get_stub() + name = "LOADERPLACEHOLDER" + item_id = stub.add_placeholder(name, 1920, 1060, 25, 10) + + self._imprint_item(item_id, name, placeholder_data, stub) + + def populate_placeholder(self, placeholder): + """Use Openpype Loader from `placeholder` to create new FootageItems + + New FootageItems are created, files are imported. + """ + self.populate_load_placeholder(placeholder) + errors = placeholder.get_errors() + stub = get_stub() + if errors: + stub.print_msg("\n".join(errors)) + else: + if not placeholder.data["keep_placeholder"]: + metadata = stub.get_metadata() + for item in metadata: + if not item.get("is_placeholder"): + continue + scene_identifier = item.get("uuid") + if (scene_identifier and + scene_identifier == placeholder.scene_identifier): + stub.delete_item(item["members"][0]) + stub.remove_instance(placeholder.scene_identifier, metadata) + + def get_placeholder_options(self, options=None): + return self.get_load_plugin_options(options) + + def load_succeed(self, placeholder, container): + placeholder_item_id, _ = self._get_item(placeholder) + item_id = container.id + get_stub().add_item_instead_placeholder(placeholder_item_id, item_id) diff --git a/client/ayon_core/hosts/maya/api/pipeline.py b/client/ayon_core/hosts/maya/api/pipeline.py index 864a0c1599..eca98fa306 100644 --- a/client/ayon_core/hosts/maya/api/pipeline.py +++ b/client/ayon_core/hosts/maya/api/pipeline.py @@ -30,9 +30,11 @@ from ayon_core.pipeline import ( register_loader_plugin_path, register_inventory_action_path, register_creator_plugin_path, + register_template_placeholder_plugin_path, deregister_loader_plugin_path, deregister_inventory_action_path, deregister_creator_plugin_path, + deregister_template_placeholder_plugin_path, AYON_CONTAINER_ID, AVALON_CONTAINER_ID, ) @@ -47,7 +49,6 @@ from ayon_core.hosts.maya import MAYA_ROOT_DIR from ayon_core.hosts.maya.lib import create_workspace_mel from . 
import menu, lib -from .workfile_template_builder import MayaPlaceholderLoadPlugin from .workio import ( open_file, save_file, @@ -64,6 +65,7 @@ PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish") LOAD_PATH = os.path.join(PLUGINS_DIR, "load") CREATE_PATH = os.path.join(PLUGINS_DIR, "create") INVENTORY_PATH = os.path.join(PLUGINS_DIR, "inventory") +TEMPLATE_PLUGINS_PATH = os.path.join(PLUGINS_DIR, "template") AVALON_CONTAINERS = ":AVALON_CONTAINERS" @@ -93,7 +95,7 @@ class MayaHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost): register_loader_plugin_path(LOAD_PATH) register_creator_plugin_path(CREATE_PATH) register_inventory_action_path(INVENTORY_PATH) - self.log.info(PUBLISH_PATH) + register_template_placeholder_plugin_path(TEMPLATE_PLUGINS_PATH) self.log.info("Installing callbacks ... ") register_event_callback("init", on_init) @@ -148,11 +150,6 @@ class MayaHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost): def get_containers(self): return ls() - def get_workfile_build_placeholder_plugins(self): - return [ - MayaPlaceholderLoadPlugin - ] - @contextlib.contextmanager def maintained_selection(self): with lib.maintained_selection(): @@ -338,6 +335,7 @@ def uninstall(): deregister_loader_plugin_path(LOAD_PATH) deregister_creator_plugin_path(CREATE_PATH) deregister_inventory_action_path(INVENTORY_PATH) + deregister_template_placeholder_plugin_path(TEMPLATE_PLUGINS_PATH) menu.uninstall() diff --git a/client/ayon_core/hosts/maya/api/workfile_template_builder.py b/client/ayon_core/hosts/maya/api/workfile_template_builder.py index 75386d7e64..cfd416b708 100644 --- a/client/ayon_core/hosts/maya/api/workfile_template_builder.py +++ b/client/ayon_core/hosts/maya/api/workfile_template_builder.py @@ -1,5 +1,3 @@ -import json - from maya import cmds from ayon_core.pipeline import ( @@ -10,16 +8,13 @@ from ayon_core.pipeline import ( ) from ayon_core.pipeline.workfile.workfile_template_builder import ( TemplateAlreadyImported, - AbstractTemplateBuilder, - PlaceholderPlugin, - LoadPlaceholderItem, - PlaceholderLoadMixin, + AbstractTemplateBuilder ) from ayon_core.tools.workfile_template_build import ( WorkfileBuildPlaceholderDialog, ) -from .lib import read, imprint, get_reference_node, get_main_window +from .lib import get_main_window PLACEHOLDER_SET = "PLACEHOLDERS_SET" @@ -91,255 +86,6 @@ class MayaTemplateBuilder(AbstractTemplateBuilder): return True -class MayaPlaceholderLoadPlugin(PlaceholderPlugin, PlaceholderLoadMixin): - identifier = "maya.load" - label = "Maya load" - - def _collect_scene_placeholders(self): - # Cache placeholder data to shared data - placeholder_nodes = self.builder.get_shared_populate_data( - "placeholder_nodes" - ) - if placeholder_nodes is None: - attributes = cmds.ls("*.plugin_identifier", long=True) - placeholder_nodes = {} - for attribute in attributes: - node_name = attribute.rpartition(".")[0] - placeholder_nodes[node_name] = ( - self._parse_placeholder_node_data(node_name) - ) - - self.builder.set_shared_populate_data( - "placeholder_nodes", placeholder_nodes - ) - return placeholder_nodes - - def _parse_placeholder_node_data(self, node_name): - placeholder_data = read(node_name) - parent_name = ( - cmds.getAttr(node_name + ".parent", asString=True) - or node_name.rpartition("|")[0] - or "" - ) - if parent_name: - siblings = cmds.listRelatives(parent_name, children=True) - else: - siblings = cmds.ls(assemblies=True) - node_shortname = node_name.rpartition("|")[2] - current_index = cmds.getAttr(node_name + ".index", asString=True) - if current_index < 0: - 
current_index = siblings.index(node_shortname) - - placeholder_data.update({ - "parent": parent_name, - "index": current_index - }) - return placeholder_data - - def _create_placeholder_name(self, placeholder_data): - placeholder_name_parts = placeholder_data["builder_type"].split("_") - - pos = 1 - placeholder_product_type = placeholder_data.get("product_type") - if placeholder_product_type is None: - placeholder_product_type = placeholder_data.get("family") - - if placeholder_product_type: - placeholder_name_parts.insert(pos, placeholder_product_type) - pos += 1 - - # add loader arguments if any - loader_args = placeholder_data["loader_args"] - if loader_args: - loader_args = json.loads(loader_args.replace('\'', '\"')) - values = [v for v in loader_args.values()] - for value in values: - placeholder_name_parts.insert(pos, value) - pos += 1 - - placeholder_name = "_".join(placeholder_name_parts) - - return placeholder_name.capitalize() - - def _get_loaded_repre_ids(self): - loaded_representation_ids = self.builder.get_shared_populate_data( - "loaded_representation_ids" - ) - if loaded_representation_ids is None: - try: - containers = cmds.sets("AVALON_CONTAINERS", q=True) - except ValueError: - containers = [] - - loaded_representation_ids = { - cmds.getAttr(container + ".representation") - for container in containers - } - self.builder.set_shared_populate_data( - "loaded_representation_ids", loaded_representation_ids - ) - return loaded_representation_ids - - def create_placeholder(self, placeholder_data): - selection = cmds.ls(selection=True) - if len(selection) > 1: - raise ValueError("More then one item are selected") - - parent = selection[0] if selection else None - - placeholder_data["plugin_identifier"] = self.identifier - - placeholder_name = self._create_placeholder_name(placeholder_data) - - placeholder = cmds.spaceLocator(name=placeholder_name)[0] - if parent: - placeholder = cmds.parent(placeholder, selection[0])[0] - - imprint(placeholder, placeholder_data) - - # Add helper attributes to keep placeholder info - cmds.addAttr( - placeholder, - longName="parent", - hidden=True, - dataType="string" - ) - cmds.addAttr( - placeholder, - longName="index", - hidden=True, - attributeType="short", - defaultValue=-1 - ) - - cmds.setAttr(placeholder + ".parent", "", type="string") - - def update_placeholder(self, placeholder_item, placeholder_data): - node_name = placeholder_item.scene_identifier - new_values = {} - for key, value in placeholder_data.items(): - placeholder_value = placeholder_item.data.get(key) - if value != placeholder_value: - new_values[key] = value - placeholder_item.data[key] = value - - for key in new_values.keys(): - cmds.deleteAttr(node_name + "." 
+ key) - - imprint(node_name, new_values) - - def collect_placeholders(self): - output = [] - scene_placeholders = self._collect_scene_placeholders() - for node_name, placeholder_data in scene_placeholders.items(): - if placeholder_data.get("plugin_identifier") != self.identifier: - continue - - # TODO do data validations and maybe upgrades if they are invalid - output.append( - LoadPlaceholderItem(node_name, placeholder_data, self) - ) - - return output - - def populate_placeholder(self, placeholder): - self.populate_load_placeholder(placeholder) - - def repopulate_placeholder(self, placeholder): - repre_ids = self._get_loaded_repre_ids() - self.populate_load_placeholder(placeholder, repre_ids) - - def get_placeholder_options(self, options=None): - return self.get_load_plugin_options(options) - - def post_placeholder_process(self, placeholder, failed): - """Cleanup placeholder after load of its corresponding representations. - - Args: - placeholder (PlaceholderItem): Item which was just used to load - representation. - failed (bool): Loading of representation failed. - """ - # Hide placeholder and add them to placeholder set - node = placeholder.scene_identifier - - cmds.sets(node, addElement=PLACEHOLDER_SET) - cmds.hide(node) - cmds.setAttr(node + ".hiddenInOutliner", True) - - def delete_placeholder(self, placeholder): - """Remove placeholder if building was successful""" - cmds.delete(placeholder.scene_identifier) - - def load_succeed(self, placeholder, container): - self._parent_in_hierarchy(placeholder, container) - - def _parent_in_hierarchy(self, placeholder, container): - """Parent loaded container to placeholder's parent. - - ie : Set loaded content as placeholder's sibling - - Args: - container (str): Placeholder loaded containers - """ - - if not container: - return - - roots = cmds.sets(container, q=True) or [] - ref_node = None - try: - ref_node = get_reference_node(roots) - except AssertionError as e: - self.log.info(e.args[0]) - - nodes_to_parent = [] - for root in roots: - if ref_node: - ref_root = cmds.referenceQuery(root, nodes=True)[0] - ref_root = ( - cmds.listRelatives(ref_root, parent=True, path=True) or - [ref_root] - ) - nodes_to_parent.extend(ref_root) - continue - if root.endswith("_RN"): - # Backwards compatibility for hardcoded reference names. 
- refRoot = cmds.referenceQuery(root, n=True)[0] - refRoot = cmds.listRelatives(refRoot, parent=True) or [refRoot] - nodes_to_parent.extend(refRoot) - elif root not in cmds.listSets(allSets=True): - nodes_to_parent.append(root) - - elif not cmds.sets(root, q=True): - return - - # Move loaded nodes to correct index in outliner hierarchy - placeholder_form = cmds.xform( - placeholder.scene_identifier, - q=True, - matrix=True, - worldSpace=True - ) - scene_parent = cmds.listRelatives( - placeholder.scene_identifier, parent=True, fullPath=True - ) - for node in set(nodes_to_parent): - cmds.reorder(node, front=True) - cmds.reorder(node, relative=placeholder.data["index"]) - cmds.xform(node, matrix=placeholder_form, ws=True) - if scene_parent: - cmds.parent(node, scene_parent) - else: - cmds.parent(node, world=True) - - holding_sets = cmds.listSets(object=placeholder.scene_identifier) - if not holding_sets: - return - for holding_set in holding_sets: - cmds.sets(roots, forceElement=holding_set) - - def build_workfile_template(*args): builder = MayaTemplateBuilder(registered_host()) builder.build_template() diff --git a/client/ayon_core/hosts/maya/plugins/template/load_placeholder.py b/client/ayon_core/hosts/maya/plugins/template/load_placeholder.py new file mode 100644 index 0000000000..5bfaae6500 --- /dev/null +++ b/client/ayon_core/hosts/maya/plugins/template/load_placeholder.py @@ -0,0 +1,264 @@ +import json + +from maya import cmds + +from ayon_core.pipeline.workfile.workfile_template_builder import ( + PlaceholderPlugin, + LoadPlaceholderItem, + PlaceholderLoadMixin, +) +from ayon_core.hosts.maya.api.lib import ( + read, + imprint, + get_reference_node +) +from ayon_core.hosts.maya.api.workfile_template_builder import PLACEHOLDER_SET + + +class MayaPlaceholderLoadPlugin(PlaceholderPlugin, PlaceholderLoadMixin): + identifier = "maya.load" + label = "Maya load" + + def _collect_scene_placeholders(self): + # Cache placeholder data to shared data + placeholder_nodes = self.builder.get_shared_populate_data( + "placeholder_nodes" + ) + if placeholder_nodes is None: + attributes = cmds.ls("*.plugin_identifier", long=True) + placeholder_nodes = {} + for attribute in attributes: + node_name = attribute.rpartition(".")[0] + placeholder_nodes[node_name] = ( + self._parse_placeholder_node_data(node_name) + ) + + self.builder.set_shared_populate_data( + "placeholder_nodes", placeholder_nodes + ) + return placeholder_nodes + + def _parse_placeholder_node_data(self, node_name): + placeholder_data = read(node_name) + parent_name = ( + cmds.getAttr(node_name + ".parent", asString=True) + or node_name.rpartition("|")[0] + or "" + ) + if parent_name: + siblings = cmds.listRelatives(parent_name, children=True) + else: + siblings = cmds.ls(assemblies=True) + node_shortname = node_name.rpartition("|")[2] + current_index = cmds.getAttr(node_name + ".index", asString=True) + if current_index < 0: + current_index = siblings.index(node_shortname) + + placeholder_data.update({ + "parent": parent_name, + "index": current_index + }) + return placeholder_data + + def _create_placeholder_name(self, placeholder_data): + placeholder_name_parts = placeholder_data["builder_type"].split("_") + + pos = 1 + placeholder_product_type = placeholder_data.get("product_type") + if placeholder_product_type is None: + placeholder_product_type = placeholder_data.get("family") + + if placeholder_product_type: + placeholder_name_parts.insert(pos, placeholder_product_type) + pos += 1 + + # add loader arguments if any + loader_args = 
placeholder_data["loader_args"] + if loader_args: + loader_args = json.loads(loader_args.replace('\'', '\"')) + values = [v for v in loader_args.values()] + for value in values: + placeholder_name_parts.insert(pos, value) + pos += 1 + + placeholder_name = "_".join(placeholder_name_parts) + + return placeholder_name.capitalize() + + def _get_loaded_repre_ids(self): + loaded_representation_ids = self.builder.get_shared_populate_data( + "loaded_representation_ids" + ) + if loaded_representation_ids is None: + try: + containers = cmds.sets("AVALON_CONTAINERS", q=True) + except ValueError: + containers = [] + + loaded_representation_ids = { + cmds.getAttr(container + ".representation") + for container in containers + } + self.builder.set_shared_populate_data( + "loaded_representation_ids", loaded_representation_ids + ) + return loaded_representation_ids + + def create_placeholder(self, placeholder_data): + selection = cmds.ls(selection=True) + if len(selection) > 1: + raise ValueError("More then one item are selected") + + parent = selection[0] if selection else None + + placeholder_data["plugin_identifier"] = self.identifier + + placeholder_name = self._create_placeholder_name(placeholder_data) + + placeholder = cmds.spaceLocator(name=placeholder_name)[0] + if parent: + placeholder = cmds.parent(placeholder, selection[0])[0] + + imprint(placeholder, placeholder_data) + + # Add helper attributes to keep placeholder info + cmds.addAttr( + placeholder, + longName="parent", + hidden=True, + dataType="string" + ) + cmds.addAttr( + placeholder, + longName="index", + hidden=True, + attributeType="short", + defaultValue=-1 + ) + + cmds.setAttr(placeholder + ".parent", "", type="string") + + def update_placeholder(self, placeholder_item, placeholder_data): + node_name = placeholder_item.scene_identifier + new_values = {} + for key, value in placeholder_data.items(): + placeholder_value = placeholder_item.data.get(key) + if value != placeholder_value: + new_values[key] = value + placeholder_item.data[key] = value + + for key in new_values.keys(): + cmds.deleteAttr(node_name + "." + key) + + imprint(node_name, new_values) + + def collect_placeholders(self): + output = [] + scene_placeholders = self._collect_scene_placeholders() + for node_name, placeholder_data in scene_placeholders.items(): + if placeholder_data.get("plugin_identifier") != self.identifier: + continue + + # TODO do data validations and maybe upgrades if they are invalid + output.append( + LoadPlaceholderItem(node_name, placeholder_data, self) + ) + + return output + + def populate_placeholder(self, placeholder): + self.populate_load_placeholder(placeholder) + + def repopulate_placeholder(self, placeholder): + repre_ids = self._get_loaded_repre_ids() + self.populate_load_placeholder(placeholder, repre_ids) + + def get_placeholder_options(self, options=None): + return self.get_load_plugin_options(options) + + def post_placeholder_process(self, placeholder, failed): + """Cleanup placeholder after load of its corresponding representations. + + Args: + placeholder (PlaceholderItem): Item which was just used to load + representation. + failed (bool): Loading of representation failed. 
+ """ + # Hide placeholder and add them to placeholder set + node = placeholder.scene_identifier + + cmds.sets(node, addElement=PLACEHOLDER_SET) + cmds.hide(node) + cmds.setAttr(node + ".hiddenInOutliner", True) + + def delete_placeholder(self, placeholder): + """Remove placeholder if building was successful""" + cmds.delete(placeholder.scene_identifier) + + def load_succeed(self, placeholder, container): + self._parent_in_hierarchy(placeholder, container) + + def _parent_in_hierarchy(self, placeholder, container): + """Parent loaded container to placeholder's parent. + + ie : Set loaded content as placeholder's sibling + + Args: + container (str): Placeholder loaded containers + """ + + if not container: + return + + roots = cmds.sets(container, q=True) or [] + ref_node = None + try: + ref_node = get_reference_node(roots) + except AssertionError as e: + self.log.info(e.args[0]) + + nodes_to_parent = [] + for root in roots: + if ref_node: + ref_root = cmds.referenceQuery(root, nodes=True)[0] + ref_root = ( + cmds.listRelatives(ref_root, parent=True, path=True) or + [ref_root] + ) + nodes_to_parent.extend(ref_root) + continue + if root.endswith("_RN"): + # Backwards compatibility for hardcoded reference names. + refRoot = cmds.referenceQuery(root, n=True)[0] + refRoot = cmds.listRelatives(refRoot, parent=True) or [refRoot] + nodes_to_parent.extend(refRoot) + elif root not in cmds.listSets(allSets=True): + nodes_to_parent.append(root) + + elif not cmds.sets(root, q=True): + return + + # Move loaded nodes to correct index in outliner hierarchy + placeholder_form = cmds.xform( + placeholder.scene_identifier, + q=True, + matrix=True, + worldSpace=True + ) + scene_parent = cmds.listRelatives( + placeholder.scene_identifier, parent=True, fullPath=True + ) + for node in set(nodes_to_parent): + cmds.reorder(node, front=True) + cmds.reorder(node, relative=placeholder.data["index"]) + cmds.xform(node, matrix=placeholder_form, ws=True) + if scene_parent: + cmds.parent(node, scene_parent) + else: + cmds.parent(node, world=True) + + holding_sets = cmds.listSets(object=placeholder.scene_identifier) + if not holding_sets: + return + for holding_set in holding_sets: + cmds.sets(roots, forceElement=holding_set) diff --git a/client/ayon_core/hosts/nuke/api/pipeline.py b/client/ayon_core/hosts/nuke/api/pipeline.py index 0d44aba2f9..bdf601e30d 100644 --- a/client/ayon_core/hosts/nuke/api/pipeline.py +++ b/client/ayon_core/hosts/nuke/api/pipeline.py @@ -18,6 +18,7 @@ from ayon_core.pipeline import ( register_loader_plugin_path, register_creator_plugin_path, register_inventory_action_path, + register_template_placeholder_plugin_path, AYON_INSTANCE_ID, AVALON_INSTANCE_ID, AVALON_CONTAINER_ID, @@ -52,8 +53,6 @@ from .lib import ( MENU_LABEL, ) from .workfile_template_builder import ( - NukePlaceholderLoadPlugin, - NukePlaceholderCreatePlugin, build_workfile_template, create_placeholder, update_placeholder, @@ -76,6 +75,7 @@ PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish") LOAD_PATH = os.path.join(PLUGINS_DIR, "load") CREATE_PATH = os.path.join(PLUGINS_DIR, "create") INVENTORY_PATH = os.path.join(PLUGINS_DIR, "inventory") +TEMPLATE_PLUGINS_PATH = os.path.join(PLUGINS_DIR, "template") # registering pyblish gui regarding settings in presets if os.getenv("PYBLISH_GUI", None): @@ -105,18 +105,11 @@ class NukeHost( def get_workfile_extensions(self): return file_extensions() - def get_workfile_build_placeholder_plugins(self): - return [ - NukePlaceholderLoadPlugin, - NukePlaceholderCreatePlugin - ] - def 
get_containers(self): return ls() def install(self): - ''' Installing all requarements for Nuke host - ''' + """Installing all requirements for Nuke host""" pyblish.api.register_host("nuke") @@ -125,6 +118,7 @@ class NukeHost( register_loader_plugin_path(LOAD_PATH) register_creator_plugin_path(CREATE_PATH) register_inventory_action_path(INVENTORY_PATH) + register_template_placeholder_plugin_path(TEMPLATE_PLUGINS_PATH) # Register AYON event for workfiles loading. register_event_callback("workio.open_file", check_inventory_versions) @@ -178,7 +172,6 @@ def add_nuke_callbacks(): # set apply all workfile settings on script load and save nuke.addOnScriptLoad(WorkfileSettings().set_context_settings) - if nuke_settings["dirmap"]["enabled"]: log.info("Added Nuke's dir-mapping callback ...") # Add dirmap for file paths. diff --git a/client/ayon_core/hosts/nuke/api/workfile_template_builder.py b/client/ayon_core/hosts/nuke/api/workfile_template_builder.py index 495edd9e5f..aebf91c4a4 100644 --- a/client/ayon_core/hosts/nuke/api/workfile_template_builder.py +++ b/client/ayon_core/hosts/nuke/api/workfile_template_builder.py @@ -1,30 +1,17 @@ import collections import nuke + from ayon_core.pipeline import registered_host from ayon_core.pipeline.workfile.workfile_template_builder import ( AbstractTemplateBuilder, PlaceholderPlugin, - LoadPlaceholderItem, - CreatePlaceholderItem, - PlaceholderLoadMixin, - PlaceholderCreateMixin, ) from ayon_core.tools.workfile_template_build import ( WorkfileBuildPlaceholderDialog, ) from .lib import ( - find_free_space_to_paste_nodes, - get_extreme_positions, - get_group_io_nodes, imprint, - refresh_node, - refresh_nodes, reset_selection, - get_names_from_nodes, - get_nodes_by_names, - select_nodes, - duplicate_node, - node_tempfile, get_main_window, WorkfileSettings, ) @@ -54,6 +41,7 @@ class NukeTemplateBuilder(AbstractTemplateBuilder): return True + class NukePlaceholderPlugin(PlaceholderPlugin): node_color = 4278190335 @@ -120,843 +108,6 @@ class NukePlaceholderPlugin(PlaceholderPlugin): nuke.delete(placeholder_node) -class NukePlaceholderLoadPlugin(NukePlaceholderPlugin, PlaceholderLoadMixin): - identifier = "nuke.load" - label = "Nuke load" - - def _parse_placeholder_node_data(self, node): - placeholder_data = super( - NukePlaceholderLoadPlugin, self - )._parse_placeholder_node_data(node) - - node_knobs = node.knobs() - nb_children = 0 - if "nb_children" in node_knobs: - nb_children = int(node_knobs["nb_children"].getValue()) - placeholder_data["nb_children"] = nb_children - - siblings = [] - if "siblings" in node_knobs: - siblings = node_knobs["siblings"].values() - placeholder_data["siblings"] = siblings - - node_full_name = node.fullName() - placeholder_data["group_name"] = node_full_name.rpartition(".")[0] - placeholder_data["last_loaded"] = [] - placeholder_data["delete"] = False - return placeholder_data - - def _get_loaded_repre_ids(self): - loaded_representation_ids = self.builder.get_shared_populate_data( - "loaded_representation_ids" - ) - if loaded_representation_ids is None: - loaded_representation_ids = set() - for node in nuke.allNodes(): - if "repre_id" in node.knobs(): - loaded_representation_ids.add( - node.knob("repre_id").getValue() - ) - - self.builder.set_shared_populate_data( - "loaded_representation_ids", loaded_representation_ids - ) - return loaded_representation_ids - - def _before_placeholder_load(self, placeholder): - placeholder.data["nodes_init"] = nuke.allNodes() - - def _before_repre_load(self, placeholder, representation): - 
placeholder.data["last_repre_id"] = representation["id"] - - def collect_placeholders(self): - output = [] - scene_placeholders = self._collect_scene_placeholders() - for node_name, node in scene_placeholders.items(): - plugin_identifier_knob = node.knob("plugin_identifier") - if ( - plugin_identifier_knob is None - or plugin_identifier_knob.getValue() != self.identifier - ): - continue - - placeholder_data = self._parse_placeholder_node_data(node) - # TODO do data validations and maybe updgrades if are invalid - output.append( - LoadPlaceholderItem(node_name, placeholder_data, self) - ) - - return output - - def populate_placeholder(self, placeholder): - self.populate_load_placeholder(placeholder) - - def repopulate_placeholder(self, placeholder): - repre_ids = self._get_loaded_repre_ids() - self.populate_load_placeholder(placeholder, repre_ids) - - def get_placeholder_options(self, options=None): - return self.get_load_plugin_options(options) - - def post_placeholder_process(self, placeholder, failed): - """Cleanup placeholder after load of its corresponding representations. - - Args: - placeholder (PlaceholderItem): Item which was just used to load - representation. - failed (bool): Loading of representation failed. - """ - # deselect all selected nodes - placeholder_node = nuke.toNode(placeholder.scene_identifier) - - # getting the latest nodes added - # TODO get from shared populate data! - nodes_init = placeholder.data["nodes_init"] - nodes_loaded = list(set(nuke.allNodes()) - set(nodes_init)) - self.log.debug("Loaded nodes: {}".format(nodes_loaded)) - if not nodes_loaded: - return - - placeholder.data["delete"] = True - - nodes_loaded = self._move_to_placeholder_group( - placeholder, nodes_loaded - ) - placeholder.data["last_loaded"] = nodes_loaded - refresh_nodes(nodes_loaded) - - # positioning of the loaded nodes - min_x, min_y, _, _ = get_extreme_positions(nodes_loaded) - for node in nodes_loaded: - xpos = (node.xpos() - min_x) + placeholder_node.xpos() - ypos = (node.ypos() - min_y) + placeholder_node.ypos() - node.setXYpos(xpos, ypos) - refresh_nodes(nodes_loaded) - - # fix the problem of z_order for backdrops - self._fix_z_order(placeholder) - - if placeholder.data.get("keep_placeholder"): - self._imprint_siblings(placeholder) - - if placeholder.data["nb_children"] == 0: - # save initial nodes positions and dimensions, update them - # and set inputs and outputs of loaded nodes - if placeholder.data.get("keep_placeholder"): - self._imprint_inits() - self._update_nodes(placeholder, nuke.allNodes(), nodes_loaded) - - self._set_loaded_connections(placeholder) - - elif placeholder.data["siblings"]: - # create copies of placeholder siblings for the new loaded nodes, - # set their inputs and outputs and update all nodes positions and - # dimensions and siblings names - - siblings = get_nodes_by_names(placeholder.data["siblings"]) - refresh_nodes(siblings) - copies = self._create_sib_copies(placeholder) - new_nodes = list(copies.values()) # copies nodes - self._update_nodes(new_nodes, nodes_loaded) - placeholder_node.removeKnob(placeholder_node.knob("siblings")) - new_nodes_name = get_names_from_nodes(new_nodes) - imprint(placeholder_node, {"siblings": new_nodes_name}) - self._set_copies_connections(placeholder, copies) - - self._update_nodes( - nuke.allNodes(), - new_nodes + nodes_loaded, - 20 - ) - - new_siblings = get_names_from_nodes(new_nodes) - placeholder.data["siblings"] = new_siblings - - else: - # if the placeholder doesn't have siblings, the loaded - # nodes will be placed 
in a free space - - xpointer, ypointer = find_free_space_to_paste_nodes( - nodes_loaded, direction="bottom", offset=200 - ) - node = nuke.createNode("NoOp") - reset_selection() - nuke.delete(node) - for node in nodes_loaded: - xpos = (node.xpos() - min_x) + xpointer - ypos = (node.ypos() - min_y) + ypointer - node.setXYpos(xpos, ypos) - - placeholder.data["nb_children"] += 1 - reset_selection() - - # go back to root group - nuke.root().begin() - - def _move_to_placeholder_group(self, placeholder, nodes_loaded): - """ - opening the placeholder's group and copying loaded nodes in it. - - Returns : - nodes_loaded (list): the new list of pasted nodes - """ - - groups_name = placeholder.data["group_name"] - reset_selection() - select_nodes(nodes_loaded) - if groups_name: - with node_tempfile() as filepath: - nuke.nodeCopy(filepath) - for node in nuke.selectedNodes(): - nuke.delete(node) - group = nuke.toNode(groups_name) - group.begin() - nuke.nodePaste(filepath) - nodes_loaded = nuke.selectedNodes() - return nodes_loaded - - def _fix_z_order(self, placeholder): - """Fix the problem of z_order when a backdrop is loaded.""" - - nodes_loaded = placeholder.data["last_loaded"] - loaded_backdrops = [] - bd_orders = set() - for node in nodes_loaded: - if isinstance(node, nuke.BackdropNode): - loaded_backdrops.append(node) - bd_orders.add(node.knob("z_order").getValue()) - - if not bd_orders: - return - - sib_orders = set() - for node_name in placeholder.data["siblings"]: - node = nuke.toNode(node_name) - if isinstance(node, nuke.BackdropNode): - sib_orders.add(node.knob("z_order").getValue()) - - if not sib_orders: - return - - min_order = min(bd_orders) - max_order = max(sib_orders) - for backdrop_node in loaded_backdrops: - z_order = backdrop_node.knob("z_order").getValue() - backdrop_node.knob("z_order").setValue( - z_order + max_order - min_order + 1) - - def _imprint_siblings(self, placeholder): - """ - - add siblings names to placeholder attributes (nodes loaded with it) - - add Id to the attributes of all the other nodes - """ - - loaded_nodes = placeholder.data["last_loaded"] - loaded_nodes_set = set(loaded_nodes) - data = {"repre_id": str(placeholder.data["last_repre_id"])} - - for node in loaded_nodes: - node_knobs = node.knobs() - if "builder_type" not in node_knobs: - # save the id of representation for all imported nodes - imprint(node, data) - node.knob("repre_id").setVisible(False) - refresh_node(node) - continue - - if ( - "is_placeholder" not in node_knobs - or ( - "is_placeholder" in node_knobs - and node.knob("is_placeholder").value() - ) - ): - siblings = list(loaded_nodes_set - {node}) - siblings_name = get_names_from_nodes(siblings) - siblings = {"siblings": siblings_name} - imprint(node, siblings) - - def _imprint_inits(self): - """Add initial positions and dimensions to the attributes""" - - for node in nuke.allNodes(): - refresh_node(node) - imprint(node, {"x_init": node.xpos(), "y_init": node.ypos()}) - node.knob("x_init").setVisible(False) - node.knob("y_init").setVisible(False) - width = node.screenWidth() - height = node.screenHeight() - if "bdwidth" in node.knobs(): - imprint(node, {"w_init": width, "h_init": height}) - node.knob("w_init").setVisible(False) - node.knob("h_init").setVisible(False) - refresh_node(node) - - def _update_nodes( - self, placeholder, nodes, considered_nodes, offset_y=None - ): - """Adjust backdrop nodes dimensions and positions. - - Considering some nodes sizes. 
- - Args: - nodes (list): list of nodes to update - considered_nodes (list): list of nodes to consider while updating - positions and dimensions - offset (int): distance between copies - """ - - placeholder_node = nuke.toNode(placeholder.scene_identifier) - - min_x, min_y, max_x, max_y = get_extreme_positions(considered_nodes) - - diff_x = diff_y = 0 - contained_nodes = [] # for backdrops - - if offset_y is None: - width_ph = placeholder_node.screenWidth() - height_ph = placeholder_node.screenHeight() - diff_y = max_y - min_y - height_ph - diff_x = max_x - min_x - width_ph - contained_nodes = [placeholder_node] - min_x = placeholder_node.xpos() - min_y = placeholder_node.ypos() - else: - siblings = get_nodes_by_names(placeholder.data["siblings"]) - minX, _, maxX, _ = get_extreme_positions(siblings) - diff_y = max_y - min_y + 20 - diff_x = abs(max_x - min_x - maxX + minX) - contained_nodes = considered_nodes - - if diff_y <= 0 and diff_x <= 0: - return - - for node in nodes: - refresh_node(node) - - if ( - node == placeholder_node - or node in considered_nodes - ): - continue - - if ( - not isinstance(node, nuke.BackdropNode) - or ( - isinstance(node, nuke.BackdropNode) - and not set(contained_nodes) <= set(node.getNodes()) - ) - ): - if offset_y is None and node.xpos() >= min_x: - node.setXpos(node.xpos() + diff_x) - - if node.ypos() >= min_y: - node.setYpos(node.ypos() + diff_y) - - else: - width = node.screenWidth() - height = node.screenHeight() - node.knob("bdwidth").setValue(width + diff_x) - node.knob("bdheight").setValue(height + diff_y) - - refresh_node(node) - - def _set_loaded_connections(self, placeholder): - """ - set inputs and outputs of loaded nodes""" - - placeholder_node = nuke.toNode(placeholder.scene_identifier) - input_node, output_node = get_group_io_nodes( - placeholder.data["last_loaded"] - ) - for node in placeholder_node.dependent(): - for idx in range(node.inputs()): - if node.input(idx) == placeholder_node and output_node: - node.setInput(idx, output_node) - - for node in placeholder_node.dependencies(): - for idx in range(placeholder_node.inputs()): - if placeholder_node.input(idx) == node and input_node: - input_node.setInput(0, node) - - def _create_sib_copies(self, placeholder): - """ creating copies of the palce_holder siblings (the ones who were - loaded with it) for the new nodes added - - Returns : - copies (dict) : with copied nodes names and their copies - """ - - copies = {} - siblings = get_nodes_by_names(placeholder.data["siblings"]) - for node in siblings: - new_node = duplicate_node(node) - - x_init = int(new_node.knob("x_init").getValue()) - y_init = int(new_node.knob("y_init").getValue()) - new_node.setXYpos(x_init, y_init) - if isinstance(new_node, nuke.BackdropNode): - w_init = new_node.knob("w_init").getValue() - h_init = new_node.knob("h_init").getValue() - new_node.knob("bdwidth").setValue(w_init) - new_node.knob("bdheight").setValue(h_init) - refresh_node(node) - - if "repre_id" in node.knobs().keys(): - node.removeKnob(node.knob("repre_id")) - copies[node.name()] = new_node - return copies - - def _set_copies_connections(self, placeholder, copies): - """Set inputs and outputs of the copies. - - Args: - copies (dict): Copied nodes by their names. 
- """ - - last_input, last_output = get_group_io_nodes( - placeholder.data["last_loaded"] - ) - siblings = get_nodes_by_names(placeholder.data["siblings"]) - siblings_input, siblings_output = get_group_io_nodes(siblings) - copy_input = copies[siblings_input.name()] - copy_output = copies[siblings_output.name()] - - for node_init in siblings: - if node_init == siblings_output: - continue - - node_copy = copies[node_init.name()] - for node in node_init.dependent(): - for idx in range(node.inputs()): - if node.input(idx) != node_init: - continue - - if node in siblings: - copies[node.name()].setInput(idx, node_copy) - else: - last_input.setInput(0, node_copy) - - for node in node_init.dependencies(): - for idx in range(node_init.inputs()): - if node_init.input(idx) != node: - continue - - if node_init == siblings_input: - copy_input.setInput(idx, node) - elif node in siblings: - node_copy.setInput(idx, copies[node.name()]) - else: - node_copy.setInput(idx, last_output) - - siblings_input.setInput(0, copy_output) - - -class NukePlaceholderCreatePlugin( - NukePlaceholderPlugin, PlaceholderCreateMixin -): - identifier = "nuke.create" - label = "Nuke create" - - def _parse_placeholder_node_data(self, node): - placeholder_data = super( - NukePlaceholderCreatePlugin, self - )._parse_placeholder_node_data(node) - - node_knobs = node.knobs() - nb_children = 0 - if "nb_children" in node_knobs: - nb_children = int(node_knobs["nb_children"].getValue()) - placeholder_data["nb_children"] = nb_children - - siblings = [] - if "siblings" in node_knobs: - siblings = node_knobs["siblings"].values() - placeholder_data["siblings"] = siblings - - node_full_name = node.fullName() - placeholder_data["group_name"] = node_full_name.rpartition(".")[0] - placeholder_data["last_loaded"] = [] - placeholder_data["delete"] = False - return placeholder_data - - def _before_instance_create(self, placeholder): - placeholder.data["nodes_init"] = nuke.allNodes() - - def collect_placeholders(self): - output = [] - scene_placeholders = self._collect_scene_placeholders() - for node_name, node in scene_placeholders.items(): - plugin_identifier_knob = node.knob("plugin_identifier") - if ( - plugin_identifier_knob is None - or plugin_identifier_knob.getValue() != self.identifier - ): - continue - - placeholder_data = self._parse_placeholder_node_data(node) - - output.append( - CreatePlaceholderItem(node_name, placeholder_data, self) - ) - - return output - - def populate_placeholder(self, placeholder): - self.populate_create_placeholder(placeholder) - - def repopulate_placeholder(self, placeholder): - self.populate_create_placeholder(placeholder) - - def get_placeholder_options(self, options=None): - return self.get_create_plugin_options(options) - - def post_placeholder_process(self, placeholder, failed): - """Cleanup placeholder after load of its corresponding representations. - - Args: - placeholder (PlaceholderItem): Item which was just used to load - representation. - failed (bool): Loading of representation failed. 
- """ - # deselect all selected nodes - placeholder_node = nuke.toNode(placeholder.scene_identifier) - - # getting the latest nodes added - nodes_init = placeholder.data["nodes_init"] - nodes_created = list(set(nuke.allNodes()) - set(nodes_init)) - self.log.debug("Created nodes: {}".format(nodes_created)) - if not nodes_created: - return - - placeholder.data["delete"] = True - - nodes_created = self._move_to_placeholder_group( - placeholder, nodes_created - ) - placeholder.data["last_created"] = nodes_created - refresh_nodes(nodes_created) - - # positioning of the created nodes - min_x, min_y, _, _ = get_extreme_positions(nodes_created) - for node in nodes_created: - xpos = (node.xpos() - min_x) + placeholder_node.xpos() - ypos = (node.ypos() - min_y) + placeholder_node.ypos() - node.setXYpos(xpos, ypos) - refresh_nodes(nodes_created) - - # fix the problem of z_order for backdrops - self._fix_z_order(placeholder) - - if placeholder.data.get("keep_placeholder"): - self._imprint_siblings(placeholder) - - if placeholder.data["nb_children"] == 0: - # save initial nodes positions and dimensions, update them - # and set inputs and outputs of created nodes - - if placeholder.data.get("keep_placeholder"): - self._imprint_inits() - self._update_nodes(placeholder, nuke.allNodes(), nodes_created) - - self._set_created_connections(placeholder) - - elif placeholder.data["siblings"]: - # create copies of placeholder siblings for the new created nodes, - # set their inputs and outputs and update all nodes positions and - # dimensions and siblings names - - siblings = get_nodes_by_names(placeholder.data["siblings"]) - refresh_nodes(siblings) - copies = self._create_sib_copies(placeholder) - new_nodes = list(copies.values()) # copies nodes - self._update_nodes(new_nodes, nodes_created) - placeholder_node.removeKnob(placeholder_node.knob("siblings")) - new_nodes_name = get_names_from_nodes(new_nodes) - imprint(placeholder_node, {"siblings": new_nodes_name}) - self._set_copies_connections(placeholder, copies) - - self._update_nodes( - nuke.allNodes(), - new_nodes + nodes_created, - 20 - ) - - new_siblings = get_names_from_nodes(new_nodes) - placeholder.data["siblings"] = new_siblings - - else: - # if the placeholder doesn't have siblings, the created - # nodes will be placed in a free space - - xpointer, ypointer = find_free_space_to_paste_nodes( - nodes_created, direction="bottom", offset=200 - ) - node = nuke.createNode("NoOp") - reset_selection() - nuke.delete(node) - for node in nodes_created: - xpos = (node.xpos() - min_x) + xpointer - ypos = (node.ypos() - min_y) + ypointer - node.setXYpos(xpos, ypos) - - placeholder.data["nb_children"] += 1 - reset_selection() - - # go back to root group - nuke.root().begin() - - def _move_to_placeholder_group(self, placeholder, nodes_created): - """ - opening the placeholder's group and copying created nodes in it. 
- - Returns : - nodes_created (list): the new list of pasted nodes - """ - groups_name = placeholder.data["group_name"] - reset_selection() - select_nodes(nodes_created) - if groups_name: - with node_tempfile() as filepath: - nuke.nodeCopy(filepath) - for node in nuke.selectedNodes(): - nuke.delete(node) - group = nuke.toNode(groups_name) - group.begin() - nuke.nodePaste(filepath) - nodes_created = nuke.selectedNodes() - return nodes_created - - def _fix_z_order(self, placeholder): - """Fix the problem of z_order when a backdrop is create.""" - - nodes_created = placeholder.data["last_created"] - created_backdrops = [] - bd_orders = set() - for node in nodes_created: - if isinstance(node, nuke.BackdropNode): - created_backdrops.append(node) - bd_orders.add(node.knob("z_order").getValue()) - - if not bd_orders: - return - - sib_orders = set() - for node_name in placeholder.data["siblings"]: - node = nuke.toNode(node_name) - if isinstance(node, nuke.BackdropNode): - sib_orders.add(node.knob("z_order").getValue()) - - if not sib_orders: - return - - min_order = min(bd_orders) - max_order = max(sib_orders) - for backdrop_node in created_backdrops: - z_order = backdrop_node.knob("z_order").getValue() - backdrop_node.knob("z_order").setValue( - z_order + max_order - min_order + 1) - - def _imprint_siblings(self, placeholder): - """ - - add siblings names to placeholder attributes (nodes created with it) - - add Id to the attributes of all the other nodes - """ - - created_nodes = placeholder.data["last_created"] - created_nodes_set = set(created_nodes) - - for node in created_nodes: - node_knobs = node.knobs() - - if ( - "is_placeholder" not in node_knobs - or ( - "is_placeholder" in node_knobs - and node.knob("is_placeholder").value() - ) - ): - siblings = list(created_nodes_set - {node}) - siblings_name = get_names_from_nodes(siblings) - siblings = {"siblings": siblings_name} - imprint(node, siblings) - - def _imprint_inits(self): - """Add initial positions and dimensions to the attributes""" - - for node in nuke.allNodes(): - refresh_node(node) - imprint(node, {"x_init": node.xpos(), "y_init": node.ypos()}) - node.knob("x_init").setVisible(False) - node.knob("y_init").setVisible(False) - width = node.screenWidth() - height = node.screenHeight() - if "bdwidth" in node.knobs(): - imprint(node, {"w_init": width, "h_init": height}) - node.knob("w_init").setVisible(False) - node.knob("h_init").setVisible(False) - refresh_node(node) - - def _update_nodes( - self, placeholder, nodes, considered_nodes, offset_y=None - ): - """Adjust backdrop nodes dimensions and positions. - - Considering some nodes sizes. 
- - Args: - nodes (list): list of nodes to update - considered_nodes (list): list of nodes to consider while updating - positions and dimensions - offset (int): distance between copies - """ - - placeholder_node = nuke.toNode(placeholder.scene_identifier) - - min_x, min_y, max_x, max_y = get_extreme_positions(considered_nodes) - - diff_x = diff_y = 0 - contained_nodes = [] # for backdrops - - if offset_y is None: - width_ph = placeholder_node.screenWidth() - height_ph = placeholder_node.screenHeight() - diff_y = max_y - min_y - height_ph - diff_x = max_x - min_x - width_ph - contained_nodes = [placeholder_node] - min_x = placeholder_node.xpos() - min_y = placeholder_node.ypos() - else: - siblings = get_nodes_by_names(placeholder.data["siblings"]) - minX, _, maxX, _ = get_extreme_positions(siblings) - diff_y = max_y - min_y + 20 - diff_x = abs(max_x - min_x - maxX + minX) - contained_nodes = considered_nodes - - if diff_y <= 0 and diff_x <= 0: - return - - for node in nodes: - refresh_node(node) - - if ( - node == placeholder_node - or node in considered_nodes - ): - continue - - if ( - not isinstance(node, nuke.BackdropNode) - or ( - isinstance(node, nuke.BackdropNode) - and not set(contained_nodes) <= set(node.getNodes()) - ) - ): - if offset_y is None and node.xpos() >= min_x: - node.setXpos(node.xpos() + diff_x) - - if node.ypos() >= min_y: - node.setYpos(node.ypos() + diff_y) - - else: - width = node.screenWidth() - height = node.screenHeight() - node.knob("bdwidth").setValue(width + diff_x) - node.knob("bdheight").setValue(height + diff_y) - - refresh_node(node) - - def _set_created_connections(self, placeholder): - """ - set inputs and outputs of created nodes""" - - placeholder_node = nuke.toNode(placeholder.scene_identifier) - input_node, output_node = get_group_io_nodes( - placeholder.data["last_created"] - ) - for node in placeholder_node.dependent(): - for idx in range(node.inputs()): - if node.input(idx) == placeholder_node and output_node: - node.setInput(idx, output_node) - - for node in placeholder_node.dependencies(): - for idx in range(placeholder_node.inputs()): - if placeholder_node.input(idx) == node and input_node: - input_node.setInput(0, node) - - def _create_sib_copies(self, placeholder): - """ creating copies of the palce_holder siblings (the ones who were - created with it) for the new nodes added - - Returns : - copies (dict) : with copied nodes names and their copies - """ - - copies = {} - siblings = get_nodes_by_names(placeholder.data["siblings"]) - for node in siblings: - new_node = duplicate_node(node) - - x_init = int(new_node.knob("x_init").getValue()) - y_init = int(new_node.knob("y_init").getValue()) - new_node.setXYpos(x_init, y_init) - if isinstance(new_node, nuke.BackdropNode): - w_init = new_node.knob("w_init").getValue() - h_init = new_node.knob("h_init").getValue() - new_node.knob("bdwidth").setValue(w_init) - new_node.knob("bdheight").setValue(h_init) - refresh_node(node) - - if "repre_id" in node.knobs().keys(): - node.removeKnob(node.knob("repre_id")) - copies[node.name()] = new_node - return copies - - def _set_copies_connections(self, placeholder, copies): - """Set inputs and outputs of the copies. - - Args: - copies (dict): Copied nodes by their names. 
- """ - - last_input, last_output = get_group_io_nodes( - placeholder.data["last_created"] - ) - siblings = get_nodes_by_names(placeholder.data["siblings"]) - siblings_input, siblings_output = get_group_io_nodes(siblings) - copy_input = copies[siblings_input.name()] - copy_output = copies[siblings_output.name()] - - for node_init in siblings: - if node_init == siblings_output: - continue - - node_copy = copies[node_init.name()] - for node in node_init.dependent(): - for idx in range(node.inputs()): - if node.input(idx) != node_init: - continue - - if node in siblings: - copies[node.name()].setInput(idx, node_copy) - else: - last_input.setInput(0, node_copy) - - for node in node_init.dependencies(): - for idx in range(node_init.inputs()): - if node_init.input(idx) != node: - continue - - if node_init == siblings_input: - copy_input.setInput(idx, node) - elif node in siblings: - node_copy.setInput(idx, copies[node.name()]) - else: - node_copy.setInput(idx, last_output) - - siblings_input.setInput(0, copy_output) - - def build_workfile_template(*args, **kwargs): builder = NukeTemplateBuilder(registered_host()) builder.build_template(*args, **kwargs) diff --git a/client/ayon_core/hosts/nuke/plugins/template/create_placeholder.py b/client/ayon_core/hosts/nuke/plugins/template/create_placeholder.py new file mode 100644 index 0000000000..a5490021e4 --- /dev/null +++ b/client/ayon_core/hosts/nuke/plugins/template/create_placeholder.py @@ -0,0 +1,428 @@ +import nuke + +from ayon_core.pipeline.workfile.workfile_template_builder import ( + CreatePlaceholderItem, + PlaceholderCreateMixin, +) +from ayon_core.hosts.nuke.api.lib import ( + find_free_space_to_paste_nodes, + get_extreme_positions, + get_group_io_nodes, + imprint, + refresh_node, + refresh_nodes, + reset_selection, + get_names_from_nodes, + get_nodes_by_names, + select_nodes, + duplicate_node, + node_tempfile, +) +from ayon_core.hosts.nuke.api.workfile_template_builder import ( + NukePlaceholderPlugin +) + + +class NukePlaceholderCreatePlugin( + NukePlaceholderPlugin, PlaceholderCreateMixin +): + identifier = "nuke.create" + label = "Nuke create" + + def _parse_placeholder_node_data(self, node): + placeholder_data = super( + NukePlaceholderCreatePlugin, self + )._parse_placeholder_node_data(node) + + node_knobs = node.knobs() + nb_children = 0 + if "nb_children" in node_knobs: + nb_children = int(node_knobs["nb_children"].getValue()) + placeholder_data["nb_children"] = nb_children + + siblings = [] + if "siblings" in node_knobs: + siblings = node_knobs["siblings"].values() + placeholder_data["siblings"] = siblings + + node_full_name = node.fullName() + placeholder_data["group_name"] = node_full_name.rpartition(".")[0] + placeholder_data["last_loaded"] = [] + placeholder_data["delete"] = False + return placeholder_data + + def _before_instance_create(self, placeholder): + placeholder.data["nodes_init"] = nuke.allNodes() + + def collect_placeholders(self): + output = [] + scene_placeholders = self._collect_scene_placeholders() + for node_name, node in scene_placeholders.items(): + plugin_identifier_knob = node.knob("plugin_identifier") + if ( + plugin_identifier_knob is None + or plugin_identifier_knob.getValue() != self.identifier + ): + continue + + placeholder_data = self._parse_placeholder_node_data(node) + + output.append( + CreatePlaceholderItem(node_name, placeholder_data, self) + ) + + return output + + def populate_placeholder(self, placeholder): + self.populate_create_placeholder(placeholder) + + def repopulate_placeholder(self, 
placeholder): + self.populate_create_placeholder(placeholder) + + def get_placeholder_options(self, options=None): + return self.get_create_plugin_options(options) + + def post_placeholder_process(self, placeholder, failed): + """Cleanup placeholder after load of its corresponding representations. + + Args: + placeholder (PlaceholderItem): Item which was just used to load + representation. + failed (bool): Loading of representation failed. + """ + # deselect all selected nodes + placeholder_node = nuke.toNode(placeholder.scene_identifier) + + # getting the latest nodes added + nodes_init = placeholder.data["nodes_init"] + nodes_created = list(set(nuke.allNodes()) - set(nodes_init)) + self.log.debug("Created nodes: {}".format(nodes_created)) + if not nodes_created: + return + + placeholder.data["delete"] = True + + nodes_created = self._move_to_placeholder_group( + placeholder, nodes_created + ) + placeholder.data["last_created"] = nodes_created + refresh_nodes(nodes_created) + + # positioning of the created nodes + min_x, min_y, _, _ = get_extreme_positions(nodes_created) + for node in nodes_created: + xpos = (node.xpos() - min_x) + placeholder_node.xpos() + ypos = (node.ypos() - min_y) + placeholder_node.ypos() + node.setXYpos(xpos, ypos) + refresh_nodes(nodes_created) + + # fix the problem of z_order for backdrops + self._fix_z_order(placeholder) + + if placeholder.data.get("keep_placeholder"): + self._imprint_siblings(placeholder) + + if placeholder.data["nb_children"] == 0: + # save initial nodes positions and dimensions, update them + # and set inputs and outputs of created nodes + + if placeholder.data.get("keep_placeholder"): + self._imprint_inits() + self._update_nodes(placeholder, nuke.allNodes(), nodes_created) + + self._set_created_connections(placeholder) + + elif placeholder.data["siblings"]: + # create copies of placeholder siblings for the new created nodes, + # set their inputs and outputs and update all nodes positions and + # dimensions and siblings names + + siblings = get_nodes_by_names(placeholder.data["siblings"]) + refresh_nodes(siblings) + copies = self._create_sib_copies(placeholder) + new_nodes = list(copies.values()) # copies nodes + self._update_nodes(new_nodes, nodes_created) + placeholder_node.removeKnob(placeholder_node.knob("siblings")) + new_nodes_name = get_names_from_nodes(new_nodes) + imprint(placeholder_node, {"siblings": new_nodes_name}) + self._set_copies_connections(placeholder, copies) + + self._update_nodes( + nuke.allNodes(), + new_nodes + nodes_created, + 20 + ) + + new_siblings = get_names_from_nodes(new_nodes) + placeholder.data["siblings"] = new_siblings + + else: + # if the placeholder doesn't have siblings, the created + # nodes will be placed in a free space + + xpointer, ypointer = find_free_space_to_paste_nodes( + nodes_created, direction="bottom", offset=200 + ) + node = nuke.createNode("NoOp") + reset_selection() + nuke.delete(node) + for node in nodes_created: + xpos = (node.xpos() - min_x) + xpointer + ypos = (node.ypos() - min_y) + ypointer + node.setXYpos(xpos, ypos) + + placeholder.data["nb_children"] += 1 + reset_selection() + + # go back to root group + nuke.root().begin() + + def _move_to_placeholder_group(self, placeholder, nodes_created): + """ + opening the placeholder's group and copying created nodes in it. 
+ + Returns : + nodes_created (list): the new list of pasted nodes + """ + groups_name = placeholder.data["group_name"] + reset_selection() + select_nodes(nodes_created) + if groups_name: + with node_tempfile() as filepath: + nuke.nodeCopy(filepath) + for node in nuke.selectedNodes(): + nuke.delete(node) + group = nuke.toNode(groups_name) + group.begin() + nuke.nodePaste(filepath) + nodes_created = nuke.selectedNodes() + return nodes_created + + def _fix_z_order(self, placeholder): + """Fix the problem of z_order when a backdrop is create.""" + + nodes_created = placeholder.data["last_created"] + created_backdrops = [] + bd_orders = set() + for node in nodes_created: + if isinstance(node, nuke.BackdropNode): + created_backdrops.append(node) + bd_orders.add(node.knob("z_order").getValue()) + + if not bd_orders: + return + + sib_orders = set() + for node_name in placeholder.data["siblings"]: + node = nuke.toNode(node_name) + if isinstance(node, nuke.BackdropNode): + sib_orders.add(node.knob("z_order").getValue()) + + if not sib_orders: + return + + min_order = min(bd_orders) + max_order = max(sib_orders) + for backdrop_node in created_backdrops: + z_order = backdrop_node.knob("z_order").getValue() + backdrop_node.knob("z_order").setValue( + z_order + max_order - min_order + 1) + + def _imprint_siblings(self, placeholder): + """ + - add siblings names to placeholder attributes (nodes created with it) + - add Id to the attributes of all the other nodes + """ + + created_nodes = placeholder.data["last_created"] + created_nodes_set = set(created_nodes) + + for node in created_nodes: + node_knobs = node.knobs() + + if ( + "is_placeholder" not in node_knobs + or ( + "is_placeholder" in node_knobs + and node.knob("is_placeholder").value() + ) + ): + siblings = list(created_nodes_set - {node}) + siblings_name = get_names_from_nodes(siblings) + siblings = {"siblings": siblings_name} + imprint(node, siblings) + + def _imprint_inits(self): + """Add initial positions and dimensions to the attributes""" + + for node in nuke.allNodes(): + refresh_node(node) + imprint(node, {"x_init": node.xpos(), "y_init": node.ypos()}) + node.knob("x_init").setVisible(False) + node.knob("y_init").setVisible(False) + width = node.screenWidth() + height = node.screenHeight() + if "bdwidth" in node.knobs(): + imprint(node, {"w_init": width, "h_init": height}) + node.knob("w_init").setVisible(False) + node.knob("h_init").setVisible(False) + refresh_node(node) + + def _update_nodes( + self, placeholder, nodes, considered_nodes, offset_y=None + ): + """Adjust backdrop nodes dimensions and positions. + + Considering some nodes sizes. 
+ + Args: + nodes (list): list of nodes to update + considered_nodes (list): list of nodes to consider while updating + positions and dimensions + offset (int): distance between copies + """ + + placeholder_node = nuke.toNode(placeholder.scene_identifier) + + min_x, min_y, max_x, max_y = get_extreme_positions(considered_nodes) + + diff_x = diff_y = 0 + contained_nodes = [] # for backdrops + + if offset_y is None: + width_ph = placeholder_node.screenWidth() + height_ph = placeholder_node.screenHeight() + diff_y = max_y - min_y - height_ph + diff_x = max_x - min_x - width_ph + contained_nodes = [placeholder_node] + min_x = placeholder_node.xpos() + min_y = placeholder_node.ypos() + else: + siblings = get_nodes_by_names(placeholder.data["siblings"]) + minX, _, maxX, _ = get_extreme_positions(siblings) + diff_y = max_y - min_y + 20 + diff_x = abs(max_x - min_x - maxX + minX) + contained_nodes = considered_nodes + + if diff_y <= 0 and diff_x <= 0: + return + + for node in nodes: + refresh_node(node) + + if ( + node == placeholder_node + or node in considered_nodes + ): + continue + + if ( + not isinstance(node, nuke.BackdropNode) + or ( + isinstance(node, nuke.BackdropNode) + and not set(contained_nodes) <= set(node.getNodes()) + ) + ): + if offset_y is None and node.xpos() >= min_x: + node.setXpos(node.xpos() + diff_x) + + if node.ypos() >= min_y: + node.setYpos(node.ypos() + diff_y) + + else: + width = node.screenWidth() + height = node.screenHeight() + node.knob("bdwidth").setValue(width + diff_x) + node.knob("bdheight").setValue(height + diff_y) + + refresh_node(node) + + def _set_created_connections(self, placeholder): + """ + set inputs and outputs of created nodes""" + + placeholder_node = nuke.toNode(placeholder.scene_identifier) + input_node, output_node = get_group_io_nodes( + placeholder.data["last_created"] + ) + for node in placeholder_node.dependent(): + for idx in range(node.inputs()): + if node.input(idx) == placeholder_node and output_node: + node.setInput(idx, output_node) + + for node in placeholder_node.dependencies(): + for idx in range(placeholder_node.inputs()): + if placeholder_node.input(idx) == node and input_node: + input_node.setInput(0, node) + + def _create_sib_copies(self, placeholder): + """ creating copies of the palce_holder siblings (the ones who were + created with it) for the new nodes added + + Returns : + copies (dict) : with copied nodes names and their copies + """ + + copies = {} + siblings = get_nodes_by_names(placeholder.data["siblings"]) + for node in siblings: + new_node = duplicate_node(node) + + x_init = int(new_node.knob("x_init").getValue()) + y_init = int(new_node.knob("y_init").getValue()) + new_node.setXYpos(x_init, y_init) + if isinstance(new_node, nuke.BackdropNode): + w_init = new_node.knob("w_init").getValue() + h_init = new_node.knob("h_init").getValue() + new_node.knob("bdwidth").setValue(w_init) + new_node.knob("bdheight").setValue(h_init) + refresh_node(node) + + if "repre_id" in node.knobs().keys(): + node.removeKnob(node.knob("repre_id")) + copies[node.name()] = new_node + return copies + + def _set_copies_connections(self, placeholder, copies): + """Set inputs and outputs of the copies. + + Args: + copies (dict): Copied nodes by their names. 
+ """ + + last_input, last_output = get_group_io_nodes( + placeholder.data["last_created"] + ) + siblings = get_nodes_by_names(placeholder.data["siblings"]) + siblings_input, siblings_output = get_group_io_nodes(siblings) + copy_input = copies[siblings_input.name()] + copy_output = copies[siblings_output.name()] + + for node_init in siblings: + if node_init == siblings_output: + continue + + node_copy = copies[node_init.name()] + for node in node_init.dependent(): + for idx in range(node.inputs()): + if node.input(idx) != node_init: + continue + + if node in siblings: + copies[node.name()].setInput(idx, node_copy) + else: + last_input.setInput(0, node_copy) + + for node in node_init.dependencies(): + for idx in range(node_init.inputs()): + if node_init.input(idx) != node: + continue + + if node_init == siblings_input: + copy_input.setInput(idx, node) + elif node in siblings: + node_copy.setInput(idx, copies[node.name()]) + else: + node_copy.setInput(idx, last_output) + + siblings_input.setInput(0, copy_output) diff --git a/client/ayon_core/hosts/nuke/plugins/template/load_placeholder.py b/client/ayon_core/hosts/nuke/plugins/template/load_placeholder.py new file mode 100644 index 0000000000..258f48c9d3 --- /dev/null +++ b/client/ayon_core/hosts/nuke/plugins/template/load_placeholder.py @@ -0,0 +1,455 @@ +import nuke + +from ayon_core.pipeline.workfile.workfile_template_builder import ( + LoadPlaceholderItem, + PlaceholderLoadMixin, +) +from ayon_core.hosts.nuke.api.lib import ( + find_free_space_to_paste_nodes, + get_extreme_positions, + get_group_io_nodes, + imprint, + refresh_node, + refresh_nodes, + reset_selection, + get_names_from_nodes, + get_nodes_by_names, + select_nodes, + duplicate_node, + node_tempfile, +) +from ayon_core.hosts.nuke.api.workfile_template_builder import ( + NukePlaceholderPlugin +) + + +class NukePlaceholderLoadPlugin(NukePlaceholderPlugin, PlaceholderLoadMixin): + identifier = "nuke.load" + label = "Nuke load" + + def _parse_placeholder_node_data(self, node): + placeholder_data = super( + NukePlaceholderLoadPlugin, self + )._parse_placeholder_node_data(node) + + node_knobs = node.knobs() + nb_children = 0 + if "nb_children" in node_knobs: + nb_children = int(node_knobs["nb_children"].getValue()) + placeholder_data["nb_children"] = nb_children + + siblings = [] + if "siblings" in node_knobs: + siblings = node_knobs["siblings"].values() + placeholder_data["siblings"] = siblings + + node_full_name = node.fullName() + placeholder_data["group_name"] = node_full_name.rpartition(".")[0] + placeholder_data["last_loaded"] = [] + placeholder_data["delete"] = False + return placeholder_data + + def _get_loaded_repre_ids(self): + loaded_representation_ids = self.builder.get_shared_populate_data( + "loaded_representation_ids" + ) + if loaded_representation_ids is None: + loaded_representation_ids = set() + for node in nuke.allNodes(): + if "repre_id" in node.knobs(): + loaded_representation_ids.add( + node.knob("repre_id").getValue() + ) + + self.builder.set_shared_populate_data( + "loaded_representation_ids", loaded_representation_ids + ) + return loaded_representation_ids + + def _before_placeholder_load(self, placeholder): + placeholder.data["nodes_init"] = nuke.allNodes() + + def _before_repre_load(self, placeholder, representation): + placeholder.data["last_repre_id"] = representation["id"] + + def collect_placeholders(self): + output = [] + scene_placeholders = self._collect_scene_placeholders() + for node_name, node in scene_placeholders.items(): + 
plugin_identifier_knob = node.knob("plugin_identifier") + if ( + plugin_identifier_knob is None + or plugin_identifier_knob.getValue() != self.identifier + ): + continue + + placeholder_data = self._parse_placeholder_node_data(node) + # TODO do data validations and maybe updgrades if are invalid + output.append( + LoadPlaceholderItem(node_name, placeholder_data, self) + ) + + return output + + def populate_placeholder(self, placeholder): + self.populate_load_placeholder(placeholder) + + def repopulate_placeholder(self, placeholder): + repre_ids = self._get_loaded_repre_ids() + self.populate_load_placeholder(placeholder, repre_ids) + + def get_placeholder_options(self, options=None): + return self.get_load_plugin_options(options) + + def post_placeholder_process(self, placeholder, failed): + """Cleanup placeholder after load of its corresponding representations. + + Args: + placeholder (PlaceholderItem): Item which was just used to load + representation. + failed (bool): Loading of representation failed. + """ + # deselect all selected nodes + placeholder_node = nuke.toNode(placeholder.scene_identifier) + + # getting the latest nodes added + # TODO get from shared populate data! + nodes_init = placeholder.data["nodes_init"] + nodes_loaded = list(set(nuke.allNodes()) - set(nodes_init)) + self.log.debug("Loaded nodes: {}".format(nodes_loaded)) + if not nodes_loaded: + return + + placeholder.data["delete"] = True + + nodes_loaded = self._move_to_placeholder_group( + placeholder, nodes_loaded + ) + placeholder.data["last_loaded"] = nodes_loaded + refresh_nodes(nodes_loaded) + + # positioning of the loaded nodes + min_x, min_y, _, _ = get_extreme_positions(nodes_loaded) + for node in nodes_loaded: + xpos = (node.xpos() - min_x) + placeholder_node.xpos() + ypos = (node.ypos() - min_y) + placeholder_node.ypos() + node.setXYpos(xpos, ypos) + refresh_nodes(nodes_loaded) + + # fix the problem of z_order for backdrops + self._fix_z_order(placeholder) + + if placeholder.data.get("keep_placeholder"): + self._imprint_siblings(placeholder) + + if placeholder.data["nb_children"] == 0: + # save initial nodes positions and dimensions, update them + # and set inputs and outputs of loaded nodes + if placeholder.data.get("keep_placeholder"): + self._imprint_inits() + self._update_nodes(placeholder, nuke.allNodes(), nodes_loaded) + + self._set_loaded_connections(placeholder) + + elif placeholder.data["siblings"]: + # create copies of placeholder siblings for the new loaded nodes, + # set their inputs and outputs and update all nodes positions and + # dimensions and siblings names + + siblings = get_nodes_by_names(placeholder.data["siblings"]) + refresh_nodes(siblings) + copies = self._create_sib_copies(placeholder) + new_nodes = list(copies.values()) # copies nodes + self._update_nodes(new_nodes, nodes_loaded) + placeholder_node.removeKnob(placeholder_node.knob("siblings")) + new_nodes_name = get_names_from_nodes(new_nodes) + imprint(placeholder_node, {"siblings": new_nodes_name}) + self._set_copies_connections(placeholder, copies) + + self._update_nodes( + nuke.allNodes(), + new_nodes + nodes_loaded, + 20 + ) + + new_siblings = get_names_from_nodes(new_nodes) + placeholder.data["siblings"] = new_siblings + + else: + # if the placeholder doesn't have siblings, the loaded + # nodes will be placed in a free space + + xpointer, ypointer = find_free_space_to_paste_nodes( + nodes_loaded, direction="bottom", offset=200 + ) + node = nuke.createNode("NoOp") + reset_selection() + nuke.delete(node) + for node in 
nodes_loaded: + xpos = (node.xpos() - min_x) + xpointer + ypos = (node.ypos() - min_y) + ypointer + node.setXYpos(xpos, ypos) + + placeholder.data["nb_children"] += 1 + reset_selection() + + # go back to root group + nuke.root().begin() + + def _move_to_placeholder_group(self, placeholder, nodes_loaded): + """ + opening the placeholder's group and copying loaded nodes in it. + + Returns : + nodes_loaded (list): the new list of pasted nodes + """ + + groups_name = placeholder.data["group_name"] + reset_selection() + select_nodes(nodes_loaded) + if groups_name: + with node_tempfile() as filepath: + nuke.nodeCopy(filepath) + for node in nuke.selectedNodes(): + nuke.delete(node) + group = nuke.toNode(groups_name) + group.begin() + nuke.nodePaste(filepath) + nodes_loaded = nuke.selectedNodes() + return nodes_loaded + + def _fix_z_order(self, placeholder): + """Fix the problem of z_order when a backdrop is loaded.""" + + nodes_loaded = placeholder.data["last_loaded"] + loaded_backdrops = [] + bd_orders = set() + for node in nodes_loaded: + if isinstance(node, nuke.BackdropNode): + loaded_backdrops.append(node) + bd_orders.add(node.knob("z_order").getValue()) + + if not bd_orders: + return + + sib_orders = set() + for node_name in placeholder.data["siblings"]: + node = nuke.toNode(node_name) + if isinstance(node, nuke.BackdropNode): + sib_orders.add(node.knob("z_order").getValue()) + + if not sib_orders: + return + + min_order = min(bd_orders) + max_order = max(sib_orders) + for backdrop_node in loaded_backdrops: + z_order = backdrop_node.knob("z_order").getValue() + backdrop_node.knob("z_order").setValue( + z_order + max_order - min_order + 1) + + def _imprint_siblings(self, placeholder): + """ + - add siblings names to placeholder attributes (nodes loaded with it) + - add Id to the attributes of all the other nodes + """ + + loaded_nodes = placeholder.data["last_loaded"] + loaded_nodes_set = set(loaded_nodes) + data = {"repre_id": str(placeholder.data["last_repre_id"])} + + for node in loaded_nodes: + node_knobs = node.knobs() + if "builder_type" not in node_knobs: + # save the id of representation for all imported nodes + imprint(node, data) + node.knob("repre_id").setVisible(False) + refresh_node(node) + continue + + if ( + "is_placeholder" not in node_knobs + or ( + "is_placeholder" in node_knobs + and node.knob("is_placeholder").value() + ) + ): + siblings = list(loaded_nodes_set - {node}) + siblings_name = get_names_from_nodes(siblings) + siblings = {"siblings": siblings_name} + imprint(node, siblings) + + def _imprint_inits(self): + """Add initial positions and dimensions to the attributes""" + + for node in nuke.allNodes(): + refresh_node(node) + imprint(node, {"x_init": node.xpos(), "y_init": node.ypos()}) + node.knob("x_init").setVisible(False) + node.knob("y_init").setVisible(False) + width = node.screenWidth() + height = node.screenHeight() + if "bdwidth" in node.knobs(): + imprint(node, {"w_init": width, "h_init": height}) + node.knob("w_init").setVisible(False) + node.knob("h_init").setVisible(False) + refresh_node(node) + + def _update_nodes( + self, placeholder, nodes, considered_nodes, offset_y=None + ): + """Adjust backdrop nodes dimensions and positions. + + Considering some nodes sizes. 
+ + Args: + nodes (list): list of nodes to update + considered_nodes (list): list of nodes to consider while updating + positions and dimensions + offset (int): distance between copies + """ + + placeholder_node = nuke.toNode(placeholder.scene_identifier) + + min_x, min_y, max_x, max_y = get_extreme_positions(considered_nodes) + + diff_x = diff_y = 0 + contained_nodes = [] # for backdrops + + if offset_y is None: + width_ph = placeholder_node.screenWidth() + height_ph = placeholder_node.screenHeight() + diff_y = max_y - min_y - height_ph + diff_x = max_x - min_x - width_ph + contained_nodes = [placeholder_node] + min_x = placeholder_node.xpos() + min_y = placeholder_node.ypos() + else: + siblings = get_nodes_by_names(placeholder.data["siblings"]) + minX, _, maxX, _ = get_extreme_positions(siblings) + diff_y = max_y - min_y + 20 + diff_x = abs(max_x - min_x - maxX + minX) + contained_nodes = considered_nodes + + if diff_y <= 0 and diff_x <= 0: + return + + for node in nodes: + refresh_node(node) + + if ( + node == placeholder_node + or node in considered_nodes + ): + continue + + if ( + not isinstance(node, nuke.BackdropNode) + or ( + isinstance(node, nuke.BackdropNode) + and not set(contained_nodes) <= set(node.getNodes()) + ) + ): + if offset_y is None and node.xpos() >= min_x: + node.setXpos(node.xpos() + diff_x) + + if node.ypos() >= min_y: + node.setYpos(node.ypos() + diff_y) + + else: + width = node.screenWidth() + height = node.screenHeight() + node.knob("bdwidth").setValue(width + diff_x) + node.knob("bdheight").setValue(height + diff_y) + + refresh_node(node) + + def _set_loaded_connections(self, placeholder): + """ + set inputs and outputs of loaded nodes""" + + placeholder_node = nuke.toNode(placeholder.scene_identifier) + input_node, output_node = get_group_io_nodes( + placeholder.data["last_loaded"] + ) + for node in placeholder_node.dependent(): + for idx in range(node.inputs()): + if node.input(idx) == placeholder_node and output_node: + node.setInput(idx, output_node) + + for node in placeholder_node.dependencies(): + for idx in range(placeholder_node.inputs()): + if placeholder_node.input(idx) == node and input_node: + input_node.setInput(0, node) + + def _create_sib_copies(self, placeholder): + """ creating copies of the palce_holder siblings (the ones who were + loaded with it) for the new nodes added + + Returns : + copies (dict) : with copied nodes names and their copies + """ + + copies = {} + siblings = get_nodes_by_names(placeholder.data["siblings"]) + for node in siblings: + new_node = duplicate_node(node) + + x_init = int(new_node.knob("x_init").getValue()) + y_init = int(new_node.knob("y_init").getValue()) + new_node.setXYpos(x_init, y_init) + if isinstance(new_node, nuke.BackdropNode): + w_init = new_node.knob("w_init").getValue() + h_init = new_node.knob("h_init").getValue() + new_node.knob("bdwidth").setValue(w_init) + new_node.knob("bdheight").setValue(h_init) + refresh_node(node) + + if "repre_id" in node.knobs().keys(): + node.removeKnob(node.knob("repre_id")) + copies[node.name()] = new_node + return copies + + def _set_copies_connections(self, placeholder, copies): + """Set inputs and outputs of the copies. + + Args: + copies (dict): Copied nodes by their names. 
+ """ + + last_input, last_output = get_group_io_nodes( + placeholder.data["last_loaded"] + ) + siblings = get_nodes_by_names(placeholder.data["siblings"]) + siblings_input, siblings_output = get_group_io_nodes(siblings) + copy_input = copies[siblings_input.name()] + copy_output = copies[siblings_output.name()] + + for node_init in siblings: + if node_init == siblings_output: + continue + + node_copy = copies[node_init.name()] + for node in node_init.dependent(): + for idx in range(node.inputs()): + if node.input(idx) != node_init: + continue + + if node in siblings: + copies[node.name()].setInput(idx, node_copy) + else: + last_input.setInput(0, node_copy) + + for node in node_init.dependencies(): + for idx in range(node_init.inputs()): + if node_init.input(idx) != node: + continue + + if node_init == siblings_input: + copy_input.setInput(idx, node) + elif node in siblings: + node_copy.setInput(idx, copies[node.name()]) + else: + node_copy.setInput(idx, last_output) + + siblings_input.setInput(0, copy_output) diff --git a/client/ayon_core/pipeline/__init__.py b/client/ayon_core/pipeline/__init__.py index d1a181a353..3102ce1da3 100644 --- a/client/ayon_core/pipeline/__init__.py +++ b/client/ayon_core/pipeline/__init__.py @@ -97,6 +97,15 @@ from .context_tools import ( get_current_folder_path, get_current_task_name ) + +from .workfile import ( + discover_template_placeholder_plugins, + register_template_placeholder_plugin, + deregister_template_placeholder_plugin, + register_template_placeholder_plugin_path, + deregister_template_placeholder_plugin_path, +) + install = install_host uninstall = uninstall_host @@ -198,6 +207,13 @@ __all__ = ( "get_current_folder_path", "get_current_task_name", + # Workfile templates + "discover_template_placeholder_plugins", + "register_template_placeholder_plugin", + "deregister_template_placeholder_plugin", + "register_template_placeholder_plugin_path", + "deregister_template_placeholder_plugin_path", + # Backwards compatible function names "install", "uninstall", diff --git a/client/ayon_core/pipeline/workfile/__init__.py b/client/ayon_core/pipeline/workfile/__init__.py index 36766e3a04..149036117a 100644 --- a/client/ayon_core/pipeline/workfile/__init__.py +++ b/client/ayon_core/pipeline/workfile/__init__.py @@ -21,6 +21,15 @@ from .utils import ( from .build_workfile import BuildWorkfile +from .workfile_template_builder import ( + discover_template_placeholder_plugins, + register_template_placeholder_plugin, + deregister_template_placeholder_plugin, + register_template_placeholder_plugin_path, + deregister_template_placeholder_plugin_path, +) + + __all__ = ( "get_workfile_template_key_from_context", "get_workfile_template_key", @@ -39,4 +48,10 @@ __all__ = ( "should_open_workfiles_tool_on_launch", "BuildWorkfile", + + "discover_template_placeholder_plugins", + "register_template_placeholder_plugin", + "deregister_template_placeholder_plugin", + "register_template_placeholder_plugin_path", + "deregister_template_placeholder_plugin_path", ) diff --git a/client/ayon_core/pipeline/workfile/workfile_template_builder.py b/client/ayon_core/pipeline/workfile/workfile_template_builder.py index 5e63ba444a..4dad7ae17f 100644 --- a/client/ayon_core/pipeline/workfile/workfile_template_builder.py +++ b/client/ayon_core/pipeline/workfile/workfile_template_builder.py @@ -43,6 +43,13 @@ from ayon_core.pipeline.load import ( get_representation_contexts, load_with_repre_context, ) +from ayon_core.pipeline.plugin_discover import ( + discover, + register_plugin, + 
register_plugin_path, + deregister_plugin, + deregister_plugin_path +) from ayon_core.pipeline.create import ( discover_legacy_creator_plugins, @@ -211,10 +218,14 @@ class AbstractTemplateBuilder(object): Returns: List[PlaceholderPlugin]: Plugin classes available for host. """ + plugins = [] + # Backwards compatibility if hasattr(self._host, "get_workfile_build_placeholder_plugins"): return self._host.get_workfile_build_placeholder_plugins() - return [] + + plugins.extend(discover(PlaceholderPlugin)) + return plugins @property def host(self): @@ -1918,3 +1929,23 @@ class CreatePlaceholderItem(PlaceholderItem): def create_failed(self, creator_data): self._failed_created_publish_instances.append(creator_data) + + +def discover_template_placeholder_plugins(*args, **kwargs): + return discover(PlaceholderPlugin, *args, **kwargs) + + +def register_template_placeholder_plugin(plugin: PlaceholderPlugin): + register_plugin(PlaceholderPlugin, plugin) + + +def deregister_template_placeholder_plugin(plugin: PlaceholderPlugin): + deregister_plugin(PlaceholderPlugin, plugin) + + +def register_template_placeholder_plugin_path(path: str): + register_plugin_path(PlaceholderPlugin, path) + + +def deregister_template_placeholder_plugin_path(path: str): + deregister_plugin_path(PlaceholderPlugin, path) From 269d395141306c3173f316b172d160f8d43d041a Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 17 Apr 2024 23:42:30 +0200 Subject: [PATCH 38/67] Fix ayon core refactor bug, similar to fix in #330 --- .../ayon_core/pipeline/workfile/workfile_template_builder.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/client/ayon_core/pipeline/workfile/workfile_template_builder.py b/client/ayon_core/pipeline/workfile/workfile_template_builder.py index 4dad7ae17f..0434c44cb3 100644 --- a/client/ayon_core/pipeline/workfile/workfile_template_builder.py +++ b/client/ayon_core/pipeline/workfile/workfile_template_builder.py @@ -1479,7 +1479,9 @@ class PlaceholderLoadMixin(object): product_name_regex = None if product_name_regex_value: product_name_regex = re.compile(product_name_regex_value) - product_type = placeholder.data["family"] + product_type = placeholder.data.get("product_type") + if product_type is None: + product_type = placeholder.data["family"] builder_type = placeholder.data["builder_type"] folder_ids = [] From d7b20dff37693492a16e6742d7e679808c4fe7a0 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 17 Apr 2024 23:53:20 +0200 Subject: [PATCH 39/67] Workfile templates: add event system to Workfile Template Builder --- .../workfile/workfile_template_builder.py | 68 +++++++++++++++++++ 1 file changed, 68 insertions(+) diff --git a/client/ayon_core/pipeline/workfile/workfile_template_builder.py b/client/ayon_core/pipeline/workfile/workfile_template_builder.py index 5e63ba444a..f53aee6341 100644 --- a/client/ayon_core/pipeline/workfile/workfile_template_builder.py +++ b/client/ayon_core/pipeline/workfile/workfile_template_builder.py @@ -36,6 +36,7 @@ from ayon_core.lib import ( filter_profiles, attribute_definitions, ) +from ayon_core.lib.events import EventSystem from ayon_core.lib.attribute_definitions import get_attributes_keys from ayon_core.pipeline import Anatomy from ayon_core.pipeline.load import ( @@ -124,6 +125,8 @@ class AbstractTemplateBuilder(object): self._current_task_entity = _NOT_SET self._linked_folder_entities = _NOT_SET + self._event_system = EventSystem() + @property def project_name(self): if isinstance(self._host, HostBase): @@ -244,6 +247,14 @@ class 
AbstractTemplateBuilder(object): self._log = Logger.get_logger(repr(self)) return self._log + @property + def event_system(self): + """Event System of the Workfile templatee builder. + Returns: + EventSystem: The event system. + """ + return self._event_system + def refresh(self): """Reset cached data.""" @@ -257,6 +268,8 @@ class AbstractTemplateBuilder(object): self._project_settings = None + self._event_system = EventSystem() + self.clear_shared_data() self.clear_shared_populate_data() @@ -729,6 +742,16 @@ class AbstractTemplateBuilder(object): placeholder.set_finished() + # Trigger on_depth_processed event + self.event_system.emit( + topic="template.depth_processed", + data={ + "depth": iter_counter, + "placeholders_by_scene_id": placeholder_by_scene_id + }, + source="builder" + ) + # Clear shared data before getting new placeholders self.clear_shared_populate_data() @@ -747,6 +770,16 @@ class AbstractTemplateBuilder(object): placeholder_by_scene_id[identifier] = placeholder placeholders.append(placeholder) + # Trigger on_finished event + self.event_system.emit( + topic="template.finished", + data={ + "depth": iter_counter, + "placeholders_by_scene_id": placeholder_by_scene_id, + }, + source="builder" + ) + self.refresh() def _get_build_profiles(self): @@ -1102,6 +1135,41 @@ class PlaceholderPlugin(object): plugin_data[key] = value self.builder.set_shared_populate_data(self.identifier, plugin_data) + def register_on_finished_callback( + self, placeholder, callback, order=None + ): + self.register_callback( + placeholder, + topic="template.finished", + callback=callback, + order=order + ) + + def register_on_depth_processed_callback( + self, placeholder, callback, order=0 + ): + self.register_callback( + placeholder, + topic="template.depth_processed", + callback=callback, + order=order + ) + + def register_callback(self, placeholder, topic, callback, order=None): + + if order is None: + # Match placeholder order by default + order = placeholder.order + + # We must persist the callback over time otherwise it will be removed + # by the event system as a valid function reference. We do that here + # always just so it's easier to develop plugins where callbacks might + # be partials or lambdas + placeholder.data.setdefault("callbacks", []).append(callback) + self.log.debug("Registering '%s' callback: %s", topic, callback) + self.builder.event_system.add_callback(topic, callback, order=order) + + class PlaceholderItem(object): """Item representing single item in scene that is a placeholder to process. From edee279f15fa16b549db68e3adb6aba14fc2f541 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 18 Apr 2024 12:04:24 +0200 Subject: [PATCH 40/67] Do not store in placeholder data - it's up to the registering code itself to persist or use the `weakref_partial` implementation from the event system --- .../pipeline/workfile/workfile_template_builder.py | 6 ------ 1 file changed, 6 deletions(-) diff --git a/client/ayon_core/pipeline/workfile/workfile_template_builder.py b/client/ayon_core/pipeline/workfile/workfile_template_builder.py index f53aee6341..d08c951b36 100644 --- a/client/ayon_core/pipeline/workfile/workfile_template_builder.py +++ b/client/ayon_core/pipeline/workfile/workfile_template_builder.py @@ -1161,16 +1161,10 @@ class PlaceholderPlugin(object): # Match placeholder order by default order = placeholder.order - # We must persist the callback over time otherwise it will be removed - # by the event system as a valid function reference. 
We do that here - # always just so it's easier to develop plugins where callbacks might - # be partials or lambdas - placeholder.data.setdefault("callbacks", []).append(callback) self.log.debug("Registering '%s' callback: %s", topic, callback) self.builder.event_system.add_callback(topic, callback, order=order) - class PlaceholderItem(object): """Item representing single item in scene that is a placeholder to process. From 2befe843dc6fe9168b156ccf9501c99ddd48fe9e Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 18 Apr 2024 12:05:06 +0200 Subject: [PATCH 41/67] Do not force order of the placeholder, allow it to be `None` --- .../pipeline/workfile/workfile_template_builder.py | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/client/ayon_core/pipeline/workfile/workfile_template_builder.py b/client/ayon_core/pipeline/workfile/workfile_template_builder.py index d08c951b36..b27b614579 100644 --- a/client/ayon_core/pipeline/workfile/workfile_template_builder.py +++ b/client/ayon_core/pipeline/workfile/workfile_template_builder.py @@ -1146,7 +1146,7 @@ class PlaceholderPlugin(object): ) def register_on_depth_processed_callback( - self, placeholder, callback, order=0 + self, placeholder, callback, order=None ): self.register_callback( placeholder, @@ -1156,11 +1156,6 @@ class PlaceholderPlugin(object): ) def register_callback(self, placeholder, topic, callback, order=None): - - if order is None: - # Match placeholder order by default - order = placeholder.order - self.log.debug("Registering '%s' callback: %s", topic, callback) self.builder.event_system.add_callback(topic, callback, order=order) From 47c7e8634c9deafe307e25cfe4bb654851fbbb63 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 18 Apr 2024 12:10:45 +0200 Subject: [PATCH 42/67] Do not expose the `event_system` on the builder directly - but expose the register and trigger event methods --- .../workfile/workfile_template_builder.py | 60 ++++++++----------- 1 file changed, 26 insertions(+), 34 deletions(-) diff --git a/client/ayon_core/pipeline/workfile/workfile_template_builder.py b/client/ayon_core/pipeline/workfile/workfile_template_builder.py index b27b614579..22a4c984bc 100644 --- a/client/ayon_core/pipeline/workfile/workfile_template_builder.py +++ b/client/ayon_core/pipeline/workfile/workfile_template_builder.py @@ -247,14 +247,6 @@ class AbstractTemplateBuilder(object): self._log = Logger.get_logger(repr(self)) return self._log - @property - def event_system(self): - """Event System of the Workfile templatee builder. - Returns: - EventSystem: The event system. 
- """ - return self._event_system - def refresh(self): """Reset cached data.""" @@ -743,7 +735,7 @@ class AbstractTemplateBuilder(object): placeholder.set_finished() # Trigger on_depth_processed event - self.event_system.emit( + self.trigger_event( topic="template.depth_processed", data={ "depth": iter_counter, @@ -771,7 +763,7 @@ class AbstractTemplateBuilder(object): placeholders.append(placeholder) # Trigger on_finished event - self.event_system.emit( + self.trigger_event( topic="template.finished", data={ "depth": iter_counter, @@ -905,6 +897,30 @@ class AbstractTemplateBuilder(object): "create_first_version": create_first_version } + def trigger_event(self, topic, data=None, source=None): + self._event_system.emit(topic, data, source) + + def register_event_callback(self, topic, callback, order=None): + self._event_system.add_callback(topic, callback, order=order) + + def register_on_finished_callback( + self, callback, order=None + ): + self.register_event_callback( + topic="template.finished", + callback=callback, + order=order + ) + + def register_on_depth_processed_callback( + self, callback, order=None + ): + self.register_event_callback( + topic="template.depth_processed", + callback=callback, + order=order + ) + @six.add_metaclass(ABCMeta) class PlaceholderPlugin(object): @@ -1135,30 +1151,6 @@ class PlaceholderPlugin(object): plugin_data[key] = value self.builder.set_shared_populate_data(self.identifier, plugin_data) - def register_on_finished_callback( - self, placeholder, callback, order=None - ): - self.register_callback( - placeholder, - topic="template.finished", - callback=callback, - order=order - ) - - def register_on_depth_processed_callback( - self, placeholder, callback, order=None - ): - self.register_callback( - placeholder, - topic="template.depth_processed", - callback=callback, - order=order - ) - - def register_callback(self, placeholder, topic, callback, order=None): - self.log.debug("Registering '%s' callback: %s", topic, callback) - self.builder.event_system.add_callback(topic, callback, order=order) - class PlaceholderItem(object): """Item representing single item in scene that is a placeholder to process. 
From da5abf836773283ccb4d1296a35258cc9106e443 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Fri, 19 Apr 2024 22:24:44 +0200 Subject: [PATCH 43/67] Update client/ayon_core/pipeline/workfile/workfile_template_builder.py --- .../pipeline/workfile/workfile_template_builder.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/client/ayon_core/pipeline/workfile/workfile_template_builder.py b/client/ayon_core/pipeline/workfile/workfile_template_builder.py index 22a4c984bc..a4fa2b4ddd 100644 --- a/client/ayon_core/pipeline/workfile/workfile_template_builder.py +++ b/client/ayon_core/pipeline/workfile/workfile_template_builder.py @@ -897,25 +897,25 @@ class AbstractTemplateBuilder(object): "create_first_version": create_first_version } - def trigger_event(self, topic, data=None, source=None): + def emit_event(self, topic, data=None, source=None): self._event_system.emit(topic, data, source) - def register_event_callback(self, topic, callback, order=None): + def add_event_callback(self, topic, callback, order=None): self._event_system.add_callback(topic, callback, order=order) - def register_on_finished_callback( + def add_on_finished_callback( self, callback, order=None ): - self.register_event_callback( + self.add_event_callback( topic="template.finished", callback=callback, order=order ) - def register_on_depth_processed_callback( + def add_on_depth_processed_callback( self, callback, order=None ): - self.register_event_callback( + self.add_event_callback( topic="template.depth_processed", callback=callback, order=order From 8194a7b07b1799ffafe960b25e0d86c2feaff258 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Fri, 19 Apr 2024 22:26:54 +0200 Subject: [PATCH 44/67] Return the values of the called functions --- .../workfile/workfile_template_builder.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/client/ayon_core/pipeline/workfile/workfile_template_builder.py b/client/ayon_core/pipeline/workfile/workfile_template_builder.py index a4fa2b4ddd..d189e3e2d6 100644 --- a/client/ayon_core/pipeline/workfile/workfile_template_builder.py +++ b/client/ayon_core/pipeline/workfile/workfile_template_builder.py @@ -36,7 +36,7 @@ from ayon_core.lib import ( filter_profiles, attribute_definitions, ) -from ayon_core.lib.events import EventSystem +from ayon_core.lib.events import EventSystem, EventCallback, Event from ayon_core.lib.attribute_definitions import get_attributes_keys from ayon_core.pipeline import Anatomy from ayon_core.pipeline.load import ( @@ -897,16 +897,16 @@ class AbstractTemplateBuilder(object): "create_first_version": create_first_version } - def emit_event(self, topic, data=None, source=None): - self._event_system.emit(topic, data, source) + def emit_event(self, topic, data=None, source=None) -> Event: + return self._event_system.emit(topic, data, source) def add_event_callback(self, topic, callback, order=None): - self._event_system.add_callback(topic, callback, order=order) + return self._event_system.add_callback(topic, callback, order=order) def add_on_finished_callback( self, callback, order=None - ): - self.add_event_callback( + ) -> EventCallback: + return self.add_event_callback( topic="template.finished", callback=callback, order=order @@ -914,8 +914,8 @@ class AbstractTemplateBuilder(object): def add_on_depth_processed_callback( self, callback, order=None - ): - self.add_event_callback( + ) -> EventCallback: + return self.add_event_callback( topic="template.depth_processed", callback=callback, order=order From 
c6e61028976c2fc1410cb3bac34e9f17ed51fe83 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Fri, 19 Apr 2024 22:30:18 +0200 Subject: [PATCH 45/67] Refactor `TEMPLATE_PLUGINS_PATH` -> `WORKFILE_BUILD_PLUGIN_PATH` Refactor plugin folders `template/` to `workfile_build/` --- client/ayon_core/hosts/aftereffects/api/pipeline.py | 4 ++-- .../{template => workfile_build}/create_placeholder.py | 0 .../{template => workfile_build}/load_placeholder.py | 0 client/ayon_core/hosts/maya/api/pipeline.py | 6 +++--- .../{template => workfile_build}/load_placeholder.py | 0 client/ayon_core/hosts/nuke/api/pipeline.py | 4 ++-- .../{template => workfile_build}/create_placeholder.py | 0 .../{template => workfile_build}/load_placeholder.py | 0 8 files changed, 7 insertions(+), 7 deletions(-) rename client/ayon_core/hosts/aftereffects/plugins/{template => workfile_build}/create_placeholder.py (100%) rename client/ayon_core/hosts/aftereffects/plugins/{template => workfile_build}/load_placeholder.py (100%) rename client/ayon_core/hosts/maya/plugins/{template => workfile_build}/load_placeholder.py (100%) rename client/ayon_core/hosts/nuke/plugins/{template => workfile_build}/create_placeholder.py (100%) rename client/ayon_core/hosts/nuke/plugins/{template => workfile_build}/load_placeholder.py (100%) diff --git a/client/ayon_core/hosts/aftereffects/api/pipeline.py b/client/ayon_core/hosts/aftereffects/api/pipeline.py index 214986a2fc..754f952fb4 100644 --- a/client/ayon_core/hosts/aftereffects/api/pipeline.py +++ b/client/ayon_core/hosts/aftereffects/api/pipeline.py @@ -37,7 +37,7 @@ PLUGINS_DIR = os.path.join(HOST_DIR, "plugins") PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish") LOAD_PATH = os.path.join(PLUGINS_DIR, "load") CREATE_PATH = os.path.join(PLUGINS_DIR, "create") -TEMPLATE_PLUGINS_PATH = os.path.join(PLUGINS_DIR, "template") +WORKFILE_BUILD_PLUGIN_PATH = os.path.join(PLUGINS_DIR, "workfile_build") class AfterEffectsHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost): @@ -74,7 +74,7 @@ class AfterEffectsHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost): register_loader_plugin_path(LOAD_PATH) register_creator_plugin_path(CREATE_PATH) - register_template_placeholder_plugin_path(TEMPLATE_PLUGINS_PATH) + register_template_placeholder_plugin_path(WORKFILE_BUILD_PLUGIN_PATH) register_event_callback("application.launched", application_launch) diff --git a/client/ayon_core/hosts/aftereffects/plugins/template/create_placeholder.py b/client/ayon_core/hosts/aftereffects/plugins/workfile_build/create_placeholder.py similarity index 100% rename from client/ayon_core/hosts/aftereffects/plugins/template/create_placeholder.py rename to client/ayon_core/hosts/aftereffects/plugins/workfile_build/create_placeholder.py diff --git a/client/ayon_core/hosts/aftereffects/plugins/template/load_placeholder.py b/client/ayon_core/hosts/aftereffects/plugins/workfile_build/load_placeholder.py similarity index 100% rename from client/ayon_core/hosts/aftereffects/plugins/template/load_placeholder.py rename to client/ayon_core/hosts/aftereffects/plugins/workfile_build/load_placeholder.py diff --git a/client/ayon_core/hosts/maya/api/pipeline.py b/client/ayon_core/hosts/maya/api/pipeline.py index eca98fa306..4b993b6fbd 100644 --- a/client/ayon_core/hosts/maya/api/pipeline.py +++ b/client/ayon_core/hosts/maya/api/pipeline.py @@ -65,7 +65,7 @@ PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish") LOAD_PATH = os.path.join(PLUGINS_DIR, "load") CREATE_PATH = os.path.join(PLUGINS_DIR, "create") INVENTORY_PATH = os.path.join(PLUGINS_DIR, 
"inventory") -TEMPLATE_PLUGINS_PATH = os.path.join(PLUGINS_DIR, "template") +WORKFILE_BUILD_PLUGIN_PATH = os.path.join(PLUGINS_DIR, "workfile_build") AVALON_CONTAINERS = ":AVALON_CONTAINERS" @@ -95,7 +95,7 @@ class MayaHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost): register_loader_plugin_path(LOAD_PATH) register_creator_plugin_path(CREATE_PATH) register_inventory_action_path(INVENTORY_PATH) - register_template_placeholder_plugin_path(TEMPLATE_PLUGINS_PATH) + register_template_placeholder_plugin_path(WORKFILE_BUILD_PLUGIN_PATH) self.log.info("Installing callbacks ... ") register_event_callback("init", on_init) @@ -335,7 +335,7 @@ def uninstall(): deregister_loader_plugin_path(LOAD_PATH) deregister_creator_plugin_path(CREATE_PATH) deregister_inventory_action_path(INVENTORY_PATH) - deregister_template_placeholder_plugin_path(TEMPLATE_PLUGINS_PATH) + deregister_template_placeholder_plugin_path(WORKFILE_BUILD_PLUGIN_PATH) menu.uninstall() diff --git a/client/ayon_core/hosts/maya/plugins/template/load_placeholder.py b/client/ayon_core/hosts/maya/plugins/workfile_build/load_placeholder.py similarity index 100% rename from client/ayon_core/hosts/maya/plugins/template/load_placeholder.py rename to client/ayon_core/hosts/maya/plugins/workfile_build/load_placeholder.py diff --git a/client/ayon_core/hosts/nuke/api/pipeline.py b/client/ayon_core/hosts/nuke/api/pipeline.py index bdf601e30d..ccb0f12ff9 100644 --- a/client/ayon_core/hosts/nuke/api/pipeline.py +++ b/client/ayon_core/hosts/nuke/api/pipeline.py @@ -75,7 +75,7 @@ PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish") LOAD_PATH = os.path.join(PLUGINS_DIR, "load") CREATE_PATH = os.path.join(PLUGINS_DIR, "create") INVENTORY_PATH = os.path.join(PLUGINS_DIR, "inventory") -TEMPLATE_PLUGINS_PATH = os.path.join(PLUGINS_DIR, "template") +WORKFILE_BUILD_PLUGIN_PATH = os.path.join(PLUGINS_DIR, "workfile_build") # registering pyblish gui regarding settings in presets if os.getenv("PYBLISH_GUI", None): @@ -118,7 +118,7 @@ class NukeHost( register_loader_plugin_path(LOAD_PATH) register_creator_plugin_path(CREATE_PATH) register_inventory_action_path(INVENTORY_PATH) - register_template_placeholder_plugin_path(TEMPLATE_PLUGINS_PATH) + register_template_placeholder_plugin_path(WORKFILE_BUILD_PLUGIN_PATH) # Register AYON event for workfiles loading. 
register_event_callback("workio.open_file", check_inventory_versions) diff --git a/client/ayon_core/hosts/nuke/plugins/template/create_placeholder.py b/client/ayon_core/hosts/nuke/plugins/workfile_build/create_placeholder.py similarity index 100% rename from client/ayon_core/hosts/nuke/plugins/template/create_placeholder.py rename to client/ayon_core/hosts/nuke/plugins/workfile_build/create_placeholder.py diff --git a/client/ayon_core/hosts/nuke/plugins/template/load_placeholder.py b/client/ayon_core/hosts/nuke/plugins/workfile_build/load_placeholder.py similarity index 100% rename from client/ayon_core/hosts/nuke/plugins/template/load_placeholder.py rename to client/ayon_core/hosts/nuke/plugins/workfile_build/load_placeholder.py From 57157f9a09dc62c8c6cecc9e5423b739f5310457 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Fri, 19 Apr 2024 22:31:51 +0200 Subject: [PATCH 46/67] Refactor `WORKFILE_BUILD_PLUGIN_PATH` -> `WORKFILE_BUILD_PATH` to match other constants that do not contain `PLUGIN` --- client/ayon_core/hosts/aftereffects/api/pipeline.py | 4 ++-- client/ayon_core/hosts/maya/api/pipeline.py | 6 +++--- client/ayon_core/hosts/nuke/api/pipeline.py | 4 ++-- 3 files changed, 7 insertions(+), 7 deletions(-) diff --git a/client/ayon_core/hosts/aftereffects/api/pipeline.py b/client/ayon_core/hosts/aftereffects/api/pipeline.py index 754f952fb4..6b213822f3 100644 --- a/client/ayon_core/hosts/aftereffects/api/pipeline.py +++ b/client/ayon_core/hosts/aftereffects/api/pipeline.py @@ -37,7 +37,7 @@ PLUGINS_DIR = os.path.join(HOST_DIR, "plugins") PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish") LOAD_PATH = os.path.join(PLUGINS_DIR, "load") CREATE_PATH = os.path.join(PLUGINS_DIR, "create") -WORKFILE_BUILD_PLUGIN_PATH = os.path.join(PLUGINS_DIR, "workfile_build") +WORKFILE_BUILD_PATH = os.path.join(PLUGINS_DIR, "workfile_build") class AfterEffectsHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost): @@ -74,7 +74,7 @@ class AfterEffectsHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost): register_loader_plugin_path(LOAD_PATH) register_creator_plugin_path(CREATE_PATH) - register_template_placeholder_plugin_path(WORKFILE_BUILD_PLUGIN_PATH) + register_template_placeholder_plugin_path(WORKFILE_BUILD_PATH) register_event_callback("application.launched", application_launch) diff --git a/client/ayon_core/hosts/maya/api/pipeline.py b/client/ayon_core/hosts/maya/api/pipeline.py index 4b993b6fbd..257c822e0b 100644 --- a/client/ayon_core/hosts/maya/api/pipeline.py +++ b/client/ayon_core/hosts/maya/api/pipeline.py @@ -65,7 +65,7 @@ PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish") LOAD_PATH = os.path.join(PLUGINS_DIR, "load") CREATE_PATH = os.path.join(PLUGINS_DIR, "create") INVENTORY_PATH = os.path.join(PLUGINS_DIR, "inventory") -WORKFILE_BUILD_PLUGIN_PATH = os.path.join(PLUGINS_DIR, "workfile_build") +WORKFILE_BUILD_PATH = os.path.join(PLUGINS_DIR, "workfile_build") AVALON_CONTAINERS = ":AVALON_CONTAINERS" @@ -95,7 +95,7 @@ class MayaHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost): register_loader_plugin_path(LOAD_PATH) register_creator_plugin_path(CREATE_PATH) register_inventory_action_path(INVENTORY_PATH) - register_template_placeholder_plugin_path(WORKFILE_BUILD_PLUGIN_PATH) + register_template_placeholder_plugin_path(WORKFILE_BUILD_PATH) self.log.info("Installing callbacks ... 
") register_event_callback("init", on_init) @@ -335,7 +335,7 @@ def uninstall(): deregister_loader_plugin_path(LOAD_PATH) deregister_creator_plugin_path(CREATE_PATH) deregister_inventory_action_path(INVENTORY_PATH) - deregister_template_placeholder_plugin_path(WORKFILE_BUILD_PLUGIN_PATH) + deregister_template_placeholder_plugin_path(WORKFILE_BUILD_PATH) menu.uninstall() diff --git a/client/ayon_core/hosts/nuke/api/pipeline.py b/client/ayon_core/hosts/nuke/api/pipeline.py index ccb0f12ff9..f5e48eb375 100644 --- a/client/ayon_core/hosts/nuke/api/pipeline.py +++ b/client/ayon_core/hosts/nuke/api/pipeline.py @@ -75,7 +75,7 @@ PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish") LOAD_PATH = os.path.join(PLUGINS_DIR, "load") CREATE_PATH = os.path.join(PLUGINS_DIR, "create") INVENTORY_PATH = os.path.join(PLUGINS_DIR, "inventory") -WORKFILE_BUILD_PLUGIN_PATH = os.path.join(PLUGINS_DIR, "workfile_build") +WORKFILE_BUILD_PATH = os.path.join(PLUGINS_DIR, "workfile_build") # registering pyblish gui regarding settings in presets if os.getenv("PYBLISH_GUI", None): @@ -118,7 +118,7 @@ class NukeHost( register_loader_plugin_path(LOAD_PATH) register_creator_plugin_path(CREATE_PATH) register_inventory_action_path(INVENTORY_PATH) - register_template_placeholder_plugin_path(WORKFILE_BUILD_PLUGIN_PATH) + register_template_placeholder_plugin_path(WORKFILE_BUILD_PATH) # Register AYON event for workfiles loading. register_event_callback("workio.open_file", check_inventory_versions) From 6b2b28d7d235c1da4d684e638c863305ed3b2d35 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Thu, 25 Apr 2024 14:57:58 +0800 Subject: [PATCH 47/67] make sure the collect render layer is just collecting beauty render with the global aov mode disabled in Arnold renderer --- .../ayon_core/hosts/maya/api/lib_renderproducts.py | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/client/ayon_core/hosts/maya/api/lib_renderproducts.py b/client/ayon_core/hosts/maya/api/lib_renderproducts.py index 832d1c21c2..b949845f1d 100644 --- a/client/ayon_core/hosts/maya/api/lib_renderproducts.py +++ b/client/ayon_core/hosts/maya/api/lib_renderproducts.py @@ -588,6 +588,20 @@ class RenderProductsArnold(ARenderProducts): "Unrecognized arnold driver format " "for AOV - {}".format(aov_name) ) + global_aov_enabled = bool( + self._get_attr( + "defaultArnoldRenderOptions.aovMode", as_string=False) + ) + if not global_aov_enabled: + for camera in cameras: + products.insert(0, + RenderProduct(productName="", + ext=ext, + driver=ai_driver, + multipart=self.multipart, + camera=camera, + colorspace=colorspace)) + return products # If aov RGBA is selected, arnold will translate it to `beauty` name = aov_name From ba1242316e228ffe98d022fc27ad81202469c3b4 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Thu, 25 Apr 2024 15:12:04 +0800 Subject: [PATCH 48/67] add asstring argment in the existing aovs_enabled --- .../hosts/maya/api/lib_renderproducts.py | 17 ++--------------- 1 file changed, 2 insertions(+), 15 deletions(-) diff --git a/client/ayon_core/hosts/maya/api/lib_renderproducts.py b/client/ayon_core/hosts/maya/api/lib_renderproducts.py index b949845f1d..52c282c6de 100644 --- a/client/ayon_core/hosts/maya/api/lib_renderproducts.py +++ b/client/ayon_core/hosts/maya/api/lib_renderproducts.py @@ -588,20 +588,6 @@ class RenderProductsArnold(ARenderProducts): "Unrecognized arnold driver format " "for AOV - {}".format(aov_name) ) - global_aov_enabled = bool( - self._get_attr( - "defaultArnoldRenderOptions.aovMode", as_string=False) - ) - if not 
global_aov_enabled: - for camera in cameras: - products.insert(0, - RenderProduct(productName="", - ext=ext, - driver=ai_driver, - multipart=self.multipart, - camera=camera, - colorspace=colorspace)) - return products # If aov RGBA is selected, arnold will translate it to `beauty` name = aov_name @@ -734,7 +720,8 @@ class RenderProductsArnold(ARenderProducts): # AOVs > Legacy > Maya Render View > Mode aovs_enabled = bool( - self._get_attr("defaultArnoldRenderOptions.aovMode") + self._get_attr( + "defaultArnoldRenderOptions.aovMode", as_string=False) ) if not aovs_enabled: return beauty_products From 129070aefec7a6b855f80b95933845298fb2465e Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Thu, 25 Apr 2024 17:53:29 +0800 Subject: [PATCH 49/67] make sure dict for Alembic Extractors having the attributes for enabling it --- .../hosts/max/plugins/publish/extract_alembic.py | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/client/ayon_core/hosts/max/plugins/publish/extract_alembic.py b/client/ayon_core/hosts/max/plugins/publish/extract_alembic.py index 67b5174200..67cec23ecc 100644 --- a/client/ayon_core/hosts/max/plugins/publish/extract_alembic.py +++ b/client/ayon_core/hosts/max/plugins/publish/extract_alembic.py @@ -53,6 +53,7 @@ class ExtractAlembic(publish.Extractor, hosts = ["max"] families = ["pointcache"] optional = True + active = True def process(self, instance): if not self.is_active(instance.data): @@ -102,24 +103,27 @@ class ExtractAlembic(publish.Extractor, @classmethod def get_attribute_defs(cls): - return [ + defs = super(ExtractAlembic, cls).get_attribute_defs() + defs.extend([ BoolDef("custom_attrs", label="Custom Attributes", default=False), - ] + ]) + return defs class ExtractCameraAlembic(ExtractAlembic): """Extract Camera with AlembicExport.""" - label = "Extract Alembic Camera" families = ["camera"] + optional = True -class ExtractModel(ExtractAlembic): +class ExtractModelAlembic(ExtractAlembic): """Extract Geometry in Alembic Format""" label = "Extract Geometry (Alembic)" families = ["model"] + optional = True def _set_abc_attributes(self, instance): attr_values = self.get_attr_values_from_data(instance.data) From 8c2bc797e654873e50964e25a23bb2a1b85b22d2 Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Fri, 26 Apr 2024 11:38:57 +0200 Subject: [PATCH 50/67] fix import in create ayon addons --- server_addon/create_ayon_addons.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/server_addon/create_ayon_addons.py b/server_addon/create_ayon_addons.py index 79b9aa5450..f0a36d4740 100644 --- a/server_addon/create_ayon_addons.py +++ b/server_addon/create_ayon_addons.py @@ -5,7 +5,7 @@ import shutil import argparse import zipfile import types -import importlib +import importlib.machinery import platform import collections from pathlib import Path From 561021195d2e686a7a1f9667ce0244a0d5a9b969 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Fri, 26 Apr 2024 18:31:58 +0800 Subject: [PATCH 51/67] rename node callback added to detect the renaming of the asset --- client/ayon_core/hosts/max/api/lib.py | 27 ++++++++++++++++++++++ client/ayon_core/hosts/max/api/pipeline.py | 2 ++ 2 files changed, 29 insertions(+) diff --git a/client/ayon_core/hosts/max/api/lib.py b/client/ayon_core/hosts/max/api/lib.py index 02b099b3ff..974b483eac 100644 --- a/client/ayon_core/hosts/max/api/lib.py +++ b/client/ayon_core/hosts/max/api/lib.py @@ -519,6 +519,33 @@ def get_plugins() -> list: return plugin_info_list +def 
update_modifier_node_names(event, node): + """Update the name of the nodes after renaming + + Args: + event (pymxs.MXSWrapperBase): Event Name ( + Mandatory argument for rt.NodeEventCallback) + node (list): Event Number ( + Mandatory argument for rt.NodeEventCallback) + + """ + containers = [ + obj for obj in rt.Objects + if rt.ClassOf(obj) == rt.Container and + rt.getUserProp(obj, "id") == "pyblish.avalon.instance" + and rt.getUserProp( + obj, "productType") not in {"workfile", "tyflow"} + ] + if not containers: + return + for container in containers: + ayon_data = container.modifiers[0] + updated_node_names = [str(node.node) for node + in ayon_data.openPypeData.all_handles] + rt.setProperty( + ayon_data.openPypeData, "sel_list", updated_node_names) + + @contextlib.contextmanager def render_resolution(width, height): """Set render resolution option during context diff --git a/client/ayon_core/hosts/max/api/pipeline.py b/client/ayon_core/hosts/max/api/pipeline.py index 675f36c24f..dc13f47795 100644 --- a/client/ayon_core/hosts/max/api/pipeline.py +++ b/client/ayon_core/hosts/max/api/pipeline.py @@ -63,6 +63,8 @@ class MaxHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost): rt.callbacks.addScript(rt.Name('postWorkspaceChange'), self._deferred_menu_creation) + rt.NodeEventCallback( + nameChanged=lib.update_modifier_node_names) def workfile_has_unsaved_changes(self): return rt.getSaveRequired() From 43d2a78170057f22284e67080f48f4553a19d360 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Fri, 26 Apr 2024 20:41:30 +0800 Subject: [PATCH 52/67] clean up the code --- client/ayon_core/hosts/max/api/lib.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/client/ayon_core/hosts/max/api/lib.py b/client/ayon_core/hosts/max/api/lib.py index 974b483eac..a6f1c2d2de 100644 --- a/client/ayon_core/hosts/max/api/lib.py +++ b/client/ayon_core/hosts/max/api/lib.py @@ -539,11 +539,10 @@ def update_modifier_node_names(event, node): if not containers: return for container in containers: - ayon_data = container.modifiers[0] + ayon_data = container.modifiers[0].openPypeData updated_node_names = [str(node.node) for node - in ayon_data.openPypeData.all_handles] - rt.setProperty( - ayon_data.openPypeData, "sel_list", updated_node_names) + in ayon_data.all_handles] + rt.setProperty(ayon_data, "sel_list", updated_node_names) @contextlib.contextmanager From bff416ecad14863b4d04a01fb1cb7e4580010468 Mon Sep 17 00:00:00 2001 From: MustafaJafar Date: Fri, 26 Apr 2024 17:13:45 +0300 Subject: [PATCH 53/67] Do nothing if workfile color settings don't exist - add a note about it inside the hook --- .../hosts/houdini/hooks/set_default_display_and_view.py | 7 ++++++- .../hosts/houdini/plugins/create/create_review.py | 4 ++-- .../houdini/plugins/publish/validate_review_colorspace.py | 4 ++-- 3 files changed, 10 insertions(+), 5 deletions(-) diff --git a/client/ayon_core/hosts/houdini/hooks/set_default_display_and_view.py b/client/ayon_core/hosts/houdini/hooks/set_default_display_and_view.py index 2e97c06bff..31bb5c1c5d 100644 --- a/client/ayon_core/hosts/houdini/hooks/set_default_display_and_view.py +++ b/client/ayon_core/hosts/houdini/hooks/set_default_display_and_view.py @@ -25,7 +25,12 @@ class SetDefaultDisplayView(PreLaunchHook): return houdini_color_settings = \ - self.data["project_settings"]["houdini"]["imageio"]["workfile"] + self.data["project_settings"]["houdini"]["imageio"].get("workfile", {}) + + if not houdini_color_settings: + self.log.info("Hook 'SetDefaultDisplayView' requires Houdini " + 
"addon version >= '0.2.13'") + return if not houdini_color_settings["enabled"]: self.log.info( diff --git a/client/ayon_core/hosts/houdini/plugins/create/create_review.py b/client/ayon_core/hosts/houdini/plugins/create/create_review.py index 94dcf23181..4a00ed4d37 100644 --- a/client/ayon_core/hosts/houdini/plugins/create/create_review.py +++ b/client/ayon_core/hosts/houdini/plugins/create/create_review.py @@ -18,8 +18,8 @@ class CreateReview(plugin.HoudiniCreator): def apply_settings(self, project_settings): super(CreateReview, self).apply_settings(project_settings) - color_settings = project_settings["houdini"]["imageio"]["workfile"] - if color_settings["enabled"]: + color_settings = project_settings["houdini"]["imageio"].get("workfile", {}) + if color_settings and color_settings["enabled"]: self.review_color_space = color_settings.get("review_color_space") def create(self, product_name, instance_data, pre_create_data): diff --git a/client/ayon_core/hosts/houdini/plugins/publish/validate_review_colorspace.py b/client/ayon_core/hosts/houdini/plugins/publish/validate_review_colorspace.py index d3afa83b67..3b70aea894 100644 --- a/client/ayon_core/hosts/houdini/plugins/publish/validate_review_colorspace.py +++ b/client/ayon_core/hosts/houdini/plugins/publish/validate_review_colorspace.py @@ -46,8 +46,8 @@ class ValidateReviewColorspace(pyblish.api.InstancePlugin, apply_plugin_settings_automatically(cls, settings, logger=cls.log) # Add review color settings - color_settings = project_settings["houdini"]["imageio"]["workfile"] - if color_settings["enabled"]: + color_settings = project_settings["houdini"]["imageio"].get("workfile", {}) + if color_settings and color_settings["enabled"]: cls.review_color_space = color_settings.get("review_color_space") From 7a2b77ce9ae946bbae3f5edfa876322a24dfb855 Mon Sep 17 00:00:00 2001 From: Toke Stuart Jepsen Date: Mon, 29 Apr 2024 08:16:05 +0100 Subject: [PATCH 54/67] Account for no placeholder set. --- client/ayon_core/hosts/maya/api/workfile_template_builder.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/client/ayon_core/hosts/maya/api/workfile_template_builder.py b/client/ayon_core/hosts/maya/api/workfile_template_builder.py index 75386d7e64..b8759e0740 100644 --- a/client/ayon_core/hosts/maya/api/workfile_template_builder.py +++ b/client/ayon_core/hosts/maya/api/workfile_template_builder.py @@ -263,6 +263,11 @@ class MayaPlaceholderLoadPlugin(PlaceholderPlugin, PlaceholderLoadMixin): # Hide placeholder and add them to placeholder set node = placeholder.scene_identifier + # If we just populate the placeholders from current scene, the + # placeholder set will not be created so account for that. 
+ if not cmds.objExists(PLACEHOLDER_SET): + cmds.sets(name=PLACEHOLDER_SET, empty=True) + cmds.sets(node, addElement=PLACEHOLDER_SET) cmds.hide(node) cmds.setAttr(node + ".hiddenInOutliner", True) From 5018db2f084ad7995d4827d6b326859a4eafa132 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Mon, 29 Apr 2024 17:54:40 +0800 Subject: [PATCH 55/67] cosmetic fix - Jakub'scomment --- client/ayon_core/hosts/max/api/lib.py | 14 +++++++++----- 1 file changed, 9 insertions(+), 5 deletions(-) diff --git a/client/ayon_core/hosts/max/api/lib.py b/client/ayon_core/hosts/max/api/lib.py index a6f1c2d2de..ea17d1df05 100644 --- a/client/ayon_core/hosts/max/api/lib.py +++ b/client/ayon_core/hosts/max/api/lib.py @@ -530,11 +530,15 @@ def update_modifier_node_names(event, node): """ containers = [ - obj for obj in rt.Objects - if rt.ClassOf(obj) == rt.Container and - rt.getUserProp(obj, "id") == "pyblish.avalon.instance" - and rt.getUserProp( - obj, "productType") not in {"workfile", "tyflow"} + obj + for obj in rt.Objects + if ( + rt.ClassOf(obj) == rt.Container + and rt.getUserProp(obj, "id") == "pyblish.avalon.instance" + and rt.getUserProp(obj, "productType") not in { + "workfile", "tyflow" + } + ) ] if not containers: return From 24b590d592914e312234f00937238dcfe5e1dfc9 Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Mon, 29 Apr 2024 14:28:46 +0200 Subject: [PATCH 56/67] define compatibility of applications addon --- server_addon/applications/package.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/server_addon/applications/package.py b/server_addon/applications/package.py index ce312ed662..43a301b7c2 100644 --- a/server_addon/applications/package.py +++ b/server_addon/applications/package.py @@ -1,3 +1,10 @@ name = "applications" title = "Applications" version = "0.2.0" + +ayon_server_version = ">=1.0.7" +ayon_launcher_version = ">=1.0.2" +ayon_required_addons = { + "core": ">0.3.0", +} +ayon_compatible_addons = {} From 2059ffd74275b024a48021b3e3e5fdd7dfedee25 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Mon, 29 Apr 2024 21:12:03 +0800 Subject: [PATCH 57/67] fix the malfunctioning issue in maxscene loader --- client/ayon_core/hosts/max/api/lib.py | 4 ++-- client/ayon_core/hosts/max/plugins/load/load_max_scene.py | 6 +++--- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/client/ayon_core/hosts/max/api/lib.py b/client/ayon_core/hosts/max/api/lib.py index 02b099b3ff..4f365cb1c1 100644 --- a/client/ayon_core/hosts/max/api/lib.py +++ b/client/ayon_core/hosts/max/api/lib.py @@ -496,9 +496,9 @@ def object_transform_set(container_children): """ transform_set = {} for node in container_children: - name = f"{node.name}.transform" + name = f"{node}.transform" transform_set[name] = node.pos - name = f"{node.name}.scale" + name = f"{node}.scale" transform_set[name] = node.scale return transform_set diff --git a/client/ayon_core/hosts/max/plugins/load/load_max_scene.py b/client/ayon_core/hosts/max/plugins/load/load_max_scene.py index 4f982dd5ba..97b8c6cd52 100644 --- a/client/ayon_core/hosts/max/plugins/load/load_max_scene.py +++ b/client/ayon_core/hosts/max/plugins/load/load_max_scene.py @@ -117,7 +117,7 @@ class MaxSceneLoader(load.LoaderPlugin): ) for max_obj, obj_name in zip(max_objects, max_object_names): max_obj.name = f"{namespace}:{obj_name}" - max_container.append(rt.getNodeByName(max_obj.name)) + max_container.append(max_obj) return containerise( name, max_container, context, namespace, loader=self.__class__.__name__) @@ -158,11 +158,11 
@@ class MaxSceneLoader(load.LoaderPlugin): current_max_object_names): max_obj.name = f"{namespace}:{obj_name}" max_objects.append(max_obj) - max_transform = f"{max_obj.name}.transform" + max_transform = f"{max_obj}.transform" if max_transform in transform_data.keys(): max_obj.pos = transform_data[max_transform] or 0 max_obj.scale = transform_data[ - f"{max_obj.name}.scale"] or 0 + f"{max_obj}.scale"] or 0 update_custom_attribute_data(node, max_objects) lib.imprint(container["instance_node"], { From 72028abc2c70955fb25a63e6628cdaa656e97a50 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 29 Apr 2024 15:55:36 +0200 Subject: [PATCH 58/67] Fix import --- .../ayon_core/hosts/maya/tools/mayalookassigner/vray_proxies.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/client/ayon_core/hosts/maya/tools/mayalookassigner/vray_proxies.py b/client/ayon_core/hosts/maya/tools/mayalookassigner/vray_proxies.py index 88ef4b201a..c1d9f019e4 100644 --- a/client/ayon_core/hosts/maya/tools/mayalookassigner/vray_proxies.py +++ b/client/ayon_core/hosts/maya/tools/mayalookassigner/vray_proxies.py @@ -7,7 +7,7 @@ from maya import cmds import ayon_api from ayon_core.pipeline import get_current_project_name -import ayon_core.hosts.maya.lib as maya_lib +import ayon_core.hosts.maya.api.lib as maya_lib from . import lib from .alembic import get_alembic_ids_cache From 7db14f47c16c9e7b736b430d587bb7cd559aa757 Mon Sep 17 00:00:00 2001 From: MustafaJafar Date: Mon, 29 Apr 2024 18:13:57 +0300 Subject: [PATCH 59/67] imporve code logic --- .../hosts/houdini/hooks/set_default_display_and_view.py | 3 ++- .../hosts/houdini/plugins/create/create_review.py | 7 +++++-- .../houdini/plugins/publish/validate_review_colorspace.py | 7 +++++-- 3 files changed, 12 insertions(+), 5 deletions(-) diff --git a/client/ayon_core/hosts/houdini/hooks/set_default_display_and_view.py b/client/ayon_core/hosts/houdini/hooks/set_default_display_and_view.py index 31bb5c1c5d..7d41979600 100644 --- a/client/ayon_core/hosts/houdini/hooks/set_default_display_and_view.py +++ b/client/ayon_core/hosts/houdini/hooks/set_default_display_and_view.py @@ -24,8 +24,9 @@ class SetDefaultDisplayView(PreLaunchHook): if not OCIO: return + # workfile settings added in '0.2.13' houdini_color_settings = \ - self.data["project_settings"]["houdini"]["imageio"].get("workfile", {}) + self.data["project_settings"]["houdini"]["imageio"].get("workfile") if not houdini_color_settings: self.log.info("Hook 'SetDefaultDisplayView' requires Houdini " diff --git a/client/ayon_core/hosts/houdini/plugins/create/create_review.py b/client/ayon_core/hosts/houdini/plugins/create/create_review.py index 4a00ed4d37..336a1c9318 100644 --- a/client/ayon_core/hosts/houdini/plugins/create/create_review.py +++ b/client/ayon_core/hosts/houdini/plugins/create/create_review.py @@ -18,8 +18,11 @@ class CreateReview(plugin.HoudiniCreator): def apply_settings(self, project_settings): super(CreateReview, self).apply_settings(project_settings) - color_settings = project_settings["houdini"]["imageio"].get("workfile", {}) - if color_settings and color_settings["enabled"]: + # workfile settings added in '0.2.13' + color_settings = project_settings["houdini"]["imageio"].get( + "workfile", {} + ) + if not color_settings.get("enabled"): self.review_color_space = color_settings.get("review_color_space") def create(self, product_name, instance_data, pre_create_data): diff --git a/client/ayon_core/hosts/houdini/plugins/publish/validate_review_colorspace.py 
b/client/ayon_core/hosts/houdini/plugins/publish/validate_review_colorspace.py index 3b70aea894..cdbdba5361 100644 --- a/client/ayon_core/hosts/houdini/plugins/publish/validate_review_colorspace.py +++ b/client/ayon_core/hosts/houdini/plugins/publish/validate_review_colorspace.py @@ -45,9 +45,12 @@ class ValidateReviewColorspace(pyblish.api.InstancePlugin, category="houdini") apply_plugin_settings_automatically(cls, settings, logger=cls.log) + # workfile settings added in '0.2.13' + color_settings = project_settings["houdini"]["imageio"].get( + "workfile", {} + ) # Add review color settings - color_settings = project_settings["houdini"]["imageio"].get("workfile", {}) - if color_settings and color_settings["enabled"]: + if not color_settings.get("enabled"): cls.review_color_space = color_settings.get("review_color_space") From 40cc4d2b98b6e7c59febea0e3bd040e05e8fb138 Mon Sep 17 00:00:00 2001 From: MustafaJafar Date: Mon, 29 Apr 2024 21:32:54 +0300 Subject: [PATCH 60/67] fix color_settings condition --- client/ayon_core/hosts/houdini/plugins/create/create_review.py | 2 +- .../hosts/houdini/plugins/publish/validate_review_colorspace.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/client/ayon_core/hosts/houdini/plugins/create/create_review.py b/client/ayon_core/hosts/houdini/plugins/create/create_review.py index 336a1c9318..f5e4d4ce64 100644 --- a/client/ayon_core/hosts/houdini/plugins/create/create_review.py +++ b/client/ayon_core/hosts/houdini/plugins/create/create_review.py @@ -22,7 +22,7 @@ class CreateReview(plugin.HoudiniCreator): color_settings = project_settings["houdini"]["imageio"].get( "workfile", {} ) - if not color_settings.get("enabled"): + if color_settings.get("enabled"): self.review_color_space = color_settings.get("review_color_space") def create(self, product_name, instance_data, pre_create_data): diff --git a/client/ayon_core/hosts/houdini/plugins/publish/validate_review_colorspace.py b/client/ayon_core/hosts/houdini/plugins/publish/validate_review_colorspace.py index cdbdba5361..e7f528ba57 100644 --- a/client/ayon_core/hosts/houdini/plugins/publish/validate_review_colorspace.py +++ b/client/ayon_core/hosts/houdini/plugins/publish/validate_review_colorspace.py @@ -50,7 +50,7 @@ class ValidateReviewColorspace(pyblish.api.InstancePlugin, "workfile", {} ) # Add review color settings - if not color_settings.get("enabled"): + if color_settings.get("enabled"): cls.review_color_space = color_settings.get("review_color_space") From 07bd2e21de2ad54316cf41b75396371cb2d2b4fe Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 30 Apr 2024 06:47:08 +0200 Subject: [PATCH 61/67] Resolve merge conflict --- .../hosts/maya/plugins/workfile_build/load_placeholder.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/client/ayon_core/hosts/maya/plugins/workfile_build/load_placeholder.py b/client/ayon_core/hosts/maya/plugins/workfile_build/load_placeholder.py index 5bfaae6500..cf4a350c36 100644 --- a/client/ayon_core/hosts/maya/plugins/workfile_build/load_placeholder.py +++ b/client/ayon_core/hosts/maya/plugins/workfile_build/load_placeholder.py @@ -255,7 +255,8 @@ class MayaPlaceholderLoadPlugin(PlaceholderPlugin, PlaceholderLoadMixin): if scene_parent: cmds.parent(node, scene_parent) else: - cmds.parent(node, world=True) + if cmds.listRelatives(node, parent=True): + cmds.parent(node, world=True) holding_sets = cmds.listSets(object=placeholder.scene_identifier) if not holding_sets: From 90ece1cdf98cabeec76cf52bc7854667818f8d3f Mon Sep 17 00:00:00 
2001 From: Roy Nieterau Date: Tue, 30 Apr 2024 06:48:50 +0200 Subject: [PATCH 62/67] Refactor `template_placeholder_plugin` -> `workfile_build_plugin` --- .../hosts/aftereffects/api/pipeline.py | 4 ++-- client/ayon_core/hosts/maya/api/pipeline.py | 8 ++++---- client/ayon_core/hosts/nuke/api/pipeline.py | 4 ++-- client/ayon_core/pipeline/__init__.py | 20 +++++++++---------- .../ayon_core/pipeline/workfile/__init__.py | 20 +++++++++---------- .../workfile/workfile_template_builder.py | 10 +++++----- 6 files changed, 33 insertions(+), 33 deletions(-) diff --git a/client/ayon_core/hosts/aftereffects/api/pipeline.py b/client/ayon_core/hosts/aftereffects/api/pipeline.py index 6b213822f3..2239040f09 100644 --- a/client/ayon_core/hosts/aftereffects/api/pipeline.py +++ b/client/ayon_core/hosts/aftereffects/api/pipeline.py @@ -8,7 +8,7 @@ from ayon_core.lib import Logger, register_event_callback from ayon_core.pipeline import ( register_loader_plugin_path, register_creator_plugin_path, - register_template_placeholder_plugin_path, + register_workfile_build_plugin_path, AVALON_CONTAINER_ID, AVALON_INSTANCE_ID, AYON_INSTANCE_ID, @@ -74,7 +74,7 @@ class AfterEffectsHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost): register_loader_plugin_path(LOAD_PATH) register_creator_plugin_path(CREATE_PATH) - register_template_placeholder_plugin_path(WORKFILE_BUILD_PATH) + register_workfile_build_plugin_path(WORKFILE_BUILD_PATH) register_event_callback("application.launched", application_launch) diff --git a/client/ayon_core/hosts/maya/api/pipeline.py b/client/ayon_core/hosts/maya/api/pipeline.py index 257c822e0b..74d73e5f95 100644 --- a/client/ayon_core/hosts/maya/api/pipeline.py +++ b/client/ayon_core/hosts/maya/api/pipeline.py @@ -30,11 +30,11 @@ from ayon_core.pipeline import ( register_loader_plugin_path, register_inventory_action_path, register_creator_plugin_path, - register_template_placeholder_plugin_path, + register_workfile_build_plugin_path, deregister_loader_plugin_path, deregister_inventory_action_path, deregister_creator_plugin_path, - deregister_template_placeholder_plugin_path, + deregister_workfile_build_plugin_path, AYON_CONTAINER_ID, AVALON_CONTAINER_ID, ) @@ -95,7 +95,7 @@ class MayaHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost): register_loader_plugin_path(LOAD_PATH) register_creator_plugin_path(CREATE_PATH) register_inventory_action_path(INVENTORY_PATH) - register_template_placeholder_plugin_path(WORKFILE_BUILD_PATH) + register_workfile_build_plugin_path(WORKFILE_BUILD_PATH) self.log.info("Installing callbacks ... 
") register_event_callback("init", on_init) @@ -335,7 +335,7 @@ def uninstall(): deregister_loader_plugin_path(LOAD_PATH) deregister_creator_plugin_path(CREATE_PATH) deregister_inventory_action_path(INVENTORY_PATH) - deregister_template_placeholder_plugin_path(WORKFILE_BUILD_PATH) + deregister_workfile_build_plugin_path(WORKFILE_BUILD_PATH) menu.uninstall() diff --git a/client/ayon_core/hosts/nuke/api/pipeline.py b/client/ayon_core/hosts/nuke/api/pipeline.py index f5e48eb375..d35a2e89e0 100644 --- a/client/ayon_core/hosts/nuke/api/pipeline.py +++ b/client/ayon_core/hosts/nuke/api/pipeline.py @@ -18,7 +18,7 @@ from ayon_core.pipeline import ( register_loader_plugin_path, register_creator_plugin_path, register_inventory_action_path, - register_template_placeholder_plugin_path, + register_workfile_build_plugin_path, AYON_INSTANCE_ID, AVALON_INSTANCE_ID, AVALON_CONTAINER_ID, @@ -118,7 +118,7 @@ class NukeHost( register_loader_plugin_path(LOAD_PATH) register_creator_plugin_path(CREATE_PATH) register_inventory_action_path(INVENTORY_PATH) - register_template_placeholder_plugin_path(WORKFILE_BUILD_PATH) + register_workfile_build_plugin_path(WORKFILE_BUILD_PATH) # Register AYON event for workfiles loading. register_event_callback("workio.open_file", check_inventory_versions) diff --git a/client/ayon_core/pipeline/__init__.py b/client/ayon_core/pipeline/__init__.py index 3102ce1da3..8fd00ee6b6 100644 --- a/client/ayon_core/pipeline/__init__.py +++ b/client/ayon_core/pipeline/__init__.py @@ -99,11 +99,11 @@ from .context_tools import ( ) from .workfile import ( - discover_template_placeholder_plugins, - register_template_placeholder_plugin, - deregister_template_placeholder_plugin, - register_template_placeholder_plugin_path, - deregister_template_placeholder_plugin_path, + discover_workfile_build_plugins, + register_workfile_build_plugin, + deregister_workfile_build_plugin, + register_workfile_build_plugin_path, + deregister_workfile_build_plugin_path, ) install = install_host @@ -208,11 +208,11 @@ __all__ = ( "get_current_task_name", # Workfile templates - "discover_template_placeholder_plugins", - "register_template_placeholder_plugin", - "deregister_template_placeholder_plugin", - "register_template_placeholder_plugin_path", - "deregister_template_placeholder_plugin_path", + "discover_workfile_build_plugins", + "register_workfile_build_plugin", + "deregister_workfile_build_plugin", + "register_workfile_build_plugin_path", + "deregister_workfile_build_plugin_path", # Backwards compatible function names "install", diff --git a/client/ayon_core/pipeline/workfile/__init__.py b/client/ayon_core/pipeline/workfile/__init__.py index 149036117a..05f939024c 100644 --- a/client/ayon_core/pipeline/workfile/__init__.py +++ b/client/ayon_core/pipeline/workfile/__init__.py @@ -22,11 +22,11 @@ from .build_workfile import BuildWorkfile from .workfile_template_builder import ( - discover_template_placeholder_plugins, - register_template_placeholder_plugin, - deregister_template_placeholder_plugin, - register_template_placeholder_plugin_path, - deregister_template_placeholder_plugin_path, + discover_workfile_build_plugins, + register_workfile_build_plugin, + deregister_workfile_build_plugin, + register_workfile_build_plugin_path, + deregister_workfile_build_plugin_path, ) @@ -49,9 +49,9 @@ __all__ = ( "BuildWorkfile", - "discover_template_placeholder_plugins", - "register_template_placeholder_plugin", - "deregister_template_placeholder_plugin", - "register_template_placeholder_plugin_path", - 
"deregister_template_placeholder_plugin_path", + "discover_workfile_build_plugins", + "register_workfile_build_plugin", + "deregister_workfile_build_plugin", + "register_workfile_build_plugin_path", + "deregister_workfile_build_plugin_path", ) diff --git a/client/ayon_core/pipeline/workfile/workfile_template_builder.py b/client/ayon_core/pipeline/workfile/workfile_template_builder.py index 642ccd1cbc..6d200cd7dd 100644 --- a/client/ayon_core/pipeline/workfile/workfile_template_builder.py +++ b/client/ayon_core/pipeline/workfile/workfile_template_builder.py @@ -1925,21 +1925,21 @@ class CreatePlaceholderItem(PlaceholderItem): self._failed_created_publish_instances.append(creator_data) -def discover_template_placeholder_plugins(*args, **kwargs): +def discover_workfile_build_plugins(*args, **kwargs): return discover(PlaceholderPlugin, *args, **kwargs) -def register_template_placeholder_plugin(plugin: PlaceholderPlugin): +def register_workfile_build_plugin(plugin: PlaceholderPlugin): register_plugin(PlaceholderPlugin, plugin) -def deregister_template_placeholder_plugin(plugin: PlaceholderPlugin): +def deregister_workfile_build_plugin(plugin: PlaceholderPlugin): deregister_plugin(PlaceholderPlugin, plugin) -def register_template_placeholder_plugin_path(path: str): +def register_workfile_build_plugin_path(path: str): register_plugin_path(PlaceholderPlugin, path) -def deregister_template_placeholder_plugin_path(path: str): +def deregister_workfile_build_plugin_path(path: str): deregister_plugin_path(PlaceholderPlugin, path) From b5f4a843d516bf505a73455a956bb81b3e5dbe31 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Tue, 30 Apr 2024 13:31:30 +0800 Subject: [PATCH 63/67] add more fps value support & supports to reset fps value based on the task entity --- client/ayon_core/hosts/maya/api/lib.py | 35 ++++++++++++++++++-------- 1 file changed, 24 insertions(+), 11 deletions(-) diff --git a/client/ayon_core/hosts/maya/api/lib.py b/client/ayon_core/hosts/maya/api/lib.py index 321bcbc0b5..017d0cd2c4 100644 --- a/client/ayon_core/hosts/maya/api/lib.py +++ b/client/ayon_core/hosts/maya/api/lib.py @@ -2520,7 +2520,17 @@ def set_scene_fps(fps, update=True): """ fps_mapping = { + # 2, 3, 4, 5, 6, 8, 10, 12, 16 + '2': '2fps', + '3': '3fps', + '4': '4fps', + '5': '5fps', + '6': '6fps', + '8': '8fps', + '10': '10fps', + '12': '12fps', '15': 'game', + '16': '16fps', '24': 'film', '25': 'pal', '30': 'ntsc', @@ -2612,21 +2622,24 @@ def get_fps_for_current_context(): Returns: Union[int, float]: FPS value. 
""" - - project_name = get_current_project_name() - folder_path = get_current_folder_path() - folder_entity = ayon_api.get_folder_by_path( - project_name, folder_path, fields={"attrib.fps"} - ) or {} - fps = folder_entity.get("attrib", {}).get("fps") + task_entity = get_current_task_entity(fields={"attrib"}) + fps = task_entity.get("attrib", {}).get("fps") if not fps: - project_entity = ayon_api.get_project( - project_name, fields=["attrib.fps"] + project_name = get_current_project_name() + folder_path = get_current_folder_path() + folder_entity = ayon_api.get_folder_by_path( + project_name, folder_path, fields={"attrib.fps"} ) or {} - fps = project_entity.get("attrib", {}).get("fps") + fps = folder_entity.get("attrib", {}).get("fps") if not fps: - fps = 25 + project_entity = ayon_api.get_project( + project_name, fields=["attrib.fps"] + ) or {} + fps = project_entity.get("attrib", {}).get("fps") + + if not fps: + fps = 25 return convert_to_maya_fps(fps) From 444b0d8fc7bacf5ad4517b061cf05c55ce998643 Mon Sep 17 00:00:00 2001 From: Kayla Man <64118225+moonyuet@users.noreply.github.com> Date: Tue, 30 Apr 2024 15:38:32 +0800 Subject: [PATCH 64/67] Update client/ayon_core/hosts/maya/api/lib.py Co-authored-by: Toke Jepsen --- client/ayon_core/hosts/maya/api/lib.py | 1 - 1 file changed, 1 deletion(-) diff --git a/client/ayon_core/hosts/maya/api/lib.py b/client/ayon_core/hosts/maya/api/lib.py index 017d0cd2c4..b8c9bedc60 100644 --- a/client/ayon_core/hosts/maya/api/lib.py +++ b/client/ayon_core/hosts/maya/api/lib.py @@ -2520,7 +2520,6 @@ def set_scene_fps(fps, update=True): """ fps_mapping = { - # 2, 3, 4, 5, 6, 8, 10, 12, 16 '2': '2fps', '3': '3fps', '4': '4fps', From e74a1d303e0d9cbed5bce2f1936301b2d65b15b7 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Tue, 30 Apr 2024 16:15:17 +0800 Subject: [PATCH 65/67] fix the bug encountered when getting transform for the asset when updating it --- client/ayon_core/hosts/max/plugins/load/load_model_fbx.py | 4 ++-- client/ayon_core/hosts/max/plugins/load/load_model_obj.py | 4 ++-- client/ayon_core/hosts/max/plugins/load/load_model_usd.py | 4 ++-- .../hosts/max/plugins/load/load_pointcache_ornatrix.py | 4 ++-- 4 files changed, 8 insertions(+), 8 deletions(-) diff --git a/client/ayon_core/hosts/max/plugins/load/load_model_fbx.py b/client/ayon_core/hosts/max/plugins/load/load_model_fbx.py index 82cad71c3e..6f5de20ae0 100644 --- a/client/ayon_core/hosts/max/plugins/load/load_model_fbx.py +++ b/client/ayon_core/hosts/max/plugins/load/load_model_fbx.py @@ -76,11 +76,11 @@ class FbxModelLoader(load.LoaderPlugin): for fbx_object in current_fbx_objects: fbx_object.name = f"{namespace}:{fbx_object.name}" fbx_objects.append(fbx_object) - fbx_transform = f"{fbx_object.name}.transform" + fbx_transform = f"{fbx_object}.transform" if fbx_transform in transform_data.keys(): fbx_object.pos = transform_data[fbx_transform] or 0 fbx_object.scale = transform_data[ - f"{fbx_object.name}.scale"] or 0 + f"{fbx_object}.scale"] or 0 with maintained_selection(): rt.Select(node) diff --git a/client/ayon_core/hosts/max/plugins/load/load_model_obj.py b/client/ayon_core/hosts/max/plugins/load/load_model_obj.py index 38f2cdf43c..a9119259df 100644 --- a/client/ayon_core/hosts/max/plugins/load/load_model_obj.py +++ b/client/ayon_core/hosts/max/plugins/load/load_model_obj.py @@ -67,11 +67,11 @@ class ObjLoader(load.LoaderPlugin): selections = rt.GetCurrentSelection() for selection in selections: selection.name = f"{namespace}:{selection.name}" - selection_transform = 
f"{selection.name}.transform" + selection_transform = f"{selection}.transform" if selection_transform in transform_data.keys(): selection.pos = transform_data[selection_transform] or 0 selection.scale = transform_data[ - f"{selection.name}.scale"] or 0 + f"{selection}.scale"] or 0 update_custom_attribute_data(node, selections) with maintained_selection(): rt.Select(node) diff --git a/client/ayon_core/hosts/max/plugins/load/load_model_usd.py b/client/ayon_core/hosts/max/plugins/load/load_model_usd.py index 2b946eb2aa..2ed5d64a18 100644 --- a/client/ayon_core/hosts/max/plugins/load/load_model_usd.py +++ b/client/ayon_core/hosts/max/plugins/load/load_model_usd.py @@ -95,11 +95,11 @@ class ModelUSDLoader(load.LoaderPlugin): for children in asset.Children: children.name = f"{namespace}:{children.name}" usd_objects.append(children) - children_transform = f"{children.name}.transform" + children_transform = f"{children}.transform" if children_transform in transform_data.keys(): children.pos = transform_data[children_transform] or 0 children.scale = transform_data[ - f"{children.name}.scale"] or 0 + f"{children}.scale"] or 0 asset.name = f"{namespace}:{asset.name}" usd_objects.append(asset) diff --git a/client/ayon_core/hosts/max/plugins/load/load_pointcache_ornatrix.py b/client/ayon_core/hosts/max/plugins/load/load_pointcache_ornatrix.py index 2efb7c7f62..47690f84e9 100644 --- a/client/ayon_core/hosts/max/plugins/load/load_pointcache_ornatrix.py +++ b/client/ayon_core/hosts/max/plugins/load/load_pointcache_ornatrix.py @@ -92,10 +92,10 @@ class OxAbcLoader(load.LoaderPlugin): abc.Parent = container abc.name = f"{namespace}:{abc.name}" ox_abc_objects.append(abc) - ox_transform = f"{abc.name}.transform" + ox_transform = f"{abc}.transform" if ox_transform in transform_data.keys(): abc.pos = transform_data[ox_transform] or 0 - abc.scale = transform_data[f"{abc.name}.scale"] or 0 + abc.scale = transform_data[f"{abc}.scale"] or 0 update_custom_attribute_data(node, ox_abc_objects) lib.imprint( container["instance_node"], From c50bd1498e820ed9af035578fbfe8f6c8ffb8854 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 30 Apr 2024 10:34:31 +0200 Subject: [PATCH 66/67] Apply suggestions from code review Co-authored-by: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> --- .../pipeline/workfile/workfile_template_builder.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/client/ayon_core/pipeline/workfile/workfile_template_builder.py b/client/ayon_core/pipeline/workfile/workfile_template_builder.py index 64f2ad1c18..fc4ba3fe98 100644 --- a/client/ayon_core/pipeline/workfile/workfile_template_builder.py +++ b/client/ayon_core/pipeline/workfile/workfile_template_builder.py @@ -741,7 +741,7 @@ class AbstractTemplateBuilder(object): placeholder.set_finished() # Trigger on_depth_processed event - self.trigger_event( + self.emit_event( topic="template.depth_processed", data={ "depth": iter_counter, @@ -769,7 +769,7 @@ class AbstractTemplateBuilder(object): placeholders.append(placeholder) # Trigger on_finished event - self.trigger_event( + self.emit_event( topic="template.finished", data={ "depth": iter_counter, @@ -912,7 +912,7 @@ class AbstractTemplateBuilder(object): return self._event_system.add_callback(topic, callback, order=order) def add_on_finished_callback( - self, callback, order=None + self, callback, order=None ) -> EventCallback: return self.add_event_callback( topic="template.finished", @@ -921,7 +921,7 @@ class AbstractTemplateBuilder(object): ) def 
add_on_depth_processed_callback( - self, callback, order=None + self, callback, order=None ) -> EventCallback: return self.add_event_callback( topic="template.depth_processed", From 8dd30e33efca0755a7241a1ea95e7b91bad1d2f1 Mon Sep 17 00:00:00 2001 From: Toke Stuart Jepsen Date: Tue, 30 Apr 2024 12:06:26 +0100 Subject: [PATCH 67/67] Update fix --- .../hosts/maya/plugins/workfile_build/load_placeholder.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/client/ayon_core/hosts/maya/plugins/workfile_build/load_placeholder.py b/client/ayon_core/hosts/maya/plugins/workfile_build/load_placeholder.py index cf4a350c36..5e73933722 100644 --- a/client/ayon_core/hosts/maya/plugins/workfile_build/load_placeholder.py +++ b/client/ayon_core/hosts/maya/plugins/workfile_build/load_placeholder.py @@ -187,6 +187,11 @@ class MayaPlaceholderLoadPlugin(PlaceholderPlugin, PlaceholderLoadMixin): # Hide placeholder and add them to placeholder set node = placeholder.scene_identifier + # If we just populate the placeholders from current scene, the + # placeholder set will not be created so account for that. + if not cmds.objExists(PLACEHOLDER_SET): + cmds.sets(name=PLACEHOLDER_SET, empty=True) + cmds.sets(node, addElement=PLACEHOLDER_SET) cmds.hide(node) cmds.setAttr(node + ".hiddenInOutliner", True)
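To summarize the series, below is a rough sketch of how a host integration might wire up the renamed workfile-build API once all of the above patches are applied. `register_workfile_build_plugin_path`, `add_on_depth_processed_callback`, `add_on_finished_callback` and the "plugins/workfile_build" folder name come from the diffs; `HOST_DIR`, the `install`/`build_with_progress` functions, the callback bodies and the `builder.build_template()` / `event.data` usage are assumptions for illustration only.

# Illustrative sketch, not a real host module.
import os

from ayon_core.pipeline import register_workfile_build_plugin_path

# Hypothetical host package layout.
HOST_DIR = os.path.dirname(os.path.abspath(__file__))
WORKFILE_BUILD_PATH = os.path.join(HOST_DIR, "plugins", "workfile_build")


def install():
    # Workfile build plugins now live in "plugins/workfile_build"
    # instead of the old "plugins/template" folder.
    register_workfile_build_plugin_path(WORKFILE_BUILD_PATH)


def build_with_progress(builder):
    """Build a workfile template while reporting per-depth progress.

    'builder' is assumed to be an instance of a host specific
    AbstractTemplateBuilder subclass.
    """
    def _on_depth_processed(event):
        # "template.depth_processed" is emitted once per processed depth.
        print("Processed depth:", event.data.get("depth"))

    # The registration helpers now return the registered EventCallback,
    # so the caller can keep a reference to it.
    callback = builder.add_on_depth_processed_callback(_on_depth_processed)
    builder.add_on_finished_callback(lambda event: print("Template build done"))
    builder.build_template()
    return callback

Returning the `EventCallback` (patch 44) lets calling code hold on to, and later dispose of, the registration it created rather than leaving every callback attached for the builder's lifetime.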