diff --git a/openpype/api.py b/openpype/api.py deleted file mode 100644 index b60cd21d2b..0000000000 --- a/openpype/api.py +++ /dev/null @@ -1,112 +0,0 @@ -from .settings import ( - get_system_settings, - get_project_settings, - get_current_project_settings, - get_anatomy_settings, - - SystemSettings, - ProjectSettings -) -from .lib import ( - PypeLogger, - Logger, - Anatomy, - execute, - run_subprocess, - version_up, - get_asset, - get_workdir_data, - get_version_from_path, - get_last_version_from_path, - get_app_environments_for_context, - source_hash, - get_latest_version, - get_local_site_id, - change_openpype_mongo_url, - create_project_folders, - get_project_basic_paths -) - -from .lib.mongo import ( - get_default_components -) - -from .lib.applications import ( - ApplicationManager -) - -from .lib.avalon_context import ( - BuildWorkfile -) - -from . import resources - -from .plugin import ( - Extractor, - - ValidatePipelineOrder, - ValidateContentsOrder, - ValidateSceneOrder, - ValidateMeshOrder, -) - -# temporary fix, might -from .action import ( - get_errored_instances_from_context, - RepairAction, - RepairContextAction -) - - -__all__ = [ - "get_system_settings", - "get_project_settings", - "get_current_project_settings", - "get_anatomy_settings", - "get_project_basic_paths", - - "SystemSettings", - "ProjectSettings", - - "PypeLogger", - "Logger", - "Anatomy", - "execute", - "get_default_components", - "ApplicationManager", - "BuildWorkfile", - - # Resources - "resources", - - # plugin classes - "Extractor", - # ordering - "ValidatePipelineOrder", - "ValidateContentsOrder", - "ValidateSceneOrder", - "ValidateMeshOrder", - # action - "get_errored_instances_from_context", - "RepairAction", - "RepairContextAction", - - # get contextual data - "version_up", - "get_asset", - "get_workdir_data", - "get_version_from_path", - "get_last_version_from_path", - "get_app_environments_for_context", - "source_hash", - - "run_subprocess", - "get_latest_version", - - "get_local_site_id", - "change_openpype_mongo_url", - - "get_project_basic_paths", - "create_project_folders" - -] diff --git a/openpype/host/dirmap.py b/openpype/host/dirmap.py index 88d68f27bf..347c5fbf85 100644 --- a/openpype/host/dirmap.py +++ b/openpype/host/dirmap.py @@ -8,6 +8,7 @@ exists is used. 
import os from abc import ABCMeta, abstractmethod +import platform import six @@ -187,11 +188,19 @@ class HostDirmap(object): self.log.debug("local overrides {}".format(active_overrides)) self.log.debug("remote overrides {}".format(remote_overrides)) + current_platform = platform.system().lower() for root_name, active_site_dir in active_overrides.items(): remote_site_dir = ( remote_overrides.get(root_name) or sync_settings["sites"][remote_site]["root"][root_name] ) + + if isinstance(remote_site_dir, dict): + remote_site_dir = remote_site_dir.get(current_platform) + + if not remote_site_dir: + continue + if os.path.isdir(active_site_dir): if "destination-path" not in mapping: mapping["destination-path"] = [] diff --git a/openpype/hosts/aftereffects/plugins/create/create_render.py b/openpype/hosts/aftereffects/plugins/create/create_render.py index 8d38288257..10ded8b912 100644 --- a/openpype/hosts/aftereffects/plugins/create/create_render.py +++ b/openpype/hosts/aftereffects/plugins/create/create_render.py @@ -11,9 +11,15 @@ from openpype.pipeline import ( ) from openpype.hosts.aftereffects.api.pipeline import cache_and_get_instances from openpype.lib import prepare_template_data +from openpype.pipeline.create import SUBSET_NAME_ALLOWED_SYMBOLS class RenderCreator(Creator): + """Creates 'render' instance for publishing. + + Result of 'render' instance is video or sequence of images for particular + composition based of configuration in its RenderQueue. + """ identifier = "render" label = "Render" family = "render" @@ -28,45 +34,6 @@ class RenderCreator(Creator): ["RenderCreator"] ["defaults"]) - def get_icon(self): - return resources.get_openpype_splash_filepath() - - def collect_instances(self): - for instance_data in cache_and_get_instances(self): - # legacy instances have family=='render' or 'renderLocal', use them - creator_id = (instance_data.get("creator_identifier") or - instance_data.get("family", '').replace("Local", '')) - if creator_id == self.identifier: - instance_data = self._handle_legacy(instance_data) - instance = CreatedInstance.from_existing( - instance_data, self - ) - self._add_instance_to_context(instance) - - def update_instances(self, update_list): - for created_inst, _changes in update_list: - api.get_stub().imprint(created_inst.get("instance_id"), - created_inst.data_to_store()) - subset_change = _changes.get("subset") - if subset_change: - api.get_stub().rename_item(created_inst.data["members"][0], - subset_change[1]) - - def remove_instances(self, instances): - for instance in instances: - self._remove_instance_from_context(instance) - self.host.remove_instance(instance) - - subset = instance.data["subset"] - comp_id = instance.data["members"][0] - comp = api.get_stub().get_item(comp_id) - if comp: - new_comp_name = comp.name.replace(subset, '') - if not new_comp_name: - new_comp_name = "dummyCompName" - api.get_stub().rename_item(comp_id, - new_comp_name) - def create(self, subset_name_from_ui, data, pre_create_data): stub = api.get_stub() # only after After Effects is up if pre_create_data.get("use_selection"): @@ -82,10 +49,19 @@ class RenderCreator(Creator): "if 'useSelection' or create at least " "one composition." 
             )
-
+        use_composition_name = (pre_create_data.get("use_composition_name") or
+                                len(comps) > 1)
         for comp in comps:
-            if pre_create_data.get("use_composition_name"):
-                composition_name = comp.name
+            if use_composition_name:
+                if "{composition}" not in subset_name_from_ui.lower():
+                    subset_name_from_ui += "{Composition}"
+
+                composition_name = re.sub(
+                    "[^{}]+".format(SUBSET_NAME_ALLOWED_SYMBOLS),
+                    "",
+                    comp.name
+                )
+
             dynamic_fill = prepare_template_data({"composition":
                                                   composition_name})
             subset_name = subset_name_from_ui.format(**dynamic_fill)
@@ -129,8 +105,72 @@ class RenderCreator(Creator):
         ]
         return output

+    def get_icon(self):
+        return resources.get_openpype_splash_filepath()
+
+    def collect_instances(self):
+        for instance_data in cache_and_get_instances(self):
+            # legacy instances have family=='render' or 'renderLocal', use them
+            creator_id = (instance_data.get("creator_identifier") or
+                          instance_data.get("family", '').replace("Local", ''))
+            if creator_id == self.identifier:
+                instance_data = self._handle_legacy(instance_data)
+                instance = CreatedInstance.from_existing(
+                    instance_data, self
+                )
+                self._add_instance_to_context(instance)
+
+    def update_instances(self, update_list):
+        for created_inst, _changes in update_list:
+            api.get_stub().imprint(created_inst.get("instance_id"),
+                                   created_inst.data_to_store())
+            subset_change = _changes.get("subset")
+            if subset_change:
+                api.get_stub().rename_item(created_inst.data["members"][0],
+                                           subset_change[1])
+
+    def remove_instances(self, instances):
+        for instance in instances:
+            self._remove_instance_from_context(instance)
+            self.host.remove_instance(instance)
+
+            subset = instance.data["subset"]
+            comp_id = instance.data["members"][0]
+            comp = api.get_stub().get_item(comp_id)
+            if comp:
+                new_comp_name = comp.name.replace(subset, '')
+                if not new_comp_name:
+                    new_comp_name = "dummyCompName"
+                api.get_stub().rename_item(comp_id,
+                                           new_comp_name)
+
     def get_detail_description(self):
-        return """Creator for Render instances"""
+        return """Creator for Render instances
+
+        Main publishable item in AfterEffects is of the `render` family.
+        Result of this item (instance) is a picture sequence or video that
+        could be a final delivery product or loaded and used in other DCCs.
+
+        Select a single composition and create a 'render' instance, or turn
+        off 'Use selection' to create instances for all compositions.
+
+        'Use composition name in subset' allows you to explicitly add the
+        composition name into the created subset name.
+
+        Position of the composition name can be set in
+        `project_settings/global/tools/creator/subset_name_profiles` with
+        some form of the '{composition}' placeholder.
+
+        The composition name is used implicitly if multiple compositions
+        should be handled at the same time.
+
+        If the {composition} placeholder is not used in
+        'subset_name_profiles', the composition name is capitalized and
+        appended to the end of the subset name if necessary.
+
+        If the composition name should be used, it is cleaned of characters
+        that would cause issues in published file names.
+ """ def get_dynamic_data(self, variant, task_name, asset_doc, project_name, host_name, instance): diff --git a/openpype/hosts/houdini/api/plugin.py b/openpype/hosts/houdini/api/plugin.py index 61127bda57..f0985973a6 100644 --- a/openpype/hosts/houdini/api/plugin.py +++ b/openpype/hosts/houdini/api/plugin.py @@ -225,12 +225,12 @@ class HoudiniCreator(NewCreator, HoudiniCreatorBase): self._add_instance_to_context(created_instance) def update_instances(self, update_list): - for created_inst, _changes in update_list: + for created_inst, changes in update_list: instance_node = hou.node(created_inst.get("instance_node")) new_values = { - key: new_value - for key, (_old_value, new_value) in _changes.items() + key: changes[key].new_value + for key in changes.changed_keys } imprint( instance_node, diff --git a/openpype/hosts/max/api/plugin.py b/openpype/hosts/max/api/plugin.py index 4788bfd383..c16d9e61ec 100644 --- a/openpype/hosts/max/api/plugin.py +++ b/openpype/hosts/max/api/plugin.py @@ -78,12 +78,12 @@ class MaxCreator(Creator, MaxCreatorBase): self._add_instance_to_context(created_instance) def update_instances(self, update_list): - for created_inst, _changes in update_list: + for created_inst, changes in update_list: instance_node = created_inst.get("instance_node") new_values = { - key: new_value - for key, (_old_value, new_value) in _changes.items() + key: changes[key].new_value + for key in changes.changed_keys } imprint( instance_node, diff --git a/openpype/hosts/maya/plugins/create/create_render.py b/openpype/hosts/maya/plugins/create/create_render.py index 8375149442..387b7321b9 100644 --- a/openpype/hosts/maya/plugins/create/create_render.py +++ b/openpype/hosts/maya/plugins/create/create_render.py @@ -54,6 +54,7 @@ class CreateRender(plugin.Creator): tileRendering (bool): Instance is set to tile rendering mode. We won't submit actual render, but we'll make publish job to wait for Tile Assembly job done and then publish. 
+ strict_error_checking (bool): Enable/disable error checking on DL See Also: https://pype.club/docs/artist_hosts_maya#creating-basic-render-setup @@ -271,6 +272,9 @@ class CreateRender(plugin.Creator): secondary_pool = pool_setting["secondary_pool"] self.data["secondaryPool"] = self._set_default_pool(pool_names, secondary_pool) + strict_error_checking = maya_submit_dl.get("strict_error_checking", + True) + self.data["strict_error_checking"] = strict_error_checking if muster_enabled: self.log.info(">>> Loading Muster credentials ...") diff --git a/openpype/hosts/maya/plugins/publish/collect_render.py b/openpype/hosts/maya/plugins/publish/collect_render.py index b1ad3ca58e..fc297ef612 100644 --- a/openpype/hosts/maya/plugins/publish/collect_render.py +++ b/openpype/hosts/maya/plugins/publish/collect_render.py @@ -318,7 +318,9 @@ class CollectMayaRender(pyblish.api.ContextPlugin): "aovSeparator": layer_render_products.layer_data.aov_separator, # noqa: E501 "renderSetupIncludeLights": render_instance.data.get( "renderSetupIncludeLights" - ) + ), + "strict_error_checking": render_instance.data.get( + "strict_error_checking") } # Collect Deadline url if Deadline module is enabled diff --git a/openpype/hosts/nuke/plugins/publish/collect_context_data.py b/openpype/hosts/nuke/plugins/publish/collect_context_data.py index 5a1cdcf49e..b487c946f0 100644 --- a/openpype/hosts/nuke/plugins/publish/collect_context_data.py +++ b/openpype/hosts/nuke/plugins/publish/collect_context_data.py @@ -1,7 +1,7 @@ import os import nuke import pyblish.api -import openpype.api as api +from openpype.lib import get_version_from_path import openpype.hosts.nuke.api as napi from openpype.pipeline import KnownPublishError @@ -57,7 +57,7 @@ class CollectContextData(pyblish.api.ContextPlugin): "fps": root_node['fps'].value(), "currentFile": current_file, - "version": int(api.get_version_from_path(current_file)), + "version": int(get_version_from_path(current_file)), "host": pyblish.api.current_host(), "hostVersion": nuke.NUKE_VERSION_STRING diff --git a/openpype/hosts/photoshop/plugins/create/create_image.py b/openpype/hosts/photoshop/plugins/create/create_image.py index ca3bbfd27c..cdea82cb05 100644 --- a/openpype/hosts/photoshop/plugins/create/create_image.py +++ b/openpype/hosts/photoshop/plugins/create/create_image.py @@ -5,7 +5,7 @@ from openpype.lib import BoolDef from openpype.pipeline import ( Creator, CreatedInstance, - legacy_io + CreatorError ) from openpype.lib import prepare_template_data from openpype.pipeline.create import SUBSET_NAME_ALLOWED_SYMBOLS @@ -13,27 +13,16 @@ from openpype.hosts.photoshop.api.pipeline import cache_and_get_instances class ImageCreator(Creator): - """Creates image instance for publishing.""" + """Creates image instance for publishing. + + Result of 'image' instance is image of all visible layers, or image(s) of + selected layers. 
+ """ identifier = "image" label = "Image" family = "image" description = "Image creator" - def collect_instances(self): - for instance_data in cache_and_get_instances(self): - # legacy instances have family=='image' - creator_id = (instance_data.get("creator_identifier") or - instance_data.get("family")) - - if creator_id == self.identifier: - instance_data = self._handle_legacy(instance_data) - layer = api.stub().get_layer(instance_data["members"][0]) - instance_data["layer"] = layer - instance = CreatedInstance.from_existing( - instance_data, self - ) - self._add_instance_to_context(instance) - def create(self, subset_name_from_ui, data, pre_create_data): groups_to_create = [] top_layers_to_wrap = [] @@ -59,9 +48,10 @@ class ImageCreator(Creator): try: group = stub.group_selected_layers(subset_name_from_ui) except: - raise ValueError("Cannot group locked Bakcground layer!") + raise CreatorError("Cannot group locked Background layer!") groups_to_create.append(group) + # create empty group if nothing selected if not groups_to_create and not top_layers_to_wrap: group = stub.create_group(subset_name_from_ui) groups_to_create.append(group) @@ -73,13 +63,16 @@ class ImageCreator(Creator): groups_to_create.append(group) layer_name = '' - creating_multiple_groups = len(groups_to_create) > 1 + # use artist chosen option OR force layer if more subsets are created + # to differentiate them + use_layer_name = (pre_create_data.get("use_layer_name") or + len(groups_to_create) > 1) for group in groups_to_create: subset_name = subset_name_from_ui # reset to name from creator UI layer_names_in_hierarchy = [] created_group_name = self._clean_highlights(stub, group.name) - if creating_multiple_groups: + if use_layer_name: layer_name = re.sub( "[^{}]+".format(SUBSET_NAME_ALLOWED_SYMBOLS), "", @@ -112,6 +105,21 @@ class ImageCreator(Creator): stub.rename_layer(group.id, stub.PUBLISH_ICON + created_group_name) + def collect_instances(self): + for instance_data in cache_and_get_instances(self): + # legacy instances have family=='image' + creator_id = (instance_data.get("creator_identifier") or + instance_data.get("family")) + + if creator_id == self.identifier: + instance_data = self._handle_legacy(instance_data) + layer = api.stub().get_layer(instance_data["members"][0]) + instance_data["layer"] = layer + instance = CreatedInstance.from_existing( + instance_data, self + ) + self._add_instance_to_context(instance) + def update_instances(self, update_list): self.log.debug("update_list:: {}".format(update_list)) for created_inst, _changes in update_list: @@ -137,12 +145,42 @@ class ImageCreator(Creator): label="Create only for selected"), BoolDef("create_multiple", default=True, - label="Create separate instance for each selected") + label="Create separate instance for each selected"), + BoolDef("use_layer_name", + default=False, + label="Use layer name in subset") ] return output def get_detail_description(self): - return """Creator for Image instances""" + return """Creator for Image instances + + Main publishable item in Photoshop will be of `image` family. Result of + this item (instance) is picture that could be loaded and used + in another DCCs (for example as single layer in composition in + AfterEffects, reference in Maya etc). 
+
+        There are a couple of options for what to publish:
+        - a separate image per selected layer (or group of layers)
+        - one image for all selected layers
+        - all visible layers (groups) flattened into a single image
+
+        In most cases you will want to keep `Create only for selected`
+        toggled on and select what you would like to publish.
+        Toggling this option off allows you to create an instance for all
+        visible layers without needing to select them explicitly.
+
+        Use 'Create separate instance for each selected' to create separate
+        images per selected layer (or group of layers).
+
+        'Use layer name in subset' explicitly adds the layer name into the
+        subset name. Position of this name is configurable in
+        `project_settings/global/tools/creator/subset_name_profiles`.
+        If the layer placeholder ({layer}) is not used in
+        `subset_name_profiles` but the layer name should be used (set
+        explicitly in the UI, or implicitly if multiple images are
+        created), it is added in capitalized form as a suffix to the
+        subset name.
+        """

     def _handle_legacy(self, instance_data):
         """Converts old instances to new format."""
diff --git a/openpype/hosts/traypublisher/api/pipeline.py b/openpype/hosts/traypublisher/api/pipeline.py
index 0a8ddaa343..3264f52b0f 100644
--- a/openpype/hosts/traypublisher/api/pipeline.py
+++ b/openpype/hosts/traypublisher/api/pipeline.py
@@ -37,7 +37,7 @@ class TrayPublisherHost(HostBase, IPublishHost):
         return HostContext.get_context_data()

     def update_context_data(self, data, changes):
-        HostContext.save_context_data(data, changes)
+        HostContext.save_context_data(data)

     def set_project_name(self, project_name):
         # TODO Deregister project specific plugins and register new project
diff --git a/openpype/hosts/traypublisher/plugins/create/create_movie_batch.py b/openpype/hosts/traypublisher/plugins/create/create_movie_batch.py
index 1dc4bad9b3..d077131e4c 100644
--- a/openpype/hosts/traypublisher/plugins/create/create_movie_batch.py
+++ b/openpype/hosts/traypublisher/plugins/create/create_movie_batch.py
@@ -33,6 +33,8 @@ class BatchMovieCreator(TrayPublishCreator):
     create_allow_context_change = False
     version_regex = re.compile(r"^(.+)_v([0-9]+)$")
+    # Position batch creator after simple creators
+    order = 110

     def __init__(self, project_settings, *args, **kwargs):
         super(BatchMovieCreator, self).__init__(project_settings,
diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py
index 070d4eab18..ed37ff1897 100644
--- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py
+++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py
@@ -64,6 +64,7 @@ class MayaPluginInfo(object):
     # Include all lights flag
     RenderSetupIncludeLights = attr.ib(
         default="1", validator=_validate_deadline_bool_value)
+    StrictErrorChecking = attr.ib(default=True)


 @attr.s
@@ -219,6 +220,8 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline):
             "renderSetupIncludeLights", default_rs_include_lights)
         if rs_include_lights not in {"1", "0", True, False}:
             rs_include_lights = default_rs_include_lights
+        strict_error_checking = instance.data.get("strict_error_checking",
+                                                  True)
         plugin_info = MayaPluginInfo(
             SceneFile=self.scene_path,
             Version=cmds.about(version=True),
@@ -227,6 +230,7 @@
             RenderSetupIncludeLights=rs_include_lights,  # noqa
             ProjectPath=context.data["workspaceDir"],
             UsingRenderLayers=True,
+            StrictErrorChecking=strict_error_checking
) plugin_payload = attr.asdict(plugin_info) diff --git a/openpype/modules/ftrack/ftrack_server/event_server_cli.py b/openpype/modules/ftrack/ftrack_server/event_server_cli.py index 25ebad6658..ad7ffd8e25 100644 --- a/openpype/modules/ftrack/ftrack_server/event_server_cli.py +++ b/openpype/modules/ftrack/ftrack_server/event_server_cli.py @@ -316,7 +316,7 @@ def main_loop(ftrack_url): statuser_failed_count = 0 # If thread failed test Ftrack and Mongo connection - elif not statuser_thread.isAlive(): + elif not statuser_thread.is_alive(): statuser_thread.join() statuser_thread = None ftrack_accessible = False @@ -359,7 +359,7 @@ def main_loop(ftrack_url): storer_failed_count = 0 # If thread failed test Ftrack and Mongo connection - elif not storer_thread.isAlive(): + elif not storer_thread.is_alive(): if storer_thread.mongo_error: raise MongoPermissionsError() storer_thread.join() @@ -396,7 +396,7 @@ def main_loop(ftrack_url): processor_failed_count = 0 # If thread failed test Ftrack and Mongo connection - elif not processor_thread.isAlive(): + elif not processor_thread.is_alive(): if processor_thread.mongo_error: raise Exception( "Exiting because have issue with acces to MongoDB" diff --git a/openpype/modules/ftrack/tray/login_dialog.py b/openpype/modules/ftrack/tray/login_dialog.py index 0e676545f7..f374a71178 100644 --- a/openpype/modules/ftrack/tray/login_dialog.py +++ b/openpype/modules/ftrack/tray/login_dialog.py @@ -259,7 +259,7 @@ class CredentialsDialog(QtWidgets.QDialog): # If there is an existing server thread running we need to stop it. if self._login_server_thread: - if self._login_server_thread.isAlive(): + if self._login_server_thread.is_alive(): self._login_server_thread.stop() self._login_server_thread.join() self._login_server_thread = None diff --git a/openpype/modules/slack/manifest.yml b/openpype/modules/slack/manifest.yml index 7a65cc5915..233c39fbaf 100644 --- a/openpype/modules/slack/manifest.yml +++ b/openpype/modules/slack/manifest.yml @@ -19,6 +19,8 @@ oauth_config: - chat:write.public - files:write - channels:read + - users:read + - usergroups:read settings: org_deploy_enabled: false socket_mode_enabled: false diff --git a/openpype/pipeline/create/context.py b/openpype/pipeline/create/context.py index f260e483d9..bce304ab55 100644 --- a/openpype/pipeline/create/context.py +++ b/openpype/pipeline/create/context.py @@ -183,6 +183,319 @@ def prepare_failed_creator_operation_info( } +_EMPTY_VALUE = object() + + +class TrackChangesItem(object): + """Helper object to track changes in data. + + Has access to full old and new data and will create deep copy of them, + so it is not needed to create copy before passed in. + + Can work as a dictionary if old or new value is a dictionary. In + that case received object is another object of 'TrackChangesItem'. + + Goal is to be able to get old or new value as was or only changed values + or get information about removed/changed keys, and all of that on + any "dictionary level". + + ``` + # Example of possible usages + >>> old_value = { + ... "key_1": "value_1", + ... "key_2": { + ... "key_sub_1": 1, + ... "key_sub_2": { + ... "enabled": True + ... } + ... }, + ... "key_3": "value_2" + ... } + >>> new_value = { + ... "key_1": "value_1", + ... "key_2": { + ... "key_sub_2": { + ... "enabled": False + ... }, + ... "key_sub_3": 3 + ... }, + ... "key_3": "value_3" + ... 
} + + >>> changes = TrackChangesItem(old_value, new_value) + >>> changes.changed + True + + >>> changes["key_2"]["key_sub_1"].new_value is None + True + + >>> list(sorted(changes.changed_keys)) + ['key_2', 'key_3'] + + >>> changes["key_2"]["key_sub_2"]["enabled"].changed + True + + >>> changes["key_2"].removed_keys + {'key_sub_1'} + + >>> list(sorted(changes["key_2"].available_keys)) + ['key_sub_1', 'key_sub_2', 'key_sub_3'] + + >>> changes.new_value == new_value + True + + # Get only changed values + only_changed_new_values = { + key: changes[key].new_value + for key in changes.changed_keys + } + ``` + + Args: + old_value (Any): Old value. + new_value (Any): New value. + """ + + def __init__(self, old_value, new_value): + self._changed = old_value != new_value + # Resolve if value is '_EMPTY_VALUE' after comparison of the values + if old_value is _EMPTY_VALUE: + old_value = None + if new_value is _EMPTY_VALUE: + new_value = None + self._old_value = copy.deepcopy(old_value) + self._new_value = copy.deepcopy(new_value) + + self._old_is_dict = isinstance(old_value, dict) + self._new_is_dict = isinstance(new_value, dict) + + self._old_keys = None + self._new_keys = None + self._available_keys = None + self._removed_keys = None + + self._changed_keys = None + + self._sub_items = None + + def __getitem__(self, key): + """Getter looks into subitems if object is dictionary.""" + + if self._sub_items is None: + self._prepare_sub_items() + return self._sub_items[key] + + def __bool__(self): + """Boolean of object is if old and new value are the same.""" + + return self._changed + + def get(self, key, default=None): + """Try to get sub item.""" + + if self._sub_items is None: + self._prepare_sub_items() + return self._sub_items.get(key, default) + + @property + def old_value(self): + """Get copy of old value. + + Returns: + Any: Whatever old value was. + """ + + return copy.deepcopy(self._old_value) + + @property + def new_value(self): + """Get copy of new value. + + Returns: + Any: Whatever new value was. + """ + + return copy.deepcopy(self._new_value) + + @property + def changed(self): + """Value changed. + + Returns: + bool: If data changed. + """ + + return self._changed + + @property + def is_dict(self): + """Object can be used as dictionary. + + Returns: + bool: When can be used that way. + """ + + return self._old_is_dict or self._new_is_dict + + @property + def changes(self): + """Get changes in raw data. + + This method should be used only if 'is_dict' value is 'True'. + + Returns: + Dict[str, Tuple[Any, Any]]: Changes are by key in tuple + (, ). If 'is_dict' is 'False' then + output is always empty dictionary. + """ + + output = {} + if not self.is_dict: + return output + + old_value = self.old_value + new_value = self.new_value + for key in self.changed_keys: + _old = None + _new = None + if self._old_is_dict: + _old = old_value.get(key) + if self._new_is_dict: + _new = new_value.get(key) + output[key] = (_old, _new) + return output + + # Methods/properties that can be used when 'is_dict' is 'True' + @property + def old_keys(self): + """Keys from old value. + + Empty set is returned if old value is not a dict. + + Returns: + Set[str]: Keys from old value. + """ + + if self._old_keys is None: + self._prepare_keys() + return set(self._old_keys) + + @property + def new_keys(self): + """Keys from new value. + + Empty set is returned if old value is not a dict. + + Returns: + Set[str]: Keys from new value. 
+ """ + + if self._new_keys is None: + self._prepare_keys() + return set(self._new_keys) + + @property + def changed_keys(self): + """Keys that has changed from old to new value. + + Empty set is returned if both old and new value are not a dict. + + Returns: + Set[str]: Keys of changed keys. + """ + + if self._changed_keys is None: + self._prepare_sub_items() + return set(self._changed_keys) + + @property + def available_keys(self): + """All keys that are available in old and new value. + + Empty set is returned if both old and new value are not a dict. + Output is Union of 'old_keys' and 'new_keys'. + + Returns: + Set[str]: All keys from old and new value. + """ + + if self._available_keys is None: + self._prepare_keys() + return set(self._available_keys) + + @property + def removed_keys(self): + """Key that are not available in new value but were in old value. + + Returns: + Set[str]: All removed keys. + """ + + if self._removed_keys is None: + self._prepare_sub_items() + return set(self._removed_keys) + + def _prepare_keys(self): + old_keys = set() + new_keys = set() + if self._old_is_dict and self._new_is_dict: + old_keys = set(self._old_value.keys()) + new_keys = set(self._new_value.keys()) + + elif self._old_is_dict: + old_keys = set(self._old_value.keys()) + + elif self._new_is_dict: + new_keys = set(self._new_value.keys()) + + self._old_keys = old_keys + self._new_keys = new_keys + self._available_keys = old_keys | new_keys + self._removed_keys = old_keys - new_keys + + def _prepare_sub_items(self): + sub_items = {} + changed_keys = set() + + old_keys = self.old_keys + new_keys = self.new_keys + new_value = self.new_value + old_value = self.old_value + if self._old_is_dict and self._new_is_dict: + for key in self.available_keys: + item = TrackChangesItem( + old_value.get(key), new_value.get(key) + ) + sub_items[key] = item + if item.changed or key not in old_keys or key not in new_keys: + changed_keys.add(key) + + elif self._old_is_dict: + old_keys = set(old_value.keys()) + available_keys = set(old_keys) + changed_keys = set(available_keys) + for key in available_keys: + # NOTE Use '_EMPTY_VALUE' because old value could be 'None' + # which would result in "unchanged" item + sub_items[key] = TrackChangesItem( + old_value.get(key), _EMPTY_VALUE + ) + + elif self._new_is_dict: + new_keys = set(new_value.keys()) + available_keys = set(new_keys) + changed_keys = set(available_keys) + for key in available_keys: + # NOTE Use '_EMPTY_VALUE' because new value could be 'None' + # which would result in "unchanged" item + sub_items[key] = TrackChangesItem( + _EMPTY_VALUE, new_value.get(key) + ) + + self._sub_items = sub_items + self._changed_keys = changed_keys + + class InstanceMember: """Representation of instance member. @@ -300,6 +613,10 @@ class AttributeValues(object): return list(self._attr_defs) + @property + def origin_data(self): + return copy.deepcopy(self._origin_data) + def data_to_store(self): """Create new dictionary with data to store. 
@@ -316,30 +633,6 @@ class AttributeValues(object): output[key] = attr_def.default return output - @staticmethod - def calculate_changes(new_data, old_data): - """Calculate changes of 2 dictionary objects.""" - - changes = {} - for key, new_value in new_data.items(): - old_value = old_data.get(key) - if old_value != new_value: - changes[key] = (old_value, new_value) - return changes - - def changes(self): - return self.calculate_changes(self._data, self._origin_data) - - def apply_changes(self, changes): - for key, item in changes.items(): - old_value, new_value = item - if new_value is None: - if key in self: - self.pop(key) - - elif self.get(key) != new_value: - self[key] = new_value - def get_serialized_attr_defs(self): """Serialize attribute definitions to json serializable types. @@ -467,36 +760,9 @@ class PublishAttributes: output[key] = attr_value.data_to_store() return output - def changes(self): - """Return changes per each key.""" - - changes = {} - for key, attr_val in self._data.items(): - attr_changes = attr_val.changes() - if attr_changes: - if key not in changes: - changes[key] = {} - changes[key].update(attr_val) - - for key, value in self._origin_data.items(): - if key not in self._data: - changes[key] = (value, None) - return changes - - def apply_changes(self, changes): - for key, item in changes.items(): - if isinstance(item, dict): - self._data[key].apply_changes(item) - continue - - old_value, new_value = item - if new_value is not None: - raise ValueError( - "Unexpected type \"{}\" expected None".format( - str(type(new_value)) - ) - ) - self.pop(key) + @property + def origin_data(self): + return copy.deepcopy(self._origin_data) def set_publish_plugins(self, attr_plugins): """Set publish plugins attribute definitions.""" @@ -763,6 +1029,10 @@ class CreatedInstance: return label return self._group_label + @property + def origin_data(self): + return copy.deepcopy(self._orig_data) + @property def creator_identifier(self): return self._data["creator_identifier"] @@ -817,29 +1087,7 @@ class CreatedInstance: def changes(self): """Calculate and return changes.""" - changes = {} - new_keys = set() - for key, new_value in self._data.items(): - new_keys.add(key) - if key in ("creator_attributes", "publish_attributes"): - continue - - old_value = self._orig_data.get(key) - if old_value != new_value: - changes[key] = (old_value, new_value) - - creator_attr_changes = self.creator_attributes.changes() - if creator_attr_changes: - changes["creator_attributes"] = creator_attr_changes - - publish_attr_changes = self.publish_attributes.changes() - if publish_attr_changes: - changes["publish_attributes"] = publish_attr_changes - - for key, old_value in self._orig_data.items(): - if key not in new_keys: - changes[key] = (old_value, None) - return changes + return TrackChangesItem(self._orig_data, self.data_to_store()) def mark_as_stored(self): """Should be called when instance data are stored. @@ -1002,59 +1250,6 @@ class CreatedInstance: return obj - def remote_changes(self): - """Prepare serializable changes on remote side. - - Returns: - Dict[str, Any]: Prepared changes that can be send to client side. - """ - - return { - "changes": self.changes(), - "asset_is_valid": self._asset_is_valid, - "task_is_valid": self._task_is_valid, - } - - def update_from_remote(self, remote_changes): - """Apply changes from remote side on client side. - - Args: - remote_changes (Dict[str, Any]): Changes created on remote side. 
- """ - - self._asset_is_valid = remote_changes["asset_is_valid"] - self._task_is_valid = remote_changes["task_is_valid"] - - changes = remote_changes["changes"] - creator_attributes = changes.pop("creator_attributes", None) or {} - publish_attributes = changes.pop("publish_attributes", None) or {} - if changes: - self.apply_changes(changes) - - if creator_attributes: - self.creator_attributes.apply_changes(creator_attributes) - - if publish_attributes: - self.publish_attributes.apply_changes(publish_attributes) - - def apply_changes(self, changes): - """Apply changes created via 'changes'. - - Args: - Dict[str, Tuple[Any, Any]]: Instance changes to apply. Same values - are kept untouched. - """ - - for key, item in changes.items(): - old_value, new_value = item - if new_value is None: - if key in self: - self.pop(key) - else: - current_value = self.get(key) - if current_value != new_value: - self[key] = new_value - # Context validation related methods/properties @property def has_set_asset(self): @@ -1237,6 +1432,53 @@ class CreateContext: """Access to global publish attributes.""" return self._publish_attributes + def get_sorted_creators(self, identifiers=None): + """Sorted creators by 'order' attribute. + + Args: + identifiers (Iterable[str]): Filter creators by identifiers. All + creators are returned if 'None' is passed. + + Returns: + List[BaseCreator]: Sorted creator plugins by 'order' value. + """ + + if identifiers is not None: + identifiers = set(identifiers) + creators = [ + creator + for identifier, creator in self.creators.items() + if identifier in identifiers + ] + else: + creators = self.creators.values() + + return sorted( + creators, key=lambda creator: creator.order + ) + + @property + def sorted_creators(self): + """Sorted creators by 'order' attribute. + + Returns: + List[BaseCreator]: Sorted creator plugins by 'order' value. + """ + + return self.get_sorted_creators() + + @property + def sorted_autocreators(self): + """Sorted auto-creators by 'order' attribute. + + Returns: + List[AutoCreator]: Sorted plugins by 'order' value. + """ + + return sorted( + self.autocreators.values(), key=lambda creator: creator.order + ) + @classmethod def get_host_misssing_methods(cls, host): """Collect missing methods from host. @@ -1515,11 +1757,10 @@ class CreateContext: def context_data_changes(self): """Changes of attributes.""" - changes = {} - publish_attribute_changes = self._publish_attributes.changes() - if publish_attribute_changes: - changes["publish_attributes"] = publish_attribute_changes - return changes + + return TrackChangesItem( + self._original_context_data, self.context_data_to_store() + ) def creator_adds_instance(self, instance): """Creator adds new instance to context. @@ -1599,6 +1840,9 @@ class CreateContext: ) ]) + def _remove_instance(self, instance): + self._instances_by_id.pop(instance.id, None) + def creator_removed_instance(self, instance): """When creator removes instance context should be acknowledged. @@ -1610,7 +1854,7 @@ class CreateContext: from scene metadata. """ - self._instances_by_id.pop(instance.id, None) + self._remove_instance(instance) def add_convertor_item(self, convertor_identifier, label): self.convertor_items_by_id[convertor_identifier] = ConvertorItem( @@ -1654,7 +1898,7 @@ class CreateContext: # Collect instances error_message = "Collection of instances for creator {} failed. 
{}" failed_info = [] - for creator in self.creators.values(): + for creator in self.sorted_creators: label = creator.label identifier = creator.identifier failed = False @@ -1726,7 +1970,8 @@ class CreateContext: error_message = "Failed to run AutoCreator with identifier \"{}\". {}" failed_info = [] - for identifier, creator in self.autocreators.items(): + for creator in self.sorted_autocreators: + identifier = creator.identifier label = creator.label failed = False add_traceback = False @@ -1831,19 +2076,26 @@ class CreateContext: """Save instance specific values.""" instances_by_identifier = collections.defaultdict(list) for instance in self._instances_by_id.values(): + instance_changes = instance.changes() + if not instance_changes: + continue + identifier = instance.creator_identifier - instances_by_identifier[identifier].append(instance) + instances_by_identifier[identifier].append( + UpdateData(instance, instance_changes) + ) + + if not instances_by_identifier: + return error_message = "Instances update of creator \"{}\" failed. {}" failed_info = [] - for identifier, creator_instances in instances_by_identifier.items(): - update_list = [] - for instance in creator_instances: - instance_changes = instance.changes() - if instance_changes: - update_list.append(UpdateData(instance, instance_changes)) - creator = self.creators[identifier] + for creator in self.get_sorted_creators( + instances_by_identifier.keys() + ): + identifier = creator.identifier + update_list = instances_by_identifier[identifier] if not update_list: continue @@ -1879,9 +2131,13 @@ class CreateContext: def remove_instances(self, instances): """Remove instances from context. + All instances that don't have creator identifier leading to existing + creator are just removed from context. + Args: - instances(list): Instances that should be removed - from context. + instances(List[CreatedInstance]): Instances that should be removed. + Remove logic is done using creator, which may require to + do other cleanup than just remove instance from context. """ instances_by_identifier = collections.defaultdict(list) @@ -1889,10 +2145,21 @@ class CreateContext: identifier = instance.creator_identifier instances_by_identifier[identifier].append(instance) + # Just remove instances from context if creator is not available + missing_creators = set(instances_by_identifier) - set(self.creators) + for identifier in missing_creators: + for instance in instances_by_identifier[identifier]: + self._remove_instance(instance) + error_message = "Instances removement of creator \"{}\" failed. {}" failed_info = [] - for identifier, creator_instances in instances_by_identifier.items(): - creator = self.creators.get(identifier) + # Remove instances by creator plugin order + for creator in self.get_sorted_creators( + instances_by_identifier.keys() + ): + identifier = creator.identifier + creator_instances = instances_by_identifier[identifier] + label = creator.label failed = False add_traceback = False @@ -1935,6 +2202,7 @@ class CreateContext: family(str): Instance family for which should be attribute definitions returned. """ + if family not in self._attr_plugins_by_family: import pyblish.logic @@ -1950,7 +2218,13 @@ class CreateContext: return self._attr_plugins_by_family[family] def _get_publish_plugins_with_attr_for_context(self): - """Publish plugins attributes for Context plugins.""" + """Publish plugins attributes for Context plugins. + + Returns: + List[pyblish.api.Plugin]: Publish plugins that have attribute + definitions for context. 
+ """ + plugins = [] for plugin in self.plugins_with_defs: if not plugin.__instanceEnabled__: @@ -1975,7 +2249,7 @@ class CreateContext: return self._collection_shared_data def run_convertor(self, convertor_identifier): - """Run convertor plugin by it's idenfitifier. + """Run convertor plugin by identifier. Conversion is skipped if convertor is not available. @@ -1988,7 +2262,7 @@ class CreateContext: convertor.convert() def run_convertors(self, convertor_identifiers): - """Run convertor plugins by idenfitifiers. + """Run convertor plugins by identifiers. Conversion is skipped if convertor is not available. It is recommended to trigger reset after conversion to reload instances. diff --git a/openpype/pipeline/create/creator_plugins.py b/openpype/pipeline/create/creator_plugins.py index 1f92056b23..53acb618ed 100644 --- a/openpype/pipeline/create/creator_plugins.py +++ b/openpype/pipeline/create/creator_plugins.py @@ -107,7 +107,11 @@ class SubsetConvertorPlugin(object): @property def create_context(self): - """Quick access to create context.""" + """Quick access to create context. + + Returns: + CreateContext: Context which initialized the plugin. + """ return self._create_context @@ -157,6 +161,10 @@ class BaseCreator: # Cached group label after first call 'get_group_label' _cached_group_label = None + # Order in which will be plugin executed (collect & update instances) + # less == earlier -> Order '90' will be processed before '100' + order = 100 + # Variable to store logger _log = None @@ -489,6 +497,17 @@ class Creator(BaseCreator): # - similar to instance attribute definitions pre_create_attr_defs = [] + @property + def show_order(self): + """Order in which is creator shown in UI. + + Returns: + int: Order in which is creator shown (less == earlier). By default + is using Creator's 'order' or processing. + """ + + return self.order + @abstractmethod def create(self, subset_name, instance_data, pre_create_data): """Create new instance and store it. diff --git a/openpype/pipeline/load/plugins.py b/openpype/pipeline/load/plugins.py index b5e55834db..9b891a4da3 100644 --- a/openpype/pipeline/load/plugins.py +++ b/openpype/pipeline/load/plugins.py @@ -2,7 +2,10 @@ import os import logging from openpype.settings import get_system_settings, get_project_settings -from openpype.pipeline import legacy_io +from openpype.pipeline import ( + schema, + legacy_io, +) from openpype.pipeline.plugin_discover import ( discover, register_plugin, @@ -79,6 +82,45 @@ class LoaderPlugin(list): print(" - setting `{}`: `{}`".format(option, value)) setattr(cls, option, value) + @classmethod + def is_compatible_loader(cls, context): + """Return whether a loader is compatible with a context. + + This checks the version's families and the representation for the given + Loader. 
+ + Returns: + bool + """ + + plugin_repre_names = cls.get_representations() + plugin_families = cls.families + if not plugin_repre_names or not plugin_families: + return False + + repre_doc = context.get("representation") + if not repre_doc: + return False + + plugin_repre_names = set(plugin_repre_names) + if ( + "*" not in plugin_repre_names + and repre_doc["name"] not in plugin_repre_names + ): + return False + + maj_version, _ = schema.get_schema_version(context["subset"]["schema"]) + if maj_version < 3: + families = context["version"]["data"].get("families", []) + else: + families = context["subset"]["data"]["families"] + + plugin_families = set(plugin_families) + return ( + "*" in plugin_families + or any(family in plugin_families for family in families) + ) + @classmethod def get_representations(cls): return cls.representations diff --git a/openpype/pipeline/load/utils.py b/openpype/pipeline/load/utils.py index e2b3675115..e30923f922 100644 --- a/openpype/pipeline/load/utils.py +++ b/openpype/pipeline/load/utils.py @@ -748,25 +748,9 @@ def is_compatible_loader(Loader, context): Returns: bool - """ - maj_version, _ = schema.get_schema_version(context["subset"]["schema"]) - if maj_version < 3: - families = context["version"]["data"].get("families", []) - else: - families = context["subset"]["data"]["families"] - representation = context["representation"] - has_family = ( - "*" in Loader.families or any( - family in Loader.families for family in families - ) - ) - representations = Loader.get_representations() - has_representation = ( - "*" in representations or representation["name"] in representations - ) - return has_family and has_representation + return Loader.is_compatible_loader(context) def loaders_from_repre_context(loaders, repre_context): diff --git a/openpype/settings/defaults/project_settings/deadline.json b/openpype/settings/defaults/project_settings/deadline.json index ceb0b2e39a..0a4318a659 100644 --- a/openpype/settings/defaults/project_settings/deadline.json +++ b/openpype/settings/defaults/project_settings/deadline.json @@ -33,7 +33,8 @@ "limit": [], "jobInfo": {}, "pluginInfo": {}, - "scene_patches": [] + "scene_patches": [], + "strict_error_checking": true }, "NukeSubmitDeadline": { "enabled": true, diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_deadline.json b/openpype/settings/entities/schemas/projects_schema/schema_project_deadline.json index 08a505bd47..03f6489a41 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_deadline.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_deadline.json @@ -195,6 +195,12 @@ ] } + }, + { + "type": "boolean", + "key": "strict_error_checking", + "label": "Strict Error Checking", + "default": true } ] }, diff --git a/openpype/tools/publisher/constants.py b/openpype/tools/publisher/constants.py index e9fdd4774a..b2bfd7dd5c 100644 --- a/openpype/tools/publisher/constants.py +++ b/openpype/tools/publisher/constants.py @@ -24,6 +24,7 @@ CREATOR_THUMBNAIL_ENABLED_ROLE = QtCore.Qt.UserRole + 5 FAMILY_ROLE = QtCore.Qt.UserRole + 6 GROUP_ROLE = QtCore.Qt.UserRole + 7 CONVERTER_IDENTIFIER_ROLE = QtCore.Qt.UserRole + 8 +CREATOR_SORT_ROLE = QtCore.Qt.UserRole + 9 __all__ = ( @@ -36,6 +37,7 @@ __all__ = ( "IS_GROUP_ROLE", "CREATOR_IDENTIFIER_ROLE", "CREATOR_THUMBNAIL_ENABLED_ROLE", + "CREATOR_SORT_ROLE", "FAMILY_ROLE", "GROUP_ROLE", "CONVERTER_IDENTIFIER_ROLE", diff --git a/openpype/tools/publisher/control.py b/openpype/tools/publisher/control.py index 
7c8da66744..435db5fcb3 100644 --- a/openpype/tools/publisher/control.py +++ b/openpype/tools/publisher/control.py @@ -832,7 +832,8 @@ class CreatorItem: default_variants, create_allow_context_change, create_allow_thumbnail, - pre_create_attributes_defs + show_order, + pre_create_attributes_defs, ): self.identifier = identifier self.creator_type = creator_type @@ -846,6 +847,7 @@ class CreatorItem: self.default_variants = default_variants self.create_allow_context_change = create_allow_context_change self.create_allow_thumbnail = create_allow_thumbnail + self.show_order = show_order self.pre_create_attributes_defs = pre_create_attributes_defs def get_group_label(self): @@ -869,6 +871,7 @@ class CreatorItem: pre_create_attr_defs = None create_allow_context_change = None create_allow_thumbnail = None + show_order = creator.order if creator_type is CreatorTypes.artist: description = creator.get_description() detail_description = creator.get_detail_description() @@ -877,6 +880,7 @@ class CreatorItem: pre_create_attr_defs = creator.get_pre_create_attr_defs() create_allow_context_change = creator.create_allow_context_change create_allow_thumbnail = creator.create_allow_thumbnail + show_order = creator.show_order identifier = creator.identifier return cls( @@ -892,7 +896,8 @@ class CreatorItem: default_variants, create_allow_context_change, create_allow_thumbnail, - pre_create_attr_defs + show_order, + pre_create_attr_defs, ) def to_data(self): @@ -915,6 +920,7 @@ class CreatorItem: "default_variants": self.default_variants, "create_allow_context_change": self.create_allow_context_change, "create_allow_thumbnail": self.create_allow_thumbnail, + "show_order": self.show_order, "pre_create_attributes_defs": pre_create_attributes_defs, } @@ -1502,9 +1508,6 @@ class BasePublisherController(AbstractPublisherController): def _reset_attributes(self): """Reset most of attributes that can be reset.""" - # Reset creator items - self._creator_items = None - self.publish_is_running = False self.publish_has_validated = False self.publish_has_crashed = False @@ -1760,6 +1763,8 @@ class PublisherController(BasePublisherController): self._resetting_plugins = True self._create_context.reset_plugins() + # Reset creator items + self._creator_items = None self._resetting_plugins = False diff --git a/openpype/tools/publisher/widgets/create_widget.py b/openpype/tools/publisher/widgets/create_widget.py index 07b124f616..dbf075c216 100644 --- a/openpype/tools/publisher/widgets/create_widget.py +++ b/openpype/tools/publisher/widgets/create_widget.py @@ -18,9 +18,10 @@ from .tasks_widget import CreateWidgetTasksWidget from .precreate_widget import PreCreateWidget from ..constants import ( VARIANT_TOOLTIP, - CREATOR_IDENTIFIER_ROLE, FAMILY_ROLE, + CREATOR_IDENTIFIER_ROLE, CREATOR_THUMBNAIL_ENABLED_ROLE, + CREATOR_SORT_ROLE, ) SEPARATORS = ("---separator---", "---") @@ -90,12 +91,19 @@ class CreatorShortDescWidget(QtWidgets.QWidget): self._description_label.setText(description) +class CreatorsProxyModel(QtCore.QSortFilterProxyModel): + def lessThan(self, left, right): + l_show_order = left.data(CREATOR_SORT_ROLE) + r_show_order = right.data(CREATOR_SORT_ROLE) + if l_show_order == r_show_order: + return super(CreatorsProxyModel, self).lessThan(left, right) + return l_show_order < r_show_order + + class CreateWidget(QtWidgets.QWidget): def __init__(self, controller, parent=None): super(CreateWidget, self).__init__(parent) - self.setWindowTitle("Create new instance") - self._controller = controller self._asset_name = None @@ 
-141,7 +149,7 @@ class CreateWidget(QtWidgets.QWidget): creators_view = QtWidgets.QListView(creators_view_widget) creators_model = QtGui.QStandardItemModel() - creators_sort_model = QtCore.QSortFilterProxyModel() + creators_sort_model = CreatorsProxyModel() creators_sort_model.setSourceModel(creators_model) creators_view.setModel(creators_sort_model) @@ -441,7 +449,8 @@ class CreateWidget(QtWidgets.QWidget): # Add new families new_creators = set() - for identifier, creator_item in self._controller.creator_items.items(): + creator_items_by_identifier = self._controller.creator_items + for identifier, creator_item in creator_items_by_identifier.items(): if creator_item.creator_type != "artist": continue @@ -457,6 +466,7 @@ class CreateWidget(QtWidgets.QWidget): self._creators_model.appendRow(item) item.setData(creator_item.label, QtCore.Qt.DisplayRole) + item.setData(creator_item.show_order, CREATOR_SORT_ROLE) item.setData(identifier, CREATOR_IDENTIFIER_ROLE) item.setData( creator_item.create_allow_thumbnail, @@ -482,8 +492,9 @@ class CreateWidget(QtWidgets.QWidget): index = indexes[0] identifier = index.data(CREATOR_IDENTIFIER_ROLE) + create_item = creator_items_by_identifier.get(identifier) - self._set_creator_by_identifier(identifier) + self._set_creator(create_item) def _on_plugins_refresh(self): # Trigger refresh only if is visible