From c895b096687b25e5e2232029ddf7a22081254cc4 Mon Sep 17 00:00:00 2001
From: Roy Nieterau
Date: Mon, 30 Jan 2023 11:15:36 +0100
Subject: [PATCH] Implement data persistence for renderlayers

---
 openpype/hosts/maya/api/plugin.py            | 208 +++++++++---------
 .../maya/plugins/create/create_render.py     | 114 ++++++----
 2 files changed, 183 insertions(+), 139 deletions(-)

diff --git a/openpype/hosts/maya/api/plugin.py b/openpype/hosts/maya/api/plugin.py
index e943d27920..16000d7f03 100644
--- a/openpype/hosts/maya/api/plugin.py
+++ b/openpype/hosts/maya/api/plugin.py
@@ -122,108 +122,7 @@ class Creator(LegacyCreator):
 
 @six.add_metaclass(ABCMeta)
-class MayaCreator(NewCreator):
-
-    def create(self, subset_name, instance_data, pre_create_data):
-
-        members = list()
-        if pre_create_data.get("use_selection"):
-            members = cmds.ls(selection=True)
-
-        with lib.undo_chunk():
-            instance_node = cmds.sets(members, name=subset_name)
-            instance_data["instance_node"] = instance_node
-            instance = CreatedInstance(
-                self.family,
-                subset_name,
-                instance_data,
-                self)
-            self._add_instance_to_context(instance)
-
-            self.imprint_instance_node(instance_node,
-                                       data=instance.data_to_store())
-            return instance
-
-    def collect_instances(self):
-        self.cache_subsets(self.collection_shared_data)
-        cached_subsets = self.collection_shared_data["maya_cached_subsets"]
-        for node in cached_subsets.get(self.identifier, []):
-            node_data = self.read_instance_node(node)
-
-            # Explicitly re-parse the node name
-            node_data["instance_node"] = node
-
-            created_instance = CreatedInstance.from_existing(node_data, self)
-            self._add_instance_to_context(created_instance)
-
-    def update_instances(self, update_list):
-        for created_inst, _changes in update_list:
-            data = created_inst.data_to_store()
-            node = data.get("instance_node")
-
-            self.imprint_instance_node(node, data)
-
-    def imprint_instance_node(self, node, data):
-
-        # We never store the instance_node as value on the node since
-        # it's the node name itself
-        data.pop("instance_node", None)
-
-        # We store creator attributes at the root level and assume they
-        # will not clash in names with `subset`, `task`, etc. and other
-        # default names. This is just so these attributes in many cases
-        # are still editable in the maya UI by artists.
-        data.update(data.pop("creator_attributes", {}))
-
-        # We know the "publish_attributes" will be complex data of
-        # settings per plugins, we'll store this as a flattened json structure
-        publish_attributes = json.dumps(data.get("publish_attributes", {}))
-        data.pop("publish_attributes", None)  # pop to move to end of dict
-        data["publish_attributes"] = publish_attributes
-
-        # Kill any existing attributes just we can imprint cleanly again
-        for attr in data.keys():
-            if cmds.attributeQuery(attr, node=node, exists=True):
-                cmds.deleteAttr("{}.{}".format(node, attr))
-
-        return imprint(node, data)
-
-    def read_instance_node(self, node):
-        node_data = read(node)
-
-        # Move the relevant attributes into "creator_attributes" that
-        # we flattened originally
-        node_data["creator_attributes"] = {}
-        for key, value in node_data.items():
-            if key not in CREATOR_INSTANCE_ATTRS:
-                node_data["creator_attributes"][key] = value
-
-        publish_attributes = node_data.get("publish_attributes")
-        if publish_attributes:
-            node_data["publish_attributes"] = json.loads(publish_attributes)
-
-        return node_data
-
-    def remove_instances(self, instances):
-        """Remove specified instance from the scene.
-
-        This is only removing `id` parameter so instance is no longer
-        instance, because it might contain valuable data for artist.
-
-        """
-        for instance in instances:
-            node = instance.data.get("instance_node")
-            if node:
-                cmds.delete(node)
-
-            self._remove_instance_from_context(instance)
-
-    def get_pre_create_attr_defs(self):
-        return [
-            BoolDef("use_selection",
-                    label="Use selection",
-                    default=True)
-        ]
+class MayaCreatorBase(object):
 
     @staticmethod
     def cache_subsets(shared_data):
@@ -270,6 +169,111 @@ class MayaCreator(NewCreator):
         shared_data["maya_cached_legacy_subsets"] = cache_legacy
         return shared_data
 
+    def imprint_instance_node(self, node, data):
+
+        # We never store the instance_node as value on the node since
+        # it's the node name itself
+        data.pop("instance_node", None)
+
+        # We store creator attributes at the root level and assume they
+        # will not clash in names with `subset`, `task`, etc. and other
+        # default names. This is just so these attributes in many cases
+        # are still editable in the maya UI by artists.
+        data.update(data.pop("creator_attributes", {}))
+
+        # We know the "publish_attributes" will be complex data of
+        # settings per plugin, so we'll store it as a flattened json structure
+        publish_attributes = json.dumps(data.get("publish_attributes", {}))
+        data.pop("publish_attributes", None)  # pop to move to end of dict
+        data["publish_attributes"] = publish_attributes
+
+        # Kill any existing attributes just so we can imprint cleanly again
+        for attr in data.keys():
+            if cmds.attributeQuery(attr, node=node, exists=True):
+                cmds.deleteAttr("{}.{}".format(node, attr))
+
+        return imprint(node, data)
+
+    def read_instance_node(self, node):
+        node_data = read(node)
+
+        # Move the relevant attributes into "creator_attributes" that
+        # we flattened originally
+        node_data["creator_attributes"] = {}
+        for key, value in node_data.items():
+            if key not in CREATOR_INSTANCE_ATTRS:
+                node_data["creator_attributes"][key] = value
+
+        publish_attributes = node_data.get("publish_attributes")
+        if publish_attributes:
+            node_data["publish_attributes"] = json.loads(publish_attributes)
+
+        # Explicitly re-parse the node name
+        node_data["instance_node"] = node
+
+        return node_data
+
+
+@six.add_metaclass(ABCMeta)
+class MayaCreator(NewCreator, MayaCreatorBase):
+
+    def create(self, subset_name, instance_data, pre_create_data):
+
+        members = list()
+        if pre_create_data.get("use_selection"):
+            members = cmds.ls(selection=True)
+
+        with lib.undo_chunk():
+            instance_node = cmds.sets(members, name=subset_name)
+            instance_data["instance_node"] = instance_node
+            instance = CreatedInstance(
+                self.family,
+                subset_name,
+                instance_data,
+                self)
+            self._add_instance_to_context(instance)
+
+            self.imprint_instance_node(instance_node,
+                                       data=instance.data_to_store())
+            return instance
+
+    def collect_instances(self):
+        self.cache_subsets(self.collection_shared_data)
+        cached_subsets = self.collection_shared_data["maya_cached_subsets"]
+        for node in cached_subsets.get(self.identifier, []):
+            node_data = self.read_instance_node(node)
+
+            created_instance = CreatedInstance.from_existing(node_data, self)
+            self._add_instance_to_context(created_instance)
+
+    def update_instances(self, update_list):
+        for created_inst, _changes in update_list:
+            data = created_inst.data_to_store()
+            node = data.get("instance_node")
+
+            self.imprint_instance_node(node, data)
+
+    def remove_instances(self, instances):
+        """Remove specified instances from the scene.
+
+        Only the instance's objectSet node is deleted; the set members are
+        kept, because they might contain valuable data for the artist.
+
+        """
+        for instance in instances:
+            node = instance.data.get("instance_node")
+            if node:
+                cmds.delete(node)
+
+            self._remove_instance_from_context(instance)
+
+    def get_pre_create_attr_defs(self):
+        return [
+            BoolDef("use_selection",
+                    label="Use selection",
+                    default=True)
+        ]
+
 
 class Loader(LoaderPlugin):
     hosts = ["maya"]
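For reference, the imprint/read pair above is essentially a flatten/unflatten round-trip: creator attributes are lifted to the node's top level and the nested per-plugin publish settings are serialized into a single JSON string, then read_instance_node reverses both steps. Below is a minimal standalone sketch of that idea using plain dicts; the helper names and the small attribute set are hypothetical stand-ins, not the actual OpenPype/Maya implementation.

import json

# Illustrative stand-in for the real CREATOR_INSTANCE_ATTRS constant
CREATOR_INSTANCE_ATTRS = {"id", "family", "subset", "asset", "task", "variant"}

def flatten_for_imprint(data):
    # Lift creator attributes to the root so they stay editable as plain attrs
    flat = dict(data)
    flat.update(flat.pop("creator_attributes", {}))
    # Serialize the nested per-plugin publish settings into one JSON string
    flat["publish_attributes"] = json.dumps(flat.pop("publish_attributes", {}))
    return flat

def unflatten_from_node(flat):
    # Reverse: unknown keys go back into "creator_attributes" and the
    # JSON string becomes a nested dict again
    data = {"creator_attributes": {}}
    for key, value in flat.items():
        if key == "publish_attributes":
            data[key] = json.loads(value)
        elif key in CREATOR_INSTANCE_ATTRS:
            data[key] = value
        else:
            data["creator_attributes"][key] = value
    return data

original = {
    "family": "render",
    "creator_attributes": {"review": True},
    "publish_attributes": {"ValidateMeshUVs": {"active": False}},
}
assert unflatten_from_node(flatten_for_imprint(original)) == original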
diff --git a/openpype/hosts/maya/plugins/create/create_render.py b/openpype/hosts/maya/plugins/create/create_render.py
index a119aff8a9..8565b4b212 100644
--- a/openpype/hosts/maya/plugins/create/create_render.py
+++ b/openpype/hosts/maya/plugins/create/create_render.py
@@ -147,52 +147,92 @@ class CreateRenderlayer(HiddenCreator, plugin.MayaCreatorBase):
         rs = renderSetup.instance()
         layers = rs.getRenderLayers()
         for layer in layers:
-            subset_name = "render" + layer.name()
+            layer_instance_node = self.find_layer_instance_node(layer)
+            if layer_instance_node:
+                data = self.read_instance_node(layer_instance_node)
+                instance = CreatedInstance.from_existing(data, creator=self)
+            else:
+                subset_name = "render" + layer.name()
 
-            instance_data = {
-                "asset": legacy_io.Session["AVALON_ASSET"],
-                "task": legacy_io.Session["AVALON_TASK"],
-                "variant": layer.name(),
-            }
+                instance_data = {
+                    "asset": legacy_io.Session["AVALON_ASSET"],
+                    "task": legacy_io.Session["AVALON_TASK"],
+                    "variant": layer.name(),
+                }
 
-            instance = CreatedInstance(
-                family=self.family,
-                subset_name=subset_name,
-                data=instance_data,
-                creator=self
-            )
+                instance = CreatedInstance(
+                    family=self.family,
+                    subset_name=subset_name,
+                    data=instance_data,
+                    creator=self
+                )
 
+            instance.transient_data["layer"] = layer
             self._add_instance_to_context(instance)
 
+    def find_layer_instance_node(self, layer):
+        connected_sets = cmds.listConnections(
+            "{}.message".format(layer.name()),
+            source=False,
+            destination=True,
+            type="objectSet"
+        ) or []
+
+        for node in connected_sets:
+            if not cmds.attributeQuery("creator_identifier",
+                                       node=node,
+                                       exists=True):
+                continue
+
+            creator_identifier = cmds.getAttr(node + ".creator_identifier")
+            if creator_identifier == self.identifier:
+                self.log.debug("Found node: {}".format(node))
+                return node
+
+    def _create_layer_instance_node(self, layer):
+
+        # We only create the instance node if a CreateRender instance exists
+        create_render_sets = lib.lsattr("pre_creator_identifier",
+                                        CreateRender.identifier)
+        if not create_render_sets:
+            raise CreatorError("Creating a renderlayer instance node is not "
+                               "allowed if no 'CreateRender' instance exists")
+        create_render_set = create_render_sets[0]
+
+        namespace = "_renderingMain"
+        namespace = ensure_namespace(namespace)
+
+        name = "{}:{}".format(namespace, layer.name())
+        render_set = cmds.sets(name=name, empty=True)
+
+        # Keep an active link with the renderlayer so we can retrieve it
+        # later via an actual Maya connection instead of relying on the
+        # layer name to still exist
+        cmds.addAttr(render_set, longName="_renderLayer", at="message")
+        cmds.connectAttr(layer.name() + ".message",
+                         render_set + "._renderLayer", force=True)
+
+        # Add the set to the 'CreateRender' set.
+        cmds.sets(render_set, forceElement=create_render_set)
+
+        return render_set
+
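# Hedged, standalone illustration of the lookup that find_layer_instance_node()
# above relies on: the instance objectSet is linked to the layer through a
# message connection, so it can be found again even if the layer is renamed.
# This is only a sketch to run in a Maya session; the "network" node stands in
# for the real renderSetupLayer node and every name here is hypothetical.
from maya import cmds

layer = cmds.createNode("network", name="demo_renderLayer")   # stand-in node
instance_set = cmds.sets(name="_renderingMain_demo", empty=True)

# Link the set to the layer via a message attribute, like the patch does
cmds.addAttr(instance_set, longName="_renderLayer", attributeType="message")
cmds.connectAttr(layer + ".message", instance_set + "._renderLayer")

# Later, the set can be recovered from the layer regardless of either name
found = cmds.listConnections(layer + ".message",
                             source=False,
                             destination=True,
                             type="objectSet") or []
assert instance_set in found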
     def update_instances(self, update_list):
         # We only generate the persisting layer data into the scene once
         # we save with the UI, e.g. on validate or publish
-        # TODO: Implement this behavior for data persistence
+        for instance, changes in update_list:
+            instance_node = instance.data.get("instance_node")
 
-        # # create namespace with instance
-        # namespace_name = "_{}".format(subset_name)
-        # namespace = ensure_namespace(namespace_name)
-        #
-        # # Pre-process any existing layers
-        # self.log.info("Processing existing layers")
-        # sets = []
-        # for layer in layers:
-        #     set_name = "{}:{}".format(namespace, layer.name())
-        #     self.log.info("  - creating set for {}".format(set_name))
-        #     render_set = cmds.sets(name=set_name, empty=True)
-        #     sets.append(render_set)
-        #
-        # cmds.sets(sets, forceElement=instance_node)
-        #
+            # Ensure a node to persist the data to exists
+            if not instance_node:
+                layer = instance.transient_data["layer"]
+                instance_node = self._create_layer_instance_node(layer)
+                instance.data["instance_node"] = instance_node
+            else:
+                # TODO: Keep name in sync with the actual renderlayer?
+                pass
 
-        # for instance, changes in update_list.items():
-        #     instance_node = instance.data.get("instance_node")
-        #     if not instance_node:
-        #         layer = instance.data.get("layer")
-        #         instance_node = self._create_layer_instance_node(layer)
-        #
-        #     self.imprint_instance_node(instance_node,
-        #                                data=instance.data_to_store())
-        pass
+            self.imprint_instance_node(instance_node,
+                                       data=instance.data_to_store())
 
     def remove_instances(self, instances):
         """Remove specified instance from the scene.