diff --git a/openpype/hosts/maya/plugins/publish/validate_look_members_unique.py b/openpype/hosts/maya/plugins/publish/validate_look_members_unique.py
deleted file mode 100644
index 4e01b55249..0000000000
--- a/openpype/hosts/maya/plugins/publish/validate_look_members_unique.py
+++ /dev/null
@@ -1,77 +0,0 @@
-from collections import defaultdict
-
-import pyblish.api
-
-import openpype.hosts.maya.api.action
-from openpype.pipeline.publish import (
-    PublishValidationError, ValidatePipelineOrder)
-
-
-class ValidateUniqueRelationshipMembers(pyblish.api.InstancePlugin):
-    """Validate the relational nodes of the look data to ensure every node is
-    unique.
-
-    This ensures the all member ids are unique. Every node id must be from
-    a single node in the scene.
-
-    That means there's only ever one of a specific node inside the look to be
-    published. For example if you'd have a loaded 3x the same tree and by
-    accident you're trying to publish them all together in a single look that
-    would be invalid, because they are the same tree. It should be included
-    inside the look instance only once.
-
-    """
-
-    order = ValidatePipelineOrder
-    label = 'Look members unique'
-    hosts = ['maya']
-    families = ['look']
-
-    actions = [openpype.hosts.maya.api.action.SelectInvalidAction,
-               openpype.hosts.maya.api.action.GenerateUUIDsOnInvalidAction]
-
-    def process(self, instance):
-        """Process all meshes"""
-
-        invalid = self.get_invalid(instance)
-        if invalid:
-            raise PublishValidationError(
-                ("Members found without non-unique IDs: "
-                 "{0}").format(invalid))
-
-    @staticmethod
-    def get_invalid(instance):
-        """
-        Check all the relationship members of the objectSets
-
-        Example of the lookData relationships:
-        {"uuid": 59b2bb27bda2cb2776206dd8:79ab0a63ffdf,
-         "members":[{"uuid": 59b2bb27bda2cb2776206dd8:1b158cc7496e,
-                     "name": |model_GRP|body_GES|body_GESShape}
-                     ...,
-                   ...]}
-
-        Args:
-            instance:
-
-        Returns:
-
-        """
-
-        # Get all members from the sets
-        id_nodes = defaultdict(set)
-        relationships = instance.data["lookData"]["relationships"]
-
-        for relationship in relationships.values():
-            for member in relationship['members']:
-                node_id = member["uuid"]
-                node = member["name"]
-                id_nodes[node_id].add(node)
-
-        # Check if any id has more than 1 node
-        invalid = []
-        for nodes in id_nodes.values():
-            if len(nodes) > 1:
-                invalid.extend(nodes)
-
-        return invalid
diff --git a/openpype/modules/deadline/plugins/publish/submit_nuke_deadline.py b/openpype/modules/deadline/plugins/publish/submit_nuke_deadline.py
index d03416ca00..746b009255 100644
--- a/openpype/modules/deadline/plugins/publish/submit_nuke_deadline.py
+++ b/openpype/modules/deadline/plugins/publish/submit_nuke_deadline.py
@@ -47,6 +47,7 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin,
     env_allowed_keys = []
     env_search_replace_values = {}
     workfile_dependency = True
+    use_published_workfile = True
 
     @classmethod
     def get_attribute_defs(cls):
@@ -85,8 +86,13 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin,
             ),
             BoolDef(
                 "workfile_dependency",
-                default=True,
+                default=cls.workfile_dependency,
                 label="Workfile Dependency"
+            ),
+            BoolDef(
+                "use_published_workfile",
+                default=cls.use_published_workfile,
+                label="Use Published Workfile"
             )
         ]
 
@@ -125,20 +131,11 @@
         render_path = instance.data['path']
        script_path = context.data["currentFile"]
 
-        for item_ in context:
-            if "workfile" in item_.data["family"]:
-                template_data = item_.data.get("anatomyData")
-                rep = item_.data.get("representations")[0].get("name")
-                template_data["representation"] = rep
-                template_data["ext"] = rep
-                template_data["comment"] = None
-                anatomy_filled = context.data["anatomy"].format(template_data)
-                template_filled = anatomy_filled["publish"]["path"]
-                script_path = os.path.normpath(template_filled)
-
-                self.log.info(
-                    "Using published scene for render {}".format(script_path)
-                )
+        use_published_workfile = instance.data["attributeValues"].get(
+            "use_published_workfile", self.use_published_workfile
+        )
+        if use_published_workfile:
+            script_path = self._get_published_workfile_path(context)
 
         # only add main rendering job if target is not frames_farm
         r_job_response_json = None
@@ -197,6 +194,44 @@
             families.insert(0, "prerender")
             instance.data["families"] = families
 
+    def _get_published_workfile_path(self, context):
+        """This method is temporary while the class is not inherited from
+        AbstractSubmitDeadline"""
+        for instance in context:
+            if (
+                instance.data["family"] != "workfile"
+                # Disabled instances won't be integrated
+                or instance.data("publish") is False
+            ):
+                continue
+            template_data = instance.data["anatomyData"]
+            # Expect workfile instance has only one representation
+            representation = instance.data["representations"][0]
+            # Get workfile extension
+            repre_file = representation["files"]
+            self.log.info(repre_file)
+            ext = os.path.splitext(repre_file)[1].lstrip(".")
+
+            # Fill template data
+            template_data["representation"] = representation["name"]
+            template_data["ext"] = ext
+            template_data["comment"] = None
+
+            anatomy = context.data["anatomy"]
+            # WARNING Hardcoded template name 'publish' > may not be used
+            template_obj = anatomy.templates_obj["publish"]["path"]
+
+            template_filled = template_obj.format(template_data)
+            script_path = os.path.normpath(template_filled)
+            self.log.info(
+                "Using published scene for render {}".format(
+                    script_path
+                )
+            )
+            return script_path
+
+        return None
+
     def payload_submit(
         self,
         instance,
diff --git a/openpype/settings/defaults/project_settings/deadline.json b/openpype/settings/defaults/project_settings/deadline.json
index a19464a5c1..b02cfa8207 100644
--- a/openpype/settings/defaults/project_settings/deadline.json
+++ b/openpype/settings/defaults/project_settings/deadline.json
@@ -65,6 +65,8 @@
         "group": "",
         "department": "",
         "use_gpu": true,
+        "workfile_dependency": true,
+        "use_published_workfile": true,
         "env_allowed_keys": [],
         "env_search_replace_values": {},
         "limit_groups": {}
diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_deadline.json b/openpype/settings/entities/schemas/projects_schema/schema_project_deadline.json
index 1aea778e32..42dea33ef9 100644
--- a/openpype/settings/entities/schemas/projects_schema/schema_project_deadline.json
+++ b/openpype/settings/entities/schemas/projects_schema/schema_project_deadline.json
@@ -362,6 +362,16 @@
                 "key": "use_gpu",
                 "label": "Use GPU"
             },
+            {
+                "type": "boolean",
+                "key": "workfile_dependency",
+                "label": "Workfile Dependency"
+            },
+            {
+                "type": "boolean",
+                "key": "use_published_workfile",
+                "label": "Use Published Workfile"
+            },
             {
                 "type": "list",
                 "key": "env_allowed_keys",
diff --git a/server_addon/deadline/server/settings/publish_plugins.py b/server_addon/deadline/server/settings/publish_plugins.py
index dfb30f9b41..dc2cd7591f 100644
--- a/server_addon/deadline/server/settings/publish_plugins.py
+++ b/server_addon/deadline/server/settings/publish_plugins.py
@@ -161,6 +161,8 @@ class NukeSubmitDeadlineModel(BaseSettingsModel):
     group: str = Field(title="Group")
     department: str = Field(title="Department")
     use_gpu: bool = Field(title="Use GPU")
+    workfile_dependency: bool = Field(title="Workfile Dependency")
+    use_published_workfile: bool = Field(title="Use Published Workfile")
 
     env_allowed_keys: list[str] = Field(
         default_factory=list,
@@ -382,6 +384,8 @@ DEFAULT_DEADLINE_PLUGINS_SETTINGS = {
         "group": "",
         "department": "",
         "use_gpu": True,
+        "workfile_dependency": True,
+        "use_published_workfile": True,
         "env_allowed_keys": [],
         "env_search_replace_values": [],
         "limit_groups": []
diff --git a/server_addon/deadline/server/version.py b/server_addon/deadline/server/version.py
index f1380eede2..9cb17e7976 100644
--- a/server_addon/deadline/server/version.py
+++ b/server_addon/deadline/server/version.py
@@ -1 +1 @@
-__version__ = "0.1.7"
+__version__ = "0.1.8"
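
For reference, a minimal sketch of how the new "use_published_workfile" toggle selects the script submitted to Deadline. This is an illustration only, not code from the diff: the helper name resolve_script_path and the explicit None fallback are assumptions for clarity (the plugin code above assigns the helper's result directly).

    # Illustration only; mirrors the logic added to NukeSubmitDeadline.process().
    def resolve_script_path(plugin, instance, context):
        # Default to the workfile currently open in Nuke.
        script_path = context.data["currentFile"]
        # Per-instance publisher attribute, falling back to the plugin setting.
        use_published = instance.data["attributeValues"].get(
            "use_published_workfile", plugin.use_published_workfile
        )
        if use_published:
            # The helper returns None when the context has no publishable
            # "workfile" instance; keep the current workfile in that case.
            published = plugin._get_published_workfile_path(context)
            if published is not None:
                script_path = published
        return script_path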