From 0aeded448613e0faddbeb09a711456dada9e0826 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 8 Aug 2022 16:04:31 +0200 Subject: [PATCH 001/169] Add collectors for input links for Maya + Fusion --- .../fusion/plugins/publish/collect_inputs.py | 112 +++++++++ openpype/hosts/maya/api/lib_rendersetup.py | 68 ++++++ .../maya/plugins/publish/collect_inputs.py | 214 ++++++++++++++++++ 3 files changed, 394 insertions(+) create mode 100644 openpype/hosts/fusion/plugins/publish/collect_inputs.py create mode 100644 openpype/hosts/maya/plugins/publish/collect_inputs.py diff --git a/openpype/hosts/fusion/plugins/publish/collect_inputs.py b/openpype/hosts/fusion/plugins/publish/collect_inputs.py new file mode 100644 index 0000000000..e610575e3a --- /dev/null +++ b/openpype/hosts/fusion/plugins/publish/collect_inputs.py @@ -0,0 +1,112 @@ +import pyblish.api + +from openpype.pipeline import registered_host + + +def collect_input_containers(tools): + """Collect containers that contain any of the node in `nodes`. + + This will return any loaded Avalon container that contains at least one of + the nodes. As such, the Avalon container is an input for it. Or in short, + there are member nodes of that container. + + Returns: + list: Input avalon containers + + """ + + # Lookup by node ids + lookup = frozenset([tool.Name for tool in tools]) + + containers = [] + host = registered_host() + for container in host.ls(): + + name = container["_tool"].Name + + # We currently assume no "groups" as containers but just single tools + # like a single "Loader" operator. As such we just check whether the + # Loader is part of the processing queue. + if name in lookup: + containers.append(container) + + return containers + + +def iter_upstream(tool): + """Yields all upstream inputs for the current tool. + + Yields: + tool: The input tools. + + """ + + def get_connected_input_tools(tool): + """Helper function that returns connected input tools for a tool.""" + inputs = [] + + # Filter only to actual types that will have sensible upstream + # connections. So we ignore just "Number" inputs as they can be + # many to iterate, slowing things down quite a bit - and in practice + # they don't have upstream connections. + VALID_INPUT_TYPES = ['Image', 'Particles', 'Mask', 'DataType3D'] + for type_ in VALID_INPUT_TYPES: + for input_ in tool.GetInputList(type_).values(): + output = input_.GetConnectedOutput() + if output: + input_tool = output.GetTool() + inputs.append(input_tool) + + return inputs + + # Initialize process queue with the node's inputs itself + queue = get_connected_input_tools(tool) + + # We keep track of which node names we have processed so far, to ensure we + # don't process the same hierarchy again. We are not pushing the tool + # itself into the set as that doesn't correctly recognize the same tool. + # Since tool names are unique in a comp in Fusion we rely on that. + collected = set(tool.Name for tool in queue) + + # Traverse upstream references for all nodes and yield them as we + # process the queue. + while queue: + upstream_tool = queue.pop() + yield upstream_tool + + # Find upstream tools that are not collected yet. + upstream_inputs = get_connected_input_tools(upstream_tool) + upstream_inputs = [t for t in upstream_inputs if + t.Name not in collected] + + queue.extend(upstream_inputs) + collected.update(tool.Name for tool in upstream_inputs) + + +class CollectUpstreamInputs(pyblish.api.InstancePlugin): + """Collect source input containers used for this publish. 
+ + This will include `inputs` data of which loaded publishes were used in the + generation of this publish. This leaves an upstream trace to what was used + as input. + + """ + + label = "Collect Inputs" + order = pyblish.api.CollectorOrder + 0.2 + hosts = ["fusion"] + + def process(self, instance): + + # Get all upstream and include itself + tool = instance[0] + nodes = list(iter_upstream(tool)) + nodes.append(tool) + + # Collect containers for the given set of nodes + containers = collect_input_containers(nodes) + + inputs = [c["representation"] for c in containers] + instance.data["inputs"] = inputs + + self.log.info("Collected inputs: %s" % inputs) diff --git a/openpype/hosts/maya/api/lib_rendersetup.py b/openpype/hosts/maya/api/lib_rendersetup.py index 0fdc54a068..a00be52d8e 100644 --- a/openpype/hosts/maya/api/lib_rendersetup.py +++ b/openpype/hosts/maya/api/lib_rendersetup.py @@ -348,3 +348,71 @@ def get_attr_overrides(node_attr, layer, break return reversed(plug_overrides) + + +def get_shader_in_layer(node, layer): + """Return the assigned shader in a renderlayer without switching layers. + + This has been developed and tested for Legacy Renderlayers and *not* for + Render Setup. + + Note: This will also return the shader for any face assignments, however + it will *not* return the components they are assigned to. This could + be implemented, but since Maya's renderlayers are famous for breaking + with face assignments there has been no need for this function to + support that. + + Returns: + list: The list of assigned shaders in the given layer. + + """ + + def _get_connected_shader(shape): + """Return current shader""" + return cmds.listConnections(shape + ".instObjGroups", + source=False, + destination=True, + plugs=False, + connections=False, + type="shadingEngine") or [] + + # We check the instObjGroups (shader connection) for layer overrides. + plug = node + ".instObjGroups" + + # Ignore complex query if we're in the layer anyway (optimization) + current_layer = cmds.editRenderLayerGlobals(query=True, + currentRenderLayer=True) + if layer == current_layer: + return _get_connected_shader(plug) + + connections = cmds.listConnections(plug, + plugs=True, + source=False, + destination=True, + type="renderLayer") or [] + connections = filter(lambda x: x.endswith(".outPlug"), connections) + if not connections: + # If no overrides anywhere on the shader, just get the current shader + return _get_connected_shader(plug) + + def _get_override(connections, layer): + """Return the overridden connection for that layer in connections""" + # If there's an override on that layer, return that. 
+ for connection in connections: + if (connection.startswith(layer + ".outAdjustments") and + connection.endswith(".outPlug")): + + # This is a shader override on that layer so get the shader + # connected to .outValue of the .outAdjustment[i] + out_adjustment = connection.rsplit(".", 1)[0] + connection_attr = out_adjustment + ".outValue" + override = cmds.listConnections(connection_attr) or [] + + return override + + override_shader = _get_override(connections, layer) + if override_shader is not None: + return override_shader + else: + # Get the override for "defaultRenderLayer" (=masterLayer) + return _get_override(connections, layer="defaultRenderLayer") diff --git a/openpype/hosts/maya/plugins/publish/collect_inputs.py b/openpype/hosts/maya/plugins/publish/collect_inputs.py new file mode 100644 index 0000000000..8afa1e4757 --- /dev/null +++ b/openpype/hosts/maya/plugins/publish/collect_inputs.py @@ -0,0 +1,214 @@ +import copy +from maya import cmds +import maya.api.OpenMaya as om +import pyblish.api + +from openpype.pipeline import registered_host +from openpype.hosts.maya.api.lib import get_container_members +from openpype.hosts.maya.api.lib_rendersetup import get_shader_in_layer + + +def iter_history(nodes, + filter=om.MFn.kInvalid, + direction=om.MItDependencyGraph.kUpstream): + """Iterate unique upstream history for list of nodes. + + This acts as a replacement to maya.cmds.listHistory. + It's faster by about 2x-3x. It returns less than + maya.cmds.listHistory as it excludes the input nodes + from the output (unless an input node was history + for another input node). It also excludes duplicates. + + Args: + nodes (list): Maya node names to start search from. + filter (om.MFn.Type): Filter to only specific types. + e.g. to dag nodes using om.MFn.kDagNode + direction (om.MItDependencyGraph.Direction): Direction to traverse in. + Defaults to upstream. + + Yields: + str: Node names in upstream history. + + """ + if not nodes: + return + + sel = om.MSelectionList() + for node in nodes: + sel.add(node) + + it = om.MItDependencyGraph(sel.getDependNode(0)) # init iterator + handle = om.MObjectHandle + + traversed = set() + fn_dep = om.MFnDependencyNode() + fn_dag = om.MFnDagNode() + for i in range(sel.length()): + + start_node = sel.getDependNode(i) + start_node_hash = handle(start_node).hashCode() + if start_node_hash in traversed: + continue + + it.resetTo(start_node, + filter=filter, + direction=direction) + while not it.isDone(): + + node = it.currentNode() + node_hash = handle(node).hashCode() + + if node_hash in traversed: + it.prune() + it.next() + continue + + traversed.add(node_hash) + + if node.hasFn(om.MFn.kDagNode): + fn_dag.setObject(node) + yield fn_dag.fullPathName() + else: + fn_dep.setObject(node) + yield fn_dep.name() + + it.next() + + +def collect_input_containers(containers, nodes): + """Collect containers that contain any of the node in `nodes`. + + This will return any loaded Avalon container that contains at least one of + the nodes. As such, the Avalon container is an input for it. Or in short, + there are member nodes of that container. + + Returns: + list: Input avalon containers + + """ + # Assume the containers have collected their cached '_members' data + # in the collector. + return [container for container in containers + if any(node in container["_members"] for node in nodes)] + + +class CollectUpstreamInputs(pyblish.api.InstancePlugin): + """Collect input source inputs for this publish. 
+ + This will include `inputs` data of which loaded publishes were used in the + generation of this publish. This leaves an upstream trace to what was used + as input. + + """ + + label = "Collect Inputs" + order = pyblish.api.CollectorOrder + 0.34 + hosts = ["maya"] + + def process(self, instance): + + # For large scenes the querying of "host.ls()" can be relatively slow + # e.g. up to a second. Many instances calling it easily slows this + # down. As such, we cache it so we trigger it only once. + # todo: Instead of hidden cache make "CollectContainers" plug-in + cache_key = "__cache_containers" + scene_containers = instance.context.data.get(cache_key, None) + if scene_containers is None: + # Query the scenes' containers if there's no cache yet + host = registered_host() + scene_containers = list(host.ls()) + for container in scene_containers: + # Embed the members into the container dictionary + container_members = set(get_container_members(container)) + container["_members"] = container_members + instance.context.data["__cache_containers"] = scene_containers + + # Collect the relevant input containers for this instance + if "renderlayer" in set(instance.data.get("families", [])): + # Special behavior for renderlayers + self.log.debug("Collecting renderlayer inputs....") + containers = self._collect_renderlayer_inputs(scene_containers, + instance) + + else: + # Basic behavior + nodes = instance[:] + + # Include any input connections of history with long names + # For optimization purposes only trace upstream from shape nodes + # looking for used dag nodes. This way having just a constraint + # on a transform is also ignored which tended to give irrelevant + # inputs for the majority of our use cases. We tend to care more + # about geometry inputs. + shapes = cmds.ls(nodes, + type=("mesh", "nurbsSurface", "nurbsCurve"), + noIntermediate=True) + if shapes: + history = list(iter_history(shapes, filter=om.MFn.kShape)) + history = cmds.ls(history, long=True) + + # Include the transforms in the collected history as shapes + # are excluded from containers + transforms = cmds.listRelatives(cmds.ls(history, shapes=True), + parent=True, + fullPath=True, + type="transform") + if transforms: + history.extend(transforms) + + if history: + nodes = list(set(nodes + history)) + + # Collect containers for the given set of nodes + containers = collect_input_containers(scene_containers, + nodes) + + inputs = [c["representation"] for c in containers] + instance.data["inputs"] = inputs + + self.log.info("Collected inputs: %s" % inputs) + + def _collect_renderlayer_inputs(self, scene_containers, instance): + """Collects inputs from nodes in renderlayer, incl. 
shaders + camera""" + + # Get the renderlayer + renderlayer = instance.data.get("setMembers") + + if renderlayer == "defaultRenderLayer": + # Assume all loaded containers in the scene are inputs + # for the masterlayer + return copy.deepcopy(scene_containers) + else: + # Get the members of the layer + members = cmds.editRenderLayerMembers(renderlayer, + query=True, + fullNames=True) or [] + + # In some cases invalid objects are returned from + # `editRenderLayerMembers` so we filter them out + members = cmds.ls(members, long=True) + + # Include all children + children = cmds.listRelatives(members, + allDescendents=True, + fullPath=True) or [] + members.extend(children) + + # Include assigned shaders in renderlayer + shapes = cmds.ls(members, shapes=True, long=True) + shaders = set() + for shape in shapes: + shape_shaders = get_shader_in_layer(shape, layer=renderlayer) + if not shape_shaders: + continue + shaders.update(shape_shaders) + members.extend(shaders) + + # Explicitly include the camera being rendered in renderlayer + cameras = instance.data.get("cameras") + members.extend(cameras) + + containers = collect_input_containers(scene_containers, members) + + return containers + From d92c6eac115d0e857738c5944abe11fc2c840f1e Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 8 Aug 2022 16:58:33 +0200 Subject: [PATCH 002/169] Remove blank line --- openpype/hosts/maya/plugins/publish/collect_inputs.py | 1 - 1 file changed, 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/publish/collect_inputs.py b/openpype/hosts/maya/plugins/publish/collect_inputs.py index 8afa1e4757..d34f289e05 100644 --- a/openpype/hosts/maya/plugins/publish/collect_inputs.py +++ b/openpype/hosts/maya/plugins/publish/collect_inputs.py @@ -211,4 +211,3 @@ class CollectUpstreamInputs(pyblish.api.InstancePlugin): containers = collect_input_containers(scene_containers, members) return containers - From 4721a683094acfc2fd709d44c83d3907d91a2aa2 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 8 Aug 2022 17:00:17 +0200 Subject: [PATCH 003/169] Shush the hound - code is correct --- openpype/hosts/maya/plugins/publish/collect_inputs.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/collect_inputs.py b/openpype/hosts/maya/plugins/publish/collect_inputs.py index d34f289e05..43941bde4f 100644 --- a/openpype/hosts/maya/plugins/publish/collect_inputs.py +++ b/openpype/hosts/maya/plugins/publish/collect_inputs.py @@ -60,7 +60,7 @@ def iter_history(nodes, if node_hash in traversed: it.prune() - it.next() + it.next() # noqa: B305 continue traversed.add(node_hash) @@ -72,7 +72,7 @@ def iter_history(nodes, fn_dep.setObject(node) yield fn_dep.name() - it.next() + it.next() # noqa: B305 def collect_input_containers(containers, nodes): From 4a4bb22f60353bb9cfcdb64c516b2b145cb7c966 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 9 Aug 2022 11:36:36 +0200 Subject: [PATCH 004/169] Refactor collect inputs as `inputRepresentations` --- openpype/hosts/fusion/plugins/publish/collect_inputs.py | 6 ++++-- openpype/hosts/houdini/plugins/publish/collect_inputs.py | 6 ++++-- openpype/hosts/maya/plugins/publish/collect_inputs.py | 6 ++++-- 3 files changed, 12 insertions(+), 6 deletions(-) diff --git a/openpype/hosts/fusion/plugins/publish/collect_inputs.py b/openpype/hosts/fusion/plugins/publish/collect_inputs.py index e610575e3a..8f9857b02f 100644 --- a/openpype/hosts/fusion/plugins/publish/collect_inputs.py +++ b/openpype/hosts/fusion/plugins/publish/collect_inputs.py @@ 
-1,3 +1,5 @@ +from bson.objectid import ObjectId + import pyblish.api from openpype.pipeline import registered_host @@ -106,7 +108,7 @@ class CollectUpstreamInputs(pyblish.api.InstancePlugin): # Collect containers for the given set of nodes containers = collect_input_containers(nodes) - inputs = [c["representation"] for c in containers] - instance.data["inputs"] = inputs + inputs = [ObjectId(c["representation"]) for c in containers] + instance.data["inputRepresentations"] = inputs self.log.info("Collected inputs: %s" % inputs) diff --git a/openpype/hosts/houdini/plugins/publish/collect_inputs.py b/openpype/hosts/houdini/plugins/publish/collect_inputs.py index 8c7098c710..9ee0248bd9 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_inputs.py +++ b/openpype/hosts/houdini/plugins/publish/collect_inputs.py @@ -1,3 +1,5 @@ +from bson.objectid import ObjectId + import pyblish.api from openpype.pipeline import registered_host @@ -115,7 +117,7 @@ class CollectUpstreamInputs(pyblish.api.InstancePlugin): # Collect containers for the given set of nodes containers = collect_input_containers(nodes) - inputs = [c["representation"] for c in containers] - instance.data["inputs"] = inputs + inputs = [ObjectId(c["representation"]) for c in containers] + instance.data["inputRepresentations"] = inputs self.log.info("Collected inputs: %s" % inputs) diff --git a/openpype/hosts/maya/plugins/publish/collect_inputs.py b/openpype/hosts/maya/plugins/publish/collect_inputs.py index 43941bde4f..470fceffc9 100644 --- a/openpype/hosts/maya/plugins/publish/collect_inputs.py +++ b/openpype/hosts/maya/plugins/publish/collect_inputs.py @@ -1,4 +1,6 @@ import copy +from bson.objectid import ObjectId + from maya import cmds import maya.api.OpenMaya as om import pyblish.api @@ -163,8 +165,8 @@ class CollectUpstreamInputs(pyblish.api.InstancePlugin): containers = collect_input_containers(scene_containers, nodes) - inputs = [c["representation"] for c in containers] - instance.data["inputs"] = inputs + inputs = [ObjectId(c["representation"]) for c in containers] + instance.data["inputRepresentations"] = inputs self.log.info("Collected inputs: %s" % inputs) From 19f81dbf40bbe509506f9f13b4dcfa70133f8b92 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 9 Aug 2022 11:38:44 +0200 Subject: [PATCH 005/169] Add Collector to convert `inputRepresentations` -> `inputVersions` --- ...llect_input_representations_to_versions.py | 48 +++++++++++++++++++ 1 file changed, 48 insertions(+) create mode 100644 openpype/plugins/publish/collect_input_representations_to_versions.py diff --git a/openpype/plugins/publish/collect_input_representations_to_versions.py b/openpype/plugins/publish/collect_input_representations_to_versions.py new file mode 100644 index 0000000000..03f2abf51f --- /dev/null +++ b/openpype/plugins/publish/collect_input_representations_to_versions.py @@ -0,0 +1,48 @@ +import pyblish.api + +from bson.objectid import ObjectId + +from openpype.client import get_representations + + +class CollectInputRepresentationsToVersions(pyblish.api.ContextPlugin): + """Converts collected input representations to input versions. + + Any data in `instance.data["inputRepresentations"]` gets converted into + `instance.data["inputVersions"]` as supported in OpenPype v3. 
+ + """ + # This is a ContextPlugin because then we can query the database only once + # for the conversion of representation ids to version ids (optimization) + label = "Input Representations to Versions" + order = pyblish.api.CollectorOrder + 0.499 + hosts = ["*"] + + def process(self, context): + # Query all version ids for representation ids from the database once + representations = set() + for instance in context: + inst_repre = instance.data.get("inputRepresentations", []) + representations.update(inst_repre) + + representations_docs = get_representations( + project_name=context.data["projectEntity"]["name"], + representation_ids=representations, + fields=["_id", "parent"]) + + representation_id_to_version_id = { + repre["_id"]: repre["parent"] for repre in representations_docs + } + + for instance in context: + inst_repre = instance.data.get("inputRepresentations", []) + if not inst_repre: + continue + + input_versions = instance.data.get("inputVersions", []) + for repre_id in inst_repre: + repre_id = ObjectId(repre_id) + version_id = representation_id_to_version_id[repre_id] + input_versions.append(version_id) + instance.data["inputVersions"] = input_versions + From 257f027d900e259d611bc70becaa1a30065ee3fd Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 9 Aug 2022 11:40:28 +0200 Subject: [PATCH 006/169] Remove blank line --- .../plugins/publish/collect_input_representations_to_versions.py | 1 - 1 file changed, 1 deletion(-) diff --git a/openpype/plugins/publish/collect_input_representations_to_versions.py b/openpype/plugins/publish/collect_input_representations_to_versions.py index 03f2abf51f..18a19bce80 100644 --- a/openpype/plugins/publish/collect_input_representations_to_versions.py +++ b/openpype/plugins/publish/collect_input_representations_to_versions.py @@ -45,4 +45,3 @@ class CollectInputRepresentationsToVersions(pyblish.api.ContextPlugin): version_id = representation_id_to_version_id[repre_id] input_versions.append(version_id) instance.data["inputVersions"] = input_versions - From 5b559fd28d439a2e9ba2185eae428b7c63b69fb5 Mon Sep 17 00:00:00 2001 From: Thomas Fricard Date: Tue, 9 Aug 2022 13:17:06 +0200 Subject: [PATCH 007/169] create shelf manager definition for houdini in openpype project settings --- .../defaults/project_settings/houdini.json | 21 +++++ .../schema_project_houdini.json | 6 +- .../schemas/schema_houdini_scriptshelf.json | 81 +++++++++++++++++++ 3 files changed, 107 insertions(+), 1 deletion(-) create mode 100644 openpype/settings/entities/schemas/projects_schema/schemas/schema_houdini_scriptshelf.json diff --git a/openpype/settings/defaults/project_settings/houdini.json b/openpype/settings/defaults/project_settings/houdini.json index 911bf82d9b..5805f600c5 100644 --- a/openpype/settings/defaults/project_settings/houdini.json +++ b/openpype/settings/defaults/project_settings/houdini.json @@ -1,4 +1,25 @@ { + "shelves": [ + { + "shelf_set_name": "OpenPype Shelves", + "shelf_set_source_path": "/path/to/your/shelf_set_file", + "shelf_definition": [ + { + "shelf_name": "OpenPype Shelf", + "shelf_file_path": "/path/to/your/shelf_file", + "tools_list": [ + { + "name": "OpenPype Tool", + "filepath": "/path/to/your/tool_file", + "script": "/path/to/your/tool_script", + "icon": "/path/to/your/icon", + "help": "Help message for your tool" + } + ] + } + ] + } + ], "create": { "CreateArnoldAss": { "enabled": true, diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_houdini.json 
b/openpype/settings/entities/schemas/projects_schema/schema_project_houdini.json index cad99dde22..bde4352964 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_houdini.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_houdini.json @@ -5,6 +5,10 @@ "label": "Houdini", "is_file": true, "children": [ + { + "type": "schema", + "name": "schema_houdini_scriptshelf" + }, { "type": "schema", "name": "schema_houdini_create" @@ -28,4 +32,4 @@ ] } ] -} +} \ No newline at end of file diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_houdini_scriptshelf.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_houdini_scriptshelf.json new file mode 100644 index 0000000000..5a84c6d5cc --- /dev/null +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_houdini_scriptshelf.json @@ -0,0 +1,81 @@ +{ + "type": "list", + "key": "shelves", + "label": "Shelves Manager", + "is_group": true, + "use_label_wrap": true, + "object_type": { + "type": "dict", + "children": [ + { + "type": "text", + "key": "shelf_set_name", + "label": "Shelf Set Name" + }, + { + "type": "path", + "key": "shelf_set_source_path", + "label": "Shelf Set Path", + "multipath": true, + "multiplatform": true + }, + { + "type": "list", + "key": "shelf_definition", + "label": "Shelves", + "use_label_wrap": true, + "object_type": { + "type": "dict", + "children": [ + { + "type": "text", + "key": "shelf_name", + "label": "Shelf Name" + }, + { + "type": "text", + "key": "shelf_file_path", + "label": "Shelf File Path" + }, + { + "type": "list", + "key": "tools_list", + "label": "Tools", + "use_label_wrap": true, + "object_type": { + "type": "dict", + "children": [ + { + "type": "text", + "key": "name", + "label": "Name" + }, + { + "type": "text", + "key": "filepath", + "label": "File Path" + }, + { + "type": "text", + "key": "script", + "label": "Script" + }, + { + "type": "text", + "key": "icon", + "label": "Icon" + }, + { + "type": "text", + "key": "help", + "label": "Help" + } + ] + } + } + ] + } + } + ] + } +} \ No newline at end of file From a006b5df63bb0b3f3935f0873a2f4537966ffddb Mon Sep 17 00:00:00 2001 From: Thomas Fricard Date: Tue, 9 Aug 2022 15:43:47 +0200 Subject: [PATCH 008/169] set up the shelf creation in the _set_context_settings function --- openpype/hosts/houdini/api/lib.py | 32 ++++++++++++++++++++++++++ openpype/hosts/houdini/api/pipeline.py | 2 ++ 2 files changed, 34 insertions(+) diff --git a/openpype/hosts/houdini/api/lib.py b/openpype/hosts/houdini/api/lib.py index c8a7f92bb9..55832abeb3 100644 --- a/openpype/hosts/houdini/api/lib.py +++ b/openpype/hosts/houdini/api/lib.py @@ -460,3 +460,35 @@ def reset_framerange(): hou.playbar.setFrameRange(frame_start, frame_end) hou.playbar.setPlaybackRange(frame_start, frame_end) hou.setFrame(frame_start) + + +def create_shelf(): + hou.shelves.beginChangeBlock() + + custom_shelf = hou.shelves.newShelf( + file_path='', + name="custom_shelf", + label="Custom Shelf" + ) + + new_tool = hou.shelves.newTool( + file_path='', + name='new_tool', + label='New Tool', + script='', + language=hou.scriptLanguage.Python, + icon='', + help='This is a new tool' + ) + + if new_tool not in custom_shelf.tools(): + custom_shelf.setTools(list(custom_shelf.tools()) + [new_tool]) + + shelf_set = [ + shelf for shelf in hou.shelves.shelfSets().values() + if shelf.label() == "Create and Refine" + ][0] + + shelf_set.setShelves(shelf_set.shelves() + (custom_shelf,)) + + hou.shelves.endChangeBlock() 
diff --git a/openpype/hosts/houdini/api/pipeline.py b/openpype/hosts/houdini/api/pipeline.py index b5f5459392..2f414020c4 100644 --- a/openpype/hosts/houdini/api/pipeline.py +++ b/openpype/hosts/houdini/api/pipeline.py @@ -309,6 +309,7 @@ def _set_context_settings(): fps resolution renderer + shelves Returns: None @@ -320,6 +321,7 @@ def _set_context_settings(): lib.set_scene_fps(fps) lib.reset_framerange() + lib.create_shelf() def on_pyblish_instance_toggled(instance, new_value, old_value): From cdd90ad2a79de9ba0c2000a00eff65efcde30a8d Mon Sep 17 00:00:00 2001 From: Thomas Fricard Date: Tue, 9 Aug 2022 17:13:08 +0200 Subject: [PATCH 009/169] main structure to generate shelves --- openpype/hosts/houdini/api/pipeline.py | 4 +-- openpype/hosts/houdini/api/shelves.py | 47 ++++++++++++++++++++++++++ 2 files changed, 49 insertions(+), 2 deletions(-) create mode 100644 openpype/hosts/houdini/api/shelves.py diff --git a/openpype/hosts/houdini/api/pipeline.py b/openpype/hosts/houdini/api/pipeline.py index 2f414020c4..f809f0ce56 100644 --- a/openpype/hosts/houdini/api/pipeline.py +++ b/openpype/hosts/houdini/api/pipeline.py @@ -14,7 +14,7 @@ from openpype.pipeline import ( ) from openpype.pipeline.load import any_outdated_containers import openpype.hosts.houdini -from openpype.hosts.houdini.api import lib +from openpype.hosts.houdini.api import lib, shelves from openpype.lib import ( register_event_callback, @@ -74,6 +74,7 @@ def install(): # so it initializes into the correct scene FPS, Frame Range, etc. # todo: make sure this doesn't trigger when opening with last workfile _set_context_settings() + shelves.generate_shelves() def uninstall(): @@ -321,7 +322,6 @@ def _set_context_settings(): lib.set_scene_fps(fps) lib.reset_framerange() - lib.create_shelf() def on_pyblish_instance_toggled(instance, new_value, old_value): diff --git a/openpype/hosts/houdini/api/shelves.py b/openpype/hosts/houdini/api/shelves.py new file mode 100644 index 0000000000..b8f6419175 --- /dev/null +++ b/openpype/hosts/houdini/api/shelves.py @@ -0,0 +1,47 @@ +import os +import logging + +from openpype.settings import get_project_settings + +log = logging.getLogger(__name__) + + +def generate_shelves(): + # load configuration of custom menu + project_settings = get_project_settings(os.getenv("AVALON_PROJECT")) + shelves_set_config = project_settings["houdini"]["shelves"] + + if not shelves_set_config: + log.warning("No custom shelves found.") + return + + # run the shelf generator for Houdini + for shelf_set in shelves_set_config: + pass + # if shelf_set_source_path is not None we load the source path and return + + # if the shelf set name already exists, do nothing, else, create a new one + + # go through each shelf + # if shelf_file_path exists, load the shelf and return + # if the shelf name already exists, do nothing, else, create a new one + + # go through each tool + # if filepath exists, load the tool, add it to the shelf and continue + # create the tool + # add it to a list of tools + + # add the tools list to the shelf with the tools already in it + # add the shelf to the shelf set with the shelfs already in it + + +def get_or_create_shelf_set(): + pass + + +def get_or_create_shelf(): + pass + + +def get_or_create_tool(): + pass From b74655c167aefd2d84e8cd6716d7b7b3c02783cd Mon Sep 17 00:00:00 2001 From: Thomas Fricard Date: Wed, 10 Aug 2022 11:56:05 +0200 Subject: [PATCH 010/169] set multipath to false for shelf set path --- .../settings/defaults/project_settings/houdini.json | 6 +++++- 
.../schemas/schema_houdini_scriptshelf.json | 10 +++++----- 2 files changed, 10 insertions(+), 6 deletions(-) diff --git a/openpype/settings/defaults/project_settings/houdini.json b/openpype/settings/defaults/project_settings/houdini.json index 5805f600c5..2ceed37935 100644 --- a/openpype/settings/defaults/project_settings/houdini.json +++ b/openpype/settings/defaults/project_settings/houdini.json @@ -2,7 +2,11 @@ "shelves": [ { "shelf_set_name": "OpenPype Shelves", - "shelf_set_source_path": "/path/to/your/shelf_set_file", + "shelf_set_source_path": { + "windows": "", + "darwin": "", + "linux": "/path/to/your/shelf_set_file" + }, "shelf_definition": [ { "shelf_name": "OpenPype Shelf", diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_houdini_scriptshelf.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_houdini_scriptshelf.json index 5a84c6d5cc..ae05cef74e 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_houdini_scriptshelf.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_houdini_scriptshelf.json @@ -16,7 +16,7 @@ "type": "path", "key": "shelf_set_source_path", "label": "Shelf Set Path", - "multipath": true, + "multipath": false, "multiplatform": true }, { @@ -33,7 +33,7 @@ "label": "Shelf Name" }, { - "type": "text", + "type": "path", "key": "shelf_file_path", "label": "Shelf File Path" }, @@ -51,17 +51,17 @@ "label": "Name" }, { - "type": "text", + "type": "path", "key": "filepath", "label": "File Path" }, { - "type": "text", + "type": "path", "key": "script", "label": "Script" }, { - "type": "text", + "type": "path", "key": "icon", "label": "Icon" }, From a302caf6bd431b98136ce5b41c56cb0c60e49b4f Mon Sep 17 00:00:00 2001 From: Thomas Fricard Date: Wed, 10 Aug 2022 13:05:37 +0200 Subject: [PATCH 011/169] setting shelf set filepath if any in right OS --- openpype/hosts/houdini/api/shelves.py | 27 ++++++++++++++++++++------- 1 file changed, 20 insertions(+), 7 deletions(-) diff --git a/openpype/hosts/houdini/api/shelves.py b/openpype/hosts/houdini/api/shelves.py index b8f6419175..6ea4b4a9fd 100644 --- a/openpype/hosts/houdini/api/shelves.py +++ b/openpype/hosts/houdini/api/shelves.py @@ -1,26 +1,38 @@ import os import logging +import platform from openpype.settings import get_project_settings -log = logging.getLogger(__name__) +import hou + +log = logging.getLogger("openpype.hosts.houdini") def generate_shelves(): + current_os = platform.system().lower() # load configuration of custom menu project_settings = get_project_settings(os.getenv("AVALON_PROJECT")) shelves_set_config = project_settings["houdini"]["shelves"] if not shelves_set_config: - log.warning("No custom shelves found.") + log.warning( + "SHELF ERROR: No custom shelves found in project settings." 
+ ) return # run the shelf generator for Houdini - for shelf_set in shelves_set_config: - pass - # if shelf_set_source_path is not None we load the source path and return + for shelf_set_config in shelves_set_config: + shelf_set_filepath = shelf_set_config.get('shelf_set_source_path') + # if shelf_set_source_path is not None we load the source path and continue + if shelf_set_filepath[current_os]: + hou.shelves.newShelfSet(file_path=shelf_set_filepath[current_os]) + # hou.ShelfSet.setFilePath(file_path=shelf_set_filepath[operating_system]) + continue # if the shelf set name already exists, do nothing, else, create a new one + shelf_set_name = shelf_set_config.get('shelf_set_name') + shelf_set = get_or_create_shelf_set(shelf_set_name) # go through each shelf # if shelf_file_path exists, load the shelf and return @@ -35,8 +47,9 @@ def generate_shelves(): # add the shelf to the shelf set with the shelfs already in it -def get_or_create_shelf_set(): - pass +def get_or_create_shelf_set(shelf_set_name): + log.warning("IN GET OR CREATE SHELF SET: {}".format(shelf_set_name)) + hou.shelves.shelves() def get_or_create_shelf(): From 6f68b998965893401d1679913084942e7329b086 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 10 Aug 2022 15:47:32 +0200 Subject: [PATCH 012/169] Fix refactor typo --- openpype/hosts/maya/api/lib_rendersetup.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/maya/api/lib_rendersetup.py b/openpype/hosts/maya/api/lib_rendersetup.py index a00be52d8e..e616f26e1b 100644 --- a/openpype/hosts/maya/api/lib_rendersetup.py +++ b/openpype/hosts/maya/api/lib_rendersetup.py @@ -367,9 +367,9 @@ def get_shader_in_layer(node, layer): """ - def _get_connected_shader(shape): + def _get_connected_shader(plug): """Return current shader""" - return cmds.listConnections(shape + ".instObjGroups", + return cmds.listConnections(plug, source=False, destination=True, plugs=False, From b69e2e2003f768b111fa50635a0c5f3268ca7357 Mon Sep 17 00:00:00 2001 From: Thomas Fricard Date: Wed, 10 Aug 2022 17:34:48 +0200 Subject: [PATCH 013/169] get shelf set or create one --- openpype/hosts/houdini/api/shelves.py | 29 ++++++++++++++++++++++----- 1 file changed, 24 insertions(+), 5 deletions(-) diff --git a/openpype/hosts/houdini/api/shelves.py b/openpype/hosts/houdini/api/shelves.py index 6ea4b4a9fd..d89f3153ea 100644 --- a/openpype/hosts/houdini/api/shelves.py +++ b/openpype/hosts/houdini/api/shelves.py @@ -21,13 +21,19 @@ def generate_shelves(): ) return - # run the shelf generator for Houdini for shelf_set_config in shelves_set_config: shelf_set_filepath = shelf_set_config.get('shelf_set_source_path') + # if shelf_set_source_path is not None we load the source path and continue if shelf_set_filepath[current_os]: + if not os.path.isfile(shelf_set_filepath[current_os]): + raise FileNotFoundError( + "SHELF ERROR: This path doesn't exist - {}".format( + shelf_set_filepath[current_os] + ) + ) + hou.shelves.newShelfSet(file_path=shelf_set_filepath[current_os]) - # hou.ShelfSet.setFilePath(file_path=shelf_set_filepath[operating_system]) continue # if the shelf set name already exists, do nothing, else, create a new one @@ -47,9 +53,22 @@ def generate_shelves(): # add the shelf to the shelf set with the shelfs already in it -def get_or_create_shelf_set(shelf_set_name): - log.warning("IN GET OR CREATE SHELF SET: {}".format(shelf_set_name)) - hou.shelves.shelves() +def get_or_create_shelf_set(shelf_set_label): + all_shelves = hou.shelves.shelfSets().values() + + shelf_set = [ + 
shelf for shelf in all_shelves if shelf.label() == shelf_set_label + ] + + if shelf_set: + return shelf_set[0] + + shelf_set_name = shelf_set_label.replace(' ', '_').lower() + new_shelf_set = hou.shelves.newShelfSet( + name=shelf_set_name, + label=shelf_set_label + ) + return new_shelf_set def get_or_create_shelf(): From 937ba13ea0c62b63d2d56a0f1895932089070983 Mon Sep 17 00:00:00 2001 From: Thomas Fricard Date: Wed, 10 Aug 2022 17:47:33 +0200 Subject: [PATCH 014/169] remove filepath for shelf and tools --- .../settings/defaults/project_settings/houdini.json | 2 -- .../schemas/schema_houdini_scriptshelf.json | 12 +----------- 2 files changed, 1 insertion(+), 13 deletions(-) diff --git a/openpype/settings/defaults/project_settings/houdini.json b/openpype/settings/defaults/project_settings/houdini.json index 2ceed37935..a818f82d6b 100644 --- a/openpype/settings/defaults/project_settings/houdini.json +++ b/openpype/settings/defaults/project_settings/houdini.json @@ -10,11 +10,9 @@ "shelf_definition": [ { "shelf_name": "OpenPype Shelf", - "shelf_file_path": "/path/to/your/shelf_file", "tools_list": [ { "name": "OpenPype Tool", - "filepath": "/path/to/your/tool_file", "script": "/path/to/your/tool_script", "icon": "/path/to/your/icon", "help": "Help message for your tool" diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_houdini_scriptshelf.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_houdini_scriptshelf.json index ae05cef74e..812ab7d8c9 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_houdini_scriptshelf.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_houdini_scriptshelf.json @@ -15,7 +15,7 @@ { "type": "path", "key": "shelf_set_source_path", - "label": "Shelf Set Path", + "label": "Shelf Set Path (optional)", "multipath": false, "multiplatform": true }, @@ -32,11 +32,6 @@ "key": "shelf_name", "label": "Shelf Name" }, - { - "type": "path", - "key": "shelf_file_path", - "label": "Shelf File Path" - }, { "type": "list", "key": "tools_list", @@ -50,11 +45,6 @@ "key": "name", "label": "Name" }, - { - "type": "path", - "key": "filepath", - "label": "File Path" - }, { "type": "path", "key": "script", From ea37f4c3c5313e6c088e533c10c721b33d490333 Mon Sep 17 00:00:00 2001 From: Thomas Fricard Date: Wed, 10 Aug 2022 18:06:38 +0200 Subject: [PATCH 015/169] get or create shelf implementation --- openpype/hosts/houdini/api/shelves.py | 52 +++++++++++++++++++++------ 1 file changed, 42 insertions(+), 10 deletions(-) diff --git a/openpype/hosts/houdini/api/shelves.py b/openpype/hosts/houdini/api/shelves.py index d89f3153ea..76fe0cbd87 100644 --- a/openpype/hosts/houdini/api/shelves.py +++ b/openpype/hosts/houdini/api/shelves.py @@ -1,3 +1,4 @@ +from cProfile import label import os import logging import platform @@ -17,14 +18,13 @@ def generate_shelves(): if not shelves_set_config: log.warning( - "SHELF ERROR: No custom shelves found in project settings." + "SHELF WARNGING: No custom shelves found in project settings." 
) return for shelf_set_config in shelves_set_config: shelf_set_filepath = shelf_set_config.get('shelf_set_source_path') - # if shelf_set_source_path is not None we load the source path and continue if shelf_set_filepath[current_os]: if not os.path.isfile(shelf_set_filepath[current_os]): raise FileNotFoundError( @@ -36,13 +36,33 @@ def generate_shelves(): hou.shelves.newShelfSet(file_path=shelf_set_filepath[current_os]) continue - # if the shelf set name already exists, do nothing, else, create a new one shelf_set_name = shelf_set_config.get('shelf_set_name') + if not shelf_set_name: + log.warning( + "SHELF WARNGING: No name found in shelf set definition." + ) + return + shelf_set = get_or_create_shelf_set(shelf_set_name) - # go through each shelf - # if shelf_file_path exists, load the shelf and return - # if the shelf name already exists, do nothing, else, create a new one + shelves_definition = shelf_set_config.get('shelf_definition') + + if not shelves_definition: + log.warning( + "SHELF WARNING: \ +No shelf definition found for shelf set named '{}'".format(shelf_set_name) + ) + return + + for shelf_definition in shelves_definition: + shelf_name = shelf_definition.get('shelf_name') + if not shelf_name: + log.warning( + "SHELF WARNGING: No name found in shelf set definition." + ) + return + + shelf = get_or_create_shelf(shelf_name) # go through each tool # if filepath exists, load the tool, add it to the shelf and continue @@ -54,10 +74,10 @@ def generate_shelves(): def get_or_create_shelf_set(shelf_set_label): - all_shelves = hou.shelves.shelfSets().values() + all_shelves_sets = hou.shelves.shelfSets().values() shelf_set = [ - shelf for shelf in all_shelves if shelf.label() == shelf_set_label + shelf for shelf in all_shelves_sets if shelf.label() == shelf_set_label ] if shelf_set: @@ -71,8 +91,20 @@ def get_or_create_shelf_set(shelf_set_label): return new_shelf_set -def get_or_create_shelf(): - pass +def get_or_create_shelf(shelf_label): + all_shelves = hou.shelves.shelves().values() + + shelf = [s for s in all_shelves if s.label() == shelf_label] + + if shelf: + return shelf[0] + + shelf_name = shelf_label.replace(' ', '_').lower() + new_shelf = hou.shelves.newShelf( + name=shelf_name, + label=shelf_label + ) + return new_shelf def get_or_create_tool(): From a6ddb2d44b9ec9edb76c2a41f1b471909afabde6 Mon Sep 17 00:00:00 2001 From: Thomas Fricard Date: Thu, 11 Aug 2022 11:44:42 +0200 Subject: [PATCH 016/169] filter mandatory attributes for tool --- openpype/hosts/houdini/api/shelves.py | 15 ++++++++++++--- 1 file changed, 12 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/houdini/api/shelves.py b/openpype/hosts/houdini/api/shelves.py index 76fe0cbd87..a37ec88d64 100644 --- a/openpype/hosts/houdini/api/shelves.py +++ b/openpype/hosts/houdini/api/shelves.py @@ -64,8 +64,17 @@ No shelf definition found for shelf set named '{}'".format(shelf_set_name) shelf = get_or_create_shelf(shelf_name) - # go through each tool - # if filepath exists, load the tool, add it to the shelf and continue + tools = [] + for tool in shelf_definition.get('tools_list'): + mandatory_attributes = ['name', 'script'] + if not all( + [v for k, v in tool.items() if k in mandatory_attributes] + ): + log.warning("TOOLS ERROR: You need to specify at least \ +the name and the script path of the tool.") + return + + tool = get_or_create_tool(tool, shelf) # create the tool # add it to a list of tools @@ -107,5 +116,5 @@ def get_or_create_shelf(shelf_label): return new_shelf -def get_or_create_tool(): +def 
get_or_create_tool(tool_definition, shelf): pass From eab14fc5e9204126554e8be5898010e0db0398ca Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 11 Aug 2022 13:20:27 +0200 Subject: [PATCH 017/169] Include inputVersions with the publish job instance metadata --- .../modules/deadline/plugins/publish/submit_publish_job.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_publish_job.py b/openpype/modules/deadline/plugins/publish/submit_publish_job.py index f05ef31938..2fa7da5dac 100644 --- a/openpype/modules/deadline/plugins/publish/submit_publish_job.py +++ b/openpype/modules/deadline/plugins/publish/submit_publish_job.py @@ -774,7 +774,9 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): "resolutionHeight": data.get("resolutionHeight", 1080), "multipartExr": data.get("multipartExr", False), "jobBatchName": data.get("jobBatchName", ""), - "useSequenceForReview": data.get("useSequenceForReview", True) + "useSequenceForReview": data.get("useSequenceForReview", True), + # map inputVersions `ObjectId` -> `str` so json supports it + "inputVersions": list(map(str, data.get("inputVersions", []))) } # skip locking version if we are creating v01 From 161ae6ef77f0ac0f2017e7b64fdd50331c03592d Mon Sep 17 00:00:00 2001 From: Thomas Fricard Date: Thu, 11 Aug 2022 14:59:52 +0200 Subject: [PATCH 018/169] change key 'name' by 'label' for tool name --- openpype/settings/defaults/project_settings/houdini.json | 2 +- .../projects_schema/schemas/schema_houdini_scriptshelf.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/settings/defaults/project_settings/houdini.json b/openpype/settings/defaults/project_settings/houdini.json index a818f82d6b..78e0d595cf 100644 --- a/openpype/settings/defaults/project_settings/houdini.json +++ b/openpype/settings/defaults/project_settings/houdini.json @@ -12,7 +12,7 @@ "shelf_name": "OpenPype Shelf", "tools_list": [ { - "name": "OpenPype Tool", + "label": "OpenPype Tool", "script": "/path/to/your/tool_script", "icon": "/path/to/your/icon", "help": "Help message for your tool" diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_houdini_scriptshelf.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_houdini_scriptshelf.json index 812ab7d8c9..bab9b604b4 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_houdini_scriptshelf.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_houdini_scriptshelf.json @@ -42,7 +42,7 @@ "children": [ { "type": "text", - "key": "name", + "key": "label", "label": "Name" }, { From 778140b388c57ef8af0c4f69250cebf673dd6e74 Mon Sep 17 00:00:00 2001 From: Thomas Fricard Date: Thu, 11 Aug 2022 16:21:23 +0200 Subject: [PATCH 019/169] add tool creation and adding tool to shelf and shelf to shelf_set --- openpype/hosts/houdini/api/shelves.py | 49 +++++++++++++++++++++------ 1 file changed, 39 insertions(+), 10 deletions(-) diff --git a/openpype/hosts/houdini/api/shelves.py b/openpype/hosts/houdini/api/shelves.py index a37ec88d64..0687e2f519 100644 --- a/openpype/hosts/houdini/api/shelves.py +++ b/openpype/hosts/houdini/api/shelves.py @@ -1,4 +1,3 @@ -from cProfile import label import os import logging import platform @@ -64,22 +63,23 @@ No shelf definition found for shelf set named '{}'".format(shelf_set_name) shelf = get_or_create_shelf(shelf_name) - tools = [] - for tool in shelf_definition.get('tools_list'): + for tool_definition in 
shelf_definition.get('tools_list'): mandatory_attributes = ['name', 'script'] if not all( - [v for k, v in tool.items() if k in mandatory_attributes] + [v for k, v in tool_definition.items() if + k in mandatory_attributes] ): log.warning("TOOLS ERROR: You need to specify at least \ the name and the script path of the tool.") return - tool = get_or_create_tool(tool, shelf) - # create the tool - # add it to a list of tools + tool = get_or_create_tool(tool_definition, shelf) - # add the tools list to the shelf with the tools already in it - # add the shelf to the shelf set with the shelfs already in it + if tool not in shelf.tools(): + shelf.setTools(list(shelf.tools()) + [tool]) + + if shelf not in shelf_set.shelves(): + shelf_set.setShelves(shelf_set.shelves() + (shelf,)) def get_or_create_shelf_set(shelf_set_label): @@ -117,4 +117,33 @@ def get_or_create_shelf(shelf_label): def get_or_create_tool(tool_definition, shelf): - pass + existing_tools = shelf.tools() + tool_label = tool_definition.get('label') + + existing_tool = [ + tool for tool in existing_tools if tool.label() == tool_label + ] + + if existing_tool: + tool_definition.pop('name', None) + tool_definition.pop('label', None) + existing_tool[0].setData(**tool_definition) + return existing_tool[0] + + tool_name = tool_label.replace(' ', '_').lower() + + if not os.path.exists(tool_definition['script']): + log.warning( + "TOOL ERROR: This path doesn't exist - {}".format( + tool_definition['script'] + ) + ) + return + + with open(tool_definition['script']) as f: + script = f.read() + tool_definition.update({'script': script}) + + new_tool = hou.shelves.newTool(name=tool_name, **tool_definition) + + return new_tool From 532432d81739b2996ae94e56b6bf2faf36498dc3 Mon Sep 17 00:00:00 2001 From: Thomas Fricard Date: Thu, 11 Aug 2022 17:57:38 +0200 Subject: [PATCH 020/169] add docstrings --- openpype/hosts/houdini/api/shelves.py | 43 +++++++++++++++++++++++++-- 1 file changed, 41 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/houdini/api/shelves.py b/openpype/hosts/houdini/api/shelves.py index 0687e2f519..bb92aa828e 100644 --- a/openpype/hosts/houdini/api/shelves.py +++ b/openpype/hosts/houdini/api/shelves.py @@ -10,8 +10,15 @@ log = logging.getLogger("openpype.hosts.houdini") def generate_shelves(): + """This function generates complete shelves from shef set to tools + in Houdini from openpype project settings houdini shelf definition. + + Raises: + FileNotFoundError: Raised when the shelf set filepath does not exist + """ current_os = platform.system().lower() - # load configuration of custom menu + + # load configuration of houdini shelves project_settings = get_project_settings(os.getenv("AVALON_PROJECT")) shelves_set_config = project_settings["houdini"]["shelves"] @@ -57,13 +64,15 @@ No shelf definition found for shelf set named '{}'".format(shelf_set_name) shelf_name = shelf_definition.get('shelf_name') if not shelf_name: log.warning( - "SHELF WARNGING: No name found in shelf set definition." + "SHELF WARNGING: No name found in shelf definition." 
) return shelf = get_or_create_shelf(shelf_name) for tool_definition in shelf_definition.get('tools_list'): + # We verify that the name and script attibutes of the tool + # are set mandatory_attributes = ['name', 'script'] if not all( [v for k, v in tool_definition.items() if @@ -75,14 +84,25 @@ the name and the script path of the tool.") tool = get_or_create_tool(tool_definition, shelf) + # Add the tool to the shelf if not already in it if tool not in shelf.tools(): shelf.setTools(list(shelf.tools()) + [tool]) + # Add the shelf in the shelf set if not already in it if shelf not in shelf_set.shelves(): shelf_set.setShelves(shelf_set.shelves() + (shelf,)) def get_or_create_shelf_set(shelf_set_label): + """This function verifies if the shelf set label exists. If not, + creates a new shelf set. + + Arguments: + shelf_set_label {str} -- The label of the shelf set + + Returns: + hou.ShelfSet -- The shelf set existing or the new one + """ all_shelves_sets = hou.shelves.shelfSets().values() shelf_set = [ @@ -101,6 +121,15 @@ def get_or_create_shelf_set(shelf_set_label): def get_or_create_shelf(shelf_label): + """This function verifies if the shelf label exists. If not, creates + a new shelf. + + Arguments: + shelf_label {str} -- The label of the shelf + + Returns: + hou.Shelf -- The shelf existing or the new one + """ all_shelves = hou.shelves.shelves().values() shelf = [s for s in all_shelves if s.label() == shelf_label] @@ -117,6 +146,16 @@ def get_or_create_shelf(shelf_label): def get_or_create_tool(tool_definition, shelf): + """This function verifies if the tool exsist and update it. If not, creates + a new one. + + Arguments: + tool_definition {dict} -- Dict with label, script, icon and help + shelf {hou.Shelf} -- The parent shelf of the tool + + Returns: + hou.Tool -- The tool updated or the new one + """ existing_tools = shelf.tools() tool_label = tool_definition.get('label') From 74161e931e17a736c3dae6ae24678f9db4d497d0 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 11 Aug 2022 18:02:47 +0200 Subject: [PATCH 021/169] Show whether a subset is loaded into the current scene --- openpype/tools/loader/model.py | 157 +++++++++++++++++++------------ openpype/tools/loader/widgets.py | 3 +- 2 files changed, 101 insertions(+), 59 deletions(-) diff --git a/openpype/tools/loader/model.py b/openpype/tools/loader/model.py index a5174bd804..3130f879df 100644 --- a/openpype/tools/loader/model.py +++ b/openpype/tools/loader/model.py @@ -14,9 +14,11 @@ from openpype.client import ( get_versions, get_hero_versions, get_version_by_name, - get_representations + get_representations, + get_representations_parents ) from openpype.pipeline import ( + registered_host, HeroVersionType, schema, ) @@ -136,7 +138,8 @@ class SubsetsModel(TreeModel, BaseRepresentationModel): "duration", "handles", "step", - "repre_info" + "repre_info", + "loaded_in_scene" ] column_labels_mapping = { @@ -150,7 +153,8 @@ class SubsetsModel(TreeModel, BaseRepresentationModel): "duration": "Duration", "handles": "Handles", "step": "Step", - "repre_info": "Availability" + "repre_info": "Availability", + "loaded_in_scene": "In scene" } SortAscendingRole = QtCore.Qt.UserRole + 2 @@ -231,8 +235,14 @@ class SubsetsModel(TreeModel, BaseRepresentationModel): self._doc_fetching_stop = False self._doc_payload = {} - self.doc_fetched.connect(self._on_doc_fetched) + self._host = registered_host() + self._loaded_representation_ids = set() + # Refresh loaded scene containers only every 3 seconds at most + self._host_loaded_refresh_timeout = 3 + 
self._host_loaded_refresh_time = 0 + + self.doc_fetched.connect(self._on_doc_fetched) self.refresh() def get_item_by_id(self, item_id): @@ -472,6 +482,29 @@ class SubsetsModel(TreeModel, BaseRepresentationModel): last_versions_by_subset_id[subset_id] = hero_version + # Check loaded subsets + subsets_loaded_by_id = set() + ids = self._loaded_representation_ids + if ids: + if self._doc_fetching_stop: + return + + # Get subsets from representations + # todo: optimize with aggregation query to distinct subset id + representations = get_representations(project_name, + representation_ids=ids, + fields=["parent"]) + parents_by_repre_id = get_representations_parents( + project_name, + representations=representations + ) + for repre_id, repre_parents in parents_by_repre_id.items(): + _, repre_subset, _, _ = repre_parents + subsets_loaded_by_id.add(repre_subset["_id"]) + + if self._doc_fetching_stop: + return + repre_info = {} if self.sync_server.enabled: version_ids = set() @@ -494,7 +527,8 @@ class SubsetsModel(TreeModel, BaseRepresentationModel): "subset_docs_by_id": subset_docs_by_id, "subset_families": subset_families, "last_versions_by_subset_id": last_versions_by_subset_id, - "repre_info_by_version_id": repre_info + "repre_info_by_version_id": repre_info, + "subsets_loaded_by_id": subsets_loaded_by_id } self.doc_fetched.emit() @@ -526,6 +560,17 @@ class SubsetsModel(TreeModel, BaseRepresentationModel): self.doc_fetched.emit() return + # Collect scene container representations to compare loaded state + # This runs in the main thread because it involves the host DCC + if self._host: + time_since_refresh = time.time() - self._host_loaded_refresh_time + print(time_since_refresh) + if time_since_refresh > self._host_loaded_refresh_timeout: + repre_ids = {con.get("representation") + for con in self._host.ls()} + self._loaded_representation_ids = repre_ids + self._host_loaded_refresh_time = time.time() + self.fetch_subset_and_version() def _on_doc_fetched(self): @@ -547,6 +592,10 @@ class SubsetsModel(TreeModel, BaseRepresentationModel): "repre_info_by_version_id" ) + subsets_loaded_by_id = self._doc_payload.get( + "subsets_loaded_by_id" + ) + if ( asset_docs_by_id is None or subset_docs_by_id is None @@ -561,7 +610,8 @@ class SubsetsModel(TreeModel, BaseRepresentationModel): asset_docs_by_id, subset_docs_by_id, last_versions_by_subset_id, - repre_info_by_version_id + repre_info_by_version_id, + subsets_loaded_by_id ) self.endResetModel() self.refreshed.emit(True) @@ -589,8 +639,12 @@ class SubsetsModel(TreeModel, BaseRepresentationModel): return merge_group def _fill_subset_items( - self, asset_docs_by_id, subset_docs_by_id, last_versions_by_subset_id, - repre_info_by_version_id + self, + asset_docs_by_id, + subset_docs_by_id, + last_versions_by_subset_id, + repre_info_by_version_id, + subsets_loaded_by_id ): _groups_tuple = self.groups_config.split_subsets_for_groups( subset_docs_by_id.values(), self._grouping @@ -614,6 +668,37 @@ class SubsetsModel(TreeModel, BaseRepresentationModel): "index": self.index(group_item.row(), 0) } + def _add_subset_item(subset_doc, parent_item, parent_index): + last_version = last_versions_by_subset_id.get( + subset_doc["_id"] + ) + # do not show subset without version + if not last_version: + return + + data = copy.deepcopy(subset_doc) + data["subset"] = subset_doc["name"] + + asset_id = subset_doc["parent"] + data["asset"] = asset_docs_by_id[asset_id]["name"] + + data["last_version"] = last_version + + loaded = subset_doc["_id"] in subsets_loaded_by_id + 
data["loaded_in_scene"] = "yes" if loaded else "no" + + # Sync server data + data.update( + self._get_last_repre_info(repre_info_by_version_id, + last_version["_id"])) + + item = Item() + item.update(data) + self.add_child(item, parent_item) + + index = self.index(item.row(), 0, parent_index) + self.set_version(index, last_version) + subset_counter = 0 for group_name, subset_docs_by_name in subset_docs_by_group.items(): parent_item = group_item_by_name[group_name]["item"] @@ -636,31 +721,9 @@ class SubsetsModel(TreeModel, BaseRepresentationModel): _parent_index = parent_index for subset_doc in subset_docs: - asset_id = subset_doc["parent"] - - data = copy.deepcopy(subset_doc) - data["subset"] = subset_name - data["asset"] = asset_docs_by_id[asset_id]["name"] - - last_version = last_versions_by_subset_id.get( - subset_doc["_id"] - ) - data["last_version"] = last_version - - # do not show subset without version - if not last_version: - continue - - data.update( - self._get_last_repre_info(repre_info_by_version_id, - last_version["_id"])) - - item = Item() - item.update(data) - self.add_child(item, _parent_item) - - index = self.index(item.row(), 0, _parent_index) - self.set_version(index, last_version) + _add_subset_item(subset_doc, + parent_item=_parent_item, + parent_index=_parent_index) for subset_name in sorted(subset_docs_without_group.keys()): subset_docs = subset_docs_without_group[subset_name] @@ -675,31 +738,9 @@ class SubsetsModel(TreeModel, BaseRepresentationModel): subset_counter += 1 for subset_doc in subset_docs: - asset_id = subset_doc["parent"] - - data = copy.deepcopy(subset_doc) - data["subset"] = subset_name - data["asset"] = asset_docs_by_id[asset_id]["name"] - - last_version = last_versions_by_subset_id.get( - subset_doc["_id"] - ) - data["last_version"] = last_version - - # do not show subset without version - if not last_version: - continue - - data.update( - self._get_last_repre_info(repre_info_by_version_id, - last_version["_id"])) - - item = Item() - item.update(data) - self.add_child(item, parent_item) - - index = self.index(item.row(), 0, parent_index) - self.set_version(index, last_version) + _add_subset_item(subset_doc, + parent_item=parent_item, + parent_index=parent_index) def data(self, index, role): if not index.isValid(): diff --git a/openpype/tools/loader/widgets.py b/openpype/tools/loader/widgets.py index 48c038418a..3c4a89aa0f 100644 --- a/openpype/tools/loader/widgets.py +++ b/openpype/tools/loader/widgets.py @@ -168,7 +168,8 @@ class SubsetWidget(QtWidgets.QWidget): ("duration", 60), ("handles", 55), ("step", 10), - ("repre_info", 65) + ("repre_info", 65), + ("loaded_in_scene", 20) ) def __init__( From a07650226a3fe54eb3becfbc16881acf7e8ae6cf Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 11 Aug 2022 18:09:19 +0200 Subject: [PATCH 022/169] Remove unused variables --- openpype/tools/loader/model.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/tools/loader/model.py b/openpype/tools/loader/model.py index 3130f879df..6cb9ba2c6d 100644 --- a/openpype/tools/loader/model.py +++ b/openpype/tools/loader/model.py @@ -498,8 +498,8 @@ class SubsetsModel(TreeModel, BaseRepresentationModel): project_name, representations=representations ) - for repre_id, repre_parents in parents_by_repre_id.items(): - _, repre_subset, _, _ = repre_parents + for repre_parents in parents_by_repre_id.values(): + repre_subset = repre_parents[1] subsets_loaded_by_id.add(repre_subset["_id"]) if self._doc_fetching_stop: From 
56dad829047afc22ea61d5281660f18b47e9b585 Mon Sep 17 00:00:00 2001 From: Thomas Fricard Date: Thu, 11 Aug 2022 18:13:18 +0200 Subject: [PATCH 023/169] admin docs for houdini shelves manager --- website/docs/admin_hosts_houdini.md | 11 +++++++++++ .../assets/houdini-admin_shelvesmanager.png | Bin 0 -> 28464 bytes website/sidebars.js | 5 +++-- 3 files changed, 14 insertions(+), 2 deletions(-) create mode 100644 website/docs/admin_hosts_houdini.md create mode 100644 website/docs/assets/houdini-admin_shelvesmanager.png diff --git a/website/docs/admin_hosts_houdini.md b/website/docs/admin_hosts_houdini.md new file mode 100644 index 0000000000..64c54db591 --- /dev/null +++ b/website/docs/admin_hosts_houdini.md @@ -0,0 +1,11 @@ +--- +id: admin_hosts_houdini +title: Houdini +sidebar_label: Houdini +--- + +## Shelves Manager +You can add your custom shelf set into Houdini by setting your shelf sets, shelves and tools in **Houdini -> Shelves Manager**. +![Custom menu definition](assets/houdini-admin_shelvesmanager.png) + +The Shelf Set Path is used to load a .shelf file to generate your shelf set. If the path is specified, you don't have to set the shelves and tools. \ No newline at end of file diff --git a/website/docs/assets/houdini-admin_shelvesmanager.png b/website/docs/assets/houdini-admin_shelvesmanager.png new file mode 100644 index 0000000000000000000000000000000000000000..ba2f15a6a5eb5af0286a4f1485ddd97b351beab6 GIT binary patch literal 28464 zcmce;byQqmwlzwW;1Dc9gF6Iw2^8*5p+PG^fZ$#P5}@$l!9$_JgL{Qw!QCxTxVy_c z^sl?$?Y@2Qcz=A~V~mWlbM`rB@3r@yYtFe6qNXB?g-(Kwf`Wo2F9*^=?(&^{9d{Enj^zieS?OdPvQtu}F)vhC-j6K%#X^tZiZqpdePF&z-BxIRO1-hOJ zp#tyM@zj9(Lis5EZ54Py@l)e*5Q-L?aDV*@Qf1=&0Qf~^V8^M-?$z$U9465R{DgA* zG5Hbja{Yk!zNvF5i~-=WP*75d!7-0p=oE~>F)tGq^#V1%TTP13PXBcdF(`pHZ-l<* z&>^o}_Yelx%^Koo7cQ&5?<`^vt|8VF*+<}nCyO`^hyOSsu92-K%nUgaSu8keGiK!8 zucQ=ylx8f(h79dtsG;g_3rSSUfC}On`8eu#WGfDg?o8X3DygWv)64}L9h4~_J*My6 za0A=;OiQ0SU%Rn&M#eJ|Kzi$5A=b4lb22o5Ykxd)q~Mi(zb(-;Kls6>0vr|^YQT${ zr&g#9YAY{tXp$|Rh#*awXJH++sE0Mi?}XFASDx6R zuXW~$J#0!}Jtq9U$9Rn6rB7$qLZ4WfOoCuO#+Ys0@4%YLN!Au(Xgs|gM zM!=`OKG`Qvk_1!^dN?i^R1#!lj=5?^88}LRZn)IV*%#}~Tw>gZ#GNqWhOD`~)qU_% z-Sy-oCuapj`|M%sYs}U%R_99yrW7cKwkuo-)SPkH_{&`h#MRY|%_c+i7zdBnTjV~9 zgwl@YrM!&CWWp@K>}+g%3kU~oWo4h)S9z^q1A{#S(U+;I`D6LMV={KiX^f$tV)3;V zv@^cWV+S8STzdixBno=(m0-2Qf@KxBGlINW>rmW&o<4LbI;*p%hb1T%dwjF9IfdH3 zrHRE`_1!x;C#PROvgjUk;RDy6#)wT7%9S4pIj(E|^gyuiCJh_-*2UgFu~0s2PRBjB zU#O|eUCr3>`5qN;DHOl|I1L`z+@H0NI!Xq-{K3f=1YUkp{_8aRk0(lDAbOhJ$I%!3 zXAs|klO*_quKt%c-kIWTcnbb~vYrI4-p`=4^qCc#8Hw37;QjyU+iMNHjjTCsdtF5X z5qxU&>vuMD55I%wcdy~rnC=&+W4|ge%jrzi`JJpr8!BE5jZ|CHRb_7H^1~RNc1-kS zdGWNP{C-ZkSBl-SXr5l#g`oj=sO$p2A}d+XJk7SQU$P6jG5NrUL8Ax)(OyB^WDbS0 zf^nmJKkVic$PnFda{wKJGo*edXl9POc@h+)nbN1PrR8%<71JRHN|s7e=*bL{Nz&G% z7ia$5X&t%8=U}8#{$8DMXV(@767$PX^`#V>ftCYE6L)o`QX3l@l zrelufGp5#PVv>8&GF!0Sr_xG#6FL?TZENUn97WQBeqC}$*}QdZdm`#3i+(ARxgupMEV!#=J3t*l3nDjd7!bQSf0LC8Ha7A3R%Z`>ey`s)#l}lKKo#^|u}NJ!f%x#| zC^<_wrMHAt{7)o_5+-s}RLOaX*i6tw*u==}_YKc-107c*KkVDxvmh8Z}Fi?_ZA@?nl zGrAceZI43F8{GvS86qsf44xz-&rMx}B`qqKkA?09jtdaUNePn%Vd^0vi=b>y`Nrgf zdY_2UOO2gq^7(J`;X(4u0&kuZ>Q83Sz)OsQc3(ik9WuOXE$zAOa- z0bSQOW8Cy5iUbAao8tOU(d@sif!$Q8eb29v<(GK;U{n*f?-8K3dOJ|tZ!WwpF60)` z=?OA?LK#%_r>ry5;YDF3K?7WKZb-@{Fa>4`lk@%%=D0wNbs&W>dLNbXUO8zuQVHF} zd%?j#1_paks;CXNh{|A1rq*U~Vmk4nrVjc}F05-*-g$k|!;4RGX!FT84csCRIyTU$ zu9Yuo&hql*%z%lyo^ZF};*AXYM}tiAS1)j*f2q%P;{Yq~H^r?<(QKW<;*%%Ayp~>e zm+!r=X{p}h(oDW|$9SZ@5(u&g%V6D$BCK;xrZI|s3im&Tw`kqjLuE3i<^NilfYORJmgMp9^`xHV++ zJ#{c1Kc@#hvb1zm8&4?_`yQ`JG;PknvymkjL-#$Rait92oC*slpIX{X%Ys&;WE 
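For reference, the shelf set configuration that the Shelves Manager documentation above describes lives in the houdini "shelves" project setting consumed by generate_shelves() later in this series. A minimal sketch of that structure, assuming the keys shown in the houdini.json defaults and shelves.py diffs below; all paths, labels and help text are placeholders:

    # Illustrative sketch only - key names follow the "shelves" setting read by
    # generate_shelves(); paths, labels and help text are placeholders.
    houdini_shelves_setting = [
        {
            "shelf_set_name": "Studio Shelf Set",
            # Optional: load a ready-made .shelf file per platform instead of
            # defining shelves and tools below.
            "shelf_set_source_path": {
                "windows": "",
                "darwin": "",
                "linux": ""
            },
            "shelf_definition": [
                {
                    "shelf_name": "Studio Tools",
                    "tools_list": [
                        {
                            "label": "Example Tool",
                            "script": "/path/to/tool_script.py",
                            "icon": "",
                            "help": "Example tool created from project settings"
                        }
                    ]
                }
            ]
        }
    ]

If shelf_set_source_path is filled in for the current platform, the .shelf file is loaded directly and shelf_definition can stay empty; otherwise each tool's script must point to an existing file, which shelves.py checks with os.path.exists().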
diff --git
a/website/sidebars.js b/website/sidebars.js index 9d60a5811c..6ccfc42180 100644 --- a/website/sidebars.js +++ b/website/sidebars.js @@ -101,6 +101,7 @@ module.exports = { items: [ "admin_hosts_blender", "admin_hosts_hiero", + "admin_hosts_houdini", "admin_hosts_maya", "admin_hosts_nuke", "admin_hosts_resolve", @@ -146,7 +147,7 @@ module.exports = { ], }, ], - Dev: [ + Dev: [ "dev_introduction", "dev_requirements", "dev_build", @@ -160,5 +161,5 @@ module.exports = { "dev_publishing" ] } - ] + ] }; From b3517a2da945b72b6b13eff292fa7d6ed63861b0 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Fri, 12 Aug 2022 09:47:21 +0200 Subject: [PATCH 024/169] Remove print statement --- openpype/tools/loader/model.py | 1 - 1 file changed, 1 deletion(-) diff --git a/openpype/tools/loader/model.py b/openpype/tools/loader/model.py index 6cb9ba2c6d..d9b1c708e0 100644 --- a/openpype/tools/loader/model.py +++ b/openpype/tools/loader/model.py @@ -564,7 +564,6 @@ class SubsetsModel(TreeModel, BaseRepresentationModel): # This runs in the main thread because it involves the host DCC if self._host: time_since_refresh = time.time() - self._host_loaded_refresh_time - print(time_since_refresh) if time_since_refresh > self._host_loaded_refresh_timeout: repre_ids = {con.get("representation") for con in self._host.ls()} From ca40a71f5c33a1f557039661bbbdd8db5d22738b Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Fri, 12 Aug 2022 09:48:52 +0200 Subject: [PATCH 025/169] Reduce queries to get loaded subset ids --- openpype/tools/loader/model.py | 18 ++++++++---------- 1 file changed, 8 insertions(+), 10 deletions(-) diff --git a/openpype/tools/loader/model.py b/openpype/tools/loader/model.py index d9b1c708e0..9d1f1e045c 100644 --- a/openpype/tools/loader/model.py +++ b/openpype/tools/loader/model.py @@ -483,24 +483,22 @@ class SubsetsModel(TreeModel, BaseRepresentationModel): last_versions_by_subset_id[subset_id] = hero_version # Check loaded subsets - subsets_loaded_by_id = set() + loaded_subset_ids = set() ids = self._loaded_representation_ids if ids: if self._doc_fetching_stop: return - # Get subsets from representations + # Get subset ids from loaded representations in workfile # todo: optimize with aggregation query to distinct subset id representations = get_representations(project_name, representation_ids=ids, fields=["parent"]) - parents_by_repre_id = get_representations_parents( - project_name, - representations=representations - ) - for repre_parents in parents_by_repre_id.values(): - repre_subset = repre_parents[1] - subsets_loaded_by_id.add(repre_subset["_id"]) + version_ids = set(repre["parent"] for repre in representations) + versions = get_versions(project_name, + version_ids=version_ids, + fields=["parent"]) + loaded_subset_ids = set(version["parent"] for version in versions) if self._doc_fetching_stop: return @@ -528,7 +526,7 @@ class SubsetsModel(TreeModel, BaseRepresentationModel): "subset_families": subset_families, "last_versions_by_subset_id": last_versions_by_subset_id, "repre_info_by_version_id": repre_info, - "subsets_loaded_by_id": subsets_loaded_by_id + "subsets_loaded_by_id": loaded_subset_ids } self.doc_fetched.emit() From e48eb3ba47f9afc41ce3c4c06b6e7ffb36746f89 Mon Sep 17 00:00:00 2001 From: Thomas Fricard Date: Tue, 16 Aug 2022 11:15:01 +0200 Subject: [PATCH 026/169] remove create_shelf function since it is no longer needed --- openpype/hosts/houdini/api/lib.py | 32 ------------------------------- 1 file changed, 32 deletions(-) diff --git a/openpype/hosts/houdini/api/lib.py 
b/openpype/hosts/houdini/api/lib.py index 55832abeb3..c8a7f92bb9 100644 --- a/openpype/hosts/houdini/api/lib.py +++ b/openpype/hosts/houdini/api/lib.py @@ -460,35 +460,3 @@ def reset_framerange(): hou.playbar.setFrameRange(frame_start, frame_end) hou.playbar.setPlaybackRange(frame_start, frame_end) hou.setFrame(frame_start) - - -def create_shelf(): - hou.shelves.beginChangeBlock() - - custom_shelf = hou.shelves.newShelf( - file_path='', - name="custom_shelf", - label="Custom Shelf" - ) - - new_tool = hou.shelves.newTool( - file_path='', - name='new_tool', - label='New Tool', - script='', - language=hou.scriptLanguage.Python, - icon='', - help='This is a new tool' - ) - - if new_tool not in custom_shelf.tools(): - custom_shelf.setTools(list(custom_shelf.tools()) + [new_tool]) - - shelf_set = [ - shelf for shelf in hou.shelves.shelfSets().values() - if shelf.label() == "Create and Refine" - ][0] - - shelf_set.setShelves(shelf_set.shelves() + (custom_shelf,)) - - hou.shelves.endChangeBlock() From c9f60bb848b81f9b4c095281cfae3c3d27e8d652 Mon Sep 17 00:00:00 2001 From: Thomas Fricard Date: Tue, 16 Aug 2022 11:19:50 +0200 Subject: [PATCH 027/169] remove invalid default values --- .../defaults/project_settings/houdini.json | 17 ++--------------- 1 file changed, 2 insertions(+), 15 deletions(-) diff --git a/openpype/settings/defaults/project_settings/houdini.json b/openpype/settings/defaults/project_settings/houdini.json index 78e0d595cf..43d2ad132a 100644 --- a/openpype/settings/defaults/project_settings/houdini.json +++ b/openpype/settings/defaults/project_settings/houdini.json @@ -5,21 +5,8 @@ "shelf_set_source_path": { "windows": "", "darwin": "", - "linux": "/path/to/your/shelf_set_file" - }, - "shelf_definition": [ - { - "shelf_name": "OpenPype Shelf", - "tools_list": [ - { - "label": "OpenPype Tool", - "script": "/path/to/your/tool_script", - "icon": "/path/to/your/icon", - "help": "Help message for your tool" - } - ] - } - ] + "linux": "" + } } ], "create": { From 9c7bcb84aa42a2f2c083c856ae421a9d264f32dc Mon Sep 17 00:00:00 2001 From: Thomas Fricard Date: Tue, 16 Aug 2022 12:25:05 +0200 Subject: [PATCH 028/169] fix typo and tool creation --- openpype/hosts/houdini/api/shelves.py | 33 +++++++++++++++++---------- 1 file changed, 21 insertions(+), 12 deletions(-) diff --git a/openpype/hosts/houdini/api/shelves.py b/openpype/hosts/houdini/api/shelves.py index bb92aa828e..d9a3a34da6 100644 --- a/openpype/hosts/houdini/api/shelves.py +++ b/openpype/hosts/houdini/api/shelves.py @@ -10,7 +10,7 @@ log = logging.getLogger("openpype.hosts.houdini") def generate_shelves(): - """This function generates complete shelves from shef set to tools + """This function generates complete shelves from shelf set to tools in Houdini from openpype project settings houdini shelf definition. Raises: @@ -23,8 +23,8 @@ def generate_shelves(): shelves_set_config = project_settings["houdini"]["shelves"] if not shelves_set_config: - log.warning( - "SHELF WARNGING: No custom shelves found in project settings." + log.info( + "SHELF INFO: No custom shelves found in project settings." ) return @@ -45,7 +45,7 @@ def generate_shelves(): shelf_set_name = shelf_set_config.get('shelf_set_name') if not shelf_set_name: log.warning( - "SHELF WARNGING: No name found in shelf set definition." + "SHELF WARNING: No name found in shelf set definition." 
) return @@ -54,8 +54,8 @@ def generate_shelves(): shelves_definition = shelf_set_config.get('shelf_definition') if not shelves_definition: - log.warning( - "SHELF WARNING: \ + log.info( + "SHELF INFO: \ No shelf definition found for shelf set named '{}'".format(shelf_set_name) ) return @@ -64,26 +64,34 @@ No shelf definition found for shelf set named '{}'".format(shelf_set_name) shelf_name = shelf_definition.get('shelf_name') if not shelf_name: log.warning( - "SHELF WARNGING: No name found in shelf definition." + "SHELF WARNING: No name found in shelf definition." ) return shelf = get_or_create_shelf(shelf_name) + if not shelf_definition.get('tools_list'): + log.warning("TOOLS INFO: No tool definition found for \ +shelf named {}".format(shelf_name)) + return + + mandatory_attributes = ['name', 'script'] for tool_definition in shelf_definition.get('tools_list'): # We verify that the name and script attibutes of the tool # are set - mandatory_attributes = ['name', 'script'] if not all( [v for k, v in tool_definition.items() if k in mandatory_attributes] ): log.warning("TOOLS ERROR: You need to specify at least \ the name and the script path of the tool.") - return + continue tool = get_or_create_tool(tool_definition, shelf) + if not tool: + return + # Add the tool to the shelf if not already in it if tool not in shelf.tools(): shelf.setTools(list(shelf.tools()) + [tool]) @@ -105,12 +113,12 @@ def get_or_create_shelf_set(shelf_set_label): """ all_shelves_sets = hou.shelves.shelfSets().values() - shelf_set = [ + shelf_sets = [ shelf for shelf in all_shelves_sets if shelf.label() == shelf_set_label ] - if shelf_set: - return shelf_set[0] + if shelf_sets: + return shelf_sets[0] shelf_set_name = shelf_set_label.replace(' ', '_').lower() new_shelf_set = hou.shelves.newShelfSet( @@ -170,6 +178,7 @@ def get_or_create_tool(tool_definition, shelf): return existing_tool[0] tool_name = tool_label.replace(' ', '_').lower() + log.warning(tool_definition) if not os.path.exists(tool_definition['script']): log.warning( From f5d7634e007d4e9a27f76b7abb693daa7b9ba055 Mon Sep 17 00:00:00 2001 From: Thomas Fricard Date: Tue, 16 Aug 2022 14:15:08 +0200 Subject: [PATCH 029/169] change tools mandatory attributes to set type and iterate only on those attributes --- openpype/hosts/houdini/api/shelves.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/houdini/api/shelves.py b/openpype/hosts/houdini/api/shelves.py index d9a3a34da6..498fffc7cd 100644 --- a/openpype/hosts/houdini/api/shelves.py +++ b/openpype/hosts/houdini/api/shelves.py @@ -75,13 +75,12 @@ No shelf definition found for shelf set named '{}'".format(shelf_set_name) shelf named {}".format(shelf_name)) return - mandatory_attributes = ['name', 'script'] + mandatory_attributes = {'name', 'script'} for tool_definition in shelf_definition.get('tools_list'): # We verify that the name and script attibutes of the tool # are set if not all( - [v for k, v in tool_definition.items() if - k in mandatory_attributes] + tool_definition[key] for key in mandatory_attributes ): log.warning("TOOLS ERROR: You need to specify at least \ the name and the script path of the tool.") From e0abb7245c231d2cabec782b1172b9257fd096da Mon Sep 17 00:00:00 2001 From: Thomas Fricard Date: Tue, 16 Aug 2022 14:34:15 +0200 Subject: [PATCH 030/169] fix type and docstring style to match OpenPype's --- openpype/hosts/houdini/api/pipeline.py | 1 - openpype/hosts/houdini/api/shelves.py | 5 ++--- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git 
a/openpype/hosts/houdini/api/pipeline.py b/openpype/hosts/houdini/api/pipeline.py index f809f0ce56..d7a8135d86 100644 --- a/openpype/hosts/houdini/api/pipeline.py +++ b/openpype/hosts/houdini/api/pipeline.py @@ -310,7 +310,6 @@ def _set_context_settings(): fps resolution renderer - shelves Returns: None diff --git a/openpype/hosts/houdini/api/shelves.py b/openpype/hosts/houdini/api/shelves.py index 498fffc7cd..725d162980 100644 --- a/openpype/hosts/houdini/api/shelves.py +++ b/openpype/hosts/houdini/api/shelves.py @@ -105,7 +105,7 @@ def get_or_create_shelf_set(shelf_set_label): creates a new shelf set. Arguments: - shelf_set_label {str} -- The label of the shelf set + shelf_set_label (str) -- The label of the shelf set Returns: hou.ShelfSet -- The shelf set existing or the new one @@ -153,7 +153,7 @@ def get_or_create_shelf(shelf_label): def get_or_create_tool(tool_definition, shelf): - """This function verifies if the tool exsist and update it. If not, creates + """This function verifies if the tool exsists and updates it. If not, creates a new one. Arguments: @@ -177,7 +177,6 @@ def get_or_create_tool(tool_definition, shelf): return existing_tool[0] tool_name = tool_label.replace(' ', '_').lower() - log.warning(tool_definition) if not os.path.exists(tool_definition['script']): log.warning( From 129a38ebc0204fc9d6777a1b876d1f882fc929f4 Mon Sep 17 00:00:00 2001 From: Thomas Fricard Date: Tue, 16 Aug 2022 14:34:15 +0200 Subject: [PATCH 031/169] fix type and docstring style to match OpenPype's --- openpype/hosts/houdini/api/pipeline.py | 1 - openpype/hosts/houdini/api/shelves.py | 17 ++++++++--------- 2 files changed, 8 insertions(+), 10 deletions(-) diff --git a/openpype/hosts/houdini/api/pipeline.py b/openpype/hosts/houdini/api/pipeline.py index f809f0ce56..d7a8135d86 100644 --- a/openpype/hosts/houdini/api/pipeline.py +++ b/openpype/hosts/houdini/api/pipeline.py @@ -310,7 +310,6 @@ def _set_context_settings(): fps resolution renderer - shelves Returns: None diff --git a/openpype/hosts/houdini/api/shelves.py b/openpype/hosts/houdini/api/shelves.py index 498fffc7cd..ba3fcc2af9 100644 --- a/openpype/hosts/houdini/api/shelves.py +++ b/openpype/hosts/houdini/api/shelves.py @@ -105,10 +105,10 @@ def get_or_create_shelf_set(shelf_set_label): creates a new shelf set. Arguments: - shelf_set_label {str} -- The label of the shelf set + shelf_set_label (str): The label of the shelf set Returns: - hou.ShelfSet -- The shelf set existing or the new one + hou.ShelfSet: The shelf set existing or the new one """ all_shelves_sets = hou.shelves.shelfSets().values() @@ -132,10 +132,10 @@ def get_or_create_shelf(shelf_label): a new shelf. Arguments: - shelf_label {str} -- The label of the shelf + shelf_label (str): The label of the shelf Returns: - hou.Shelf -- The shelf existing or the new one + hou.Shelf: The shelf existing or the new one """ all_shelves = hou.shelves.shelves().values() @@ -153,15 +153,15 @@ def get_or_create_shelf(shelf_label): def get_or_create_tool(tool_definition, shelf): - """This function verifies if the tool exsist and update it. If not, creates + """This function verifies if the tool exsists and updates it. If not, creates a new one. 
Arguments: - tool_definition {dict} -- Dict with label, script, icon and help - shelf {hou.Shelf} -- The parent shelf of the tool + tool_definition (dict): Dict with label, script, icon and help + shelf (hou.Shelf): The parent shelf of the tool Returns: - hou.Tool -- The tool updated or the new one + hou.Tool: The tool updated or the new one """ existing_tools = shelf.tools() tool_label = tool_definition.get('label') @@ -177,7 +177,6 @@ def get_or_create_tool(tool_definition, shelf): return existing_tool[0] tool_name = tool_label.replace(' ', '_').lower() - log.warning(tool_definition) if not os.path.exists(tool_definition['script']): log.warning( From 4d61eec9952b61331c32b89d66fae35de26d13b4 Mon Sep 17 00:00:00 2001 From: Thomas Fricard <51854004+friquette@users.noreply.github.com> Date: Wed, 17 Aug 2022 10:12:43 +0200 Subject: [PATCH 032/169] fix typo Co-authored-by: Roy Nieterau --- openpype/hosts/houdini/api/shelves.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/houdini/api/shelves.py b/openpype/hosts/houdini/api/shelves.py index ba3fcc2af9..a802d70457 100644 --- a/openpype/hosts/houdini/api/shelves.py +++ b/openpype/hosts/houdini/api/shelves.py @@ -153,7 +153,7 @@ def get_or_create_shelf(shelf_label): def get_or_create_tool(tool_definition, shelf): - """This function verifies if the tool exsists and updates it. If not, creates + """This function verifies if the tool exists and updates it. If not, creates a new one. Arguments: From ee4ad799902f313a98ac1e4ab1403617e2d7d4bf Mon Sep 17 00:00:00 2001 From: Thomas Fricard Date: Wed, 17 Aug 2022 11:48:29 +0200 Subject: [PATCH 033/169] change logs messages --- openpype/hosts/houdini/api/shelves.py | 35 +++++++++++++++------------ 1 file changed, 20 insertions(+), 15 deletions(-) diff --git a/openpype/hosts/houdini/api/shelves.py b/openpype/hosts/houdini/api/shelves.py index ba3fcc2af9..805ce4c397 100644 --- a/openpype/hosts/houdini/api/shelves.py +++ b/openpype/hosts/houdini/api/shelves.py @@ -6,7 +6,7 @@ from openpype.settings import get_project_settings import hou -log = logging.getLogger("openpype.hosts.houdini") +log = logging.getLogger("openpype.hosts.houdini.shelves") def generate_shelves(): @@ -23,8 +23,8 @@ def generate_shelves(): shelves_set_config = project_settings["houdini"]["shelves"] if not shelves_set_config: - log.info( - "SHELF INFO: No custom shelves found in project settings." + log.debug( + "No custom shelves found in project settings." ) return @@ -34,7 +34,7 @@ def generate_shelves(): if shelf_set_filepath[current_os]: if not os.path.isfile(shelf_set_filepath[current_os]): raise FileNotFoundError( - "SHELF ERROR: This path doesn't exist - {}".format( + "This path doesn't exist - {}".format( shelf_set_filepath[current_os] ) ) @@ -45,7 +45,7 @@ def generate_shelves(): shelf_set_name = shelf_set_config.get('shelf_set_name') if not shelf_set_name: log.warning( - "SHELF WARNING: No name found in shelf set definition." + "No name found in shelf set definition." 
) return @@ -54,9 +54,10 @@ def generate_shelves(): shelves_definition = shelf_set_config.get('shelf_definition') if not shelves_definition: - log.info( - "SHELF INFO: \ -No shelf definition found for shelf set named '{}'".format(shelf_set_name) + log.debug( + "No shelf definition found for shelf set named '{}'".format( + shelf_set_name + ) ) return @@ -64,15 +65,18 @@ No shelf definition found for shelf set named '{}'".format(shelf_set_name) shelf_name = shelf_definition.get('shelf_name') if not shelf_name: log.warning( - "SHELF WARNING: No name found in shelf definition." + "No name found in shelf definition." ) return shelf = get_or_create_shelf(shelf_name) if not shelf_definition.get('tools_list'): - log.warning("TOOLS INFO: No tool definition found for \ -shelf named {}".format(shelf_name)) + log.debug( + "No tool definition found for shelf named {}".format( + shelf_name + ) + ) return mandatory_attributes = {'name', 'script'} @@ -82,8 +86,9 @@ shelf named {}".format(shelf_name)) if not all( tool_definition[key] for key in mandatory_attributes ): - log.warning("TOOLS ERROR: You need to specify at least \ -the name and the script path of the tool.") + log.warning( + "You need to specify at least the name and \ +the script path of the tool.") continue tool = get_or_create_tool(tool_definition, shelf) @@ -153,7 +158,7 @@ def get_or_create_shelf(shelf_label): def get_or_create_tool(tool_definition, shelf): - """This function verifies if the tool exsists and updates it. If not, creates + """This function verifies if the tool exists and updates it. If not, creates a new one. Arguments: @@ -180,7 +185,7 @@ def get_or_create_tool(tool_definition, shelf): if not os.path.exists(tool_definition['script']): log.warning( - "TOOL ERROR: This path doesn't exist - {}".format( + "This path doesn't exist - {}".format( tool_definition['script'] ) ) From 65d785d100c986128a88f1dc2c77b5321b85d7da Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 24 Aug 2022 00:29:08 +0200 Subject: [PATCH 034/169] Draft stash for refactoring maya submit deadline to use `AbstractSubmitDeadline` base. - This does *NOT* work currently! --- .../deadline/abstract_submit_deadline.py | 22 + .../plugins/publish/submit_maya_deadline.py | 1297 ++++++++--------- 2 files changed, 623 insertions(+), 696 deletions(-) diff --git a/openpype/modules/deadline/abstract_submit_deadline.py b/openpype/modules/deadline/abstract_submit_deadline.py index 0bad981fdf..577378335e 100644 --- a/openpype/modules/deadline/abstract_submit_deadline.py +++ b/openpype/modules/deadline/abstract_submit_deadline.py @@ -359,6 +359,27 @@ class DeadlineJobInfo(object): def OutputDirectory(self, val): # noqa: N802 self._outputDirectory.append(val) + # Asset Dependency + # ---------------------------------------------- + _assetDependency = attr.ib(factory=list) + + @property + def AssetDependency(self): # noqa: N802 + """Return all OutputDirectory values formatted for Deadline. 
+ + Returns: + dict: as `{'OutputDirectory0': 'dir'}` + + """ + out = {} + for index, v in enumerate(self._assetDependency): + out["AssetDependency{}".format(index)] = v + return out + + @OutputDirectory.setter + def AssetDependency(self, val): # noqa: N802 + self._assetDependency.append(val) + # Tile Job # ---------------------------------------------- TileJob = attr.ib(default=None) # Default: false @@ -396,6 +417,7 @@ class DeadlineJobInfo(object): serialized.update(self.OutputFilename) serialized.update(self.OutputFilenameTile) serialized.update(self.OutputDirectory) + serialized.update(self.AssetDependency) return serialized diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index 7966861358..6dfa48a9f8 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -18,7 +18,6 @@ Attributes: from __future__ import print_function import os -import json import getpass import copy import re @@ -27,252 +26,32 @@ from datetime import datetime import itertools from collections import OrderedDict +import attr import clique -import requests from maya import cmds -import pyblish.api - -from openpype.lib import requests_post from openpype.hosts.maya.api import lib from openpype.pipeline import legacy_io -# Documentation for keys available at: -# https://docs.thinkboxsoftware.com -# /products/deadline/8.0/1_User%20Manual/manual -# /manual-submission.html#job-info-file-options - -payload_skeleton_template = { - "JobInfo": { - "BatchName": None, # Top-level group name - "Name": None, # Job name, as seen in Monitor - "UserName": None, - "Plugin": "MayaBatch", - "Frames": "{start}-{end}x{step}", - "Comment": None, - "Priority": 50, - }, - "PluginInfo": { - "SceneFile": None, # Input - "OutputFilePath": None, # Output directory and filename - "OutputFilePrefix": None, - "Version": cmds.about(version=True), # Mandatory for Deadline - "UsingRenderLayers": True, - "RenderLayer": None, # Render only this layer - "Renderer": None, - "ProjectPath": None, # Resolve relative references - "RenderSetupIncludeLights": None, # Include all lights flag. - }, - "AuxFiles": [] # Mandatory for Deadline, may be empty -} +from openpype_modules.deadline import abstract_submit_deadline +from openpype_modules.deadline.abstract_submit_deadline import DeadlineJobInfo -def _format_tiles( - filename, index, tiles_x, tiles_y, - width, height, prefix): - """Generate tile entries for Deadline tile job. - - Returns two dictionaries - one that can be directly used in Deadline - job, second that can be used for Deadline Assembly job configuration - file. - - This will format tile names: - - Example:: - { - "OutputFilename0Tile0": "_tile_1x1_4x4_Main_beauty.1001.exr", - "OutputFilename0Tile1": "_tile_2x1_4x4_Main_beauty.1001.exr" - } - - And add tile prefixes like: - - Example:: - Image prefix is: - `maya///_` - - Result for tile 0 for 4x4 will be: - `maya///_tile_1x1_4x4__` - - Calculating coordinates is tricky as in Job they are defined as top, - left, bottom, right with zero being in top-left corner. But Assembler - configuration file takes tile coordinates as X, Y, Width and Height and - zero is bottom left corner. - - Args: - filename (str): Filename to process as tiles. - index (int): Index of that file if it is sequence. - tiles_x (int): Number of tiles in X. - tiles_y (int): Number if tikes in Y. 
- width (int): Width resolution of final image. - height (int): Height resolution of final image. - prefix (str): Image prefix. - - Returns: - (dict, dict): Tuple of two dictionaires - first can be used to - extend JobInfo, second has tiles x, y, width and height - used for assembler configuration. - - """ - tile = 0 - out = {"JobInfo": {}, "PluginInfo": {}} - cfg = OrderedDict() - w_space = width / tiles_x - h_space = height / tiles_y - - cfg["TilesCropped"] = "False" - - for tile_x in range(1, tiles_x + 1): - for tile_y in reversed(range(1, tiles_y + 1)): - tile_prefix = "_tile_{}x{}_{}x{}_".format( - tile_x, tile_y, - tiles_x, - tiles_y - ) - out_tile_index = "OutputFilename{}Tile{}".format( - str(index), tile - ) - new_filename = "{}/{}{}".format( - os.path.dirname(filename), - tile_prefix, - os.path.basename(filename) - ) - out["JobInfo"][out_tile_index] = new_filename - out["PluginInfo"]["RegionPrefix{}".format(str(tile))] = \ - "/{}".format(tile_prefix).join(prefix.rsplit("/", 1)) - - out["PluginInfo"]["RegionTop{}".format(tile)] = int(height) - (tile_y * h_space) # noqa: E501 - out["PluginInfo"]["RegionBottom{}".format(tile)] = int(height) - ((tile_y - 1) * h_space) - 1 # noqa: E501 - out["PluginInfo"]["RegionLeft{}".format(tile)] = (tile_x - 1) * w_space # noqa: E501 - out["PluginInfo"]["RegionRight{}".format(tile)] = (tile_x * w_space) - 1 # noqa: E501 - - cfg["Tile{}".format(tile)] = new_filename - cfg["Tile{}Tile".format(tile)] = new_filename - cfg["Tile{}FileName".format(tile)] = new_filename - cfg["Tile{}X".format(tile)] = (tile_x - 1) * w_space - - cfg["Tile{}Y".format(tile)] = int(height) - (tile_y * h_space) - - cfg["Tile{}Width".format(tile)] = w_space - cfg["Tile{}Height".format(tile)] = h_space - - tile += 1 - return out, cfg +@attr.s +class DeadlinePluginInfo(): + SceneFile = attr.ib(default=None) # Input + OutputFilePath = attr.ib(default=None) # Output directory and filename + OutputFilePrefix = attr.ib(default=None) + Version = attr.ib(default=None) # Mandatory for Deadline + UsingRenderLayers = attr.ib(default=True) + RenderLayer = attr.ib(default=None) # Render only this layer + Renderer = attr.ib(default=None) + ProjectPath = attr.ib(default=None) # Resolve relative references + RenderSetupIncludeLights = attr.ib(default=None) # Include all lights flag -def get_renderer_variables(renderlayer, root): - """Retrieve the extension which has been set in the VRay settings. - - Will return None if the current renderer is not VRay - For Maya 2016.5 and up the renderSetup creates renderSetupLayer node which - start with `rs`. Use the actual node name, do NOT use the `nice name` - - Args: - renderlayer (str): the node name of the renderlayer. 
- root (str): base path to render - - Returns: - dict - - """ - renderer = lib.get_renderer(renderlayer or lib.get_current_renderlayer()) - render_attrs = lib.RENDER_ATTRS.get(renderer, lib.RENDER_ATTRS["default"]) - - padding = cmds.getAttr("{}.{}".format(render_attrs["node"], - render_attrs["padding"])) - - filename_0 = cmds.renderSettings( - fullPath=True, - gin="#" * int(padding), - lut=True, - layer=renderlayer or lib.get_current_renderlayer())[0] - filename_0 = re.sub('_', '_beauty', - filename_0, flags=re.IGNORECASE) - prefix_attr = "defaultRenderGlobals.imageFilePrefix" - - scene = cmds.file(query=True, sceneName=True) - scene, _ = os.path.splitext(os.path.basename(scene)) - - if renderer == "vray": - renderlayer = renderlayer.split("_")[-1] - # Maya's renderSettings function does not return V-Ray file extension - # so we get the extension from vraySettings - extension = cmds.getAttr("vraySettings.imageFormatStr") - - # When V-Ray image format has not been switched once from default .png - # the getAttr command above returns None. As such we explicitly set - # it to `.png` - if extension is None: - extension = "png" - - if extension in ["exr (multichannel)", "exr (deep)"]: - extension = "exr" - - prefix_attr = "vraySettings.fileNamePrefix" - filename_prefix = cmds.getAttr(prefix_attr) - # we need to determine path for vray as maya `renderSettings` query - # does not work for vray. - - filename_0 = re.sub('', scene, filename_prefix, flags=re.IGNORECASE) # noqa: E501 - filename_0 = re.sub('', renderlayer, filename_0, flags=re.IGNORECASE) # noqa: E501 - filename_0 = "{}.{}.{}".format( - filename_0, "#" * int(padding), extension) - filename_0 = os.path.normpath(os.path.join(root, filename_0)) - elif renderer == "renderman": - prefix_attr = "rmanGlobals.imageFileFormat" - # NOTE: This is guessing extensions from renderman display types. - # Some of them are just framebuffers, d_texture format can be - # set in display setting. We set those now to None, but it - # should be handled more gracefully. - display_types = { - "d_deepexr": "exr", - "d_it": None, - "d_null": None, - "d_openexr": "exr", - "d_png": "png", - "d_pointcloud": "ptc", - "d_targa": "tga", - "d_texture": None, - "d_tiff": "tif" - } - - extension = display_types.get( - cmds.listConnections("rmanDefaultDisplay.displayType")[0], - "exr" - ) or "exr" - - filename_prefix = "{}/{}".format( - cmds.getAttr("rmanGlobals.imageOutputDir"), - cmds.getAttr("rmanGlobals.imageFileFormat") - ) - - renderlayer = renderlayer.split("_")[-1] - - filename_0 = re.sub('', scene, filename_prefix, flags=re.IGNORECASE) # noqa: E501 - filename_0 = re.sub('', renderlayer, filename_0, flags=re.IGNORECASE) # noqa: E501 - filename_0 = re.sub('', "#" * int(padding), filename_0, flags=re.IGNORECASE) # noqa: E501 - filename_0 = re.sub('', extension, filename_0, flags=re.IGNORECASE) # noqa: E501 - filename_0 = os.path.normpath(os.path.join(root, filename_0)) - elif renderer == "redshift": - # mapping redshift extension dropdown values to strings - ext_mapping = ["iff", "exr", "tif", "png", "tga", "jpg"] - extension = ext_mapping[ - cmds.getAttr("redshiftOptions.imageFormat") - ] - else: - # Get the extension, getAttr defaultRenderGlobals.imageFormat - # returns an index number. 
- filename_base = os.path.basename(filename_0) - extension = os.path.splitext(filename_base)[-1].strip(".") - - filename_prefix = cmds.getAttr(prefix_attr) - return {"ext": extension, - "filename_prefix": filename_prefix, - "padding": padding, - "filename_0": filename_0} - - -class MayaSubmitDeadline(pyblish.api.InstancePlugin): +class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): """Submit available render layers to Deadline. Renders are submitted to a Deadline Web Service as @@ -284,15 +63,12 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin): """ - label = "Submit to Deadline" - order = pyblish.api.IntegratorOrder + 0.1 + label = "Submit Render to Deadline" hosts = ["maya"] families = ["renderlayer"] targets = ["local"] - use_published = True tile_assembler_plugin = "OpenPypeTileAssembler" - asset_dependencies = False priority = 50 tile_priority = 50 limit_groups = [] @@ -300,32 +76,173 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin): pluginInfo = {} group = "none" - def process(self, instance): - """Plugin entry point.""" - instance.data["toBeRenderedOn"] = "deadline" + def get_job_info(self): + job_info = DeadlineJobInfo(Plugin="MayaBatch") + + # todo: test whether this works for existing production cases + # where custom jobInfo was stored in the project settings + for key, value in self.jobInfo.items(): + setattr(job_info, key, value) + + instance = self._instance context = instance.context - self._instance = instance - self.payload_skeleton = copy.deepcopy(payload_skeleton_template) + filepath = context.data["currentFile"] + filename = os.path.basename(filepath) - # get default deadline webservice url from deadline module - self.deadline_url = instance.context.data.get("defaultDeadline") - # if custom one is set in instance, use that - if instance.data.get("deadlineUrl"): - self.deadline_url = instance.data.get("deadlineUrl") - assert self.deadline_url, "Requires Deadline Webservice URL" + job_info.Name = "%s - %s" % (filename, instance.name) + job_info.BatchName = filename + job_info.Plugin = instance.data.get("mayaRenderPlugin", "MayaBatch") + job_info.UserName = context.data.get( + "deadlineUser", getpass.getuser()) - # just using existing names from Setting - self._job_info = self.jobInfo + # Deadline requires integers in frame range + frames = "{start}-{end}x{step}".format( + start=int(instance.data["frameStartHandle"]), + end=int(instance.data["frameEndHandle"]), + step=int(instance.data["byFrameStep"]), + ) + job_info.Frames = frames - self._plugin_info = self.pluginInfo + job_info.Pool = instance.data.get("primaryPool") + job_info.SecondaryPool = instance.data.get("secondaryPool") + job_info.ChunkSize = instance.data.get("chunkSize", 10) + job_info.Comment = context.data.get("comment") + job_info.Priority = instance.data.get("priority", self.priority) + + if self.group != "none" and self.group: + job_info.Group = self.group + + if self.limit_groups: + job_info.LimitGroups = ",".join(self.limit_groups) + + self.payload_skeleton["JobInfo"]["Name"] = jobname + self.payload_skeleton["JobInfo"]["BatchName"] = src_filename + + # Optional, enable double-click to preview rendered + # frames from Deadline Monitor + self.payload_skeleton["JobInfo"]["OutputDirectory0"] = \ + os.path.dirname(output_filename_0).replace("\\", "/") + self.payload_skeleton["JobInfo"]["OutputFilename0"] = \ + output_filename_0.replace("\\", "/") + + # Add options from RenderGlobals------------------------------------- + render_globals = instance.data.get("renderGlobals", 
{}) + self.payload_skeleton["JobInfo"].update(render_globals) + + keys = [ + "FTRACK_API_KEY", + "FTRACK_API_USER", + "FTRACK_SERVER", + "OPENPYPE_SG_USER", + "AVALON_PROJECT", + "AVALON_ASSET", + "AVALON_TASK", + "AVALON_APP_NAME", + "OPENPYPE_DEV", + "OPENPYPE_LOG_NO_COLORS", + "OPENPYPE_VERSION" + ] + # Add mongo url if it's enabled + if self._instance.context.data.get("deadlinePassMongoUrl"): + keys.append("OPENPYPE_MONGO") + + environment = dict({key: os.environ[key] for key in keys + if key in os.environ}, **legacy_io.Session) + + + # TODO: Taken from old publish class - test whether still needed + environment["OPENPYPE_LOG_NO_COLORS"] = "1" + environment["OPENPYPE_MAYA_VERSION"] = cmds.about(v=True) + # to recognize job from PYPE for turning Event On/Off + environment["OPENPYPE_RENDER_JOB"] = "1" + + for key in keys: + val = environment.get(key) + if val: + job_info.EnvironmentKeyValue = "{key}={value}".format( + key=key, + value=val + ) + # to recognize job from PYPE for turning Event On/Off + job_info.EnvironmentKeyValue = "OPENPYPE_RENDER_JOB=1" + + for i, filepath in enumerate(instance.data["files"]): + dirname = os.path.dirname(filepath) + fname = os.path.basename(filepath) + job_info.OutputDirectory = dirname.replace("\\", "/") + job_info.OutputFilename = fname + + # Adding file dependencies. + if self.asset_dependencies: + dependencies = instance.context.data["fileDependencies"] + dependencies.append(context.data["currentFile"]) + for dependency in dependencies: + job_info.AssetDependency = dependency + + # Add list of expected files to job + # --------------------------------- + exp = instance.data.get("expectedFiles") + + def _get_output_filename(files): + col, rem = clique.assemble(files) + if not col and rem: + # we couldn't find any collections but have + # individual files. 
+ assert len(rem) == 1, ( + "Found multiple non related files " + "to render, don't know what to do " + "with them.") + return rem[0] + else: + return col[0].format('{head}{padding}{tail}') + + if isinstance(exp[0], dict): + # we have aovs and we need to iterate over them + for _aov, files in exp[0].items(): + output_file = _get_output_filename(files) + job_info.OutputFilename = output_file + else: + output_file = _get_output_filename(exp) + job_info.OutputFilename = output_file + + return job_info + + def get_plugin_info(self): + + instance = self._instance + context = instance.context + + renderlayer = instance.data['setMembers'] # rs_beauty + + self.payload_skeleton["PluginInfo"]["RenderLayer"] = renderlayer + self.payload_skeleton["PluginInfo"]["RenderSetupIncludeLights"] = instance.data.get("renderSetupIncludeLights") # noqa + + # Output driver to render + plugin_info = DeadlinePluginInfo( + SceneFile=context.data["currentFile"], + Version=cmds.about(version=True), + ) + + return attr.asdict(plugin_info) + + def process_submission(self): + # Override to NOT submit by default when calling super process() method + pass + + def process(self, instance): + super(MayaSubmitDeadline, self).process(instance) + + # TODO: Avoid the need for this logic here, needed for submit publish + # Store output dir for unified publisher (filesequence) + output_dir = os.path.dirname(instance.data["files"][0]) + instance.data["outputDir"] = output_dir + instance.data["toBeRenderedOn"] = "deadline" self.limit_groups = self.limit context = instance.context workspace = context.data["workspaceDir"] - anatomy = context.data['anatomy'] - instance.data["toBeRenderedOn"] = "deadline" filepath = None patches = ( @@ -336,80 +253,24 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin): "scene_patches", {}) ) - # Handle render/export from published scene or not ------------------ - if self.use_published: - patched_files = [] - for i in context: - if "workfile" not in i.data["families"]: - continue - assert i.data["publish"] is True, ( - "Workfile (scene) must be published along") - template_data = i.data.get("anatomyData") - rep = i.data.get("representations")[0].get("name") - template_data["representation"] = rep - template_data["ext"] = rep - template_data["comment"] = None - anatomy_filled = anatomy.format(template_data) - template_filled = anatomy_filled["publish"]["path"] - filepath = os.path.normpath(template_filled) - self.log.info("Using published scene for render {}".format( - filepath)) + # todo: on self.use_published originally use template_data["representation"] using .get("name") instead of .get("ext") + # todo: on self.use_published replace path for publishRenderMetadataFolder + # todo: on self.use_published apply scene patches to workfile instance + # rep = i.data.get("representations")[0].get("name") - if not os.path.exists(filepath): - self.log.error("published scene does not exist!") - raise - # now we need to switch scene in expected files - # because token will now point to published - # scene file and that might differ from current one - new_scene = os.path.splitext( - os.path.basename(filepath))[0] - orig_scene = os.path.splitext( - os.path.basename(context.data["currentFile"]))[0] - exp = instance.data.get("expectedFiles") + # if instance.data.get("publishRenderMetadataFolder"): + # instance.data["publishRenderMetadataFolder"] = \ + # instance.data["publishRenderMetadataFolder"].replace( + # orig_scene, new_scene) + # self.log.info("Scene name was switched {} -> {}".format( + # orig_scene, 
new_scene + # )) + # # patch workfile is needed + # if filepath not in patched_files: + # patched_file = self._patch_workfile(filepath, patches) + # patched_files.append(patched_file) - if isinstance(exp[0], dict): - # we have aovs and we need to iterate over them - new_exp = {} - for aov, files in exp[0].items(): - replaced_files = [] - for f in files: - replaced_files.append( - f.replace(orig_scene, new_scene) - ) - new_exp[aov] = replaced_files - instance.data["expectedFiles"] = [new_exp] - else: - new_exp = [] - for f in exp: - new_exp.append( - f.replace(orig_scene, new_scene) - ) - instance.data["expectedFiles"] = [new_exp] - - if instance.data.get("publishRenderMetadataFolder"): - instance.data["publishRenderMetadataFolder"] = \ - instance.data["publishRenderMetadataFolder"].replace( - orig_scene, new_scene) - self.log.info("Scene name was switched {} -> {}".format( - orig_scene, new_scene - )) - # patch workfile is needed - if filepath not in patched_files: - patched_file = self._patch_workfile(filepath, patches) - patched_files.append(patched_file) - - all_instances = [] - for result in context.data["results"]: - if (result["instance"] is not None and - result["instance"] not in all_instances): # noqa: E128 - all_instances.append(result["instance"]) - - # fallback if nothing was set - if not filepath: - self.log.warning("Falling back to workfile") - filepath = context.data["currentFile"] - - self.log.debug(filepath) + filepath = self.scene_path # collect by super().process # Gather needed data ------------------------------------------------ default_render_file = instance.context.data.get('project_settings')\ @@ -417,10 +278,8 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin): .get('RenderSettings')\ .get('default_render_image_folder') filename = os.path.basename(filepath) - comment = context.data.get("comment", "") dirname = os.path.join(workspace, default_render_file) renderlayer = instance.data['setMembers'] # rs_beauty - deadline_user = context.data.get("user", getpass.getuser()) # Always use the original work file name for the Job name even when # rendering is done from the published Work File. 
The original work @@ -454,116 +313,34 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin): pass # Fill in common data to payload ------------------------------------ - payload_data = {} - payload_data["filename"] = filename - payload_data["filepath"] = filepath - payload_data["jobname"] = jobname - payload_data["deadline_user"] = deadline_user - payload_data["comment"] = comment - payload_data["output_filename_0"] = output_filename_0 - payload_data["render_variables"] = render_variables - payload_data["renderlayer"] = renderlayer - payload_data["workspace"] = workspace - payload_data["dirname"] = dirname - - self.log.info("--- Submission data:") - for k, v in payload_data.items(): - self.log.info("- {}: {}".format(k, v)) - self.log.info("-" * 20) - - frame_pattern = self.payload_skeleton["JobInfo"]["Frames"] - self.payload_skeleton["JobInfo"]["Frames"] = frame_pattern.format( - start=int(self._instance.data["frameStartHandle"]), - end=int(self._instance.data["frameEndHandle"]), - step=int(self._instance.data["byFrameStep"])) - - self.payload_skeleton["JobInfo"]["Plugin"] = self._instance.data.get( - "mayaRenderPlugin", "MayaBatch") - - self.payload_skeleton["JobInfo"]["BatchName"] = src_filename - # Job name, as seen in Monitor - self.payload_skeleton["JobInfo"]["Name"] = jobname - # Arbitrary username, for visualisation in Monitor - self.payload_skeleton["JobInfo"]["UserName"] = deadline_user - # Set job priority - self.payload_skeleton["JobInfo"]["Priority"] = \ - self._instance.data.get("priority", self.priority) - - if self.group != "none" and self.group: - self.payload_skeleton["JobInfo"]["Group"] = self.group - - if self.limit_groups: - self.payload_skeleton["JobInfo"]["LimitGroups"] = \ - ",".join(self.limit_groups) - # Optional, enable double-click to preview rendered - # frames from Deadline Monitor - self.payload_skeleton["JobInfo"]["OutputDirectory0"] = \ - os.path.dirname(output_filename_0).replace("\\", "/") - self.payload_skeleton["JobInfo"]["OutputFilename0"] = \ - output_filename_0.replace("\\", "/") - - self.payload_skeleton["JobInfo"]["Comment"] = comment - self.payload_skeleton["PluginInfo"]["RenderLayer"] = renderlayer - - self.payload_skeleton["PluginInfo"]["RenderSetupIncludeLights"] = instance.data.get("renderSetupIncludeLights") # noqa - # Adding file dependencies. 
- dependencies = instance.context.data["fileDependencies"] - dependencies.append(filepath) - if self.asset_dependencies: - for dependency in dependencies: - key = "AssetDependency" + str(dependencies.index(dependency)) - self.payload_skeleton["JobInfo"][key] = dependency - - # Handle environments ----------------------------------------------- - # We need those to pass them to pype for it to set correct context - keys = [ - "FTRACK_API_KEY", - "FTRACK_API_USER", - "FTRACK_SERVER", - "OPENPYPE_SG_USER", - "AVALON_PROJECT", - "AVALON_ASSET", - "AVALON_TASK", - "AVALON_APP_NAME", - "OPENPYPE_DEV", - "OPENPYPE_LOG_NO_COLORS", - "OPENPYPE_VERSION" - ] - # Add mongo url if it's enabled - if instance.context.data.get("deadlinePassMongoUrl"): - keys.append("OPENPYPE_MONGO") - - environment = dict({key: os.environ[key] for key in keys - if key in os.environ}, **legacy_io.Session) - environment["OPENPYPE_LOG_NO_COLORS"] = "1" - environment["OPENPYPE_MAYA_VERSION"] = cmds.about(v=True) - # to recognize job from PYPE for turning Event On/Off - environment["OPENPYPE_RENDER_JOB"] = "1" - self.payload_skeleton["JobInfo"].update({ - "EnvironmentKeyValue%d" % index: "{key}={value}".format( - key=key, - value=environment[key] - ) for index, key in enumerate(environment) - }) - # Add options from RenderGlobals------------------------------------- - render_globals = instance.data.get("renderGlobals", {}) - self.payload_skeleton["JobInfo"].update(render_globals) + payload_data = { + "filename": filename, + "filepath": filepath, + "jobname": jobname, + "comment": comment, + "output_filename_0": output_filename_0, + "render_variables": render_variables, + "renderlayer": renderlayer, + "workspace": workspace, + "dirname": dirname, + } # Submit preceding export jobs ------------------------------------- export_job = None assert not all(x in instance.data["families"] for x in ['vrayscene', 'assscene']), ( "Vray Scene and Ass Scene options are mutually exclusive") - if "vrayscene" in instance.data["families"]: - export_job = self._submit_export(payload_data, "vray") - if "assscene" in instance.data["families"]: - export_job = self._submit_export(payload_data, "arnold") - - # Prepare main render job ------------------------------------------- if "vrayscene" in instance.data["families"]: + vray_export_payload = self._get_vray_export_payload(payload_data) + export_job = self.submit(vray_export_payload) + payload = self._get_vray_render_payload(payload_data) + elif "assscene" in instance.data["families"]: + ass_export_payload = self._get_arnold_export_payload(payload_data) + export_job = self.submit(ass_export_payload) + payload = self._get_arnold_render_payload(payload_data) else: payload = self._get_maya_payload(payload_data) @@ -572,267 +349,222 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin): if export_job: payload["JobInfo"]["JobDependency0"] = export_job - # Add list of expected files to job --------------------------------- - exp = instance.data.get("expectedFiles") - exp_index = 0 - output_filenames = {} - - if isinstance(exp[0], dict): - # we have aovs and we need to iterate over them - for _aov, files in exp[0].items(): - col, rem = clique.assemble(files) - if not col and rem: - # we couldn't find any collections but have - # individual files. 
- assert len(rem) == 1, ("Found multiple non related files " - "to render, don't know what to do " - "with them.") - output_file = rem[0] - if not instance.data.get("tileRendering"): - payload['JobInfo']['OutputFilename' + str(exp_index)] = output_file # noqa: E501 - else: - output_file = col[0].format('{head}{padding}{tail}') - if not instance.data.get("tileRendering"): - payload['JobInfo']['OutputFilename' + str(exp_index)] = output_file # noqa: E501 - - output_filenames['OutputFilename' + str(exp_index)] = output_file # noqa: E501 - exp_index += 1 - else: - col, rem = clique.assemble(exp) - if not col and rem: - # we couldn't find any collections but have - # individual files. - assert len(rem) == 1, ("Found multiple non related files " - "to render, don't know what to do " - "with them.") - - output_file = rem[0] - if not instance.data.get("tileRendering"): - payload['JobInfo']['OutputFilename' + str(exp_index)] = output_file # noqa: E501 - else: - output_file = col[0].format('{head}{padding}{tail}') - if not instance.data.get("tileRendering"): - payload['JobInfo']['OutputFilename' + str(exp_index)] = output_file # noqa: E501 - - output_filenames['OutputFilename' + str(exp_index)] = output_file - plugin = payload["JobInfo"]["Plugin"] self.log.info("using render plugin : {}".format(plugin)) # Store output dir for unified publisher (filesequence) instance.data["outputDir"] = os.path.dirname(output_filename_0) - self.preflight_check(instance) - # add jobInfo and pluginInfo variables from Settings - payload["JobInfo"].update(self._job_info) - payload["PluginInfo"].update(self._plugin_info) + payload["JobInfo"].update(self.jobInfo) + payload["PluginInfo"].update(self.pluginInfo) - # Prepare tiles data ------------------------------------------------ if instance.data.get("tileRendering"): - # if we have sequence of files, we need to create tile job for - # every frame + # Prepare tiles data + self._tile_render(instance, payload) + else: + # Submit main render job + self.submit(payload) - payload["JobInfo"]["TileJob"] = True - payload["JobInfo"]["TileJobTilesInX"] = instance.data.get("tilesX") - payload["JobInfo"]["TileJobTilesInY"] = instance.data.get("tilesY") - payload["PluginInfo"]["ImageHeight"] = instance.data.get("resolutionHeight") # noqa: E501 - payload["PluginInfo"]["ImageWidth"] = instance.data.get("resolutionWidth") # noqa: E501 - payload["PluginInfo"]["RegionRendering"] = True + def _tile_render(self, instance, payload): - assembly_payload = { - "AuxFiles": [], - "JobInfo": { - "BatchName": payload["JobInfo"]["BatchName"], - "Frames": 1, - "Name": "{} - Tile Assembly Job".format( - payload["JobInfo"]["Name"]), - "OutputDirectory0": - payload["JobInfo"]["OutputDirectory0"].replace( - "\\", "/"), - "Plugin": self.tile_assembler_plugin, - "MachineLimit": 1 - }, - "PluginInfo": { - "CleanupTiles": 1, - "ErrorOnMissing": True - } + # As collected by super process() + job_info = self.job_info + plugin_info = self.pluginInfo + + # if we have sequence of files, we need to create tile job for + # every frame + + job_info.TileJob = True + job_info.TileJobTilesInX = instance.data.get("tilesX") + job_info.TileJobTilesInY = instance.data.get("tilesY") + + plugin_info["ImageHeight"] = instance.data.get("resolutionHeight") + plugin_info["ImageWidth"] = instance.data.get("resolutionWidth") + plugin_info["RegionRendering"] = True + + assembly_payload = { + "AuxFiles": [], + "JobInfo": { + "BatchName": payload["JobInfo"]["BatchName"], + "Frames": 1, + "Name": "{} - Tile Assembly Job".format( + 
payload["JobInfo"]["Name"]), + "OutputDirectory0": + payload["JobInfo"]["OutputDirectory0"].replace( + "\\", "/"), + "Plugin": self.tile_assembler_plugin, + "MachineLimit": 1 + }, + "PluginInfo": { + "CleanupTiles": 1, + "ErrorOnMissing": True } - assembly_payload["JobInfo"].update(output_filenames) - assembly_payload["JobInfo"]["Priority"] = self._instance.data.get( - "tile_priority", self.tile_priority) - assembly_payload["JobInfo"]["UserName"] = deadline_user + } + assembly_payload["JobInfo"]["Priority"] = self._instance.data.get( + "tile_priority", self.tile_priority) - frame_payloads = [] - assembly_payloads = [] + frame_payloads = [] + assembly_payloads = [] - R_FRAME_NUMBER = re.compile(r".+\.(?P[0-9]+)\..+") # noqa: N806, E501 - REPL_FRAME_NUMBER = re.compile(r"(.+\.)([0-9]+)(\..+)") # noqa: N806, E501 + R_FRAME_NUMBER = re.compile( + r".+\.(?P[0-9]+)\..+") # noqa: N806, E501 + REPL_FRAME_NUMBER = re.compile( + r"(.+\.)([0-9]+)(\..+)") # noqa: N806, E501 - if isinstance(exp[0], dict): - # we have aovs and we need to iterate over them - # get files from `beauty` - files = exp[0].get("beauty") - # assembly files are used for assembly jobs as we need to put - # together all AOVs - assembly_files = list( - itertools.chain.from_iterable( - [f for _, f in exp[0].items()])) - if not files: - # if beauty doesn't exists, use first aov we found - files = exp[0].get(list(exp[0].keys())[0]) - else: - files = exp - assembly_files = files + exp = instance.data["expectedFiles"] + if isinstance(exp[0], dict): + # we have aovs and we need to iterate over them + # get files from `beauty` + files = exp[0].get("beauty") + # assembly files are used for assembly jobs as we need to put + # together all AOVs + assembly_files = list( + itertools.chain.from_iterable( + [f for _, f in exp[0].items()])) + if not files: + # if beauty doesn't exists, use first aov we found + files = exp[0].get(list(exp[0].keys())[0]) + else: + files = exp + assembly_files = files - frame_jobs = {} + frame_jobs = {} - file_index = 1 - for file in files: - frame = re.search(R_FRAME_NUMBER, file).group("frame") - new_payload = copy.deepcopy(payload) - new_payload["JobInfo"]["Name"] = \ - "{} (Frame {} - {} tiles)".format( - payload["JobInfo"]["Name"], - frame, - instance.data.get("tilesX") * instance.data.get("tilesY") # noqa: E501 + file_index = 1 + for file in files: + frame = re.search(R_FRAME_NUMBER, file).group("frame") + new_payload = copy.deepcopy(payload) + new_payload["JobInfo"]["Name"] = \ + "{} (Frame {} - {} tiles)".format( + payload["JobInfo"]["Name"], + frame, + instance.data.get("tilesX") * instance.data.get("tilesY") + # noqa: E501 ) - self.log.info( - "... preparing job {}".format( - new_payload["JobInfo"]["Name"])) - new_payload["JobInfo"]["TileJobFrame"] = frame + self.log.info( + "... 
preparing job {}".format( + new_payload["JobInfo"]["Name"])) + new_payload["JobInfo"]["TileJobFrame"] = frame - tiles_data = _format_tiles( + tiles_data = _format_tiles( + file, 0, + instance.data.get("tilesX"), + instance.data.get("tilesY"), + instance.data.get("resolutionWidth"), + instance.data.get("resolutionHeight"), + payload["PluginInfo"]["OutputFilePrefix"] + )[0] + new_payload["JobInfo"].update(tiles_data["JobInfo"]) + new_payload["PluginInfo"].update(tiles_data["PluginInfo"]) + + self.log.info("hashing {} - {}".format(file_index, file)) + job_hash = hashlib.sha256( + ("{}_{}".format(file_index, file)).encode("utf-8")) + frame_jobs[frame] = job_hash.hexdigest() + new_payload["JobInfo"]["ExtraInfo0"] = job_hash.hexdigest() + new_payload["JobInfo"]["ExtraInfo1"] = file + + frame_payloads.append(new_payload) + file_index += 1 + + file_index = 1 + for file in assembly_files: + frame = re.search(R_FRAME_NUMBER, file).group("frame") + new_assembly_payload = copy.deepcopy(assembly_payload) + new_assembly_payload["JobInfo"]["Name"] = \ + "{} (Frame {})".format( + assembly_payload["JobInfo"]["Name"], + frame) + new_assembly_payload["JobInfo"]["OutputFilename0"] = re.sub( + REPL_FRAME_NUMBER, + "\\1{}\\3".format("#" * len(frame)), file) + + new_assembly_payload["PluginInfo"]["Renderer"] = \ + self._instance.data["renderer"] # noqa: E501 + new_assembly_payload["JobInfo"]["ExtraInfo0"] = frame_jobs[ + frame] # noqa: E501 + new_assembly_payload["JobInfo"]["ExtraInfo1"] = file + assembly_payloads.append(new_assembly_payload) + file_index += 1 + + self.log.info( + "Submitting tile job(s) [{}] ...".format(len(frame_payloads))) + + url = "{}/api/jobs".format(self.deadline_url) + tiles_count = instance.data.get("tilesX") * instance.data.get( + "tilesY") # noqa: E501 + + for tile_job in frame_payloads: + response = self.submit(tile_job) + + job_id = response.json()["_id"] + hash = response.json()["Props"]["Ex0"] + + for assembly_job in assembly_payloads: + if assembly_job["JobInfo"]["ExtraInfo0"] == hash: + assembly_job["JobInfo"]["JobDependency0"] = job_id + + for assembly_job in assembly_payloads: + file = assembly_job["JobInfo"]["ExtraInfo1"] + # write assembly job config files + now = datetime.now() + + config_file = os.path.join( + os.path.dirname(output_filename_0), + "{}_config_{}.txt".format( + os.path.splitext(file)[0], + now.strftime("%Y_%m_%d_%H_%M_%S") + ) + ) + + try: + if not os.path.isdir(os.path.dirname(config_file)): + os.makedirs(os.path.dirname(config_file)) + except OSError: + # directory is not available + self.log.warning( + "Path is unreachable: `{}`".format( + os.path.dirname(config_file))) + + # add config file as job auxFile + assembly_job["AuxFiles"] = [config_file] + + with open(config_file, "w") as cf: + print("TileCount={}".format(tiles_count), file=cf) + print("ImageFileName={}".format(file), file=cf) + print("ImageWidth={}".format( + instance.data.get("resolutionWidth")), file=cf) + print("ImageHeight={}".format( + instance.data.get("resolutionHeight")), file=cf) + + tiles = _format_tiles( file, 0, instance.data.get("tilesX"), instance.data.get("tilesY"), instance.data.get("resolutionWidth"), instance.data.get("resolutionHeight"), payload["PluginInfo"]["OutputFilePrefix"] - )[0] - new_payload["JobInfo"].update(tiles_data["JobInfo"]) - new_payload["PluginInfo"].update(tiles_data["PluginInfo"]) + )[1] + sorted(tiles) + for k, v in tiles.items(): + print("{}={}".format(k, v), file=cf) - self.log.info("hashing {} - {}".format(file_index, file)) - job_hash = 
hashlib.sha256( - ("{}_{}".format(file_index, file)).encode("utf-8")) - frame_jobs[frame] = job_hash.hexdigest() - new_payload["JobInfo"]["ExtraInfo0"] = job_hash.hexdigest() - new_payload["JobInfo"]["ExtraInfo1"] = file + job_idx = 1 + instance.data["assemblySubmissionJobs"] = [] + for ass_job in assembly_payloads: + self.log.info("submitting assembly job {} of {}".format( + job_idx, len(assembly_payloads) + )) + response = self.submit(ass_job) - frame_payloads.append(new_payload) - file_index += 1 + instance.data["assemblySubmissionJobs"].append( + response.json()["_id"]) + job_idx += 1 - file_index = 1 - for file in assembly_files: - frame = re.search(R_FRAME_NUMBER, file).group("frame") - new_assembly_payload = copy.deepcopy(assembly_payload) - new_assembly_payload["JobInfo"]["Name"] = \ - "{} (Frame {})".format( - assembly_payload["JobInfo"]["Name"], - frame) - new_assembly_payload["JobInfo"]["OutputFilename0"] = re.sub( - REPL_FRAME_NUMBER, - "\\1{}\\3".format("#" * len(frame)), file) - - new_assembly_payload["PluginInfo"]["Renderer"] = self._instance.data["renderer"] # noqa: E501 - new_assembly_payload["JobInfo"]["ExtraInfo0"] = frame_jobs[frame] # noqa: E501 - new_assembly_payload["JobInfo"]["ExtraInfo1"] = file - assembly_payloads.append(new_assembly_payload) - file_index += 1 - - self.log.info( - "Submitting tile job(s) [{}] ...".format(len(frame_payloads))) - - url = "{}/api/jobs".format(self.deadline_url) - tiles_count = instance.data.get("tilesX") * instance.data.get("tilesY") # noqa: E501 - - for tile_job in frame_payloads: - response = requests_post(url, json=tile_job) - if not response.ok: - raise Exception(response.text) - - job_id = response.json()["_id"] - hash = response.json()["Props"]["Ex0"] - - for assembly_job in assembly_payloads: - if assembly_job["JobInfo"]["ExtraInfo0"] == hash: - assembly_job["JobInfo"]["JobDependency0"] = job_id - - for assembly_job in assembly_payloads: - file = assembly_job["JobInfo"]["ExtraInfo1"] - # write assembly job config files - now = datetime.now() - - config_file = os.path.join( - os.path.dirname(output_filename_0), - "{}_config_{}.txt".format( - os.path.splitext(file)[0], - now.strftime("%Y_%m_%d_%H_%M_%S") - ) - ) - - try: - if not os.path.isdir(os.path.dirname(config_file)): - os.makedirs(os.path.dirname(config_file)) - except OSError: - # directory is not available - self.log.warning( - "Path is unreachable: `{}`".format( - os.path.dirname(config_file))) - - # add config file as job auxFile - assembly_job["AuxFiles"] = [config_file] - - with open(config_file, "w") as cf: - print("TileCount={}".format(tiles_count), file=cf) - print("ImageFileName={}".format(file), file=cf) - print("ImageWidth={}".format( - instance.data.get("resolutionWidth")), file=cf) - print("ImageHeight={}".format( - instance.data.get("resolutionHeight")), file=cf) - - tiles = _format_tiles( - file, 0, - instance.data.get("tilesX"), - instance.data.get("tilesY"), - instance.data.get("resolutionWidth"), - instance.data.get("resolutionHeight"), - payload["PluginInfo"]["OutputFilePrefix"] - )[1] - sorted(tiles) - for k, v in tiles.items(): - print("{}={}".format(k, v), file=cf) - - job_idx = 1 - instance.data["assemblySubmissionJobs"] = [] - for ass_job in assembly_payloads: - self.log.info("submitting assembly job {} of {}".format( - job_idx, len(assembly_payloads) - )) - self.log.debug(json.dumps(ass_job, indent=4, sort_keys=True)) - response = requests_post(url, json=ass_job) - if not response.ok: - raise Exception(response.text) - - 
instance.data["assemblySubmissionJobs"].append( - response.json()["_id"]) - job_idx += 1 - - instance.data["jobBatchName"] = payload["JobInfo"]["BatchName"] - self.log.info("Setting batch name on instance: {}".format( - instance.data["jobBatchName"])) - else: - # Submit job to farm -------------------------------------------- - self.log.info("Submitting ...") - self.log.debug(json.dumps(payload, indent=4, sort_keys=True)) - - # E.g. http://192.168.0.1:8082/api/jobs - url = "{}/api/jobs".format(self.deadline_url) - response = requests_post(url, json=payload) - if not response.ok: - raise Exception(response.text) - instance.data["deadlineSubmissionJob"] = response.json() + instance.data["jobBatchName"] = payload["JobInfo"]["BatchName"] + self.log.info("Setting batch name on instance: {}".format( + instance.data["jobBatchName"])) def _get_maya_payload(self, data): payload = copy.deepcopy(self.payload_skeleton) @@ -1045,39 +777,6 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin): payload["PluginInfo"].update(plugin_info) return payload - def _submit_export(self, data, format): - if format == "vray": - payload = self._get_vray_export_payload(data) - self.log.info("Submitting vrscene export job.") - elif format == "arnold": - payload = self._get_arnold_export_payload(data) - self.log.info("Submitting ass export job.") - - url = "{}/api/jobs".format(self.deadline_url) - response = requests_post(url, json=payload) - if not response.ok: - self.log.error("Submition failed!") - self.log.error(response.status_code) - self.log.error(response.content) - self.log.debug(payload) - raise RuntimeError(response.text) - - dependency = response.json() - return dependency["_id"] - - def preflight_check(self, instance): - """Ensure the startFrame, endFrame and byFrameStep are integers.""" - for key in ("frameStartHandle", "frameEndHandle", "byFrameStep"): - value = instance.data[key] - - if int(value) == value: - continue - - self.log.warning( - "%f=%d was rounded off to nearest integer" - % (value, int(value)) - ) - def format_vray_output_filename(self, filename, template, dir=False): """Format the expected output file of the Export job. @@ -1160,3 +859,209 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin): "Applied {} patch to scene.".format( patches[i]["name"])) return file + + +def _format_tiles( + filename, index, tiles_x, tiles_y, + width, height, prefix): + """Generate tile entries for Deadline tile job. + + Returns two dictionaries - one that can be directly used in Deadline + job, second that can be used for Deadline Assembly job configuration + file. + + This will format tile names: + + Example:: + { + "OutputFilename0Tile0": "_tile_1x1_4x4_Main_beauty.1001.exr", + "OutputFilename0Tile1": "_tile_2x1_4x4_Main_beauty.1001.exr" + } + + And add tile prefixes like: + + Example:: + Image prefix is: + `maya///_` + + Result for tile 0 for 4x4 will be: + `maya///_tile_1x1_4x4__` + + Calculating coordinates is tricky as in Job they are defined as top, + left, bottom, right with zero being in top-left corner. But Assembler + configuration file takes tile coordinates as X, Y, Width and Height and + zero is bottom left corner. + + Args: + filename (str): Filename to process as tiles. + index (int): Index of that file if it is sequence. + tiles_x (int): Number of tiles in X. + tiles_y (int): Number if tikes in Y. + width (int): Width resolution of final image. + height (int): Height resolution of final image. + prefix (str): Image prefix. 
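+
+    Example (illustrative numbers, assuming a 2x1 tiling of a 200x100
+    frame; the divisions below produce floats)::
+
+        out, cfg = _format_tiles(
+            "render/beauty.1001.exr", 0, 2, 1, 200, 100,
+            "maya/<Scene>/<RenderLayer>/<RenderLayer>_beauty")
+        # out["JobInfo"]["OutputFilename0Tile0"]
+        #   -> "render/_tile_1x1_2x1_beauty.1001.exr"
+        # out["PluginInfo"]["RegionLeft0"] -> 0.0, "RegionRight0" -> 99.0
+        # out["PluginInfo"]["RegionTop0"] -> 0.0, "RegionBottom0" -> 99.0
+        # cfg["Tile0X"] -> 0.0, cfg["Tile0Width"] -> 100.0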
+ + Returns: + (dict, dict): Tuple of two dictionaires - first can be used to + extend JobInfo, second has tiles x, y, width and height + used for assembler configuration. + + """ + tile = 0 + out = {"JobInfo": {}, "PluginInfo": {}} + cfg = OrderedDict() + w_space = width / tiles_x + h_space = height / tiles_y + + cfg["TilesCropped"] = "False" + + for tile_x in range(1, tiles_x + 1): + for tile_y in reversed(range(1, tiles_y + 1)): + tile_prefix = "_tile_{}x{}_{}x{}_".format( + tile_x, tile_y, + tiles_x, + tiles_y + ) + out_tile_index = "OutputFilename{}Tile{}".format( + str(index), tile + ) + new_filename = "{}/{}{}".format( + os.path.dirname(filename), + tile_prefix, + os.path.basename(filename) + ) + out["JobInfo"][out_tile_index] = new_filename + out["PluginInfo"]["RegionPrefix{}".format(str(tile))] = \ + "/{}".format(tile_prefix).join(prefix.rsplit("/", 1)) + + out["PluginInfo"]["RegionTop{}".format(tile)] = int(height) - (tile_y * h_space) # noqa: E501 + out["PluginInfo"]["RegionBottom{}".format(tile)] = int(height) - ((tile_y - 1) * h_space) - 1 # noqa: E501 + out["PluginInfo"]["RegionLeft{}".format(tile)] = (tile_x - 1) * w_space # noqa: E501 + out["PluginInfo"]["RegionRight{}".format(tile)] = (tile_x * w_space) - 1 # noqa: E501 + + cfg["Tile{}".format(tile)] = new_filename + cfg["Tile{}Tile".format(tile)] = new_filename + cfg["Tile{}FileName".format(tile)] = new_filename + cfg["Tile{}X".format(tile)] = (tile_x - 1) * w_space + + cfg["Tile{}Y".format(tile)] = int(height) - (tile_y * h_space) + + cfg["Tile{}Width".format(tile)] = w_space + cfg["Tile{}Height".format(tile)] = h_space + + tile += 1 + return out, cfg + + +def get_renderer_variables(renderlayer, root): + """Retrieve the extension which has been set in the VRay settings. + + Will return None if the current renderer is not VRay + For Maya 2016.5 and up the renderSetup creates renderSetupLayer node which + start with `rs`. Use the actual node name, do NOT use the `nice name` + + Args: + renderlayer (str): the node name of the renderlayer. + root (str): base path to render + + Returns: + dict + + """ + renderer = lib.get_renderer(renderlayer or lib.get_current_renderlayer()) + render_attrs = lib.RENDER_ATTRS.get(renderer, lib.RENDER_ATTRS["default"]) + + padding = cmds.getAttr("{}.{}".format(render_attrs["node"], + render_attrs["padding"])) + + filename_0 = cmds.renderSettings( + fullPath=True, + gin="#" * int(padding), + lut=True, + layer=renderlayer or lib.get_current_renderlayer())[0] + filename_0 = re.sub('_', '_beauty', + filename_0, flags=re.IGNORECASE) + prefix_attr = "defaultRenderGlobals.imageFilePrefix" + + scene = cmds.file(query=True, sceneName=True) + scene, _ = os.path.splitext(os.path.basename(scene)) + + if renderer == "vray": + renderlayer = renderlayer.split("_")[-1] + # Maya's renderSettings function does not return V-Ray file extension + # so we get the extension from vraySettings + extension = cmds.getAttr("vraySettings.imageFormatStr") + + # When V-Ray image format has not been switched once from default .png + # the getAttr command above returns None. As such we explicitly set + # it to `.png` + if extension is None: + extension = "png" + + if extension in ["exr (multichannel)", "exr (deep)"]: + extension = "exr" + + prefix_attr = "vraySettings.fileNamePrefix" + filename_prefix = cmds.getAttr(prefix_attr) + # we need to determine path for vray as maya `renderSettings` query + # does not work for vray. 
+ + filename_0 = re.sub('', scene, filename_prefix, flags=re.IGNORECASE) # noqa: E501 + filename_0 = re.sub('', renderlayer, filename_0, flags=re.IGNORECASE) # noqa: E501 + filename_0 = "{}.{}.{}".format( + filename_0, "#" * int(padding), extension) + filename_0 = os.path.normpath(os.path.join(root, filename_0)) + elif renderer == "renderman": + prefix_attr = "rmanGlobals.imageFileFormat" + # NOTE: This is guessing extensions from renderman display types. + # Some of them are just framebuffers, d_texture format can be + # set in display setting. We set those now to None, but it + # should be handled more gracefully. + display_types = { + "d_deepexr": "exr", + "d_it": None, + "d_null": None, + "d_openexr": "exr", + "d_png": "png", + "d_pointcloud": "ptc", + "d_targa": "tga", + "d_texture": None, + "d_tiff": "tif" + } + + extension = display_types.get( + cmds.listConnections("rmanDefaultDisplay.displayType")[0], + "exr" + ) or "exr" + + filename_prefix = "{}/{}".format( + cmds.getAttr("rmanGlobals.imageOutputDir"), + cmds.getAttr("rmanGlobals.imageFileFormat") + ) + + renderlayer = renderlayer.split("_")[-1] + + filename_0 = re.sub('', scene, filename_prefix, flags=re.IGNORECASE) # noqa: E501 + filename_0 = re.sub('', renderlayer, filename_0, flags=re.IGNORECASE) # noqa: E501 + filename_0 = re.sub('', "#" * int(padding), filename_0, flags=re.IGNORECASE) # noqa: E501 + filename_0 = re.sub('', extension, filename_0, flags=re.IGNORECASE) # noqa: E501 + filename_0 = os.path.normpath(os.path.join(root, filename_0)) + elif renderer == "redshift": + # mapping redshift extension dropdown values to strings + ext_mapping = ["iff", "exr", "tif", "png", "tga", "jpg"] + extension = ext_mapping[ + cmds.getAttr("redshiftOptions.imageFormat") + ] + else: + # Get the extension, getAttr defaultRenderGlobals.imageFormat + # returns an index number. 
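+        # Rather than mapping that index to a string here, the extension is
+        # simply read back from the filename that the renderSettings query
+        # above produced, e.g. (illustrative) "sh010_beauty.####.exr"
+        # -> "exr".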
+ filename_base = os.path.basename(filename_0) + extension = os.path.splitext(filename_base)[-1].strip(".") + + filename_prefix = cmds.getAttr(prefix_attr) + return {"ext": extension, + "filename_prefix": filename_prefix, + "padding": padding, + "filename_0": filename_0} + + From da5353aa2dd1d9e774a72dbc57a6ac5d7368afdd Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 24 Aug 2022 14:20:35 +0200 Subject: [PATCH 035/169] git: update gitignore --- .gitignore | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.gitignore b/.gitignore index ea5b20eb69..4b773e97ed 100644 --- a/.gitignore +++ b/.gitignore @@ -107,3 +107,6 @@ website/.docusaurus mypy.ini tools/run_eventserver.* + +# Developer tools +tools/dev_* From 0f95f87d773ddcbe979fe28d5f0196f1befad38e Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 24 Aug 2022 14:59:14 +0200 Subject: [PATCH 036/169] More draft refactoring - still not functional (WIP commit for my own sanity) --- .../plugins/publish/submit_maya_deadline.py | 185 ++++-------------- 1 file changed, 35 insertions(+), 150 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index 6dfa48a9f8..5a7d0b98c6 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -31,7 +31,6 @@ import clique from maya import cmds -from openpype.hosts.maya.api import lib from openpype.pipeline import legacy_io from openpype_modules.deadline import abstract_submit_deadline @@ -87,11 +86,15 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): instance = self._instance context = instance.context - filepath = context.data["currentFile"] - filename = os.path.basename(filepath) + # Always use the original work file name for the Job name even when + # rendering is done from the published Work File. The original work + # file name is clearer because it can also have subversion strings, + # etc. which are stripped for the published file. + src_filepath = context.data["currentFile"] + src_filename = os.path.basename(src_filepath) - job_info.Name = "%s - %s" % (filename, instance.name) - job_info.BatchName = filename + job_info.Name = "%s - %s" % (src_filename, instance.name) + job_info.BatchName = src_filename job_info.Plugin = instance.data.get("mayaRenderPlugin", "MayaBatch") job_info.UserName = context.data.get( "deadlineUser", getpass.getuser()) @@ -116,9 +119,6 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): if self.limit_groups: job_info.LimitGroups = ",".join(self.limit_groups) - self.payload_skeleton["JobInfo"]["Name"] = jobname - self.payload_skeleton["JobInfo"]["BatchName"] = src_filename - # Optional, enable double-click to preview rendered # frames from Deadline Monitor self.payload_skeleton["JobInfo"]["OutputDirectory0"] = \ @@ -227,11 +227,17 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): return attr.asdict(plugin_info) def process_submission(self): - # Override to NOT submit by default when calling super process() method - pass - def process(self, instance): - super(MayaSubmitDeadline, self).process(instance) + instance = self._instance + context = instance.context + + # Generated by AbstractSubmitDeadline. 
The `job_info`, `plugin_info` + # and `aux_files` are the skeleton payloads that are the basis for + # all the maya submissions + job_info = self.job_info + plugin_info = self.plugin_info + aux_files = self.aux_files + filepath = self.scene_path # publish if `use_publish` else workfile # TODO: Avoid the need for this logic here, needed for submit publish # Store output dir for unified publisher (filesequence) @@ -241,21 +247,19 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): self.limit_groups = self.limit - context = instance.context - workspace = context.data["workspaceDir"] - - filepath = None - patches = ( - context.data["project_settings"].get( - "deadline", {}).get( - "publish", {}).get( - "MayaSubmitDeadline", {}).get( - "scene_patches", {}) - ) + # Patch workfile (only when use_published is enabled) + if self.use_published: + patches = ( + context.data["project_settings"].get( + "deadline", {}).get( + "publish", {}).get( + "MayaSubmitDeadline", {}).get( + "scene_patches", {}) + ) + self._patch_workfile(filepath, patches) # todo: on self.use_published originally use template_data["representation"] using .get("name") instead of .get("ext") # todo: on self.use_published replace path for publishRenderMetadataFolder - # todo: on self.use_published apply scene patches to workfile instance # rep = i.data.get("representations")[0].get("name") # if instance.data.get("publishRenderMetadataFolder"): @@ -270,9 +274,8 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): # patched_file = self._patch_workfile(filepath, patches) # patched_files.append(patched_file) - filepath = self.scene_path # collect by super().process - # Gather needed data ------------------------------------------------ + workspace = context.data["workspaceDir"] default_render_file = instance.context.data.get('project_settings')\ .get('maya')\ .get('RenderSettings')\ @@ -281,14 +284,10 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): dirname = os.path.join(workspace, default_render_file) renderlayer = instance.data['setMembers'] # rs_beauty - # Always use the original work file name for the Job name even when - # rendering is done from the published Work File. The original work - # file name is clearer because it can also have subversion strings, - # etc. which are stripped for the published file. 
- src_filename = os.path.basename(context.data["currentFile"]) - jobname = "%s - %s" % (src_filename, instance.name) - # Get the variables depending on the renderer + # TODO: Find replacement logic for `get_renderer_variables` through + # what is collected for the render or is implemented in maya + # api `lib_renderproducts` render_variables = get_renderer_variables(renderlayer, dirname) filename_0 = render_variables["filename_0"] if self.use_published: @@ -842,8 +841,8 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): str: Patched file path or None """ - if os.path.splitext(file)[1].lower() != ".ma" or not patches: - return None + if not patches or os.path.splitext(file)[1].lower() != ".ma": + return compiled_regex = [re.compile(p["regex"]) for p in patches] with open(file, "r+") as pf: @@ -931,7 +930,7 @@ def _format_tiles( os.path.basename(filename) ) out["JobInfo"][out_tile_index] = new_filename - out["PluginInfo"]["RegionPrefix{}".format(str(tile))] = \ + out["PluginInfo"]["RegionPrefix{}".format(tile)] = \ "/{}".format(tile_prefix).join(prefix.rsplit("/", 1)) out["PluginInfo"]["RegionTop{}".format(tile)] = int(height) - (tile_y * h_space) # noqa: E501 @@ -951,117 +950,3 @@ def _format_tiles( tile += 1 return out, cfg - - -def get_renderer_variables(renderlayer, root): - """Retrieve the extension which has been set in the VRay settings. - - Will return None if the current renderer is not VRay - For Maya 2016.5 and up the renderSetup creates renderSetupLayer node which - start with `rs`. Use the actual node name, do NOT use the `nice name` - - Args: - renderlayer (str): the node name of the renderlayer. - root (str): base path to render - - Returns: - dict - - """ - renderer = lib.get_renderer(renderlayer or lib.get_current_renderlayer()) - render_attrs = lib.RENDER_ATTRS.get(renderer, lib.RENDER_ATTRS["default"]) - - padding = cmds.getAttr("{}.{}".format(render_attrs["node"], - render_attrs["padding"])) - - filename_0 = cmds.renderSettings( - fullPath=True, - gin="#" * int(padding), - lut=True, - layer=renderlayer or lib.get_current_renderlayer())[0] - filename_0 = re.sub('_', '_beauty', - filename_0, flags=re.IGNORECASE) - prefix_attr = "defaultRenderGlobals.imageFilePrefix" - - scene = cmds.file(query=True, sceneName=True) - scene, _ = os.path.splitext(os.path.basename(scene)) - - if renderer == "vray": - renderlayer = renderlayer.split("_")[-1] - # Maya's renderSettings function does not return V-Ray file extension - # so we get the extension from vraySettings - extension = cmds.getAttr("vraySettings.imageFormatStr") - - # When V-Ray image format has not been switched once from default .png - # the getAttr command above returns None. As such we explicitly set - # it to `.png` - if extension is None: - extension = "png" - - if extension in ["exr (multichannel)", "exr (deep)"]: - extension = "exr" - - prefix_attr = "vraySettings.fileNamePrefix" - filename_prefix = cmds.getAttr(prefix_attr) - # we need to determine path for vray as maya `renderSettings` query - # does not work for vray. - - filename_0 = re.sub('', scene, filename_prefix, flags=re.IGNORECASE) # noqa: E501 - filename_0 = re.sub('', renderlayer, filename_0, flags=re.IGNORECASE) # noqa: E501 - filename_0 = "{}.{}.{}".format( - filename_0, "#" * int(padding), extension) - filename_0 = os.path.normpath(os.path.join(root, filename_0)) - elif renderer == "renderman": - prefix_attr = "rmanGlobals.imageFileFormat" - # NOTE: This is guessing extensions from renderman display types. 
- # Some of them are just framebuffers, d_texture format can be - # set in display setting. We set those now to None, but it - # should be handled more gracefully. - display_types = { - "d_deepexr": "exr", - "d_it": None, - "d_null": None, - "d_openexr": "exr", - "d_png": "png", - "d_pointcloud": "ptc", - "d_targa": "tga", - "d_texture": None, - "d_tiff": "tif" - } - - extension = display_types.get( - cmds.listConnections("rmanDefaultDisplay.displayType")[0], - "exr" - ) or "exr" - - filename_prefix = "{}/{}".format( - cmds.getAttr("rmanGlobals.imageOutputDir"), - cmds.getAttr("rmanGlobals.imageFileFormat") - ) - - renderlayer = renderlayer.split("_")[-1] - - filename_0 = re.sub('', scene, filename_prefix, flags=re.IGNORECASE) # noqa: E501 - filename_0 = re.sub('', renderlayer, filename_0, flags=re.IGNORECASE) # noqa: E501 - filename_0 = re.sub('', "#" * int(padding), filename_0, flags=re.IGNORECASE) # noqa: E501 - filename_0 = re.sub('', extension, filename_0, flags=re.IGNORECASE) # noqa: E501 - filename_0 = os.path.normpath(os.path.join(root, filename_0)) - elif renderer == "redshift": - # mapping redshift extension dropdown values to strings - ext_mapping = ["iff", "exr", "tif", "png", "tga", "jpg"] - extension = ext_mapping[ - cmds.getAttr("redshiftOptions.imageFormat") - ] - else: - # Get the extension, getAttr defaultRenderGlobals.imageFormat - # returns an index number. - filename_base = os.path.basename(filename_0) - extension = os.path.splitext(filename_base)[-1].strip(".") - - filename_prefix = cmds.getAttr(prefix_attr) - return {"ext": extension, - "filename_prefix": filename_prefix, - "padding": padding, - "filename_0": filename_0} - - From 67fd21edae5d7c3670ee704558ff10505c64a783 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 25 Aug 2022 15:40:32 +0200 Subject: [PATCH 037/169] imlemented fusion addon --- openpype/hosts/fusion/__init__.py | 10 ++++++++++ openpype/hosts/fusion/addon.py | 23 +++++++++++++++++++++++ 2 files changed, 33 insertions(+) create mode 100644 openpype/hosts/fusion/addon.py diff --git a/openpype/hosts/fusion/__init__.py b/openpype/hosts/fusion/__init__.py index e69de29bb2..ddae01890b 100644 --- a/openpype/hosts/fusion/__init__.py +++ b/openpype/hosts/fusion/__init__.py @@ -0,0 +1,10 @@ +from .addon import ( + FusionAddon, + FUSION_HOST_DIR, +) + + +__all__ = ( + "FusionAddon", + "FUSION_HOST_DIR", +) diff --git a/openpype/hosts/fusion/addon.py b/openpype/hosts/fusion/addon.py new file mode 100644 index 0000000000..97fb262517 --- /dev/null +++ b/openpype/hosts/fusion/addon.py @@ -0,0 +1,23 @@ +import os +from openpype.modules import OpenPypeModule +from openpype.modules.interfaces import IHostModule + +FUSION_HOST_DIR = os.path.dirname(os.path.abspath(__file__)) + + +class FusionAddon(OpenPypeModule, IHostModule): + name = "fusion" + host_name = "fusion" + + def initialize(self, module_settings): + self.enabled = True + + def get_launch_hook_paths(self, app): + if app.host_name != self.host_name: + return [] + return [ + os.path.join(FUSION_HOST_DIR, "hooks") + ] + + def get_workfile_extensions(self): + return [".comp"] From 3ea7510693ec23623ec7760f3479e10b363a2adf Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 25 Aug 2022 15:40:40 +0200 Subject: [PATCH 038/169] removed usage of HOST_WORKFILE_EXTENSIONS --- openpype/hosts/fusion/api/workio.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/openpype/hosts/fusion/api/workio.py b/openpype/hosts/fusion/api/workio.py index a1710c6e3a..89752d3e6d 100644 --- 
a/openpype/hosts/fusion/api/workio.py +++ b/openpype/hosts/fusion/api/workio.py @@ -2,13 +2,11 @@ import sys import os -from openpype.pipeline import HOST_WORKFILE_EXTENSIONS - from .pipeline import get_current_comp def file_extensions(): - return HOST_WORKFILE_EXTENSIONS["fusion"] + return [".comp"] def has_unsaved_changes(): From cf50d1dd1f00fe178d8bfc28a039579222cab1d6 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 25 Aug 2022 15:40:56 +0200 Subject: [PATCH 039/169] reuse 'FUSION_HOST_DIR' from fusion public api --- openpype/hosts/fusion/api/pipeline.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/fusion/api/pipeline.py b/openpype/hosts/fusion/api/pipeline.py index 54a6c94b60..987eae214b 100644 --- a/openpype/hosts/fusion/api/pipeline.py +++ b/openpype/hosts/fusion/api/pipeline.py @@ -18,12 +18,11 @@ from openpype.pipeline import ( deregister_inventory_action_path, AVALON_CONTAINER_ID, ) -import openpype.hosts.fusion +from openpype.hosts.fusion import FUSION_HOST_DIR log = Logger.get_logger(__name__) -HOST_DIR = os.path.dirname(os.path.abspath(openpype.hosts.fusion.__file__)) -PLUGINS_DIR = os.path.join(HOST_DIR, "plugins") +PLUGINS_DIR = os.path.join(FUSION_HOST_DIR, "plugins") PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish") LOAD_PATH = os.path.join(PLUGINS_DIR, "load") From 38932ba3012668e8ca2a239ffa2b68b002d979d6 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Sat, 27 Aug 2022 10:38:01 +0200 Subject: [PATCH 040/169] fixed interface name --- openpype/hosts/fusion/addon.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/fusion/addon.py b/openpype/hosts/fusion/addon.py index 97fb262517..e257005061 100644 --- a/openpype/hosts/fusion/addon.py +++ b/openpype/hosts/fusion/addon.py @@ -1,11 +1,11 @@ import os from openpype.modules import OpenPypeModule -from openpype.modules.interfaces import IHostModule +from openpype.modules.interfaces import IHostAddon FUSION_HOST_DIR = os.path.dirname(os.path.abspath(__file__)) -class FusionAddon(OpenPypeModule, IHostModule): +class FusionAddon(OpenPypeModule, IHostAddon): name = "fusion" host_name = "fusion" From 0d8cf12618cee76a5e144429a3459074b14e4adf Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 29 Aug 2022 17:14:22 +0200 Subject: [PATCH 041/169] define new source where publish templates are not defined in integrate plubin --- .../defaults/project_settings/global.json | 3 + .../defaults/project_settings/maya.json | 2 +- .../schemas/schema_global_publish.json | 4 ++ .../schemas/schema_global_tools.json | 57 +++++++++++++++++++ 4 files changed, 65 insertions(+), 1 deletion(-) diff --git a/openpype/settings/defaults/project_settings/global.json b/openpype/settings/defaults/project_settings/global.json index 0ff9363ba7..3e00cd725e 100644 --- a/openpype/settings/defaults/project_settings/global.json +++ b/openpype/settings/defaults/project_settings/global.json @@ -414,6 +414,9 @@ "filter_families": [] } ] + }, + "publish": { + "template_name_profiles": [] } }, "project_folder_structure": "{\"__project_root__\": {\"prod\": {}, \"resources\": {\"footage\": {\"plates\": {}, \"offline\": {}}, \"audio\": {}, \"art_dept\": {}}, \"editorial\": {}, \"assets\": {\"characters\": {}, \"locations\": {}}, \"shots\": {}}}", diff --git a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index 28f6d23e4d..38063bc2c1 100644 --- a/openpype/settings/defaults/project_settings/maya.json +++ 
b/openpype/settings/defaults/project_settings/maya.json @@ -980,4 +980,4 @@ "ValidateNoAnimation": false } } -} +} \ No newline at end of file diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json index e1aa230b49..c24c88d04a 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json @@ -642,6 +642,10 @@ ] } }, + { + "type": "label", + "label": "NOTE: Publish template profiles settings were moved to Tools/Publish/Template name profiles. Please move values there." + }, { "type": "list", "key": "template_name_profiles", diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_tools.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_tools.json index f8c9482e5f..7dc44c2842 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_tools.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_tools.json @@ -284,6 +284,63 @@ } } ] + }, + { + "type": "dict", + "key": "publish", + "label": "Publish", + "children": [ + { + "type": "label", + "label": "NOTE: For backwards compatibility can be value empty and in that case are used values from IntegrateAssetNew. This will change in future so please move all values here as soon as possible." + }, + { + "type": "list", + "key": "template_name_profiles", + "label": "Template name profiles", + "use_label_wrap": true, + "object_type": { + "type": "dict", + "children": [ + { + "type": "label", + "label": "" + }, + { + "key": "families", + "label": "Families", + "type": "list", + "object_type": "text" + }, + { + "type": "hosts-enum", + "key": "hosts", + "label": "Hosts", + "multiselection": true + }, + { + "key": "task_types", + "label": "Task types", + "type": "task-types-enum" + }, + { + "key": "tasks", + "label": "Task names", + "type": "list", + "object_type": "text" + }, + { + "type": "separator" + }, + { + "type": "text", + "key": "template_name", + "label": "Template name" + } + ] + } + } + ] } ] } From 2b6c4659237259b6c691dd2b5dc1db927b47fcd3 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 29 Aug 2022 18:03:30 +0200 Subject: [PATCH 042/169] added helper functions to get template name --- openpype/pipeline/publish/__init__.py | 4 ++ openpype/pipeline/publish/contants.py | 1 + openpype/pipeline/publish/lib.py | 97 ++++++++++++++++++++++++++- 3 files changed, 100 insertions(+), 2 deletions(-) create mode 100644 openpype/pipeline/publish/contants.py diff --git a/openpype/pipeline/publish/__init__.py b/openpype/pipeline/publish/__init__.py index aa7fe0bdbf..a2aa61c4d5 100644 --- a/openpype/pipeline/publish/__init__.py +++ b/openpype/pipeline/publish/__init__.py @@ -10,6 +10,8 @@ from .publish_plugins import ( ) from .lib import ( + get_publish_template_name, + DiscoverResult, publish_plugins_discover, load_help_content_from_plugin, @@ -33,6 +35,8 @@ __all__ = ( "OpenPypePyblishPluginMixin", "OptionalPyblishPluginMixin", + "get_publish_template_name", + "DiscoverResult", "publish_plugins_discover", "load_help_content_from_plugin", diff --git a/openpype/pipeline/publish/contants.py b/openpype/pipeline/publish/contants.py new file mode 100644 index 0000000000..958675ecc1 --- /dev/null +++ b/openpype/pipeline/publish/contants.py @@ -0,0 +1 @@ +DEFAULT_PUBLISH_TEMPLATE = "publish" 
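A note on the data these helpers consume: a "template_name_profiles" entry
filled in project settings might look like the following (illustrative
values only; "publish_render" has to exist in the project Anatomy templates,
and empty lists are typically treated as "match anything" by the profile
filtering):

    [
        {
            "families": ["render", "prerender"],
            "hosts": ["maya"],
            "task_types": [],
            "tasks": [],
            "template_name": "publish_render"
        }
    ]

When no profile matches, the helpers below fall back to
DEFAULT_PUBLISH_TEMPLATE ("publish").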
diff --git a/openpype/pipeline/publish/lib.py b/openpype/pipeline/publish/lib.py index 9060a0bf4b..7c3ea22c06 100644 --- a/openpype/pipeline/publish/lib.py +++ b/openpype/pipeline/publish/lib.py @@ -8,8 +8,101 @@ import six import pyblish.plugin import pyblish.api -from openpype.lib import Logger -from openpype.settings import get_project_settings, get_system_settings +from openpype.lib import Logger, filter_profiles +from openpype.settings import ( + get_project_settings, + get_system_settings, +) + +from .contants import DEFAULT_PUBLISH_TEMPLATE + + +def get_template_name_profiles(project_name=None, project_settings=None): + """Receive profiles for publish template keys. + + At least one of arguments must be passed. + + Args: + project_name (str): Name of project where to look for templates. + project_settings(Dic[str, Any]): Prepared project settings. + + Returns: + List[Dict[str, Any]]: Publish template profiles. + """ + + if not project_name and not project_settings: + raise ValueError(( + "Both project name and project settings are missing." + " At least one must be entered." + )) + + if not project_settings: + project_settings = get_project_settings(project_name) + + profiles = ( + project_settings + ["global"] + ["tools"] + ["publish"] + ["template_name_profiles"] + ) + if profiles: + return profiles + + # Use legacy approach for cases new settings are not filled yet for the + # project + return ( + project_settings + ["global"] + ["publish"] + ["IntegrateAssetNew"] + ["template_name_profiles"] + ) + + +def get_publish_template_name( + project_name, + host_name, + family, + task_name, + task_type, + project_settings=None, + logger=None +): + """Get template name which should be used for passed context. + + Publish templates are filtered by host name, family, task name and + task type. + + Default template which is used at if profiles are not available or profile + has empty value is defined by 'DEFAULT_PUBLISH_TEMPLATE' constant. + + Args: + project_name (str): Name of project where to look for settings. + host_name (str): Name of host integration. + family (str): Family for which should be found template. + task_name (str): Task name on which is intance working. + task_type (str): Task type on which is intance working. + project_setting (Dict[str, Any]): Prepared project settings. + logger (logging.Logger): Custom logger used for 'filter_profiles' + function. + + Returns: + str: Template name which should be used for integration. 
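+
+    Example::
+
+        # Illustrative call; the result depends on the profiles configured
+        # in project settings and falls back to "publish" (the value of
+        # DEFAULT_PUBLISH_TEMPLATE) when no profile matches.
+        get_publish_template_name(
+            "MyProject", "maya", "render",
+            task_name="lighting", task_type="Lighting"
+        )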
+ """ + + template = None + filter_criteria = { + "hosts": host_name, + "families": family, + "tasks": task_name, + "task_types": task_type, + } + profiles = get_template_name_profiles(project_name, project_settings) + profile = filter_profiles(profiles, filter_criteria, logger=logger) + if profile: + template = profile["template_name"] + return template or DEFAULT_PUBLISH_TEMPLATE class DiscoverResult: From 96138a0b73ba6a3f9757283853da9cd1aa85c023 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 29 Aug 2022 18:22:49 +0200 Subject: [PATCH 043/169] use new functions in integrators --- openpype/plugins/publish/integrate.py | 61 ++++++-------------- openpype/plugins/publish/integrate_legacy.py | 21 +++---- 2 files changed, 27 insertions(+), 55 deletions(-) diff --git a/openpype/plugins/publish/integrate.py b/openpype/plugins/publish/integrate.py index f99c718f8a..56d2621015 100644 --- a/openpype/plugins/publish/integrate.py +++ b/openpype/plugins/publish/integrate.py @@ -26,7 +26,10 @@ from openpype.lib import source_hash from openpype.lib.profiles_filtering import filter_profiles from openpype.lib.file_transaction import FileTransaction from openpype.pipeline import legacy_io -from openpype.pipeline.publish import KnownPublishError +from openpype.pipeline.publish import ( + KnownPublishError, + get_publish_template_name, +) log = logging.getLogger(__name__) @@ -792,52 +795,26 @@ class IntegrateAsset(pyblish.api.InstancePlugin): def get_template_name(self, instance): """Return anatomy template name to use for integration""" - # Define publish template name from profiles - filter_criteria = self.get_profile_filter_criteria(instance) - template_name_profiles = self._get_template_name_profiles(instance) - profile = filter_profiles( - template_name_profiles, - filter_criteria, - logger=self.log - ) - - if profile: - return profile["template_name"] - return self.default_template_name - - def _get_template_name_profiles(self, instance): - """Receive profiles for publish template keys. - - Reuse template name profiles from legacy integrator. Goal is to move - the profile settings out of plugin settings but until that happens we - want to be able set it at one place and don't break backwards - compatibility (more then once). 
- """ - - return ( - instance.context.data["project_settings"] - ["global"] - ["publish"] - ["IntegrateAssetNew"] - ["template_name_profiles"] - ) - - def get_profile_filter_criteria(self, instance): - """Return filter criteria for `filter_profiles`""" # Anatomy data is pre-filled by Collectors - anatomy_data = instance.data["anatomyData"] + + project_name = legacy_io.active_project() # Task can be optional in anatomy data - task = anatomy_data.get("task", {}) + host_name = instance.context.data["hostName"] + anatomy_data = instance.data["anatomyData"] + family = anatomy_data["family"] + task_info = anatomy_data.get("task") or {} - # Return filter criteria - return { - "families": anatomy_data["family"], - "tasks": task.get("name"), - "task_types": task.get("type"), - "hosts": instance.context.data["hostName"], - } + return get_publish_template_name( + project_name, + host_name, + family, + task_name=task_info.get("name"), + task_type=task_info.get("type"), + project_settings=instance.context.data["project_settings"], + logger=self.log + ) def get_rootless_path(self, anatomy, path): """Returns, if possible, path without absolute portion from root diff --git a/openpype/plugins/publish/integrate_legacy.py b/openpype/plugins/publish/integrate_legacy.py index b90b61f587..fedaae794a 100644 --- a/openpype/plugins/publish/integrate_legacy.py +++ b/openpype/plugins/publish/integrate_legacy.py @@ -33,6 +33,7 @@ from openpype.lib import ( TemplateUnsolved ) from openpype.pipeline import legacy_io +from openpype.pipeline.publish import get_publish_template_name # this is needed until speedcopy for linux is fixed if sys.platform == "win32": @@ -388,22 +389,16 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): family = self.main_family_from_instance(instance) - key_values = { - "families": family, - "tasks": task_name, - "hosts": instance.context.data["hostName"], - "task_types": task_type - } - profile = filter_profiles( - self.template_name_profiles, - key_values, + template_name = get_publish_template_name( + project_name, + instance.context.data["hostName"], + family, + task_name=task_info.get("name"), + task_type=task_info.get("type"), + project_settings=instance.context.data["project_settings"], logger=self.log ) - template_name = "publish" - if profile: - template_name = profile["template_name"] - published_representations = {} for idx, repre in enumerate(repres): published_files = [] From c7108ac7fbad46fe2aafe669498cb3755d9c7730 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 29 Aug 2022 18:23:37 +0200 Subject: [PATCH 044/169] modified imports in integrators --- openpype/plugins/publish/integrate.py | 6 +++--- openpype/plugins/publish/integrate_legacy.py | 11 ++++++----- 2 files changed, 9 insertions(+), 8 deletions(-) diff --git a/openpype/plugins/publish/integrate.py b/openpype/plugins/publish/integrate.py index 56d2621015..8b60ea3b51 100644 --- a/openpype/plugins/publish/integrate.py +++ b/openpype/plugins/publish/integrate.py @@ -5,6 +5,9 @@ import copy import clique import six +from bson.objectid import ObjectId +import pyblish.api + from openpype.client.operations import ( OperationsSession, new_subset_document, @@ -14,8 +17,6 @@ from openpype.client.operations import ( prepare_version_update_data, prepare_representation_update_data, ) -from bson.objectid import ObjectId -import pyblish.api from openpype.client import ( get_representations, @@ -23,7 +24,6 @@ from openpype.client import ( get_version_by_name, ) from openpype.lib import source_hash -from 
openpype.lib.profiles_filtering import filter_profiles from openpype.lib.file_transaction import FileTransaction from openpype.pipeline import legacy_io from openpype.pipeline.publish import ( diff --git a/openpype/plugins/publish/integrate_legacy.py b/openpype/plugins/publish/integrate_legacy.py index fedaae794a..0e157c9d1f 100644 --- a/openpype/plugins/publish/integrate_legacy.py +++ b/openpype/plugins/publish/integrate_legacy.py @@ -15,7 +15,6 @@ from bson.objectid import ObjectId from pymongo import DeleteOne, InsertOne import pyblish.api -import openpype.api from openpype.client import ( get_asset_by_name, get_subset_by_id, @@ -25,12 +24,14 @@ from openpype.client import ( get_representations, get_archived_representations, ) -from openpype.lib.profiles_filtering import filter_profiles from openpype.lib import ( prepare_template_data, create_hard_link, StringTemplate, - TemplateUnsolved + TemplateUnsolved, + source_hash, + filter_profiles, + get_local_site_id, ) from openpype.pipeline import legacy_io from openpype.pipeline.publish import get_publish_template_name @@ -1053,7 +1054,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): for _src, dest in resources: path = self.get_rootless_path(anatomy, dest) dest = self.get_dest_temp_url(dest) - file_hash = openpype.api.source_hash(dest) + file_hash = source_hash(dest) if self.TMP_FILE_EXT and \ ',{}'.format(self.TMP_FILE_EXT) in file_hash: file_hash = file_hash.replace(',{}'.format(self.TMP_FILE_EXT), @@ -1163,7 +1164,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): def _get_sites(self, sync_project_presets): """Returns tuple (local_site, remote_site)""" - local_site_id = openpype.api.get_local_site_id() + local_site_id = get_local_site_id() local_site = sync_project_presets["config"]. 
\ get("active_site", "studio").strip() From c76a1a1dbbe8e705b06ebf02f37237cf7dda98fd Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 29 Aug 2022 18:52:43 +0200 Subject: [PATCH 045/169] added settings for hero templates and changed 'tasks' to 'task_names' --- .../defaults/project_settings/global.json | 3 +- .../schemas/schema_global_tools.json | 47 +++++++++++++++++-- 2 files changed, 45 insertions(+), 5 deletions(-) diff --git a/openpype/settings/defaults/project_settings/global.json b/openpype/settings/defaults/project_settings/global.json index 3e00cd725e..8692f95a04 100644 --- a/openpype/settings/defaults/project_settings/global.json +++ b/openpype/settings/defaults/project_settings/global.json @@ -416,7 +416,8 @@ ] }, "publish": { - "template_name_profiles": [] + "template_name_profiles": [], + "hero_template_name_profiles": [] } }, "project_folder_structure": "{\"__project_root__\": {\"prod\": {}, \"resources\": {\"footage\": {\"plates\": {}, \"offline\": {}}, \"audio\": {}, \"art_dept\": {}}, \"editorial\": {}, \"assets\": {\"characters\": {}, \"locations\": {}}, \"shots\": {}}}", diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_tools.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_tools.json index 7dc44c2842..c919cd73c5 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_tools.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_tools.json @@ -303,9 +303,47 @@ "type": "dict", "children": [ { - "type": "label", - "label": "" + "key": "families", + "label": "Families", + "type": "list", + "object_type": "text" }, + { + "type": "hosts-enum", + "key": "hosts", + "label": "Hosts", + "multiselection": true + }, + { + "key": "task_types", + "label": "Task types", + "type": "task-types-enum" + }, + { + "key": "task_names", + "label": "Task names", + "type": "list", + "object_type": "text" + }, + { + "type": "separator" + }, + { + "type": "text", + "key": "template_name", + "label": "Template name" + } + ] + } + }, + { + "type": "list", + "key": "hero_template_name_profiles", + "label": "Hero template name profiles", + "use_label_wrap": true, + "object_type": { + "type": "dict", + "children": [ { "key": "families", "label": "Families", @@ -324,7 +362,7 @@ "type": "task-types-enum" }, { - "key": "tasks", + "key": "task_names", "label": "Task names", "type": "list", "object_type": "text" @@ -335,7 +373,8 @@ { "type": "text", "key": "template_name", - "label": "Template name" + "label": "Template name", + "tooltip": "Name of template from Anatomy templates" } ] } From 9d4416719b4a99d50b0d411b5548a8afa8072240 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 29 Aug 2022 18:53:33 +0200 Subject: [PATCH 046/169] convert legacy to new settings by replacing 'tasks' with 'task_names' --- openpype/pipeline/publish/lib.py | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/openpype/pipeline/publish/lib.py b/openpype/pipeline/publish/lib.py index 7c3ea22c06..03dfbadfcc 100644 --- a/openpype/pipeline/publish/lib.py +++ b/openpype/pipeline/publish/lib.py @@ -2,6 +2,7 @@ import os import sys import types import inspect +import copy import xml.etree.ElementTree import six @@ -47,17 +48,23 @@ def get_template_name_profiles(project_name=None, project_settings=None): ["template_name_profiles"] ) if profiles: - return profiles + return copy.deepcopy(profiles) # Use legacy approach for cases new settings are not filled yet for the # project - 
return ( + legacy_profiles = ( project_settings ["global"] ["publish"] ["IntegrateAssetNew"] ["template_name_profiles"] ) + # Replace "tasks" key with "task_names" + profiles = [] + for profile in copy.deepcopy(legacy_profiles): + profile["task_names"] = profile.pop("tasks", []) + profiles.append(profile) + return profiles def get_publish_template_name( @@ -95,7 +102,7 @@ def get_publish_template_name( filter_criteria = { "hosts": host_name, "families": family, - "tasks": task_name, + "task_names": task_name, "task_types": task_type, } profiles = get_template_name_profiles(project_name, project_settings) From 63f5b5f2ab40a94c7496b8f08fa19204a5687b5a Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 29 Aug 2022 18:53:49 +0200 Subject: [PATCH 047/169] added ability to get hero version template name --- openpype/pipeline/publish/contants.py | 1 + openpype/pipeline/publish/lib.py | 64 +++++++++++++++++++++++++-- 2 files changed, 62 insertions(+), 3 deletions(-) diff --git a/openpype/pipeline/publish/contants.py b/openpype/pipeline/publish/contants.py index 958675ecc1..169eca2e5c 100644 --- a/openpype/pipeline/publish/contants.py +++ b/openpype/pipeline/publish/contants.py @@ -1 +1,2 @@ DEFAULT_PUBLISH_TEMPLATE = "publish" +DEFAULT_HERO_PUBLISH_TEMPLATE = "hero" diff --git a/openpype/pipeline/publish/lib.py b/openpype/pipeline/publish/lib.py index 03dfbadfcc..85a64da721 100644 --- a/openpype/pipeline/publish/lib.py +++ b/openpype/pipeline/publish/lib.py @@ -15,7 +15,10 @@ from openpype.settings import ( get_system_settings, ) -from .contants import DEFAULT_PUBLISH_TEMPLATE +from .contants import ( + DEFAULT_PUBLISH_TEMPLATE, + DEFAULT_HERO_PUBLISH_TEMPLATE, +) def get_template_name_profiles(project_name=None, project_settings=None): @@ -67,6 +70,49 @@ def get_template_name_profiles(project_name=None, project_settings=None): return profiles +def get_hero_template_name_profiles(project_name=None, project_settings=None): + """Receive profiles for hero publish template keys. + + At least one of arguments must be passed. + + Args: + project_name (str): Name of project where to look for templates. + project_settings(Dic[str, Any]): Prepared project settings. + + Returns: + List[Dict[str, Any]]: Publish template profiles. + """ + + if not project_name and not project_settings: + raise ValueError(( + "Both project name and project settings are missing." + " At least one must be entered." + )) + + if not project_settings: + project_settings = get_project_settings(project_name) + + profiles = ( + project_settings + ["global"] + ["tools"] + ["publish"] + ["hero_template_name_profiles"] + ) + if profiles: + return copy.deepcopy(profiles) + + # Use legacy approach for cases new settings are not filled yet for the + # project + return copy.deepcopy( + project_settings + ["global"] + ["publish"] + ["IntegrateHeroVersion"] + ["template_name_profiles"] + ) + + def get_publish_template_name( project_name, host_name, @@ -74,6 +120,7 @@ def get_publish_template_name( task_name, task_type, project_settings=None, + hero=False, logger=None ): """Get template name which should be used for passed context. 
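
Note for reviewers: a minimal usage sketch of the extended helper as it stands after this patch. The project, host and task values below are purely hypothetical, and an installed OpenPype environment with filled project settings is assumed; the integrators pass these values from collected instance data instead.

    from openpype.pipeline.publish import get_publish_template_name

    template_name = get_publish_template_name(
        project_name="my_project",   # hypothetical example values
        host_name="maya",
        family="render",
        task_name="lighting",
        task_type="Lighting",
        hero=True,  # with hero=True the fallback is DEFAULT_HERO_PUBLISH_TEMPLATE ("hero")
    )
    # With hero=False and no matching profile the result falls back to "publish".
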
@@ -105,11 +152,22 @@ def get_publish_template_name( "task_names": task_name, "task_types": task_type, } - profiles = get_template_name_profiles(project_name, project_settings) + if hero: + default_template = DEFAULT_HERO_PUBLISH_TEMPLATE + profiles = get_hero_template_name_profiles( + project_name, project_settings + ) + + else: + profiles = get_template_name_profiles( + project_name, project_settings + ) + default_template = DEFAULT_PUBLISH_TEMPLATE + profile = filter_profiles(profiles, filter_criteria, logger=logger) if profile: template = profile["template_name"] - return template or DEFAULT_PUBLISH_TEMPLATE + return template or default_template class DiscoverResult: From 1698aefcfbc887ba6f29fc59dbdfbc2595d5c6a8 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 29 Aug 2022 18:57:00 +0200 Subject: [PATCH 048/169] use 'get_publish_template_name' in hero integration --- .../plugins/publish/integrate_hero_version.py | 41 ++++++++----------- 1 file changed, 17 insertions(+), 24 deletions(-) diff --git a/openpype/plugins/publish/integrate_hero_version.py b/openpype/plugins/publish/integrate_hero_version.py index 7d698ff98d..2938c61f8e 100644 --- a/openpype/plugins/publish/integrate_hero_version.py +++ b/openpype/plugins/publish/integrate_hero_version.py @@ -14,14 +14,12 @@ from openpype.client import ( get_archived_representations, get_representations, ) -from openpype.lib import ( - create_hard_link, - filter_profiles -) +from openpype.lib import create_hard_link from openpype.pipeline import ( schema, legacy_io, ) +from openpype.pipeline.publish import get_publish_template_name class IntegrateHeroVersion(pyblish.api.InstancePlugin): @@ -68,10 +66,11 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin): ) return - template_key = self._get_template_key(instance) - anatomy = instance.context.data["anatomy"] project_name = anatomy.project_name + + template_key = self._get_template_key(project_name, instance) + if template_key not in anatomy.templates: self.log.warning(( "!!! Anatomy of project \"{}\" does not have set" @@ -527,30 +526,24 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin): return publish_folder - def _get_template_key(self, instance): + def _get_template_key(self, project_name, instance): anatomy_data = instance.data["anatomyData"] - task_data = anatomy_data.get("task") or {} - task_name = task_data.get("name") - task_type = task_data.get("type") + task_info = anatomy_data.get("task") or {} host_name = instance.context.data["hostName"] + # TODO raise error if Hero not set? 
family = self.main_family_from_instance(instance) - key_values = { - "families": family, - "task_names": task_name, - "task_types": task_type, - "hosts": host_name - } - profile = filter_profiles( - self.template_name_profiles, - key_values, + + return get_publish_template_name( + project_name, + host_name, + family, + task_info.get("name"), + task_info.get("type"), + project_settings=instance.context.data["project_settings"], + hero=True, logger=self.log ) - if profile: - template_name = profile["template_name"] - else: - template_name = self._default_template_name - return template_name def main_family_from_instance(self, instance): """Returns main family of entered instance.""" From 9b7384e1ae96b0f348911e9e163a23857dd2ca7f Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 29 Aug 2022 18:57:27 +0200 Subject: [PATCH 049/169] removed unused attribute --- openpype/plugins/publish/integrate_legacy.py | 1 - 1 file changed, 1 deletion(-) diff --git a/openpype/plugins/publish/integrate_legacy.py b/openpype/plugins/publish/integrate_legacy.py index 0e157c9d1f..bbf30c9ab7 100644 --- a/openpype/plugins/publish/integrate_legacy.py +++ b/openpype/plugins/publish/integrate_legacy.py @@ -140,7 +140,6 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): integrated_file_sizes = {} # Attributes set by settings - template_name_profiles = None subset_grouping_profiles = None def process(self, instance): From c6a6e3b21a4aaa6c98450de918c78907fed91f5e Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 29 Aug 2022 19:07:34 +0200 Subject: [PATCH 050/169] added warning for access to legacy settings --- openpype/pipeline/publish/lib.py | 35 +++++++++++++++++++++++++++----- 1 file changed, 30 insertions(+), 5 deletions(-) diff --git a/openpype/pipeline/publish/lib.py b/openpype/pipeline/publish/lib.py index 85a64da721..29c745ed15 100644 --- a/openpype/pipeline/publish/lib.py +++ b/openpype/pipeline/publish/lib.py @@ -21,7 +21,9 @@ from .contants import ( ) -def get_template_name_profiles(project_name=None, project_settings=None): +def get_template_name_profiles( + project_name, project_settings=None, logger=None +): """Receive profiles for publish template keys. At least one of arguments must be passed. @@ -62,6 +64,16 @@ def get_template_name_profiles(project_name=None, project_settings=None): ["IntegrateAssetNew"] ["template_name_profiles"] ) + if legacy_profiles: + if not logger: + logger = Logger.get_logger("get_template_name_profiles") + + logger.warning(( + "Project \"{}\" is using legacy access to publish template." + " It is recommended to move settings to new location" + " 'project_settings/global/tools/publish/template_name_profiles'." + ).format(project_name)) + # Replace "tasks" key with "task_names" profiles = [] for profile in copy.deepcopy(legacy_profiles): @@ -70,7 +82,9 @@ def get_template_name_profiles(project_name=None, project_settings=None): return profiles -def get_hero_template_name_profiles(project_name=None, project_settings=None): +def get_hero_template_name_profiles( + project_name, project_settings=None, logger=None +): """Receive profiles for hero publish template keys. At least one of arguments must be passed. 
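
Note for reviewers: each profile is a plain dictionary of filter lists plus a "template_name", matched against the current publish context. The snippet below is only a rough, self-contained illustration of that idea, not the actual openpype.lib.filter_profiles implementation (which applies additional matching and logging rules):

    def pick_profile(profiles, criteria):
        # Empty filter lists are treated as "match anything".
        for profile in profiles:
            if all(
                not profile.get(key) or value in profile[key]
                for key, value in criteria.items()
            ):
                return profile
        return None

    profiles = [
        # Hypothetical profile as it could be filled in project settings.
        {"hosts": ["maya"], "families": ["render"], "task_types": [],
         "task_names": [], "template_name": "maya_render"},
    ]
    criteria = {"hosts": "maya", "families": "render",
                "task_types": "Lighting", "task_names": "lighting"}
    print(pick_profile(profiles, criteria)["template_name"])  # maya_render
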
@@ -104,13 +118,24 @@ def get_hero_template_name_profiles(project_name=None, project_settings=None): # Use legacy approach for cases new settings are not filled yet for the # project - return copy.deepcopy( + legacy_profiles = copy.deepcopy( project_settings ["global"] ["publish"] ["IntegrateHeroVersion"] ["template_name_profiles"] ) + if legacy_profiles: + if not logger: + logger = Logger.get_logger("get_hero_template_name_profiles") + + logger.warning(( + "Project \"{}\" is using legacy access to hero publish template." + " It is recommended to move settings to new location" + " 'project_settings/global/tools/publish/" + "hero_template_name_profiles'." + ).format(project_name)) + return legacy_profiles def get_publish_template_name( @@ -155,12 +180,12 @@ def get_publish_template_name( if hero: default_template = DEFAULT_HERO_PUBLISH_TEMPLATE profiles = get_hero_template_name_profiles( - project_name, project_settings + project_name, project_settings, logger ) else: profiles = get_template_name_profiles( - project_name, project_settings + project_name, project_settings, logger ) default_template = DEFAULT_PUBLISH_TEMPLATE From dc4c32b6fc6aaeaedb6bb9d76c7b72d9f5c45c45 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 29 Aug 2022 19:14:58 +0200 Subject: [PATCH 051/169] Fix representation data for workfile --- openpype/modules/deadline/abstract_submit_deadline.py | 6 +++--- .../deadline/plugins/publish/submit_maya_deadline.py | 3 --- 2 files changed, 3 insertions(+), 6 deletions(-) diff --git a/openpype/modules/deadline/abstract_submit_deadline.py b/openpype/modules/deadline/abstract_submit_deadline.py index 577378335e..d198542370 100644 --- a/openpype/modules/deadline/abstract_submit_deadline.py +++ b/openpype/modules/deadline/abstract_submit_deadline.py @@ -554,9 +554,9 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin): "Workfile (scene) must be published along") # determine published path from Anatomy. 
template_data = i.data.get("anatomyData") - rep = i.data.get("representations")[0].get("ext") - template_data["representation"] = rep - template_data["ext"] = rep + rep = i.data.get("representations")[0] + template_data["representation"] = rep.get("name") + template_data["ext"] = rep.get("ext") template_data["comment"] = None anatomy_filled = anatomy.format(template_data) template_filled = anatomy_filled["publish"]["path"] diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index 5a7d0b98c6..68e8eaaa73 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -258,10 +258,7 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): ) self._patch_workfile(filepath, patches) - # todo: on self.use_published originally use template_data["representation"] using .get("name") instead of .get("ext") # todo: on self.use_published replace path for publishRenderMetadataFolder - # rep = i.data.get("representations")[0].get("name") - # if instance.data.get("publishRenderMetadataFolder"): # instance.data["publishRenderMetadataFolder"] = \ # instance.data["publishRenderMetadataFolder"].replace( From 67b8664be07fbb8a0061b7c8a62bf4073ef79307 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 29 Aug 2022 19:15:20 +0200 Subject: [PATCH 052/169] Remove comment for patched file code refactor since it's already implemented --- .../modules/deadline/plugins/publish/submit_maya_deadline.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index 68e8eaaa73..07ed237c94 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -266,10 +266,6 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): # self.log.info("Scene name was switched {} -> {}".format( # orig_scene, new_scene # )) - # # patch workfile is needed - # if filepath not in patched_files: - # patched_file = self._patch_workfile(filepath, patches) - # patched_files.append(patched_file) # Gather needed data ------------------------------------------------ workspace = context.data["workspaceDir"] From 2da8f036dee501be62da07d07fa9efafc9e8839f Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 29 Aug 2022 19:19:37 +0200 Subject: [PATCH 053/169] Refactor logic for less indentation --- .../deadline/abstract_submit_deadline.py | 103 +++++++++--------- 1 file changed, 52 insertions(+), 51 deletions(-) diff --git a/openpype/modules/deadline/abstract_submit_deadline.py b/openpype/modules/deadline/abstract_submit_deadline.py index d198542370..55e16d8d21 100644 --- a/openpype/modules/deadline/abstract_submit_deadline.py +++ b/openpype/modules/deadline/abstract_submit_deadline.py @@ -546,65 +546,66 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin): anatomy = self._instance.context.data['anatomy'] file_path = None for i in self._instance.context: - if "workfile" in i.data["families"] \ - or i.data["family"] == "workfile": - # test if there is instance of workfile waiting - # to be published. - assert i.data["publish"] is True, ( - "Workfile (scene) must be published along") - # determine published path from Anatomy. 
- template_data = i.data.get("anatomyData") - rep = i.data.get("representations")[0] - template_data["representation"] = rep.get("name") - template_data["ext"] = rep.get("ext") - template_data["comment"] = None - anatomy_filled = anatomy.format(template_data) - template_filled = anatomy_filled["publish"]["path"] - file_path = os.path.normpath(template_filled) - self.log.info("Using published scene for render {}".format( - file_path)) + is_workfile = + if not is_workfile: + continue - if not os.path.exists(file_path): - self.log.error("published scene does not exist!") - raise + # test if there is instance of workfile waiting + # to be published. + assert i.data["publish"] is True, ( + "Workfile (scene) must be published along") + # determine published path from Anatomy. + template_data = i.data.get("anatomyData") + rep = i.data.get("representations")[0] + template_data["representation"] = rep.get("name") + template_data["ext"] = rep.get("ext") + template_data["comment"] = None + anatomy_filled = anatomy.format(template_data) + template_filled = anatomy_filled["publish"]["path"] + file_path = os.path.normpath(template_filled) - if not replace_in_path: - return file_path + self.log.info("Using published scene for render {}".format( + file_path)) - # now we need to switch scene in expected files - # because token will now point to published - # scene file and that might differ from current one - new_scene = os.path.splitext( - os.path.basename(file_path))[0] - orig_scene = os.path.splitext( - os.path.basename( - self._instance.context.data["currentFile"]))[0] - exp = self._instance.data.get("expectedFiles") + if not os.path.exists(file_path): + self.log.error("published scene does not exist!") + raise - if isinstance(exp[0], dict): - # we have aovs and we need to iterate over them - new_exp = {} - for aov, files in exp[0].items(): - replaced_files = [] - for f in files: - replaced_files.append( - str(f).replace(orig_scene, new_scene) - ) - new_exp[aov] = replaced_files - # [] might be too much here, TODO - self._instance.data["expectedFiles"] = [new_exp] - else: - new_exp = [] - for f in exp: - new_exp.append( + if not replace_in_path: + return file_path + + # now we need to switch scene in expected files + # because token will now point to published + # scene file and that might differ from current one + new_scene = os.path.splitext(os.path.basename(file_path))[0] + orig_scene = os.path.splitext(os.path.basename( + self._instance.context.data["currentFile"]))[0] + exp = self._instance.data.get("expectedFiles") + + if isinstance(exp[0], dict): + # we have aovs and we need to iterate over them + new_exp = {} + for aov, files in exp[0].items(): + replaced_files = [] + for f in files: + replaced_files.append( str(f).replace(orig_scene, new_scene) ) - self._instance.data["expectedFiles"] = new_exp + new_exp[aov] = replaced_files + # [] might be too much here, TODO + self._instance.data["expectedFiles"] = [new_exp] + else: + new_exp = [] + for f in exp: + new_exp.append( + str(f).replace(orig_scene, new_scene) + ) + self._instance.data["expectedFiles"] = new_exp - self.log.info("Scene name was switched {} -> {}".format( - orig_scene, new_scene - )) + self.log.info("Scene name was switched {} -> {}".format( + orig_scene, new_scene + )) return file_path From 21a319b10c46dec6efc2aaf6fe6ac1fe09bfc512 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 29 Aug 2022 19:20:33 +0200 Subject: [PATCH 054/169] added 'deprecated' to integrator labels and added new location for hero templates as note --- 
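
Note for reviewers: the deprecation labels added below point users to the new settings location. A simplified recap of the lookup order implemented earlier in this series (the real helpers also deep-copy the profiles, rename "tasks" to "task_names" for the legacy publish profiles, and log a warning when the legacy location is still used):

    def get_profile_locations(project_settings):
        # New location added in this series (global.json: tools/publish).
        new = project_settings["global"]["tools"]["publish"]
        # Deprecated locations that the schema labels below now flag.
        legacy = project_settings["global"]["publish"]
        return {
            "publish": new["template_name_profiles"]
            or legacy["IntegrateAssetNew"]["template_name_profiles"],
            "hero": new["hero_template_name_profiles"]
            or legacy["IntegrateHeroVersion"]["template_name_profiles"],
        }
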
.../projects_schema/schemas/schema_global_publish.json | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json index c24c88d04a..2cb0cebf95 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json @@ -649,7 +649,7 @@ { "type": "list", "key": "template_name_profiles", - "label": "Template name profiles", + "label": "Template name profiles (DEPRECATED)", "use_label_wrap": true, "object_type": { "type": "dict", @@ -754,10 +754,14 @@ "type": "list", "object_type": "text" }, + { + "type": "label", + "label": "NOTE: Hero publish template profiles settings were moved to Tools/Publish/Hero template name profiles. Please move values there." + }, { "type": "list", "key": "template_name_profiles", - "label": "Template name profiles", + "label": "Template name profiles (DEPRECATED)", "use_label_wrap": true, "object_type": { "type": "dict", From e81e3a7a1021db4e442aa3147ed03ccf8d92d8c6 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 29 Aug 2022 19:22:06 +0200 Subject: [PATCH 055/169] Fix missing line --- openpype/modules/deadline/abstract_submit_deadline.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/openpype/modules/deadline/abstract_submit_deadline.py b/openpype/modules/deadline/abstract_submit_deadline.py index 55e16d8d21..86eebc0d35 100644 --- a/openpype/modules/deadline/abstract_submit_deadline.py +++ b/openpype/modules/deadline/abstract_submit_deadline.py @@ -547,7 +547,10 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin): file_path = None for i in self._instance.context: - is_workfile = + is_workfile = ( + "workfile" in i.data.get("families", []) or + i.data["family"] == "workfile" + ) if not is_workfile: continue From 2933b37ef7711aac4e04284120b6c9b0ce2c9612 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 29 Aug 2022 19:34:27 +0200 Subject: [PATCH 056/169] Refactor code for readability --- .../deadline/abstract_submit_deadline.py | 124 ++++++++++-------- 1 file changed, 68 insertions(+), 56 deletions(-) diff --git a/openpype/modules/deadline/abstract_submit_deadline.py b/openpype/modules/deadline/abstract_submit_deadline.py index 86eebc0d35..46baa9ee57 100644 --- a/openpype/modules/deadline/abstract_submit_deadline.py +++ b/openpype/modules/deadline/abstract_submit_deadline.py @@ -543,72 +543,84 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin): published. """ - anatomy = self._instance.context.data['anatomy'] - file_path = None - for i in self._instance.context: - is_workfile = ( - "workfile" in i.data.get("families", []) or - i.data["family"] == "workfile" - ) - if not is_workfile: - continue + def _get_workfile_instance(context): + """Find workfile instance in context""" + for i in context: - # test if there is instance of workfile waiting - # to be published. - assert i.data["publish"] is True, ( - "Workfile (scene) must be published along") - # determine published path from Anatomy. 
- template_data = i.data.get("anatomyData") - rep = i.data.get("representations")[0] - template_data["representation"] = rep.get("name") - template_data["ext"] = rep.get("ext") - template_data["comment"] = None - anatomy_filled = anatomy.format(template_data) - template_filled = anatomy_filled["publish"]["path"] - file_path = os.path.normpath(template_filled) + is_workfile = ( + "workfile" in i.data.get("families", []) or + i.data["family"] == "workfile" + ) + if not is_workfile: + continue - self.log.info("Using published scene for render {}".format( - file_path)) + # test if there is instance of workfile waiting + # to be published. + assert i.data["publish"] is True, ( + "Workfile (scene) must be published along") - if not os.path.exists(file_path): - self.log.error("published scene does not exist!") - raise + return i - if not replace_in_path: - return file_path + instance = self._instance + workfile_instance = _get_workfile_instance(instance.context) + if not workfile_instance: + return - # now we need to switch scene in expected files - # because token will now point to published - # scene file and that might differ from current one - new_scene = os.path.splitext(os.path.basename(file_path))[0] - orig_scene = os.path.splitext(os.path.basename( - self._instance.context.data["currentFile"]))[0] - exp = self._instance.data.get("expectedFiles") + # determine published path from Anatomy. + template_data = workfile_instance.data.get("anatomyData") + rep = workfile_instance.data.get("representations")[0] + template_data["representation"] = rep.get("name") + template_data["ext"] = rep.get("ext") + template_data["comment"] = None - if isinstance(exp[0], dict): - # we have aovs and we need to iterate over them - new_exp = {} - for aov, files in exp[0].items(): - replaced_files = [] - for f in files: - replaced_files.append( - str(f).replace(orig_scene, new_scene) - ) - new_exp[aov] = replaced_files - # [] might be too much here, TODO - self._instance.data["expectedFiles"] = [new_exp] - else: - new_exp = [] - for f in exp: - new_exp.append( + anatomy = instance.context.data['anatomy'] + anatomy_filled = anatomy.format(template_data) + template_filled = anatomy_filled["publish"]["path"] + file_path = os.path.normpath(template_filled) + + self.log.info("Using published scene for render {}".format(file_path)) + + if not os.path.exists(file_path): + self.log.error("published scene does not exist!") + raise + + if not replace_in_path: + return file_path + + # now we need to switch scene in expected files + # because token will now point to published + # scene file and that might differ from current one + def _clean_name(path): + return os.path.splitext(os.path.basename(path))[0] + + new_scene = _clean_name(file_path) + orig_scene = _clean_name(instance.context.data["currentFile"]) + expected_files = instance.data.get("expectedFiles") + + if isinstance(expected_files[0], dict): + # we have aovs and we need to iterate over them + new_exp = {} + for aov, files in expected_files[0].items(): + replaced_files = [] + for f in files: + replaced_files.append( str(f).replace(orig_scene, new_scene) ) - self._instance.data["expectedFiles"] = new_exp + new_exp[aov] = replaced_files + # [] might be too much here, TODO + instance.data["expectedFiles"] = [new_exp] + else: + new_exp = [] + for f in expected_files: + new_exp.append( + str(f).replace(orig_scene, new_scene) + ) + instance.data["expectedFiles"] = new_exp - self.log.info("Scene name was switched {} -> {}".format( - orig_scene, new_scene - )) + 
self.log.info("Scene name was switched {} -> {}".format( + orig_scene, new_scene + )) return file_path From c725ff5b42c2f3f248a6af8f835020c9efb23182 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 29 Aug 2022 19:35:05 +0200 Subject: [PATCH 057/169] Move replacing in `publishRenderMetadataFolder` to abstract base class --- openpype/modules/deadline/abstract_submit_deadline.py | 6 ++++++ .../deadline/plugins/publish/submit_maya_deadline.py | 9 --------- 2 files changed, 6 insertions(+), 9 deletions(-) diff --git a/openpype/modules/deadline/abstract_submit_deadline.py b/openpype/modules/deadline/abstract_submit_deadline.py index 46baa9ee57..f56cf49f6d 100644 --- a/openpype/modules/deadline/abstract_submit_deadline.py +++ b/openpype/modules/deadline/abstract_submit_deadline.py @@ -618,6 +618,12 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin): ) instance.data["expectedFiles"] = new_exp + metadata_folder = instance.data.get("publishRenderMetadataFolder") + if metadata_folder: + metadata_folder = metadata_folder.replace(orig_scene, + new_scene) + instance.data["publishRenderMetadataFolder"] = metadata_folder + self.log.info("Scene name was switched {} -> {}".format( orig_scene, new_scene )) diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index 07ed237c94..26c26a124c 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -258,15 +258,6 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): ) self._patch_workfile(filepath, patches) - # todo: on self.use_published replace path for publishRenderMetadataFolder - # if instance.data.get("publishRenderMetadataFolder"): - # instance.data["publishRenderMetadataFolder"] = \ - # instance.data["publishRenderMetadataFolder"].replace( - # orig_scene, new_scene) - # self.log.info("Scene name was switched {} -> {}".format( - # orig_scene, new_scene - # )) - # Gather needed data ------------------------------------------------ workspace = context.data["workspaceDir"] default_render_file = instance.context.data.get('project_settings')\ From 23e652a51f41a6c65d6feba6edb0268f85feccb8 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 29 Aug 2022 19:54:36 +0200 Subject: [PATCH 058/169] Patch plug-in payload with settings --- .../maya/plugins/publish/collect_render.py | 1 + .../plugins/publish/submit_maya_deadline.py | 17 +++++++++-------- 2 files changed, 10 insertions(+), 8 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/collect_render.py b/openpype/hosts/maya/plugins/publish/collect_render.py index ebda5e190d..768a53329f 100644 --- a/openpype/hosts/maya/plugins/publish/collect_render.py +++ b/openpype/hosts/maya/plugins/publish/collect_render.py @@ -293,6 +293,7 @@ class CollectMayaRender(pyblish.api.ContextPlugin): "source": filepath, "expectedFiles": full_exp_files, "publishRenderMetadataFolder": common_publish_meta_path, + "renderProducts": layer_render_products, "resolutionWidth": lib.get_attr_in_layer( "defaultResolution.width", layer=layer_name ), diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index 26c26a124c..854a66eaa5 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -215,16 +215,21 @@ class 
MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): renderlayer = instance.data['setMembers'] # rs_beauty - self.payload_skeleton["PluginInfo"]["RenderLayer"] = renderlayer - self.payload_skeleton["PluginInfo"]["RenderSetupIncludeLights"] = instance.data.get("renderSetupIncludeLights") # noqa - # Output driver to render plugin_info = DeadlinePluginInfo( SceneFile=context.data["currentFile"], Version=cmds.about(version=True), + RenderLayer=renderlayer, + RenderSetupIncludeLights=instance.data.get("renderSetupIncludeLights") # noqa ) - return attr.asdict(plugin_info) + plugin_payload = attr.asdict(plugin_info) + + # Patching with pluginInfo from settings + for key, value in self.pluginInfo.items(): + plugin_payload[key] = value + + return plugin_payload def process_submission(self): @@ -338,10 +343,6 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): # Store output dir for unified publisher (filesequence) instance.data["outputDir"] = os.path.dirname(output_filename_0) - # add jobInfo and pluginInfo variables from Settings - payload["JobInfo"].update(self.jobInfo) - payload["PluginInfo"].update(self.pluginInfo) - if instance.data.get("tileRendering"): # Prepare tiles data self._tile_render(instance, payload) From 4abddd027de9d4a1814ddff1b971bb9a99c47008 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 29 Aug 2022 19:55:07 +0200 Subject: [PATCH 059/169] Use collected render products for image prefix --- .../deadline/plugins/publish/submit_maya_deadline.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index 854a66eaa5..bb7ae380b6 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -307,7 +307,6 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): "jobname": jobname, "comment": comment, "output_filename_0": output_filename_0, - "render_variables": render_variables, "renderlayer": renderlayer, "workspace": workspace, "dirname": dirname, @@ -564,6 +563,11 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): renderer = self._instance.data["renderer"] + # Get layer prefix + render_products = self._instance.data["renderProducts"] + layer_metadata = render_products.layer_data + layer_prefix = layer_metadata.filePrefix + # This hack is here because of how Deadline handles Renderman version. 
# it considers everything with `renderman` set as version older than # Renderman 22, and so if we are using renderman > 21 we need to set @@ -583,7 +587,7 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): "SceneFile": data["filepath"], # Output directory and filename "OutputFilePath": data["dirname"].replace("\\", "/"), - "OutputFilePrefix": data["render_variables"]["filename_prefix"], # noqa: E501 + "OutputFilePrefix": layer_prefix, # Only render layers are considered renderable in this pipeline "UsingRenderLayers": True, From 6f5fcecfae7ec92bf2df80fb673bfff3e1049231 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 29 Aug 2022 19:55:24 +0200 Subject: [PATCH 060/169] Use existing variable `renderer` --- .../modules/deadline/plugins/publish/submit_maya_deadline.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index bb7ae380b6..c7f91905ea 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -573,7 +573,7 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): # Renderman 22, and so if we are using renderman > 21 we need to set # renderer string on the job to `renderman22`. We will have to change # this when Deadline releases new version handling this. - if self._instance.data["renderer"] == "renderman": + if renderer == "renderman": try: from rfm2.config import cfg # noqa except ImportError: From ae250c4a100dcc4474512913dbc858326ced3c8a Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 29 Aug 2022 19:55:40 +0200 Subject: [PATCH 061/169] Remove unused `comment` key-value --- .../modules/deadline/plugins/publish/submit_maya_deadline.py | 1 - 1 file changed, 1 deletion(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index c7f91905ea..b2b877ab0e 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -305,7 +305,6 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): "filename": filename, "filepath": filepath, "jobname": jobname, - "comment": comment, "output_filename_0": output_filename_0, "renderlayer": renderlayer, "workspace": workspace, From 7af7f71edacea21a00c370e4ad1e92b2fe576b66 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 29 Aug 2022 19:58:04 +0200 Subject: [PATCH 062/169] Remove logging of plugin name --- .../modules/deadline/plugins/publish/submit_maya_deadline.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index b2b877ab0e..db796f25a9 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -335,9 +335,6 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): if export_job: payload["JobInfo"]["JobDependency0"] = export_job - plugin = payload["JobInfo"]["Plugin"] - self.log.info("using render plugin : {}".format(plugin)) - # Store output dir for unified publisher (filesequence) instance.data["outputDir"] = os.path.dirname(output_filename_0) From f91e33c0385762a7a73cca192f4d36716377ee1e Mon 
Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 29 Aug 2022 23:21:30 +0200 Subject: [PATCH 063/169] More refactoring/cleanup (WIP) --- .../plugins/publish/submit_maya_deadline.py | 304 +++++++----------- 1 file changed, 110 insertions(+), 194 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index db796f25a9..8f12a9518f 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -70,7 +70,7 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): tile_assembler_plugin = "OpenPypeTileAssembler" priority = 50 tile_priority = 50 - limit_groups = [] + limit = [] # limit groups jobInfo = {} pluginInfo = {} group = "none" @@ -112,23 +112,18 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): job_info.ChunkSize = instance.data.get("chunkSize", 10) job_info.Comment = context.data.get("comment") job_info.Priority = instance.data.get("priority", self.priority) + job_info.FramesPerTask = instance.data.get("framesPerTask", 1) if self.group != "none" and self.group: job_info.Group = self.group - if self.limit_groups: - job_info.LimitGroups = ",".join(self.limit_groups) + if self.limit: + job_info.LimitGroups = ",".join(self.limit) - # Optional, enable double-click to preview rendered - # frames from Deadline Monitor - self.payload_skeleton["JobInfo"]["OutputDirectory0"] = \ - os.path.dirname(output_filename_0).replace("\\", "/") - self.payload_skeleton["JobInfo"]["OutputFilename0"] = \ - output_filename_0.replace("\\", "/") - - # Add options from RenderGlobals------------------------------------- + # Add options from RenderGlobals render_globals = instance.data.get("renderGlobals", {}) - self.payload_skeleton["JobInfo"].update(render_globals) + for key, value in render_globals: + setattr(job_info, key, value) keys = [ "FTRACK_API_KEY", @@ -140,7 +135,6 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): "AVALON_TASK", "AVALON_APP_NAME", "OPENPYPE_DEV", - "OPENPYPE_LOG_NO_COLORS", "OPENPYPE_VERSION" ] # Add mongo url if it's enabled @@ -150,10 +144,8 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): environment = dict({key: os.environ[key] for key in keys if key in os.environ}, **legacy_io.Session) - # TODO: Taken from old publish class - test whether still needed environment["OPENPYPE_LOG_NO_COLORS"] = "1" - environment["OPENPYPE_MAYA_VERSION"] = cmds.about(v=True) # to recognize job from PYPE for turning Event On/Off environment["OPENPYPE_RENDER_JOB"] = "1" @@ -166,7 +158,10 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): ) # to recognize job from PYPE for turning Event On/Off job_info.EnvironmentKeyValue = "OPENPYPE_RENDER_JOB=1" + job_info.EnvironmentKeyValue = "OPENPYPE_LOG_NO_COLORS=1" + # Optional, enable double-click to preview rendered + # frames from Deadline Monitor for i, filepath in enumerate(instance.data["files"]): dirname = os.path.dirname(filepath) fname = os.path.basename(filepath) @@ -213,14 +208,13 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): instance = self._instance context = instance.context - renderlayer = instance.data['setMembers'] # rs_beauty - - # Output driver to render plugin_info = DeadlinePluginInfo( - SceneFile=context.data["currentFile"], + SceneFile=self.scene_path, Version=cmds.about(version=True), - 
RenderLayer=renderlayer, - RenderSetupIncludeLights=instance.data.get("renderSetupIncludeLights") # noqa + RenderLayer=instance.data['setMembers'], + RenderSetupIncludeLights=instance.data.get("renderSetupIncludeLights"), # noqa + ProjectPath=context.data["workspaceDir"], + UsingRenderLayers=True, ) plugin_payload = attr.asdict(plugin_info) @@ -236,12 +230,6 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): instance = self._instance context = instance.context - # Generated by AbstractSubmitDeadline. The `job_info`, `plugin_info` - # and `aux_files` are the skeleton payloads that are the basis for - # all the maya submissions - job_info = self.job_info - plugin_info = self.plugin_info - aux_files = self.aux_files filepath = self.scene_path # publish if `use_publish` else workfile # TODO: Avoid the need for this logic here, needed for submit publish @@ -250,18 +238,9 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): instance.data["outputDir"] = output_dir instance.data["toBeRenderedOn"] = "deadline" - self.limit_groups = self.limit - # Patch workfile (only when use_published is enabled) if self.use_published: - patches = ( - context.data["project_settings"].get( - "deadline", {}).get( - "publish", {}).get( - "MayaSubmitDeadline", {}).get( - "scene_patches", {}) - ) - self._patch_workfile(filepath, patches) + self._patch_workfile() # Gather needed data ------------------------------------------------ workspace = context.data["workspaceDir"] @@ -271,22 +250,6 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): .get('default_render_image_folder') filename = os.path.basename(filepath) dirname = os.path.join(workspace, default_render_file) - renderlayer = instance.data['setMembers'] # rs_beauty - - # Get the variables depending on the renderer - # TODO: Find replacement logic for `get_renderer_variables` through - # what is collected for the render or is implemented in maya - # api `lib_renderproducts` - render_variables = get_renderer_variables(renderlayer, dirname) - filename_0 = render_variables["filename_0"] - if self.use_published: - new_scene = os.path.splitext(filename)[0] - orig_scene = os.path.splitext( - os.path.basename(context.data["currentFile"]))[0] - filename_0 = render_variables["filename_0"].replace( - orig_scene, new_scene) - - output_filename_0 = filename_0 # this is needed because renderman handles directory and file # prefixes separately @@ -301,16 +264,18 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): pass # Fill in common data to payload ------------------------------------ + # TODO: Replace these with collected data from CollectRender payload_data = { "filename": filename, "filepath": filepath, - "jobname": jobname, "output_filename_0": output_filename_0, "renderlayer": renderlayer, - "workspace": workspace, "dirname": dirname, } + # Store output dir for unified publisher (filesequence) + instance.data["outputDir"] = os.path.dirname(output_filename_0) + # Submit preceding export jobs ------------------------------------- export_job = None assert not all(x in instance.data["families"] @@ -333,17 +298,16 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): # Add export job as dependency -------------------------------------- if export_job: - payload["JobInfo"]["JobDependency0"] = export_job - - # Store output dir for unified publisher (filesequence) - instance.data["outputDir"] = os.path.dirname(output_filename_0) + job_info, _ = payload + 
job_info.JobDependency = export_job if instance.data.get("tileRendering"): # Prepare tiles data self._tile_render(instance, payload) else: # Submit main render job - self.submit(payload) + job_info, plugin_info = payload + self.submit(self.assemble_payload(job_info, plugin_info)) def _tile_render(self, instance, payload): @@ -546,18 +510,12 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): instance.data["jobBatchName"])) def _get_maya_payload(self, data): - payload = copy.deepcopy(self.payload_skeleton) - if not self.asset_dependencies: - job_info_ext = {} + job_info = copy.deepcopy(self.job_info) - else: - job_info_ext = { - # Asset dependency to wait for at least the scene file to sync. - "AssetDependency0": data["filepath"], - } - - renderer = self._instance.data["renderer"] + if self.asset_dependencies: + # Asset dependency to wait for at least the scene file to sync. + job_info.AssetDependency = self.scene_path # Get layer prefix render_products = self._instance.data["renderProducts"] @@ -569,6 +527,7 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): # Renderman 22, and so if we are using renderman > 21 we need to set # renderer string on the job to `renderman22`. We will have to change # this when Deadline releases new version handling this. + renderer = self._instance.data["renderer"] if renderer == "renderman": try: from rfm2.config import cfg # noqa @@ -580,29 +539,20 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): renderer = "renderman22" plugin_info = { - "SceneFile": data["filepath"], # Output directory and filename "OutputFilePath": data["dirname"].replace("\\", "/"), "OutputFilePrefix": layer_prefix, - - # Only render layers are considered renderable in this pipeline - "UsingRenderLayers": True, - - # Render only this layer - "RenderLayer": data["renderlayer"], - - # Determine which renderer to use from the file itself - "Renderer": renderer, - - # Resolve relative references - "ProjectPath": data["workspace"], } - payload["JobInfo"].update(job_info_ext) - payload["PluginInfo"].update(plugin_info) - return payload + + return job_info, plugin_info def _get_vray_export_payload(self, data): - payload = copy.deepcopy(self.payload_skeleton) + + job_info = copy.deepcopy(self.job_info) + + job_info.Name = self._job_info_label("Export") + + # Get V-Ray settings info to compute output path vray_settings = cmds.ls(type="VRaySettingsNode") node = vray_settings[0] template = cmds.getAttr("{}.vrscene_filename".format(node)) @@ -610,34 +560,15 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): first_file = self.format_vray_output_filename(scene, template) first_file = "{}/{}".format(data["workspace"], first_file) output = os.path.dirname(first_file) - job_info_ext = { - # Job name, as seen in Monitor - "Name": "Export {} [{}-{}]".format( - data["jobname"], - int(self._instance.data["frameStartHandle"]), - int(self._instance.data["frameEndHandle"])), - "Plugin": self._instance.data.get( - "mayaRenderPlugin", "MayaPype"), - "FramesPerTask": self._instance.data.get("framesPerTask", 1) - } - - plugin_info_ext = { - # Renderer + plugin_info = { "Renderer": "vray", - # Input - "SceneFile": data["filepath"], "SkipExistingFrames": True, - "UsingRenderLayers": True, "UseLegacyRenderLayers": True, - "RenderLayer": data["renderlayer"], - "ProjectPath": data["workspace"], "OutputFilePath": output } - payload["JobInfo"].update(job_info_ext) - payload["PluginInfo"].update(plugin_info_ext) - 
return payload + return job_info, plugin_info def _get_arnold_export_payload(self, data): @@ -653,76 +584,55 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): script = os.path.normpath(module_path) - payload = copy.deepcopy(self.payload_skeleton) - job_info_ext = { - # Job name, as seen in Monitor - "Name": "Export {} [{}-{}]".format( - data["jobname"], - int(self._instance.data["frameStartHandle"]), - int(self._instance.data["frameEndHandle"])), + job_info = copy.deepcopy(self.job_info) + plugin_info = copy.deepcopy(self.plugin_info) - "Plugin": "Python", - "FramesPerTask": self._instance.data.get("framesPerTask", 1), - "Frames": 1 + job_info.Name = self._job_info_label("Export") + + # Force a single frame Python job + job_info.Plugin = "Python" + job_info.Frames = 1 + + # add required env vars for the export script + envs = { + "AVALON_APP_NAME": os.environ.get("AVALON_APP_NAME"), + "OPENPYPE_ASS_EXPORT_RENDER_LAYER": data["renderlayer"], + "OPENPYPE_ASS_EXPORT_SCENE_FILE": self.scene_path, + "OPENPYPE_ASS_EXPORT_OUTPUT": payload['JobInfo']['OutputFilename0'], # noqa + "OPENPYPE_ASS_EXPORT_START": int(self._instance.data["frameStartHandle"]), # noqa + "OPENPYPE_ASS_EXPORT_END": int(self._instance.data["frameEndHandle"]), # noqa + "OPENPYPE_ASS_EXPORT_STEP": 1 } + for key, value in envs.items(): + job_info.EnvironmentKeyValue = "{key}={value}".format(key=key, + value=value) - plugin_info_ext = { + plugin_info.update({ "Version": "3.6", "ScriptFile": script, "Arguments": "", "SingleFrameOnly": "True", - } - payload["JobInfo"].update(job_info_ext) - payload["PluginInfo"].update(plugin_info_ext) + }) - envs = [ - v - for k, v in payload["JobInfo"].items() - if k.startswith("EnvironmentKeyValue") - ] - - # add app name to environment - envs.append( - "AVALON_APP_NAME={}".format(os.environ.get("AVALON_APP_NAME"))) - envs.append( - "OPENPYPE_ASS_EXPORT_RENDER_LAYER={}".format(data["renderlayer"])) - envs.append( - "OPENPYPE_ASS_EXPORT_SCENE_FILE={}".format(data["filepath"])) - envs.append( - "OPENPYPE_ASS_EXPORT_OUTPUT={}".format( - payload['JobInfo']['OutputFilename0'])) - envs.append( - "OPENPYPE_ASS_EXPORT_START={}".format( - int(self._instance.data["frameStartHandle"]))) - envs.append( - "OPENPYPE_ASS_EXPORT_END={}".format( - int(self._instance.data["frameEndHandle"]))) - envs.append( - "OPENPYPE_ASS_EXPORT_STEP={}".format(1)) - - for i, e in enumerate(envs): - payload["JobInfo"]["EnvironmentKeyValue{}".format(i)] = e - return payload + return job_info, plugin_info def _get_vray_render_payload(self, data): - payload = copy.deepcopy(self.payload_skeleton) + + # Job Info + job_info = copy.deepcopy(self.job_info) + job_info.Name = self._job_info_label("Render") + job_info.Plugin = "Vray" + job_info.OverrideTaskExtraInfoNames = False + + # Plugin Info vray_settings = cmds.ls(type="VRaySettingsNode") node = vray_settings[0] template = cmds.getAttr("{}.vrscene_filename".format(node)) # "vrayscene//_/" - scene, _ = os.path.splitext(data["filename"]) + scene, _ = os.path.splitext(self.scene_path) first_file = self.format_vray_output_filename(scene, template) first_file = "{}/{}".format(data["workspace"], first_file) - job_info_ext = { - "Name": "Render {} [{}-{}]".format( - data["jobname"], - int(self._instance.data["frameStartHandle"]), - int(self._instance.data["frameEndHandle"])), - - "Plugin": "Vray", - "OverrideTaskExtraInfoNames": False, - } plugin_info = { "InputFilename": first_file, @@ -731,35 +641,28 @@ class 
MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): "Width": self._instance.data["resolutionWidth"], "Height": self._instance.data["resolutionHeight"], - "OutputFilePath": payload["JobInfo"]["OutputDirectory0"], - "OutputFileName": payload["JobInfo"]["OutputFilename0"] + "OutputFilePath": job_info.OutputDirectory[0], + "OutputFileName": job_info.OutputFilename[0] } - payload["JobInfo"].update(job_info_ext) - payload["PluginInfo"].update(plugin_info) - return payload + return job_info, plugin_info def _get_arnold_render_payload(self, data): - payload = copy.deepcopy(self.payload_skeleton) + + # Job Info + job_info = copy.deepcopy(self.job_info) + job_info.Name = self._job_info_label("Render") + job_info.Plugin = "Arnold" + job_info.OverrideTaskExtraInfoNames = False + + # Plugin Info ass_file, _ = os.path.splitext(data["output_filename_0"]) first_file = ass_file + ".ass" - job_info_ext = { - "Name": "Render {} [{}-{}]".format( - data["jobname"], - int(self._instance.data["frameStartHandle"]), - int(self._instance.data["frameEndHandle"])), - - "Plugin": "Arnold", - "OverrideTaskExtraInfoNames": False, - } - plugin_info = { "ArnoldFile": first_file, } - payload["JobInfo"].update(job_info_ext) - payload["PluginInfo"].update(plugin_info) - return payload + return job_info, plugin_info def format_vray_output_filename(self, filename, template, dir=False): """Format the expected output file of the Export job. @@ -804,7 +707,7 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): return result - def _patch_workfile(self, file, patches): + def _patch_workfile(self): # type: (str, dict) -> [str, None] """Patch Maya scene. @@ -818,19 +721,25 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): "line": "line to insert" } - Args: - file (str): File to patch. - patches (dict): Dictionary defining patches. 
- - Returns: - str: Patched file path or None - """ - if not patches or os.path.splitext(file)[1].lower() != ".ma": + project_settings = self._instance.context.data["project_settings"] + patches = ( + project_settings.get( + "deadline", {}).get( + "publish", {}).get( + "MayaSubmitDeadline", {}).get( + "scene_patches", {}) + ) + if not patches: + return + + if not os.path.splitext(self.scene_path)[1].lower() != ".ma": + self.log.debug("Skipping workfile patch since workfile is not " + ".ma file") return compiled_regex = [re.compile(p["regex"]) for p in patches] - with open(file, "r+") as pf: + with open(self.scene_path, "r+") as pf: scene_data = pf.readlines() for ln, line in enumerate(scene_data): for i, r in enumerate(compiled_regex): @@ -839,10 +748,17 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): pf.seek(0) pf.writelines(scene_data) pf.truncate() - self.log.info( - "Applied {} patch to scene.".format( - patches[i]["name"])) - return file + self.log.info("Applied {} patch to scene.".format( + patches[i]["name"] + )) + + def _job_info_label(self, label): + return "{label} {job.Name} [{start}-{end}]".format( + label=label, + job=self.job_info, + start=int(self._instance.data["frameStartHandle"]), + end=int(self._instance.data["frameEndHandle"]), + ) def _format_tiles( From f9bbda244bee373dd3bfb025528923d061808525 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 29 Aug 2022 23:22:19 +0200 Subject: [PATCH 064/169] More explicit PluginInfo name --- .../modules/deadline/plugins/publish/submit_maya_deadline.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index 8f12a9518f..87ef4e6db9 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -38,7 +38,7 @@ from openpype_modules.deadline.abstract_submit_deadline import DeadlineJobInfo @attr.s -class DeadlinePluginInfo(): +class MayaPluginInfo: SceneFile = attr.ib(default=None) # Input OutputFilePath = attr.ib(default=None) # Output directory and filename OutputFilePrefix = attr.ib(default=None) @@ -208,7 +208,7 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): instance = self._instance context = instance.context - plugin_info = DeadlinePluginInfo( + plugin_info = MayaPluginInfo( SceneFile=self.scene_path, Version=cmds.about(version=True), RenderLayer=instance.data['setMembers'], From ecf2a89081f19e14c65b0fd7b1992fe80519e983 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 29 Aug 2022 23:39:11 +0200 Subject: [PATCH 065/169] More temp restructuring --- .../plugins/publish/submit_maya_deadline.py | 52 +++++++------------ 1 file changed, 20 insertions(+), 32 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index 87ef4e6db9..a77ccd73d4 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -273,9 +273,6 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): "dirname": dirname, } - # Store output dir for unified publisher (filesequence) - instance.data["outputDir"] = os.path.dirname(output_filename_0) - # Submit preceding export jobs ------------------------------------- export_job = None assert not 
all(x in instance.data["families"] @@ -326,26 +323,19 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): plugin_info["ImageWidth"] = instance.data.get("resolutionWidth") plugin_info["RegionRendering"] = True - assembly_payload = { - "AuxFiles": [], - "JobInfo": { - "BatchName": payload["JobInfo"]["BatchName"], - "Frames": 1, - "Name": "{} - Tile Assembly Job".format( - payload["JobInfo"]["Name"]), - "OutputDirectory0": - payload["JobInfo"]["OutputDirectory0"].replace( - "\\", "/"), - "Plugin": self.tile_assembler_plugin, - "MachineLimit": 1 - }, - "PluginInfo": { + assembly_job_info = copy.deepcopy(job_info) + assembly_job_info.Plugin = self.tile_assembler_plugin + assembly_job_info.Name = "{job.Name} - Tile Assembly Job".format( + job=job_info) + assembly_job_info.Frames = 1 + assembly_job_info.MachineLimit = 1 + assembly_job_info.Priority = instance.data.get("tile_priority", + self.tile_priority) + + assembly_plugin_info = { "CleanupTiles": 1, "ErrorOnMissing": True - } } - assembly_payload["JobInfo"]["Priority"] = self._instance.data.get( - "tile_priority", self.tile_priority) frame_payloads = [] assembly_payloads = [] @@ -414,6 +404,7 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): file_index = 1 for file in assembly_files: frame = re.search(R_FRAME_NUMBER, file).group("frame") + new_assembly_payload = copy.deepcopy(assembly_payload) new_assembly_payload["JobInfo"]["Name"] = \ "{} (Frame {})".format( @@ -434,7 +425,6 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): self.log.info( "Submitting tile job(s) [{}] ...".format(len(frame_payloads))) - url = "{}/api/jobs".format(self.deadline_url) tiles_count = instance.data.get("tilesX") * instance.data.get( "tilesY") # noqa: E501 @@ -444,9 +434,11 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): job_id = response.json()["_id"] hash = response.json()["Props"]["Ex0"] + # Add assembly job dependencies for assembly_job in assembly_payloads: - if assembly_job["JobInfo"]["ExtraInfo0"] == hash: - assembly_job["JobInfo"]["JobDependency0"] = job_id + assembly_job_info = assembly_job["JobInfo"] + if assembly_job_info.ExtraInfo[0] == hash: + assembly_job.JobDependency = job_id for assembly_job in assembly_payloads: file = assembly_job["JobInfo"]["ExtraInfo1"] @@ -461,14 +453,14 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): ) ) + config_file_dir = os.path.dirname(config_file) try: - if not os.path.isdir(os.path.dirname(config_file)): - os.makedirs(os.path.dirname(config_file)) + if not os.path.isdir(config_file_dir): + os.makedirs(config_file_dir) except OSError: # directory is not available - self.log.warning( - "Path is unreachable: `{}`".format( - os.path.dirname(config_file))) + self.log.warning("Path is unreachable: " + "`{}`".format(config_file_dir)) # add config file as job auxFile assembly_job["AuxFiles"] = [config_file] @@ -505,10 +497,6 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): response.json()["_id"]) job_idx += 1 - instance.data["jobBatchName"] = payload["JobInfo"]["BatchName"] - self.log.info("Setting batch name on instance: {}".format( - instance.data["jobBatchName"])) - def _get_maya_payload(self, data): job_info = copy.deepcopy(self.job_info) From 6abafd0aca1ca06204f5e5bc11907a0a6a855900 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 30 Aug 2022 00:41:30 +0200 Subject: [PATCH 066/169] Refactor tile logic --- .../plugins/publish/submit_maya_deadline.py 
| 120 ++++++++---------- 1 file changed, 55 insertions(+), 65 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index a77ccd73d4..920adf7e4a 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -314,11 +314,12 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): # if we have sequence of files, we need to create tile job for # every frame - job_info.TileJob = True job_info.TileJobTilesInX = instance.data.get("tilesX") job_info.TileJobTilesInY = instance.data.get("tilesY") + tiles_count = job_info.TileJobTilesInX * job_info.TileJobTilesInY + plugin_info["ImageHeight"] = instance.data.get("resolutionHeight") plugin_info["ImageWidth"] = instance.data.get("resolutionWidth") plugin_info["RegionRendering"] = True @@ -334,7 +335,8 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): assembly_plugin_info = { "CleanupTiles": 1, - "ErrorOnMissing": True + "ErrorOnMissing": True, + "Renderer": self._instance.data["renderer"] } frame_payloads = [] @@ -367,81 +369,69 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): file_index = 1 for file in files: frame = re.search(R_FRAME_NUMBER, file).group("frame") - new_payload = copy.deepcopy(payload) - new_payload["JobInfo"]["Name"] = \ - "{} (Frame {} - {} tiles)".format( + + new_job_info = copy.deepcopy(job_info) + new_job_info.Name = "{} (Frame {} - {} tiles)".format( payload["JobInfo"]["Name"], frame, instance.data.get("tilesX") * instance.data.get("tilesY") - # noqa: E501 - ) - self.log.info( - "... preparing job {}".format( - new_payload["JobInfo"]["Name"])) - new_payload["JobInfo"]["TileJobFrame"] = frame + ) + new_job_info.TileJobFrame = frame - tiles_data = _format_tiles( + new_plugin_info = copy.deepcopy(plugin_info) + + # Add tile data into job info and plugin info + tiles_out, _ = _format_tiles( file, 0, instance.data.get("tilesX"), instance.data.get("tilesY"), instance.data.get("resolutionWidth"), instance.data.get("resolutionHeight"), payload["PluginInfo"]["OutputFilePrefix"] - )[0] - new_payload["JobInfo"].update(tiles_data["JobInfo"]) - new_payload["PluginInfo"].update(tiles_data["PluginInfo"]) + ) + new_job_info.update(tiles_out["JobInfo"]) + new_plugin_info.update(tiles_out["PluginInfo"]) self.log.info("hashing {} - {}".format(file_index, file)) job_hash = hashlib.sha256( ("{}_{}".format(file_index, file)).encode("utf-8")) frame_jobs[frame] = job_hash.hexdigest() - new_payload["JobInfo"]["ExtraInfo0"] = job_hash.hexdigest() - new_payload["JobInfo"]["ExtraInfo1"] = file - frame_payloads.append(new_payload) - file_index += 1 + new_job_info.ExtraInfo[0] = job_hash.hexdigest() + new_job_info.ExtraInfo[1] = file - file_index = 1 - for file in assembly_files: - frame = re.search(R_FRAME_NUMBER, file).group("frame") - - new_assembly_payload = copy.deepcopy(assembly_payload) - new_assembly_payload["JobInfo"]["Name"] = \ - "{} (Frame {})".format( - assembly_payload["JobInfo"]["Name"], - frame) - new_assembly_payload["JobInfo"]["OutputFilename0"] = re.sub( - REPL_FRAME_NUMBER, - "\\1{}\\3".format("#" * len(frame)), file) - - new_assembly_payload["PluginInfo"]["Renderer"] = \ - self._instance.data["renderer"] # noqa: E501 - new_assembly_payload["JobInfo"]["ExtraInfo0"] = frame_jobs[ - frame] # noqa: E501 - new_assembly_payload["JobInfo"]["ExtraInfo1"] = file - 
assembly_payloads.append(new_assembly_payload) + frame_payloads.append(self.assemble_payload( + job_info=new_job_info, + plugin_info=new_plugin_info + )) file_index += 1 self.log.info( "Submitting tile job(s) [{}] ...".format(len(frame_payloads))) - tiles_count = instance.data.get("tilesX") * instance.data.get( - "tilesY") # noqa: E501 - - for tile_job in frame_payloads: - response = self.submit(tile_job) - + frame_tile_job_id = {} + for tile_job_payload in frame_payloads: + response = self.submit(tile_job_payload) job_id = response.json()["_id"] - hash = response.json()["Props"]["Ex0"] + frame_tile_job_id[frame] = job_id - # Add assembly job dependencies - for assembly_job in assembly_payloads: - assembly_job_info = assembly_job["JobInfo"] - if assembly_job_info.ExtraInfo[0] == hash: - assembly_job.JobDependency = job_id + assembly_jobs = [] + for i, file in enumerate(assembly_files): + frame = re.search(R_FRAME_NUMBER, file).group("frame") + + frame_assembly_job_info = copy.deepcopy(assembly_job_info) + frame_assembly_job_info.Name += " (Frame {})".format(frame) + frame_assembly_job_info.OutputFilename[0] = re.sub( + REPL_FRAME_NUMBER, + "\\1{}\\3".format("#" * len(frame)), file) + + hash = frame_jobs[frame] + tile_job_id = frame_tile_job_id[frame] + + frame_assembly_job_info.ExtraInfo[0] = hash + frame_assembly_job_info.ExtraInfo[1] = file + frame_assembly_job_info.JobDependency = tile_job_id - for assembly_job in assembly_payloads: - file = assembly_job["JobInfo"]["ExtraInfo1"] # write assembly job config files now = datetime.now() @@ -462,9 +452,6 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): self.log.warning("Path is unreachable: " "`{}`".format(config_file_dir)) - # add config file as job auxFile - assembly_job["AuxFiles"] = [config_file] - with open(config_file, "w") as cf: print("TileCount={}".format(tiles_count), file=cf) print("ImageFileName={}".format(file), file=cf) @@ -485,17 +472,20 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): for k, v in tiles.items(): print("{}={}".format(k, v), file=cf) - job_idx = 1 - instance.data["assemblySubmissionJobs"] = [] - for ass_job in assembly_payloads: - self.log.info("submitting assembly job {} of {}".format( - job_idx, len(assembly_payloads) - )) - response = self.submit(ass_job) + payload = self.assemble_payload( + job_info=frame_assembly_job_info, + plugin_info=assembly_plugin_info.copy(), + # add config file as job auxFile + aux_files=[config_file] + ) - instance.data["assemblySubmissionJobs"].append( - response.json()["_id"]) - job_idx += 1 + self.log.info("submitting assembly job {} of {}".format( + i+1, len(assembly_payloads) + )) + response = self.submit(payload) + assembly_jobs.append(response.json()["_id"]) + + instance.data["assemblySubmissionJobs"] = assembly_jobs def _get_maya_payload(self, data): From a6002de641e1ad500192be433f620f17680ea056 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 30 Aug 2022 00:58:23 +0200 Subject: [PATCH 067/169] Refactor _format_tiles for readability --- .../plugins/publish/submit_maya_deadline.py | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index 920adf7e4a..00d8eb7859 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -800,23 +800,23 @@ def _format_tiles( 
tiles_x, tiles_y ) - out_tile_index = "OutputFilename{}Tile{}".format( - str(index), tile - ) + + # Job Info new_filename = "{}/{}{}".format( os.path.dirname(filename), tile_prefix, os.path.basename(filename) ) - out["JobInfo"][out_tile_index] = new_filename - out["PluginInfo"]["RegionPrefix{}".format(tile)] = \ - "/{}".format(tile_prefix).join(prefix.rsplit("/", 1)) + out["JobInfo"]["OutputFilename{}Tile{}".format(index, tile)] = new_filename # noqa + # Plugin Info + out["PluginInfo"]["RegionPrefix{}".format(tile)] = "/{}".format(tile_prefix).join(prefix.rsplit("/", 1)) # noqa: E501 out["PluginInfo"]["RegionTop{}".format(tile)] = int(height) - (tile_y * h_space) # noqa: E501 out["PluginInfo"]["RegionBottom{}".format(tile)] = int(height) - ((tile_y - 1) * h_space) - 1 # noqa: E501 out["PluginInfo"]["RegionLeft{}".format(tile)] = (tile_x - 1) * w_space # noqa: E501 out["PluginInfo"]["RegionRight{}".format(tile)] = (tile_x * w_space) - 1 # noqa: E501 + # Tile config cfg["Tile{}".format(tile)] = new_filename cfg["Tile{}Tile".format(tile)] = new_filename cfg["Tile{}FileName".format(tile)] = new_filename @@ -828,4 +828,5 @@ def _format_tiles( cfg["Tile{}Height".format(tile)] = h_space tile += 1 + return out, cfg From a9fe806fec1a5e5ecf98327cfa4845b8b6d3edc0 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 30 Aug 2022 01:02:24 +0200 Subject: [PATCH 068/169] Calculate once --- .../plugins/publish/submit_maya_deadline.py | 18 ++++++++++-------- 1 file changed, 10 insertions(+), 8 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index 00d8eb7859..d0348119dc 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -800,6 +800,10 @@ def _format_tiles( tiles_x, tiles_y ) + top = int(height) - (tile_y * h_space) + bottom = int(height) - ((tile_y - 1) * h_space) - 1 + left = (tile_x - 1) * w_space + right = (tile_x * w_space) - 1 # Job Info new_filename = "{}/{}{}".format( @@ -811,19 +815,17 @@ def _format_tiles( # Plugin Info out["PluginInfo"]["RegionPrefix{}".format(tile)] = "/{}".format(tile_prefix).join(prefix.rsplit("/", 1)) # noqa: E501 - out["PluginInfo"]["RegionTop{}".format(tile)] = int(height) - (tile_y * h_space) # noqa: E501 - out["PluginInfo"]["RegionBottom{}".format(tile)] = int(height) - ((tile_y - 1) * h_space) - 1 # noqa: E501 - out["PluginInfo"]["RegionLeft{}".format(tile)] = (tile_x - 1) * w_space # noqa: E501 - out["PluginInfo"]["RegionRight{}".format(tile)] = (tile_x * w_space) - 1 # noqa: E501 + out["PluginInfo"]["RegionTop{}".format(tile)] = top + out["PluginInfo"]["RegionBottom{}".format(tile)] = bottom + out["PluginInfo"]["RegionLeft{}".format(tile)] = left + out["PluginInfo"]["RegionRight{}".format(tile)] = right # Tile config cfg["Tile{}".format(tile)] = new_filename cfg["Tile{}Tile".format(tile)] = new_filename cfg["Tile{}FileName".format(tile)] = new_filename - cfg["Tile{}X".format(tile)] = (tile_x - 1) * w_space - - cfg["Tile{}Y".format(tile)] = int(height) - (tile_y * h_space) - + cfg["Tile{}X".format(tile)] = left + cfg["Tile{}Y".format(tile)] = top cfg["Tile{}Width".format(tile)] = w_space cfg["Tile{}Height".format(tile)] = h_space From d7c72f97b30f85aca15b4a8148c140595f0b2a3a Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 30 Aug 2022 01:10:19 +0200 Subject: [PATCH 069/169] Batch submit assembly jobs --- .../plugins/publish/submit_maya_deadline.py | 24 
++++++++++++------- 1 file changed, 15 insertions(+), 9 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index d0348119dc..265c0f79ec 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -339,7 +339,6 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): "Renderer": self._instance.data["renderer"] } - frame_payloads = [] assembly_payloads = [] R_FRAME_NUMBER = re.compile( @@ -358,14 +357,15 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): itertools.chain.from_iterable( [f for _, f in exp[0].items()])) if not files: - # if beauty doesn't exists, use first aov we found + # if beauty doesn't exist, use first aov we found files = exp[0].get(list(exp[0].keys())[0]) else: files = exp assembly_files = files + # Define frame tile jobs frame_jobs = {} - + frame_payloads = {} file_index = 1 for file in files: frame = re.search(R_FRAME_NUMBER, file).group("frame") @@ -400,22 +400,24 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): new_job_info.ExtraInfo[0] = job_hash.hexdigest() new_job_info.ExtraInfo[1] = file - frame_payloads.append(self.assemble_payload( + frame_payloads[frame] = self.assemble_payload( job_info=new_job_info, plugin_info=new_plugin_info - )) + ) file_index += 1 self.log.info( "Submitting tile job(s) [{}] ...".format(len(frame_payloads))) + # Submit frame tile jobs frame_tile_job_id = {} - for tile_job_payload in frame_payloads: + for frame, tile_job_payload in frame_payloads.items(): response = self.submit(tile_job_payload) job_id = response.json()["_id"] frame_tile_job_id[frame] = job_id - assembly_jobs = [] + # Define assembly payloads + assembly_payloads = [] for i, file in enumerate(assembly_files): frame = re.search(R_FRAME_NUMBER, file).group("frame") @@ -478,14 +480,18 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): # add config file as job auxFile aux_files=[config_file] ) + assembly_payloads.append(payload) + # Submit assembly jobs + assembly_job_ids = [] + for i, payload in enumerate(assembly_payloads): self.log.info("submitting assembly job {} of {}".format( i+1, len(assembly_payloads) )) response = self.submit(payload) - assembly_jobs.append(response.json()["_id"]) + assembly_job_ids.append(response.json()["_id"]) - instance.data["assemblySubmissionJobs"] = assembly_jobs + instance.data["assemblySubmissionJobs"] = assembly_job_ids def _get_maya_payload(self, data): From 965522585b98e441907480caee57af5dad92c2d2 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 30 Aug 2022 01:11:12 +0200 Subject: [PATCH 070/169] Remove redundant docstring --- .../deadline/plugins/publish/submit_maya_deadline.py | 10 ---------- 1 file changed, 10 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index 265c0f79ec..cd9f426977 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -51,16 +51,6 @@ class MayaPluginInfo: class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): - """Submit available render layers to Deadline. - - Renders are submitted to a Deadline Web Service as - supplied via settings key "DEADLINE_REST_URL". 
- - Attributes: - use_published (bool): Use published scene to render instead of the - one in work area. - - """ label = "Submit Render to Deadline" hosts = ["maya"] From 8af88e115723e6abc73fff279773043e4a520326 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 30 Aug 2022 09:08:44 +0200 Subject: [PATCH 071/169] More cleanup --- .../plugins/publish/submit_maya_deadline.py | 98 +++++++------------ 1 file changed, 38 insertions(+), 60 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index cd9f426977..95140a082f 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -86,8 +86,7 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): job_info.Name = "%s - %s" % (src_filename, instance.name) job_info.BatchName = src_filename job_info.Plugin = instance.data.get("mayaRenderPlugin", "MayaBatch") - job_info.UserName = context.data.get( - "deadlineUser", getpass.getuser()) + job_info.UserName = context.data.get("deadlineUser", getpass.getuser()) # Deadline requires integers in frame range frames = "{start}-{end}x{step}".format( @@ -134,25 +133,18 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): environment = dict({key: os.environ[key] for key in keys if key in os.environ}, **legacy_io.Session) - # TODO: Taken from old publish class - test whether still needed - environment["OPENPYPE_LOG_NO_COLORS"] = "1" # to recognize job from PYPE for turning Event On/Off environment["OPENPYPE_RENDER_JOB"] = "1" + environment["OPENPYPE_LOG_NO_COLORS"] = "1" - for key in keys: - val = environment.get(key) - if val: - job_info.EnvironmentKeyValue = "{key}={value}".format( - key=key, - value=val - ) - # to recognize job from PYPE for turning Event On/Off - job_info.EnvironmentKeyValue = "OPENPYPE_RENDER_JOB=1" - job_info.EnvironmentKeyValue = "OPENPYPE_LOG_NO_COLORS=1" + for key, value in environment.items(): + if not value: + continue + job_info.EnvironmentKeyValue = "{key}={value}".format(key=key, + value=value) - # Optional, enable double-click to preview rendered - # frames from Deadline Monitor - for i, filepath in enumerate(instance.data["files"]): + # Enable double-click to preview rendered frames from Deadline Monitor + for filepath in instance.data["files"]: dirname = os.path.dirname(filepath) fname = os.path.basename(filepath) job_info.OutputDirectory = dirname.replace("\\", "/") @@ -241,25 +233,10 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): filename = os.path.basename(filepath) dirname = os.path.join(workspace, default_render_file) - # this is needed because renderman handles directory and file - # prefixes separately - if self._instance.data["renderer"] == "renderman": - dirname = os.path.dirname(output_filename_0) - - # Create render folder ---------------------------------------------- - try: - # Ensure render folder exists - os.makedirs(dirname) - except OSError: - pass - # Fill in common data to payload ------------------------------------ # TODO: Replace these with collected data from CollectRender payload_data = { "filename": filename, - "filepath": filepath, - "output_filename_0": output_filename_0, - "renderlayer": renderlayer, "dirname": dirname, } @@ -299,8 +276,8 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): def _tile_render(self, instance, payload): # As collected by 
super process() - job_info = self.job_info - plugin_info = self.pluginInfo + job_info = copy.deepcopy(self.job_info) + plugin_info = copy.deepcopy(self.plugin_info) # if we have sequence of files, we need to create tile job for # every frame @@ -314,23 +291,6 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): plugin_info["ImageWidth"] = instance.data.get("resolutionWidth") plugin_info["RegionRendering"] = True - assembly_job_info = copy.deepcopy(job_info) - assembly_job_info.Plugin = self.tile_assembler_plugin - assembly_job_info.Name = "{job.Name} - Tile Assembly Job".format( - job=job_info) - assembly_job_info.Frames = 1 - assembly_job_info.MachineLimit = 1 - assembly_job_info.Priority = instance.data.get("tile_priority", - self.tile_priority) - - assembly_plugin_info = { - "CleanupTiles": 1, - "ErrorOnMissing": True, - "Renderer": self._instance.data["renderer"] - } - - assembly_payloads = [] - R_FRAME_NUMBER = re.compile( r".+\.(?P[0-9]+)\..+") # noqa: N806, E501 REPL_FRAME_NUMBER = re.compile( @@ -407,7 +367,23 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): frame_tile_job_id[frame] = job_id # Define assembly payloads + assembly_job_info = copy.deepcopy(job_info) + assembly_job_info.Plugin = self.tile_assembler_plugin + assembly_job_info.Name = "{job.Name} - Tile Assembly Job".format( + job=job_info) + assembly_job_info.Frames = 1 + assembly_job_info.MachineLimit = 1 + assembly_job_info.Priority = instance.data.get("tile_priority", + self.tile_priority) + + assembly_plugin_info = { + "CleanupTiles": 1, + "ErrorOnMissing": True, + "Renderer": self._instance.data["renderer"] + } + assembly_payloads = [] + output_dir = self.job_info.OutputDirectory[0] for i, file in enumerate(assembly_files): frame = re.search(R_FRAME_NUMBER, file).group("frame") @@ -427,22 +403,19 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): # write assembly job config files now = datetime.now() - config_file = os.path.join( - os.path.dirname(output_filename_0), + config_file = os.path.join(output_dir, "{}_config_{}.txt".format( os.path.splitext(file)[0], now.strftime("%Y_%m_%d_%H_%M_%S") ) ) - - config_file_dir = os.path.dirname(config_file) try: - if not os.path.isdir(config_file_dir): - os.makedirs(config_file_dir) + if not os.path.isdir(output_dir): + os.makedirs(output_dir) except OSError: # directory is not available self.log.warning("Path is unreachable: " - "`{}`".format(config_file_dir)) + "`{}`".format(output_dir)) with open(config_file, "w") as cf: print("TileCount={}".format(tiles_count), file=cf) @@ -567,17 +540,22 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): job_info.Plugin = "Python" job_info.Frames = 1 + renderlayer = self._instance.data["setMembers"] + # add required env vars for the export script envs = { "AVALON_APP_NAME": os.environ.get("AVALON_APP_NAME"), - "OPENPYPE_ASS_EXPORT_RENDER_LAYER": data["renderlayer"], + "OPENPYPE_ASS_EXPORT_RENDER_LAYER": renderlayer, "OPENPYPE_ASS_EXPORT_SCENE_FILE": self.scene_path, - "OPENPYPE_ASS_EXPORT_OUTPUT": payload['JobInfo']['OutputFilename0'], # noqa + "OPENPYPE_ASS_EXPORT_OUTPUT": job_info.OutputFilename[0], "OPENPYPE_ASS_EXPORT_START": int(self._instance.data["frameStartHandle"]), # noqa "OPENPYPE_ASS_EXPORT_END": int(self._instance.data["frameEndHandle"]), # noqa "OPENPYPE_ASS_EXPORT_STEP": 1 } for key, value in envs.items(): + if not value: + continue + job_info.EnvironmentKeyValue = "{key}={value}".format(key=key, value=value) From 
e8aa926cb7d338427ce7ba558f8ffa1609fde8ef Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 30 Aug 2022 09:28:42 +0200 Subject: [PATCH 072/169] Move single use of in-line function to the class for readability --- .../deadline/abstract_submit_deadline.py | 39 ++++++++++--------- 1 file changed, 20 insertions(+), 19 deletions(-) diff --git a/openpype/modules/deadline/abstract_submit_deadline.py b/openpype/modules/deadline/abstract_submit_deadline.py index f56cf49f6d..a3db3feac9 100644 --- a/openpype/modules/deadline/abstract_submit_deadline.py +++ b/openpype/modules/deadline/abstract_submit_deadline.py @@ -544,26 +544,8 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin): """ - def _get_workfile_instance(context): - """Find workfile instance in context""" - for i in context: - - is_workfile = ( - "workfile" in i.data.get("families", []) or - i.data["family"] == "workfile" - ) - if not is_workfile: - continue - - # test if there is instance of workfile waiting - # to be published. - assert i.data["publish"] is True, ( - "Workfile (scene) must be published along") - - return i - instance = self._instance - workfile_instance = _get_workfile_instance(instance.context) + workfile_instance = self._get_workfile_instance(instance.context) if not workfile_instance: return @@ -689,3 +671,22 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin): self._instance.data["deadlineSubmissionJob"] = result return result["_id"] + + @staticmethod + def _get_workfile_instance(context): + """Find workfile instance in context""" + for i in context: + + is_workfile = ( + "workfile" in i.data.get("families", []) or + i.data["family"] == "workfile" + ) + if not is_workfile: + continue + + # test if there is instance of workfile waiting + # to be published. + assert i.data["publish"] is True, ( + "Workfile (scene) must be published along") + + return i From c6a0a199e1810e5c63c484d5871d34d14594e5be Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 30 Aug 2022 09:40:08 +0200 Subject: [PATCH 073/169] Cosmetics --- .../deadline/plugins/publish/submit_maya_deadline.py | 12 +++++------- 1 file changed, 5 insertions(+), 7 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index 95140a082f..873005e051 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -267,15 +267,16 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): if instance.data.get("tileRendering"): # Prepare tiles data - self._tile_render(instance, payload) + self._tile_render(payload) else: # Submit main render job job_info, plugin_info = payload self.submit(self.assemble_payload(job_info, plugin_info)) - def _tile_render(self, instance, payload): + def _tile_render(self, payload): # As collected by super process() + instance = self._instance job_info = copy.deepcopy(self.job_info) plugin_info = copy.deepcopy(self.plugin_info) @@ -321,11 +322,8 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): frame = re.search(R_FRAME_NUMBER, file).group("frame") new_job_info = copy.deepcopy(job_info) - new_job_info.Name = "{} (Frame {} - {} tiles)".format( - payload["JobInfo"]["Name"], - frame, - instance.data.get("tilesX") * instance.data.get("tilesY") - ) + new_job_info.Name += " (Frame {} - {} tiles)".format(frame, + tiles_count) new_job_info.TileJobFrame = frame new_plugin_info = 
copy.deepcopy(plugin_info) From e429e2ec41d23c097b301c31485a38aea634d6a3 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 30 Aug 2022 10:19:38 +0200 Subject: [PATCH 074/169] Remove json dump since `renderProducts` are not serializable --- openpype/hosts/maya/plugins/publish/collect_render.py | 1 - 1 file changed, 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/publish/collect_render.py b/openpype/hosts/maya/plugins/publish/collect_render.py index 768a53329f..14aac2f206 100644 --- a/openpype/hosts/maya/plugins/publish/collect_render.py +++ b/openpype/hosts/maya/plugins/publish/collect_render.py @@ -360,7 +360,6 @@ class CollectMayaRender(pyblish.api.ContextPlugin): instance.data["label"] = label instance.data["farm"] = True instance.data.update(data) - self.log.debug("data: {}".format(json.dumps(data, indent=4))) def parse_options(self, render_globals): """Get all overrides with a value, skip those without. From 9472cbe271af4be7c0328bf05200f44c521c58ae Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 30 Aug 2022 11:23:14 +0200 Subject: [PATCH 075/169] Fix submission --- .../collect_deadline_server_from_instance.py | 2 +- .../plugins/publish/submit_maya_deadline.py | 71 +++++++++---------- 2 files changed, 33 insertions(+), 40 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/collect_deadline_server_from_instance.py b/openpype/modules/deadline/plugins/publish/collect_deadline_server_from_instance.py index a7035cd99f..9981bead3e 100644 --- a/openpype/modules/deadline/plugins/publish/collect_deadline_server_from_instance.py +++ b/openpype/modules/deadline/plugins/publish/collect_deadline_server_from_instance.py @@ -13,7 +13,7 @@ class CollectDeadlineServerFromInstance(pyblish.api.InstancePlugin): order = pyblish.api.CollectorOrder + 0.415 label = "Deadline Webservice from the Instance" - families = ["rendering"] + families = ["rendering", "renderlayer"] def process(self, instance): instance.data["deadlineUrl"] = self._collect_deadline_url(instance) diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index 873005e051..2afa1883c4 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -27,7 +27,6 @@ import itertools from collections import OrderedDict import attr -import clique from maya import cmds @@ -111,7 +110,7 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): # Add options from RenderGlobals render_globals = instance.data.get("renderGlobals", {}) - for key, value in render_globals: + for key, value in render_globals.items(): setattr(job_info, key, value) keys = [ @@ -143,13 +142,6 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): job_info.EnvironmentKeyValue = "{key}={value}".format(key=key, value=value) - # Enable double-click to preview rendered frames from Deadline Monitor - for filepath in instance.data["files"]: - dirname = os.path.dirname(filepath) - fname = os.path.basename(filepath) - job_info.OutputDirectory = dirname.replace("\\", "/") - job_info.OutputFilename = fname - # Adding file dependencies. 
if self.asset_dependencies: dependencies = instance.context.data["fileDependencies"] @@ -160,28 +152,9 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): # Add list of expected files to job # --------------------------------- exp = instance.data.get("expectedFiles") - - def _get_output_filename(files): - col, rem = clique.assemble(files) - if not col and rem: - # we couldn't find any collections but have - # individual files. - assert len(rem) == 1, ( - "Found multiple non related files " - "to render, don't know what to do " - "with them.") - return rem[0] - else: - return col[0].format('{head}{padding}{tail}') - - if isinstance(exp[0], dict): - # we have aovs and we need to iterate over them - for _aov, files in exp[0].items(): - output_file = _get_output_filename(files) - job_info.OutputFilename = output_file - else: - output_file = _get_output_filename(exp) - job_info.OutputFilename = output_file + for filepath in self._iter_expected_files(exp): + job_info.OutputDirectory = os.path.dirname(filepath) + job_info.OutputFilename = os.path.basename(filepath) return job_info @@ -194,6 +167,7 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): SceneFile=self.scene_path, Version=cmds.about(version=True), RenderLayer=instance.data['setMembers'], + Renderer=instance.data["renderer"], RenderSetupIncludeLights=instance.data.get("renderSetupIncludeLights"), # noqa ProjectPath=context.data["workspaceDir"], UsingRenderLayers=True, @@ -216,7 +190,9 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): # TODO: Avoid the need for this logic here, needed for submit publish # Store output dir for unified publisher (filesequence) - output_dir = os.path.dirname(instance.data["files"][0]) + expected_files = instance.data["expectedFiles"] + first_file = next(self._iter_expected_files(expected_files)) + output_dir = os.path.dirname(first_file) instance.data["outputDir"] = output_dir instance.data["toBeRenderedOn"] = "deadline" @@ -247,17 +223,20 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): "Vray Scene and Ass Scene options are mutually exclusive") if "vrayscene" in instance.data["families"]: + self.log.debug("Submitting V-Ray scene render..") vray_export_payload = self._get_vray_export_payload(payload_data) export_job = self.submit(vray_export_payload) payload = self._get_vray_render_payload(payload_data) elif "assscene" in instance.data["families"]: + self.log.debug("Submitting Arnold .ass standalone render..") ass_export_payload = self._get_arnold_export_payload(payload_data) export_job = self.submit(ass_export_payload) payload = self._get_arnold_render_payload(payload_data) else: + self.log.debug("Submitting MayaBatch render..") payload = self._get_maya_payload(payload_data) # Add export job as dependency -------------------------------------- @@ -274,6 +253,7 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): self.submit(self.assemble_payload(job_info, plugin_info)) def _tile_render(self, payload): + """Submit as tile render per frame with dependent assembly jobs.""" # As collected by super process() instance = self._instance @@ -315,7 +295,7 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): assembly_files = files # Define frame tile jobs - frame_jobs = {} + frame_file_hash = {} frame_payloads = {} file_index = 1 for file in files: @@ -343,9 +323,11 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): 
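            # `tiles_data` comes from _format_tiles() in this file: its
            # "JobInfo" part holds the per-tile "OutputFilename{index}Tile{tile}"
            # filenames, while its "PluginInfo" part holds "RegionPrefix{tile}"
            # and the "RegionTop/Bottom/Left/Right{tile}" pixel bounds, so each
            # per-frame tile job renders only its own regions.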
self.log.info("hashing {} - {}".format(file_index, file)) job_hash = hashlib.sha256( ("{}_{}".format(file_index, file)).encode("utf-8")) - frame_jobs[frame] = job_hash.hexdigest() - new_job_info.ExtraInfo[0] = job_hash.hexdigest() + file_hash = job_hash.hexdigest() + frame_file_hash[frame] = file_hash + + new_job_info.ExtraInfo[0] = file_hash new_job_info.ExtraInfo[1] = file frame_payloads[frame] = self.assemble_payload( @@ -391,10 +373,10 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): REPL_FRAME_NUMBER, "\\1{}\\3".format("#" * len(frame)), file) - hash = frame_jobs[frame] + file_hash = frame_file_hash[frame] tile_job_id = frame_tile_job_id[frame] - frame_assembly_job_info.ExtraInfo[0] = hash + frame_assembly_job_info.ExtraInfo[0] = file_hash frame_assembly_job_info.ExtraInfo[1] = file frame_assembly_job_info.JobDependency = tile_job_id @@ -483,11 +465,12 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): if int(rman_version.split(".")[0]) > 22: renderer = "renderman22" - plugin_info = { + plugin_info = copy.deepcopy(self.plugin_info) + plugin_info.update({ # Output directory and filename "OutputFilePath": data["dirname"].replace("\\", "/"), "OutputFilePrefix": layer_prefix, - } + }) return job_info, plugin_info @@ -710,6 +693,16 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): end=int(self._instance.data["frameEndHandle"]), ) + @staticmethod + def _iter_expected_files(exp): + if isinstance(exp[0], dict): + for _aov, files in exp[0].items(): + for file in files: + yield file + else: + for file in exp: + yield file + def _format_tiles( filename, index, tiles_x, tiles_y, From 227b8405479e3a87b507e63a9b59fe473d7c9276 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 30 Aug 2022 14:12:54 +0200 Subject: [PATCH 076/169] Refactor AbstractSubmitDeadline vars to allow easier access to indices --- .../deadline/abstract_submit_deadline.py | 251 +++++++----------- .../plugins/publish/submit_maya_deadline.py | 35 ++- 2 files changed, 115 insertions(+), 171 deletions(-) diff --git a/openpype/modules/deadline/abstract_submit_deadline.py b/openpype/modules/deadline/abstract_submit_deadline.py index a3db3feac9..427faec115 100644 --- a/openpype/modules/deadline/abstract_submit_deadline.py +++ b/openpype/modules/deadline/abstract_submit_deadline.py @@ -9,6 +9,7 @@ import os from abc import abstractmethod import platform import getpass +from functools import partial from collections import OrderedDict import six @@ -66,6 +67,58 @@ def requests_get(*args, **kwargs): return requests.get(*args, **kwargs) +class DeadlineIndexedVar(dict): + """ + + Allows to set and query values by integer indices: + Query: var[1] or var.get(1) + Set: var[1] = "my_value" + Append: var += "value" + + Note: Iterating the instance is not guarantueed to be the order of the + indices. 
To do so iterate with `sorted()` + + """ + def __init__(self, key): + self.__key = key + + def next_available_index(self): + # Add as first unused entry + i = 0 + while i in self.keys(): + i += 1 + return i + + def serialize(self): + key = self.__key + + # Allow custom location for index in serialized string + if "{}" not in key: + key = key + "{}" + + return { + key.format(index): value for index, value in sorted(self.items()) + } + + def update(self, data): + # Force the integer key check + for key, value in data.items(): + self.__setitem__(key, value) + + def __iadd__(self, other): + index = self.next_available_index() + self[index] = other + return self + + def __setitem__(self, key, value): + if not isinstance(key, int): + raise TypeError("Key must be an integer: {}".format(key)) + + if key < 0: + raise ValueError("Negative index can't be set: {}".format(key)) + dict.__setitem__(self, key, value) + + @attr.s class DeadlineJobInfo(object): """Mapping of all Deadline *JobInfo* attributes. @@ -218,24 +271,8 @@ class DeadlineJobInfo(object): # Environment # ---------------------------------------------- - _environmentKeyValue = attr.ib(factory=list) - - @property - def EnvironmentKeyValue(self): # noqa: N802 - """Return all environment key values formatted for Deadline. - - Returns: - dict: as `{'EnvironmentKeyValue0', 'key=value'}` - - """ - out = {} - for index, v in enumerate(self._environmentKeyValue): - out["EnvironmentKeyValue{}".format(index)] = v - return out - - @EnvironmentKeyValue.setter - def EnvironmentKeyValue(self, val): # noqa: N802 - self._environmentKeyValue.append(val) + EnvironmentKeyValue = attr.ib(factory=partial(DeadlineIndexedVar, + "EnvironmentKeyValue")) IncludeEnvironment = attr.ib(default=None) # Default: false UseJobEnvironmentOnly = attr.ib(default=None) # Default: false @@ -243,142 +280,29 @@ class DeadlineJobInfo(object): # Job Extra Info # ---------------------------------------------- - _extraInfos = attr.ib(factory=list) - _extraInfoKeyValues = attr.ib(factory=list) - - @property - def ExtraInfo(self): # noqa: N802 - """Return all ExtraInfo values formatted for Deadline. - - Returns: - dict: as `{'ExtraInfo0': 'value'}` - - """ - out = {} - for index, v in enumerate(self._extraInfos): - out["ExtraInfo{}".format(index)] = v - return out - - @ExtraInfo.setter - def ExtraInfo(self, val): # noqa: N802 - self._extraInfos.append(val) - - @property - def ExtraInfoKeyValue(self): # noqa: N802 - """Return all ExtraInfoKeyValue values formatted for Deadline. - - Returns: - dict: as {'ExtraInfoKeyValue0': 'key=value'}` - - """ - out = {} - for index, v in enumerate(self._extraInfoKeyValues): - out["ExtraInfoKeyValue{}".format(index)] = v - return out - - @ExtraInfoKeyValue.setter - def ExtraInfoKeyValue(self, val): # noqa: N802 - self._extraInfoKeyValues.append(val) + ExtraInfo = attr.ib(factory=partial(DeadlineIndexedVar, "ExtraInfo")) + ExtraInfoKeyValue = attr.ib(factory=partial(DeadlineIndexedVar, + "ExtraInfoKeyValue")) # Task Extra Info Names # ---------------------------------------------- OverrideTaskExtraInfoNames = attr.ib(default=None) # Default: false - _taskExtraInfos = attr.ib(factory=list) - - @property - def TaskExtraInfoName(self): # noqa: N802 - """Return all TaskExtraInfoName values formatted for Deadline. 
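# A minimal usage sketch of the DeadlineIndexedVar class above (values are
# illustrative only): entries are set by integer index or appended with "+=",
# and serialize() expands them to Deadline's numbered keys.
env = DeadlineIndexedVar("EnvironmentKeyValue")
env[0] = "OPENPYPE_RENDER_JOB=1"
env += "OPENPYPE_LOG_NO_COLORS=1"   # appended at the next free index (1)
assert env.serialize() == {
    "EnvironmentKeyValue0": "OPENPYPE_RENDER_JOB=1",
    "EnvironmentKeyValue1": "OPENPYPE_LOG_NO_COLORS=1",
}
tiles = DeadlineIndexedVar("OutputFilename{}Tile")
tiles[0] = "beauty_tile0.exr"
assert tiles.serialize() == {"OutputFilename0Tile": "beauty_tile0.exr"}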
- - Returns: - dict: as `{'TaskExtraInfoName0': 'value'}` - - """ - out = {} - for index, v in enumerate(self._taskExtraInfos): - out["TaskExtraInfoName{}".format(index)] = v - return out - - @TaskExtraInfoName.setter - def TaskExtraInfoName(self, val): # noqa: N802 - self._taskExtraInfos.append(val) + TaskExtraInfoName = attr.ib(factory=partial(DeadlineIndexedVar, + "TaskExtraInfoName")) # Output # ---------------------------------------------- - _outputFilename = attr.ib(factory=list) - _outputFilenameTile = attr.ib(factory=list) - _outputDirectory = attr.ib(factory=list) - - @property - def OutputFilename(self): # noqa: N802 - """Return all OutputFilename values formatted for Deadline. - - Returns: - dict: as `{'OutputFilename0': 'filename'}` - - """ - out = {} - for index, v in enumerate(self._outputFilename): - out["OutputFilename{}".format(index)] = v - return out - - @OutputFilename.setter - def OutputFilename(self, val): # noqa: N802 - self._outputFilename.append(val) - - @property - def OutputFilenameTile(self): # noqa: N802 - """Return all OutputFilename#Tile values formatted for Deadline. - - Returns: - dict: as `{'OutputFilenme#Tile': 'tile'}` - - """ - out = {} - for index, v in enumerate(self._outputFilenameTile): - out["OutputFilename{}Tile".format(index)] = v - return out - - @OutputFilenameTile.setter - def OutputFilenameTile(self, val): # noqa: N802 - self._outputFilenameTile.append(val) - - @property - def OutputDirectory(self): # noqa: N802 - """Return all OutputDirectory values formatted for Deadline. - - Returns: - dict: as `{'OutputDirectory0': 'dir'}` - - """ - out = {} - for index, v in enumerate(self._outputDirectory): - out["OutputDirectory{}".format(index)] = v - return out - - @OutputDirectory.setter - def OutputDirectory(self, val): # noqa: N802 - self._outputDirectory.append(val) + OutputFilename = attr.ib(factory=partial(DeadlineIndexedVar, + "OutputFilename")) + OutputFilenameTile = attr.ib(factory=partial(DeadlineIndexedVar, + "OutputFilename{}Tile")) + OutputDirectory = attr.ib(factory=partial(DeadlineIndexedVar, + "OutputDirectory")) # Asset Dependency # ---------------------------------------------- - _assetDependency = attr.ib(factory=list) - - @property - def AssetDependency(self): # noqa: N802 - """Return all OutputDirectory values formatted for Deadline. 
- - Returns: - dict: as `{'OutputDirectory0': 'dir'}` - - """ - out = {} - for index, v in enumerate(self._assetDependency): - out["AssetDependency{}".format(index)] = v - return out - - @OutputDirectory.setter - def AssetDependency(self, val): # noqa: N802 - self._assetDependency.append(val) + AssetDependency = attr.ib(factory=partial(DeadlineIndexedVar, + "AssetDependency")) # Tile Job # ---------------------------------------------- @@ -402,7 +326,7 @@ class DeadlineJobInfo(object): """ def filter_data(a, v): - if a.name.startswith("_"): + if isinstance(v, DeadlineIndexedVar): return False if v is None: return False @@ -410,16 +334,37 @@ class DeadlineJobInfo(object): serialized = attr.asdict( self, dict_factory=OrderedDict, filter=filter_data) - serialized.update(self.EnvironmentKeyValue) - serialized.update(self.ExtraInfo) - serialized.update(self.ExtraInfoKeyValue) - serialized.update(self.TaskExtraInfoName) - serialized.update(self.OutputFilename) - serialized.update(self.OutputFilenameTile) - serialized.update(self.OutputDirectory) - serialized.update(self.AssetDependency) + + # Custom serialize these attributes + for attribute in [ + self.EnvironmentKeyValue, + self.ExtraInfo, + self.ExtraInfoKeyValue, + self.TaskExtraInfoName, + self.OutputFilename, + self.OutputFilenameTile, + self.OutputDirectory, + self.AssetDependency + ]: + serialized.update(attribute.serialize()) + return serialized + def update(self, data): + """Update instance with data dict""" + for key, value in data.items(): + setattr(self, key, value) + + def __setattr__(self, key, value): + # Backwards compatibility: Allow appending to index vars by setting + # it on Job Info directly like: JobInfo.OutputFilename = filename + existing = getattr(self, key, None) + if isinstance(existing, DeadlineIndexedVar): + existing += value + return + + object.__setattr__(self, key, value) + @six.add_metaclass(AbstractMetaInstancePlugin) class AbstractSubmitDeadline(pyblish.api.InstancePlugin): diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index 2afa1883c4..d979c92814 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -110,8 +110,7 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): # Add options from RenderGlobals render_globals = instance.data.get("renderGlobals", {}) - for key, value in render_globals.items(): - setattr(job_info, key, value) + job_info.update(render_globals) keys = [ "FTRACK_API_KEY", @@ -257,8 +256,10 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): # As collected by super process() instance = self._instance - job_info = copy.deepcopy(self.job_info) - plugin_info = copy.deepcopy(self.plugin_info) + + payload_job_info, payload_plugin_info = payload + job_info = copy.deepcopy(payload_job_info) + plugin_info = copy.deepcopy(payload_plugin_info) # if we have sequence of files, we need to create tile job for # every frame @@ -309,16 +310,17 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): new_plugin_info = copy.deepcopy(plugin_info) # Add tile data into job info and plugin info - tiles_out, _ = _format_tiles( + tiles_data = _format_tiles( file, 0, instance.data.get("tilesX"), instance.data.get("tilesY"), instance.data.get("resolutionWidth"), instance.data.get("resolutionHeight"), - payload["PluginInfo"]["OutputFilePrefix"] - ) - 
new_job_info.update(tiles_out["JobInfo"]) - new_plugin_info.update(tiles_out["PluginInfo"]) + payload_plugin_info["OutputFilePrefix"] + )[0] + + new_job_info.update(tiles_data["JobInfo"]) + new_plugin_info.update(tiles_data["PluginInfo"]) self.log.info("hashing {} - {}".format(file_index, file)) job_hash = hashlib.sha256( @@ -342,15 +344,13 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): # Submit frame tile jobs frame_tile_job_id = {} for frame, tile_job_payload in frame_payloads.items(): - response = self.submit(tile_job_payload) - job_id = response.json()["_id"] + job_id = self.submit(tile_job_payload) frame_tile_job_id[frame] = job_id # Define assembly payloads assembly_job_info = copy.deepcopy(job_info) assembly_job_info.Plugin = self.tile_assembler_plugin - assembly_job_info.Name = "{job.Name} - Tile Assembly Job".format( - job=job_info) + assembly_job_info.Name += " - Tile Assembly Job" assembly_job_info.Frames = 1 assembly_job_info.MachineLimit = 1 assembly_job_info.Priority = instance.data.get("tile_priority", @@ -411,10 +411,9 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): instance.data.get("tilesY"), instance.data.get("resolutionWidth"), instance.data.get("resolutionHeight"), - payload["PluginInfo"]["OutputFilePrefix"] + payload_plugin_info["OutputFilePrefix"] )[1] - sorted(tiles) - for k, v in tiles.items(): + for k, v in sorted(tiles.items()): print("{}={}".format(k, v), file=cf) payload = self.assemble_payload( @@ -431,8 +430,8 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): self.log.info("submitting assembly job {} of {}".format( i+1, len(assembly_payloads) )) - response = self.submit(payload) - assembly_job_ids.append(response.json()["_id"]) + assembly_job_id = self.submit(payload) + assembly_job_ids.append(assembly_job_id) instance.data["assemblySubmissionJobs"] = assembly_job_ids From a7293f2a4f6a30297eea15297f5b25100e11e9f1 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 30 Aug 2022 14:13:14 +0200 Subject: [PATCH 077/169] Fix indentation --- openpype/modules/deadline/abstract_submit_deadline.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/modules/deadline/abstract_submit_deadline.py b/openpype/modules/deadline/abstract_submit_deadline.py index 427faec115..e1bdcb10d9 100644 --- a/openpype/modules/deadline/abstract_submit_deadline.py +++ b/openpype/modules/deadline/abstract_submit_deadline.py @@ -295,7 +295,7 @@ class DeadlineJobInfo(object): OutputFilename = attr.ib(factory=partial(DeadlineIndexedVar, "OutputFilename")) OutputFilenameTile = attr.ib(factory=partial(DeadlineIndexedVar, - "OutputFilename{}Tile")) + "OutputFilename{}Tile")) OutputDirectory = attr.ib(factory=partial(DeadlineIndexedVar, "OutputDirectory")) From 91a3d8494bf8b65fe37560a02018edf59433caa6 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 30 Aug 2022 14:13:39 +0200 Subject: [PATCH 078/169] Disable aux files for now since it's not supported by Deadline Webservice --- .../modules/deadline/plugins/publish/submit_maya_deadline.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index d979c92814..7694e80e9a 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -419,8 +419,9 @@ class 
MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): payload = self.assemble_payload( job_info=frame_assembly_job_info, plugin_info=assembly_plugin_info.copy(), + # todo: aux file transfers don't work with deadline webservice # add config file as job auxFile - aux_files=[config_file] + # aux_files=[config_file] ) assembly_payloads.append(payload) From 39d216797dacad00a3f42102e146c2806b9f8244 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 30 Aug 2022 14:17:40 +0200 Subject: [PATCH 079/169] Force integer pixel values --- .../deadline/plugins/publish/submit_maya_deadline.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index 7694e80e9a..3fbff0153b 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -765,10 +765,10 @@ def _format_tiles( tiles_x, tiles_y ) - top = int(height) - (tile_y * h_space) - bottom = int(height) - ((tile_y - 1) * h_space) - 1 - left = (tile_x - 1) * w_space - right = (tile_x * w_space) - 1 + top = int(height - (tile_y * h_space)) + bottom = int(height - ((tile_y - 1) * h_space) - 1) + left = int((tile_x - 1) * w_space) + right = int((tile_x * w_space) - 1) # Job Info new_filename = "{}/{}{}".format( From a7190a51ad74823c069c73dbe8f7ec0f4c6daba6 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 30 Aug 2022 14:19:52 +0200 Subject: [PATCH 080/169] Force integer pixel values --- .../plugins/publish/submit_maya_deadline.py | 18 +++++++++++------- 1 file changed, 11 insertions(+), 7 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index 7966861358..3ac9df07d6 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -141,17 +141,21 @@ def _format_tiles( out["PluginInfo"]["RegionPrefix{}".format(str(tile))] = \ "/{}".format(tile_prefix).join(prefix.rsplit("/", 1)) - out["PluginInfo"]["RegionTop{}".format(tile)] = int(height) - (tile_y * h_space) # noqa: E501 - out["PluginInfo"]["RegionBottom{}".format(tile)] = int(height) - ((tile_y - 1) * h_space) - 1 # noqa: E501 - out["PluginInfo"]["RegionLeft{}".format(tile)] = (tile_x - 1) * w_space # noqa: E501 - out["PluginInfo"]["RegionRight{}".format(tile)] = (tile_x * w_space) - 1 # noqa: E501 + top = int(height) - (tile_y * h_space) + bottom = int(height) - ((tile_y - 1) * h_space) - 1 + left = (tile_x - 1) * w_space + right = (tile_x * w_space) - 1 + + out["PluginInfo"]["RegionTop{}".format(tile)] = int(top) + out["PluginInfo"]["RegionBottom{}".format(tile)] = int(bottom) + out["PluginInfo"]["RegionLeft{}".format(tile)] = int(left) + out["PluginInfo"]["RegionRight{}".format(tile)] = int(right) cfg["Tile{}".format(tile)] = new_filename cfg["Tile{}Tile".format(tile)] = new_filename cfg["Tile{}FileName".format(tile)] = new_filename - cfg["Tile{}X".format(tile)] = (tile_x - 1) * w_space - - cfg["Tile{}Y".format(tile)] = int(height) - (tile_y * h_space) + cfg["Tile{}X".format(tile)] = int(left) + cfg["Tile{}Y".format(tile)] = int(top) cfg["Tile{}Width".format(tile)] = w_space cfg["Tile{}Height".format(tile)] = h_space From 2fb7cabca49bae51162e2a82dfdc7d225094ed36 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 30 Aug 2022 14:39:28 +0200 
Subject: [PATCH 081/169] Shush hound --- .../deadline/abstract_submit_deadline.py | 4 ++-- .../plugins/publish/submit_maya_deadline.py | 21 +++++++++++-------- 2 files changed, 14 insertions(+), 11 deletions(-) diff --git a/openpype/modules/deadline/abstract_submit_deadline.py b/openpype/modules/deadline/abstract_submit_deadline.py index e1bdcb10d9..35b114da95 100644 --- a/openpype/modules/deadline/abstract_submit_deadline.py +++ b/openpype/modules/deadline/abstract_submit_deadline.py @@ -623,8 +623,8 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin): for i in context: is_workfile = ( - "workfile" in i.data.get("families", []) or - i.data["family"] == "workfile" + "workfile" in i.data.get("families", []) or + i.data["family"] == "workfile" ) if not is_workfile: continue diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index 3fbff0153b..1b69f8b4e9 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -357,14 +357,14 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): self.tile_priority) assembly_plugin_info = { - "CleanupTiles": 1, - "ErrorOnMissing": True, - "Renderer": self._instance.data["renderer"] + "CleanupTiles": 1, + "ErrorOnMissing": True, + "Renderer": self._instance.data["renderer"] } assembly_payloads = [] output_dir = self.job_info.OutputDirectory[0] - for i, file in enumerate(assembly_files): + for file in assembly_files: frame = re.search(R_FRAME_NUMBER, file).group("frame") frame_assembly_job_info = copy.deepcopy(assembly_job_info) @@ -383,7 +383,8 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): # write assembly job config files now = datetime.now() - config_file = os.path.join(output_dir, + config_file = os.path.join( + output_dir, "{}_config_{}.txt".format( os.path.splitext(file)[0], now.strftime("%Y_%m_%d_%H_%M_%S") @@ -427,10 +428,12 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): # Submit assembly jobs assembly_job_ids = [] + num_assemblies = len(assembly_payloads) for i, payload in enumerate(assembly_payloads): - self.log.info("submitting assembly job {} of {}".format( - i+1, len(assembly_payloads) - )) + self.log.info( + "submitting assembly job {} of {}".format(i + 1, + num_assemblies) + ) assembly_job_id = self.submit(payload) assembly_job_ids.append(assembly_job_id) @@ -682,7 +685,7 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): pf.writelines(scene_data) pf.truncate() self.log.info("Applied {} patch to scene.".format( - patches[i]["name"] + patches[i]["name"] )) def _job_info_label(self, label): From e9e01e3163079bc9f6c48fe633aed92592928328 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 30 Aug 2022 14:43:18 +0200 Subject: [PATCH 082/169] Use update method --- .../modules/deadline/plugins/publish/submit_maya_deadline.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index 1b69f8b4e9..9692b136e9 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -69,8 +69,7 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): # todo: test whether this works for existing 
production cases # where custom jobInfo was stored in the project settings - for key, value in self.jobInfo.items(): - setattr(job_info, key, value) + job_info.update(self.jobInfo) instance = self._instance context = instance.context From f9c214e435a53ecbf8b5f0aba57292511a1e0873 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 30 Aug 2022 17:02:23 +0200 Subject: [PATCH 083/169] Only apply `RenderSetupIncludeLights` when value is not None --- .../deadline/plugins/publish/submit_maya_deadline.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index 3ac9df07d6..92c50c3e80 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -509,7 +509,15 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin): self.payload_skeleton["JobInfo"]["Comment"] = comment self.payload_skeleton["PluginInfo"]["RenderLayer"] = renderlayer - self.payload_skeleton["PluginInfo"]["RenderSetupIncludeLights"] = instance.data.get("renderSetupIncludeLights") # noqa + # Only set RenderSetupIncludeLights when not None + rs_include_lights = instance.data.get("renderSetupIncludeLights") + if rs_include_lights is not None: + self.payload_skeleton["PluginInfo"]["RenderSetupIncludeLights"] = ( + rs_include_lights + ) + else: + self.payload_skeleton["PluginInfo"].pop("RenderSetupIncludeLights") + # Adding file dependencies. dependencies = instance.context.data["fileDependencies"] dependencies.append(filepath) From 37b2d85aa762ed4908fe29e3a57a11aef8e459cf Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 30 Aug 2022 17:02:55 +0200 Subject: [PATCH 084/169] Fix indentation (shush hound) --- .../modules/deadline/plugins/publish/submit_maya_deadline.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index 92c50c3e80..0a18506bd4 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -513,7 +513,7 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin): rs_include_lights = instance.data.get("renderSetupIncludeLights") if rs_include_lights is not None: self.payload_skeleton["PluginInfo"]["RenderSetupIncludeLights"] = ( - rs_include_lights + rs_include_lights ) else: self.payload_skeleton["PluginInfo"].pop("RenderSetupIncludeLights") From 1f8c7e8ea527adfd16bf209781e2997d03f3e189 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 30 Aug 2022 20:48:23 +0200 Subject: [PATCH 085/169] Force integer division --- .../plugins/publish/submit_maya_deadline.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index 0a18506bd4..ac9d5a3d79 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -117,8 +117,8 @@ def _format_tiles( tile = 0 out = {"JobInfo": {}, "PluginInfo": {}} cfg = OrderedDict() - w_space = width / tiles_x - h_space = height / tiles_y + w_space = width // tiles_x + h_space = height // tiles_y cfg["TilesCropped"] = "False" @@ -146,16 +146,16 @@ def 
_format_tiles( left = (tile_x - 1) * w_space right = (tile_x * w_space) - 1 - out["PluginInfo"]["RegionTop{}".format(tile)] = int(top) - out["PluginInfo"]["RegionBottom{}".format(tile)] = int(bottom) - out["PluginInfo"]["RegionLeft{}".format(tile)] = int(left) - out["PluginInfo"]["RegionRight{}".format(tile)] = int(right) + out["PluginInfo"]["RegionTop{}".format(tile)] = top + out["PluginInfo"]["RegionBottom{}".format(tile)] = bottom + out["PluginInfo"]["RegionLeft{}".format(tile)] = left + out["PluginInfo"]["RegionRight{}".format(tile)] = right cfg["Tile{}".format(tile)] = new_filename cfg["Tile{}Tile".format(tile)] = new_filename cfg["Tile{}FileName".format(tile)] = new_filename - cfg["Tile{}X".format(tile)] = int(left) - cfg["Tile{}Y".format(tile)] = int(top) + cfg["Tile{}X".format(tile)] = left + cfg["Tile{}Y".format(tile)] = top cfg["Tile{}Width".format(tile)] = w_space cfg["Tile{}Height".format(tile)] = h_space From 67ee5b5710548a0d390d9aa34b13563da7bda30d Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 31 Aug 2022 10:35:52 +0200 Subject: [PATCH 086/169] Format with signed numbers (include + or -) This allows negative offsets to be pasted --- .../plugins/OpenPypeTileAssembler/OpenPypeTileAssembler.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/modules/deadline/repository/custom/plugins/OpenPypeTileAssembler/OpenPypeTileAssembler.py b/openpype/modules/deadline/repository/custom/plugins/OpenPypeTileAssembler/OpenPypeTileAssembler.py index 9fca1b5391..05899de5e1 100644 --- a/openpype/modules/deadline/repository/custom/plugins/OpenPypeTileAssembler/OpenPypeTileAssembler.py +++ b/openpype/modules/deadline/repository/custom/plugins/OpenPypeTileAssembler/OpenPypeTileAssembler.py @@ -453,7 +453,7 @@ class OpenPypeTileAssembler(DeadlinePlugin): # Swap to have input as foreground args.append("--swap") # Paste foreground to background - args.append("--paste +{}+{}".format(pos_x, pos_y)) + args.append("--paste {x:+d}{y:+d}".format(x=pos_x, y=pos_y)) args.append("-o") args.append(output_path) From 1da0f46930dacb69ff13cb0984ebfb2b341c6ceb Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 31 Aug 2022 10:58:41 +0200 Subject: [PATCH 087/169] Fix docstring typos --- .../modules/deadline/plugins/publish/submit_maya_deadline.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index ac9d5a3d79..e77c86ec43 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -103,13 +103,13 @@ def _format_tiles( filename (str): Filename to process as tiles. index (int): Index of that file if it is sequence. tiles_x (int): Number of tiles in X. - tiles_y (int): Number if tikes in Y. + tiles_y (int): Number of tiles in Y. width (int): Width resolution of final image. height (int): Height resolution of final image. prefix (str): Image prefix. Returns: - (dict, dict): Tuple of two dictionaires - first can be used to + (dict, dict): Tuple of two dictionaries - first can be used to extend JobInfo, second has tiles x, y, width and height used for assembler configuration. 
From b61688828e4e65433e7d7aabb68883601b62244a Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 1 Sep 2022 12:11:07 +0200 Subject: [PATCH 088/169] Use new import source of Extractor --- .../hosts/maya/plugins/publish/extract_ass.py | 6 ++--- .../maya/plugins/publish/extract_assembly.py | 7 +++-- .../maya/plugins/publish/extract_assproxy.py | 6 ++--- .../plugins/publish/extract_camera_alembic.py | 4 +-- .../publish/extract_camera_mayaScene.py | 4 +-- .../hosts/maya/plugins/publish/extract_fbx.py | 6 ++--- .../maya/plugins/publish/extract_layout.py | 27 ++++++++++--------- .../maya/plugins/publish/extract_look.py | 8 +++--- .../plugins/publish/extract_maya_scene_raw.py | 5 ++-- .../maya/plugins/publish/extract_model.py | 4 +-- .../publish/extract_multiverse_look.py | 4 +-- .../plugins/publish/extract_multiverse_usd.py | 4 +-- .../publish/extract_multiverse_usd_comp.py | 4 +-- .../publish/extract_multiverse_usd_over.py | 4 +-- .../maya/plugins/publish/extract_playblast.py | 6 ++--- .../plugins/publish/extract_pointcache.py | 4 +-- .../plugins/publish/extract_redshift_proxy.py | 4 +-- .../plugins/publish/extract_rendersetup.py | 7 ++--- .../hosts/maya/plugins/publish/extract_rig.py | 4 +-- .../maya/plugins/publish/extract_thumbnail.py | 4 +-- .../publish/extract_unreal_skeletalmesh.py | 5 ++-- .../publish/extract_unreal_staticmesh.py | 5 ++-- .../maya/plugins/publish/extract_vrayproxy.py | 4 +-- .../maya/plugins/publish/extract_vrayscene.py | 4 +-- .../plugins/publish/extract_xgen_cache.py | 4 +-- .../plugins/publish/extract_yeti_cache.py | 4 +-- .../maya/plugins/publish/extract_yeti_rig.py | 4 +-- 27 files changed, 76 insertions(+), 76 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_ass.py b/openpype/hosts/maya/plugins/publish/extract_ass.py index 760f410f91..5c21a4ff08 100644 --- a/openpype/hosts/maya/plugins/publish/extract_ass.py +++ b/openpype/hosts/maya/plugins/publish/extract_ass.py @@ -1,12 +1,12 @@ import os -import openpype.api - from maya import cmds + +from openpype.pipeline import publish from openpype.hosts.maya.api.lib import maintained_selection -class ExtractAssStandin(openpype.api.Extractor): +class ExtractAssStandin(publish.Extractor): """Extract the content of the instance to a ass file Things to pay attention to: diff --git a/openpype/hosts/maya/plugins/publish/extract_assembly.py b/openpype/hosts/maya/plugins/publish/extract_assembly.py index 482930b76e..466fe962ab 100644 --- a/openpype/hosts/maya/plugins/publish/extract_assembly.py +++ b/openpype/hosts/maya/plugins/publish/extract_assembly.py @@ -1,14 +1,13 @@ +import os import json -import os - -import openpype.api +from openpype.pipeline import publish from openpype.hosts.maya.api.lib import extract_alembic from maya import cmds -class ExtractAssembly(openpype.api.Extractor): +class ExtractAssembly(publish.Extractor): """Produce an alembic of just point positions and normals. 
Positions and normals are preserved, but nothing more, diff --git a/openpype/hosts/maya/plugins/publish/extract_assproxy.py b/openpype/hosts/maya/plugins/publish/extract_assproxy.py index 93720dbb82..4937a28a9e 100644 --- a/openpype/hosts/maya/plugins/publish/extract_assproxy.py +++ b/openpype/hosts/maya/plugins/publish/extract_assproxy.py @@ -3,17 +3,17 @@ import contextlib from maya import cmds -import openpype.api +from openpype.pipeline import publish from openpype.hosts.maya.api.lib import maintained_selection -class ExtractAssProxy(openpype.api.Extractor): +class ExtractAssProxy(publish.Extractor): """Extract proxy model as Maya Ascii to use as arnold standin """ - order = openpype.api.Extractor.order + 0.2 + order = publish.Extractor.order + 0.2 label = "Ass Proxy (Maya ASCII)" hosts = ["maya"] families = ["ass"] diff --git a/openpype/hosts/maya/plugins/publish/extract_camera_alembic.py b/openpype/hosts/maya/plugins/publish/extract_camera_alembic.py index b744bfd0fe..aa445a0387 100644 --- a/openpype/hosts/maya/plugins/publish/extract_camera_alembic.py +++ b/openpype/hosts/maya/plugins/publish/extract_camera_alembic.py @@ -2,11 +2,11 @@ import os from maya import cmds -import openpype.api +from openpype.pipeline import publish from openpype.hosts.maya.api import lib -class ExtractCameraAlembic(openpype.api.Extractor): +class ExtractCameraAlembic(publish.Extractor): """Extract a Camera as Alembic. The cameras gets baked to world space by default. Only when the instance's diff --git a/openpype/hosts/maya/plugins/publish/extract_camera_mayaScene.py b/openpype/hosts/maya/plugins/publish/extract_camera_mayaScene.py index 8d6c4b5f3c..7467fa027d 100644 --- a/openpype/hosts/maya/plugins/publish/extract_camera_mayaScene.py +++ b/openpype/hosts/maya/plugins/publish/extract_camera_mayaScene.py @@ -5,7 +5,7 @@ import itertools from maya import cmds -import openpype.api +from openpype.pipeline import publish from openpype.hosts.maya.api import lib @@ -78,7 +78,7 @@ def unlock(plug): cmds.disconnectAttr(source, destination) -class ExtractCameraMayaScene(openpype.api.Extractor): +class ExtractCameraMayaScene(publish.Extractor): """Extract a Camera as Maya Scene. This will create a duplicate of the camera that will be baked *with* diff --git a/openpype/hosts/maya/plugins/publish/extract_fbx.py b/openpype/hosts/maya/plugins/publish/extract_fbx.py index fbbe8e06b0..9af3acef65 100644 --- a/openpype/hosts/maya/plugins/publish/extract_fbx.py +++ b/openpype/hosts/maya/plugins/publish/extract_fbx.py @@ -4,13 +4,13 @@ import os from maya import cmds # noqa import maya.mel as mel # noqa import pyblish.api -import openpype.api -from openpype.hosts.maya.api.lib import maintained_selection +from openpype.pipeline import publish +from openpype.hosts.maya.api.lib import maintained_selection from openpype.hosts.maya.api import fbx -class ExtractFBX(openpype.api.Extractor): +class ExtractFBX(publish.Extractor): """Extract FBX from Maya. 
This extracts reproducible FBX exports ignoring any of the diff --git a/openpype/hosts/maya/plugins/publish/extract_layout.py b/openpype/hosts/maya/plugins/publish/extract_layout.py index 991217684a..0f499b09b1 100644 --- a/openpype/hosts/maya/plugins/publish/extract_layout.py +++ b/openpype/hosts/maya/plugins/publish/extract_layout.py @@ -5,13 +5,11 @@ import json from maya import cmds from maya.api import OpenMaya as om -from bson.objectid import ObjectId - -from openpype.pipeline import legacy_io -import openpype.api +from openpype.client import get_representation_by_id +from openpype.pipeline import legacy_io, publish -class ExtractLayout(openpype.api.Extractor): +class ExtractLayout(publish.Extractor): """Extract a layout.""" label = "Extract Layout" @@ -30,6 +28,8 @@ class ExtractLayout(openpype.api.Extractor): instance.data["representations"] = [] json_data = [] + # TODO representation queries can be refactored to be faster + project_name = legacy_io.active_project() for asset in cmds.sets(str(instance), query=True): # Find the container @@ -43,11 +43,11 @@ class ExtractLayout(openpype.api.Extractor): representation_id = cmds.getAttr(f"{container}.representation") - representation = legacy_io.find_one( - { - "type": "representation", - "_id": ObjectId(representation_id) - }, projection={"parent": True, "context.family": True}) + representation = get_representation_by_id( + project_name, + representation_id, + fields=["parent", "context.family"] + ) self.log.info(representation) @@ -102,9 +102,10 @@ class ExtractLayout(openpype.api.Extractor): for i in range(0, len(t_matrix_list), row_length): t_matrix.append(t_matrix_list[i:i + row_length]) - json_element["transform_matrix"] = [] - for row in t_matrix: - json_element["transform_matrix"].append(list(row)) + json_element["transform_matrix"] = [ + list(row) + for row in t_matrix + ] basis_list = [ 1, 0, 0, 0, diff --git a/openpype/hosts/maya/plugins/publish/extract_look.py b/openpype/hosts/maya/plugins/publish/extract_look.py index ce3b265566..91b0da75c6 100644 --- a/openpype/hosts/maya/plugins/publish/extract_look.py +++ b/openpype/hosts/maya/plugins/publish/extract_look.py @@ -13,8 +13,8 @@ from maya import cmds # noqa import pyblish.api -import openpype.api -from openpype.pipeline import legacy_io +from openpype.lib import source_hash +from openpype.pipeline import legacy_io, publish from openpype.hosts.maya.api import lib # Modes for transfer @@ -161,7 +161,7 @@ def no_workspace_dir(): os.rmdir(fake_workspace_dir) -class ExtractLook(openpype.api.Extractor): +class ExtractLook(publish.Extractor): """Extract Look (Maya Scene + JSON) Only extracts the sets (shadingEngines and alike) alongside a .json file @@ -505,7 +505,7 @@ class ExtractLook(openpype.api.Extractor): args = [] if do_maketx: args.append("maketx") - texture_hash = openpype.api.source_hash(filepath, *args) + texture_hash = source_hash(filepath, *args) # If source has been published before with the same settings, # then don't reprocess but hardlink from the original diff --git a/openpype/hosts/maya/plugins/publish/extract_maya_scene_raw.py b/openpype/hosts/maya/plugins/publish/extract_maya_scene_raw.py index 3a47cdadb5..3769ec3605 100644 --- a/openpype/hosts/maya/plugins/publish/extract_maya_scene_raw.py +++ b/openpype/hosts/maya/plugins/publish/extract_maya_scene_raw.py @@ -4,12 +4,11 @@ import os from maya import cmds -import openpype.api from openpype.hosts.maya.api.lib import maintained_selection -from openpype.pipeline import AVALON_CONTAINER_ID +from 
openpype.pipeline import AVALON_CONTAINER_ID, publish -class ExtractMayaSceneRaw(openpype.api.Extractor): +class ExtractMayaSceneRaw(publish.Extractor): """Extract as Maya Scene (raw). This will preserve all references, construction history, etc. diff --git a/openpype/hosts/maya/plugins/publish/extract_model.py b/openpype/hosts/maya/plugins/publish/extract_model.py index 0282d1e9c8..7c8c3a2981 100644 --- a/openpype/hosts/maya/plugins/publish/extract_model.py +++ b/openpype/hosts/maya/plugins/publish/extract_model.py @@ -4,11 +4,11 @@ import os from maya import cmds -import openpype.api +from openpype.pipeline import publish from openpype.hosts.maya.api import lib -class ExtractModel(openpype.api.Extractor): +class ExtractModel(publish.Extractor): """Extract as Model (Maya Scene). Only extracts contents based on the original "setMembers" data to ensure diff --git a/openpype/hosts/maya/plugins/publish/extract_multiverse_look.py b/openpype/hosts/maya/plugins/publish/extract_multiverse_look.py index 82e2b41929..92137acb95 100644 --- a/openpype/hosts/maya/plugins/publish/extract_multiverse_look.py +++ b/openpype/hosts/maya/plugins/publish/extract_multiverse_look.py @@ -2,11 +2,11 @@ import os from maya import cmds -import openpype.api +from openpype.pipeline import publish from openpype.hosts.maya.api.lib import maintained_selection -class ExtractMultiverseLook(openpype.api.Extractor): +class ExtractMultiverseLook(publish.Extractor): """Extractor for Multiverse USD look data. This will extract: diff --git a/openpype/hosts/maya/plugins/publish/extract_multiverse_usd.py b/openpype/hosts/maya/plugins/publish/extract_multiverse_usd.py index 3654be7b34..6c352bebe6 100644 --- a/openpype/hosts/maya/plugins/publish/extract_multiverse_usd.py +++ b/openpype/hosts/maya/plugins/publish/extract_multiverse_usd.py @@ -3,11 +3,11 @@ import six from maya import cmds -import openpype.api +from openpype.pipeline import publish from openpype.hosts.maya.api.lib import maintained_selection -class ExtractMultiverseUsd(openpype.api.Extractor): +class ExtractMultiverseUsd(publish.Extractor): """Extractor for Multiverse USD Asset data. This will extract settings for a Multiverse Write Asset operation: diff --git a/openpype/hosts/maya/plugins/publish/extract_multiverse_usd_comp.py b/openpype/hosts/maya/plugins/publish/extract_multiverse_usd_comp.py index ad9303657f..a62729c198 100644 --- a/openpype/hosts/maya/plugins/publish/extract_multiverse_usd_comp.py +++ b/openpype/hosts/maya/plugins/publish/extract_multiverse_usd_comp.py @@ -2,11 +2,11 @@ import os from maya import cmds -import openpype.api +from openpype.pipeline import publish from openpype.hosts.maya.api.lib import maintained_selection -class ExtractMultiverseUsdComposition(openpype.api.Extractor): +class ExtractMultiverseUsdComposition(publish.Extractor): """Extractor of Multiverse USD Composition data. 
This will extract settings for a Multiverse Write Composition operation: diff --git a/openpype/hosts/maya/plugins/publish/extract_multiverse_usd_over.py b/openpype/hosts/maya/plugins/publish/extract_multiverse_usd_over.py index d44e3878b8..0628623e88 100644 --- a/openpype/hosts/maya/plugins/publish/extract_multiverse_usd_over.py +++ b/openpype/hosts/maya/plugins/publish/extract_multiverse_usd_over.py @@ -1,12 +1,12 @@ import os -import openpype.api +from openpype.pipeline import publish from openpype.hosts.maya.api.lib import maintained_selection from maya import cmds -class ExtractMultiverseUsdOverride(openpype.api.Extractor): +class ExtractMultiverseUsdOverride(publish.Extractor): """Extractor for Multiverse USD Override data. This will extract settings for a Multiverse Write Override operation: diff --git a/openpype/hosts/maya/plugins/publish/extract_playblast.py b/openpype/hosts/maya/plugins/publish/extract_playblast.py index 871adda0c3..81fdba2f98 100644 --- a/openpype/hosts/maya/plugins/publish/extract_playblast.py +++ b/openpype/hosts/maya/plugins/publish/extract_playblast.py @@ -1,18 +1,16 @@ import os -import glob -import contextlib import clique import capture +from openpype.pipeline import publish from openpype.hosts.maya.api import lib -import openpype.api from maya import cmds import pymel.core as pm -class ExtractPlayblast(openpype.api.Extractor): +class ExtractPlayblast(publish.Extractor): """Extract viewport playblast. Takes review camera and creates review Quicktime video based on viewport diff --git a/openpype/hosts/maya/plugins/publish/extract_pointcache.py b/openpype/hosts/maya/plugins/publish/extract_pointcache.py index bf6feecef3..7c1c6d5c12 100644 --- a/openpype/hosts/maya/plugins/publish/extract_pointcache.py +++ b/openpype/hosts/maya/plugins/publish/extract_pointcache.py @@ -2,7 +2,7 @@ import os from maya import cmds -import openpype.api +from openpype.pipeline import publish from openpype.hosts.maya.api.lib import ( extract_alembic, suspended_refresh, @@ -11,7 +11,7 @@ from openpype.hosts.maya.api.lib import ( ) -class ExtractAlembic(openpype.api.Extractor): +class ExtractAlembic(publish.Extractor): """Produce an alembic of just point positions and normals. 
Positions and normals, uvs, creases are preserved, but nothing more, diff --git a/openpype/hosts/maya/plugins/publish/extract_redshift_proxy.py b/openpype/hosts/maya/plugins/publish/extract_redshift_proxy.py index 23cac9190d..4377275635 100644 --- a/openpype/hosts/maya/plugins/publish/extract_redshift_proxy.py +++ b/openpype/hosts/maya/plugins/publish/extract_redshift_proxy.py @@ -4,11 +4,11 @@ import os from maya import cmds -import openpype.api +from openpype.pipeline import publish from openpype.hosts.maya.api.lib import maintained_selection -class ExtractRedshiftProxy(openpype.api.Extractor): +class ExtractRedshiftProxy(publish.Extractor): """Extract the content of the instance to a redshift proxy file.""" label = "Redshift Proxy (.rs)" diff --git a/openpype/hosts/maya/plugins/publish/extract_rendersetup.py b/openpype/hosts/maya/plugins/publish/extract_rendersetup.py index 6bdd5f590e..5970c038a4 100644 --- a/openpype/hosts/maya/plugins/publish/extract_rendersetup.py +++ b/openpype/hosts/maya/plugins/publish/extract_rendersetup.py @@ -1,10 +1,11 @@ -import json import os -import openpype.api +import json + import maya.app.renderSetup.model.renderSetup as renderSetup +from openpype.pipeline import publish -class ExtractRenderSetup(openpype.api.Extractor): +class ExtractRenderSetup(publish.Extractor): """ Produce renderSetup template file diff --git a/openpype/hosts/maya/plugins/publish/extract_rig.py b/openpype/hosts/maya/plugins/publish/extract_rig.py index 53c1eeb671..c71a2f710d 100644 --- a/openpype/hosts/maya/plugins/publish/extract_rig.py +++ b/openpype/hosts/maya/plugins/publish/extract_rig.py @@ -4,11 +4,11 @@ import os from maya import cmds -import openpype.api +from openpype.pipeline import publish from openpype.hosts.maya.api.lib import maintained_selection -class ExtractRig(openpype.api.Extractor): +class ExtractRig(publish.Extractor): """Extract rig as Maya Scene.""" label = "Extract Rig (Maya Scene)" diff --git a/openpype/hosts/maya/plugins/publish/extract_thumbnail.py b/openpype/hosts/maya/plugins/publish/extract_thumbnail.py index 9380da5128..854301ea48 100644 --- a/openpype/hosts/maya/plugins/publish/extract_thumbnail.py +++ b/openpype/hosts/maya/plugins/publish/extract_thumbnail.py @@ -3,14 +3,14 @@ import glob import capture +from openpype.pipeline import publish from openpype.hosts.maya.api import lib -import openpype.api from maya import cmds import pymel.core as pm -class ExtractThumbnail(openpype.api.Extractor): +class ExtractThumbnail(publish.Extractor): """Extract viewport thumbnail. Takes review camera and creates a thumbnail based on viewport diff --git a/openpype/hosts/maya/plugins/publish/extract_unreal_skeletalmesh.py b/openpype/hosts/maya/plugins/publish/extract_unreal_skeletalmesh.py index 7ef7f2f181..258120db2f 100644 --- a/openpype/hosts/maya/plugins/publish/extract_unreal_skeletalmesh.py +++ b/openpype/hosts/maya/plugins/publish/extract_unreal_skeletalmesh.py @@ -6,7 +6,8 @@ from contextlib import contextmanager from maya import cmds # noqa import pyblish.api -import openpype.api + +from openpype.pipeline import publish from openpype.hosts.maya.api import fbx @@ -20,7 +21,7 @@ def renamed(original_name, renamed_name): cmds.rename(renamed_name, original_name) -class ExtractUnrealSkeletalMesh(openpype.api.Extractor): +class ExtractUnrealSkeletalMesh(publish.Extractor): """Extract Unreal Skeletal Mesh as FBX from Maya. 
""" order = pyblish.api.ExtractorOrder - 0.1 diff --git a/openpype/hosts/maya/plugins/publish/extract_unreal_staticmesh.py b/openpype/hosts/maya/plugins/publish/extract_unreal_staticmesh.py index 69d51f9ff1..44f0615a27 100644 --- a/openpype/hosts/maya/plugins/publish/extract_unreal_staticmesh.py +++ b/openpype/hosts/maya/plugins/publish/extract_unreal_staticmesh.py @@ -5,7 +5,8 @@ import os from maya import cmds # noqa import pyblish.api -import openpype.api + +from openpype.pipeline import publish from openpype.hosts.maya.api.lib import ( parent_nodes, maintained_selection @@ -13,7 +14,7 @@ from openpype.hosts.maya.api.lib import ( from openpype.hosts.maya.api import fbx -class ExtractUnrealStaticMesh(openpype.api.Extractor): +class ExtractUnrealStaticMesh(publish.Extractor): """Extract Unreal Static Mesh as FBX from Maya. """ order = pyblish.api.ExtractorOrder - 0.1 diff --git a/openpype/hosts/maya/plugins/publish/extract_vrayproxy.py b/openpype/hosts/maya/plugins/publish/extract_vrayproxy.py index 562ca078e1..38bf02245a 100644 --- a/openpype/hosts/maya/plugins/publish/extract_vrayproxy.py +++ b/openpype/hosts/maya/plugins/publish/extract_vrayproxy.py @@ -2,11 +2,11 @@ import os from maya import cmds -import openpype.api +from openpype.pipeline import publish from openpype.hosts.maya.api.lib import maintained_selection -class ExtractVRayProxy(openpype.api.Extractor): +class ExtractVRayProxy(publish.Extractor): """Extract the content of the instance to a vrmesh file Things to pay attention to: diff --git a/openpype/hosts/maya/plugins/publish/extract_vrayscene.py b/openpype/hosts/maya/plugins/publish/extract_vrayscene.py index 5d41697e5f..8442df1611 100644 --- a/openpype/hosts/maya/plugins/publish/extract_vrayscene.py +++ b/openpype/hosts/maya/plugins/publish/extract_vrayscene.py @@ -3,14 +3,14 @@ import os import re -import openpype.api +from openpype.pipeline import publish from openpype.hosts.maya.api.render_setup_tools import export_in_rs_layer from openpype.hosts.maya.api.lib import maintained_selection from maya import cmds -class ExtractVrayscene(openpype.api.Extractor): +class ExtractVrayscene(publish.Extractor): """Extractor for vrscene.""" label = "VRay Scene (.vrscene)" diff --git a/openpype/hosts/maya/plugins/publish/extract_xgen_cache.py b/openpype/hosts/maya/plugins/publish/extract_xgen_cache.py index 5728682abe..77350f343e 100644 --- a/openpype/hosts/maya/plugins/publish/extract_xgen_cache.py +++ b/openpype/hosts/maya/plugins/publish/extract_xgen_cache.py @@ -2,14 +2,14 @@ import os from maya import cmds -import openpype.api +from openpype.pipeline import publish from openpype.hosts.maya.api.lib import ( suspended_refresh, maintained_selection ) -class ExtractXgenCache(openpype.api.Extractor): +class ExtractXgenCache(publish.Extractor): """Produce an alembic of just xgen interactive groom """ diff --git a/openpype/hosts/maya/plugins/publish/extract_yeti_cache.py b/openpype/hosts/maya/plugins/publish/extract_yeti_cache.py index cf6db00e9a..b61f599cab 100644 --- a/openpype/hosts/maya/plugins/publish/extract_yeti_cache.py +++ b/openpype/hosts/maya/plugins/publish/extract_yeti_cache.py @@ -3,10 +3,10 @@ import json from maya import cmds -import openpype.api +from openpype.pipeline import publish -class ExtractYetiCache(openpype.api.Extractor): +class ExtractYetiCache(publish.Extractor): """Producing Yeti cache files using scene time range. This will extract Yeti cache file sequence and fur settings. 
diff --git a/openpype/hosts/maya/plugins/publish/extract_yeti_rig.py b/openpype/hosts/maya/plugins/publish/extract_yeti_rig.py index 6e21bffa4e..1d0c5e88c3 100644 --- a/openpype/hosts/maya/plugins/publish/extract_yeti_rig.py +++ b/openpype/hosts/maya/plugins/publish/extract_yeti_rig.py @@ -7,7 +7,7 @@ import contextlib from maya import cmds -import openpype.api +from openpype.pipeline import publish from openpype.hosts.maya.api import lib @@ -90,7 +90,7 @@ def yetigraph_attribute_values(assumed_destination, resources): pass -class ExtractYetiRig(openpype.api.Extractor): +class ExtractYetiRig(publish.Extractor): """Extract the Yeti rig to a Maya Scene and write the Yeti rig data.""" label = "Extract Yeti Rig" From 4bdd18cb817bbc58c1143e0e02442a9346ce9a1e Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Fri, 2 Sep 2022 11:50:14 +0200 Subject: [PATCH 089/169] Use DeadlineKeyValueVar for EnvironmentKeyValue on Job Info - To improve readability of code that sets the values --- .../deadline/abstract_submit_deadline.py | 58 +++++++++++++++---- .../publish/submit_aftereffects_deadline.py | 11 ++-- .../publish/submit_harmony_deadline.py | 10 ++-- .../plugins/publish/submit_maya_deadline.py | 7 +-- 4 files changed, 59 insertions(+), 27 deletions(-) diff --git a/openpype/modules/deadline/abstract_submit_deadline.py b/openpype/modules/deadline/abstract_submit_deadline.py index 35b114da95..beb1cd0fae 100644 --- a/openpype/modules/deadline/abstract_submit_deadline.py +++ b/openpype/modules/deadline/abstract_submit_deadline.py @@ -67,6 +67,43 @@ def requests_get(*args, **kwargs): return requests.get(*args, **kwargs) +class DeadlineKeyValueVar(dict): + """ + + Serializes dictionary key values as "{key}={value}" like Deadline uses + for EnvironmentKeyValue. + + As an example: + EnvironmentKeyValue0="A_KEY=VALUE_A" + EnvironmentKeyValue1="OTHER_KEY=VALUE_B" + + The keys are serialized in alphabetical order (sorted). 
+ + Example: + >>> var = DeadlineKeyValueVar("EnvironmentKeyValue") + >>> var["my_var"] = "hello" + >>> var["my_other_var"] = "hello2" + >>> var.serialize() + + + """ + def __init__(self, key): + super(DeadlineKeyValueVar, self).__init__() + self.__key = key + + def serialize(self): + key = self.__key + + # Allow custom location for index in serialized string + if "{}" not in key: + key = key + "{}" + + return { + key.format(index): "{}={}".format(var_key, var_value) + for index, (var_key, var_value) in enumerate(sorted(self.items())) + } + + class DeadlineIndexedVar(dict): """ @@ -80,15 +117,9 @@ class DeadlineIndexedVar(dict): """ def __init__(self, key): + super(DeadlineIndexedVar, self).__init__() self.__key = key - def next_available_index(self): - # Add as first unused entry - i = 0 - while i in self.keys(): - i += 1 - return i - def serialize(self): key = self.__key @@ -100,6 +131,13 @@ class DeadlineIndexedVar(dict): key.format(index): value for index, value in sorted(self.items()) } + def next_available_index(self): + # Add as first unused entry + i = 0 + while i in self.keys(): + i += 1 + return i + def update(self, data): # Force the integer key check for key, value in data.items(): @@ -271,7 +309,7 @@ class DeadlineJobInfo(object): # Environment # ---------------------------------------------- - EnvironmentKeyValue = attr.ib(factory=partial(DeadlineIndexedVar, + EnvironmentKeyValue = attr.ib(factory=partial(DeadlineKeyValueVar, "EnvironmentKeyValue")) IncludeEnvironment = attr.ib(default=None) # Default: false @@ -281,7 +319,7 @@ class DeadlineJobInfo(object): # Job Extra Info # ---------------------------------------------- ExtraInfo = attr.ib(factory=partial(DeadlineIndexedVar, "ExtraInfo")) - ExtraInfoKeyValue = attr.ib(factory=partial(DeadlineIndexedVar, + ExtraInfoKeyValue = attr.ib(factory=partial(DeadlineKeyValueVar, "ExtraInfoKeyValue")) # Task Extra Info Names @@ -326,7 +364,7 @@ class DeadlineJobInfo(object): """ def filter_data(a, v): - if isinstance(v, DeadlineIndexedVar): + if isinstance(v, (DeadlineIndexedVar, DeadlineKeyValueVar)): return False if v is None: return False diff --git a/openpype/modules/deadline/plugins/publish/submit_aftereffects_deadline.py b/openpype/modules/deadline/plugins/publish/submit_aftereffects_deadline.py index 1d68793d53..55acd92043 100644 --- a/openpype/modules/deadline/plugins/publish/submit_aftereffects_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_aftereffects_deadline.py @@ -92,13 +92,12 @@ class AfterEffectsSubmitDeadline( environment = dict({key: os.environ[key] for key in keys if key in os.environ}, **legacy_io.Session) for key in keys: - val = environment.get(key) - if val: - dln_job_info.EnvironmentKeyValue = "{key}={value}".format( - key=key, - value=val) + value = environment.get(key) + if value: + dln_job_info.EnvironmentKeyValue[key] = value + # to recognize job from PYPE for turning Event On/Off - dln_job_info.EnvironmentKeyValue = "OPENPYPE_RENDER_JOB=1" + dln_job_info.EnvironmentKeyValue["OPENPYPE_RENDER_JOB"] = "1" return dln_job_info diff --git a/openpype/modules/deadline/plugins/publish/submit_harmony_deadline.py b/openpype/modules/deadline/plugins/publish/submit_harmony_deadline.py index 3f9c09b592..6327143623 100644 --- a/openpype/modules/deadline/plugins/publish/submit_harmony_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_harmony_deadline.py @@ -284,14 +284,12 @@ class HarmonySubmitDeadline( environment = dict({key: os.environ[key] for key in keys if key in os.environ}, 
**legacy_io.Session) for key in keys: - val = environment.get(key) - if val: - job_info.EnvironmentKeyValue = "{key}={value}".format( - key=key, - value=val) + value = environment.get(key) + if value: + job_info.EnvironmentKeyValue[key] = value # to recognize job from PYPE for turning Event On/Off - job_info.EnvironmentKeyValue = "OPENPYPE_RENDER_JOB=1" + job_info.EnvironmentKeyValue["OPENPYPE_RENDER_JOB"] = "1" return job_info diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index 9692b136e9..ad46feea03 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -137,8 +137,7 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): for key, value in environment.items(): if not value: continue - job_info.EnvironmentKeyValue = "{key}={value}".format(key=key, - value=value) + job_info.EnvironmentKeyValue[key] = value # Adding file dependencies. if self.asset_dependencies: @@ -538,9 +537,7 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): for key, value in envs.items(): if not value: continue - - job_info.EnvironmentKeyValue = "{key}={value}".format(key=key, - value=value) + job_info.EnvironmentKeyValue[key] = value plugin_info.update({ "Version": "3.6", From 1e87c9d6d2c7338f8e53e8a06d9f1983056797b6 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Fri, 2 Sep 2022 12:02:39 +0200 Subject: [PATCH 090/169] Use DeadlineIndexedVar `__iadd__` functionality --- .../plugins/publish/submit_aftereffects_deadline.py | 4 ++-- .../deadline/plugins/publish/submit_maya_deadline.py | 8 ++++---- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_aftereffects_deadline.py b/openpype/modules/deadline/plugins/publish/submit_aftereffects_deadline.py index 55acd92043..0c1ffa6bd7 100644 --- a/openpype/modules/deadline/plugins/publish/submit_aftereffects_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_aftereffects_deadline.py @@ -67,9 +67,9 @@ class AfterEffectsSubmitDeadline( dln_job_info.Group = self.group dln_job_info.Department = self.department dln_job_info.ChunkSize = self.chunk_size - dln_job_info.OutputFilename = \ + dln_job_info.OutputFilename += \ os.path.basename(self._instance.data["expectedFiles"][0]) - dln_job_info.OutputDirectory = \ + dln_job_info.OutputDirectory += \ os.path.dirname(self._instance.data["expectedFiles"][0]) dln_job_info.JobDelay = "00:00:00" diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index ad46feea03..6b08f9894d 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -144,14 +144,14 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): dependencies = instance.context.data["fileDependencies"] dependencies.append(context.data["currentFile"]) for dependency in dependencies: - job_info.AssetDependency = dependency + job_info.AssetDependency += dependency # Add list of expected files to job # --------------------------------- exp = instance.data.get("expectedFiles") for filepath in self._iter_expected_files(exp): - job_info.OutputDirectory = os.path.dirname(filepath) - job_info.OutputFilename = os.path.basename(filepath) + job_info.OutputDirectory += 
os.path.dirname(filepath) + job_info.OutputFilename += os.path.basename(filepath) return job_info @@ -443,7 +443,7 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): if self.asset_dependencies: # Asset dependency to wait for at least the scene file to sync. - job_info.AssetDependency = self.scene_path + job_info.AssetDependency += self.scene_path # Get layer prefix render_products = self._instance.data["renderProducts"] From 2c01cb806d68aa04a733e16d4cfd1abb15f438fe Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Fri, 2 Sep 2022 12:03:34 +0200 Subject: [PATCH 091/169] Remove backwards compatibility for append functionality in old style vars --- openpype/modules/deadline/abstract_submit_deadline.py | 10 ---------- 1 file changed, 10 deletions(-) diff --git a/openpype/modules/deadline/abstract_submit_deadline.py b/openpype/modules/deadline/abstract_submit_deadline.py index beb1cd0fae..f698b7688e 100644 --- a/openpype/modules/deadline/abstract_submit_deadline.py +++ b/openpype/modules/deadline/abstract_submit_deadline.py @@ -393,16 +393,6 @@ class DeadlineJobInfo(object): for key, value in data.items(): setattr(self, key, value) - def __setattr__(self, key, value): - # Backwards compatibility: Allow appending to index vars by setting - # it on Job Info directly like: JobInfo.OutputFilename = filename - existing = getattr(self, key, None) - if isinstance(existing, DeadlineIndexedVar): - existing += value - return - - object.__setattr__(self, key, value) - @six.add_metaclass(AbstractMetaInstancePlugin) class AbstractSubmitDeadline(pyblish.api.InstancePlugin): From 47164b36effa0f4986ccf15aa3a4967ccb014e26 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Fri, 2 Sep 2022 12:11:35 +0200 Subject: [PATCH 092/169] Be more explicit about what keys to include from Session This way it matches more with logic of other host submitters (e.g. AfterEffects + Harmony) --- .../deadline/plugins/publish/submit_maya_deadline.py | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index 6b08f9894d..bb48fe6902 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -130,15 +130,16 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): environment = dict({key: os.environ[key] for key in keys if key in os.environ}, **legacy_io.Session) - # to recognize job from PYPE for turning Event On/Off - environment["OPENPYPE_RENDER_JOB"] = "1" - environment["OPENPYPE_LOG_NO_COLORS"] = "1" - - for key, value in environment.items(): + for key in keys: + value = environment.get(key) if not value: continue job_info.EnvironmentKeyValue[key] = value + # to recognize job from PYPE for turning Event On/Off + job_info.EnvironmentKeyValue["OPENPYPE_RENDER_JOB"] = "1" + job_info.EnvironmentKeyValue["OPENPYPE_LOG_NO_COLORS"] = "1" + # Adding file dependencies. 
if self.asset_dependencies: dependencies = instance.context.data["fileDependencies"] From 88e4798b535c47cefce3dc2a1ed9aacf60dd0f68 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Fri, 2 Sep 2022 19:50:56 +0200 Subject: [PATCH 093/169] Remove old type hint --- .../modules/deadline/plugins/publish/submit_maya_deadline.py | 1 - 1 file changed, 1 deletion(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index bb48fe6902..68d55fef5d 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -641,7 +641,6 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): return result def _patch_workfile(self): - # type: (str, dict) -> [str, None] """Patch Maya scene. This will take list of patches (lines to add) and apply them to From 5645bcb353b13b1711ba67e0a3b394b273e7cef3 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Sat, 3 Sep 2022 13:17:10 +0200 Subject: [PATCH 094/169] Use custom plugin info per type of plugin submission --- .../plugins/publish/submit_maya_deadline.py | 127 +++++++++--------- 1 file changed, 66 insertions(+), 61 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index 68d55fef5d..2a41d92efd 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -49,6 +49,30 @@ class MayaPluginInfo: RenderSetupIncludeLights = attr.ib(default=None) # Include all lights flag +@attr.s +class PythonPluginInfo: + ScriptFile = attr.ib() + Version = attr.ib(default="3.6") + Arguments = attr.ib(default=None) + SingleFrameOnly = attr.ib(default=None) + + +@attr.s +class VRayPluginInfo: + InputFilename = attr.ib(default=None) # Input + SeparateFilesPerFrame = attr.ib(default=None) + VRayEngine = attr.ib(default="V-Ray") + Width = attr.ib(default=None) + Height = attr.ib(default=None) # Mandatory for Deadline + OutputFilePath = attr.ib(default=True) + OutputFileName = attr.ib(default=None) # Render only this layer + + +@attr.s +class ArnoldPluginInfo: + ArnoldFile = attr.ib(default=None) + + class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): label = "Submit Render to Deadline" @@ -479,26 +503,19 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): def _get_vray_export_payload(self, data): job_info = copy.deepcopy(self.job_info) - job_info.Name = self._job_info_label("Export") # Get V-Ray settings info to compute output path - vray_settings = cmds.ls(type="VRaySettingsNode") - node = vray_settings[0] - template = cmds.getAttr("{}.vrscene_filename".format(node)) - scene, _ = os.path.splitext(data["filename"]) - first_file = self.format_vray_output_filename(scene, template) - first_file = "{}/{}".format(data["workspace"], first_file) - output = os.path.dirname(first_file) + vray_scene = self.format_vray_output_filename() plugin_info = { "Renderer": "vray", "SkipExistingFrames": True, "UseLegacyRenderLayers": True, - "OutputFilePath": output + "OutputFilePath": os.path.dirname(vray_scene) } - return job_info, plugin_info + return job_info, attr.asdict(plugin_info) def _get_arnold_export_payload(self, data): @@ -515,8 +532,6 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): script = os.path.normpath(module_path) 
job_info = copy.deepcopy(self.job_info) - plugin_info = copy.deepcopy(self.plugin_info) - job_info.Name = self._job_info_label("Export") # Force a single frame Python job @@ -540,14 +555,14 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): continue job_info.EnvironmentKeyValue[key] = value - plugin_info.update({ - "Version": "3.6", - "ScriptFile": script, - "Arguments": "", - "SingleFrameOnly": "True", - }) + plugin_info = PythonPluginInfo( + ScriptFile=script, + Version="3.6", + Arguments="", + SingleFrameOnly="True" + ) - return job_info, plugin_info + return job_info, attr.asdict(plugin_info) def _get_vray_render_payload(self, data): @@ -558,27 +573,17 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): job_info.OverrideTaskExtraInfoNames = False # Plugin Info - vray_settings = cmds.ls(type="VRaySettingsNode") - node = vray_settings[0] - template = cmds.getAttr("{}.vrscene_filename".format(node)) - # "vrayscene//_/" + plugin_info = VRayPluginInfo( + InputFilename=self.format_vray_output_filename(), + SeparateFilesPerFrame=False, + VRayEngine="V-Ray", + Width=self._instance.data["resolutionWidth"], + Height=self._instance.data["resolutionHeight"], + OutputFilePath=job_info.OutputDirectory[0], + OutputFileName=job_info.OutputFilename[0] + ) - scene, _ = os.path.splitext(self.scene_path) - first_file = self.format_vray_output_filename(scene, template) - first_file = "{}/{}".format(data["workspace"], first_file) - - plugin_info = { - "InputFilename": first_file, - "SeparateFilesPerFrame": True, - "VRayEngine": "V-Ray", - - "Width": self._instance.data["resolutionWidth"], - "Height": self._instance.data["resolutionHeight"], - "OutputFilePath": job_info.OutputDirectory[0], - "OutputFileName": job_info.OutputFilename[0] - } - - return job_info, plugin_info + return job_info, attr.asdict(plugin_info) def _get_arnold_render_payload(self, data): @@ -590,55 +595,55 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): # Plugin Info ass_file, _ = os.path.splitext(data["output_filename_0"]) - first_file = ass_file + ".ass" - plugin_info = { - "ArnoldFile": first_file, - } + ass_filepath = ass_file + ".ass" - return job_info, plugin_info + plugin_info = ArnoldPluginInfo( + ArnoldFile=ass_filepath + ) - def format_vray_output_filename(self, filename, template, dir=False): + return job_info, attr.asdict(plugin_info) + + def format_vray_output_filename(self): """Format the expected output file of the Export job. 
Example: /_/ - "shot010_v006/shot010_v006_CHARS/CHARS" - - Args: - instance: - filename(str): - dir(bool): - + "shot010_v006/shot010_v006_CHARS/CHARS_0001.vrscene" Returns: str """ + + # "vrayscene//_/" + vray_settings = cmds.ls(type="VRaySettingsNode") + node = vray_settings[0] + template = cmds.getAttr("{}.vrscene_filename".format(node)) + scene, _ = os.path.splitext(self.scene_path) + def smart_replace(string, key_values): new_string = string for key, value in key_values.items(): new_string = new_string.replace(key, value) return new_string - # Ensure filename has no extension - file_name, _ = os.path.splitext(filename) + # Get workfile scene path without extension to format vrscene_filename + scene_filename = os.path.basename(self.scene_path) + scene_filename_no_ext, _ = os.path.splitext(scene_filename) layer = self._instance.data['setMembers'] # Reformat without tokens output_path = smart_replace( template, - {"": file_name, + {"": scene_filename_no_ext, "": layer}) - if dir: - return output_path.replace("\\", "/") - start_frame = int(self._instance.data["frameStartHandle"]) + workspace = self._instance.context.data["workspace"] filename_zero = "{}_{:04d}.vrscene".format(output_path, start_frame) + filepath_zero = os.path.join(workspace, filename_zero) - result = filename_zero.replace("\\", "/") - - return result + return filepath_zero.replace("\\", "/") def _patch_workfile(self): """Patch Maya scene. From 507dac4aa9f50e8978a841067262ce33e77cf5e0 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Sat, 3 Sep 2022 15:06:24 +0200 Subject: [PATCH 095/169] Ensure integer math for _format_tiles See #3758 --- .../plugins/publish/submit_maya_deadline.py | 21 ++++++++++++------- 1 file changed, 14 insertions(+), 7 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index 2a41d92efd..7c486b7c34 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -754,14 +754,21 @@ def _format_tiles( used for assembler configuration. """ - tile = 0 + # Math used requires integers for correct output - as such + # we ensure our inputs are correct. 
+ assert type(tiles_x) is int, "tiles_x must be an integer" + assert type(tiles_y) is int, "tiles_y must be an integer" + assert type(width) is int, "width must be an integer" + assert type(height) is int, "height must be an integer" + out = {"JobInfo": {}, "PluginInfo": {}} cfg = OrderedDict() - w_space = width / tiles_x - h_space = height / tiles_y + w_space = width // tiles_x + h_space = height // tiles_y cfg["TilesCropped"] = "False" + tile = 0 for tile_x in range(1, tiles_x + 1): for tile_y in reversed(range(1, tiles_y + 1)): tile_prefix = "_tile_{}x{}_{}x{}_".format( @@ -769,10 +776,10 @@ def _format_tiles( tiles_x, tiles_y ) - top = int(height - (tile_y * h_space)) - bottom = int(height - ((tile_y - 1) * h_space) - 1) - left = int((tile_x - 1) * w_space) - right = int((tile_x * w_space) - 1) + top = height - (tile_y * h_space) + bottom = height - ((tile_y - 1) * h_space) - 1 + left = (tile_x - 1) * w_space + right = (tile_x * w_space) - 1 # Job Info new_filename = "{}/{}{}".format( From db1fa6d40ef59f9e3061a637a04874d4857a6585 Mon Sep 17 00:00:00 2001 From: Thomas Fricard Date: Mon, 5 Sep 2022 12:49:43 +0200 Subject: [PATCH 096/169] add a python2 compatibility for the FileNotFoundError --- openpype/hosts/houdini/api/shelves.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/openpype/hosts/houdini/api/shelves.py b/openpype/hosts/houdini/api/shelves.py index 805ce4c397..248d99105c 100644 --- a/openpype/hosts/houdini/api/shelves.py +++ b/openpype/hosts/houdini/api/shelves.py @@ -1,6 +1,7 @@ import os import logging import platform +import six from openpype.settings import get_project_settings @@ -8,6 +9,9 @@ import hou log = logging.getLogger("openpype.hosts.houdini.shelves") +if six.PY2: + FileNotFoundError = IOError + def generate_shelves(): """This function generates complete shelves from shelf set to tools From d9a150022e1659aec584fe962f9c47e66bfb178d Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Mon, 5 Sep 2022 19:28:48 +0800 Subject: [PATCH 097/169] adding and loading maya mel workspace through openpype project setting --- openpype/hosts/maya/api/pipeline.py | 4 ++-- openpype/hosts/maya/lib.py | 18 ++++++++++++++++++ .../defaults/project_settings/maya.json | 14 ++++++++++++++ .../projects_schema/schema_project_maya.json | 15 +++++++++++++++ 4 files changed, 49 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/maya/api/pipeline.py b/openpype/hosts/maya/api/pipeline.py index f565f6a308..5bf8b67fc2 100644 --- a/openpype/hosts/maya/api/pipeline.py +++ b/openpype/hosts/maya/api/pipeline.py @@ -28,7 +28,7 @@ from openpype.pipeline import ( AVALON_CONTAINER_ID, ) from openpype.pipeline.load import any_outdated_containers -from openpype.hosts.maya.lib import copy_workspace_mel +from openpype.hosts.maya.lib import copy_workspace_mel,load_workspace_mel from . 
import menu, lib from .workio import ( open_file, @@ -550,7 +550,7 @@ def on_task_changed(): def before_workfile_save(event): workdir_path = event["workdir_path"] if workdir_path: - copy_workspace_mel(workdir_path) + load_workspace_mel(workdir_path) class MayaDirmap(HostDirmap): diff --git a/openpype/hosts/maya/lib.py b/openpype/hosts/maya/lib.py index 6c142053e6..d24f267bbd 100644 --- a/openpype/hosts/maya/lib.py +++ b/openpype/hosts/maya/lib.py @@ -1,6 +1,8 @@ import os import shutil +import json +from openpype.settings import get_current_project_settings def copy_workspace_mel(workdir): # Check that source mel exists @@ -24,3 +26,19 @@ def copy_workspace_mel(workdir): src_filepath, dst_filepath )) shutil.copy(src_filepath, dst_filepath) + + +def load_workspace_mel(workdir): + dst_filepath = os.path.join(workdir, "workspace.mel") + if os.path.exists(dst_filepath): + return + + if not os.path.exists(workdir): + os.makedirs(workdir) + + with open(dst_filepath, "w") as mel_file: + setting = get_current_project_settings() + mel_script = setting["maya"]["mel-workspace"]["scripts"] + for mel in mel_script: + mel_file.write(mel) + mel_file.write("\n") diff --git a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index ac0f161cf2..0a46632042 100644 --- a/openpype/settings/defaults/project_settings/maya.json +++ b/openpype/settings/defaults/project_settings/maya.json @@ -15,6 +15,20 @@ "destination-path": [] } }, + "mel-workspace":{ + "scripts":[ + "workspace -fr \"shaders\" \"renderData/shaders\";", + "workspace -fr \"images\" \"renders\";", + "workspace -fr \"particles\" \"particles\";", + "workspace -fr \"mayaAscii\" \"\";", + "workspace -fr \"mayaBinary\" \"\";", + "workspace -fr \"scene\" \"\";", + "workspace -fr \"alembicCache\" \"cache/alembic\";", + "workspace -fr \"renderData\" \"renderData\";", + "workspace -fr \"sourceImages\" \"sourceimages\";", + "workspace -fr \"fileCache\" \"cache/nCache\";" + ] + }, "scriptsmenu": { "name": "OpenPype Tools", "definition": [ diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_maya.json b/openpype/settings/entities/schemas/projects_schema/schema_project_maya.json index cb380194a7..a774d604ca 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_maya.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_maya.json @@ -53,6 +53,21 @@ } ] }, + { + "type": "dict", + "collapsible": true, + "key": "mel-workspace", + "label": "Maya MEL Workspace", + "is_group": true, + "children": [ + { + "type": "list", + "object_type": "text", + "key": "scripts", + "label": "scripts" + } + ] + }, { "type": "schema", "name": "schema_scriptsmenu" From b88def9aea1fc1a682209ea78edcf5ae87a652e3 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Mon, 5 Sep 2022 19:40:30 +0800 Subject: [PATCH 098/169] adding and loading maya mel workspace through openpype project setting --- openpype/hosts/maya/lib.py | 27 +------------------ .../defaults/project_settings/maya.json | 2 +- .../projects_schema/schema_project_maya.json | 4 +-- 3 files changed, 4 insertions(+), 29 deletions(-) diff --git a/openpype/hosts/maya/lib.py b/openpype/hosts/maya/lib.py index d24f267bbd..bf06c9ad7d 100644 --- a/openpype/hosts/maya/lib.py +++ b/openpype/hosts/maya/lib.py @@ -1,33 +1,8 @@ import os import shutil -import json from openpype.settings import get_current_project_settings -def copy_workspace_mel(workdir): - # Check that source mel exists - current_dir = 
os.path.dirname(os.path.abspath(__file__)) - src_filepath = os.path.join(current_dir, "resources", "workspace.mel") - if not os.path.exists(src_filepath): - print("Source mel file does not exist. {}".format(src_filepath)) - return - - # Skip if workspace.mel already exists - dst_filepath = os.path.join(workdir, "workspace.mel") - if os.path.exists(dst_filepath): - return - - # Create workdir if does not exists yet - if not os.path.exists(workdir): - os.makedirs(workdir) - - # Copy file - print("Copying workspace mel \"{}\" -> \"{}\"".format( - src_filepath, dst_filepath - )) - shutil.copy(src_filepath, dst_filepath) - - def load_workspace_mel(workdir): dst_filepath = os.path.join(workdir, "workspace.mel") if os.path.exists(dst_filepath): @@ -38,7 +13,7 @@ def load_workspace_mel(workdir): with open(dst_filepath, "w") as mel_file: setting = get_current_project_settings() - mel_script = setting["maya"]["mel-workspace"]["scripts"] + mel_script = setting["maya"]["mel-workspace"]["definition"] for mel in mel_script: mel_file.write(mel) mel_file.write("\n") diff --git a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index 0a46632042..162732280f 100644 --- a/openpype/settings/defaults/project_settings/maya.json +++ b/openpype/settings/defaults/project_settings/maya.json @@ -16,7 +16,7 @@ } }, "mel-workspace":{ - "scripts":[ + "definition":[ "workspace -fr \"shaders\" \"renderData/shaders\";", "workspace -fr \"images\" \"renders\";", "workspace -fr \"particles\" \"particles\";", diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_maya.json b/openpype/settings/entities/schemas/projects_schema/schema_project_maya.json index a774d604ca..7204ec586a 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_maya.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_maya.json @@ -63,8 +63,8 @@ { "type": "list", "object_type": "text", - "key": "scripts", - "label": "scripts" + "key": "definition", + "label": "definition" } ] }, From 109abb58987b22f6d390d424a27e209eff6b5638 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Mon, 5 Sep 2022 19:41:09 +0800 Subject: [PATCH 099/169] adding and loading maya mel workspace through openpype project setting --- openpype/hosts/maya/api/pipeline.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/maya/api/pipeline.py b/openpype/hosts/maya/api/pipeline.py index 5bf8b67fc2..4768a9ee4f 100644 --- a/openpype/hosts/maya/api/pipeline.py +++ b/openpype/hosts/maya/api/pipeline.py @@ -28,7 +28,7 @@ from openpype.pipeline import ( AVALON_CONTAINER_ID, ) from openpype.pipeline.load import any_outdated_containers -from openpype.hosts.maya.lib import copy_workspace_mel,load_workspace_mel +from openpype.hosts.maya.lib import load_workspace_mel from . 
import menu, lib from .workio import ( open_file, From 69d2cf20f5b4889ce674487d2da8fd2a230a093a Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Mon, 5 Sep 2022 19:42:10 +0800 Subject: [PATCH 100/169] adding and loading maya mel workspace through openpype project setting --- openpype/hosts/maya/lib.py | 1 + 1 file changed, 1 insertion(+) diff --git a/openpype/hosts/maya/lib.py b/openpype/hosts/maya/lib.py index bf06c9ad7d..2853789656 100644 --- a/openpype/hosts/maya/lib.py +++ b/openpype/hosts/maya/lib.py @@ -3,6 +3,7 @@ import shutil from openpype.settings import get_current_project_settings + def load_workspace_mel(workdir): dst_filepath = os.path.join(workdir, "workspace.mel") if os.path.exists(dst_filepath): From 5908f49b685a981d08d7ea1ff841d567018a7e76 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 6 Sep 2022 10:56:01 +0200 Subject: [PATCH 101/169] updating README file --- README.md | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index b8c04f8b49..a2f442b640 100644 --- a/README.md +++ b/README.md @@ -41,7 +41,7 @@ It can be built and ran on all common platforms. We develop and test on the foll - **Linux** - **Ubuntu** 20.04 LTS - **Centos** 7 -- **Mac OSX** +- **Mac OSX** - **10.15** Catalina - **11.1** Big Sur (using Rosetta2) @@ -287,6 +287,14 @@ To run tests, execute `.\tools\run_tests(.ps1|.sh)`. **Note that it needs existing virtual environment.** + +Developer tools +------------- + +In case you wish to add your own tools to `.\tools` folder without git tracking, it is possible by adding it with `dev_*` suffix (`dev_clear_pyc(.ps1|.sh)`). + + + ## Contributors ✨ Thanks goes to these wonderful people ([emoji key](https://allcontributors.org/docs/en/emoji-key)): From a0e241b02fbfdee18b8ded65af89eedb343793d2 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 6 Sep 2022 10:58:22 +0200 Subject: [PATCH 102/169] README fix --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index a2f442b640..a3d3cf1dbb 100644 --- a/README.md +++ b/README.md @@ -291,7 +291,7 @@ To run tests, execute `.\tools\run_tests(.ps1|.sh)`. Developer tools ------------- -In case you wish to add your own tools to `.\tools` folder without git tracking, it is possible by adding it with `dev_*` suffix (`dev_clear_pyc(.ps1|.sh)`). +In case you wish to add your own tools to `.\tools` folder without git tracking, it is possible by adding it with `dev_*` suffix (example: `dev_clear_pyc(.ps1|.sh)`). 
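For illustration, a `dev_*` helper of the kind described above might look roughly like the following. This is a minimal sketch assuming a hypothetical `tools/dev_clear_pyc.py` that the `dev_clear_pyc(.ps1|.sh)` wrappers could call; the file name and contents are illustrative only and are not part of any patch in this series:

    # dev_clear_pyc.py -- hypothetical, git-ignored helper kept in ./tools
    # Removes Python bytecode caches from the repository checkout.
    import os
    import shutil


    def clear_pyc(root):
        """Delete __pycache__ directories and stray *.pyc files under `root`."""
        for dirpath, dirnames, filenames in os.walk(root):
            if "__pycache__" in dirnames:
                shutil.rmtree(os.path.join(dirpath, "__pycache__"))
                dirnames.remove("__pycache__")
            for filename in filenames:
                if filename.endswith(".pyc"):
                    os.remove(os.path.join(dirpath, filename))


    if __name__ == "__main__":
        # Assumes the helper lives directly in the repository's ./tools folder.
        repo_root = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
        clear_pyc(repo_root)

Because files matching the `dev_*` pattern in `.\tools` are presumably excluded via the project's `.gitignore`, such helpers stay local to a developer's checkout and never enter version control.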
From 26fbdac8da117c83a71b75ce6315be4044d23942 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Wed, 7 Sep 2022 16:14:30 +0800 Subject: [PATCH 103/169] adding and loading maya mel workspace through openpype project setting --- openpype/hosts/maya/api/pipeline.py | 5 +++-- openpype/hosts/maya/hooks/pre_copy_mel.py | 5 +++-- openpype/hosts/maya/lib.py | 19 ++++++++++--------- .../defaults/project_anatomy/attributes.json | 3 +-- .../defaults/project_settings/maya.json | 15 +-------------- .../projects_schema/schema_project_maya.json | 17 ++++------------- 6 files changed, 22 insertions(+), 42 deletions(-) diff --git a/openpype/hosts/maya/api/pipeline.py b/openpype/hosts/maya/api/pipeline.py index 4768a9ee4f..4578d6fb39 100644 --- a/openpype/hosts/maya/api/pipeline.py +++ b/openpype/hosts/maya/api/pipeline.py @@ -28,7 +28,7 @@ from openpype.pipeline import ( AVALON_CONTAINER_ID, ) from openpype.pipeline.load import any_outdated_containers -from openpype.hosts.maya.lib import load_workspace_mel +from openpype.hosts.maya.lib import create_workspace_mel from . import menu, lib from .workio import ( open_file, @@ -548,9 +548,10 @@ def on_task_changed(): def before_workfile_save(event): + project_name = os.getenv("AVALON_PROJECT") workdir_path = event["workdir_path"] if workdir_path: - load_workspace_mel(workdir_path) + create_workspace_mel(workdir_path, project_name) class MayaDirmap(HostDirmap): diff --git a/openpype/hosts/maya/hooks/pre_copy_mel.py b/openpype/hosts/maya/hooks/pre_copy_mel.py index b11e18241e..6f90af4b7c 100644 --- a/openpype/hosts/maya/hooks/pre_copy_mel.py +++ b/openpype/hosts/maya/hooks/pre_copy_mel.py @@ -1,5 +1,5 @@ from openpype.lib import PreLaunchHook -from openpype.hosts.maya.lib import copy_workspace_mel +from openpype.hosts.maya.lib import create_workspace_mel class PreCopyMel(PreLaunchHook): @@ -10,9 +10,10 @@ class PreCopyMel(PreLaunchHook): app_groups = ["maya"] def execute(self): + project_name = self.launch_context.env.get("AVALON_PROJECT") workdir = self.launch_context.env.get("AVALON_WORKDIR") if not workdir: self.log.warning("BUG: Workdir is not filled.") return - copy_workspace_mel(workdir) + create_workspace_mel(workdir, project_name) diff --git a/openpype/hosts/maya/lib.py b/openpype/hosts/maya/lib.py index 2853789656..443bf7d10e 100644 --- a/openpype/hosts/maya/lib.py +++ b/openpype/hosts/maya/lib.py @@ -1,10 +1,8 @@ import os -import shutil - -from openpype.settings import get_current_project_settings +from openpype.settings import get_project_settings -def load_workspace_mel(workdir): +def create_workspace_mel(workdir, project_name): dst_filepath = os.path.join(workdir, "workspace.mel") if os.path.exists(dst_filepath): return @@ -12,9 +10,12 @@ def load_workspace_mel(workdir): if not os.path.exists(workdir): os.makedirs(workdir) + project_setting = get_project_settings(project_name) + mel_script = project_setting["maya"].get("mel_workspace") + + # Skip if mel script in settings is empty + if not mel_script: + return + with open(dst_filepath, "w") as mel_file: - setting = get_current_project_settings() - mel_script = setting["maya"]["mel-workspace"]["definition"] - for mel in mel_script: - mel_file.write(mel) - mel_file.write("\n") + mel_file.write(mel_script) diff --git a/openpype/settings/defaults/project_anatomy/attributes.json b/openpype/settings/defaults/project_anatomy/attributes.json index 983ac603f9..bf8bbef8de 100644 --- a/openpype/settings/defaults/project_anatomy/attributes.json +++ b/openpype/settings/defaults/project_anatomy/attributes.json @@ 
-19,8 +19,7 @@ "blender/2-91", "harmony/20", "photoshop/2021", - "aftereffects/2021", - "unreal/4-26" + "aftereffects/2021" ], "tools_env": [], "active": true diff --git a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index 162732280f..ada69c3730 100644 --- a/openpype/settings/defaults/project_settings/maya.json +++ b/openpype/settings/defaults/project_settings/maya.json @@ -15,20 +15,7 @@ "destination-path": [] } }, - "mel-workspace":{ - "definition":[ - "workspace -fr \"shaders\" \"renderData/shaders\";", - "workspace -fr \"images\" \"renders\";", - "workspace -fr \"particles\" \"particles\";", - "workspace -fr \"mayaAscii\" \"\";", - "workspace -fr \"mayaBinary\" \"\";", - "workspace -fr \"scene\" \"\";", - "workspace -fr \"alembicCache\" \"cache/alembic\";", - "workspace -fr \"renderData\" \"renderData\";", - "workspace -fr \"sourceImages\" \"sourceimages\";", - "workspace -fr \"fileCache\" \"cache/nCache\";" - ] - }, + "mel_workspace": "workspace -fr \"shaders\" \"renderData/shaders\";\nworkspace -fr \"images\" \"renders\";\nworkspace -fr \"particles\" \"particles\";\nworkspace -fr \"mayaAscii\" \"\";\nworkspace -fr \"mayaBinary\" \"\";\nworkspace -fr \"scene\" \"\";\nworkspace -fr \"alembicCache\" \"cache/alembic\";\nworkspace -fr \"renderData\" \"renderData\";\nworkspace -fr \"sourceImages\" \"sourceimages\";\nworkspace -fr \"fileCache\" \"cache/nCache\";\n", "scriptsmenu": { "name": "OpenPype Tools", "definition": [ diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_maya.json b/openpype/settings/entities/schemas/projects_schema/schema_project_maya.json index 7204ec586a..978de56a51 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_maya.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_maya.json @@ -54,19 +54,10 @@ ] }, { - "type": "dict", - "collapsible": true, - "key": "mel-workspace", - "label": "Maya MEL Workspace", - "is_group": true, - "children": [ - { - "type": "list", - "object_type": "text", - "key": "definition", - "label": "definition" - } - ] + "type": "text", + "multiline" : true, + "key": "mel_workspace", + "label": "Maya MEL Workspace" }, { "type": "schema", From a9b69536cac401221cacaaa3155c4f9a7be682b8 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Wed, 7 Sep 2022 16:44:29 +0800 Subject: [PATCH 104/169] adding and loading maya mel workspace through openpype project setting --- openpype/settings/defaults/project_anatomy/attributes.json | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/settings/defaults/project_anatomy/attributes.json b/openpype/settings/defaults/project_anatomy/attributes.json index bf8bbef8de..983ac603f9 100644 --- a/openpype/settings/defaults/project_anatomy/attributes.json +++ b/openpype/settings/defaults/project_anatomy/attributes.json @@ -19,7 +19,8 @@ "blender/2-91", "harmony/20", "photoshop/2021", - "aftereffects/2021" + "aftereffects/2021", + "unreal/4-26" ], "tools_env": [], "active": true From decc11251854f60db02531f93e2b8fbd4d3fa7ec Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Wed, 7 Sep 2022 17:06:47 +0800 Subject: [PATCH 105/169] load and edit mel workspace within the Openpype project settings --- .../settings/defaults/project_settings/maya.json | 2 +- .../schemas/projects_schema/schema_project_maya.json | 12 ++++++------ 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/openpype/settings/defaults/project_settings/maya.json 
b/openpype/settings/defaults/project_settings/maya.json index ada69c3730..bb96fcf741 100644 --- a/openpype/settings/defaults/project_settings/maya.json +++ b/openpype/settings/defaults/project_settings/maya.json @@ -1,4 +1,5 @@ { + "mel_workspace": "workspace -fr \"shaders\" \"renderData/shaders\";\nworkspace -fr \"images\" \"renders\";\nworkspace -fr \"particles\" \"particles\";\nworkspace -fr \"mayaAscii\" \"\";\nworkspace -fr \"mayaBinary\" \"\";\nworkspace -fr \"scene\" \"\";\nworkspace -fr \"alembicCache\" \"cache/alembic\";\nworkspace -fr \"renderData\" \"renderData\";\nworkspace -fr \"sourceImages\" \"sourceimages\";\nworkspace -fr \"fileCache\" \"cache/nCache\";\n", "ext_mapping": { "model": "ma", "mayaAscii": "ma", @@ -15,7 +16,6 @@ "destination-path": [] } }, - "mel_workspace": "workspace -fr \"shaders\" \"renderData/shaders\";\nworkspace -fr \"images\" \"renders\";\nworkspace -fr \"particles\" \"particles\";\nworkspace -fr \"mayaAscii\" \"\";\nworkspace -fr \"mayaBinary\" \"\";\nworkspace -fr \"scene\" \"\";\nworkspace -fr \"alembicCache\" \"cache/alembic\";\nworkspace -fr \"renderData\" \"renderData\";\nworkspace -fr \"sourceImages\" \"sourceimages\";\nworkspace -fr \"fileCache\" \"cache/nCache\";\n", "scriptsmenu": { "name": "OpenPype Tools", "definition": [ diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_maya.json b/openpype/settings/entities/schemas/projects_schema/schema_project_maya.json index 978de56a51..a54f8e6e4f 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_maya.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_maya.json @@ -5,6 +5,12 @@ "label": "Maya", "is_file": true, "children": [ + { + "type": "text", + "multiline" : true, + "key": "mel_workspace", + "label": "Maya MEL Workspace" + }, { "type": "dict-modifiable", "key": "ext_mapping", @@ -53,12 +59,6 @@ } ] }, - { - "type": "text", - "multiline" : true, - "key": "mel_workspace", - "label": "Maya MEL Workspace" - }, { "type": "schema", "name": "schema_scriptsmenu" From 57aa1e6659ab9552c1980dbebcd8b64535469f39 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 7 Sep 2022 14:49:55 +0200 Subject: [PATCH 106/169] implementing not retimed working frame range and retimed handles switch --- openpype/hosts/flame/api/plugin.py | 10 ++++++ .../flame/plugins/create/create_shot_clip.py | 16 ++++++++++ .../publish/collect_timeline_instances.py | 4 +++ .../publish/extract_subset_resources.py | 32 +++++++++++++------ .../publish/collect_otio_frame_ranges.py | 6 ++++ 5 files changed, 58 insertions(+), 10 deletions(-) diff --git a/openpype/hosts/flame/api/plugin.py b/openpype/hosts/flame/api/plugin.py index efbabb6a55..145b1f0921 100644 --- a/openpype/hosts/flame/api/plugin.py +++ b/openpype/hosts/flame/api/plugin.py @@ -361,6 +361,8 @@ class PublishableClip: index_from_segment_default = False use_shot_name_default = False include_handles_default = False + retimed_handles_default = True + retimed_framerange_default = True def __init__(self, segment, **kwargs): self.rename_index = kwargs["rename_index"] @@ -496,6 +498,14 @@ class PublishableClip: "audio", {}).get("value") or False self.include_handles = self.ui_inputs.get( "includeHandles", {}).get("value") or self.include_handles_default + self.retimed_handles = ( + self.ui_inputs.get("retimedHandles", {}).get("value") + or self.retimed_handles_default + ) + self.retimed_framerange = ( + self.ui_inputs.get("retimedFramerange", {}).get("value") + or self.retimed_framerange_default + ) # 
build subset name from layer name if self.subset_name == "[ track name ]": diff --git a/openpype/hosts/flame/plugins/create/create_shot_clip.py b/openpype/hosts/flame/plugins/create/create_shot_clip.py index fa239ea420..b03a39a7ca 100644 --- a/openpype/hosts/flame/plugins/create/create_shot_clip.py +++ b/openpype/hosts/flame/plugins/create/create_shot_clip.py @@ -276,6 +276,22 @@ class CreateShotClip(opfapi.Creator): "target": "tag", "toolTip": "By default handles are excluded", # noqa "order": 3 + }, + "retimedHandles": { + "value": True, + "type": "QCheckBox", + "label": "Retimed handles", + "target": "tag", + "toolTip": "By default handles are retimed.", # noqa + "order": 4 + }, + "retimedFramerange": { + "value": True, + "type": "QCheckBox", + "label": "Retimed framerange", + "target": "tag", + "toolTip": "By default framerange is retimed.", # noqa + "order": 5 } } } diff --git a/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py b/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py index 992db62c75..d6ff13b059 100644 --- a/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py +++ b/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py @@ -131,6 +131,10 @@ class CollectTimelineInstances(pyblish.api.ContextPlugin): "fps": self.fps, "workfileFrameStart": workfile_start, "sourceFirstFrame": int(first_frame), + "notRetimedHandles": ( + not marker_data.get("retimedHandles")), + "notRetimedFramerange": ( + not marker_data.get("retimedFramerange")), "path": file_path, "flameAddTasks": self.add_tasks, "tasks": { diff --git a/openpype/hosts/flame/plugins/publish/extract_subset_resources.py b/openpype/hosts/flame/plugins/publish/extract_subset_resources.py index 3e1e8db986..1af6b00654 100644 --- a/openpype/hosts/flame/plugins/publish/extract_subset_resources.py +++ b/openpype/hosts/flame/plugins/publish/extract_subset_resources.py @@ -90,26 +90,38 @@ class ExtractSubsetResources(openpype.api.Extractor): handle_end = instance.data["handleEnd"] handles = max(handle_start, handle_end) include_handles = instance.data.get("includeHandles") + retimed_handles = instance.data.get("retimedHandles") # get media source range with handles source_start_handles = instance.data["sourceStartH"] source_end_handles = instance.data["sourceEndH"] - # retime if needed + + # retime if needed if r_speed != 1.0: - source_start_handles = ( - instance.data["sourceStart"] - r_handle_start) - source_end_handles = ( - source_start_handles - + (r_source_dur - 1) - + r_handle_start - + r_handle_end - ) + if retimed_handles: + # handles are retimed + source_start_handles = ( + instance.data["sourceStart"] - r_handle_start) + source_end_handles = ( + source_start_handles + + (r_source_dur - 1) + + r_handle_start + + r_handle_end + ) + else: + # handles are not retimed + source_end_handles = ( + source_start_handles + + (r_source_dur - 1) + + handle_start + + handle_end + ) # get frame range with handles for representation range frame_start_handle = frame_start - handle_start repre_frame_start = frame_start_handle if include_handles: - if r_speed == 1.0: + if r_speed == 1.0 or not retimed_handles: frame_start_handle = frame_start else: frame_start_handle = ( diff --git a/openpype/plugins/publish/collect_otio_frame_ranges.py b/openpype/plugins/publish/collect_otio_frame_ranges.py index 40e89e29bc..40a3fa6978 100644 --- a/openpype/plugins/publish/collect_otio_frame_ranges.py +++ b/openpype/plugins/publish/collect_otio_frame_ranges.py @@ -29,6 +29,7 @@ class 
CollectOtioFrameRanges(pyblish.api.InstancePlugin): # get basic variables otio_clip = instance.data["otioClip"] workfile_start = instance.data["workfileFrameStart"] + not_retime_framerange = instance.data.get("notRetimedFramerange") # get ranges otio_tl_range = otio_clip.range_in_parent() @@ -54,6 +55,11 @@ class CollectOtioFrameRanges(pyblish.api.InstancePlugin): frame_end = frame_start + otio.opentime.to_frames( otio_tl_range.duration, otio_tl_range.duration.rate) - 1 + # in case of retimed clip and frame range should not be retimed + if not_retime_framerange: + frame_end = frame_start + otio.opentime.to_frames( + otio_src_range.duration, otio_src_range.duration.rate) - 1 + data = { "frameStart": frame_start, "frameEnd": frame_end, From d4eeabad7e0883bb2f2d31e0b8479e4b547ba7cc Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Wed, 7 Sep 2022 21:17:49 +0800 Subject: [PATCH 107/169] adding and loading maya mel workspace through openpype project setting --- openpype/hosts/maya/api/pipeline.py | 6 +++--- openpype/hosts/maya/lib.py | 5 +++-- .../schemas/projects_schema/schema_project_maya.json | 1 + 3 files changed, 7 insertions(+), 5 deletions(-) diff --git a/openpype/hosts/maya/api/pipeline.py b/openpype/hosts/maya/api/pipeline.py index 4578d6fb39..6012d82263 100644 --- a/openpype/hosts/maya/api/pipeline.py +++ b/openpype/hosts/maya/api/pipeline.py @@ -59,7 +59,7 @@ class MayaHost(HostBase, IWorkfileHost, ILoadHost): self._op_events = {} def install(self): - project_settings = get_project_settings(os.getenv("AVALON_PROJECT")) + project_settings = get_project_settings(legacy_io.active_project()) # process path mapping dirmap_processor = MayaDirmap("maya", project_settings) dirmap_processor.process_dirmap() @@ -536,7 +536,7 @@ def on_task_changed(): lib.update_content_on_context_change() msg = " project: {}\n asset: {}\n task:{}".format( - legacy_io.Session["AVALON_PROJECT"], + legacy_io.active_project(), legacy_io.Session["AVALON_ASSET"], legacy_io.Session["AVALON_TASK"] ) @@ -548,7 +548,7 @@ def on_task_changed(): def before_workfile_save(event): - project_name = os.getenv("AVALON_PROJECT") + project_name = legacy_io.active_project() workdir_path = event["workdir_path"] if workdir_path: create_workspace_mel(workdir_path, project_name) diff --git a/openpype/hosts/maya/lib.py b/openpype/hosts/maya/lib.py index 443bf7d10e..e466850810 100644 --- a/openpype/hosts/maya/lib.py +++ b/openpype/hosts/maya/lib.py @@ -1,6 +1,6 @@ import os from openpype.settings import get_project_settings - +from openpype.api import Logger def create_workspace_mel(workdir, project_name): dst_filepath = os.path.join(workdir, "workspace.mel") @@ -15,7 +15,8 @@ def create_workspace_mel(workdir, project_name): # Skip if mel script in settings is empty if not mel_script: - return + log = Logger.get_logger("create_workspace_mel") + log.debug("File 'workspace.mel' not created. 
Settings value is empty.") with open(dst_filepath, "w") as mel_file: mel_file.write(mel_script) diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_maya.json b/openpype/settings/entities/schemas/projects_schema/schema_project_maya.json index a54f8e6e4f..72c974642f 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_maya.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_maya.json @@ -8,6 +8,7 @@ { "type": "text", "multiline" : true, + "use_label_wrap": true, "key": "mel_workspace", "label": "Maya MEL Workspace" }, From a6d7df1423fdd5ec37744a34042e01a76776f263 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Wed, 7 Sep 2022 21:19:15 +0800 Subject: [PATCH 108/169] adding and loading maya mel workspace through openpype project setting --- openpype/hosts/maya/lib.py | 1 + 1 file changed, 1 insertion(+) diff --git a/openpype/hosts/maya/lib.py b/openpype/hosts/maya/lib.py index e466850810..e07e174dd6 100644 --- a/openpype/hosts/maya/lib.py +++ b/openpype/hosts/maya/lib.py @@ -2,6 +2,7 @@ import os from openpype.settings import get_project_settings from openpype.api import Logger + def create_workspace_mel(workdir, project_name): dst_filepath = os.path.join(workdir, "workspace.mel") if os.path.exists(dst_filepath): From b7256e7c19ba376e438a88ca5d0b4a9609a44423 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Wed, 7 Sep 2022 21:49:02 +0800 Subject: [PATCH 109/169] adding and loading maya mel workspace through openpype project setting --- openpype/hosts/maya/lib.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/maya/lib.py b/openpype/hosts/maya/lib.py index e07e174dd6..6f7bb8f986 100644 --- a/openpype/hosts/maya/lib.py +++ b/openpype/hosts/maya/lib.py @@ -1,6 +1,6 @@ import os from openpype.settings import get_project_settings -from openpype.api import Logger +from openpype.lib import Logger def create_workspace_mel(workdir, project_name): From d2b3c80fb93acf26f9d61a6164dadf2941cd7930 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 7 Sep 2022 16:02:38 +0200 Subject: [PATCH 110/169] improving variable name --- openpype/plugins/publish/collect_otio_frame_ranges.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/plugins/publish/collect_otio_frame_ranges.py b/openpype/plugins/publish/collect_otio_frame_ranges.py index 40a3fa6978..cfb0318950 100644 --- a/openpype/plugins/publish/collect_otio_frame_ranges.py +++ b/openpype/plugins/publish/collect_otio_frame_ranges.py @@ -29,7 +29,7 @@ class CollectOtioFrameRanges(pyblish.api.InstancePlugin): # get basic variables otio_clip = instance.data["otioClip"] workfile_start = instance.data["workfileFrameStart"] - not_retime_framerange = instance.data.get("notRetimedFramerange") + workfile_source_duration = instance.data.get("notRetimedFramerange") # get ranges otio_tl_range = otio_clip.range_in_parent() @@ -56,7 +56,7 @@ class CollectOtioFrameRanges(pyblish.api.InstancePlugin): otio_tl_range.duration, otio_tl_range.duration.rate) - 1 # in case of retimed clip and frame range should not be retimed - if not_retime_framerange: + if workfile_source_duration: frame_end = frame_start + otio.opentime.to_frames( otio_src_range.duration, otio_src_range.duration.rate) - 1 From 9bcd86bac7ca0294fc41bc3d2465166b3b5e8861 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Wed, 7 Sep 2022 22:12:16 +0800 Subject: [PATCH 111/169] load and edit mel workspace within the Openpype project settings --- openpype/hosts/maya/lib.py | 1 + 1 file 
changed, 1 insertion(+) diff --git a/openpype/hosts/maya/lib.py b/openpype/hosts/maya/lib.py index 6f7bb8f986..ffb2f0b27c 100644 --- a/openpype/hosts/maya/lib.py +++ b/openpype/hosts/maya/lib.py @@ -18,6 +18,7 @@ def create_workspace_mel(workdir, project_name): if not mel_script: log = Logger.get_logger("create_workspace_mel") log.debug("File 'workspace.mel' not created. Settings value is empty.") + return with open(dst_filepath, "w") as mel_file: mel_file.write(mel_script) From 49dff63f08207eea0218cf37e4824795d08e3895 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 8 Sep 2022 17:50:55 +0200 Subject: [PATCH 112/169] Fix detection of workfile instance --- openpype/modules/deadline/abstract_submit_deadline.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/modules/deadline/abstract_submit_deadline.py b/openpype/modules/deadline/abstract_submit_deadline.py index f698b7688e..512ff800ee 100644 --- a/openpype/modules/deadline/abstract_submit_deadline.py +++ b/openpype/modules/deadline/abstract_submit_deadline.py @@ -519,7 +519,7 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin): instance = self._instance workfile_instance = self._get_workfile_instance(instance.context) - if not workfile_instance: + if workfile_instance is None: return # determine published path from Anatomy. From ff149b68215adc8c0211d7248d94b17376496ad1 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 8 Sep 2022 19:42:55 +0200 Subject: [PATCH 113/169] Remove unused import --- openpype/tools/loader/model.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/openpype/tools/loader/model.py b/openpype/tools/loader/model.py index 9d1f1e045c..19b135bfc5 100644 --- a/openpype/tools/loader/model.py +++ b/openpype/tools/loader/model.py @@ -14,8 +14,7 @@ from openpype.client import ( get_versions, get_hero_versions, get_version_by_name, - get_representations, - get_representations_parents + get_representations ) from openpype.pipeline import ( registered_host, From dc903c752014348bda4fd4a2a05600ab7ec66f87 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 8 Sep 2022 19:49:17 +0200 Subject: [PATCH 114/169] Store `loaded_in_scene` as `bool` in model --- openpype/tools/loader/model.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/openpype/tools/loader/model.py b/openpype/tools/loader/model.py index 19b135bfc5..8543672617 100644 --- a/openpype/tools/loader/model.py +++ b/openpype/tools/loader/model.py @@ -679,9 +679,7 @@ class SubsetsModel(TreeModel, BaseRepresentationModel): data["asset"] = asset_docs_by_id[asset_id]["name"] data["last_version"] = last_version - - loaded = subset_doc["_id"] in subsets_loaded_by_id - data["loaded_in_scene"] = "yes" if loaded else "no" + data["loaded_in_scene"] = subset_doc["_id"] in subsets_loaded_by_id # Sync server data data.update( From 9ebd602a91a21e427cbd063bdd3841a0370d6b22 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 8 Sep 2022 20:06:34 +0200 Subject: [PATCH 115/169] Add delegate to loaded in scene column for "yes/no" and colorized column --- openpype/tools/loader/widgets.py | 7 ++++++- openpype/tools/utils/delegates.py | 27 +++++++++++++++++++++++++++ 2 files changed, 33 insertions(+), 1 deletion(-) diff --git a/openpype/tools/loader/widgets.py b/openpype/tools/loader/widgets.py index 3c4a89aa0f..e27d7e6a12 100644 --- a/openpype/tools/loader/widgets.py +++ b/openpype/tools/loader/widgets.py @@ -36,7 +36,8 @@ from openpype.tools.utils import ( ) from openpype.tools.utils.delegates import ( 
VersionDelegate, - PrettyTimeDelegate + PrettyTimeDelegate, + LoadedInSceneDelegate ) from openpype.tools.utils.widgets import ( OptionalMenu, @@ -234,6 +235,10 @@ class SubsetWidget(QtWidgets.QWidget): column = model.Columns.index("repre_info") view.setItemDelegateForColumn(column, avail_delegate) + loaded_in_scene_delegate = LoadedInSceneDelegate(view) + column = model.Columns.index("loaded_in_scene") + view.setItemDelegateForColumn(column, loaded_in_scene_delegate) + layout = QtWidgets.QVBoxLayout(self) layout.setContentsMargins(0, 0, 0, 0) layout.addLayout(top_bar_layout) diff --git a/openpype/tools/utils/delegates.py b/openpype/tools/utils/delegates.py index d6c2d69e76..3547251282 100644 --- a/openpype/tools/utils/delegates.py +++ b/openpype/tools/utils/delegates.py @@ -291,3 +291,30 @@ class PrettyTimeDelegate(QtWidgets.QStyledItemDelegate): def displayText(self, value, locale): if value is not None: return pretty_timestamp(value) + + +class LoadedInSceneDelegate(QtWidgets.QStyledItemDelegate): + """Delegate for Loaded in Scene state columns. + + Shows "yes" or "no" for True or False values + Colorizes green or dark grey based on True or False values + + """ + + def __init__(self, *args, **kwargs): + super(LoadedInSceneDelegate, self).__init__(*args, **kwargs) + self._colors = { + True: QtGui.QColor(80, 170, 80), + False: QtGui.QColor(90, 90, 90) + } + + def displayText(self, value, locale): + return "yes" if value else "no" + + def initStyleOption(self, option, index): + super(LoadedInSceneDelegate, self).initStyleOption(option, index) + + # Colorize based on value + value = index.data(QtCore.Qt.DisplayRole) + color = self._colors[bool(value)] + option.palette.setBrush(QtGui.QPalette.Text, color) From 3943d74f3ea3b72d08f2d2e114a30b4a5e00a515 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 9 Sep 2022 11:55:05 +0200 Subject: [PATCH 116/169] flame: adding batch action hook --- openpype/hosts/flame/api/__init__.py | 4 +- openpype/hosts/flame/api/menu.py | 50 +++++++++++++++++++ .../hosts/flame/startup/openpype_in_flame.py | 13 +++++ 3 files changed, 66 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/flame/api/__init__.py b/openpype/hosts/flame/api/__init__.py index 76c1c93379..7da91d41e4 100644 --- a/openpype/hosts/flame/api/__init__.py +++ b/openpype/hosts/flame/api/__init__.py @@ -51,7 +51,8 @@ from .pipeline import ( ) from .menu import ( FlameMenuProjectConnect, - FlameMenuTimeline + FlameMenuTimeline, + FlameMenuBatch ) from .plugin import ( Creator, @@ -131,6 +132,7 @@ __all__ = [ # menu "FlameMenuProjectConnect", "FlameMenuTimeline", + "FlameMenuBatch", # plugin "Creator", diff --git a/openpype/hosts/flame/api/menu.py b/openpype/hosts/flame/api/menu.py index 7f1a6a24e2..a822059930 100644 --- a/openpype/hosts/flame/api/menu.py +++ b/openpype/hosts/flame/api/menu.py @@ -201,3 +201,53 @@ class FlameMenuTimeline(_FlameMenuApp): if self.flame: self.flame.execute_shortcut('Rescan Python Hooks') self.log.info('Rescan Python Hooks') + + +class FlameMenuBatch(_FlameMenuApp): + + # flameMenuProjectconnect app takes care of the preferences dialog as well + + def __init__(self, framework): + _FlameMenuApp.__init__(self, framework) + + def __getattr__(self, name): + def method(*args, **kwargs): + project = self.dynamic_menu_data.get(name) + if project: + self.link_project(project) + return method + + def build_menu(self): + if not self.flame: + return [] + + menu = deepcopy(self.menu) + + menu['actions'].append({ + "name": "Load...", + "execute": lambda x: 
self.tools_helper.show_loader() + }) + menu['actions'].append({ + "name": "Manage...", + "execute": lambda x: self.tools_helper.show_scene_inventory() + }) + menu['actions'].append({ + "name": "Library...", + "execute": lambda x: self.tools_helper.show_library_loader() + }) + return menu + + def refresh(self, *args, **kwargs): + self.rescan() + + def rescan(self, *args, **kwargs): + if not self.flame: + try: + import flame + self.flame = flame + except ImportError: + self.flame = None + + if self.flame: + self.flame.execute_shortcut('Rescan Python Hooks') + self.log.info('Rescan Python Hooks') diff --git a/openpype/hosts/flame/startup/openpype_in_flame.py b/openpype/hosts/flame/startup/openpype_in_flame.py index f2ac23b19e..60f6612b7f 100644 --- a/openpype/hosts/flame/startup/openpype_in_flame.py +++ b/openpype/hosts/flame/startup/openpype_in_flame.py @@ -73,6 +73,8 @@ def load_apps(): opfapi.FlameMenuProjectConnect(opfapi.CTX.app_framework)) opfapi.CTX.flame_apps.append( opfapi.FlameMenuTimeline(opfapi.CTX.app_framework)) + opfapi.CTX.flame_apps.append( + opfapi.FlameMenuBatch(opfapi.CTX.app_framework)) opfapi.CTX.app_framework.log.info("Apps are loaded") @@ -191,3 +193,14 @@ def get_timeline_custom_ui_actions(): openpype_install() return _build_app_menu("FlameMenuTimeline") + +def get_batch_custom_ui_actions(): + """Hook to create submenu in batch + + Returns: + list: menu object + """ + # install openpype and the host + openpype_install() + + return _build_app_menu("FlameMenuBatch") \ No newline at end of file From d0665c3928ecff4176f6e22b676384f09485c173 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Fri, 9 Sep 2022 13:06:12 +0200 Subject: [PATCH 117/169] Change `mayaascii` -> `mayaAscii` --- .../ftrack/plugins/publish/integrate_ftrack_instances.py | 2 +- openpype/settings/defaults/project_settings/ftrack.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py index 6024781d87..7e5815b100 100644 --- a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py +++ b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py @@ -35,7 +35,7 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): family_mapping = { "camera": "cam", "look": "look", - "mayaascii": "scene", + "mayaAscii": "scene", "model": "geo", "rig": "rig", "setdress": "setdress", diff --git a/openpype/settings/defaults/project_settings/ftrack.json b/openpype/settings/defaults/project_settings/ftrack.json index 09b194e21c..cdf861df4a 100644 --- a/openpype/settings/defaults/project_settings/ftrack.json +++ b/openpype/settings/defaults/project_settings/ftrack.json @@ -455,7 +455,7 @@ "family_mapping": { "camera": "cam", "look": "look", - "mayaascii": "scene", + "mayaAscii": "scene", "model": "geo", "rig": "rig", "setdress": "setdress", From c622eb7a59986ebe003205d66cd2ae101e1b23eb Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 9 Sep 2022 13:54:33 +0200 Subject: [PATCH 118/169] flame: add ui to project media panel --- openpype/hosts/flame/api/__init__.py | 4 ++-- openpype/hosts/flame/api/menu.py | 2 +- openpype/hosts/flame/startup/openpype_in_flame.py | 15 ++++++++++++++- 3 files changed, 17 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/flame/api/__init__.py b/openpype/hosts/flame/api/__init__.py index 7da91d41e4..c00ee958b6 100644 --- a/openpype/hosts/flame/api/__init__.py +++ b/openpype/hosts/flame/api/__init__.py @@ 
-52,7 +52,7 @@ from .pipeline import ( from .menu import ( FlameMenuProjectConnect, FlameMenuTimeline, - FlameMenuBatch + FlameMenuUniversal ) from .plugin import ( Creator, @@ -132,7 +132,7 @@ __all__ = [ # menu "FlameMenuProjectConnect", "FlameMenuTimeline", - "FlameMenuBatch", + "FlameMenuUniversal", # plugin "Creator", diff --git a/openpype/hosts/flame/api/menu.py b/openpype/hosts/flame/api/menu.py index a822059930..f72a352bba 100644 --- a/openpype/hosts/flame/api/menu.py +++ b/openpype/hosts/flame/api/menu.py @@ -203,7 +203,7 @@ class FlameMenuTimeline(_FlameMenuApp): self.log.info('Rescan Python Hooks') -class FlameMenuBatch(_FlameMenuApp): +class FlameMenuUniversal(_FlameMenuApp): # flameMenuProjectconnect app takes care of the preferences dialog as well diff --git a/openpype/hosts/flame/startup/openpype_in_flame.py b/openpype/hosts/flame/startup/openpype_in_flame.py index 60f6612b7f..9fdc30db5d 100644 --- a/openpype/hosts/flame/startup/openpype_in_flame.py +++ b/openpype/hosts/flame/startup/openpype_in_flame.py @@ -194,6 +194,7 @@ def get_timeline_custom_ui_actions(): return _build_app_menu("FlameMenuTimeline") + def get_batch_custom_ui_actions(): """Hook to create submenu in batch @@ -203,4 +204,16 @@ def get_batch_custom_ui_actions(): # install openpype and the host openpype_install() - return _build_app_menu("FlameMenuBatch") \ No newline at end of file + return _build_app_menu("FlameMenuUniversal") + + +def get_media_panel_custom_ui_actions(): + """Hook to create submenu in desktop + + Returns: + list: menu object + """ + # install openpype and the host + openpype_install() + + return _build_app_menu("FlameMenuUniversal") From 7b78e09eaec1c7e243782c13079ff70ca6c06d23 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 9 Sep 2022 13:59:15 +0200 Subject: [PATCH 119/169] fixing name of class --- openpype/hosts/flame/startup/openpype_in_flame.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/flame/startup/openpype_in_flame.py b/openpype/hosts/flame/startup/openpype_in_flame.py index 9fdc30db5d..d07aaa6b7d 100644 --- a/openpype/hosts/flame/startup/openpype_in_flame.py +++ b/openpype/hosts/flame/startup/openpype_in_flame.py @@ -74,7 +74,7 @@ def load_apps(): opfapi.CTX.flame_apps.append( opfapi.FlameMenuTimeline(opfapi.CTX.app_framework)) opfapi.CTX.flame_apps.append( - opfapi.FlameMenuBatch(opfapi.CTX.app_framework)) + opfapi.FlameMenuUniversal(opfapi.CTX.app_framework)) opfapi.CTX.app_framework.log.info("Apps are loaded") From 42f575fca94722a25ae462a5112a843799d9aad1 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 9 Sep 2022 14:23:36 +0200 Subject: [PATCH 120/169] actions are expected as list so each application in group is stored if force not open workfile is enabled --- openpype/tools/launcher/models.py | 46 +++++++++++++++++++------------ 1 file changed, 28 insertions(+), 18 deletions(-) diff --git a/openpype/tools/launcher/models.py b/openpype/tools/launcher/models.py index 6d40d21f96..6e3b531018 100644 --- a/openpype/tools/launcher/models.py +++ b/openpype/tools/launcher/models.py @@ -281,18 +281,25 @@ class ActionModel(QtGui.QStandardItemModel): if not action_item: return - action = action_item.data(ACTION_ROLE) - actual_data = self._prepare_compare_data(action) + actions = action_item.data(ACTION_ROLE) + if not isinstance(actions, list): + actions = [actions] + + action_actions_data = [ + self._prepare_compare_data(action) + for action in actions + ] stored = self.launcher_registry.get_item("force_not_open_workfile") - if is_checked: 
- stored.append(actual_data) - else: - final_values = [] - for config in stored: - if config != actual_data: - final_values.append(config) - stored = final_values + for actual_data in action_actions_data: + if is_checked: + stored.append(actual_data) + else: + final_values = [] + for config in stored: + if config != actual_data: + final_values.append(config) + stored = final_values self.launcher_registry.set_item("force_not_open_workfile", stored) self.launcher_registry._get_item.cache_clear() @@ -329,21 +336,24 @@ class ActionModel(QtGui.QStandardItemModel): item (QStandardItem) stored (list) of dict """ - action = item.data(ACTION_ROLE) - if not self.is_application_action(action): + + actions = item.data(ACTION_ROLE) + if not isinstance(actions, list): + actions = [actions] + + if not self.is_application_action(actions[0]): return False - actual_data = self._prepare_compare_data(action) + action_actions_data = [ + self._prepare_compare_data(action) + for action in actions + ] for config in stored: - if config == actual_data: + if config in action_actions_data: return True - return False def _prepare_compare_data(self, action): - if isinstance(action, list) and action: - action = action[0] - compare_data = {} if action and action.label: compare_data = { From 98c065cb8b3c8f75ae0479da1a3287ebbe0b22d6 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 9 Sep 2022 14:24:02 +0200 Subject: [PATCH 121/169] change "start_last_workfile" when triggered from group --- openpype/tools/launcher/widgets.py | 19 ++++++++++++++----- 1 file changed, 14 insertions(+), 5 deletions(-) diff --git a/openpype/tools/launcher/widgets.py b/openpype/tools/launcher/widgets.py index 62599664fe..774ceb659d 100644 --- a/openpype/tools/launcher/widgets.py +++ b/openpype/tools/launcher/widgets.py @@ -312,11 +312,12 @@ class ActionBar(QtWidgets.QWidget): is_group = index.data(GROUP_ROLE) is_variant_group = index.data(VARIANT_GROUP_ROLE) + force_not_open_workfile = index.data(FORCE_NOT_OPEN_WORKFILE_ROLE) if not is_group and not is_variant_group: action = index.data(ACTION_ROLE) # Change data of application action if issubclass(action, ApplicationAction): - if index.data(FORCE_NOT_OPEN_WORKFILE_ROLE): + if force_not_open_workfile: action.data["start_last_workfile"] = False else: action.data.pop("start_last_workfile", None) @@ -385,10 +386,18 @@ class ActionBar(QtWidgets.QWidget): menu.addMenu(sub_menu) result = menu.exec_(QtGui.QCursor.pos()) - if result: - action = actions_mapping[result] - self._start_animation(index) - self.action_clicked.emit(action) + if not result: + return + + action = actions_mapping[result] + if issubclass(action, ApplicationAction): + if force_not_open_workfile: + action.data["start_last_workfile"] = False + else: + action.data.pop("start_last_workfile", None) + + self._start_animation(index) + self.action_clicked.emit(action) class ActionHistory(QtWidgets.QPushButton): From 113210a781ed5affd6e92f72d075a9522a07c0f0 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 9 Sep 2022 17:21:53 +0200 Subject: [PATCH 122/169] firx access to 'Pattern' attribute --- openpype/client/entities.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/openpype/client/entities.py b/openpype/client/entities.py index a9112ac581..43afccf2f1 100644 --- a/openpype/client/entities.py +++ b/openpype/client/entities.py @@ -14,6 +14,8 @@ from bson.objectid import ObjectId from .mongo import get_project_database, get_project_connection +PatternType = type(re.compile("")) + def 
_prepare_fields(fields, required_fields=None): if not fields: @@ -1054,11 +1056,11 @@ def _regex_filters(filters): for key, value in filters.items(): regexes = [] a_values = [] - if isinstance(value, re.Pattern): + if isinstance(value, PatternType): regexes.append(value) elif isinstance(value, (list, tuple, set)): for item in value: - if isinstance(item, re.Pattern): + if isinstance(item, PatternType): regexes.append(item) else: a_values.append(item) @@ -1194,7 +1196,7 @@ def get_representations( as filter. Filter ignored if 'None' is passed. version_ids (Iterable[str]): Subset ids used as parent filter. Filter ignored if 'None' is passed. - context_filters (Dict[str, List[str, re.Pattern]]): Filter by + context_filters (Dict[str, List[str, PatternType]]): Filter by representation context fields. names_by_version_ids (dict[ObjectId, list[str]]): Complex filtering using version ids and list of names under the version. @@ -1240,7 +1242,7 @@ def get_archived_representations( as filter. Filter ignored if 'None' is passed. version_ids (Iterable[str]): Subset ids used as parent filter. Filter ignored if 'None' is passed. - context_filters (Dict[str, List[str, re.Pattern]]): Filter by + context_filters (Dict[str, List[str, PatternType]]): Filter by representation context fields. names_by_version_ids (dict[ObjectId, List[str]]): Complex filtering using version ids and list of names under the version. From c5ae7e5d6cb704794e444addf93bb9e903795dc5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= <33513211+antirotor@users.noreply.github.com> Date: Fri, 9 Sep 2022 17:45:44 +0200 Subject: [PATCH 123/169] Update openpype/hosts/flame/plugins/publish/extract_subset_resources.py Co-authored-by: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> --- .../hosts/flame/plugins/publish/extract_subset_resources.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/flame/plugins/publish/extract_subset_resources.py b/openpype/hosts/flame/plugins/publish/extract_subset_resources.py index 1af6b00654..1d42330e23 100644 --- a/openpype/hosts/flame/plugins/publish/extract_subset_resources.py +++ b/openpype/hosts/flame/plugins/publish/extract_subset_resources.py @@ -96,7 +96,7 @@ class ExtractSubsetResources(openpype.api.Extractor): source_start_handles = instance.data["sourceStartH"] source_end_handles = instance.data["sourceEndH"] - # retime if needed + # retime if needed if r_speed != 1.0: if retimed_handles: # handles are retimed From 87cb5e25b82d3ea352b17794e231924bdbb097ed Mon Sep 17 00:00:00 2001 From: OpenPype Date: Sat, 10 Sep 2022 04:17:04 +0000 Subject: [PATCH 124/169] [Automated] Bump version --- CHANGELOG.md | 45 ++++++++++++++++++--------------------------- openpype/version.py | 2 +- 2 files changed, 19 insertions(+), 28 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 6754f1e2e3..0ffb6a996b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,29 +1,36 @@ # Changelog -## [3.14.2-nightly.3](https://github.com/pypeclub/OpenPype/tree/HEAD) +## [3.14.2-nightly.4](https://github.com/pypeclub/OpenPype/tree/HEAD) [Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.14.1...HEAD) **🆕 New features** - Nuke: Build workfile by template [\#3763](https://github.com/pypeclub/OpenPype/pull/3763) +- Houdini: Publishing workfiles [\#3697](https://github.com/pypeclub/OpenPype/pull/3697) +- Global: making collect audio plugin global [\#3679](https://github.com/pypeclub/OpenPype/pull/3679) **🚀 Enhancements** +- Flame: Adding Creator's retimed shot and 
handles switch [\#3826](https://github.com/pypeclub/OpenPype/pull/3826) +- Flame: OpenPype submenu to batch and media manager [\#3825](https://github.com/pypeclub/OpenPype/pull/3825) +- General: Better pixmap scaling [\#3809](https://github.com/pypeclub/OpenPype/pull/3809) - Photoshop: attempt to speed up ExtractImage [\#3793](https://github.com/pypeclub/OpenPype/pull/3793) - SyncServer: Added cli commands for sync server [\#3765](https://github.com/pypeclub/OpenPype/pull/3765) -- Blender: Publisher collect workfile representation [\#3670](https://github.com/pypeclub/OpenPype/pull/3670) -- Maya: move set render settings menu entry [\#3669](https://github.com/pypeclub/OpenPype/pull/3669) -- Scene Inventory: Maya add actions to select from or to scene [\#3659](https://github.com/pypeclub/OpenPype/pull/3659) +- Kitsu: Drop 'entities root' setting. [\#3739](https://github.com/pypeclub/OpenPype/pull/3739) **🐛 Bug fixes** +- General: Fix Pattern access in client code [\#3828](https://github.com/pypeclub/OpenPype/pull/3828) +- Launcher: Skip opening last work file works for groups [\#3822](https://github.com/pypeclub/OpenPype/pull/3822) +- Maya: Publishing data key change [\#3811](https://github.com/pypeclub/OpenPype/pull/3811) +- Igniter: Fix status handling when version is already installed [\#3804](https://github.com/pypeclub/OpenPype/pull/3804) - Resolve: Addon import is Python 2 compatible [\#3798](https://github.com/pypeclub/OpenPype/pull/3798) +- Hiero: retimed clip publishing is working [\#3792](https://github.com/pypeclub/OpenPype/pull/3792) - nuke: validate write node is not failing due wrong type [\#3780](https://github.com/pypeclub/OpenPype/pull/3780) - Fix - changed format of version string in pyproject.toml [\#3777](https://github.com/pypeclub/OpenPype/pull/3777) - Ftrack status fix typo prgoress -\> progress [\#3761](https://github.com/pypeclub/OpenPype/pull/3761) - Fix version resolution [\#3757](https://github.com/pypeclub/OpenPype/pull/3757) -- Maya: `containerise` dont skip empty values [\#3674](https://github.com/pypeclub/OpenPype/pull/3674) **🔀 Refactored code** @@ -33,17 +40,19 @@ - General: Remove unused teshost [\#3773](https://github.com/pypeclub/OpenPype/pull/3773) - General: Copied 'Extractor' plugin to publish pipeline [\#3771](https://github.com/pypeclub/OpenPype/pull/3771) - General: Move queries of asset and representation links [\#3770](https://github.com/pypeclub/OpenPype/pull/3770) +- General: Move create project folders to pipeline [\#3768](https://github.com/pypeclub/OpenPype/pull/3768) - General: Create project function moved to client code [\#3766](https://github.com/pypeclub/OpenPype/pull/3766) -- General: Move delivery logic to pipeline [\#3751](https://github.com/pypeclub/OpenPype/pull/3751) - General: Move hostdirname functionality into host [\#3749](https://github.com/pypeclub/OpenPype/pull/3749) - General: Move publish utils to pipeline [\#3745](https://github.com/pypeclub/OpenPype/pull/3745) - Houdini: Define houdini as addon [\#3735](https://github.com/pypeclub/OpenPype/pull/3735) +- Fusion: Defined fusion as addon [\#3733](https://github.com/pypeclub/OpenPype/pull/3733) - Flame: Defined flame as addon [\#3732](https://github.com/pypeclub/OpenPype/pull/3732) - Resolve: Define resolve as addon [\#3727](https://github.com/pypeclub/OpenPype/pull/3727) **Merged pull requests:** - Standalone Publisher: Ignore empty labels, then still use name like other asset models [\#3779](https://github.com/pypeclub/OpenPype/pull/3779) +- Kitsu - sync\_all\_project - 
add list ignore\_projects [\#3776](https://github.com/pypeclub/OpenPype/pull/3776) ## [3.14.1](https://github.com/pypeclub/OpenPype/tree/3.14.1) (2022-08-30) @@ -52,23 +61,16 @@ ### 📖 Documentation - Documentation: Few updates [\#3698](https://github.com/pypeclub/OpenPype/pull/3698) -- Documentation: Settings development [\#3660](https://github.com/pypeclub/OpenPype/pull/3660) - -**🆕 New features** - -- Webpublisher:change create flatten image into tri state [\#3678](https://github.com/pypeclub/OpenPype/pull/3678) -- Blender: validators code correction with settings and defaults [\#3662](https://github.com/pypeclub/OpenPype/pull/3662) **🚀 Enhancements** - General: Thumbnail can use project roots [\#3750](https://github.com/pypeclub/OpenPype/pull/3750) +- git: update gitignore [\#3722](https://github.com/pypeclub/OpenPype/pull/3722) - Settings: Remove settings lock on tray exit [\#3720](https://github.com/pypeclub/OpenPype/pull/3720) - General: Added helper getters to modules manager [\#3712](https://github.com/pypeclub/OpenPype/pull/3712) - Unreal: Define unreal as module and use host class [\#3701](https://github.com/pypeclub/OpenPype/pull/3701) - Settings: Lock settings UI session [\#3700](https://github.com/pypeclub/OpenPype/pull/3700) - General: Benevolent context label collector [\#3686](https://github.com/pypeclub/OpenPype/pull/3686) -- Ftrack: Store ftrack entities on hierarchy integration to instances [\#3677](https://github.com/pypeclub/OpenPype/pull/3677) -- Blender: ops refresh manager after process events [\#3663](https://github.com/pypeclub/OpenPype/pull/3663) **🐛 Bug fixes** @@ -82,11 +84,11 @@ - Settings: Fix project overrides save [\#3708](https://github.com/pypeclub/OpenPype/pull/3708) - Workfiles tool: Fix published workfile filtering [\#3704](https://github.com/pypeclub/OpenPype/pull/3704) - PS, AE: Provide default variant value for workfile subset [\#3703](https://github.com/pypeclub/OpenPype/pull/3703) -- Flame: retime is working on clip publishing [\#3684](https://github.com/pypeclub/OpenPype/pull/3684) - Webpublisher: added check for empty context [\#3682](https://github.com/pypeclub/OpenPype/pull/3682) **🔀 Refactored code** +- General: Move delivery logic to pipeline [\#3751](https://github.com/pypeclub/OpenPype/pull/3751) - General: Host addons cleanup [\#3744](https://github.com/pypeclub/OpenPype/pull/3744) - Webpublisher: Webpublisher is used as addon [\#3740](https://github.com/pypeclub/OpenPype/pull/3740) - Photoshop: Defined photoshop as addon [\#3736](https://github.com/pypeclub/OpenPype/pull/3736) @@ -110,7 +112,6 @@ - Hiero: Define hiero as module [\#3717](https://github.com/pypeclub/OpenPype/pull/3717) - Deadline: better logging for DL webservice failures [\#3694](https://github.com/pypeclub/OpenPype/pull/3694) -- Photoshop: resize saved images in ExtractReview for ffmpeg [\#3676](https://github.com/pypeclub/OpenPype/pull/3676) ## [3.14.0](https://github.com/pypeclub/OpenPype/tree/3.14.0) (2022-08-18) @@ -120,21 +121,11 @@ - Ftrack: Addiotional component metadata [\#3685](https://github.com/pypeclub/OpenPype/pull/3685) - Ftrack: Set task status on farm publishing [\#3680](https://github.com/pypeclub/OpenPype/pull/3680) -- Ftrack: Set task status on task creation in integrate hierarchy [\#3675](https://github.com/pypeclub/OpenPype/pull/3675) -- Maya: Disable rendering of all lights for render instances submitted through Deadline. 
[\#3661](https://github.com/pypeclub/OpenPype/pull/3661) **🐛 Bug fixes** - General: Switch from hero version to versioned works [\#3691](https://github.com/pypeclub/OpenPype/pull/3691) -- General: Fix finding of last version [\#3656](https://github.com/pypeclub/OpenPype/pull/3656) - -**🔀 Refactored code** - -- General: Use client projects getter [\#3673](https://github.com/pypeclub/OpenPype/pull/3673) - -**Merged pull requests:** - -- Deadline: Global job pre load is not Pype 2 compatible [\#3666](https://github.com/pypeclub/OpenPype/pull/3666) +- Flame: retime is working on clip publishing [\#3684](https://github.com/pypeclub/OpenPype/pull/3684) ## [3.13.0](https://github.com/pypeclub/OpenPype/tree/3.13.0) (2022-08-09) diff --git a/openpype/version.py b/openpype/version.py index c042ca2625..142bd51a30 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.14.2-nightly.3" +__version__ = "3.14.2-nightly.4" From e76ec9e5aff5651070bcc817e9c0b60a9a980ce4 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Mon, 12 Sep 2022 16:26:35 +0800 Subject: [PATCH 125/169] adding and loading mel workspace within openpype settings --- openpype/hosts/maya/resources/workspace.mel | 11 ----------- 1 file changed, 11 deletions(-) delete mode 100644 openpype/hosts/maya/resources/workspace.mel diff --git a/openpype/hosts/maya/resources/workspace.mel b/openpype/hosts/maya/resources/workspace.mel deleted file mode 100644 index f7213fa4f6..0000000000 --- a/openpype/hosts/maya/resources/workspace.mel +++ /dev/null @@ -1,11 +0,0 @@ -//Maya 2018 Project Definition - -workspace -fr "shaders" "renderData/shaders"; -workspace -fr "alembicCache" "cache/alembic"; -workspace -fr "mayaAscii" ""; -workspace -fr "mayaBinary" ""; -workspace -fr "renderData" "renderData"; -workspace -fr "fileCache" "cache/nCache"; -workspace -fr "scene" ""; -workspace -fr "sourceImages" "sourceimages"; -workspace -fr "images" "renders"; From 4ba3ff21ab10e5b0c092f9580dc6444bdd61383c Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 12 Sep 2022 11:18:09 +0200 Subject: [PATCH 126/169] Tweak back more to intended logic --- .../plugins/publish/submit_maya_deadline.py | 29 ++++++++++++------- 1 file changed, 19 insertions(+), 10 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index f8d0af9752..45790c40ea 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -754,7 +754,12 @@ def _format_tiles( used for assembler configuration. """ - tile = 0 + # Math used requires integers for correct output - as such + # we ensure our inputs are correct. 
+ assert type(tiles_x) is int, "tiles_x must be an integer" + assert type(tiles_y) is int, "tiles_y must be an integer" + assert type(width) is int, "width must be an integer" + assert type(height) is int, "height must be an integer" out = {"JobInfo": {}, "PluginInfo": {}} cfg = OrderedDict() w_space = width // tiles_x @@ -762,6 +767,7 @@ def _format_tiles( cfg["TilesCropped"] = "False" + tile = 0 for tile_x in range(1, tiles_x + 1): for tile_y in reversed(range(1, tiles_y + 1)): tile_prefix = "_tile_{}x{}_{}x{}_".format( @@ -769,28 +775,31 @@ def _format_tiles( tiles_x, tiles_y ) - out_tile_index = "OutputFilename{}Tile{}".format( - str(index), tile - ) + new_filename = "{}/{}{}".format( os.path.dirname(filename), tile_prefix, os.path.basename(filename) ) - out["JobInfo"][out_tile_index] = new_filename - out["PluginInfo"]["RegionPrefix{}".format(str(tile))] = \ - "/{}".format(tile_prefix).join(prefix.rsplit("/", 1)) - top = int(height) - (tile_y * h_space) - bottom = int(height) - ((tile_y - 1) * h_space) - 1 + top = height - (tile_y * h_space) + bottom = height - ((tile_y - 1) * h_space) - 1 left = (tile_x - 1) * w_space right = (tile_x * w_space) - 1 + # Job info + out["JobInfo"]["OutputFilename{}Tile{}".format(index, tile)] = new_filename # noqa: E501 + + # Plugin Info + out["PluginInfo"]["RegionPrefix{}".format(str(tile))] = \ + "/{}".format(tile_prefix).join(prefix.rsplit("/", 1)) + out["PluginInfo"]["RegionTop{}".format(tile)] = top out["PluginInfo"]["RegionBottom{}".format(tile)] = bottom out["PluginInfo"]["RegionLeft{}".format(tile)] = left out["PluginInfo"]["RegionRight{}".format(tile)] = right + # Tile config cfg["Tile{}".format(tile)] = new_filename cfg["Tile{}Tile".format(tile)] = new_filename cfg["Tile{}FileName".format(tile)] = new_filename @@ -801,5 +810,5 @@ def _format_tiles( cfg["Tile{}Height".format(tile)] = h_space tile += 1 - + return out, cfg From 41a738bd12efc48aec512a83ee36ffd9b4ddcb3a Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 12 Sep 2022 11:19:28 +0200 Subject: [PATCH 127/169] Cosmetics --- .../modules/deadline/plugins/publish/submit_maya_deadline.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index 45790c40ea..44f2b5b2b4 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -760,6 +760,7 @@ def _format_tiles( assert type(tiles_y) is int, "tiles_y must be an integer" assert type(width) is int, "width must be an integer" assert type(height) is int, "height must be an integer" + out = {"JobInfo": {}, "PluginInfo": {}} cfg = OrderedDict() w_space = width // tiles_x @@ -793,7 +794,6 @@ def _format_tiles( # Plugin Info out["PluginInfo"]["RegionPrefix{}".format(str(tile))] = \ "/{}".format(tile_prefix).join(prefix.rsplit("/", 1)) - out["PluginInfo"]["RegionTop{}".format(tile)] = top out["PluginInfo"]["RegionBottom{}".format(tile)] = bottom out["PluginInfo"]["RegionLeft{}".format(tile)] = left @@ -805,7 +805,6 @@ def _format_tiles( cfg["Tile{}FileName".format(tile)] = new_filename cfg["Tile{}X".format(tile)] = left cfg["Tile{}Y".format(tile)] = top - cfg["Tile{}Width".format(tile)] = w_space cfg["Tile{}Height".format(tile)] = h_space From c6ad515682944690d15532cd446fae2d8c93a570 Mon Sep 17 00:00:00 2001 From: OpenPype Date: Mon, 12 Sep 2022 09:46:17 +0000 Subject: [PATCH 128/169] [Automated] Bump version 
--- CHANGELOG.md | 16 ++++++---------- openpype/version.py | 2 +- 2 files changed, 7 insertions(+), 11 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 0ffb6a996b..cccfc2eded 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,6 @@ # Changelog -## [3.14.2-nightly.4](https://github.com/pypeclub/OpenPype/tree/HEAD) +## [3.14.2-nightly.5](https://github.com/pypeclub/OpenPype/tree/HEAD) [Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.14.1...HEAD) @@ -8,7 +8,6 @@ - Nuke: Build workfile by template [\#3763](https://github.com/pypeclub/OpenPype/pull/3763) - Houdini: Publishing workfiles [\#3697](https://github.com/pypeclub/OpenPype/pull/3697) -- Global: making collect audio plugin global [\#3679](https://github.com/pypeclub/OpenPype/pull/3679) **🚀 Enhancements** @@ -18,6 +17,7 @@ - Photoshop: attempt to speed up ExtractImage [\#3793](https://github.com/pypeclub/OpenPype/pull/3793) - SyncServer: Added cli commands for sync server [\#3765](https://github.com/pypeclub/OpenPype/pull/3765) - Kitsu: Drop 'entities root' setting. [\#3739](https://github.com/pypeclub/OpenPype/pull/3739) +- git: update gitignore [\#3722](https://github.com/pypeclub/OpenPype/pull/3722) **🐛 Bug fixes** @@ -42,6 +42,8 @@ - General: Move queries of asset and representation links [\#3770](https://github.com/pypeclub/OpenPype/pull/3770) - General: Move create project folders to pipeline [\#3768](https://github.com/pypeclub/OpenPype/pull/3768) - General: Create project function moved to client code [\#3766](https://github.com/pypeclub/OpenPype/pull/3766) +- Maya: Refactor submit deadline to use AbstractSubmitDeadline [\#3759](https://github.com/pypeclub/OpenPype/pull/3759) +- General: Change publish template settings location [\#3755](https://github.com/pypeclub/OpenPype/pull/3755) - General: Move hostdirname functionality into host [\#3749](https://github.com/pypeclub/OpenPype/pull/3749) - General: Move publish utils to pipeline [\#3745](https://github.com/pypeclub/OpenPype/pull/3745) - Houdini: Define houdini as addon [\#3735](https://github.com/pypeclub/OpenPype/pull/3735) @@ -65,12 +67,12 @@ **🚀 Enhancements** - General: Thumbnail can use project roots [\#3750](https://github.com/pypeclub/OpenPype/pull/3750) -- git: update gitignore [\#3722](https://github.com/pypeclub/OpenPype/pull/3722) - Settings: Remove settings lock on tray exit [\#3720](https://github.com/pypeclub/OpenPype/pull/3720) - General: Added helper getters to modules manager [\#3712](https://github.com/pypeclub/OpenPype/pull/3712) - Unreal: Define unreal as module and use host class [\#3701](https://github.com/pypeclub/OpenPype/pull/3701) - Settings: Lock settings UI session [\#3700](https://github.com/pypeclub/OpenPype/pull/3700) - General: Benevolent context label collector [\#3686](https://github.com/pypeclub/OpenPype/pull/3686) +- Ftrack: Addiotional component metadata [\#3685](https://github.com/pypeclub/OpenPype/pull/3685) **🐛 Bug fixes** @@ -84,7 +86,7 @@ - Settings: Fix project overrides save [\#3708](https://github.com/pypeclub/OpenPype/pull/3708) - Workfiles tool: Fix published workfile filtering [\#3704](https://github.com/pypeclub/OpenPype/pull/3704) - PS, AE: Provide default variant value for workfile subset [\#3703](https://github.com/pypeclub/OpenPype/pull/3703) -- Webpublisher: added check for empty context [\#3682](https://github.com/pypeclub/OpenPype/pull/3682) +- Flame: retime is working on clip publishing [\#3684](https://github.com/pypeclub/OpenPype/pull/3684) **🔀 Refactored code** @@ -117,15 +119,9 @@ [Full 
Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.14.0-nightly.1...3.14.0) -**🚀 Enhancements** - -- Ftrack: Addiotional component metadata [\#3685](https://github.com/pypeclub/OpenPype/pull/3685) -- Ftrack: Set task status on farm publishing [\#3680](https://github.com/pypeclub/OpenPype/pull/3680) - **🐛 Bug fixes** - General: Switch from hero version to versioned works [\#3691](https://github.com/pypeclub/OpenPype/pull/3691) -- Flame: retime is working on clip publishing [\#3684](https://github.com/pypeclub/OpenPype/pull/3684) ## [3.13.0](https://github.com/pypeclub/OpenPype/tree/3.13.0) (2022-08-09) diff --git a/openpype/version.py b/openpype/version.py index 142bd51a30..c5dc4ee581 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.14.2-nightly.4" +__version__ = "3.14.2-nightly.5" From 162370e1ad1291cbbf3eca65266c226ccd119aca Mon Sep 17 00:00:00 2001 From: OpenPype Date: Mon, 12 Sep 2022 09:56:59 +0000 Subject: [PATCH 129/169] [Automated] Release --- CHANGELOG.md | 15 +++++++++------ openpype/version.py | 2 +- 2 files changed, 10 insertions(+), 7 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index cccfc2eded..46bf56f5bd 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,8 +1,8 @@ # Changelog -## [3.14.2-nightly.5](https://github.com/pypeclub/OpenPype/tree/HEAD) +## [3.14.2](https://github.com/pypeclub/OpenPype/tree/3.14.2) (2022-09-12) -[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.14.1...HEAD) +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.14.1...3.14.2) **🆕 New features** @@ -45,10 +45,11 @@ - Maya: Refactor submit deadline to use AbstractSubmitDeadline [\#3759](https://github.com/pypeclub/OpenPype/pull/3759) - General: Change publish template settings location [\#3755](https://github.com/pypeclub/OpenPype/pull/3755) - General: Move hostdirname functionality into host [\#3749](https://github.com/pypeclub/OpenPype/pull/3749) -- General: Move publish utils to pipeline [\#3745](https://github.com/pypeclub/OpenPype/pull/3745) +- Photoshop: Defined photoshop as addon [\#3736](https://github.com/pypeclub/OpenPype/pull/3736) - Houdini: Define houdini as addon [\#3735](https://github.com/pypeclub/OpenPype/pull/3735) - Fusion: Defined fusion as addon [\#3733](https://github.com/pypeclub/OpenPype/pull/3733) - Flame: Defined flame as addon [\#3732](https://github.com/pypeclub/OpenPype/pull/3732) +- Blender: Define blender as module [\#3729](https://github.com/pypeclub/OpenPype/pull/3729) - Resolve: Define resolve as addon [\#3727](https://github.com/pypeclub/OpenPype/pull/3727) **Merged pull requests:** @@ -72,7 +73,6 @@ - Unreal: Define unreal as module and use host class [\#3701](https://github.com/pypeclub/OpenPype/pull/3701) - Settings: Lock settings UI session [\#3700](https://github.com/pypeclub/OpenPype/pull/3700) - General: Benevolent context label collector [\#3686](https://github.com/pypeclub/OpenPype/pull/3686) -- Ftrack: Addiotional component metadata [\#3685](https://github.com/pypeclub/OpenPype/pull/3685) **🐛 Bug fixes** @@ -91,13 +91,12 @@ **🔀 Refactored code** - General: Move delivery logic to pipeline [\#3751](https://github.com/pypeclub/OpenPype/pull/3751) +- General: Move publish utils to pipeline [\#3745](https://github.com/pypeclub/OpenPype/pull/3745) - General: Host addons cleanup [\#3744](https://github.com/pypeclub/OpenPype/pull/3744) - Webpublisher: Webpublisher is used as addon 
[\#3740](https://github.com/pypeclub/OpenPype/pull/3740) -- Photoshop: Defined photoshop as addon [\#3736](https://github.com/pypeclub/OpenPype/pull/3736) - Harmony: Defined harmony as addon [\#3734](https://github.com/pypeclub/OpenPype/pull/3734) - General: Module interfaces cleanup [\#3731](https://github.com/pypeclub/OpenPype/pull/3731) - AfterEffects: Move AE functions from general lib [\#3730](https://github.com/pypeclub/OpenPype/pull/3730) -- Blender: Define blender as module [\#3729](https://github.com/pypeclub/OpenPype/pull/3729) - AfterEffects: Define AfterEffects as module [\#3728](https://github.com/pypeclub/OpenPype/pull/3728) - General: Replace PypeLogger with Logger [\#3725](https://github.com/pypeclub/OpenPype/pull/3725) - Nuke: Define nuke as module [\#3724](https://github.com/pypeclub/OpenPype/pull/3724) @@ -119,6 +118,10 @@ [Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.14.0-nightly.1...3.14.0) +**🚀 Enhancements** + +- Ftrack: Addiotional component metadata [\#3685](https://github.com/pypeclub/OpenPype/pull/3685) + **🐛 Bug fixes** - General: Switch from hero version to versioned works [\#3691](https://github.com/pypeclub/OpenPype/pull/3691) diff --git a/openpype/version.py b/openpype/version.py index c5dc4ee581..8469b1712a 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.14.2-nightly.5" +__version__ = "3.14.2" From 6e2ffc1e5ceb134f17fcedd1646f2cec0014a43a Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 13 Sep 2022 10:33:41 +0200 Subject: [PATCH 130/169] Remove getting project name and settings twice --- openpype/hosts/maya/api/pipeline.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/openpype/hosts/maya/api/pipeline.py b/openpype/hosts/maya/api/pipeline.py index acd8a55aa4..45c52cd0d5 100644 --- a/openpype/hosts/maya/api/pipeline.py +++ b/openpype/hosts/maya/api/pipeline.py @@ -66,8 +66,6 @@ class MayaHost(HostBase, IWorkfileHost, ILoadHost): project_name = legacy_io.active_project() project_settings = get_project_settings(project_name) # process path mapping - project_name = legacy_io.active_project() - project_settings = get_project_settings(project_name) dirmap_processor = MayaDirmap("maya", project_name, project_settings) dirmap_processor.process_dirmap() From b14ab9f2aff93fce85cec7b6403183a0b7dcf511 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 13 Sep 2022 12:31:44 +0200 Subject: [PATCH 131/169] added publisher to host tools --- openpype/tools/utils/host_tools.py | 37 ++++++++++++++++++++++++++++++ 1 file changed, 37 insertions(+) diff --git a/openpype/tools/utils/host_tools.py b/openpype/tools/utils/host_tools.py index 52d15a59f7..3177ed35aa 100644 --- a/openpype/tools/utils/host_tools.py +++ b/openpype/tools/utils/host_tools.py @@ -32,6 +32,7 @@ class HostToolsHelper: self._workfiles_tool = None self._loader_tool = None self._creator_tool = None + self._publisher_tool = None self._subset_manager_tool = None self._scene_inventory_tool = None self._library_loader_tool = None @@ -205,6 +206,7 @@ class HostToolsHelper: pyblish_show = self._discover_pyblish_gui() return pyblish_show(parent) + def _discover_pyblish_gui(self): """Return the most desirable of the currently registered GUIs""" # Prefer last registered @@ -269,6 +271,30 @@ class HostToolsHelper: dialog.activateWindow() dialog.showNormal() + def get_publisher_tool(self, parent): + """Create, cache and return scene inventory tool window.""" + if 
self._scene_inventory_tool is None: + from openpype.tools.publisher import PublisherWindow + + host = registered_host() + ILoadHost.validate_load_methods(host) + + publisher_window = PublisherWindow( + parent=parent or self._parent + ) + self._publisher_tool = publisher_window + + return self._publisher_tool + + def show_publisher_tool(self, parent=None): + with qt_app_context(): + dialog = self.get_publisher_tool(parent) + + dialog.show() + dialog.raise_() + dialog.activateWindow() + dialog.showNormal() + def get_tool_by_name(self, tool_name, parent=None, *args, **kwargs): """Show tool by it's name. @@ -298,6 +324,10 @@ class HostToolsHelper: elif tool_name == "publish": self.log.info("Can't return publish tool window.") + # "new" publisher + elif tool_name == "publisher": + return self.get_publisher_tool(parent, *args, **kwargs) + elif tool_name == "experimental_tools": return self.get_experimental_tools_dialog(parent, *args, **kwargs) @@ -335,6 +365,9 @@ class HostToolsHelper: elif tool_name == "publish": self.show_publish(parent, *args, **kwargs) + elif tool_name == "publisher": + self.show_publisher_tool(parent, *args, **kwargs) + elif tool_name == "experimental_tools": self.show_experimental_tools_dialog(parent, *args, **kwargs) @@ -414,6 +447,10 @@ def show_publish(parent=None): _SingletonPoint.show_tool_by_name("publish", parent) +def show_publisher(parent=None): + _SingletonPoint.show_tool_by_name("publisher", parent) + + def show_experimental_tools_dialog(parent=None): _SingletonPoint.show_tool_by_name("experimental_tools", parent) From 7ad8aa34db533c97de270d520874a307caa93fe4 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 13 Sep 2022 12:38:36 +0200 Subject: [PATCH 132/169] fix variable usage --- openpype/tools/utils/host_tools.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/tools/utils/host_tools.py b/openpype/tools/utils/host_tools.py index 3177ed35aa..f7e6d330ed 100644 --- a/openpype/tools/utils/host_tools.py +++ b/openpype/tools/utils/host_tools.py @@ -273,7 +273,8 @@ class HostToolsHelper: def get_publisher_tool(self, parent): """Create, cache and return scene inventory tool window.""" - if self._scene_inventory_tool is None: + + if self._publisher_tool is None: from openpype.tools.publisher import PublisherWindow host = registered_host() From a440a92838772c967f1bb844534153fe9814f4fc Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Tue, 13 Sep 2022 13:34:37 +0200 Subject: [PATCH 133/169] Fix docstring Co-authored-by: Roy Nieterau --- openpype/tools/utils/host_tools.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/tools/utils/host_tools.py b/openpype/tools/utils/host_tools.py index f7e6d330ed..7208e0a500 100644 --- a/openpype/tools/utils/host_tools.py +++ b/openpype/tools/utils/host_tools.py @@ -272,7 +272,7 @@ class HostToolsHelper: dialog.showNormal() def get_publisher_tool(self, parent): - """Create, cache and return scene inventory tool window.""" + """Create, cache and return publisher window.""" if self._publisher_tool is None: from openpype.tools.publisher import PublisherWindow From 9e5e5d59210a82d6c171f3871834955d326b2a0b Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 13 Sep 2022 13:35:38 +0200 Subject: [PATCH 134/169] remove unnecessary lines --- openpype/tools/utils/host_tools.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/openpype/tools/utils/host_tools.py b/openpype/tools/utils/host_tools.py index 7208e0a500..d2f05d3302 100644 --- 
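As a usage note for the publisher registration above: once a host has these helpers available, opening the new publisher is a one-liner. A minimal sketch, assuming the call happens inside a running host session with a Qt application (the `parent` widget is optional and host-specific):

from openpype.tools.utils import host_tools

# Added in this patch; the helper routes through the shared HostToolsHelper
# instance, so repeated calls reuse the cached publisher window.
host_tools.show_publisher(parent=None)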
a/openpype/tools/utils/host_tools.py +++ b/openpype/tools/utils/host_tools.py @@ -194,7 +194,6 @@ class HostToolsHelper: library_loader_tool.showNormal() library_loader_tool.refresh() - def show_publish(self, parent=None): """Try showing the most desirable publish GUI @@ -206,7 +205,6 @@ class HostToolsHelper: pyblish_show = self._discover_pyblish_gui() return pyblish_show(parent) - def _discover_pyblish_gui(self): """Return the most desirable of the currently registered GUIs""" # Prefer last registered From ecee2d2be5d33c4014effa836f620114cfc1bf9a Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 13 Sep 2022 14:02:11 +0200 Subject: [PATCH 135/169] implemented 'check_ftrack_url' in ftrack module --- openpype/modules/ftrack/__init__.py | 8 ++- openpype/modules/ftrack/ftrack_module.py | 69 ++++++++++++++++++++---- 2 files changed, 64 insertions(+), 13 deletions(-) diff --git a/openpype/modules/ftrack/__init__.py b/openpype/modules/ftrack/__init__.py index 7261254c6f..6dc67b74b9 100644 --- a/openpype/modules/ftrack/__init__.py +++ b/openpype/modules/ftrack/__init__.py @@ -1,9 +1,13 @@ from .ftrack_module import ( FtrackModule, - FTRACK_MODULE_DIR + FTRACK_MODULE_DIR, + + check_ftrack_url, ) __all__ = ( "FtrackModule", - "FTRACK_MODULE_DIR" + "FTRACK_MODULE_DIR", + + "check_ftrack_url", ) diff --git a/openpype/modules/ftrack/ftrack_module.py b/openpype/modules/ftrack/ftrack_module.py index cb4f204523..e00f9d89c6 100644 --- a/openpype/modules/ftrack/ftrack_module.py +++ b/openpype/modules/ftrack/ftrack_module.py @@ -12,8 +12,10 @@ from openpype_interfaces import ( ISettingsChangeListener ) from openpype.settings import SaveWarningExc +from openpype.lib import Logger FTRACK_MODULE_DIR = os.path.dirname(os.path.abspath(__file__)) +_PLACEHOLDER = object() class FtrackModule( @@ -28,17 +30,8 @@ class FtrackModule( ftrack_settings = settings[self.name] self.enabled = ftrack_settings["enabled"] - # Add http schema - ftrack_url = ftrack_settings["ftrack_server"].strip("/ ") - if ftrack_url: - if "http" not in ftrack_url: - ftrack_url = "https://" + ftrack_url - - # Check if "ftrack.app" is part os url - if "ftrackapp.com" not in ftrack_url: - ftrack_url = ftrack_url + ".ftrackapp.com" - - self.ftrack_url = ftrack_url + self._settings_ftrack_url = ftrack_settings["ftrack_server"] + self._ftrack_url = _PLACEHOLDER current_dir = os.path.dirname(os.path.abspath(__file__)) low_platform = platform.system().lower() @@ -70,6 +63,16 @@ class FtrackModule( self.timers_manager_connector = None self._timers_manager_module = None + def get_ftrack_url(self): + if self._ftrack_url is _PLACEHOLDER: + self._ftrack_url = check_ftrack_url( + self._settings_ftrack_url, + logger=self.log + ) + return self._ftrack_url + + ftrack_url = property(get_ftrack_url) + def get_global_environments(self): """Ftrack's global environments.""" return { @@ -479,6 +482,50 @@ class FtrackModule( click_group.add_command(cli_main) +def _check_ftrack_url(url): + import requests + + try: + result = requests.get(url, allow_redirects=False) + except requests.exceptions.RequestException: + return False + + if (result.status_code != 200 or "FTRACK_VERSION" not in result.headers): + return False + return True + + +def check_ftrack_url(url, log_errors=True, logger=None): + """Checks if Ftrack server is responding""" + + if logger is None: + logger = Logger.get_logger(__name__) + + url = url.strip("/ ") + if not url: + logger.error("Ftrack URL is not set!") + return None + + if not url.startswith("http"): + url = "https://" + url + + ftrack_url 
= None + if not url.endswith("ftrackapp.com"): + ftrackapp_url = url + ".ftrackapp.com" + if _check_ftrack_url(ftrackapp_url): + ftrack_url = ftrackapp_url + + if not ftrack_url and _check_ftrack_url(url): + ftrack_url = url + + if ftrack_url: + logger.debug("Ftrack server \"{}\" is accessible.".format(ftrack_url)) + elif log_errors: + logger.error("Entered Ftrack URL \"{}\" is not accesible!".format(url)) + + return ftrack_url + + @click.group(FtrackModule.name, help="Ftrack module related commands.") def cli_main(): pass From b29f26b28cb9350c0460b8bd8b89a8bfcbf0c7cd Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 13 Sep 2022 14:02:42 +0200 Subject: [PATCH 136/169] changed imports in ftrack tray --- openpype/modules/ftrack/tray/ftrack_tray.py | 40 +++++++++------------ 1 file changed, 17 insertions(+), 23 deletions(-) diff --git a/openpype/modules/ftrack/tray/ftrack_tray.py b/openpype/modules/ftrack/tray/ftrack_tray.py index 501d837a4c..a6a87b8ef9 100644 --- a/openpype/modules/ftrack/tray/ftrack_tray.py +++ b/openpype/modules/ftrack/tray/ftrack_tray.py @@ -6,22 +6,18 @@ import threading from Qt import QtCore, QtWidgets, QtGui import ftrack_api -from ..ftrack_server.lib import check_ftrack_url -from ..ftrack_server import socket_thread -from ..lib import credentials -from ..ftrack_module import FTRACK_MODULE_DIR -from . import login_dialog - from openpype import resources from openpype.lib import Logger - - -log = Logger.get_logger("FtrackModule") +from openpype_modules.ftrack import check_ftrack_url, FTRACK_MODULE_DIR +from openpype_modules.ftrack.ftrack_server import socket_thread +from openpype_modules.ftrack.lib import credentials +from . import login_dialog class FtrackTrayWrapper: def __init__(self, module): self.module = module + self.log = Logger.get_logger(self.__class__.__name__) self.thread_action_server = None self.thread_socket_server = None @@ -62,19 +58,19 @@ class FtrackTrayWrapper: if validation: self.widget_login.set_credentials(ft_user, ft_api_key) self.module.set_credentials_to_env(ft_user, ft_api_key) - log.info("Connected to Ftrack successfully") + self.log.info("Connected to Ftrack successfully") self.on_login_change() return validation if not validation and ft_user and ft_api_key: - log.warning( + self.log.warning( "Current Ftrack credentials are not valid. 
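To make the behaviour of the URL check above easier to follow, here is a sketch of just the normalization step, without the network round-trip (the helper name and the example server names are illustrative). The real function then issues a GET per candidate and accepts the first response that carries an `FTRACK_VERSION` header.

def candidate_ftrack_urls(url):
    """Return the URLs the check above would try, in order."""
    url = url.strip("/ ")
    if not url:
        return []
    if not url.startswith("http"):
        url = "https://" + url

    candidates = []
    if not url.endswith("ftrackapp.com"):
        # Cloud-hosted convenience: "mystudio" -> "https://mystudio.ftrackapp.com"
        candidates.append(url + ".ftrackapp.com")
    # Fall back to the address exactly as entered (e.g. self-hosted servers).
    candidates.append(url)
    return candidates


print(candidate_ftrack_urls("mystudio"))
# ['https://mystudio.ftrackapp.com', 'https://mystudio']
print(candidate_ftrack_urls("https://ftrack.example.com"))
# ['https://ftrack.example.com.ftrackapp.com', 'https://ftrack.example.com']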
{}: {} - {}".format( str(os.environ.get("FTRACK_SERVER")), ft_user, ft_api_key ) ) - log.info("Please sign in to Ftrack") + self.log.info("Please sign in to Ftrack") self.bool_logged = False self.show_login_widget() self.set_menu_visibility() @@ -104,7 +100,7 @@ class FtrackTrayWrapper: self.action_credentials.setIcon(self.icon_not_logged) self.action_credentials.setToolTip("Logged out") - log.info("Logged out of Ftrack") + self.log.info("Logged out of Ftrack") self.bool_logged = False self.set_menu_visibility() @@ -126,10 +122,6 @@ class FtrackTrayWrapper: ftrack_url = self.module.ftrack_url os.environ["FTRACK_SERVER"] = ftrack_url - parent_file_path = os.path.dirname( - os.path.dirname(os.path.realpath(__file__)) - ) - min_fail_seconds = 5 max_fail_count = 3 wait_time_after_max_fail = 10 @@ -154,7 +146,7 @@ class FtrackTrayWrapper: # Main loop while True: if not self.bool_action_server_running: - log.debug("Action server was pushed to stop.") + self.log.debug("Action server was pushed to stop.") break # Check if accessible Ftrack and Mongo url @@ -164,7 +156,9 @@ class FtrackTrayWrapper: # Run threads only if Ftrack is accessible if not ftrack_accessible: if not printed_ftrack_error: - log.warning("Can't access Ftrack {}".format(ftrack_url)) + self.log.warning( + "Can't access Ftrack {}".format(ftrack_url) + ) if self.thread_socket_server is not None: self.thread_socket_server.stop() @@ -191,7 +185,7 @@ class FtrackTrayWrapper: self.set_menu_visibility() elif failed_count == max_fail_count: - log.warning(( + self.log.warning(( "Action server failed {} times." " I'll try to run again {}s later" ).format( @@ -243,10 +237,10 @@ class FtrackTrayWrapper: self.thread_action_server.join() self.thread_action_server = None - log.info("Ftrack action server was forced to stop") + self.log.info("Ftrack action server was forced to stop") except Exception: - log.warning( + self.log.warning( "Error has happened during Killing action server", exc_info=True ) @@ -343,7 +337,7 @@ class FtrackTrayWrapper: self.thread_timer = None except Exception as e: - log.error("During Killing Timer event server: {0}".format(e)) + self.log.error("During Killing Timer event server: {0}".format(e)) def changed_user(self): self.stop_action_server() From 21e050a8f18a272da3e200405550be6570e9f3d9 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 13 Sep 2022 14:05:25 +0200 Subject: [PATCH 137/169] use new import of 'check_ftrack_url' --- openpype/modules/ftrack/ftrack_module.py | 2 +- .../modules/ftrack/ftrack_server/__init__.py | 2 -- .../ftrack/ftrack_server/event_server_cli.py | 6 ++-- openpype/modules/ftrack/ftrack_server/lib.py | 35 +------------------ openpype/modules/ftrack/lib/avalon_sync.py | 7 ++-- 5 files changed, 8 insertions(+), 44 deletions(-) diff --git a/openpype/modules/ftrack/ftrack_module.py b/openpype/modules/ftrack/ftrack_module.py index e00f9d89c6..899711e33e 100644 --- a/openpype/modules/ftrack/ftrack_module.py +++ b/openpype/modules/ftrack/ftrack_module.py @@ -6,7 +6,7 @@ import platform import click from openpype.modules import OpenPypeModule -from openpype_interfaces import ( +from openpype.modules.interfaces import ( ITrayModule, IPluginPaths, ISettingsChangeListener diff --git a/openpype/modules/ftrack/ftrack_server/__init__.py b/openpype/modules/ftrack/ftrack_server/__init__.py index 9e3920b500..8e5f7c4c51 100644 --- a/openpype/modules/ftrack/ftrack_server/__init__.py +++ b/openpype/modules/ftrack/ftrack_server/__init__.py @@ -1,8 +1,6 @@ from .ftrack_server import FtrackServer -from .lib 
import check_ftrack_url __all__ = ( "FtrackServer", - "check_ftrack_url" ) diff --git a/openpype/modules/ftrack/ftrack_server/event_server_cli.py b/openpype/modules/ftrack/ftrack_server/event_server_cli.py index 3ef7c8270a..2848469bc3 100644 --- a/openpype/modules/ftrack/ftrack_server/event_server_cli.py +++ b/openpype/modules/ftrack/ftrack_server/event_server_cli.py @@ -20,9 +20,11 @@ from openpype.lib import ( get_openpype_version, get_build_version, ) -from openpype_modules.ftrack import FTRACK_MODULE_DIR +from openpype_modules.ftrack import ( + FTRACK_MODULE_DIR, + check_ftrack_url, +) from openpype_modules.ftrack.lib import credentials -from openpype_modules.ftrack.ftrack_server.lib import check_ftrack_url from openpype_modules.ftrack.ftrack_server import socket_thread diff --git a/openpype/modules/ftrack/ftrack_server/lib.py b/openpype/modules/ftrack/ftrack_server/lib.py index 947dacf917..c8143f739c 100644 --- a/openpype/modules/ftrack/ftrack_server/lib.py +++ b/openpype/modules/ftrack/ftrack_server/lib.py @@ -26,45 +26,12 @@ except ImportError: from openpype_modules.ftrack.lib import get_ftrack_event_mongo_info from openpype.client import OpenPypeMongoConnection -from openpype.api import Logger +from openpype.lib import Logger TOPIC_STATUS_SERVER = "openpype.event.server.status" TOPIC_STATUS_SERVER_RESULT = "openpype.event.server.status.result" -def check_ftrack_url(url, log_errors=True, logger=None): - """Checks if Ftrack server is responding""" - if logger is None: - logger = Logger.get_logger(__name__) - - if not url: - logger.error("Ftrack URL is not set!") - return None - - url = url.strip('/ ') - - if 'http' not in url: - if url.endswith('ftrackapp.com'): - url = 'https://' + url - else: - url = 'https://{0}.ftrackapp.com'.format(url) - try: - result = requests.get(url, allow_redirects=False) - except requests.exceptions.RequestException: - if log_errors: - logger.error("Entered Ftrack URL is not accesible!") - return False - - if (result.status_code != 200 or 'FTRACK_VERSION' not in result.headers): - if log_errors: - logger.error("Entered Ftrack URL is not accesible!") - return False - - logger.debug("Ftrack server {} is accessible.".format(url)) - - return url - - class SocketBaseEventHub(ftrack_api.event.hub.EventHub): hearbeat_msg = b"hearbeat" diff --git a/openpype/modules/ftrack/lib/avalon_sync.py b/openpype/modules/ftrack/lib/avalon_sync.py index 72be6a8e9a..935d1e85c9 100644 --- a/openpype/modules/ftrack/lib/avalon_sync.py +++ b/openpype/modules/ftrack/lib/avalon_sync.py @@ -19,11 +19,8 @@ from openpype.client.operations import ( CURRENT_PROJECT_SCHEMA, CURRENT_PROJECT_CONFIG_SCHEMA, ) -from openpype.api import ( - Logger, - get_anatomy_settings -) -from openpype.lib import ApplicationManager +from openpype.settings import get_anatomy_settings +from openpype.lib import ApplicationManager, Logger from openpype.pipeline import AvalonMongoDB, schema from .constants import CUST_ATTR_ID_KEY, FPS_KEYS From 98f1312ce999e4be72a1a90bce99c75be967cdfd Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Tue, 13 Sep 2022 15:06:31 +0200 Subject: [PATCH 138/169] Modify log message Co-authored-by: Roy Nieterau --- openpype/modules/ftrack/ftrack_module.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/modules/ftrack/ftrack_module.py b/openpype/modules/ftrack/ftrack_module.py index 899711e33e..2ab0eb8239 100644 --- a/openpype/modules/ftrack/ftrack_module.py +++ b/openpype/modules/ftrack/ftrack_module.py @@ -521,7 
+521,7 @@ def check_ftrack_url(url, log_errors=True, logger=None): if ftrack_url: logger.debug("Ftrack server \"{}\" is accessible.".format(ftrack_url)) elif log_errors: - logger.error("Entered Ftrack URL \"{}\" is not accesible!".format(url)) + logger.error("Entered Ftrack URL \"{}\" is not accessible!".format(url)) return ftrack_url From aead601397e0ebecfafb6da62570c0585f627018 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 13 Sep 2022 15:11:20 +0200 Subject: [PATCH 139/169] unify messages --- openpype/modules/ftrack/ftrack_module.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/modules/ftrack/ftrack_module.py b/openpype/modules/ftrack/ftrack_module.py index 2ab0eb8239..e79910372f 100644 --- a/openpype/modules/ftrack/ftrack_module.py +++ b/openpype/modules/ftrack/ftrack_module.py @@ -520,8 +520,9 @@ def check_ftrack_url(url, log_errors=True, logger=None): if ftrack_url: logger.debug("Ftrack server \"{}\" is accessible.".format(ftrack_url)) + elif log_errors: - logger.error("Entered Ftrack URL \"{}\" is not accessible!".format(url)) + logger.error("Ftrack server \"{}\" is not accessible!".format(url)) return ftrack_url From 0291d2a7054b6b551fc8e5dc1092a87a026838d8 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 13 Sep 2022 15:15:18 +0200 Subject: [PATCH 140/169] renamed 'check_ftrack_url' to 'resolve_ftrack_url' --- openpype/modules/ftrack/__init__.py | 4 ++-- openpype/modules/ftrack/ftrack_module.py | 6 +++--- openpype/modules/ftrack/ftrack_server/event_server_cli.py | 8 ++++---- openpype/modules/ftrack/tray/ftrack_tray.py | 4 ++-- 4 files changed, 11 insertions(+), 11 deletions(-) diff --git a/openpype/modules/ftrack/__init__.py b/openpype/modules/ftrack/__init__.py index 6dc67b74b9..e520f08337 100644 --- a/openpype/modules/ftrack/__init__.py +++ b/openpype/modules/ftrack/__init__.py @@ -2,12 +2,12 @@ from .ftrack_module import ( FtrackModule, FTRACK_MODULE_DIR, - check_ftrack_url, + resolve_ftrack_url, ) __all__ = ( "FtrackModule", "FTRACK_MODULE_DIR", - "check_ftrack_url", + "resolve_ftrack_url", ) diff --git a/openpype/modules/ftrack/ftrack_module.py b/openpype/modules/ftrack/ftrack_module.py index e79910372f..05ea7b79d1 100644 --- a/openpype/modules/ftrack/ftrack_module.py +++ b/openpype/modules/ftrack/ftrack_module.py @@ -65,7 +65,7 @@ class FtrackModule( def get_ftrack_url(self): if self._ftrack_url is _PLACEHOLDER: - self._ftrack_url = check_ftrack_url( + self._ftrack_url = resolve_ftrack_url( self._settings_ftrack_url, logger=self.log ) @@ -495,8 +495,8 @@ def _check_ftrack_url(url): return True -def check_ftrack_url(url, log_errors=True, logger=None): - """Checks if Ftrack server is responding""" +def resolve_ftrack_url(url, log_errors=True, logger=None): + """Checks if Ftrack server is responding.""" if logger is None: logger = Logger.get_logger(__name__) diff --git a/openpype/modules/ftrack/ftrack_server/event_server_cli.py b/openpype/modules/ftrack/ftrack_server/event_server_cli.py index 2848469bc3..20c5ab24a8 100644 --- a/openpype/modules/ftrack/ftrack_server/event_server_cli.py +++ b/openpype/modules/ftrack/ftrack_server/event_server_cli.py @@ -22,7 +22,7 @@ from openpype.lib import ( ) from openpype_modules.ftrack import ( FTRACK_MODULE_DIR, - check_ftrack_url, + resolve_ftrack_url, ) from openpype_modules.ftrack.lib import credentials from openpype_modules.ftrack.ftrack_server import socket_thread @@ -116,7 +116,7 @@ def legacy_server(ftrack_url): while True: if not ftrack_accessible: - ftrack_accessible = 
check_ftrack_url(ftrack_url) + ftrack_accessible = resolve_ftrack_url(ftrack_url) # Run threads only if Ftrack is accessible if not ftrack_accessible and not printed_ftrack_error: @@ -259,7 +259,7 @@ def main_loop(ftrack_url): while True: # Check if accessible Ftrack and Mongo url if not ftrack_accessible: - ftrack_accessible = check_ftrack_url(ftrack_url) + ftrack_accessible = resolve_ftrack_url(ftrack_url) if not mongo_accessible: mongo_accessible = check_mongo_url(mongo_uri) @@ -443,7 +443,7 @@ def run_event_server( os.environ["CLOCKIFY_API_KEY"] = clockify_api_key # Check url regex and accessibility - ftrack_url = check_ftrack_url(ftrack_url) + ftrack_url = resolve_ftrack_url(ftrack_url) if not ftrack_url: print('Exiting! < Please enter Ftrack server url >') return 1 diff --git a/openpype/modules/ftrack/tray/ftrack_tray.py b/openpype/modules/ftrack/tray/ftrack_tray.py index a6a87b8ef9..e3c6e30ead 100644 --- a/openpype/modules/ftrack/tray/ftrack_tray.py +++ b/openpype/modules/ftrack/tray/ftrack_tray.py @@ -8,7 +8,7 @@ from Qt import QtCore, QtWidgets, QtGui import ftrack_api from openpype import resources from openpype.lib import Logger -from openpype_modules.ftrack import check_ftrack_url, FTRACK_MODULE_DIR +from openpype_modules.ftrack import resolve_ftrack_url, FTRACK_MODULE_DIR from openpype_modules.ftrack.ftrack_server import socket_thread from openpype_modules.ftrack.lib import credentials from . import login_dialog @@ -151,7 +151,7 @@ class FtrackTrayWrapper: # Check if accessible Ftrack and Mongo url if not ftrack_accessible: - ftrack_accessible = check_ftrack_url(ftrack_url) + ftrack_accessible = resolve_ftrack_url(ftrack_url) # Run threads only if Ftrack is accessible if not ftrack_accessible: From 09519c25a804186f9cc4afb92131d0572211f712 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 13 Sep 2022 15:17:25 +0200 Subject: [PATCH 141/169] removed unused argument 'log_errors' --- openpype/modules/ftrack/ftrack_module.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/modules/ftrack/ftrack_module.py b/openpype/modules/ftrack/ftrack_module.py index 05ea7b79d1..68575009b2 100644 --- a/openpype/modules/ftrack/ftrack_module.py +++ b/openpype/modules/ftrack/ftrack_module.py @@ -495,7 +495,7 @@ def _check_ftrack_url(url): return True -def resolve_ftrack_url(url, log_errors=True, logger=None): +def resolve_ftrack_url(url, logger=None): """Checks if Ftrack server is responding.""" if logger is None: @@ -521,7 +521,7 @@ def resolve_ftrack_url(url, log_errors=True, logger=None): if ftrack_url: logger.debug("Ftrack server \"{}\" is accessible.".format(ftrack_url)) - elif log_errors: + else: logger.error("Ftrack server \"{}\" is not accessible!".format(url)) return ftrack_url From 477266f1407e84e2ba9d086107e15e8fc5173e79 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 13 Sep 2022 15:18:56 +0200 Subject: [PATCH 142/169] better variable name for ftrack url value check --- openpype/modules/ftrack/ftrack_module.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/modules/ftrack/ftrack_module.py b/openpype/modules/ftrack/ftrack_module.py index 68575009b2..75ffd7f864 100644 --- a/openpype/modules/ftrack/ftrack_module.py +++ b/openpype/modules/ftrack/ftrack_module.py @@ -15,7 +15,7 @@ from openpype.settings import SaveWarningExc from openpype.lib import Logger FTRACK_MODULE_DIR = os.path.dirname(os.path.abspath(__file__)) -_PLACEHOLDER = object() +_URL_NOT_SET = object() class FtrackModule( @@ -31,7 +31,7 @@ class 
FtrackModule( self.enabled = ftrack_settings["enabled"] self._settings_ftrack_url = ftrack_settings["ftrack_server"] - self._ftrack_url = _PLACEHOLDER + self._ftrack_url = _URL_NOT_SET current_dir = os.path.dirname(os.path.abspath(__file__)) low_platform = platform.system().lower() @@ -64,7 +64,7 @@ class FtrackModule( self._timers_manager_module = None def get_ftrack_url(self): - if self._ftrack_url is _PLACEHOLDER: + if self._ftrack_url is _URL_NOT_SET: self._ftrack_url = resolve_ftrack_url( self._settings_ftrack_url, logger=self.log From 778e0b2e491f5948f2932968a70f8f620204fb01 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 13 Sep 2022 15:44:20 +0200 Subject: [PATCH 143/169] Perform case-insensitive lookup --- .../ftrack/plugins/publish/integrate_ftrack_instances.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py index 7e5815b100..5d39e12985 100644 --- a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py +++ b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py @@ -74,11 +74,14 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): version_number = int(instance_version) family = instance.data["family"] - family_low = family.lower() + # Perform case-insensitive family mapping + family_low = family.lower() asset_type = instance.data.get("ftrackFamily") - if not asset_type and family_low in self.family_mapping: - asset_type = self.family_mapping[family_low] + if not asset_type: + for map_family, map_value in self.family_mapping.items(): + if map_family.lower() == family_low: + asset_type = map_value if not asset_type: asset_type = "upload" From 4466d8a94249ad66546730b7135e34003f4aa4f8 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 13 Sep 2022 15:45:11 +0200 Subject: [PATCH 144/169] Remove redundant logic since just above it's forced to be "upload" when `not asset_type` --- .../ftrack/plugins/publish/integrate_ftrack_instances.py | 9 --------- 1 file changed, 9 deletions(-) diff --git a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py index 5d39e12985..a35dbf71d4 100644 --- a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py +++ b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py @@ -89,15 +89,6 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): self.log.debug( "Family: {}\nMapping: {}".format(family_low, self.family_mapping) ) - - # Ignore this instance if neither "ftrackFamily" or a family mapping is - # found. 
- if not asset_type: - self.log.info(( - "Family \"{}\" does not match any asset type mapping" - ).format(family)) - return - status_name = self._get_asset_version_status_name(instance) # Base of component item data From 1bc37ace465f647b6af35a4a2b8cf2832bd94925 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 13 Sep 2022 15:45:48 +0200 Subject: [PATCH 145/169] Actually break loop early on detected mapping --- .../modules/ftrack/plugins/publish/integrate_ftrack_instances.py | 1 + 1 file changed, 1 insertion(+) diff --git a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py index a35dbf71d4..5ff75e7060 100644 --- a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py +++ b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py @@ -82,6 +82,7 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): for map_family, map_value in self.family_mapping.items(): if map_family.lower() == family_low: asset_type = map_value + break if not asset_type: asset_type = "upload" From b3bb5f8612ccb623f5da34e4270e8db9bcdb8a7e Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 13 Sep 2022 23:09:17 +0200 Subject: [PATCH 146/169] Always increment current file - similar to Maya --- .../plugins/publish/increment_current_file.py | 29 +++------------ .../increment_current_file_deadline.py | 35 ------------------- 2 files changed, 4 insertions(+), 60 deletions(-) delete mode 100644 openpype/hosts/houdini/plugins/publish/increment_current_file_deadline.py diff --git a/openpype/hosts/houdini/plugins/publish/increment_current_file.py b/openpype/hosts/houdini/plugins/publish/increment_current_file.py index 5cb14d732a..c990f481d3 100644 --- a/openpype/hosts/houdini/plugins/publish/increment_current_file.py +++ b/openpype/hosts/houdini/plugins/publish/increment_current_file.py @@ -2,10 +2,9 @@ import pyblish.api from openpype.lib import version_up from openpype.pipeline import registered_host -from openpype.pipeline.publish import get_errored_plugins_from_context -class IncrementCurrentFile(pyblish.api.InstancePlugin): +class IncrementCurrentFile(pyblish.api.ContextPlugin): """Increment the current file. Saves the current scene with an increased version number. @@ -15,30 +14,10 @@ class IncrementCurrentFile(pyblish.api.InstancePlugin): label = "Increment current file" order = pyblish.api.IntegratorOrder + 9.0 hosts = ["houdini"] - families = ["colorbleed.usdrender", "redshift_rop"] - targets = ["local"] + families = ["workfile"] + optional = True - def process(self, instance): - - # This should be a ContextPlugin, but this is a workaround - # for a bug in pyblish to run once for a family: issue #250 - context = instance.context - key = "__hasRun{}".format(self.__class__.__name__) - if context.data.get(key, False): - return - else: - context.data[key] = True - - context = instance.context - errored_plugins = get_errored_plugins_from_context(context) - if any( - plugin.__name__ == "HoudiniSubmitPublishDeadline" - for plugin in errored_plugins - ): - raise RuntimeError( - "Skipping incrementing current file because " - "submission to deadline failed." 
- ) + def process(self, context): # Filename must not have changed since collecting host = registered_host() diff --git a/openpype/hosts/houdini/plugins/publish/increment_current_file_deadline.py b/openpype/hosts/houdini/plugins/publish/increment_current_file_deadline.py deleted file mode 100644 index cb0d7e3680..0000000000 --- a/openpype/hosts/houdini/plugins/publish/increment_current_file_deadline.py +++ /dev/null @@ -1,35 +0,0 @@ -import pyblish.api - -import hou -from openpype.lib import version_up -from openpype.pipeline.publish import get_errored_plugins_from_context - - -class IncrementCurrentFileDeadline(pyblish.api.ContextPlugin): - """Increment the current file. - - Saves the current scene with an increased version number. - - """ - - label = "Increment current file" - order = pyblish.api.IntegratorOrder + 9.0 - hosts = ["houdini"] - targets = ["deadline"] - - def process(self, context): - - errored_plugins = get_errored_plugins_from_context(context) - if any( - plugin.__name__ == "HoudiniSubmitPublishDeadline" - for plugin in errored_plugins - ): - raise RuntimeError( - "Skipping incrementing current file because " - "submission to deadline failed." - ) - - current_filepath = context.data["currentFile"] - new_filepath = version_up(current_filepath) - - hou.hipFile.save(file_name=new_filepath, save_to_recent_files=True) From 48546ef24f6c9b1db77e6094792ba2b2f1f88bc3 Mon Sep 17 00:00:00 2001 From: OpenPype Date: Wed, 14 Sep 2022 04:18:24 +0000 Subject: [PATCH 147/169] [Automated] Bump version --- CHANGELOG.md | 32 ++++++++++++++++++-------------- openpype/version.py | 2 +- 2 files changed, 19 insertions(+), 15 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 46bf56f5bd..7d6b620d58 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,8 +1,22 @@ # Changelog +## [3.14.3-nightly.1](https://github.com/pypeclub/OpenPype/tree/HEAD) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.14.2...HEAD) + +**🚀 Enhancements** + +- Publisher: Add new publisher to host tools [\#3833](https://github.com/pypeclub/OpenPype/pull/3833) +- Maya: Workspace mel loaded from settings [\#3790](https://github.com/pypeclub/OpenPype/pull/3790) + +**🐛 Bug fixes** + +- Ftrack: Url validation does not require ftrackapp [\#3834](https://github.com/pypeclub/OpenPype/pull/3834) +- Maya+Ftrack: Change typo in family name `mayaascii` -\> `mayaAscii` [\#3820](https://github.com/pypeclub/OpenPype/pull/3820) + ## [3.14.2](https://github.com/pypeclub/OpenPype/tree/3.14.2) (2022-09-12) -[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.14.1...3.14.2) +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.14.2-nightly.5...3.14.2) **🆕 New features** @@ -45,11 +59,10 @@ - Maya: Refactor submit deadline to use AbstractSubmitDeadline [\#3759](https://github.com/pypeclub/OpenPype/pull/3759) - General: Change publish template settings location [\#3755](https://github.com/pypeclub/OpenPype/pull/3755) - General: Move hostdirname functionality into host [\#3749](https://github.com/pypeclub/OpenPype/pull/3749) -- Photoshop: Defined photoshop as addon [\#3736](https://github.com/pypeclub/OpenPype/pull/3736) +- General: Move publish utils to pipeline [\#3745](https://github.com/pypeclub/OpenPype/pull/3745) - Houdini: Define houdini as addon [\#3735](https://github.com/pypeclub/OpenPype/pull/3735) - Fusion: Defined fusion as addon [\#3733](https://github.com/pypeclub/OpenPype/pull/3733) - Flame: Defined flame as addon [\#3732](https://github.com/pypeclub/OpenPype/pull/3732) -- Blender: 
Define blender as module [\#3729](https://github.com/pypeclub/OpenPype/pull/3729) - Resolve: Define resolve as addon [\#3727](https://github.com/pypeclub/OpenPype/pull/3727) **Merged pull requests:** @@ -72,7 +85,6 @@ - General: Added helper getters to modules manager [\#3712](https://github.com/pypeclub/OpenPype/pull/3712) - Unreal: Define unreal as module and use host class [\#3701](https://github.com/pypeclub/OpenPype/pull/3701) - Settings: Lock settings UI session [\#3700](https://github.com/pypeclub/OpenPype/pull/3700) -- General: Benevolent context label collector [\#3686](https://github.com/pypeclub/OpenPype/pull/3686) **🐛 Bug fixes** @@ -86,17 +98,17 @@ - Settings: Fix project overrides save [\#3708](https://github.com/pypeclub/OpenPype/pull/3708) - Workfiles tool: Fix published workfile filtering [\#3704](https://github.com/pypeclub/OpenPype/pull/3704) - PS, AE: Provide default variant value for workfile subset [\#3703](https://github.com/pypeclub/OpenPype/pull/3703) -- Flame: retime is working on clip publishing [\#3684](https://github.com/pypeclub/OpenPype/pull/3684) **🔀 Refactored code** - General: Move delivery logic to pipeline [\#3751](https://github.com/pypeclub/OpenPype/pull/3751) -- General: Move publish utils to pipeline [\#3745](https://github.com/pypeclub/OpenPype/pull/3745) - General: Host addons cleanup [\#3744](https://github.com/pypeclub/OpenPype/pull/3744) - Webpublisher: Webpublisher is used as addon [\#3740](https://github.com/pypeclub/OpenPype/pull/3740) +- Photoshop: Defined photoshop as addon [\#3736](https://github.com/pypeclub/OpenPype/pull/3736) - Harmony: Defined harmony as addon [\#3734](https://github.com/pypeclub/OpenPype/pull/3734) - General: Module interfaces cleanup [\#3731](https://github.com/pypeclub/OpenPype/pull/3731) - AfterEffects: Move AE functions from general lib [\#3730](https://github.com/pypeclub/OpenPype/pull/3730) +- Blender: Define blender as module [\#3729](https://github.com/pypeclub/OpenPype/pull/3729) - AfterEffects: Define AfterEffects as module [\#3728](https://github.com/pypeclub/OpenPype/pull/3728) - General: Replace PypeLogger with Logger [\#3725](https://github.com/pypeclub/OpenPype/pull/3725) - Nuke: Define nuke as module [\#3724](https://github.com/pypeclub/OpenPype/pull/3724) @@ -118,14 +130,6 @@ [Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.14.0-nightly.1...3.14.0) -**🚀 Enhancements** - -- Ftrack: Addiotional component metadata [\#3685](https://github.com/pypeclub/OpenPype/pull/3685) - -**🐛 Bug fixes** - -- General: Switch from hero version to versioned works [\#3691](https://github.com/pypeclub/OpenPype/pull/3691) - ## [3.13.0](https://github.com/pypeclub/OpenPype/tree/3.13.0) (2022-08-09) [Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.13.0-nightly.1...3.13.0) diff --git a/openpype/version.py b/openpype/version.py index 8469b1712a..e8a65b04d2 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.14.2" +__version__ = "3.14.3-nightly.1" From d5cb828edc611f9ed9f98b61ae5ae3591d797640 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 14 Sep 2022 11:45:44 +0200 Subject: [PATCH 148/169] Move LoadedInSceneDelegate into Loader tool since it's specific to loader --- openpype/tools/loader/delegates.py | 28 ++++++++++++++++++++++++++++ openpype/tools/loader/widgets.py | 4 ++-- openpype/tools/utils/delegates.py | 27 --------------------------- 3 files changed, 30 insertions(+), 29 
deletions(-) create mode 100644 openpype/tools/loader/delegates.py diff --git a/openpype/tools/loader/delegates.py b/openpype/tools/loader/delegates.py new file mode 100644 index 0000000000..e6663d48f1 --- /dev/null +++ b/openpype/tools/loader/delegates.py @@ -0,0 +1,28 @@ +from Qt import QtWidgets, QtGui, QtCore + + +class LoadedInSceneDelegate(QtWidgets.QStyledItemDelegate): + """Delegate for Loaded in Scene state columns. + + Shows "yes" or "no" for True or False values + Colorizes green or dark grey based on True or False values + + """ + + def __init__(self, *args, **kwargs): + super(LoadedInSceneDelegate, self).__init__(*args, **kwargs) + self._colors = { + True: QtGui.QColor(80, 170, 80), + False: QtGui.QColor(90, 90, 90) + } + + def displayText(self, value, locale): + return "yes" if value else "no" + + def initStyleOption(self, option, index): + super(LoadedInSceneDelegate, self).initStyleOption(option, index) + + # Colorize based on value + value = index.data(QtCore.Qt.DisplayRole) + color = self._colors[bool(value)] + option.palette.setBrush(QtGui.QPalette.Text, color) diff --git a/openpype/tools/loader/widgets.py b/openpype/tools/loader/widgets.py index 615f28f04d..98522c48ce 100644 --- a/openpype/tools/loader/widgets.py +++ b/openpype/tools/loader/widgets.py @@ -37,8 +37,7 @@ from openpype.tools.utils import ( ) from openpype.tools.utils.delegates import ( VersionDelegate, - PrettyTimeDelegate, - LoadedInSceneDelegate + PrettyTimeDelegate ) from openpype.tools.utils.widgets import ( OptionalMenu, @@ -59,6 +58,7 @@ from .model import ( ITEM_ID_ROLE ) from . import lib +from .delegates import LoadedInSceneDelegate from openpype.tools.utils.constants import ( LOCAL_PROVIDER_ROLE, diff --git a/openpype/tools/utils/delegates.py b/openpype/tools/utils/delegates.py index 3547251282..d6c2d69e76 100644 --- a/openpype/tools/utils/delegates.py +++ b/openpype/tools/utils/delegates.py @@ -291,30 +291,3 @@ class PrettyTimeDelegate(QtWidgets.QStyledItemDelegate): def displayText(self, value, locale): if value is not None: return pretty_timestamp(value) - - -class LoadedInSceneDelegate(QtWidgets.QStyledItemDelegate): - """Delegate for Loaded in Scene state columns. 
- - Shows "yes" or "no" for True or False values - Colorizes green or dark grey based on True or False values - - """ - - def __init__(self, *args, **kwargs): - super(LoadedInSceneDelegate, self).__init__(*args, **kwargs) - self._colors = { - True: QtGui.QColor(80, 170, 80), - False: QtGui.QColor(90, 90, 90) - } - - def displayText(self, value, locale): - return "yes" if value else "no" - - def initStyleOption(self, option, index): - super(LoadedInSceneDelegate, self).initStyleOption(option, index) - - # Colorize based on value - value = index.data(QtCore.Qt.DisplayRole) - color = self._colors[bool(value)] - option.palette.setBrush(QtGui.QPalette.Text, color) From d84b175efbc60c226609c6393dddedbf9960c7be Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 14 Sep 2022 11:47:52 +0200 Subject: [PATCH 149/169] Support ILoadHost hosts Co-authored-by: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> --- openpype/tools/loader/model.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/openpype/tools/loader/model.py b/openpype/tools/loader/model.py index 58d6787507..17af7bb2f7 100644 --- a/openpype/tools/loader/model.py +++ b/openpype/tools/loader/model.py @@ -569,6 +569,11 @@ class SubsetsModel(TreeModel, BaseRepresentationModel): if self._host: time_since_refresh = time.time() - self._host_loaded_refresh_time if time_since_refresh > self._host_loaded_refresh_timeout: + if isinstance(self._host, ILoadHost): + containers = self._host.get_containers() + else: + containers = self._host.ls() + repre_ids = {con.get("representation") for con in self._host.ls()} self._loaded_representation_ids = repre_ids From a8909889c4662764042288245ef2d901d1218055 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 14 Sep 2022 11:49:52 +0200 Subject: [PATCH 150/169] Fix refactored code --- openpype/tools/loader/model.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/tools/loader/model.py b/openpype/tools/loader/model.py index 17af7bb2f7..1c078325f3 100644 --- a/openpype/tools/loader/model.py +++ b/openpype/tools/loader/model.py @@ -25,6 +25,7 @@ from openpype.pipeline import ( from openpype.style import get_default_entity_icon_color from openpype.tools.utils.models import TreeModel, Item from openpype.tools.utils import lib +from openpype.host import ILoadHost from openpype.modules import ModulesManager from openpype.tools.utils.constants import ( @@ -573,9 +574,8 @@ class SubsetsModel(TreeModel, BaseRepresentationModel): containers = self._host.get_containers() else: containers = self._host.ls() - - repre_ids = {con.get("representation") - for con in self._host.ls()} + + repre_ids = {con.get("representation") for con in containers} self._loaded_representation_ids = repre_ids self._host_loaded_refresh_time = time.time() From 1eae84fd49af6930eb7865451c17557bf1d4d8b6 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 14 Sep 2022 11:58:10 +0200 Subject: [PATCH 151/169] Switch columns --- openpype/tools/loader/model.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/openpype/tools/loader/model.py b/openpype/tools/loader/model.py index 1c078325f3..77a8669c46 100644 --- a/openpype/tools/loader/model.py +++ b/openpype/tools/loader/model.py @@ -138,8 +138,8 @@ class SubsetsModel(TreeModel, BaseRepresentationModel): "duration", "handles", "step", - "repre_info", - "loaded_in_scene" + "loaded_in_scene", + "repre_info" ] column_labels_mapping = { @@ -153,8 +153,8 @@ class SubsetsModel(TreeModel, BaseRepresentationModel): "duration": "Duration", 
"handles": "Handles", "step": "Step", - "repre_info": "Availability", - "loaded_in_scene": "In scene" + "loaded_in_scene": "In scene", + "repre_info": "Availability" } SortAscendingRole = QtCore.Qt.UserRole + 2 From c3588e2f9d337109aea3c2630fb12079b617d81d Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 14 Sep 2022 12:37:46 +0200 Subject: [PATCH 152/169] Make `loaded_in_scene` column 5 pixels wider + reorder to match with column order --- openpype/tools/loader/widgets.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/tools/loader/widgets.py b/openpype/tools/loader/widgets.py index 98522c48ce..c028aa4174 100644 --- a/openpype/tools/loader/widgets.py +++ b/openpype/tools/loader/widgets.py @@ -170,8 +170,8 @@ class SubsetWidget(QtWidgets.QWidget): ("duration", 60), ("handles", 55), ("step", 10), - ("repre_info", 65), - ("loaded_in_scene", 20) + ("loaded_in_scene", 25), + ("repre_info", 65) ) def __init__( From 817886b234c00d6a9f2a9bd0902cf8a2a9cb9cb3 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 14 Sep 2022 13:39:10 +0200 Subject: [PATCH 153/169] Fix typo in logic --- openpype/lib/transcoding.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/lib/transcoding.py b/openpype/lib/transcoding.py index 60d5d3ed4a..51e34312f2 100644 --- a/openpype/lib/transcoding.py +++ b/openpype/lib/transcoding.py @@ -154,7 +154,7 @@ def convert_value_by_type_name(value_type, value, logger=None): elif parts_len == 4: divisor = 2 elif parts_len == 9: - divisor == 3 + divisor = 3 elif parts_len == 16: divisor = 4 else: From 5c3c4dcbb2acfe736cfc94e6360eb2eb52cd580b Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 14 Sep 2022 13:43:43 +0200 Subject: [PATCH 154/169] Fix same typo in duplicated code --- .../plugins/OpenPypeTileAssembler/OpenPypeTileAssembler.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/modules/deadline/repository/custom/plugins/OpenPypeTileAssembler/OpenPypeTileAssembler.py b/openpype/modules/deadline/repository/custom/plugins/OpenPypeTileAssembler/OpenPypeTileAssembler.py index 9fca1b5391..b259e18a94 100644 --- a/openpype/modules/deadline/repository/custom/plugins/OpenPypeTileAssembler/OpenPypeTileAssembler.py +++ b/openpype/modules/deadline/repository/custom/plugins/OpenPypeTileAssembler/OpenPypeTileAssembler.py @@ -71,7 +71,7 @@ def convert_value_by_type_name(value_type, value): elif parts_len == 4: divisor = 2 elif parts_len == 9: - divisor == 3 + divisor = 3 elif parts_len == 16: divisor = 4 else: From 1c6b23b674eec3ed1fc1b1e0a68931a5661b71a0 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 14 Sep 2022 13:50:57 +0200 Subject: [PATCH 155/169] Fix `headsUpDisplay` key name Capture has a default setting named `headsUpDisplay` which is the long name for the setting `hud`. Thus when supplying `hud` as viewport option then `capture` will merge the key-values and thus will try to set both `headsUpDisplay` and `hud` value for the modelEditor which ends up ignoring `hud` and instead applying the `headsUpDisplay`. Thus, `hud` didn't do anything. 
--- openpype/settings/defaults/project_settings/maya.json | 2 +- .../schemas/projects_schema/schemas/schema_maya_capture.json | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index 99ba4cdd5c..7759ac4e5e 100644 --- a/openpype/settings/defaults/project_settings/maya.json +++ b/openpype/settings/defaults/project_settings/maya.json @@ -731,7 +731,7 @@ "grid": false, "hairSystems": true, "handles": false, - "hud": false, + "headsUpDisplay": false, "hulls": false, "ikHandles": false, "imagePlane": true, diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json index 7a40f349cc..ab35fd391f 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json @@ -441,8 +441,8 @@ }, { "type": "boolean", - "key": "hud", - "label": "hud" + "key": "headsUpDisplay", + "label": "headsUpDisplay" }, { "type": "boolean", From 8e068308c6369f50d220a58a81288f1f57337365 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 14 Sep 2022 15:30:46 +0200 Subject: [PATCH 156/169] Add Display Textures settings correctly, labelize the Show settings to clarify what they are --- .../schemas/schema_maya_capture.json | 77 ++++++++++--------- 1 file changed, 41 insertions(+), 36 deletions(-) diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json index 7a40f349cc..ae6c428faf 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json @@ -195,6 +195,11 @@ { "nolights": "No Lights"} ] }, + { + "type": "boolean", + "key": "displayTextures", + "label": "Display Textures" + }, { "type": "number", "key": "textureMaxResolution", @@ -217,11 +222,6 @@ "key": "shadows", "label": "Display Shadows" }, - { - "type": "boolean", - "key": "textures", - "label": "Display Textures" - }, { "type": "boolean", "key": "twoSidedLighting", @@ -372,67 +372,67 @@ { "type": "boolean", "key": "cameras", - "label": "cameras" + "label": "Cameras" }, { "type": "boolean", "key": "clipGhosts", - "label": "clipGhosts" + "label": "Clip Ghosts" }, { "type": "boolean", "key": "controlVertices", - "label": "controlVertices" + "label": "NURBS CVs" }, { "type": "boolean", "key": "deformers", - "label": "deformers" + "label": "Deformers" }, { "type": "boolean", "key": "dimensions", - "label": "dimensions" + "label": "Dimensions" }, { "type": "boolean", "key": "dynamicConstraints", - "label": "dynamicConstraints" + "label": "Dynamic Constraints" }, { "type": "boolean", "key": "dynamics", - "label": "dynamics" + "label": "Dynamics" }, { "type": "boolean", "key": "fluids", - "label": "fluids" + "label": "Fluids" }, { "type": "boolean", "key": "follicles", - "label": "follicles" + "label": "Follicles" }, { "type": "boolean", "key": "gpuCacheDisplayFilter", - "label": "gpuCacheDisplayFilter" + "label": "GPU Cache" }, { "type": "boolean", "key": "greasePencils", - "label": "greasePencils" + "label": "Grease Pencil" }, { "type": "boolean", "key": "grid", - "label": "grid" + "label": "Grid" }, { "type": "boolean", "key": "hairSystems", - "label": 
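For context on the "Show" options labelled above: most of these keys correspond directly to `maya.cmds.modelEditor` display flags, so a preview profile can apply a selection of them to a model panel roughly like this (a sketch only; the panel lookup and the subset of keys shown are assumptions, not the actual playblast code path):

from maya import cmds

# Keys follow the settings above; values are just an example selection.
show_options = {
    "cameras": False,
    "grid": False,
    "imagePlane": True,
    "locators": False,
    "nurbsCurves": True,
    "polymeshes": True,
}

panel = cmds.getPanel(withFocus=True)
if cmds.getPanel(typeOf=panel) == "modelPanel":
    cmds.modelEditor(panel, edit=True, **show_options)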
"hairSystems" + "label": "Hair Systems" }, { "type": "boolean", @@ -442,47 +442,47 @@ { "type": "boolean", "key": "hud", - "label": "hud" + "label": "HUD" }, { "type": "boolean", "key": "hulls", - "label": "hulls" + "label": "NURBS Hulls" }, { "type": "boolean", "key": "ikHandles", - "label": "ikHandles" + "label": "IK Handles" }, { "type": "boolean", "key": "imagePlane", - "label": "imagePlane" + "label": "Image Planes" }, { "type": "boolean", "key": "joints", - "label": "joints" + "label": "Joints" }, { "type": "boolean", "key": "lights", - "label": "lights" + "label": "Lights" }, { "type": "boolean", "key": "locators", - "label": "locators" + "label": "Locators" }, { "type": "boolean", "key": "manipulators", - "label": "manipulators" + "label": "Manipulators" }, { "type": "boolean", "key": "motionTrails", - "label": "motionTrails" + "label": "Motion Trails" }, { "type": "boolean", @@ -502,47 +502,52 @@ { "type": "boolean", "key": "nurbsCurves", - "label": "nurbsCurves" + "label": "NURBS Curves" }, { "type": "boolean", "key": "nurbsSurfaces", - "label": "nurbsSurfaces" + "label": "NURBS Surfaces" }, { "type": "boolean", "key": "particleInstancers", - "label": "particleInstancers" + "label": "Particle Instancers" }, { "type": "boolean", "key": "pivots", - "label": "pivots" + "label": "Pivots" }, { "type": "boolean", "key": "planes", - "label": "planes" + "label": "Planes" }, { "type": "boolean", "key": "pluginShapes", - "label": "pluginShapes" + "label": "Plugin Shapes" }, { "type": "boolean", "key": "polymeshes", - "label": "polymeshes" + "label": "Polygons" }, { "type": "boolean", "key": "strokes", - "label": "strokes" + "label": "Strokes" }, { "type": "boolean", "key": "subdivSurfaces", - "label": "subdivSurfaces" + "label": "Subdiv Surfaces" + }, + { + "type": "boolean", + "key": "textures", + "label": "Texture Placements" } ] }, From c6bd26485d191406e96288c7a4ea7e99f2364494 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 14 Sep 2022 15:35:12 +0200 Subject: [PATCH 157/169] Sort a bit more by Label again so that NURBS options are together + fix label for handles --- .../schemas/schema_maya_capture.json | 22 +++++++++---------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json index ae6c428faf..d2627c1e2a 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json @@ -379,11 +379,6 @@ "key": "clipGhosts", "label": "Clip Ghosts" }, - { - "type": "boolean", - "key": "controlVertices", - "label": "NURBS CVs" - }, { "type": "boolean", "key": "deformers", @@ -437,18 +432,13 @@ { "type": "boolean", "key": "handles", - "label": "handles" + "label": "Handles" }, { "type": "boolean", "key": "hud", "label": "HUD" }, - { - "type": "boolean", - "key": "hulls", - "label": "NURBS Hulls" - }, { "type": "boolean", "key": "ikHandles", @@ -499,11 +489,21 @@ "key": "nRigids", "label": "nRigids" }, + { + "type": "boolean", + "key": "controlVertices", + "label": "NURBS CVs" + }, { "type": "boolean", "key": "nurbsCurves", "label": "NURBS Curves" }, + { + "type": "boolean", + "key": "hulls", + "label": "NURBS Hulls" + }, { "type": "boolean", "key": "nurbsSurfaces", From fd4648c9bd48bda91e1c259a5903f4f263668b78 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 14 Sep 2022 15:47:43 +0200 
Subject: [PATCH 158/169] Add label --- .../schemas/projects_schema/schemas/schema_maya_capture.json | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json index d2627c1e2a..18e69e92c3 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json @@ -369,6 +369,10 @@ { "type": "splitter" }, + { + "type": "label", + "label": "Show" + }, { "type": "boolean", "key": "cameras", From 522d1e2df837bda7611b9667c1afd127337d945f Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 14 Sep 2022 16:04:09 +0200 Subject: [PATCH 159/169] Labelize Camera options to match with Camera attributes in Attribute Editor --- .../schemas/schema_maya_capture.json | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json index 18e69e92c3..8c2a460871 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json @@ -564,47 +564,47 @@ { "type": "boolean", "key": "displayGateMask", - "label": "displayGateMask" + "label": "Display Gate Mask" }, { "type": "boolean", "key": "displayResolution", - "label": "displayResolution" + "label": "Display Resolution" }, { "type": "boolean", "key": "displayFilmGate", - "label": "displayFilmGate" + "label": "Display Film Gate" }, { "type": "boolean", "key": "displayFieldChart", - "label": "displayFieldChart" + "label": "Display Field Chart" }, { "type": "boolean", "key": "displaySafeAction", - "label": "displaySafeAction" + "label": "Display Safe Action" }, { "type": "boolean", "key": "displaySafeTitle", - "label": "displaySafeTitle" + "label": "Display Safe Title" }, { "type": "boolean", "key": "displayFilmPivot", - "label": "displayFilmPivot" + "label": "Display Film Pivot" }, { "type": "boolean", "key": "displayFilmOrigin", - "label": "displayFilmOrigin" + "label": "Display Film Origin" }, { "type": "number", "key": "overscan", - "label": "overscan", + "label": "Overscan", "decimal": 1, "minimum": 0, "maximum": 10 From 9b9bfdadb993c43c2f6d1232ef522b5326df2cce Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 14 Sep 2022 16:05:10 +0200 Subject: [PATCH 160/169] Uppercase `percent` label like the surrounding labels --- .../schemas/projects_schema/schemas/schema_maya_capture.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json index 8c2a460871..32987e7423 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json @@ -157,7 +157,7 @@ { "type": "number", "key": "percent", - "label": "percent", + "label": "Percent", "decimal": 1, "minimum": 0, "maximum": 200 From f501dac15ff4d3f0e30b4db4af5caf3485a90c7a Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 14 Sep 2022 16:14:18 +0200 Subject: [PATCH 161/169] Fix default settings for new viewport options settings --- 
openpype/settings/defaults/project_settings/maya.json | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index 99ba4cdd5c..8706ea995f 100644 --- a/openpype/settings/defaults/project_settings/maya.json +++ b/openpype/settings/defaults/project_settings/maya.json @@ -693,10 +693,10 @@ "Viewport Options": { "override_viewport_options": true, "displayLights": "default", + "displayTextures": true, "textureMaxResolution": 1024, "renderDepthOfField": true, "shadows": true, - "textures": true, "twoSidedLighting": true, "lineAAEnable": true, "multiSample": 8, @@ -719,7 +719,6 @@ "motionBlurShutterOpenFraction": 0.2, "cameras": false, "clipGhosts": false, - "controlVertices": false, "deformers": false, "dimensions": false, "dynamicConstraints": false, @@ -732,7 +731,6 @@ "hairSystems": true, "handles": false, "hud": false, - "hulls": false, "ikHandles": false, "imagePlane": true, "joints": false, @@ -743,7 +741,9 @@ "nCloths": false, "nParticles": false, "nRigids": false, + "controlVertices": false, "nurbsCurves": false, + "hulls": false, "nurbsSurfaces": false, "particleInstancers": false, "pivots": false, @@ -751,7 +751,8 @@ "pluginShapes": false, "polymeshes": true, "strokes": false, - "subdivSurfaces": false + "subdivSurfaces": false, + "textures": false }, "Camera Options": { "displayGateMask": false, From b9c3c95c2642b19305a57064be74fa0cd6ef12ae Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 14 Sep 2022 16:16:03 +0200 Subject: [PATCH 162/169] Use `id` variable (cosmetics because it results in same key) --- openpype/hosts/maya/api/lib.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/maya/api/lib.py b/openpype/hosts/maya/api/lib.py index 58e160cb2f..6a8447d6ad 100644 --- a/openpype/hosts/maya/api/lib.py +++ b/openpype/hosts/maya/api/lib.py @@ -2483,7 +2483,7 @@ def load_capture_preset(data=None): # DISPLAY OPTIONS id = 'Display Options' disp_options = {} - for key in preset['Display Options']: + for key in preset[id]: if key.startswith('background'): disp_options[key] = preset['Display Options'][key] if len(disp_options[key]) == 4: From 95fef2c4b11e055be8d55bd464e075eb2f1d7415 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 14 Sep 2022 16:50:32 +0200 Subject: [PATCH 163/169] Fix Width label --- .../schemas/projects_schema/schemas/schema_maya_capture.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json index 570e22aa60..ffa1e61e68 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json @@ -141,7 +141,7 @@ { "type": "number", "key": "width", - "label": " Width", + "label": "Width", "decimal": 0, "minimum": 0, "maximum": 99999 From 1e27e9b71ff18af0aa7d957be6db344f7030a2aa Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 14 Sep 2022 16:50:50 +0200 Subject: [PATCH 164/169] Remove unused settings --- .../schemas/schema_maya_capture.json | 13 ------------- 1 file changed, 13 deletions(-) diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json index ffa1e61e68..2e4d4d67ab 100644 
--- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json @@ -153,19 +153,6 @@ "decimal": 0, "minimum": 0, "maximum": 99999 - }, - { - "type": "number", - "key": "percent", - "label": "Percent", - "decimal": 1, - "minimum": 0, - "maximum": 200 - }, - { - "type": "text", - "key": "mode", - "label": "Mode" } ] }, From ee4b9056feae4f942624d6e9bf37bb64a875bfba Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 14 Sep 2022 16:51:59 +0200 Subject: [PATCH 165/169] Fix incorrectly resolved merge conflict --- .../schemas/projects_schema/schemas/schema_maya_capture.json | 5 ----- 1 file changed, 5 deletions(-) diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json index 2e4d4d67ab..e23dbbbc1d 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json @@ -430,11 +430,6 @@ "key": "headsUpDisplay", "label": "HUD" }, - { - "type": "boolean", - "key": "hulls", - "label": "hulls" - }, { "type": "boolean", "key": "ikHandles", From e16f5df4d7a346ee5b1f7b3c79b146fe9ba3e958 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 14 Sep 2022 16:53:04 +0200 Subject: [PATCH 166/169] Update defaults for the removed settings --- openpype/settings/defaults/project_settings/maya.json | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index 8b0418f5c6..79e80aec2e 100644 --- a/openpype/settings/defaults/project_settings/maya.json +++ b/openpype/settings/defaults/project_settings/maya.json @@ -686,9 +686,7 @@ }, "Resolution": { "width": 1920, - "height": 1080, - "percent": 1.0, - "mode": "Custom" + "height": 1080 }, "Viewport Options": { "override_viewport_options": true, From 730f451020cb438b6a57756e5713212ae6e2261f Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 14 Sep 2022 16:57:11 +0200 Subject: [PATCH 167/169] Revert "Fix Width label" This reverts commit 95fef2c4b11e055be8d55bd464e075eb2f1d7415. 
--- .../schemas/projects_schema/schemas/schema_maya_capture.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json index e23dbbbc1d..c9904150fd 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json @@ -141,7 +141,7 @@ { "type": "number", "key": "width", - "label": "Width", + "label": " Width", "decimal": 0, "minimum": 0, "maximum": 99999 From a0333c88aed89707eba5cbea154f4449639dac44 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 14 Sep 2022 17:23:59 +0200 Subject: [PATCH 168/169] Remove unused PanZoom / pan_zoom settings --- .../settings/defaults/project_settings/maya.json | 3 --- .../projects_schema/schemas/schema_maya_capture.json | 12 ------------ 2 files changed, 15 deletions(-) diff --git a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index 79e80aec2e..8643297f02 100644 --- a/openpype/settings/defaults/project_settings/maya.json +++ b/openpype/settings/defaults/project_settings/maya.json @@ -678,9 +678,6 @@ "isolate_view": true, "off_screen": true }, - "PanZoom": { - "pan_zoom": true - }, "Renderer": { "rendererName": "vp2Renderer" }, diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json index c9904150fd..62c33f55fc 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_capture.json @@ -94,18 +94,6 @@ } ] }, - - { - "type": "dict", - "key": "PanZoom", - "children": [ - { - "type": "boolean", - "key": "pan_zoom", - "label": " Pan Zoom" - } - ] - }, { "type": "splitter" }, From 7a5d20ffdb05347736475c86b87e6a782fb5d80f Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Thu, 15 Sep 2022 11:00:20 +0200 Subject: [PATCH 169/169] :bug: skip plugin if otioTimeline is missing --- openpype/plugins/publish/extract_otio_file.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/openpype/plugins/publish/extract_otio_file.py b/openpype/plugins/publish/extract_otio_file.py index c692205d81..1a6a82117d 100644 --- a/openpype/plugins/publish/extract_otio_file.py +++ b/openpype/plugins/publish/extract_otio_file.py @@ -16,6 +16,8 @@ class ExtractOTIOFile(publish.Extractor): hosts = ["resolve", "hiero", "traypublisher"] def process(self, instance): + if not instance.context.data.get("otioTimeline"): + return # create representation data if "representations" not in instance.data: instance.data["representations"] = []
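A minimal sketch of the guard added in this last patch, written against the
plain pyblish API rather than the OpenPype `publish.Extractor` base class; the
class name, the debug log line and the trailing comment are illustrative
additions, not part of the patch:

    import pyblish.api


    class ExtractOTIOFileSketch(pyblish.api.InstancePlugin):
        """Skip extraction when the context carries no OTIO timeline."""

        order = pyblish.api.ExtractorOrder
        label = "Extract OTIO File (sketch)"
        hosts = ["resolve", "hiero", "traypublisher"]

        def process(self, instance):
            # Hosts that never collect "otioTimeline" should not fail here,
            # so return early instead of erroring out later in extraction.
            if not instance.context.data.get("otioTimeline"):
                self.log.debug("No otioTimeline on the context, skipping.")
                return
            instance.data.setdefault("representations", [])
            # ... serialize the timeline to an .otio representation here ...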