Merge branch 'develop' into feature/better_instance_changes

Jakub Trllo 2023-02-07 18:23:31 +01:00
commit 6120f218b4
31 changed files with 1410 additions and 167 deletions

View file

@ -1,4 +1,5 @@
import os
from openpype.lib import PreLaunchHook
@ -40,5 +41,13 @@ class AddLastWorkfileToLaunchArgs(PreLaunchHook):
self.log.info("Current context does not have any workfile yet.")
return
# Determine whether to open workfile post initialization.
if self.host_name == "maya":
key = "open_workfile_post_initialization"
if self.data["project_settings"]["maya"][key]:
self.log.debug("Opening workfile post initialization.")
self.data["env"]["OPENPYPE_" + key.upper()] = "1"
return
# Add path to workfile to arguments
self.launch_context.launch_args.append(last_workfile)
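For reference, a minimal sketch of how the settings key above maps to the environment variable that Maya's userSetup.py reads later (key name taken from this diff, the rest illustrative):

    key = "open_workfile_post_initialization"
    env_key = "OPENPYPE_" + key.upper()
    # env_key == "OPENPYPE_OPEN_WORKFILE_POST_INITIALIZATION"
    # userSetup.py later checks: bool(int(os.environ.get(env_key, "0")))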

View file

@ -123,7 +123,7 @@ class HostBase(object):
Union[str, None]: Current task name.
"""
return os.environ.get("AVALON_ASSET")
return os.environ.get("AVALON_TASK")
def get_current_context(self):
"""Get current context information.

View file

@ -1,4 +1,5 @@
import os
import re
import logging
import platform
@ -66,7 +67,7 @@ def generate_shelves():
)
continue
mandatory_attributes = {'name', 'script'}
mandatory_attributes = {'label', 'script'}
for tool_definition in shelf_definition.get('tools_list'):
# We verify that the label and script attributes of the tool
# are set
@ -152,31 +153,32 @@ def get_or_create_tool(tool_definition, shelf):
Returns:
hou.Tool: The tool updated or the new one
"""
existing_tools = shelf.tools()
tool_label = tool_definition.get('label')
tool_label = tool_definition.get("label")
if not tool_label:
log.warning("Skipped shelf without label")
return
script_path = tool_definition["script"]
if not script_path or not os.path.exists(script_path):
log.warning("This path doesn't exist - {}".format(script_path))
return
existing_tools = shelf.tools()
existing_tool = next(
(tool for tool in existing_tools if tool.label() == tool_label),
None
)
with open(script_path) as stream:
script = stream.read()
tool_definition["script"] = script
if existing_tool:
tool_definition.pop('name', None)
tool_definition.pop('label', None)
tool_definition.pop("label", None)
existing_tool.setData(**tool_definition)
return existing_tool
tool_name = tool_label.replace(' ', '_').lower()
if not os.path.exists(tool_definition['script']):
log.warning(
"This path doesn't exist - {}".format(tool_definition['script'])
)
return
with open(tool_definition['script']) as f:
script = f.read()
tool_definition.update({'script': script})
new_tool = hou.shelves.newTool(name=tool_name, **tool_definition)
return new_tool
tool_name = re.sub(r"[^\w\d]+", "_", tool_label).lower()
return hou.shelves.newTool(name=tool_name, **tool_definition)
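A hypothetical usage sketch of get_or_create_tool; the shelf comes from hou.shelves.shelves() and the dictionary keys match the mandatory attributes checked above (label, script):

    import hou

    shelf = hou.shelves.shelves().get("my_shelf")  # hypothetical shelf name
    tool_definition = {
        "label": "Publish...",                 # used to find or name the tool
        "script": "/path/to/publish_tool.py",  # read from disk and stored as the tool script
    }
    tool = get_or_create_tool(tool_definition, shelf)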

View file

@ -12,6 +12,11 @@ class MaxAddon(OpenPypeModule, IHostAddon):
def initialize(self, module_settings):
self.enabled = True
def add_implementation_envs(self, env, _app):
# Remove auto screen scale factor for Qt
# - let 3dsmax decide its value
env.pop("QT_AUTO_SCREEN_SCALE_FACTOR", None)
def get_workfile_extensions(self):
return [".max"]

View file

@ -1,4 +1,13 @@
# -*- coding: utf-8 -*-
import os
import sys
# this might happen in some 3dsmax versions where PYTHONPATH isn't added
# to sys.path automatically
for path in os.environ["PYTHONPATH"].split(os.pathsep):
if path and path not in sys.path:
sys.path.append(path)
from openpype.hosts.max.api import MaxHost
from openpype.pipeline import install_host

View file

@ -5,6 +5,7 @@ import sys
import platform
import uuid
import math
import re
import json
import logging
@ -3353,3 +3354,34 @@ def iter_visible_nodes_in_range(nodes, start, end):
def get_attribute_input(attr):
connections = cmds.listConnections(attr, plugs=True, destination=False)
return connections[0] if connections else None
def write_xgen_file(data, filepath):
"""Overwrites data in .xgen files.
A fairly naive approach, mainly meant to overwrite "xgDataPath" and "xgProjectPath".
Args:
data (dict): Dictionary of key, value. Key matches with xgen file.
For example:
{"xgDataPath": "some/path"}
filepath (string): Absolute path of .xgen file.
"""
# Generate a regex lookup from line to key; basically match any of
# the keys in `\t{key}\t\t`
keys = "|".join(re.escape(key) for key in data.keys())
re_keys = re.compile("^\t({})\t\t".format(keys))
lines = []
with open(filepath, "r") as f:
for line in f:
match = re_keys.match(line)
if match:
key = match.group(1)
value = data[key]
line = "\t{}\t\t{}\n".format(key, value)
lines.append(line)
with open(filepath, "w") as f:
f.writelines(lines)
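A minimal usage sketch of write_xgen_file with hypothetical values, following the docstring above:

    write_xgen_file(
        {
            "xgDataPath": "${PROJECT}xgen/collections/chars__ns__hairPalette",
            "xgProjectPath": "/projects/show/work/maya/",
        },
        "/projects/show/work/maya/scene__chars__ns__hairPalette.xgen",  # hypothetical path
    )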

View file

@ -300,6 +300,39 @@ class ReferenceLoader(Loader):
str(representation["_id"]),
type="string")
# When an animation or pointcache gets connected to an Xgen container,
# the compound attribute "xgenContainers" gets created. When animation
# containers get updated we also need to update the cacheFileName on
# the Xgen collection.
compound_name = "xgenContainers"
if cmds.objExists("{}.{}".format(node, compound_name)):
import xgenm
container_amount = cmds.getAttr(
"{}.{}".format(node, compound_name), size=True
)
# loop through all compound children
for i in range(container_amount):
attr = "{}.{}[{}].container".format(node, compound_name, i)
objectset = cmds.listConnections(attr)[0]
reference_node = cmds.sets(objectset, query=True)[0]
palettes = cmds.ls(
cmds.referenceQuery(reference_node, nodes=True),
type="xgmPalette"
)
for palette in palettes:
for description in xgenm.descriptions(palette):
xgenm.setAttr(
"cacheFileName",
path.replace("\\", "/"),
palette,
description,
"SplinePrimitive"
)
# Refresh UI and viewport.
de = xgenm.xgGlobal.DescriptionEditor
de.refresh("Full")
def remove(self, container):
"""Remove an existing `container` from Maya scene

View file

@ -2,9 +2,9 @@ from openpype.hosts.maya.api import plugin
class CreateXgen(plugin.Creator):
"""Xgen interactive export"""
"""Xgen"""
name = "xgen"
label = "Xgen Interactive"
label = "Xgen"
family = "xgen"
icon = "pagelines"

View file

@ -0,0 +1,153 @@
from maya import cmds
from openpype.pipeline import InventoryAction, get_representation_context
from openpype.hosts.maya.api.lib import get_id
class ConnectGeometry(InventoryAction):
"""Connect geometries within containers.
The source container will connect to the target containers by searching for
matching geometry IDs (cbId).
Source containers are of family "animation" or "pointcache".
The connection will be made with a live world-space blendshape.
"""
label = "Connect Geometry"
icon = "link"
color = "white"
def process(self, containers):
# Validate selection is more than 1.
message = (
"Only 1 container selected. 2+ containers needed for this action."
)
if len(containers) == 1:
self.display_warning(message)
return
# Categorize containers by family.
containers_by_family = {}
for container in containers:
family = get_representation_context(
container["representation"]
)["subset"]["data"]["family"]
try:
containers_by_family[family].append(container)
except KeyError:
containers_by_family[family] = [container]
# Validate to only 1 source container.
source_containers = containers_by_family.get("animation", [])
source_containers += containers_by_family.get("pointcache", [])
source_container_namespaces = [
x["namespace"] for x in source_containers
]
message = (
"{} animation containers selected:\n\n{}\n\nOnly select 1 of type "
"\"animation\" or \"pointcache\".".format(
len(source_containers), source_container_namespaces
)
)
if len(source_containers) != 1:
self.display_warning(message)
return
source_object = source_containers[0]["objectName"]
# Collect matching geometry transforms based on the cbId attribute.
target_containers = []
for family, containers in containers_by_family.items():
if family in ["animation", "pointcache"]:
continue
target_containers.extend(containers)
source_data = self.get_container_data(source_object)
matches = []
node_types = set()
for target_container in target_containers:
target_data = self.get_container_data(
target_container["objectName"]
)
node_types.update(target_data["node_types"])
for id, transform in target_data["ids"].items():
source_match = source_data["ids"].get(id)
if source_match:
matches.append([source_match, transform])
# Message user about what is about to happen.
if not matches:
self.display_warning("No matching geometries found.")
return
message = "Connecting geometries:\n\n"
for match in matches:
message += "{} > {}\n".format(match[0], match[1])
choice = self.display_warning(message, show_cancel=True)
if choice is False:
return
# Setup live worldspace blendshape connection.
for source, target in matches:
blendshape = cmds.blendShape(source, target)[0]
cmds.setAttr(blendshape + ".origin", 0)
cmds.setAttr(blendshape + "." + target.split(":")[-1], 1)
# Update Xgen if in any of the containers.
if "xgmPalette" in node_types:
cmds.xgmPreview()
def get_container_data(self, container):
"""Collects data about the container nodes.
Args:
container (dict): Container instance.
Returns:
data (dict):
"node_types": All node types in container nodes.
"ids": If the node is a mesh, we collect its parent transform
id.
"""
data = {"node_types": set(), "ids": {}}
ref_node = cmds.sets(container, query=True, nodesOnly=True)[0]
for node in cmds.referenceQuery(ref_node, nodes=True):
node_type = cmds.nodeType(node)
data["node_types"].add(node_type)
# Only interested in mesh transforms for connecting geometry with
# blendshape.
if node_type != "mesh":
continue
transform = cmds.listRelatives(node, parent=True)[0]
data["ids"][get_id(transform)] = transform
return data
def display_warning(self, message, show_cancel=False):
"""Show feedback to user.
Returns:
bool
"""
from Qt import QtWidgets
accept = QtWidgets.QMessageBox.Ok
if show_cancel:
buttons = accept | QtWidgets.QMessageBox.Cancel
else:
buttons = accept
state = QtWidgets.QMessageBox.warning(
None,
"",
message,
buttons=buttons,
defaultButton=accept
)
return state == accept
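The matching relies on OpenPype's cbId attribute (assumed to be what get_id, imported above, reads); a standalone sketch of the lookup with hypothetical inputs:

    from openpype.hosts.maya.api.lib import get_id

    def match_by_id(source_transforms, target_transform):
        """Return the source transform whose cbId matches the target, if any."""
        target_id = get_id(target_transform)
        for source in source_transforms:
            if get_id(source) == target_id:
                return source
        return None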

View file

@ -0,0 +1,168 @@
from maya import cmds
import xgenm
from openpype.pipeline import (
InventoryAction, get_representation_context, get_representation_path
)
class ConnectXgen(InventoryAction):
"""Connect Xgen with an animation or pointcache.
"""
label = "Connect Xgen"
icon = "link"
color = "white"
def process(self, containers):
# Validate selection is more than 1.
message = (
"Only 1 container selected. 2+ containers needed for this action."
)
if len(containers) == 1:
self.display_warning(message)
return
# Categorize containers by family.
containers_by_family = {}
for container in containers:
family = get_representation_context(
container["representation"]
)["subset"]["data"]["family"]
try:
containers_by_family[family].append(container)
except KeyError:
containers_by_family[family] = [container]
# Validate to only 1 source container.
source_containers = containers_by_family.get("animation", [])
source_containers += containers_by_family.get("pointcache", [])
source_container_namespaces = [
x["namespace"] for x in source_containers
]
message = (
"{} animation containers selected:\n\n{}\n\nOnly select 1 of type "
"\"animation\" or \"pointcache\".".format(
len(source_containers), source_container_namespaces
)
)
if len(source_containers) != 1:
self.display_warning(message)
return
source_container = source_containers[0]
source_object = source_container["objectName"]
# Validate source representation is an alembic.
source_path = get_representation_path(
get_representation_context(
source_container["representation"]
)["representation"]
).replace("\\", "/")
message = "Animation container \"{}\" is not an alembic:\n{}".format(
source_container["namespace"], source_path
)
if not source_path.endswith(".abc"):
self.display_warning(message)
return
# Target containers.
target_containers = []
for family, containers in containers_by_family.items():
if family in ["animation", "pointcache"]:
continue
target_containers.extend(containers)
# Inform user of connections from source representation to target
# descriptions.
descriptions_data = []
connections_msg = ""
for target_container in target_containers:
reference_node = cmds.sets(
target_container["objectName"], query=True
)[0]
palettes = cmds.ls(
cmds.referenceQuery(reference_node, nodes=True),
type="xgmPalette"
)
for palette in palettes:
for description in xgenm.descriptions(palette):
descriptions_data.append([palette, description])
connections_msg += "\n{}/{}".format(palette, description)
message = "Connecting \"{}\" to:\n".format(
source_container["namespace"]
)
message += connections_msg
choice = self.display_warning(message, show_cancel=True)
if choice is False:
return
# Recreate "xgenContainers" attribute to reset.
compound_name = "xgenContainers"
attr = "{}.{}".format(source_object, compound_name)
if cmds.objExists(attr):
cmds.deleteAttr(attr)
cmds.addAttr(
source_object,
longName=compound_name,
attributeType="compound",
numberOfChildren=1,
multi=True
)
# Connect target containers.
for target_container in target_containers:
cmds.addAttr(
source_object,
longName="container",
attributeType="message",
parent=compound_name
)
index = target_containers.index(target_container)
cmds.connectAttr(
target_container["objectName"] + ".message",
source_object + ".{}[{}].container".format(
compound_name, index
)
)
# Setup cache on Xgen
object = "SplinePrimitive"
for palette, description in descriptions_data:
xgenm.setAttr("useCache", "true", palette, description, object)
xgenm.setAttr("liveMode", "false", palette, description, object)
xgenm.setAttr(
"cacheFileName", source_path, palette, description, object
)
# Refresh UI and viewport.
de = xgenm.xgGlobal.DescriptionEditor
de.refresh("Full")
def display_warning(self, message, show_cancel=False):
"""Show feedback to user.
Returns:
bool
"""
from Qt import QtWidgets
accept = QtWidgets.QMessageBox.Ok
if show_cancel:
buttons = accept | QtWidgets.QMessageBox.Cancel
else:
buttons = accept
state = QtWidgets.QMessageBox.warning(
None,
"",
message,
buttons=buttons,
defaultButton=accept
)
return state == accept
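After the action runs, each target container's message attribute is wired into the source container's compound attribute, e.g. (hypothetical node names):

    # xgenHair_CON.message -> animMain_CON.xgenContainers[0].container
    # xgenFur_CON.message  -> animMain_CON.xgenContainers[1].container
    # which is what ReferenceLoader.update above iterates over via:
    # cmds.getAttr("animMain_CON.xgenContainers", size=True)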

View file

@ -93,7 +93,20 @@ class ImportMayaLoader(load.LoaderPlugin):
"""
representations = ["ma", "mb", "obj"]
families = ["*"]
families = [
"model",
"pointcache",
"proxyAbc",
"animation",
"mayaAscii",
"mayaScene",
"setdress",
"layout",
"camera",
"rig",
"camerarig",
"staticMesh"
]
label = "Import"
order = 10

View file

@ -25,9 +25,9 @@ class ReferenceLoader(openpype.hosts.maya.api.plugin.ReferenceLoader):
"camera",
"rig",
"camerarig",
"xgen",
"staticMesh",
"mvLook"]
representations = ["ma", "abc", "fbx", "mb"]
label = "Reference"

View file

@ -81,10 +81,11 @@ class VRayProxyLoader(load.LoaderPlugin):
c = colors.get(family)
if c is not None:
cmds.setAttr("{0}.useOutlinerColor".format(group_node), 1)
cmds.setAttr("{0}.outlinerColor".format(group_node),
(float(c[0])/255),
(float(c[1])/255),
(float(c[2])/255)
cmds.setAttr(
"{0}.outlinerColor".format(group_node),
(float(c[0]) / 255),
(float(c[1]) / 255),
(float(c[2]) / 255)
)
return containerise(
@ -101,7 +102,7 @@ class VRayProxyLoader(load.LoaderPlugin):
assert cmds.objExists(node), "Missing container"
members = cmds.sets(node, query=True) or []
vraymeshes = cmds.ls(members, type="VRayMesh")
vraymeshes = cmds.ls(members, type="VRayProxy")
assert vraymeshes, "Cannot find VRayMesh in container"
# get all representations for this version

View file

@ -0,0 +1,173 @@
import os
import maya.cmds as cmds
import xgenm
from Qt import QtWidgets
import openpype.hosts.maya.api.plugin
from openpype.hosts.maya.api.lib import (
maintained_selection,
get_container_members,
attribute_values,
write_xgen_file
)
from openpype.hosts.maya.api import current_file
from openpype.pipeline import get_representation_path
class XgenLoader(openpype.hosts.maya.api.plugin.ReferenceLoader):
"""Load Xgen as reference"""
families = ["xgen"]
representations = ["ma", "mb"]
label = "Reference Xgen"
icon = "code-fork"
color = "orange"
def get_xgen_xgd_paths(self, palette):
_, maya_extension = os.path.splitext(current_file())
xgen_file = current_file().replace(
maya_extension,
"__{}.xgen".format(palette.replace("|", "").replace(":", "__"))
)
xgd_file = xgen_file.replace(".xgen", ".xgd")
return xgen_file, xgd_file
def process_reference(self, context, name, namespace, options):
# Validate workfile has a path.
if current_file() is None:
QtWidgets.QMessageBox.warning(
None,
"",
"Current workfile has not been saved. Please save the workfile"
" before loading an Xgen."
)
return
maya_filepath = self.prepare_root_value(
self.fname, context["project"]["name"]
)
# Reference the Xgen file. Xgen does not like being referenced under a group.
new_nodes = []
with maintained_selection():
nodes = cmds.file(
maya_filepath,
namespace=namespace,
sharedReferenceFile=False,
reference=True,
returnNewNodes=True
)
xgen_palette = cmds.ls(
nodes, type="xgmPalette", long=True
)[0].replace("|", "")
xgen_file, xgd_file = self.get_xgen_xgd_paths(xgen_palette)
self.set_palette_attributes(xgen_palette, xgen_file, xgd_file)
# Change the cache and disk values of xgDataPath and xgProjectPath
# to ensure paths are set up correctly.
project_path = os.path.dirname(current_file()).replace("\\", "/")
xgenm.setAttr("xgProjectPath", project_path, xgen_palette)
data_path = "${{PROJECT}}xgen/collections/{};{}".format(
xgen_palette.replace(":", "__ns__"),
xgenm.getAttr("xgDataPath", xgen_palette)
)
xgenm.setAttr("xgDataPath", data_path, xgen_palette)
data = {"xgProjectPath": project_path, "xgDataPath": data_path}
write_xgen_file(data, xgen_file)
# This creates a float expression attribute. If we do not add any
# changes to the collection, Xgen does not create an xgd file on
# save, which gives errors when launching the workfile again because
# it tries to find the xgd file.
name = "custom_float_ignore"
if name not in xgenm.customAttrs(xgen_palette):
xgenm.addCustomAttr(
"custom_float_ignore", xgen_palette
)
shapes = cmds.ls(nodes, shapes=True, long=True)
new_nodes = (list(set(nodes) - set(shapes)))
self[:] = new_nodes
return new_nodes
def set_palette_attributes(self, xgen_palette, xgen_file, xgd_file):
cmds.setAttr(
"{}.xgBaseFile".format(xgen_palette),
os.path.basename(xgen_file),
type="string"
)
cmds.setAttr(
"{}.xgFileName".format(xgen_palette),
os.path.basename(xgd_file),
type="string"
)
cmds.setAttr("{}.xgExportAsDelta".format(xgen_palette), True)
def update(self, container, representation):
"""Workflow for updating Xgen.
- Copy and potentially overwrite the workspace .xgen file.
- Export changes to delta file.
- Set collection attributes to not include delta files.
- Update xgen maya file reference.
- Apply the delta file changes.
- Reset collection attributes to include delta files.
We have to follow this workflow because when referencing the xgen
collection, Maya implicitly imports the Xgen data from the xgen file, so
we don't have any control over when the delta file changes get applied.
The xgen and delta files are implicitly incremented, because they use
the workfile basename.
"""
container_node = container["objectName"]
members = get_container_members(container_node)
xgen_palette = cmds.ls(
members, type="xgmPalette", long=True
)[0].replace("|", "")
xgen_file, xgd_file = self.get_xgen_xgd_paths(xgen_palette)
# Export current changes to apply later.
xgenm.createDelta(xgen_palette.replace("|", ""), xgd_file)
self.set_palette_attributes(xgen_palette, xgen_file, xgd_file)
maya_file = get_representation_path(representation)
_, extension = os.path.splitext(maya_file)
new_xgen_file = maya_file.replace(extension, ".xgen")
data_path = ""
with open(new_xgen_file, "r") as f:
for line in f:
if line.startswith("\txgDataPath"):
line = line.rstrip()
data_path = line.split("\t")[-1]
break
project_path = os.path.dirname(current_file()).replace("\\", "/")
data_path = "${{PROJECT}}xgen/collections/{};{}".format(
xgen_palette.replace(":", "__ns__"),
data_path
)
data = {"xgProjectPath": project_path, "xgDataPath": data_path}
write_xgen_file(data, xgen_file)
attribute_data = {
"{}.xgFileName".format(xgen_palette): os.path.basename(xgen_file),
"{}.xgBaseFile".format(xgen_palette): "",
"{}.xgExportAsDelta".format(xgen_palette): False
}
with attribute_values(attribute_data):
super().update(container, representation)
xgenm.applyDelta(xgen_palette.replace("|", ""), xgd_file)
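For illustration, with a hypothetical palette name the data path rewritten above resolves to:

    palette = "chars:hairPalette"  # hypothetical
    data_path = "${{PROJECT}}xgen/collections/{};{}".format(
        palette.replace(":", "__ns__"),
        "previous/xgDataPath/value",
    )
    # -> "${PROJECT}xgen/collections/chars__ns__hairPalette;previous/xgDataPath/value"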

View file

@ -0,0 +1,71 @@
import os
from maya import cmds
import pyblish.api
from openpype.hosts.maya.api.lib import get_attribute_input
class CollectXgen(pyblish.api.InstancePlugin):
"""Collect Xgen"""
order = pyblish.api.CollectorOrder + 0.499999
label = "Collect Xgen"
families = ["xgen"]
def process(self, instance):
data = {
"xgmPalettes": cmds.ls(instance, type="xgmPalette", long=True),
"xgmDescriptions": cmds.ls(
instance, type="xgmDescription", long=True
),
"xgmSubdPatches": cmds.ls(instance, type="xgmSubdPatch", long=True)
}
data["xgenNodes"] = (
data["xgmPalettes"] +
data["xgmDescriptions"] +
data["xgmSubdPatches"]
)
if data["xgmPalettes"]:
data["xgmPalette"] = data["xgmPalettes"][0]
data["xgenConnections"] = {}
for node in data["xgmSubdPatches"]:
data["xgenConnections"][node] = {}
for attr in ["transform", "geometry"]:
input = get_attribute_input("{}.{}".format(node, attr))
data["xgenConnections"][node][attr] = input
# Collect all files under palette root as resources.
import xgenm
data_path = xgenm.getAttr(
"xgDataPath", data["xgmPalette"].replace("|", "")
).split(os.pathsep)[0]
data_path = data_path.replace(
"${PROJECT}",
xgenm.getAttr("xgProjectPath", data["xgmPalette"].replace("|", ""))
)
transfers = []
# Since we are duplicating this palette when extracting we predict that
# the name will be the basename without namespaces.
predicted_palette_name = data["xgmPalette"].split(":")[-1]
predicted_palette_name = predicted_palette_name.replace("|", "")
for root, _, files in os.walk(data_path):
for file in files:
source = os.path.join(root, file).replace("\\", "/")
destination = os.path.join(
instance.data["resourcesDir"],
"collections",
predicted_palette_name,
source.replace(data_path, "")[1:]
)
transfers.append((source, destination.replace("\\", "/")))
data["transfers"] = transfers
self.log.info(data)
instance.data.update(data)
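The transfers collected above map every file under the collection root into the instance resources, e.g. (hypothetical paths):

    # source:      /work/maya/xgen/collections/hairPalette/descriptions/desc1/region.ptx
    # destination: <resourcesDir>/collections/hairPalette/descriptions/desc1/region.ptx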

View file

@ -20,8 +20,7 @@ class ExtractMayaSceneRaw(publish.Extractor):
"mayaScene",
"setdress",
"layout",
"camerarig",
"xgen"]
"camerarig"]
scene_type = "ma"
def process(self, instance):

View file

@ -0,0 +1,250 @@
import os
import shutil
import copy
from maya import cmds
import pyblish.api
from openpype.hosts.maya.api.lib import extract_alembic
from openpype.pipeline import publish
from openpype.lib import StringTemplate
class ExtractWorkfileXgen(publish.Extractor):
"""Extract Workfile Xgen.
When submitting a render, we need to prep Xgen sidecar files.
"""
# Offset to run before workfile scene save.
order = pyblish.api.ExtractorOrder - 0.499
label = "Extract Workfile Xgen"
families = ["workfile"]
hosts = ["maya"]
def get_render_max_frame_range(self, context):
"""Return start to end frame range including all renderlayers in
context.
This will return the full frame range which includes all frames of the
renderlayer instances to be published/submitted.
Args:
context (pyblish.api.Context): Current publishing context.
Returns:
tuple or None: Start frame, end frame tuple if any renderlayers
found. Otherwise None is returned.
"""
def _is_active_renderlayer(i):
"""Return whether instance is active renderlayer"""
if not i.data.get("publish", True):
return False
is_renderlayer = (
"renderlayer" in i.data.get("families", []) or
i.data["family"] == "renderlayer"
)
return is_renderlayer
start_frame = None
end_frame = None
for instance in context:
if not _is_active_renderlayer(instance):
# Only consider renderlayer instances
continue
render_start_frame = instance.data["frameStart"]
render_end_frame = instance.data["frameEnd"]
if start_frame is None:
start_frame = render_start_frame
else:
start_frame = min(start_frame, render_start_frame)
if end_frame is None:
end_frame = render_end_frame
else:
end_frame = max(end_frame, render_end_frame)
if start_frame is None or end_frame is None:
return
return start_frame, end_frame
def process(self, instance):
transfers = []
# Validate there are palettes in the scene.
if not cmds.ls(type="xgmPalette"):
self.log.debug(
"No collections found in the scene. Skipping Xgen extraction."
)
return
import xgenm
# Only extract when we are publishing a renderlayer as well.
render_range = self.get_render_max_frame_range(instance.context)
if not render_range:
self.log.debug(
"No publishable renderlayers found in context. Skipping Xgen"
" extraction."
)
return
start_frame, end_frame = render_range
# We decrement start frame and increment end frame so motion blur will
# render correctly.
start_frame -= 1
end_frame += 1
# Extract patches alembic.
path_no_ext, _ = os.path.splitext(instance.context.data["currentFile"])
kwargs = {"attrPrefix": ["xgen"], "stripNamespaces": True}
alembic_files = []
for palette in cmds.ls(type="xgmPalette"):
patch_names = []
for description in xgenm.descriptions(palette):
for name in xgenm.boundGeometry(palette, description):
patch_names.append(name)
alembic_file = "{}__{}.abc".format(
path_no_ext, palette.replace(":", "__ns__")
)
extract_alembic(
alembic_file,
root=patch_names,
selection=False,
startFrame=float(start_frame),
endFrame=float(end_frame),
verbose=True,
**kwargs
)
alembic_files.append(alembic_file)
template_data = copy.deepcopy(instance.data["anatomyData"])
published_maya_path = StringTemplate(
instance.context.data["anatomy"].templates["publish"]["file"]
).format(template_data)
published_basename, _ = os.path.splitext(published_maya_path)
for source in alembic_files:
destination = os.path.join(
os.path.dirname(instance.data["resourcesDir"]),
os.path.basename(
source.replace(path_no_ext, published_basename)
)
)
transfers.append((source, destination))
# Validate that we are using the published workfile.
deadline_settings = instance.context.get("deadline")
if deadline_settings:
publish_settings = deadline_settings["publish"]
if not publish_settings["MayaSubmitDeadline"]["use_published"]:
self.log.debug(
"Not using the published workfile. Abort Xgen extraction."
)
return
# Collect Xgen and Delta files.
xgen_files = []
sources = []
current_dir = os.path.dirname(instance.context.data["currentFile"])
attrs = ["xgFileName", "xgBaseFile"]
for palette in cmds.ls(type="xgmPalette"):
for attr in attrs:
source = os.path.join(
current_dir, cmds.getAttr(palette + "." + attr)
)
if not os.path.exists(source):
continue
ext = os.path.splitext(source)[1]
if ext == ".xgen":
xgen_files.append(source)
if ext == ".xgd":
sources.append(source)
# Copy .xgen file to temporary location and modify.
staging_dir = self.staging_dir(instance)
for source in xgen_files:
destination = os.path.join(staging_dir, os.path.basename(source))
shutil.copy(source, destination)
lines = []
with open(destination, "r") as f:
for line in [line.rstrip() for line in f]:
if line.startswith("\txgProjectPath"):
path = os.path.dirname(instance.data["resourcesDir"])
line = "\txgProjectPath\t\t{}/".format(
path.replace("\\", "/")
)
lines.append(line)
with open(destination, "w") as f:
f.write("\n".join(lines))
sources.append(destination)
# Add resource files to workfile instance.
for source in sources:
basename = os.path.basename(source)
destination = os.path.join(
os.path.dirname(instance.data["resourcesDir"]), basename
)
transfers.append((source, destination))
destination_dir = os.path.join(
instance.data["resourcesDir"], "collections"
)
for palette in cmds.ls(type="xgmPalette"):
project_path = xgenm.getAttr("xgProjectPath", palette)
data_path = xgenm.getAttr("xgDataPath", palette)
data_path = data_path.replace("${PROJECT}", project_path)
for path in data_path.split(";"):
for root, _, files in os.walk(path):
for f in files:
source = os.path.join(root, f)
destination = "{}/{}{}".format(
destination_dir,
palette.replace(":", "__ns__"),
source.replace(path, "")
)
transfers.append((source, destination))
for source, destination in transfers:
self.log.debug("Transfer: {} > {}".format(source, destination))
instance.data["transfers"] = transfers
# Set palette attributes in preparation for workfile publish.
attrs = {"xgFileName": None, "xgBaseFile": ""}
data = {}
for palette in cmds.ls(type="xgmPalette"):
attrs["xgFileName"] = "resources/{}.xgen".format(
palette.replace(":", "__ns__")
)
for attr, value in attrs.items():
node_attr = palette + "." + attr
old_value = cmds.getAttr(node_attr)
try:
data[palette][attr] = old_value
except KeyError:
data[palette] = {attr: old_value}
cmds.setAttr(node_attr, value, type="string")
self.log.info(
"Setting \"{}\" on \"{}\"".format(value, node_attr)
)
cmds.setAttr(palette + "." + "xgExportAsDelta", False)
instance.data["xgenAttributes"] = data

View file

@ -0,0 +1,142 @@
import os
import copy
import tempfile
from maya import cmds
import xgenm
from openpype.pipeline import publish
from openpype.hosts.maya.api.lib import (
maintained_selection, attribute_values, write_xgen_file, delete_after
)
from openpype.lib import StringTemplate
class ExtractXgen(publish.Extractor):
"""Extract Xgen
Workflow:
- Duplicate nodes used for patches.
- Export palette and import onto duplicate nodes.
- Export/Publish duplicate nodes and palette.
- Export duplicate palette to .xgen file and add to publish.
- Publish all xgen files as resources.
"""
label = "Extract Xgen"
hosts = ["maya"]
families = ["xgen"]
scene_type = "ma"
def process(self, instance):
if "representations" not in instance.data:
instance.data["representations"] = []
staging_dir = self.staging_dir(instance)
maya_filename = "{}.{}".format(instance.data["name"], self.scene_type)
maya_filepath = os.path.join(staging_dir, maya_filename)
# Get published xgen file name.
template_data = copy.deepcopy(instance.data["anatomyData"])
template_data.update({"ext": "xgen"})
templates = instance.context.data["anatomy"].templates["publish"]
xgen_filename = StringTemplate(templates["file"]).format(template_data)
xgen_path = os.path.join(
self.staging_dir(instance), xgen_filename
).replace("\\", "/")
type = "mayaAscii" if self.scene_type == "ma" else "mayaBinary"
# Duplicate xgen setup.
with delete_after() as delete_bin:
duplicate_nodes = []
# Collect nodes to export.
for _, connections in instance.data["xgenConnections"].items():
transform_name = connections["transform"].split(".")[0]
# Duplicate the subd patch geometry transform.
duplicate_transform = cmds.duplicate(transform_name)[0]
delete_bin.append(duplicate_transform)
# Discard all children except the shapes.
shapes = cmds.listRelatives(duplicate_transform, shapes=True)
children = cmds.listRelatives(
duplicate_transform, children=True
)
cmds.delete(set(children) - set(shapes))
duplicate_transform = cmds.parent(
duplicate_transform, world=True
)[0]
duplicate_nodes.append(duplicate_transform)
# Export temp xgen palette files.
temp_xgen_path = os.path.join(
tempfile.gettempdir(), "temp.xgen"
).replace("\\", "/")
xgenm.exportPalette(
instance.data["xgmPalette"].replace("|", ""), temp_xgen_path
)
self.log.info("Extracted to {}".format(temp_xgen_path))
# Import xgen onto the duplicate.
with maintained_selection():
cmds.select(duplicate_nodes)
palette = xgenm.importPalette(temp_xgen_path, [])
delete_bin.append(palette)
# Export duplicated palettes.
xgenm.exportPalette(palette, xgen_path)
# Export Maya file.
attribute_data = {"{}.xgFileName".format(palette): xgen_filename}
with attribute_values(attribute_data):
with maintained_selection():
cmds.select(duplicate_nodes + [palette])
cmds.file(
maya_filepath,
force=True,
type=type,
exportSelected=True,
preserveReferences=False,
constructionHistory=True,
shader=True,
constraints=True,
expressions=True
)
self.log.info("Extracted to {}".format(maya_filepath))
if os.path.exists(temp_xgen_path):
os.remove(temp_xgen_path)
data = {
"xgDataPath": os.path.join(
instance.data["resourcesDir"],
"collections",
palette.replace(":", "__ns__")
).replace("\\", "/"),
"xgProjectPath": os.path.dirname(
instance.data["resourcesDir"]
).replace("\\", "/")
}
write_xgen_file(data, xgen_path)
# Adding representations.
representation = {
"name": "xgen",
"ext": "xgen",
"files": xgen_filename,
"stagingDir": staging_dir,
}
instance.data["representations"].append(representation)
representation = {
"name": self.scene_type,
"ext": self.scene_type,
"files": maya_filename,
"stagingDir": staging_dir
}
instance.data["representations"].append(representation)

View file

@ -1,64 +0,0 @@
import os
from maya import cmds
from openpype.pipeline import publish
from openpype.hosts.maya.api.lib import (
suspended_refresh,
maintained_selection
)
class ExtractXgenCache(publish.Extractor):
"""Produce an alembic of just xgen interactive groom
"""
label = "Extract Xgen ABC Cache"
hosts = ["maya"]
families = ["xgen"]
optional = True
def process(self, instance):
# Collect the out set nodes
out_descriptions = [node for node in instance
if cmds.nodeType(node) == "xgmSplineDescription"]
start = 1
end = 1
self.log.info("Extracting Xgen Cache..")
dirname = self.staging_dir(instance)
parent_dir = self.staging_dir(instance)
filename = "{name}.abc".format(**instance.data)
path = os.path.join(parent_dir, filename)
with suspended_refresh():
with maintained_selection():
command = (
'-file '
+ path
+ ' -df "ogawa" -fr '
+ str(start)
+ ' '
+ str(end)
+ ' -step 1 -mxf -wfw'
)
for desc in out_descriptions:
command += (" -obj " + desc)
cmds.xgmSplineCache(export=True, j=command)
if "representations" not in instance.data:
instance.data["representations"] = []
representation = {
'name': 'abc',
'ext': 'abc',
'files': filename,
"stagingDir": dirname,
}
instance.data["representations"].append(representation)
self.log.info("Extracted {} to {}".format(instance, dirname))

View file

@ -0,0 +1,36 @@
from maya import cmds
import pyblish.api
class ResetXgenAttributes(pyblish.api.InstancePlugin):
"""Reset Xgen attributes.
When the incremental save of the workfile triggers, the Xgen attributes
change, so this plugin changes them back to the values from before publishing.
"""
label = "Reset Xgen Attributes."
# Offset to run after workfile increment plugin.
order = pyblish.api.IntegratorOrder + 10.0
families = ["workfile"]
def process(self, instance):
xgen_attributes = instance.data.get("xgenAttributes", {})
if not xgen_attributes:
return
for palette, data in xgen_attributes.items():
for attr, value in data.items():
node_attr = "{}.{}".format(palette, attr)
self.log.info(
"Setting \"{}\" on \"{}\"".format(value, node_attr)
)
cmds.setAttr(node_attr, value, type="string")
cmds.setAttr(palette + ".xgExportAsDelta", True)
# Need to save the scene, because the attribute changes above do not
# mark the scene as modified, so the user could exit without committing
# the changes.
self.log.info("Saving changes.")
cmds.file(save=True)

View file

@ -0,0 +1,18 @@
from maya import cmds
import pyblish.api
from openpype.pipeline import PublishValidationError
class ValidateVray(pyblish.api.InstancePlugin):
"""Validate general Vray setup."""
order = pyblish.api.ValidatorOrder
label = 'VRay'
hosts = ["maya"]
families = ["vrayproxy"]
def process(self, instance):
# Validate vray plugin is loaded.
if not cmds.pluginInfo("vrayformaya", query=True, loaded=True):
raise PublishValidationError("Vray plugin is not loaded.")
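If this validation fails, the fix is simply loading the plugin; a sketch of what a repair step could look like (not part of this plugin):

    from maya import cmds

    if not cmds.pluginInfo("vrayformaya", query=True, loaded=True):
        cmds.loadPlugin("vrayformaya", quiet=True)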

View file

@ -0,0 +1,59 @@
import json
import maya.cmds as cmds
import xgenm
import pyblish.api
from openpype.pipeline.publish import PublishValidationError
class ValidateXgen(pyblish.api.InstancePlugin):
"""Validate Xgen data."""
label = "Validate Xgen"
order = pyblish.api.ValidatorOrder
host = ["maya"]
families = ["xgen"]
def process(self, instance):
set_members = instance.data.get("setMembers")
# Only 1 collection/node per instance.
if len(set_members) != 1:
raise PublishValidationError(
"Only one collection per instance is allowed."
" Found:\n{}".format(set_members)
)
# Only xgen palette node is allowed.
node_type = cmds.nodeType(set_members[0])
if node_type != "xgmPalette":
raise PublishValidationError(
"Only node of type \"xgmPalette\" are allowed. Referred to as"
" \"collection\" in the Maya UI."
" Node type found: {}".format(node_type)
)
# Can't have inactive modifiers in the collection because Xgen will try
# to look for them when loading.
palette = instance.data["xgmPalette"].replace("|", "")
inactive_modifiers = {}
for description in instance.data["xgmDescriptions"]:
description = description.split("|")[-2]
modifier_names = xgenm.fxModules(palette, description)
for name in modifier_names:
attr = xgenm.getAttr("active", palette, description, name)
# Attribute values are lowercase strings of false/true.
if attr == "false":
try:
inactive_modifiers[description].append(name)
except KeyError:
inactive_modifiers[description] = [name]
if inactive_modifiers:
raise PublishValidationError(
"There are inactive modifiers on the collection. "
"Please delete these:\n{}".format(
json.dumps(inactive_modifiers, indent=4, sort_keys=True)
)
)

View file

@ -1,16 +1,33 @@
import os
from functools import partial
from openpype.settings import get_project_settings
from openpype.pipeline import install_host
from openpype.hosts.maya.api import MayaHost
from maya import cmds
host = MayaHost()
install_host(host)
print("starting OpenPype usersetup")
print("Starting OpenPype usersetup...")
# build a shelf
# Open Workfile Post Initialization.
key = "OPENPYPE_OPEN_WORKFILE_POST_INITIALIZATION"
if bool(int(os.environ.get(key, "0"))):
cmds.evalDeferred(
partial(
cmds.file,
os.environ["AVALON_LAST_WORKFILE"],
open=True,
force=True
),
lowestPriority=True
)
# Build a shelf.
settings = get_project_settings(os.environ['AVALON_PROJECT'])
shelf_preset = settings['maya'].get('project_shelf')
@ -26,7 +43,10 @@ if shelf_preset:
print(import_string)
exec(import_string)
cmds.evalDeferred("mlib.shelf(name=shelf_preset['name'], iconPath=icon_path, preset=shelf_preset)")
cmds.evalDeferred(
"mlib.shelf(name=shelf_preset['name'], iconPath=icon_path,"
" preset=shelf_preset)"
)
print("finished OpenPype usersetup")
print("Finished OpenPype usersetup.")

View file

@ -64,6 +64,16 @@ class FtrackModule(
self._timers_manager_module = None
def get_ftrack_url(self):
"""Resolved ftrack url.
Resolving tries to fill in missing information in the url and to
connect to the server.
Returns:
Union[str, None]: Final variant of url or None if url could not be
reached.
"""
if self._ftrack_url is _URL_NOT_SET:
self._ftrack_url = resolve_ftrack_url(
self._settings_ftrack_url,
@ -73,8 +83,19 @@ class FtrackModule(
ftrack_url = property(get_ftrack_url)
@property
def settings_ftrack_url(self):
"""Ftrack url from settings in a format as it is.
Returns:
str: Ftrack url from settings.
"""
return self._settings_ftrack_url
def get_global_environments(self):
"""Ftrack's global environments."""
return {
"FTRACK_SERVER": self.ftrack_url
}
@ -510,7 +531,10 @@ def resolve_ftrack_url(url, logger=None):
url = "https://" + url
ftrack_url = None
if not url.endswith("ftrackapp.com"):
if url and _check_ftrack_url(url):
ftrack_url = url
if not ftrack_url and not url.endswith("ftrackapp.com"):
ftrackapp_url = url + ".ftrackapp.com"
if _check_ftrack_url(ftrackapp_url):
ftrack_url = ftrackapp_url
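With the change above, a bare studio name is now tried directly before falling back to the ftrackapp.com variant; for a hypothetical value the resolution proceeds roughly as:

    # resolve_ftrack_url("mystudio")
    #   1. prepends scheme           -> "https://mystudio"
    #   2. checks that url           -> unreachable, ftrack_url stays None
    #   3. appends ".ftrackapp.com"  -> "https://mystudio.ftrackapp.com"
    #   4. reachable                 -> returned as the resolved url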

View file

@ -139,8 +139,7 @@ class CredentialsDialog(QtWidgets.QDialog):
self.fill_ftrack_url()
def fill_ftrack_url(self):
url = os.getenv("FTRACK_SERVER")
checked_url = self.check_url(url)
checked_url = self.check_url()
if checked_url == self.ftsite_input.text():
return
@ -154,7 +153,7 @@ class CredentialsDialog(QtWidgets.QDialog):
self.api_input.setEnabled(enabled)
self.user_input.setEnabled(enabled)
if not url:
if not checked_url:
self.btn_advanced.hide()
self.btn_simple.hide()
self.btn_ftrack_login.hide()
@ -254,7 +253,7 @@ class CredentialsDialog(QtWidgets.QDialog):
)
def _on_ftrack_login_clicked(self):
url = self.check_url(self.ftsite_input.text())
url = self.check_url()
if not url:
return
@ -302,21 +301,21 @@ class CredentialsDialog(QtWidgets.QDialog):
if is_logged is not None:
self.set_is_logged(is_logged)
def check_url(self, url):
if url is not None:
url = url.strip("/ ")
if not url:
def check_url(self):
settings_url = self._module.settings_ftrack_url
url = self._module.ftrack_url
if not settings_url:
self.set_error(
"Ftrack URL is not defined in settings!"
)
return
if "http" not in url:
if url.endswith("ftrackapp.com"):
url = "https://" + url
else:
url = "https://{}.ftrackapp.com".format(url)
if url is None:
self.set_error(
"Specified URL does not lead to a valid Ftrack server."
)
return
try:
result = requests.get(
url,

View file

@ -61,7 +61,8 @@ class CollectResourcesPath(pyblish.api.InstancePlugin):
"background",
"effect",
"staticMesh",
"skeletalMesh"
"skeletalMesh",
"xgen"
]
def process(self, instance):

View file

@ -1,4 +1,5 @@
{
"open_workfile_post_initialization": false,
"imageio": {
"ocio_config": {
"enabled": false,

View file

@ -5,6 +5,11 @@
"label": "Maya",
"is_file": true,
"children": [
{
"type": "boolean",
"key": "open_workfile_post_initialization",
"label": "Open Workfile Post Initialization"
},
{
"key": "imageio",
"type": "dict",