Merge remote-tracking branch 'upstream/develop' into develop
|
|
@ -1,4 +1,5 @@
|
|||
import os
|
||||
|
||||
from openpype.lib import PreLaunchHook
|
||||
|
||||
|
||||
|
|
@ -40,5 +41,13 @@ class AddLastWorkfileToLaunchArgs(PreLaunchHook):
|
|||
self.log.info("Current context does not have any workfile yet.")
|
||||
return
|
||||
|
||||
# Determine whether to open workfile post initialization.
|
||||
if self.host_name == "maya":
|
||||
key = "open_workfile_post_initialization"
|
||||
if self.data["project_settings"]["maya"][key]:
|
||||
self.log.debug("Opening workfile post initialization.")
|
||||
self.data["env"]["OPENPYPE_" + key.upper()] = "1"
|
||||
return
|
||||
|
||||
# Add path to workfile to arguments
|
||||
self.launch_context.launch_args.append(last_workfile)
|
||||
|
|
|
|||
|
|
@ -123,7 +123,7 @@ class HostBase(object):
|
|||
Union[str, None]: Current task name.
|
||||
"""
|
||||
|
||||
return os.environ.get("AVALON_ASSET")
|
||||
return os.environ.get("AVALON_TASK")
|
||||
|
||||
def get_current_context(self):
|
||||
"""Get current context information.
|
||||
|
|
|
|||
|
|
@ -12,6 +12,11 @@ class MaxAddon(OpenPypeModule, IHostAddon):
|
|||
def initialize(self, module_settings):
|
||||
self.enabled = True
|
||||
|
||||
def add_implementation_envs(self, env, _app):
|
||||
# Remove auto screen scale factor for Qt
|
||||
# - let 3dsmax decide it's value
|
||||
env.pop("QT_AUTO_SCREEN_SCALE_FACTOR", None)
|
||||
|
||||
def get_workfile_extensions(self):
|
||||
return [".max"]
|
||||
|
||||
|
|
|
|||
|
|
@ -1,4 +1,13 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
import os
|
||||
import sys
|
||||
|
||||
# this might happen in some 3dsmax version where PYTHONPATH isn't added
|
||||
# to sys.path automatically
|
||||
for path in os.environ["PYTHONPATH"].split(os.pathsep):
|
||||
if path and path not in sys.path:
|
||||
sys.path.append(path)
|
||||
|
||||
from openpype.hosts.max.api import MaxHost
|
||||
from openpype.pipeline import install_host
|
||||
|
||||
|
|
|
|||
|
|
@ -5,6 +5,7 @@ import sys
|
|||
import platform
|
||||
import uuid
|
||||
import math
|
||||
import re
|
||||
|
||||
import json
|
||||
import logging
|
||||
|
|
@ -3353,3 +3354,34 @@ def iter_visible_nodes_in_range(nodes, start, end):
|
|||
def get_attribute_input(attr):
|
||||
connections = cmds.listConnections(attr, plugs=True, destination=False)
|
||||
return connections[0] if connections else None
|
||||
|
||||
|
||||
def write_xgen_file(data, filepath):
|
||||
"""Overwrites data in .xgen files.
|
||||
|
||||
Quite naive approach to mainly overwrite "xgDataPath" and "xgProjectPath".
|
||||
|
||||
Args:
|
||||
data (dict): Dictionary of key, value. Key matches with xgen file.
|
||||
For example:
|
||||
{"xgDataPath": "some/path"}
|
||||
filepath (string): Absolute path of .xgen file.
|
||||
"""
|
||||
# Generate regex lookup for line to key basically
|
||||
# match any of the keys in `\t{key}\t\t`
|
||||
keys = "|".join(re.escape(key) for key in data.keys())
|
||||
re_keys = re.compile("^\t({})\t\t".format(keys))
|
||||
|
||||
lines = []
|
||||
with open(filepath, "r") as f:
|
||||
for line in f:
|
||||
match = re_keys.match(line)
|
||||
if match:
|
||||
key = match.group(1)
|
||||
value = data[key]
|
||||
line = "\t{}\t\t{}\n".format(key, value)
|
||||
|
||||
lines.append(line)
|
||||
|
||||
with open(filepath, "w") as f:
|
||||
f.writelines(lines)
|
||||
|
|
|
|||
|
|
@ -514,6 +514,9 @@ def check_lock_on_current_file():
|
|||
|
||||
# add the lock file when opening the file
|
||||
filepath = current_file()
|
||||
# Skip if current file is 'untitled'
|
||||
if not filepath:
|
||||
return
|
||||
|
||||
if is_workfile_locked(filepath):
|
||||
# add lockfile dialog
|
||||
|
|
@ -680,10 +683,12 @@ def before_workfile_save(event):
|
|||
|
||||
def after_workfile_save(event):
|
||||
workfile_name = event["filename"]
|
||||
if handle_workfile_locks():
|
||||
if workfile_name:
|
||||
if not is_workfile_locked(workfile_name):
|
||||
create_workfile_lock(workfile_name)
|
||||
if (
|
||||
handle_workfile_locks()
|
||||
and workfile_name
|
||||
and not is_workfile_locked(workfile_name)
|
||||
):
|
||||
create_workfile_lock(workfile_name)
|
||||
|
||||
|
||||
class MayaDirmap(HostDirmap):
|
||||
|
|
|
|||
|
|
@ -300,6 +300,39 @@ class ReferenceLoader(Loader):
|
|||
str(representation["_id"]),
|
||||
type="string")
|
||||
|
||||
# When an animation or pointcache gets connected to an Xgen container,
|
||||
# the compound attribute "xgenContainers" gets created. When animation
|
||||
# containers gets updated we also need to update the cacheFileName on
|
||||
# the Xgen collection.
|
||||
compound_name = "xgenContainers"
|
||||
if cmds.objExists("{}.{}".format(node, compound_name)):
|
||||
import xgenm
|
||||
container_amount = cmds.getAttr(
|
||||
"{}.{}".format(node, compound_name), size=True
|
||||
)
|
||||
# loop through all compound children
|
||||
for i in range(container_amount):
|
||||
attr = "{}.{}[{}].container".format(node, compound_name, i)
|
||||
objectset = cmds.listConnections(attr)[0]
|
||||
reference_node = cmds.sets(objectset, query=True)[0]
|
||||
palettes = cmds.ls(
|
||||
cmds.referenceQuery(reference_node, nodes=True),
|
||||
type="xgmPalette"
|
||||
)
|
||||
for palette in palettes:
|
||||
for description in xgenm.descriptions(palette):
|
||||
xgenm.setAttr(
|
||||
"cacheFileName",
|
||||
path.replace("\\", "/"),
|
||||
palette,
|
||||
description,
|
||||
"SplinePrimitive"
|
||||
)
|
||||
|
||||
# Refresh UI and viewport.
|
||||
de = xgenm.xgGlobal.DescriptionEditor
|
||||
de.refresh("Full")
|
||||
|
||||
def remove(self, container):
|
||||
"""Remove an existing `container` from Maya scene
|
||||
|
||||
|
|
|
|||
|
|
@ -2,9 +2,9 @@ from openpype.hosts.maya.api import plugin
|
|||
|
||||
|
||||
class CreateXgen(plugin.Creator):
|
||||
"""Xgen interactive export"""
|
||||
"""Xgen"""
|
||||
|
||||
name = "xgen"
|
||||
label = "Xgen Interactive"
|
||||
label = "Xgen"
|
||||
family = "xgen"
|
||||
icon = "pagelines"
|
||||
|
|
|
|||
153
openpype/hosts/maya/plugins/inventory/connect_geometry.py
Normal file
|
|
@ -0,0 +1,153 @@
|
|||
from maya import cmds
|
||||
|
||||
from openpype.pipeline import InventoryAction, get_representation_context
|
||||
from openpype.hosts.maya.api.lib import get_id
|
||||
|
||||
|
||||
class ConnectGeometry(InventoryAction):
|
||||
"""Connect geometries within containers.
|
||||
|
||||
Source container will connect to the target containers, by searching for
|
||||
matching geometry IDs (cbid).
|
||||
Source containers are of family; "animation" and "pointcache".
|
||||
The connection with be done with a live world space blendshape.
|
||||
"""
|
||||
|
||||
label = "Connect Geometry"
|
||||
icon = "link"
|
||||
color = "white"
|
||||
|
||||
def process(self, containers):
|
||||
# Validate selection is more than 1.
|
||||
message = (
|
||||
"Only 1 container selected. 2+ containers needed for this action."
|
||||
)
|
||||
if len(containers) == 1:
|
||||
self.display_warning(message)
|
||||
return
|
||||
|
||||
# Categorize containers by family.
|
||||
containers_by_family = {}
|
||||
for container in containers:
|
||||
family = get_representation_context(
|
||||
container["representation"]
|
||||
)["subset"]["data"]["family"]
|
||||
try:
|
||||
containers_by_family[family].append(container)
|
||||
except KeyError:
|
||||
containers_by_family[family] = [container]
|
||||
|
||||
# Validate to only 1 source container.
|
||||
source_containers = containers_by_family.get("animation", [])
|
||||
source_containers += containers_by_family.get("pointcache", [])
|
||||
source_container_namespaces = [
|
||||
x["namespace"] for x in source_containers
|
||||
]
|
||||
message = (
|
||||
"{} animation containers selected:\n\n{}\n\nOnly select 1 of type "
|
||||
"\"animation\" or \"pointcache\".".format(
|
||||
len(source_containers), source_container_namespaces
|
||||
)
|
||||
)
|
||||
if len(source_containers) != 1:
|
||||
self.display_warning(message)
|
||||
return
|
||||
|
||||
source_object = source_containers[0]["objectName"]
|
||||
|
||||
# Collect matching geometry transforms based cbId attribute.
|
||||
target_containers = []
|
||||
for family, containers in containers_by_family.items():
|
||||
if family in ["animation", "pointcache"]:
|
||||
continue
|
||||
|
||||
target_containers.extend(containers)
|
||||
|
||||
source_data = self.get_container_data(source_object)
|
||||
matches = []
|
||||
node_types = set()
|
||||
for target_container in target_containers:
|
||||
target_data = self.get_container_data(
|
||||
target_container["objectName"]
|
||||
)
|
||||
node_types.update(target_data["node_types"])
|
||||
for id, transform in target_data["ids"].items():
|
||||
source_match = source_data["ids"].get(id)
|
||||
if source_match:
|
||||
matches.append([source_match, transform])
|
||||
|
||||
# Message user about what is about to happen.
|
||||
if not matches:
|
||||
self.display_warning("No matching geometries found.")
|
||||
return
|
||||
|
||||
message = "Connecting geometries:\n\n"
|
||||
for match in matches:
|
||||
message += "{} > {}\n".format(match[0], match[1])
|
||||
|
||||
choice = self.display_warning(message, show_cancel=True)
|
||||
if choice is False:
|
||||
return
|
||||
|
||||
# Setup live worldspace blendshape connection.
|
||||
for source, target in matches:
|
||||
blendshape = cmds.blendShape(source, target)[0]
|
||||
cmds.setAttr(blendshape + ".origin", 0)
|
||||
cmds.setAttr(blendshape + "." + target.split(":")[-1], 1)
|
||||
|
||||
# Update Xgen if in any of the containers.
|
||||
if "xgmPalette" in node_types:
|
||||
cmds.xgmPreview()
|
||||
|
||||
def get_container_data(self, container):
|
||||
"""Collects data about the container nodes.
|
||||
|
||||
Args:
|
||||
container (dict): Container instance.
|
||||
|
||||
Returns:
|
||||
data (dict):
|
||||
"node_types": All node types in container nodes.
|
||||
"ids": If the node is a mesh, we collect its parent transform
|
||||
id.
|
||||
"""
|
||||
data = {"node_types": set(), "ids": {}}
|
||||
ref_node = cmds.sets(container, query=True, nodesOnly=True)[0]
|
||||
for node in cmds.referenceQuery(ref_node, nodes=True):
|
||||
node_type = cmds.nodeType(node)
|
||||
data["node_types"].add(node_type)
|
||||
|
||||
# Only interested in mesh transforms for connecting geometry with
|
||||
# blendshape.
|
||||
if node_type != "mesh":
|
||||
continue
|
||||
|
||||
transform = cmds.listRelatives(node, parent=True)[0]
|
||||
data["ids"][get_id(transform)] = transform
|
||||
|
||||
return data
|
||||
|
||||
def display_warning(self, message, show_cancel=False):
|
||||
"""Show feedback to user.
|
||||
|
||||
Returns:
|
||||
bool
|
||||
"""
|
||||
|
||||
from Qt import QtWidgets
|
||||
|
||||
accept = QtWidgets.QMessageBox.Ok
|
||||
if show_cancel:
|
||||
buttons = accept | QtWidgets.QMessageBox.Cancel
|
||||
else:
|
||||
buttons = accept
|
||||
|
||||
state = QtWidgets.QMessageBox.warning(
|
||||
None,
|
||||
"",
|
||||
message,
|
||||
buttons=buttons,
|
||||
defaultButton=accept
|
||||
)
|
||||
|
||||
return state == accept
|
||||
168
openpype/hosts/maya/plugins/inventory/connect_xgen.py
Normal file
|
|
@ -0,0 +1,168 @@
|
|||
from maya import cmds
|
||||
import xgenm
|
||||
|
||||
from openpype.pipeline import (
|
||||
InventoryAction, get_representation_context, get_representation_path
|
||||
)
|
||||
|
||||
|
||||
class ConnectXgen(InventoryAction):
|
||||
"""Connect Xgen with an animation or pointcache.
|
||||
"""
|
||||
|
||||
label = "Connect Xgen"
|
||||
icon = "link"
|
||||
color = "white"
|
||||
|
||||
def process(self, containers):
|
||||
# Validate selection is more than 1.
|
||||
message = (
|
||||
"Only 1 container selected. 2+ containers needed for this action."
|
||||
)
|
||||
if len(containers) == 1:
|
||||
self.display_warning(message)
|
||||
return
|
||||
|
||||
# Categorize containers by family.
|
||||
containers_by_family = {}
|
||||
for container in containers:
|
||||
family = get_representation_context(
|
||||
container["representation"]
|
||||
)["subset"]["data"]["family"]
|
||||
try:
|
||||
containers_by_family[family].append(container)
|
||||
except KeyError:
|
||||
containers_by_family[family] = [container]
|
||||
|
||||
# Validate to only 1 source container.
|
||||
source_containers = containers_by_family.get("animation", [])
|
||||
source_containers += containers_by_family.get("pointcache", [])
|
||||
source_container_namespaces = [
|
||||
x["namespace"] for x in source_containers
|
||||
]
|
||||
message = (
|
||||
"{} animation containers selected:\n\n{}\n\nOnly select 1 of type "
|
||||
"\"animation\" or \"pointcache\".".format(
|
||||
len(source_containers), source_container_namespaces
|
||||
)
|
||||
)
|
||||
if len(source_containers) != 1:
|
||||
self.display_warning(message)
|
||||
return
|
||||
|
||||
source_container = source_containers[0]
|
||||
source_object = source_container["objectName"]
|
||||
|
||||
# Validate source representation is an alembic.
|
||||
source_path = get_representation_path(
|
||||
get_representation_context(
|
||||
source_container["representation"]
|
||||
)["representation"]
|
||||
).replace("\\", "/")
|
||||
message = "Animation container \"{}\" is not an alembic:\n{}".format(
|
||||
source_container["namespace"], source_path
|
||||
)
|
||||
if not source_path.endswith(".abc"):
|
||||
self.display_warning(message)
|
||||
return
|
||||
|
||||
# Target containers.
|
||||
target_containers = []
|
||||
for family, containers in containers_by_family.items():
|
||||
if family in ["animation", "pointcache"]:
|
||||
continue
|
||||
|
||||
target_containers.extend(containers)
|
||||
|
||||
# Inform user of connections from source representation to target
|
||||
# descriptions.
|
||||
descriptions_data = []
|
||||
connections_msg = ""
|
||||
for target_container in target_containers:
|
||||
reference_node = cmds.sets(
|
||||
target_container["objectName"], query=True
|
||||
)[0]
|
||||
palettes = cmds.ls(
|
||||
cmds.referenceQuery(reference_node, nodes=True),
|
||||
type="xgmPalette"
|
||||
)
|
||||
for palette in palettes:
|
||||
for description in xgenm.descriptions(palette):
|
||||
descriptions_data.append([palette, description])
|
||||
connections_msg += "\n{}/{}".format(palette, description)
|
||||
|
||||
message = "Connecting \"{}\" to:\n".format(
|
||||
source_container["namespace"]
|
||||
)
|
||||
message += connections_msg
|
||||
choice = self.display_warning(message, show_cancel=True)
|
||||
if choice is False:
|
||||
return
|
||||
|
||||
# Recreate "xgenContainers" attribute to reset.
|
||||
compound_name = "xgenContainers"
|
||||
attr = "{}.{}".format(source_object, compound_name)
|
||||
if cmds.objExists(attr):
|
||||
cmds.deleteAttr(attr)
|
||||
|
||||
cmds.addAttr(
|
||||
source_object,
|
||||
longName=compound_name,
|
||||
attributeType="compound",
|
||||
numberOfChildren=1,
|
||||
multi=True
|
||||
)
|
||||
|
||||
# Connect target containers.
|
||||
for target_container in target_containers:
|
||||
cmds.addAttr(
|
||||
source_object,
|
||||
longName="container",
|
||||
attributeType="message",
|
||||
parent=compound_name
|
||||
)
|
||||
index = target_containers.index(target_container)
|
||||
cmds.connectAttr(
|
||||
target_container["objectName"] + ".message",
|
||||
source_object + ".{}[{}].container".format(
|
||||
compound_name, index
|
||||
)
|
||||
)
|
||||
|
||||
# Setup cache on Xgen
|
||||
object = "SplinePrimitive"
|
||||
for palette, description in descriptions_data:
|
||||
xgenm.setAttr("useCache", "true", palette, description, object)
|
||||
xgenm.setAttr("liveMode", "false", palette, description, object)
|
||||
xgenm.setAttr(
|
||||
"cacheFileName", source_path, palette, description, object
|
||||
)
|
||||
|
||||
# Refresh UI and viewport.
|
||||
de = xgenm.xgGlobal.DescriptionEditor
|
||||
de.refresh("Full")
|
||||
|
||||
def display_warning(self, message, show_cancel=False):
|
||||
"""Show feedback to user.
|
||||
|
||||
Returns:
|
||||
bool
|
||||
"""
|
||||
|
||||
from Qt import QtWidgets
|
||||
|
||||
accept = QtWidgets.QMessageBox.Ok
|
||||
if show_cancel:
|
||||
buttons = accept | QtWidgets.QMessageBox.Cancel
|
||||
else:
|
||||
buttons = accept
|
||||
|
||||
state = QtWidgets.QMessageBox.warning(
|
||||
None,
|
||||
"",
|
||||
message,
|
||||
buttons=buttons,
|
||||
defaultButton=accept
|
||||
)
|
||||
|
||||
return state == accept
|
||||
|
|
@ -93,7 +93,20 @@ class ImportMayaLoader(load.LoaderPlugin):
|
|||
|
||||
"""
|
||||
representations = ["ma", "mb", "obj"]
|
||||
families = ["*"]
|
||||
families = [
|
||||
"model",
|
||||
"pointcache",
|
||||
"proxyAbc",
|
||||
"animation",
|
||||
"mayaAscii",
|
||||
"mayaScene",
|
||||
"setdress",
|
||||
"layout",
|
||||
"camera",
|
||||
"rig",
|
||||
"camerarig",
|
||||
"staticMesh"
|
||||
]
|
||||
|
||||
label = "Import"
|
||||
order = 10
|
||||
|
|
|
|||
|
|
@ -25,9 +25,9 @@ class ReferenceLoader(openpype.hosts.maya.api.plugin.ReferenceLoader):
|
|||
"camera",
|
||||
"rig",
|
||||
"camerarig",
|
||||
"xgen",
|
||||
"staticMesh",
|
||||
"mvLook"]
|
||||
|
||||
representations = ["ma", "abc", "fbx", "mb"]
|
||||
|
||||
label = "Reference"
|
||||
|
|
|
|||
173
openpype/hosts/maya/plugins/load/load_xgen.py
Normal file
|
|
@ -0,0 +1,173 @@
|
|||
import os
|
||||
|
||||
import maya.cmds as cmds
|
||||
import xgenm
|
||||
|
||||
from Qt import QtWidgets
|
||||
|
||||
import openpype.hosts.maya.api.plugin
|
||||
from openpype.hosts.maya.api.lib import (
|
||||
maintained_selection,
|
||||
get_container_members,
|
||||
attribute_values,
|
||||
write_xgen_file
|
||||
)
|
||||
from openpype.hosts.maya.api import current_file
|
||||
from openpype.pipeline import get_representation_path
|
||||
|
||||
|
||||
class XgenLoader(openpype.hosts.maya.api.plugin.ReferenceLoader):
|
||||
"""Load Xgen as reference"""
|
||||
|
||||
families = ["xgen"]
|
||||
representations = ["ma", "mb"]
|
||||
|
||||
label = "Reference Xgen"
|
||||
icon = "code-fork"
|
||||
color = "orange"
|
||||
|
||||
def get_xgen_xgd_paths(self, palette):
|
||||
_, maya_extension = os.path.splitext(current_file())
|
||||
xgen_file = current_file().replace(
|
||||
maya_extension,
|
||||
"__{}.xgen".format(palette.replace("|", "").replace(":", "__"))
|
||||
)
|
||||
xgd_file = xgen_file.replace(".xgen", ".xgd")
|
||||
return xgen_file, xgd_file
|
||||
|
||||
def process_reference(self, context, name, namespace, options):
|
||||
# Validate workfile has a path.
|
||||
if current_file() is None:
|
||||
QtWidgets.QMessageBox.warning(
|
||||
None,
|
||||
"",
|
||||
"Current workfile has not been saved. Please save the workfile"
|
||||
" before loading an Xgen."
|
||||
)
|
||||
return
|
||||
|
||||
maya_filepath = self.prepare_root_value(
|
||||
self.fname, context["project"]["name"]
|
||||
)
|
||||
|
||||
# Reference xgen. Xgen does not like being referenced in under a group.
|
||||
new_nodes = []
|
||||
|
||||
with maintained_selection():
|
||||
nodes = cmds.file(
|
||||
maya_filepath,
|
||||
namespace=namespace,
|
||||
sharedReferenceFile=False,
|
||||
reference=True,
|
||||
returnNewNodes=True
|
||||
)
|
||||
|
||||
xgen_palette = cmds.ls(
|
||||
nodes, type="xgmPalette", long=True
|
||||
)[0].replace("|", "")
|
||||
|
||||
xgen_file, xgd_file = self.get_xgen_xgd_paths(xgen_palette)
|
||||
self.set_palette_attributes(xgen_palette, xgen_file, xgd_file)
|
||||
|
||||
# Change the cache and disk values of xgDataPath and xgProjectPath
|
||||
# to ensure paths are setup correctly.
|
||||
project_path = os.path.dirname(current_file()).replace("\\", "/")
|
||||
xgenm.setAttr("xgProjectPath", project_path, xgen_palette)
|
||||
data_path = "${{PROJECT}}xgen/collections/{};{}".format(
|
||||
xgen_palette.replace(":", "__ns__"),
|
||||
xgenm.getAttr("xgDataPath", xgen_palette)
|
||||
)
|
||||
xgenm.setAttr("xgDataPath", data_path, xgen_palette)
|
||||
|
||||
data = {"xgProjectPath": project_path, "xgDataPath": data_path}
|
||||
write_xgen_file(data, xgen_file)
|
||||
|
||||
# This create an expression attribute of float. If we did not add
|
||||
# any changes to collection, then Xgen does not create an xgd file
|
||||
# on save. This gives errors when launching the workfile again due
|
||||
# to trying to find the xgd file.
|
||||
name = "custom_float_ignore"
|
||||
if name not in xgenm.customAttrs(xgen_palette):
|
||||
xgenm.addCustomAttr(
|
||||
"custom_float_ignore", xgen_palette
|
||||
)
|
||||
|
||||
shapes = cmds.ls(nodes, shapes=True, long=True)
|
||||
|
||||
new_nodes = (list(set(nodes) - set(shapes)))
|
||||
|
||||
self[:] = new_nodes
|
||||
|
||||
return new_nodes
|
||||
|
||||
def set_palette_attributes(self, xgen_palette, xgen_file, xgd_file):
|
||||
cmds.setAttr(
|
||||
"{}.xgBaseFile".format(xgen_palette),
|
||||
os.path.basename(xgen_file),
|
||||
type="string"
|
||||
)
|
||||
cmds.setAttr(
|
||||
"{}.xgFileName".format(xgen_palette),
|
||||
os.path.basename(xgd_file),
|
||||
type="string"
|
||||
)
|
||||
cmds.setAttr("{}.xgExportAsDelta".format(xgen_palette), True)
|
||||
|
||||
def update(self, container, representation):
|
||||
"""Workflow for updating Xgen.
|
||||
|
||||
- Copy and potentially overwrite the workspace .xgen file.
|
||||
- Export changes to delta file.
|
||||
- Set collection attributes to not include delta files.
|
||||
- Update xgen maya file reference.
|
||||
- Apply the delta file changes.
|
||||
- Reset collection attributes to include delta files.
|
||||
|
||||
We have to do this workflow because when using referencing of the xgen
|
||||
collection, Maya implicitly imports the Xgen data from the xgen file so
|
||||
we dont have any control over when adding the delta file changes.
|
||||
|
||||
There is an implicit increment of the xgen and delta files, due to
|
||||
using the workfile basename.
|
||||
"""
|
||||
|
||||
container_node = container["objectName"]
|
||||
members = get_container_members(container_node)
|
||||
xgen_palette = cmds.ls(
|
||||
members, type="xgmPalette", long=True
|
||||
)[0].replace("|", "")
|
||||
xgen_file, xgd_file = self.get_xgen_xgd_paths(xgen_palette)
|
||||
|
||||
# Export current changes to apply later.
|
||||
xgenm.createDelta(xgen_palette.replace("|", ""), xgd_file)
|
||||
|
||||
self.set_palette_attributes(xgen_palette, xgen_file, xgd_file)
|
||||
|
||||
maya_file = get_representation_path(representation)
|
||||
_, extension = os.path.splitext(maya_file)
|
||||
new_xgen_file = maya_file.replace(extension, ".xgen")
|
||||
data_path = ""
|
||||
with open(new_xgen_file, "r") as f:
|
||||
for line in f:
|
||||
if line.startswith("\txgDataPath"):
|
||||
line = line.rstrip()
|
||||
data_path = line.split("\t")[-1]
|
||||
break
|
||||
|
||||
project_path = os.path.dirname(current_file()).replace("\\", "/")
|
||||
data_path = "${{PROJECT}}xgen/collections/{};{}".format(
|
||||
xgen_palette.replace(":", "__ns__"),
|
||||
data_path
|
||||
)
|
||||
data = {"xgProjectPath": project_path, "xgDataPath": data_path}
|
||||
write_xgen_file(data, xgen_file)
|
||||
|
||||
attribute_data = {
|
||||
"{}.xgFileName".format(xgen_palette): os.path.basename(xgen_file),
|
||||
"{}.xgBaseFile".format(xgen_palette): "",
|
||||
"{}.xgExportAsDelta".format(xgen_palette): False
|
||||
}
|
||||
with attribute_values(attribute_data):
|
||||
super().update(container, representation)
|
||||
|
||||
xgenm.applyDelta(xgen_palette.replace("|", ""), xgd_file)
|
||||
71
openpype/hosts/maya/plugins/publish/collect_xgen.py
Normal file
|
|
@ -0,0 +1,71 @@
|
|||
import os
|
||||
|
||||
from maya import cmds
|
||||
|
||||
import pyblish.api
|
||||
from openpype.hosts.maya.api.lib import get_attribute_input
|
||||
|
||||
|
||||
class CollectXgen(pyblish.api.InstancePlugin):
|
||||
"""Collect Xgen"""
|
||||
|
||||
order = pyblish.api.CollectorOrder + 0.499999
|
||||
label = "Collect Xgen"
|
||||
families = ["xgen"]
|
||||
|
||||
def process(self, instance):
|
||||
data = {
|
||||
"xgmPalettes": cmds.ls(instance, type="xgmPalette", long=True),
|
||||
"xgmDescriptions": cmds.ls(
|
||||
instance, type="xgmDescription", long=True
|
||||
),
|
||||
"xgmSubdPatches": cmds.ls(instance, type="xgmSubdPatch", long=True)
|
||||
}
|
||||
data["xgenNodes"] = (
|
||||
data["xgmPalettes"] +
|
||||
data["xgmDescriptions"] +
|
||||
data["xgmSubdPatches"]
|
||||
)
|
||||
|
||||
if data["xgmPalettes"]:
|
||||
data["xgmPalette"] = data["xgmPalettes"][0]
|
||||
|
||||
data["xgenConnections"] = {}
|
||||
for node in data["xgmSubdPatches"]:
|
||||
data["xgenConnections"][node] = {}
|
||||
for attr in ["transform", "geometry"]:
|
||||
input = get_attribute_input("{}.{}".format(node, attr))
|
||||
data["xgenConnections"][node][attr] = input
|
||||
|
||||
# Collect all files under palette root as resources.
|
||||
import xgenm
|
||||
|
||||
data_path = xgenm.getAttr(
|
||||
"xgDataPath", data["xgmPalette"].replace("|", "")
|
||||
).split(os.pathsep)[0]
|
||||
data_path = data_path.replace(
|
||||
"${PROJECT}",
|
||||
xgenm.getAttr("xgProjectPath", data["xgmPalette"].replace("|", ""))
|
||||
)
|
||||
transfers = []
|
||||
|
||||
# Since we are duplicating this palette when extracting we predict that
|
||||
# the name will be the basename without namespaces.
|
||||
predicted_palette_name = data["xgmPalette"].split(":")[-1]
|
||||
predicted_palette_name = predicted_palette_name.replace("|", "")
|
||||
|
||||
for root, _, files in os.walk(data_path):
|
||||
for file in files:
|
||||
source = os.path.join(root, file).replace("\\", "/")
|
||||
destination = os.path.join(
|
||||
instance.data["resourcesDir"],
|
||||
"collections",
|
||||
predicted_palette_name,
|
||||
source.replace(data_path, "")[1:]
|
||||
)
|
||||
transfers.append((source, destination.replace("\\", "/")))
|
||||
|
||||
data["transfers"] = transfers
|
||||
|
||||
self.log.info(data)
|
||||
instance.data.update(data)
|
||||
|
|
@ -20,8 +20,7 @@ class ExtractMayaSceneRaw(publish.Extractor):
|
|||
"mayaScene",
|
||||
"setdress",
|
||||
"layout",
|
||||
"camerarig",
|
||||
"xgen"]
|
||||
"camerarig"]
|
||||
scene_type = "ma"
|
||||
|
||||
def process(self, instance):
|
||||
|
|
|
|||
250
openpype/hosts/maya/plugins/publish/extract_workfile_xgen.py
Normal file
|
|
@ -0,0 +1,250 @@
|
|||
import os
|
||||
import shutil
|
||||
import copy
|
||||
|
||||
from maya import cmds
|
||||
|
||||
import pyblish.api
|
||||
from openpype.hosts.maya.api.lib import extract_alembic
|
||||
from openpype.pipeline import publish
|
||||
from openpype.lib import StringTemplate
|
||||
|
||||
|
||||
class ExtractWorkfileXgen(publish.Extractor):
|
||||
"""Extract Workfile Xgen.
|
||||
|
||||
When submitting a render, we need to prep Xgen side car files.
|
||||
"""
|
||||
|
||||
# Offset to run before workfile scene save.
|
||||
order = pyblish.api.ExtractorOrder - 0.499
|
||||
label = "Extract Workfile Xgen"
|
||||
families = ["workfile"]
|
||||
hosts = ["maya"]
|
||||
|
||||
def get_render_max_frame_range(self, context):
|
||||
"""Return start to end frame range including all renderlayers in
|
||||
context.
|
||||
|
||||
This will return the full frame range which includes all frames of the
|
||||
renderlayer instances to be published/submitted.
|
||||
|
||||
Args:
|
||||
context (pyblish.api.Context): Current publishing context.
|
||||
|
||||
Returns:
|
||||
tuple or None: Start frame, end frame tuple if any renderlayers
|
||||
found. Otherwise None is returned.
|
||||
|
||||
"""
|
||||
|
||||
def _is_active_renderlayer(i):
|
||||
"""Return whether instance is active renderlayer"""
|
||||
if not i.data.get("publish", True):
|
||||
return False
|
||||
|
||||
is_renderlayer = (
|
||||
"renderlayer" in i.data.get("families", []) or
|
||||
i.data["family"] == "renderlayer"
|
||||
)
|
||||
return is_renderlayer
|
||||
|
||||
start_frame = None
|
||||
end_frame = None
|
||||
for instance in context:
|
||||
if not _is_active_renderlayer(instance):
|
||||
# Only consider renderlyare instances
|
||||
continue
|
||||
|
||||
render_start_frame = instance.data["frameStart"]
|
||||
render_end_frame = instance.data["frameStart"]
|
||||
|
||||
if start_frame is None:
|
||||
start_frame = render_start_frame
|
||||
else:
|
||||
start_frame = min(start_frame, render_start_frame)
|
||||
|
||||
if end_frame is None:
|
||||
end_frame = render_end_frame
|
||||
else:
|
||||
end_frame = max(end_frame, render_end_frame)
|
||||
|
||||
if start_frame is None or end_frame is None:
|
||||
return
|
||||
|
||||
return start_frame, end_frame
|
||||
|
||||
def process(self, instance):
    """Collect and stage Xgen resources for the workfile publish.

    Steps (each early-out is logged):
    - Skip when the scene has no Xgen palettes ("collections").
    - Skip when no publishable renderlayer supplies a frame range.
    - Export one patch-geometry alembic per palette over the render
      range, padded by one frame on each side for motion blur.
    - Queue .xgen/.xgd side-car files and every file under each
      palette's data path as resource "transfers".
    - Repoint palette attributes at the published resource locations and
      stash the previous values in ``instance.data["xgenAttributes"]``
      so a later plugin can restore them.
    """
    transfers = []

    # Validate there is any palettes in the scene.
    if not cmds.ls(type="xgmPalette"):
        self.log.debug(
            "No collections found in the scene. Skipping Xgen extraction."
        )
        return

    # NOTE(review): imported here rather than at module level —
    # presumably so the plugin can load when Xgen is unavailable; confirm.
    import xgenm

    # Validate to extract only when we are publishing a renderlayer as
    # well.
    render_range = self.get_render_max_frame_range(instance.context)
    if not render_range:
        self.log.debug(
            "No publishable renderlayers found in context. Skipping Xgen"
            " extraction."
        )
        return

    start_frame, end_frame = render_range

    # We decrement start frame and increment end frame so motion blur will
    # render correctly.
    start_frame -= 1
    end_frame += 1

    # Extract patches alembic. One alembic per palette, containing the
    # geometry each description is bound to.
    path_no_ext, _ = os.path.splitext(instance.context.data["currentFile"])
    kwargs = {"attrPrefix": ["xgen"], "stripNamespaces": True}
    alembic_files = []
    for palette in cmds.ls(type="xgmPalette"):
        patch_names = []
        for description in xgenm.descriptions(palette):
            for name in xgenm.boundGeometry(palette, description):
                patch_names.append(name)

        # Namespace separators are not filesystem-safe; encode them.
        alembic_file = "{}__{}.abc".format(
            path_no_ext, palette.replace(":", "__ns__")
        )
        extract_alembic(
            alembic_file,
            root=patch_names,
            selection=False,
            startFrame=float(start_frame),
            endFrame=float(end_frame),
            verbose=True,
            **kwargs
        )
        alembic_files.append(alembic_file)

    # Resolve the published workfile basename so the transferred alembic
    # names match the published scene name rather than the local one.
    template_data = copy.deepcopy(instance.data["anatomyData"])
    published_maya_path = StringTemplate(
        instance.context.data["anatomy"].templates["publish"]["file"]
    ).format(template_data)
    published_basename, _ = os.path.splitext(published_maya_path)

    for source in alembic_files:
        destination = os.path.join(
            os.path.dirname(instance.data["resourcesDir"]),
            os.path.basename(
                source.replace(path_no_ext, published_basename)
            )
        )
        transfers.append((source, destination))

    # Validate that we are using the published workfile.
    deadline_settings = instance.context.get("deadline")
    if deadline_settings:
        publish_settings = deadline_settings["publish"]
        if not publish_settings["MayaSubmitDeadline"]["use_published"]:
            self.log.debug(
                "Not using the published workfile. Abort Xgen extraction."
            )
            return

    # Collect Xgen and Delta files. Side-car paths on the palette are
    # relative to the workfile directory.
    xgen_files = []
    sources = []
    current_dir = os.path.dirname(instance.context.data["currentFile"])
    attrs = ["xgFileName", "xgBaseFile"]
    for palette in cmds.ls(type="xgmPalette"):
        for attr in attrs:
            source = os.path.join(
                current_dir, cmds.getAttr(palette + "." + attr)
            )
            if not os.path.exists(source):
                continue

            ext = os.path.splitext(source)[1]
            if ext == ".xgen":
                xgen_files.append(source)
            if ext == ".xgd":
                sources.append(source)

    # Copy .xgen file to temporary location and modify its project path
    # line so it points at the published resources location.
    staging_dir = self.staging_dir(instance)
    for source in xgen_files:
        destination = os.path.join(staging_dir, os.path.basename(source))
        shutil.copy(source, destination)

        lines = []
        with open(destination, "r") as f:
            for line in [line.rstrip() for line in f]:
                # .xgen files are tab-delimited key/value lines.
                if line.startswith("\txgProjectPath"):
                    path = os.path.dirname(instance.data["resourcesDir"])
                    line = "\txgProjectPath\t\t{}/".format(
                        path.replace("\\", "/")
                    )

                lines.append(line)

        with open(destination, "w") as f:
            f.write("\n".join(lines))

        sources.append(destination)

    # Add resource files to workfile instance.
    for source in sources:
        basename = os.path.basename(source)
        destination = os.path.join(
            os.path.dirname(instance.data["resourcesDir"]), basename
        )
        transfers.append((source, destination))

    # Transfer every file under each palette's data path(s) into a
    # per-palette "collections" subfolder of the resources dir.
    destination_dir = os.path.join(
        instance.data["resourcesDir"], "collections"
    )
    for palette in cmds.ls(type="xgmPalette"):
        project_path = xgenm.getAttr("xgProjectPath", palette)
        data_path = xgenm.getAttr("xgDataPath", palette)
        data_path = data_path.replace("${PROJECT}", project_path)
        # xgDataPath can hold multiple ";"-separated locations.
        for path in data_path.split(";"):
            for root, _, files in os.walk(path):
                for f in files:
                    source = os.path.join(root, f)
                    destination = "{}/{}{}".format(
                        destination_dir,
                        palette.replace(":", "__ns__"),
                        source.replace(path, "")
                    )
                    transfers.append((source, destination))

    for source, destination in transfers:
        self.log.debug("Transfer: {} > {}".format(source, destination))

    instance.data["transfers"] = transfers

    # Set palette attributes in preparation for workfile publish.
    # The "xgFileName" placeholder is overwritten per palette below.
    attrs = {"xgFileName": None, "xgBaseFile": ""}
    data = {}
    for palette in cmds.ls(type="xgmPalette"):
        attrs["xgFileName"] = "resources/{}.xgen".format(
            palette.replace(":", "__ns__")
        )
        for attr, value in attrs.items():
            node_attr = palette + "." + attr

            # Remember the old value so ResetXgenAttributes can restore it.
            old_value = cmds.getAttr(node_attr)
            try:
                data[palette][attr] = old_value
            except KeyError:
                data[palette] = {attr: old_value}

            cmds.setAttr(node_attr, value, type="string")
            self.log.info(
                "Setting \"{}\" on \"{}\"".format(value, node_attr)
            )

        cmds.setAttr(palette + "." + "xgExportAsDelta", False)

    instance.data["xgenAttributes"] = data
|
||||
142
openpype/hosts/maya/plugins/publish/extract_xgen.py
Normal file
|
|
@ -0,0 +1,142 @@
|
|||
import os
|
||||
import copy
|
||||
import tempfile
|
||||
|
||||
from maya import cmds
|
||||
import xgenm
|
||||
|
||||
from openpype.pipeline import publish
|
||||
from openpype.hosts.maya.api.lib import (
|
||||
maintained_selection, attribute_values, write_xgen_file, delete_after
|
||||
)
|
||||
from openpype.lib import StringTemplate
|
||||
|
||||
|
||||
class ExtractXgen(publish.Extractor):
    """Extract Xgen

    Workflow:
    - Duplicate nodes used for patches.
    - Export palette and import onto duplicate nodes.
    - Export/Publish duplicate nodes and palette.
    - Export duplicate palette to .xgen file and add to publish.
    - Publish all xgen files as resources.
    """

    label = "Extract Xgen"
    hosts = ["maya"]
    families = ["xgen"]
    # Workfile export format: "ma" (mayaAscii), anything else -> mayaBinary.
    scene_type = "ma"

    def process(self, instance):
        """Export the duplicated Xgen setup as a Maya scene + .xgen file."""
        if "representations" not in instance.data:
            instance.data["representations"] = []

        staging_dir = self.staging_dir(instance)
        maya_filename = "{}.{}".format(instance.data["name"], self.scene_type)
        maya_filepath = os.path.join(staging_dir, maya_filename)

        # Get published xgen file name.
        template_data = copy.deepcopy(instance.data["anatomyData"])
        template_data.update({"ext": "xgen"})
        templates = instance.context.data["anatomy"].templates["publish"]
        xgen_filename = StringTemplate(templates["file"]).format(template_data)

        # Reuse the staging dir resolved above (the original requested it a
        # second time here).
        xgen_path = os.path.join(
            staging_dir, xgen_filename
        ).replace("\\", "/")
        # Renamed from "type" to avoid shadowing the builtin.
        file_type = "mayaAscii" if self.scene_type == "ma" else "mayaBinary"

        # Duplicate xgen setup; everything created here is deleted on exit.
        with delete_after() as delete_bin:
            duplicate_nodes = []
            # Collect nodes to export (keys of the mapping are unused).
            for connections in instance.data["xgenConnections"].values():
                transform_name = connections["transform"].split(".")[0]

                # Duplicate_transform subd patch geometry.
                duplicate_transform = cmds.duplicate(transform_name)[0]
                delete_bin.append(duplicate_transform)

                # Discard the children, keeping only the shapes.
                shapes = cmds.listRelatives(duplicate_transform, shapes=True)
                children = cmds.listRelatives(
                    duplicate_transform, children=True
                )
                cmds.delete(set(children) - set(shapes))

                duplicate_transform = cmds.parent(
                    duplicate_transform, world=True
                )[0]

                duplicate_nodes.append(duplicate_transform)

            # Export temp xgen palette files.
            temp_xgen_path = os.path.join(
                tempfile.gettempdir(), "temp.xgen"
            ).replace("\\", "/")
            xgenm.exportPalette(
                instance.data["xgmPalette"].replace("|", ""), temp_xgen_path
            )
            self.log.info("Extracted to {}".format(temp_xgen_path))

            # Import xgen onto the duplicate.
            with maintained_selection():
                cmds.select(duplicate_nodes)
                palette = xgenm.importPalette(temp_xgen_path, [])

            delete_bin.append(palette)

            # Export duplicated palettes.
            xgenm.exportPalette(palette, xgen_path)

            # Export Maya file with the palette temporarily pointing at the
            # published xgen file name.
            attribute_data = {"{}.xgFileName".format(palette): xgen_filename}
            with attribute_values(attribute_data):
                with maintained_selection():
                    cmds.select(duplicate_nodes + [palette])
                    cmds.file(
                        maya_filepath,
                        force=True,
                        type=file_type,
                        exportSelected=True,
                        preserveReferences=False,
                        constructionHistory=True,
                        shader=True,
                        constraints=True,
                        expressions=True
                    )

            self.log.info("Extracted to {}".format(maya_filepath))

        if os.path.exists(temp_xgen_path):
            os.remove(temp_xgen_path)

        # Point the exported .xgen file at the published resource locations.
        data = {
            "xgDataPath": os.path.join(
                instance.data["resourcesDir"],
                "collections",
                palette.replace(":", "__ns__")
            ).replace("\\", "/"),
            "xgProjectPath": os.path.dirname(
                instance.data["resourcesDir"]
            ).replace("\\", "/")
        }
        write_xgen_file(data, xgen_path)

        # Adding representations.
        representation = {
            "name": "xgen",
            "ext": "xgen",
            "files": xgen_filename,
            "stagingDir": staging_dir,
        }
        instance.data["representations"].append(representation)

        representation = {
            "name": self.scene_type,
            "ext": self.scene_type,
            "files": maya_filename,
            "stagingDir": staging_dir
        }
        instance.data["representations"].append(representation)
|
||||
|
|
@ -1,64 +0,0 @@
|
|||
import os
|
||||
|
||||
from maya import cmds
|
||||
|
||||
from openpype.pipeline import publish
|
||||
from openpype.hosts.maya.api.lib import (
|
||||
suspended_refresh,
|
||||
maintained_selection
|
||||
)
|
||||
|
||||
|
||||
class ExtractXgenCache(publish.Extractor):
    """Produce an alembic of just xgen interactive groom.

    Collects every ``xgmSplineDescription`` node in the instance and exports
    them with ``cmds.xgmSplineCache`` into a single ``.abc`` file which is
    registered as an "abc" representation.
    """

    label = "Extract Xgen ABC Cache"
    hosts = ["maya"]
    families = ["xgen"]
    optional = True

    def process(self, instance):
        # Collect the out set nodes.
        out_descriptions = [
            node for node in instance
            if cmds.nodeType(node) == "xgmSplineDescription"
        ]

        # NOTE(review): frame range is hard-coded to a single frame (1-1);
        # confirm whether the instance frame range should be used instead.
        start = 1
        end = 1

        self.log.info("Extracting Xgen Cache..")
        # The original resolved the staging dir twice ("dirname" and
        # "parent_dir"); once is enough.
        staging_dir = self.staging_dir(instance)
        filename = "{name}.abc".format(**instance.data)
        path = os.path.join(staging_dir, filename)

        with suspended_refresh():
            with maintained_selection():
                # xgmSplineCache takes its options as a single job string.
                command = (
                    '-file '
                    + path
                    + ' -df "ogawa" -fr '
                    + str(start)
                    + ' '
                    + str(end)
                    + ' -step 1 -mxf -wfw'
                )
                for desc in out_descriptions:
                    command += (" -obj " + desc)
                cmds.xgmSplineCache(export=True, j=command)

        if "representations" not in instance.data:
            instance.data["representations"] = []

        representation = {
            'name': 'abc',
            'ext': 'abc',
            'files': filename,
            "stagingDir": staging_dir,
        }
        instance.data["representations"].append(representation)

        self.log.info("Extracted {} to {}".format(instance, staging_dir))
|
||||
36
openpype/hosts/maya/plugins/publish/reset_xgen_attributes.py
Normal file
|
|
@ -0,0 +1,36 @@
|
|||
from maya import cmds
|
||||
|
||||
import pyblish.api
|
||||
|
||||
|
||||
class ResetXgenAttributes(pyblish.api.InstancePlugin):
    """Reset Xgen attributes.

    When the incremental save of the workfile triggers, the Xgen attributes
    changes so this plugin will change it back to the values before publishing.
    """

    label = "Reset Xgen Attributes."
    # Offset to run after workfile increment plugin.
    order = pyblish.api.IntegratorOrder + 10.0
    families = ["workfile"]

    def process(self, instance):
        xgen_attributes = instance.data.get("xgenAttributes", {})
        if not xgen_attributes:
            return

        for palette_name, attribute_map in xgen_attributes.items():
            for attribute_name, value in attribute_map.items():
                plug = "{}.{}".format(palette_name, attribute_name)
                self.log.info(
                    "Setting \"{}\" on \"{}\"".format(value, plug)
                )
                cmds.setAttr(plug, value, type="string")
            cmds.setAttr(palette_name + ".xgExportAsDelta", True)

        # The attribute edits above do not mark the scene as modified, so
        # force a save; otherwise the user could exit without committing
        # the changes.
        self.log.info("Saving changes.")
        cmds.file(save=True)
|
||||
59
openpype/hosts/maya/plugins/publish/validate_xgen.py
Normal file
|
|
@ -0,0 +1,59 @@
|
|||
import json
|
||||
|
||||
import maya.cmds as cmds
|
||||
import xgenm
|
||||
|
||||
import pyblish.api
|
||||
from openpype.pipeline.publish import PublishValidationError
|
||||
|
||||
|
||||
class ValidateXgen(pyblish.api.InstancePlugin):
    """Validate Xgen data.

    Checks that the instance contains exactly one Xgen palette
    ("collection") and that the palette has no inactive fx modules, which
    would break loading later.
    """

    label = "Validate Xgen"
    order = pyblish.api.ValidatorOrder
    # Fixed: pyblish filters plugins on "hosts" (plural); the original
    # "host" attribute was never read, so this plugin ran for all hosts.
    hosts = ["maya"]
    families = ["xgen"]

    def process(self, instance):
        set_members = instance.data.get("setMembers")

        # Only 1 collection/node per instance.
        if len(set_members) != 1:
            raise PublishValidationError(
                "Only one collection per instance is allowed."
                " Found:\n{}".format(set_members)
            )

        # Only xgen palette node is allowed.
        node_type = cmds.nodeType(set_members[0])
        if node_type != "xgmPalette":
            raise PublishValidationError(
                "Only node of type \"xgmPalette\" are allowed. Referred to as"
                " \"collection\" in the Maya UI."
                " Node type found: {}".format(node_type)
            )

        # Can't have inactive modifiers in the collection because Xgen will
        # try and look for them when loading.
        palette = instance.data["xgmPalette"].replace("|", "")
        inactive_modifiers = {}
        for description in instance.data["xgmDescriptions"]:
            description = description.split("|")[-2]
            modifier_names = xgenm.fxModules(palette, description)
            for name in modifier_names:
                attr = xgenm.getAttr("active", palette, description, name)
                # Attribute value are lowercase strings of false/true.
                if attr == "false":
                    # setdefault replaces the original try/except KeyError
                    # grouping idiom.
                    inactive_modifiers.setdefault(description, []).append(
                        name
                    )

        if inactive_modifiers:
            raise PublishValidationError(
                "There are inactive modifiers on the collection. "
                "Please delete these:\n{}".format(
                    json.dumps(inactive_modifiers, indent=4, sort_keys=True)
                )
            )
|
||||
|
|
@ -1,16 +1,33 @@
|
|||
import os
|
||||
from functools import partial
|
||||
|
||||
from openpype.settings import get_project_settings
|
||||
from openpype.pipeline import install_host
|
||||
from openpype.hosts.maya.api import MayaHost
|
||||
|
||||
from maya import cmds
|
||||
|
||||
host = MayaHost()
|
||||
install_host(host)
|
||||
|
||||
|
||||
print("starting OpenPype usersetup")
|
||||
print("Starting OpenPype usersetup...")
|
||||
|
||||
# build a shelf
|
||||
# Open Workfile Post Initialization.
|
||||
key = "OPENPYPE_OPEN_WORKFILE_POST_INITIALIZATION"
|
||||
if bool(int(os.environ.get(key, "0"))):
|
||||
cmds.evalDeferred(
|
||||
partial(
|
||||
cmds.file,
|
||||
os.environ["AVALON_LAST_WORKFILE"],
|
||||
open=True,
|
||||
force=True
|
||||
),
|
||||
lowestPriority=True
|
||||
)
|
||||
|
||||
|
||||
# Build a shelf.
|
||||
settings = get_project_settings(os.environ['AVALON_PROJECT'])
|
||||
shelf_preset = settings['maya'].get('project_shelf')
|
||||
|
||||
|
|
@ -26,7 +43,10 @@ if shelf_preset:
|
|||
print(import_string)
|
||||
exec(import_string)
|
||||
|
||||
cmds.evalDeferred("mlib.shelf(name=shelf_preset['name'], iconPath=icon_path, preset=shelf_preset)")
|
||||
cmds.evalDeferred(
|
||||
"mlib.shelf(name=shelf_preset['name'], iconPath=icon_path,"
|
||||
" preset=shelf_preset)"
|
||||
)
|
||||
|
||||
|
||||
print("finished OpenPype usersetup")
|
||||
print("Finished OpenPype usersetup.")
|
||||
|
|
|
|||
|
|
@ -64,6 +64,16 @@ class FtrackModule(
|
|||
self._timers_manager_module = None
|
||||
|
||||
def get_ftrack_url(self):
|
||||
"""Resolved ftrack url.
|
||||
|
||||
Resolving is trying to fill missing information in url and tried to
|
||||
connect to the server.
|
||||
|
||||
Returns:
|
||||
Union[str, None]: Final variant of url or None if url could not be
|
||||
reached.
|
||||
"""
|
||||
|
||||
if self._ftrack_url is _URL_NOT_SET:
|
||||
self._ftrack_url = resolve_ftrack_url(
|
||||
self._settings_ftrack_url,
|
||||
|
|
@ -73,8 +83,19 @@ class FtrackModule(
|
|||
|
||||
ftrack_url = property(get_ftrack_url)
|
||||
|
||||
@property
def settings_ftrack_url(self):
    """Ftrack url from settings in a format as it is.

    Unlike ``get_ftrack_url`` this value is returned raw, without any
    resolving applied.

    Returns:
        str: Ftrack url from settings.
    """

    return self._settings_ftrack_url
|
||||
|
||||
def get_global_environments(self):
    """Ftrack's global environments."""

    environments = {}
    environments["FTRACK_SERVER"] = self.ftrack_url
    return environments
|
||||
|
|
@ -510,7 +531,10 @@ def resolve_ftrack_url(url, logger=None):
|
|||
url = "https://" + url
|
||||
|
||||
ftrack_url = None
|
||||
if not url.endswith("ftrackapp.com"):
|
||||
if url and _check_ftrack_url(url):
|
||||
ftrack_url = url
|
||||
|
||||
if not ftrack_url and not url.endswith("ftrackapp.com"):
|
||||
ftrackapp_url = url + ".ftrackapp.com"
|
||||
if _check_ftrack_url(ftrackapp_url):
|
||||
ftrack_url = ftrackapp_url
|
||||
|
|
|
|||
|
|
@ -139,8 +139,7 @@ class CredentialsDialog(QtWidgets.QDialog):
|
|||
self.fill_ftrack_url()
|
||||
|
||||
def fill_ftrack_url(self):
|
||||
url = os.getenv("FTRACK_SERVER")
|
||||
checked_url = self.check_url(url)
|
||||
checked_url = self.check_url()
|
||||
if checked_url == self.ftsite_input.text():
|
||||
return
|
||||
|
||||
|
|
@ -154,7 +153,7 @@ class CredentialsDialog(QtWidgets.QDialog):
|
|||
self.api_input.setEnabled(enabled)
|
||||
self.user_input.setEnabled(enabled)
|
||||
|
||||
if not url:
|
||||
if not checked_url:
|
||||
self.btn_advanced.hide()
|
||||
self.btn_simple.hide()
|
||||
self.btn_ftrack_login.hide()
|
||||
|
|
@ -254,7 +253,7 @@ class CredentialsDialog(QtWidgets.QDialog):
|
|||
)
|
||||
|
||||
def _on_ftrack_login_clicked(self):
|
||||
url = self.check_url(self.ftsite_input.text())
|
||||
url = self.check_url()
|
||||
if not url:
|
||||
return
|
||||
|
||||
|
|
@ -302,21 +301,21 @@ class CredentialsDialog(QtWidgets.QDialog):
|
|||
if is_logged is not None:
|
||||
self.set_is_logged(is_logged)
|
||||
|
||||
def check_url(self, url):
|
||||
if url is not None:
|
||||
url = url.strip("/ ")
|
||||
|
||||
if not url:
|
||||
def check_url(self):
|
||||
settings_url = self._module.settings_ftrack_url
|
||||
url = self._module.ftrack_url
|
||||
if not settings_url:
|
||||
self.set_error(
|
||||
"Ftrack URL is not defined in settings!"
|
||||
)
|
||||
return
|
||||
|
||||
if "http" not in url:
|
||||
if url.endswith("ftrackapp.com"):
|
||||
url = "https://" + url
|
||||
else:
|
||||
url = "https://{}.ftrackapp.com".format(url)
|
||||
if url is None:
|
||||
self.set_error(
|
||||
"Specified URL does not lead to a valid Ftrack server."
|
||||
)
|
||||
return
|
||||
|
||||
try:
|
||||
result = requests.get(
|
||||
url,
|
||||
|
|
|
|||
|
|
@ -61,7 +61,8 @@ class CollectResourcesPath(pyblish.api.InstancePlugin):
|
|||
"background",
|
||||
"effect",
|
||||
"staticMesh",
|
||||
"skeletalMesh"
|
||||
"skeletalMesh",
|
||||
"xgen"
|
||||
]
|
||||
|
||||
def process(self, instance):
|
||||
|
|
|
|||
|
|
@ -1,4 +1,5 @@
|
|||
{
|
||||
"open_workfile_post_initialization": false,
|
||||
"imageio": {
|
||||
"ocio_config": {
|
||||
"enabled": false,
|
||||
|
|
|
|||
|
|
@ -5,6 +5,11 @@
|
|||
"label": "Maya",
|
||||
"is_file": true,
|
||||
"children": [
|
||||
{
|
||||
"type": "boolean",
|
||||
"key": "open_workfile_post_initialization",
|
||||
"label": "Open Workfile Post Initialization"
|
||||
},
|
||||
{
|
||||
"key": "imageio",
|
||||
"type": "dict",
|
||||
|
|
|
|||
|
|
@ -7,6 +7,7 @@ import Tabs from '@theme/Tabs';
|
|||
import TabItem from '@theme/TabItem';
|
||||
|
||||
|
||||
|
||||
## Working in the studio
|
||||
|
||||
In studio environment you should have OpenPype already installed and deployed, so you can start using it without much setup. Your admin has probably put OpenPype icon on your desktop or even had your computer set up so OpenPype will start automatically.
|
||||
|
|
@ -15,70 +16,66 @@ If this is not the case, please contact your administrator to consult on how to
|
|||
|
||||
## Working from home
|
||||
|
||||
If you are working from home though, you'll need to install it yourself. You should, however, receive the OpenPype installer files from your studio
|
||||
admin, supervisor or production, because OpenPype versions and executables might not be compatible between studios.
|
||||
If you are working from **home** though, you'll **need to install** it yourself. You should, however, receive the OpenPype installer files from your studio
|
||||
admin, supervisor or production, because OpenPype versions and executables might not be compatible between studios.
|
||||
|
||||
To install OpenPype you just need to unzip it anywhere on the disk
|
||||
Installing OpenPype is possible by Installer or by unzipping downloaded ZIP archive to any drive location.
|
||||
|
||||
To use it, you have two options
|
||||
|
||||
**openpype_gui.exe** is the most common for artists. It runs OpenPype GUI in system tray. From there you can run all the available tools. To use any of the features, OpenPype must be running in the tray.
|
||||
|
||||
**openpype_console.exe** in useful for debugging and error reporting. It opens console window where all the necessary information will appear during user's work.
|
||||
:::tip Using the OpenPype Installer
|
||||
See the [Installation section](artist_install.md) for more information on how to use the OpenPype Installer
|
||||
:::
|
||||
|
||||
|
||||
<Tabs
|
||||
groupId="platforms"
|
||||
defaultValue="win"
|
||||
values={[
|
||||
{label: 'Windows', value: 'win'},
|
||||
{label: 'Linux', value: 'linux'},
|
||||
{label: 'Mac', value: 'mac'},
|
||||
]}>
|
||||
You can run OpenPype by desktop "OP" icon (if it exists after installing) or by directly executing
|
||||
|
||||
<TabItem value="win">
|
||||
**openpype_gui.exe** located in the OpenPype folder. This executable being suitable **for artists**.
|
||||
|
||||
WIP - Windows instructions once installers are finished
|
||||
or alternatively by
|
||||
|
||||
</TabItem>
|
||||
<TabItem value="linux">
|
||||
**openpype_console.exe** which is more suitable for **TDs/Admin** for debugging and error reporting. This one runs with
|
||||
opened console window where all the necessary info will appear during user's work session.
|
||||
|
||||
WIP - Linux instructions once installers are finished
|
||||
:::tip Is OpenPype running?
|
||||
OpenPype runs in the operating system's tray. If you see turquoise OpenPype icon in the tray you can easily tell OpenPype is currently running.
|
||||
Keep in mind that on Windows this icon might be hidden by default, in which case, the artist can simply drag the icon down to the tray.
|
||||
:::
|
||||
|
||||
</TabItem>
|
||||
<TabItem value="mac">
|
||||
|
||||
WIP - Mac instructions once installers are finished
|
||||
|
||||
</TabItem>
|
||||
</Tabs>
|
||||

|
||||
|
||||
|
||||
## First Launch
|
||||
|
||||
|
||||
When you first start OpenPype, you will be asked to give it some basic information.
|
||||
When you first start OpenPype, you will be asked to fill in some basic information.
|
||||
|
||||
### MongoDB
|
||||
|
||||
In most cases that will only be your studio MongoDB Address.
|
||||
In most cases you will only have to supply the MongoDB Address.
|
||||
It's the database URL you should have received from your Studio admin and often will look like this
|
||||
|
||||
It is a URL that you should receive from you studio and most often will look like this `mongodb://username:passwword@mongo.mystudiodomain.com:12345` or `mongodb://192.168.100.15:27071`, it really depends on your studio setup. When OpenPype Igniter
|
||||
`mongodb://username:passwword@mongo.mystudiodomain.com:12345`
|
||||
|
||||
or
|
||||
|
||||
`mongodb://192.168.100.15:27071`
|
||||
|
||||
it really depends on your studio setup. When OpenPype Igniter
|
||||
asks for it, just put it in the corresponding text field and press `install` button.
|
||||
|
||||
### OpenPype Version Repository
|
||||
|
||||
Sometimes your studio might also ask you to fill in the path to it's version
|
||||
repository. This is a location where OpenPype will be looking for when checking
|
||||
if it's up to date and where updates are installed from automatically.
|
||||
Sometimes your Studio might also ask you to fill in the path to its version
|
||||
repository. This is a location where OpenPype will search for the latest versions, check
|
||||
if it's up to date and where updates are installed from automatically.
|
||||
|
||||
This pat is usually taken from the database directly, so you shouldn't need it.
|
||||
This path is usually taken from the database directly, so you shouldn't need it.
|
||||
|
||||
|
||||
## Updates
|
||||
|
||||
If you're connected to your studio, OpenPype will check for, and install updates automatically every time you run it. That's why during the first start, it will go through a quick update installation process, even though you might have just installed it.
|
||||
If you're connected to your Studio, OpenPype will check for, and install updates automatically every time you run it. That's why during the first start it can go through a quick update installation process, even though you might have just installed it.
|
||||
|
||||
|
||||
## Advanced use
|
||||
## Advanced Usage
|
||||
|
||||
For more advanced use of OpenPype commands please visit [Admin section](admin_openpype_commands.md).
|
||||
|
|
|
|||
125
website/docs/artist_hosts_3dsmax.md
Normal file
|
|
@ -0,0 +1,125 @@
|
|||
---
|
||||
id: artist_hosts_3dsmax
|
||||
title: 3dsmax
|
||||
sidebar_label: 3dsmax
|
||||
---
|
||||
|
||||
:::note Work in progress
|
||||
This part of documentation is still work in progress.
|
||||
:::
|
||||
|
||||
<!-- ## OpenPype Global Tools
|
||||
|
||||
- [Set Context](artist_tools_context_manager)
|
||||
- [Work Files](artist_tools_workfiles)
|
||||
- [Create](artist_tools_creator)
|
||||
- [Load](artist_tools_loader)
|
||||
- [Manage (Inventory)](artist_tools_inventory)
|
||||
- [Publish](artist_tools_publisher)
|
||||
- [Library Loader](artist_tools_library_loader)
|
||||
-->
|
||||
|
||||
|
||||
## First Steps With OpenPype
|
||||
|
||||
Locate **OpenPype Icon** in the OS tray (if hidden dive in the tray toolbar).
|
||||
|
||||
> If you cannot locate the OpenPype icon, it is probably not running — check [Getting Started](artist_getting_started.md) first.
|
||||
|
||||
By clicking the icon ```OpenPype Menu``` rolls out.
|
||||
|
||||
Choose ```OpenPype Menu > Launcher``` to open the ```Launcher``` window.
|
||||
|
||||
When opened you can **choose** the **project** to work in from the list. Then choose the particular **asset** you want to work on then choose **task**
|
||||
and finally **run 3dsmax by its icon** in the tools.
|
||||
|
||||

|
||||
|
||||
:::note Launcher Content
|
||||
The list of available projects, assets, tasks and tools will differ according to your Studio and need to be set in advance by supervisor/admin.
|
||||
:::
|
||||
|
||||
## Running in the 3dsmax
|
||||
|
||||
If 3dsmax has been launched via OP Launcher there should be **OpenPype Menu** visible in 3dsmax **top header** after start.
|
||||
This is the core functional area for you as a user. Most of your actions will take place here.
|
||||
|
||||

|
||||
|
||||
:::note OpenPype Menu
|
||||
Use this menu exclusively for **Opening/Saving** when dealing with work files, not the standard ```File Menu```. The standard menu can still be used for file operations, but preferably only for quick saves during a work session, not for saving actual workfile versions.
|
||||
:::
|
||||
|
||||
## Working With Scene Files
|
||||
|
||||
In OpenPype menu first go to ```Work Files``` menu item so **Work Files Window** shows up.
|
||||
|
||||
Here you can perform Save / Load actions as you would normally do with ```File Save ``` and ```File Open``` in the standard 3dsmax ```File Menu``` and navigate to different project components like assets, tasks, workfiles etc.
|
||||
|
||||
|
||||

|
||||
|
||||
You first choose particular asset and assigned task and corresponding workfile you would like to open.
|
||||
|
||||
If no workfile is present yet, simply hit ```Save As```, keep ```Subversion``` empty and hit ```Ok```.
|
||||
|
||||

|
||||
|
||||
OpenPype correctly names the workfile and adds a version to it. This happens whenever the user triggers the ```Save As``` action, resulting in incremental version numbers like
|
||||
|
||||
```workfileName_v001```
|
||||
|
||||
```workfileName_v002```
|
||||
|
||||
etc.
|
||||
|
||||
This means the user is freed from guessing the correct naming and other necessities needed to keep everything in order and managed.
|
||||
|
||||
> Note: user still has also other options for naming like ```Subversion```, ```Artist's Note``` but we won't dive into those now.
|
||||
|
||||
Here you can see resulting work file after ```Save As``` action.
|
||||
|
||||

|
||||
|
||||
## Understanding Context
|
||||
|
||||
As seen in our example, OpenPype created the very first workfile and named it ```220901_couch_modeling_v001.max```, meaning it sits in the Project ```220901```, with ```couch``` being the asset, ```modeling``` the task and obviously ```v001``` telling the user it is the first existing version of this workfile.
|
||||
|
||||
It is good to be aware that whenever you as a user choose ```asset``` and ```task``` you happen to be in so called **context** meaning that all user actions are in relation with particular ```asset```. This could be quickly seen in host application header and ```OpenPype Menu``` and its accompanying tools.
|
||||
|
||||

|
||||
|
||||
> Whenever you choose different ```asset``` and its ```task``` in **Work Files window** you are basically changing context to the current asset/task you have chosen.
|
||||
|
||||
|
||||
This concludes the basics of working with workfiles in 3dsmax using OpenPype and its tools. Following chapters will cover other aspects like creating multiple assets types and their publishing for later usage in the production.
|
||||
|
||||
---
|
||||
|
||||
## Creating and Publishing Instances
|
||||
|
||||
:::warning Important
|
||||
Before proceeding further please check [Glossary](artist_concepts.md) and [What Is Publishing?](artist_publish.md) So you have clear idea about terminology.
|
||||
:::
|
||||
|
||||
|
||||
### Intro
|
||||
|
||||
Current OpenPype integration (ver 3.15.0) supports only ```PointCache``` and ```Camera``` families now.
|
||||
|
||||
**Pointcache** family being basically any geometry outputted as Alembic cache (.abc) format
|
||||
|
||||
**Camera** family being 3dsmax Camera object with/without animation outputted as native .max, FBX, Alembic format
|
||||
|
||||
|
||||
---
|
||||
|
||||
:::note Work in progress
|
||||
This part of documentation is still work in progress.
|
||||
:::
|
||||
|
||||
## ...to be added
|
||||
|
||||
|
||||
|
||||
|
||||
|
|
@ -601,3 +601,20 @@ about customizing review process refer to [admin section](project_settings/setti
|
|||
|
||||
If you don't move `modelMain` into `reviewMain`, review will be generated but it will
|
||||
be published as separate entity.
|
||||
|
||||
|
||||
## Inventory Actions
|
||||
|
||||
### Connect Geometry
|
||||
|
||||
This action will connect geometries between containers.
|
||||
|
||||
#### Usage
|
||||
|
||||
Select 1 container of type `animation` or `pointcache`, then 1+ container of any type.
|
||||
|
||||
#### Details
|
||||
|
||||
The action searches the selected containers for 1 animation container of type `animation` or `pointcache`. This animation container will be connected to the rest of the selected containers. Matching geometries between containers is done by comparing the attribute `cbId`.
|
||||
|
||||
The connection between geometries is done with a live blendshape.
|
||||
|
|
|
|||
|
|
@ -4,26 +4,96 @@ title: Xgen for Maya
|
|||
sidebar_label: Xgen
|
||||
---
|
||||
|
||||
## Working with Xgen in OpenPype
|
||||
OpenPype supports Xgen classic with the following workflow. It eases the otherwise cumbersome issues around Xgen's side car files and hidden behaviour inside Maya. The workflow supports publishing, loading and updating of Xgen collections, along with connecting animation from geometry and (guide) curves.
|
||||
|
||||
OpenPype supports publishing and loading of Xgen interactive grooms. You can publish
|
||||
them as mayaAscii files with scalps that can be loaded into another maya scene, or as
|
||||
alembic caches.
|
||||
## Setup
|
||||
|
||||
### Publishing Xgen Grooms
|
||||
### Settings
|
||||
|
||||
To prepare Xgen for publishing, select all the descriptions that should be published together and then create an Xgen subset in the scene using - **OpenPype menu** → **Create**... and select **Xgen Interactive**. Leave Use selection checked.
|
||||
Go to project settings > `Maya` > enable `Open Workfile Post Initialization`;
|
||||
|
||||
For actual publishing of your groom, go to **OpenPype → Publish** and then press ▶ to publish. This will export a `.ma` file containing your grooms with any geometries they are attached to, and also a baked cache in `.abc` format.
|
||||
`project_settings/maya/open_workfile_post_initialization`
|
||||
|
||||
This is due to two errors occurring when opening a workfile containing referenced Xgen nodes on launch of Maya, specifically:
|
||||
|
||||
:::tip adding more descriptions
|
||||
You can add multiple xgen descriptions into the subset you are about to publish, simply by
|
||||
adding them to the maya set that was created for you. Please make sure that only xgen description nodes are present inside of the set and not the scalp geometry.
|
||||
:::
|
||||
- ``Critical``: Duplicate collection errors on launching workfile. This is because Maya first imports Xgen when referencing in external Maya files, then imports Xgen again when the reference edits are applied.
|
||||
```
|
||||
Importing XGen Collections...
|
||||
# Error: XGen: Failed to find description ball_xgenMain_01_:parent in collection ball_xgenMain_01_:collection. Abort applying delta: P:/PROJECTS/OP01_CG_demo/shots/sh040/work/Lighting/cg_sh040_Lighting_v001__ball_xgenMain_01___collection.xgen #
|
||||
# Error: XGen: Tried to import a duplicate collection, ball_xgenMain_02_:collection, from file P:/PROJECTS/OP01_CG_demo/shots/sh040/work/Lighting/cg_sh040_Lighting_v001__ball_xgenMain_02___collection.xgen. Aborting import. #
|
||||
```
|
||||
- ``Non-critical``: Errors on opening the workfile and failed opening of published xgen. This is because Maya imports Xgen when referencing in external Maya files, but the reference edits that ensure the locations of the Xgen files are correct have not been applied yet.
|
||||
```
|
||||
Importing XGen Collections...
|
||||
# Error: XGen: Failed to open file: P:/PROJECTS/OP01_CG_demo/shots/sh040/work/Lighting/cg_ball_xgenMain_v035__ball_rigMain_01___collection.xgen #
|
||||
# Error: XGen: Failed to import collection from file P:/PROJECTS/OP01_CG_demo/shots/sh040/work/Lighting/cg_ball_xgenMain_v035__ball_rigMain_01___collection.xgen #
|
||||
```
|
||||
|
||||
### Loading Xgen
|
||||
Go to project settings > `Deadline` > `Publish plugins` > `Maya Submit to Deadline` > disable `Use Published scene`;
|
||||
|
||||
You can use published xgens by loading them using the OpenPype Loader. You can choose to reference or import xgen. We don't have any automatic mesh linking at the moment, and it is expected that the groom is published with a scalp, which can then be manually attached to your animated mesh, for example.
|
||||
`project_settings/deadline/publish/MayaSubmitDeadline/use_published`
|
||||
|
||||
The alembic representation can be loaded too and it contains the groom converted to curves. Keep in mind that the density of the alembic directly depends on your viewport xgen density at the point of export.
|
||||
This is due to a temporary workaround while rendering with published scenes is being fixed.
|
||||
|
||||
## Create
|
||||
|
||||
Create an Xgen instance to publish. This needs to contain only **one Xgen collection**.
|
||||
|
||||
`OpenPype > Create... > Xgen`
|
||||
|
||||
You can create multiple Xgen instances if you have multiple collections to publish.
|
||||
|
||||
### Publish
|
||||
|
||||
The publishing process will grab geometry used for Xgen along with any external files used in the collection's descriptions. This creates an isolated Maya file with just the Xgen collection's dependencies, so you can use any nested geometry when creating the Xgen description. An Xgen version will consist of:
|
||||
|
||||
- Maya file (`.ma`) - this contains the geometry and the connections to the Xgen collection and descriptions.
|
||||
- Xgen file (`.xgen`) - this contains the Xgen collection and description.
|
||||
- Resource files (`.ptx`, `.xuv`) - this contains Xgen side car files used in the collection and descriptions.
|
||||
|
||||
## Load
|
||||
|
||||
Open the Loader tool, `OpenPype > Loader...`, and navigate to the published Xgen version. On right-click you'll get the option `Reference Xgen (ma)`
|
||||
When loading an Xgen version the following happens:
|
||||
|
||||
- References in the Maya file.
|
||||
- Copies the Xgen file (`.xgen`) to the current workspace.
|
||||
- Modifies the Xgen file copy to load the current workspace first then the published Xgen collection.
|
||||
- Makes a custom attribute on the Xgen collection, `float_ignore`, which can be seen under the `Expressions` tab of the `Xgen` UI. This is done to initialize the Xgen delta file workflow.
|
||||
- Sets up an Xgen delta file (`.xgd`) to store any workspace changes of the published Xgen version.
|
||||
|
||||
When the loading is done, Xgen collection will be in the Xgen delta file workflow which means any changes done in the Maya workfile will be stored in the current workspace. The published Xgen collection will remain intact, even if the user assigns maps to any attributes or otherwise modifies any attribute.
|
||||
|
||||
### Updating
|
||||
|
||||
When there are changes to the Xgen version, the user will be notified when opening the workfile or publishing. Since the Xgen is referenced, it follows the standard Maya referencing system and overrides.
|
||||
|
||||
For example publishing `xgenMain` version 1 with the attribute `renderer` set to `None`, then version 2 has `renderer` set to `Arnold Renderer`. When updating from version 1 to 2, the `renderer` attribute will be updated to `Arnold Renderer` unless there is a local override.
|
||||
|
||||
### Connect Patches
|
||||
|
||||
When loading in an Xgen version, it does not have any connections to anything in the workfile, so it's static in the position it was published in. Use the [Connect Geometry](artist_hosts_maya#connect-geometry) action to connect Xgen to any matching loaded animated geometry.
|
||||
|
||||
### Connect Guides
|
||||
|
||||
Along with patches you can also connect the Xgen guides to an Alembic cache.
|
||||
|
||||
#### Usage
|
||||
|
||||
Select 1 animation container, of family `animation` or `pointcache`, then the Xgen containers to connect to. Right-click > `Actions` > `Connect Xgen`.
|
||||
|
||||
***Note: Only alembic (`.abc`) representations are allowed.***
|
||||
|
||||
#### Details
|
||||
|
||||
Connecting the guide will make Xgen use the Alembic directly, setting the attributes under `Guide Animation`, so the Alembic needs to contain the same number of curves as there are guides in the Xgen.
|
||||
|
||||
The animation container gets connected with the Xgen container, so if the animation container is updated so will the Xgen container's attribute.
|
||||
|
||||
## Rendering
|
||||
|
||||
To render with Xgen, follow the [Rendering With OpenPype](artist_hosts_maya#rendering-with-openpype) guide.
|
||||
|
||||
### Details
|
||||
|
||||
When submitting a workfile with Xgen, all Xgen related files will be collected and published as the workfiles resources. This means the published workfile is no longer referencing the workspace Xgen files.
|
||||
|
|
|
|||
BIN
website/docs/assets/3dsmax_SavingFirstFile2_OP.png
Normal file
|
After Width: | Height: | Size: 32 KiB |
BIN
website/docs/assets/3dsmax_SavingFirstFile_OP.png
Normal file
|
After Width: | Height: | Size: 36 KiB |
BIN
website/docs/assets/3dsmax_context.png
Normal file
|
After Width: | Height: | Size: 114 KiB |
BIN
website/docs/assets/3dsmax_menu_OP.png
Normal file
|
After Width: | Height: | Size: 62 KiB |
BIN
website/docs/assets/3dsmax_menu_first_OP.png
Normal file
|
After Width: | Height: | Size: 8.2 KiB |
BIN
website/docs/assets/3dsmax_model_OP.png
Normal file
|
After Width: | Height: | Size: 84 KiB |
BIN
website/docs/assets/3dsmax_tray_OP.png
Normal file
|
After Width: | Height: | Size: 213 KiB |
|
|
@ -49,6 +49,7 @@ module.exports = {
|
|||
],
|
||||
},
|
||||
"artist_hosts_blender",
|
||||
"artist_hosts_3dsmax",
|
||||
"artist_hosts_harmony",
|
||||
"artist_hosts_houdini",
|
||||
"artist_hosts_aftereffects",
|
||||
|
|
|
|||