splitting and improving logic

aardschok 2017-11-28 11:00:19 +01:00
parent ba8b414099
commit 4c4fba479b
8 changed files with 161 additions and 216 deletions

View file

@@ -0,0 +1,15 @@
+import avalon.maya
+
+
+class CreateYetiRig(avalon.maya.Creator):
+    """Output for procedural plugin nodes ( Yeti / XGen / etc)"""
+
+    name = "yetiDefault"
+    label = "Yeti Cache"
+    family = "colorbleed.yeticache"
+    icon = "pagelines"
+
+    def __init__(self, *args, **kwargs):
+        super(CreateYetiRig, self).__init__(*args, **kwargs)
+
+        self.data["peroll"] = 0

View file

@@ -1,15 +1,20 @@
+from maya import cmds
+
 import avalon.maya


 class CreateYetiRig(avalon.maya.Creator):
     """Output for procedural plugin nodes ( Yeti / XGen / etc)"""

-    name = "yetiDefault"
-    label = "Procedural"
-    family = "colorbleed.yetirig"
+    label = "Yeti Rig"
+    family = "colorbleed.yetiRig"
     icon = "usb"

-    def __init__(self, *args, **kwargs):
-        super(CreateYetiRig, self).__init__(*args, **kwargs)
-        self.data["preroll"] = 0
+    def process(self):
+        instance = super(CreateYetiRig, self).process()
+
+        self.log.info("Creating Rig instance set up ...")
+        input_meshes = cmds.sets(name="input_SET", empty=True)
+        cmds.sets(input_meshes, forceElement=instance)
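For reference, a minimal sketch of the node layout the new process() produces, written as plain maya.cmds calls outside the Creator; the set name "yetiRigDefault" is only an illustrative stand-in for whatever objectSet avalon's base Creator.process() returns:

from maya import cmds

# Stand-in for the objectSet created by avalon.maya.Creator.process()
instance = cmds.sets(name="yetiRigDefault", empty=True)

# Empty set that artists fill with the rig's input geometry
input_meshes = cmds.sets(name="input_SET", empty=True)

# Nest input_SET under the instance set, mirroring CreateYetiRig.process()
cmds.sets(input_meshes, forceElement=instance)

print(cmds.sets(instance, query=True))  # ['input_SET']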

View file

@@ -0,0 +1,17 @@
+from avalon import api
+
+
+class YetiCacheLoader(api.Loader):
+
+    families = ["colorbleed.yeticache"]
+    representations = ["fur"]
+
+    label = "Load Yeti Cache"
+    order = -9
+    icon = "code-fork"
+    color = "orange"
+
+    def load(self, context, name=None, namespace=None, data=None):
+        pass
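The load() body is still a stub. As a rough idea of what it will likely need to do, here is a hypothetical sketch that creates a pgYetiMaya node and points it at the published .fur file; the fileMode and cacheFileName attributes come from Yeti's standard cache workflow and are not part of this commit:

from maya import cmds

def load_fur_cache(fur_path, name="yetiCache"):
    """Create a Yeti node and switch it to read the given cache file."""
    yeti_shape = cmds.createNode("pgYetiMaya", name=name + "Shape")
    cmds.setAttr(yeti_shape + ".fileMode", 1)  # 1 = read from cache
    cmds.setAttr(yeti_shape + ".cacheFileName", fur_path, type="string")
    return yeti_shape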

View file

@@ -17,143 +17,33 @@ SETTINGS = {"renderDensity",
             "cbId"}


-class CollectYetiRig(pyblish.api.InstancePlugin):
-    """Collect all information of the Yeti Rig"""
+class CollectYetiCache(pyblish.api.InstancePlugin):
+    """Collect all information of the Yeti caches"""

     order = pyblish.api.CollectorOrder + 0.4
-    label = "Collect Yeti Rig"
-    families = ["colorbleed.yetiRig"]
+    label = "Collect Yeti Cache"
+    families = ["colorbleed.yetiRig", "colorbleed.yeticache"]
     hosts = ["maya"]
+    tasks = ["animation", "fx"]

     def process(self, instance):

-        assert "input_SET" in cmds.sets(instance.name, query=True), (
-            "Yeti Rig must have an input_SET")
-
         # Collect animation data
         animation_data = lib.collect_animation_data()
         instance.data.update(animation_data)

         # We only want one frame to export if it is not animation
-        if api.Session["AVALON_TASK"] != "animation":
+        if api.Session["AVALON_TASK"] not in self.tasks:
             instance.data["startFrame"] = 1
             instance.data["endFrame"] = 1

-        # Get the input meshes information
-        input_content = cmds.sets("input_SET", query=True)
-        input_nodes = cmds.listRelatives(input_content,
-                                         allDescendents=True,
-                                         fullPath=True) or []
-
-        # Get all the shapes
-        input_meshes = cmds.ls(input_nodes, type="shape", long=True)
-
-        inputs = []
-        for mesh in input_meshes:
-            connections = cmds.listConnections(mesh,
-                                               source=True,
-                                               destination=False,
-                                               connections=True,
-                                               plugs=True,
-                                               type="mesh")
-            source = connections[-1].split(".")[0]
-            plugs = [i.split(".")[-1] for i in connections]
-
-            inputs.append({"connections": plugs,
-                           "inputID": lib.get_id(mesh),
-                           "outputID": lib.get_id(source)})
-
         # Collect any textures if used
         node_attrs = {}
-        yeti_resources = []
-        for node in cmds.ls(instance[:], type="pgYetiMaya"):
+        for node in cmds.ls(instance.data["setMembers"], type="pgYetiMaya"):
             # Get Yeti resources (textures)
-            # TODO: referenced files in Yeti Graph
-            resources = self.get_yeti_resources(node)
-            yeti_resources.extend(resources)
-
             for attr in SETTINGS:
                 node_attr = "%s.%s" % (node, attr)
                 current = cmds.getAttr(node_attr)
                 node_attrs[node_attr] = current

-        instance.data["inputs"] = inputs
         instance.data["settings"] = node_attrs
-        instance.data["resources"] = yeti_resources
-
-    def get_yeti_resources(self, node):
-        """Get all texture file paths
-
-        If a texture is a sequence it gathers all sibling files to ensure
-        the texture sequence is complete.
-
-        Args:
-            node (str): node name of the pgYetiMaya node
-
-        Returns:
-            list
-        """
-        resources = []
-
-        image_search_path = cmds.getAttr("{}.imageSearchPath".format(node))
-        texture_filenames = cmds.pgYetiCommand(node, listTextures=True)
-        if texture_filenames and not image_search_path:
-            raise ValueError("pgYetiMaya node '%s' is missing the path to the "
-                             "files in the 'imageSearchPath "
-                             "atttribute'" % node)
-
-        for texture in texture_filenames:
-            node_resources = {"files": [], "source": texture, "node": node}
-            texture_filepath = os.path.join(image_search_path, texture)
-            if len(texture.split(".")) > 2:
-                # For UDIM based textures (tiles)
-                if "<UDIM>" in texture:
-                    sequences = self.get_sequence(texture_filepath,
-                                                  pattern="<UDIM>")
-                    node_resources["files"].extend(sequences)
-                # Based textures (animated masks f.e)
-                elif "%04d" in texture:
-                    sequences = self.get_sequence(texture_filepath,
-                                                  pattern="%04d")
-                    node_resources["files"].extend(sequences)
-                # Assuming it is a fixed name
-                else:
-                    node_resources["files"].append(texture_filepath)
-            else:
-                node_resources["files"].append(texture_filepath)
-
-            resources.append(node_resources)
-
-        return resources
-
-    def get_sequence(self, filename, pattern="%04d"):
-        """Get sequence from filename
-
-        Supports negative frame ranges like -001, 0000, 0001 and -0001,
-        0000, 0001.
-
-        Arguments:
-            filename (str): The full path to filename containing the given
-            pattern.
-            pattern (str): The pattern to swap with the variable frame number.
-
-        Returns:
-            list: file sequence.
-        """
-        from avalon.vendor import clique
-
-        glob_pattern = filename.replace(pattern, "*")
-
-        escaped = re.escape(filename)
-        re_pattern = escaped.replace(pattern, "-?[0-9]+")
-
-        files = glob.glob(glob_pattern)
-        files = [str(f) for f in files if re.match(re_pattern, f)]
-
-        pattern = [clique.PATTERNS["frames"]]
-        collection, remainer = clique.assemble(files, patterns=pattern)
-
-        return collection
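The get_sequence() helper removed here (and kept in the rig collector below) leans on clique to turn a glob result into a frame collection. A small standalone illustration of that call, assuming the vendored copy behaves like the public clique package:

from avalon.vendor import clique

files = ["fur_mask.0001.tif", "fur_mask.0002.tif", "fur_mask.0003.tif",
         "unrelated.tif"]
patterns = [clique.PATTERNS["frames"]]
collections, remainder = clique.assemble(files, patterns=patterns)

print(collections)  # roughly: [<Collection "fur_mask.%04d.tif [1-3]">]
print(remainder)    # ['unrelated.tif']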

View file

@@ -5,52 +5,66 @@ import re
 from maya import cmds

 import pyblish.api
-from avalon import api
 from colorbleed.maya import lib

-SETTINGS = {"renderDensity": 10.0,
-            "renderWidth": 1.0,
-            "renderLength": 1.0,
-            "increaseRenderBounds": 0.1}
+SETTINGS = {"renderDensity",
+            "renderWidth",
+            "renderLength",
+            "increaseRenderBounds",
+            "cbId"}


-class CollectYetiProceduralData(pyblish.api.InstancePlugin):
-    """Collect procedural data"""
+class CollectYetiRig(pyblish.api.InstancePlugin):
+    """Collect all information of the Yeti Rig"""

     order = pyblish.api.CollectorOrder + 0.4
-    families = ["colorbleed.yetiprocedural"]
-    label = "Collect Yeti Procedural"
+    label = "Collect Yeti Rig"
+    families = ["colorbleed.yetiRig"]
     hosts = ["maya"]

     def process(self, instance):

+        assert "input_SET" in cmds.sets(instance.name, query=True), (
+            "Yeti Rig must have an input_SET")
+
         # Collect animation data
         animation_data = lib.collect_animation_data()
         instance.data.update(animation_data)

-        # We only want one frame to export if it is not animation
-        if api.Session["AVALON_TASK"] != "animation":
-            instance.data["startFrame"] = 1
-            instance.data["endFrame"] = 1
-
-        # Get all procedural nodes
-        yeti_nodes = cmds.ls(instance[:], type="pgYetiMaya")
+        # Get the input meshes information
+        input_content = cmds.sets("input_SET", query=True)
+        input_nodes = cmds.listRelatives(input_content,
+                                         allDescendents=True,
+                                         fullPath=True) or []
+
+        # Get all the shapes
+        input_meshes = cmds.ls(input_nodes, type="shape", long=True)
+
+        inputs = []
+        for mesh in input_meshes:
+            connections = cmds.listConnections(mesh,
+                                               source=True,
+                                               destination=False,
+                                               connections=True,
+                                               plugs=True,
+                                               type="mesh")
+            source = connections[-1].split(".")[0]
+            plugs = [i.split(".")[-1] for i in connections]
+
+            inputs.append({"connections": plugs,
+                           "inputID": lib.get_id(mesh),
+                           "outputID": lib.get_id(source)})

         # Collect any textures if used
-        node_attrs = {}
         yeti_resources = []
-        for node in yeti_nodes:
-            # Get Yeti resources (textures)
-            # TODO: referenced files in Yeti Graph
+        for node in cmds.ls(instance[:], type="pgYetiMaya"):
             resources = self.get_yeti_resources(node)
             yeti_resources.extend(resources)

-            node_attrs[node] = {}
-            for attr, value in SETTINGS.iteritems():
-                current = cmds.getAttr("%s.%s" % (node, attr))
-                node_attrs[node][attr] = current
-
-        instance.data["settings"] = node_attrs
+        instance.data["inputs"] = inputs
         instance.data["resources"] = yeti_resources

     def get_yeti_resources(self, node):

@@ -78,22 +92,23 @@ class CollectYetiProceduralData(pyblish.api.InstancePlugin):
             node_resources = {"files": [], "source": texture, "node": node}
             texture_filepath = os.path.join(image_search_path, texture)
             if len(texture.split(".")) > 2:
                 # For UDIM based textures (tiles)
                 if "<UDIM>" in texture:
                     sequences = self.get_sequence(texture_filepath,
                                                   pattern="<UDIM>")
-                    node_resources["node"].extend(sequences)
+                    node_resources["files"].extend(sequences)
                 # Based textures (animated masks f.e)
                 elif "%04d" in texture:
                     sequences = self.get_sequence(texture_filepath,
                                                   pattern="%04d")
-                    node_resources["node"].extend(sequences)
+                    node_resources["files"].extend(sequences)
                 # Assuming it is a fixed name
                 else:
-                    node_resources["node"].append(texture_filepath)
+                    node_resources["files"].append(texture_filepath)
             else:
-                node_resources["node"].append(texture_filepath)
+                node_resources["files"].append(texture_filepath)

             resources.append(node_resources)
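For readers following the split: the rig collector now stores two lists on the instance, "inputs" and "resources". The dictionaries it appends look roughly like this; all values below are made-up examples, only the keys come straight from the code above:

example_input = {
    "connections": ["outMesh", "inMesh"],         # plug names, illustrative only
    "inputID": "<cbId of the rig-side shape>",
    "outputID": "<cbId of the connected source mesh>",
}

example_resource = {
    "node": "pgYetiMaya1",                        # Yeti node owning the texture
    "source": "fur_mask.%04d.tif",                # filename as listed in the graph
    "files": ["/.../fur_mask.0001.tif",           # resolved files, possibly a sequence
              "/.../fur_mask.0002.tif"],
}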

View file

@@ -1,14 +1,13 @@
 import os
 import json
-import pprint

 from maya import cmds

 import colorbleed.api
-from cb.utils.maya import context
+# from cb.utils.maya import context


-class ExtractYetiProcedural(colorbleed.api.Extractor):
+class ExtractYetiCache(colorbleed.api.Extractor):
     """Produce an alembic of just point positions and normals.

     Positions and normals are preserved, but nothing more,

@@ -16,9 +15,9 @@ class ExtractYetiProcedural(colorbleed.api.Extractor):
     """

-    label = "Extract Yeti"
+    label = "Extract Yeti Cache"
     hosts = ["maya"]
-    families = ["colorbleed.yetiRig"]
+    families = ["colorbleed.yetiRig", "colorbleed.yeticache"]

     def process(self, instance):

@@ -31,7 +30,13 @@ class ExtractYetiProcedural(colorbleed.api.Extractor):
         # Yeti related staging dirs
         data_file = os.path.join(dirname, "yeti_settings.json")
-        maya_path = os.path.join(dirname, "yeti_rig.ma")
+
+        # Collect information for writing cache
+        start_frame = instance.data.get("startFrame")
+        end_frame = instance.data.get("endFrame")
+        preroll = instance.data.get("preroll")
+        if preroll > 0:
+            start_frame -= preroll

         self.log.info("Writing out cache")

         # Start writing the files for snap shot

@@ -39,7 +44,7 @@ class ExtractYetiProcedural(colorbleed.api.Extractor):
         path = os.path.join(dirname, "cache_<NAME>.0001.fur")
         cmds.pgYetiCommand(yeti_nodes,
                            writeCache=path,
-                           range=(1, 1),
+                           range=(start_frame, end_frame),
                            sampleTimes="0.0 1.0",
                            updateViewport=False,
                            generatePreview=False)

@@ -47,44 +52,16 @@ class ExtractYetiProcedural(colorbleed.api.Extractor):
         cache_files = [x for x in os.listdir(dirname) if x.endswith(".fur")]

         self.log.info("Writing metadata file")
-        image_search_path = ""
         settings = instance.data.get("settings", None)
         if settings is not None:
-
-            # Create assumed destination folder for imageSearchPath
-            assumed_temp_data = instance.data["assumedTemplateData"]
-            template = instance.data["template"]
-            template_formatted = template.format(**assumed_temp_data)
-
-            destination_folder = os.path.dirname(template_formatted)
-            image_search_path = os.path.join(destination_folder, "resources")
-            image_search_path = os.path.normpath(image_search_path)
-
-            # Store assumed imageSearchPath
-            settings["imageSearchPath"] = image_search_path
-
             with open(data_file, "w") as fp:
                 json.dump(settings, fp, ensure_ascii=False)

-        attr_value = {"%s.imageSearchPath" % n: image_search_path for
-                      n in yeti_nodes}
-
-        with context.attribute_value(attr_value):
-            cmds.select(instance.data["setMembers"], noExpand=True)
-            cmds.file(maya_path,
-                      force=True,
-                      exportSelected=True,
-                      typ="mayaAscii",
-                      preserveReferences=False,
-                      constructionHistory=False,
-                      shader=False)
-
         # Ensure files can be stored
         if "files" not in instance.data:
             instance.data["files"] = list()

         instance.data["files"].extend([cache_files,
-                                       "yeti_rig.ma",
                                        "yeti_settings.json"])

         self.log.info("Extracted {} to {}".format(instance, dirname))

View file

@@ -1,11 +1,13 @@
 import os
-import json

 from maya import cmds

 import colorbleed.api
+from cb.utils.maya import context
+
+reload(context)


-class ExtractYetiProcedural(colorbleed.api.Extractor):
+class ExtractYetiRig(colorbleed.api.Extractor):
     """Produce an alembic of just point positions and normals.

     Positions and normals are preserved, but nothing more,

@@ -13,12 +15,11 @@ class ExtractYetiProcedural(colorbleed.api.Extractor):
     """

-    label = "Extract Yeti"
+    label = "Extract Yeti Rig"
     hosts = ["maya"]
-    families = ["colorbleed.yetiprocedural"]
+    families = ["colorbleed.yetiRig", "colorbleed.yeticache"]

     def process(self, instance):
-        print instance

         yeti_nodes = cmds.ls(instance, type="pgYetiMaya")
         if not yeti_nodes:

@@ -26,37 +27,44 @@ class ExtractYetiProcedural(colorbleed.api.Extractor):
         # Define extract output file path
         dirname = self.staging_dir(instance)
-        data_file = os.path.join(dirname, "{}.json".format(instance.name))
-
-        start = instance.data.get("startFrame")
-        end = instance.data.get("endFrame")
-        preroll = instance.data.get("preroll")
-        if preroll > 1:
-            start -= preroll  # caching supports negative frames
-
-        self.log.info("Writing out cache")
-        # Start writing the files
-        # <NAME> will be replace by the yeti node name
-        filename = "{0}_<NAME>.%04d.fur".format(instance.name)
-        path = os.path.join(dirname, filename)
-        cache_files = cmds.pgYetiCommand(yeti_nodes,
-                                         writeCache=path,
-                                         range=(start, end),
-                                         sampleTimes="0.0 1.0",
-                                         updateViewport=False,
-                                         generatePreivew=False)
+
+        # Yeti related staging dirs
+        maya_path = os.path.join(dirname, "yeti_rig.ma")

         self.log.info("Writing metadata file")
+        image_search_path = ""
         settings = instance.data.get("settings", None)
         if settings is not None:
-            with open(data_file, "w") as fp:
-                json.dump(settings, fp, ensure_ascii=False)
+
+            # Create assumed destination folder for imageSearchPath
+            assumed_temp_data = instance.data["assumedTemplateData"]
+            template = instance.data["template"]
+            template_formatted = template.format(**assumed_temp_data)
+
+            destination_folder = os.path.dirname(template_formatted)
+            image_search_path = os.path.join(destination_folder, "resources")
+            image_search_path = os.path.normpath(image_search_path)
+
+        attr_value = {"%s.imageSearchPath" % n: image_search_path for
+                      n in yeti_nodes}
+
+        with context.attribute_values(attr_value):
+            cmds.select(instance.data["setMembers"], noExpand=True)
+            cmds.file(maya_path,
+                      force=True,
+                      exportSelected=True,
+                      typ="mayaAscii",
+                      preserveReferences=False,
+                      constructionHistory=False,
+                      shader=False)

         # Ensure files can be stored
         if "files" not in instance.data:
             instance.data["files"] = list()

-        instance.data["files"].append(cache_files)
-        instance.data["files"].append(data_file)
+        instance.data["files"].extend(["yeti_rig.ma",
+                                       "yeti_settings.json"])

         self.log.info("Extracted {} to {}".format(instance, dirname))
+
+        cmds.select(clear=True)
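cb.utils.maya.context is an internal studio module, so its attribute_values helper is not shown in this commit. Judging purely from how it is used above, a minimal stand-in would look something like the sketch below; it is restricted to the string attribute it wraps here (imageSearchPath) and the real implementation may differ:

from contextlib import contextmanager
from maya import cmds

@contextmanager
def attribute_values(attr_values):
    """Temporarily set (string) attributes, restoring the originals on exit."""
    original = {attr: cmds.getAttr(attr) for attr in attr_values}
    try:
        for attr, value in attr_values.items():
            cmds.setAttr(attr, value, type="string")
        yield
    finally:
        for attr, value in original.items():
            cmds.setAttr(attr, value, type="string")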

View file

@@ -1,5 +1,6 @@
 import os
 import logging
+import pprint
 import shutil
 import errno

@@ -31,7 +32,8 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
                 "colorbleed.model",
                 "colorbleed.pointcache",
                 "colorbleed.setdress",
-                "colorbleed.rig"]
+                "colorbleed.rig",
+                "colorbleed.yetiRig"]

     def process(self, instance):

@@ -149,6 +151,9 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
         # Find the representations to transfer amongst the files
         # Each should be a single representation (as such, a single extension)
         representations = []

+        print 'files', instance.data['files']
+
         for files in instance.data["files"]:

             # Collection

@@ -162,7 +167,6 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
             #
             if isinstance(files, list):
                 collection = files
-
                 # Assert that each member has identical suffix
                 _, ext = os.path.splitext(collection[0])
                 assert all(ext == os.path.splitext(name)[1]

@@ -170,10 +174,18 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
                            "Files had varying suffixes, this is a bug"
                            )

-                assert not any(os.path.isabs(name) for name in collection)
-
                 template_data["representation"] = ext[1:]

                 for fname in collection:
-                    src = os.path.join(stagingdir, fname)
+
+                    if os.path.isabs(fname):
+                        src = fname
+                        fname = os.path.basename(src)
+                    else:
+                        src = os.path.join(stagingdir, fname)
+
                     dst = os.path.join(
                         template_publish.format(**template_data),
                         fname

@@ -191,6 +203,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
             # |_______|
             #
             fname = files
+            assert not os.path.isabs(fname)

             _, ext = os.path.splitext(fname)
             template_data["representation"] = ext[1:]

@@ -221,6 +234,8 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
             }

             representations.append(representation)

+        pprint.pprint(instance.data["transfers"])
+
         self.log.info("Registering {} items".format(len(representations)))
         io.insert_many(representations)

@@ -287,10 +302,13 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
     def create_version(self, subset, version_number, locations, data=None):
         """ Copy given source to destination

-        Arguments:
+        Args:
            subset (dict): the registered subset of the asset
            version_number (int): the version number
            locations (list): the currently registered locations

+        Returns:
+            dict: collection of data to create a version
+
        """

        # Imprint currently registered location
        version_locations = [location for location in locations if
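Stepping back to the path handling added earlier in this file: the new branch in the collection loop lets absolute paths (such as staged cache files reported with their full path) pass through unchanged, while relative names keep resolving against the staging directory. A quick illustration with made-up paths:

import os

stagingdir = "/tmp/pyblish_staging"
names = ["cache_body.0001.fur",                       # relative: joined to stagingdir
         "/tmp/pyblish_staging/yeti_settings.json"]   # absolute: used as-is

for fname in names:
    if os.path.isabs(fname):
        src = fname
        fname = os.path.basename(src)
    else:
        src = os.path.join(stagingdir, fname)
    print("%s -> %s" % (src, fname))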