Mirror of https://github.com/ynput/ayon-core.git, synced 2025-12-27 14:22:37 +01:00
splitting and improving logic
Commit 4c4fba479b (parent ba8b414099)
8 changed files with 161 additions and 216 deletions

@@ -0,0 +1,15 @@
import avalon.maya


class CreateYetiRig(avalon.maya.Creator):
    """Output for procedural plugin nodes (Yeti / XGen / etc.)"""

    name = "yetiDefault"
    label = "Yeti Cache"
    family = "colorbleed.yeticache"
    icon = "pagelines"

    def __init__(self, *args, **kwargs):
        super(CreateYetiRig, self).__init__(*args, **kwargs)

        self.data["preroll"] = 0

@@ -1,15 +1,20 @@
from maya import cmds

import avalon.maya


class CreateYetiRig(avalon.maya.Creator):
    """Output for procedural plugin nodes (Yeti / XGen / etc.)"""

    name = "yetiDefault"
    label = "Procedural"
    family = "colorbleed.yetirig"
    label = "Yeti Rig"
    family = "colorbleed.yetiRig"
    icon = "usb"

    def __init__(self, *args, **kwargs):
        super(CreateYetiRig, self).__init__(*args, **kwargs)
    def process(self):

        self.data["preroll"] = 0
        instance = super(CreateYetiRig, self).process()

        self.log.info("Creating Rig instance set up ...")

        input_meshes = cmds.sets(name="input_SET", empty=True)
        cmds.sets(input_meshes, forceElement=instance)
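
A note on the new process() body: the two cmds.sets() calls create an empty "input_SET" and force it into the objectSet returned by the base Creator.process(). A standalone sketch of that pattern, with a hypothetical instance set name:

from maya import cmds

instance_set = cmds.sets(name="yetiRigDefault", empty=True)  # stand-in for the creator's instance set
input_set = cmds.sets(name="input_SET", empty=True)          # empty set that will hold the input meshes
cmds.sets(input_set, forceElement=instance_set)              # nest input_SET inside the instance set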

@@ -0,0 +1,17 @@
from avalon import api


class YetiCacheLoader(api.Loader):

    families = ["colorbleed.yeticache"]
    representations = ["fur"]

    label = "Load Yeti Cache"
    order = -9
    icon = "code-fork"
    color = "orange"

    def load(self, context, name=None, namespace=None, data=None):


        pass
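
load() is still a stub in this commit. Purely as orientation, a hedged sketch of what a Yeti cache loader along these lines might grow into; the pgYetiMaya attribute names and the self.fname convention are assumptions here, not something this diff establishes:

from avalon import api
from maya import cmds


class YetiCacheLoaderSketch(api.Loader):
    """Hypothetical example only; not the implementation in this commit."""

    families = ["colorbleed.yeticache"]
    representations = ["fur"]

    def load(self, context, name=None, namespace=None, data=None):
        # Assumes the Yeti plugin is loaded and that self.fname points to the
        # published .fur file (an avalon Loader convention).
        yeti_node = cmds.createNode("pgYetiMaya", name=name)
        cmds.setAttr("%s.cacheFileName" % yeti_node, self.fname, type="string")
        cmds.setAttr("%s.fileMode" % yeti_node, 1)  # assumed: 1 = read from cache
        return [yeti_node]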

@@ -17,143 +17,33 @@ SETTINGS = {"renderDensity",
            "cbId"}


class CollectYetiRig(pyblish.api.InstancePlugin):
    """Collect all information of the Yeti Rig"""
class CollectYetiCache(pyblish.api.InstancePlugin):
    """Collect all information of the Yeti caches"""

    order = pyblish.api.CollectorOrder + 0.4
    label = "Collect Yeti Rig"
    families = ["colorbleed.yetiRig"]
    label = "Collect Yeti Cache"
    families = ["colorbleed.yetiRig", "colorbleed.yeticache"]
    hosts = ["maya"]
    tasks = ["animation", "fx"]

    def process(self, instance):

        assert "input_SET" in cmds.sets(instance.name, query=True), (
            "Yeti Rig must have an input_SET")

        # Collect animation data
        animation_data = lib.collect_animation_data()
        instance.data.update(animation_data)

        # We only want one frame to export if it is not animation
        if api.Session["AVALON_TASK"] != "animation":
        if api.Session["AVALON_TASK"] not in self.tasks:
            instance.data["startFrame"] = 1
            instance.data["endFrame"] = 1

        # Get the input meshes information
        input_content = cmds.sets("input_SET", query=True)
        input_nodes = cmds.listRelatives(input_content,
                                         allDescendents=True,
                                         fullPath=True) or []

        # Get all the shapes
        input_meshes = cmds.ls(input_nodes, type="shape", long=True)

        inputs = []
        for mesh in input_meshes:
            connections = cmds.listConnections(mesh,
                                               source=True,
                                               destination=False,
                                               connections=True,
                                               plugs=True,
                                               type="mesh")
            source = connections[-1].split(".")[0]
            plugs = [i.split(".")[-1] for i in connections]
            inputs.append({"connections": plugs,
                           "inputID": lib.get_id(mesh),
                           "outputID": lib.get_id(source)})

        # Collect any textures if used
        node_attrs = {}
        yeti_resources = []
        for node in cmds.ls(instance[:], type="pgYetiMaya"):
        for node in cmds.ls(instance.data["setMembers"], type="pgYetiMaya"):
            # Get Yeti resources (textures)
            # TODO: referenced files in Yeti Graph
            resources = self.get_yeti_resources(node)
            yeti_resources.extend(resources)

            for attr in SETTINGS:
                node_attr = "%s.%s" % (node, attr)
                current = cmds.getAttr(node_attr)
                node_attrs[node_attr] = current

        instance.data["inputs"] = inputs
        instance.data["settings"] = node_attrs
        instance.data["resources"] = yeti_resources

    def get_yeti_resources(self, node):
        """Get all texture file paths

        If a texture is a sequence it gathers all sibling files to ensure
        the texture sequence is complete.

        Args:
            node (str): node name of the pgYetiMaya node

        Returns:
            list
        """
        resources = []
        image_search_path = cmds.getAttr("{}.imageSearchPath".format(node))
        texture_filenames = cmds.pgYetiCommand(node, listTextures=True)

        if texture_filenames and not image_search_path:
            raise ValueError("pgYetiMaya node '%s' is missing the path to the "
                             "files in the 'imageSearchPath "
                             "attribute'" % node)

        for texture in texture_filenames:
            node_resources = {"files": [], "source": texture, "node": node}
            texture_filepath = os.path.join(image_search_path, texture)
            if len(texture.split(".")) > 2:

                # For UDIM based textures (tiles)
                if "<UDIM>" in texture:
                    sequences = self.get_sequence(texture_filepath,
                                                  pattern="<UDIM>")
                    node_resources["files"].extend(sequences)

                # Frame number based textures (animated masks, for example)
                elif "%04d" in texture:
                    sequences = self.get_sequence(texture_filepath,
                                                  pattern="%04d")
                    node_resources["files"].extend(sequences)
                # Assuming it is a fixed name
                else:
                    node_resources["files"].append(texture_filepath)
            else:
                node_resources["files"].append(texture_filepath)

            resources.append(node_resources)

        return resources

    def get_sequence(self, filename, pattern="%04d"):
        """Get sequence from filename

        Supports negative frame ranges like -001, 0000, 0001 and -0001,
        0000, 0001.

        Arguments:
            filename (str): The full path to filename containing the given
                pattern.
            pattern (str): The pattern to swap with the variable frame number.

        Returns:
            list: file sequence.

        """

        from avalon.vendor import clique

        glob_pattern = filename.replace(pattern, "*")

        escaped = re.escape(filename)
        re_pattern = escaped.replace(pattern, "-?[0-9]+")

        files = glob.glob(glob_pattern)
        files = [str(f) for f in files if re.match(re_pattern, f)]

        pattern = [clique.PATTERNS["frames"]]
        collection, remainder = clique.assemble(files, patterns=pattern)

        return collection
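
For readers unfamiliar with clique (vendored as avalon.vendor.clique), a small standalone illustration of the get_sequence() idea using made-up file names; note that in this sketch the frame pattern is escaped before being substituted into the regex, so the replacement still matches after re.escape():

import re

import clique  # the public package; the pipeline ships it as avalon.vendor.clique

filename = "fur_mask.%04d.tif"  # hypothetical texture path
files = ["fur_mask.0001.tif", "fur_mask.0002.tif",
         "fur_mask.0003.tif", "unrelated.tif"]

# Build a regex from the escaped filename, swapping the escaped frame pattern
# for something that also accepts negative frame numbers.
re_pattern = re.escape(filename).replace(re.escape("%04d"), "-?[0-9]+")
matched = [f for f in files if re.match(re_pattern, f)]

# Assemble the matched names into frame collections; 'unrelated.tif' is ignored.
collections, remainder = clique.assemble(matched,
                                         patterns=[clique.PATTERNS["frames"]])
print(collections)  # one collection covering frames 1-3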

@@ -5,52 +5,66 @@ import re
from maya import cmds

import pyblish.api
from avalon import api

from colorbleed.maya import lib


SETTINGS = {"renderDensity": 10.0,
            "renderWidth": 1.0,
            "renderLength": 1.0,
            "increaseRenderBounds": 0.1}
SETTINGS = {"renderDensity",
            "renderWidth",
            "renderLength",
            "increaseRenderBounds",
            "cbId"}


class CollectYetiProceduralData(pyblish.api.InstancePlugin):
    """Collect procedural data"""
class CollectYetiRig(pyblish.api.InstancePlugin):
    """Collect all information of the Yeti Rig"""

    order = pyblish.api.CollectorOrder + 0.4
    families = ["colorbleed.yetiprocedural"]
    label = "Collect Yeti Procedural"
    label = "Collect Yeti Rig"
    families = ["colorbleed.yetiRig"]
    hosts = ["maya"]

    def process(self, instance):

        assert "input_SET" in cmds.sets(instance.name, query=True), (
            "Yeti Rig must have an input_SET")

        # Collect animation data
        animation_data = lib.collect_animation_data()
        instance.data.update(animation_data)

        # We only want one frame to export if it is not animation
        if api.Session["AVALON_TASK"] != "animation":
            instance.data["startFrame"] = 1
            instance.data["endFrame"] = 1
        # Get the input meshes information
        input_content = cmds.sets("input_SET", query=True)
        input_nodes = cmds.listRelatives(input_content,
                                         allDescendents=True,
                                         fullPath=True) or []

        # Get all procedural nodes
        yeti_nodes = cmds.ls(instance[:], type="pgYetiMaya")
        # Get all the shapes
        input_meshes = cmds.ls(input_nodes, type="shape", long=True)

        inputs = []
        for mesh in input_meshes:
            connections = cmds.listConnections(mesh,
                                               source=True,
                                               destination=False,
                                               connections=True,
                                               plugs=True,
                                               type="mesh")
            source = connections[-1].split(".")[0]
            plugs = [i.split(".")[-1] for i in connections]
            inputs.append({"connections": plugs,
                           "inputID": lib.get_id(mesh),
                           "outputID": lib.get_id(source)})

        # Collect any textures if used
        node_attrs = {}
        yeti_resources = []
        for node in yeti_nodes:
        for node in cmds.ls(instance[:], type="pgYetiMaya"):
            # Get Yeti resources (textures)
            # TODO: referenced files in Yeti Graph
            resources = self.get_yeti_resources(node)
            yeti_resources.extend(resources)

            node_attrs[node] = {}
            for attr, value in SETTINGS.iteritems():
                current = cmds.getAttr("%s.%s" % (node, attr))
                node_attrs[node][attr] = current

        instance.data["settings"] = node_attrs
        instance.data["inputs"] = inputs
        instance.data["resources"] = yeti_resources

    def get_yeti_resources(self, node):
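
Side note on the inputs collection: with connections=True and plugs=True, cmds.listConnections() returns alternating plug pairs (the queried mesh's plug first, then the connected source plug), which is what the split(".") calls rely on. A short illustration with hypothetical node names:

from maya import cmds

mesh = "input_GEOShape"  # hypothetical shape inside input_SET with an incoming mesh connection
connections = cmds.listConnections(mesh,
                                   source=True,
                                   destination=False,
                                   connections=True,
                                   plugs=True,
                                   type="mesh")
# e.g. ["input_GEOShape.inMesh", "body_GEOShape.worldMesh[0]"]

source = connections[-1].split(".")[0]            # "body_GEOShape"
plugs = [i.split(".")[-1] for i in connections]   # ["inMesh", "worldMesh[0]"]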

@@ -78,22 +92,23 @@ class CollectYetiProceduralData(pyblish.api.InstancePlugin):
            node_resources = {"files": [], "source": texture, "node": node}
            texture_filepath = os.path.join(image_search_path, texture)
            if len(texture.split(".")) > 2:

                # For UDIM based textures (tiles)
                if "<UDIM>" in texture:
                    sequences = self.get_sequence(texture_filepath,
                                                  pattern="<UDIM>")
                    node_resources["node"].extend(sequences)
                    node_resources["files"].extend(sequences)

                # Frame number based textures (animated masks, for example)
                elif "%04d" in texture:
                    sequences = self.get_sequence(texture_filepath,
                                                  pattern="%04d")
                    node_resources["node"].extend(sequences)
                    node_resources["files"].extend(sequences)
                # Assuming it is a fixed name
                else:
                    node_resources["node"].append(texture_filepath)
                    node_resources["files"].append(texture_filepath)
            else:
                node_resources["node"].append(texture_filepath)
                node_resources["files"].append(texture_filepath)

            resources.append(node_resources)

@@ -1,14 +1,13 @@
import os
import json
import pprint

from maya import cmds

import colorbleed.api
from cb.utils.maya import context
# from cb.utils.maya import context


class ExtractYetiProcedural(colorbleed.api.Extractor):
class ExtractYetiCache(colorbleed.api.Extractor):
    """Produce an alembic of just point positions and normals.

    Positions and normals are preserved, but nothing more,

@@ -16,9 +15,9 @@ class ExtractYetiProcedural(colorbleed.api.Extractor):
    """

    label = "Extract Yeti"
    label = "Extract Yeti Cache"
    hosts = ["maya"]
    families = ["colorbleed.yetiRig"]
    families = ["colorbleed.yetiRig", "colorbleed.yeticache"]

    def process(self, instance):

@@ -31,7 +30,13 @@ class ExtractYetiProcedural(colorbleed.api.Extractor):
        # Yeti related staging dirs
        data_file = os.path.join(dirname, "yeti_settings.json")
        maya_path = os.path.join(dirname, "yeti_rig.ma")

        # Collect information for writing cache
        start_frame = instance.data.get("startFrame")
        end_frame = instance.data.get("endFrame")
        preroll = instance.data.get("preroll")
        if preroll > 0:
            start_frame -= preroll

        self.log.info("Writing out cache")
        # Start writing the files for snap shot
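
A quick sanity check of the preroll handling with made-up numbers: the preroll only widens the cached range at the start, so the groom can settle before the first shot frame.

start_frame, end_frame, preroll = 1001, 1100, 10  # hypothetical shot values
if preroll > 0:
    start_frame -= preroll
print("cache range: %s-%s" % (start_frame, end_frame))  # cache range: 991-1100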

@@ -39,7 +44,7 @@ class ExtractYetiProcedural(colorbleed.api.Extractor):
        path = os.path.join(dirname, "cache_<NAME>.0001.fur")
        cmds.pgYetiCommand(yeti_nodes,
                           writeCache=path,
                           range=(1, 1),
                           range=(start_frame, end_frame),
                           sampleTimes="0.0 1.0",
                           updateViewport=False,
                           generatePreview=False)

@@ -47,44 +52,16 @@ class ExtractYetiProcedural(colorbleed.api.Extractor):
        cache_files = [x for x in os.listdir(dirname) if x.endswith(".fur")]

        self.log.info("Writing metadata file")
        image_search_path = ""
        settings = instance.data.get("settings", None)
        if settings is not None:

            # Create assumed destination folder for imageSearchPath
            assumed_temp_data = instance.data["assumedTemplateData"]
            template = instance.data["template"]
            template_formatted = template.format(**assumed_temp_data)

            destination_folder = os.path.dirname(template_formatted)
            image_search_path = os.path.join(destination_folder, "resources")
            image_search_path = os.path.normpath(image_search_path)

            # Store assumed imageSearchPath
            settings["imageSearchPath"] = image_search_path

            with open(data_file, "w") as fp:
                json.dump(settings, fp, ensure_ascii=False)

        attr_value = {"%s.imageSearchPath" % n: image_search_path for
                      n in yeti_nodes}

        with context.attribute_value(attr_value):
            cmds.select(instance.data["setMembers"], noExpand=True)
            cmds.file(maya_path,
                      force=True,
                      exportSelected=True,
                      typ="mayaAscii",
                      preserveReferences=False,
                      constructionHistory=False,
                      shader=False)

        # Ensure files can be stored
        if "files" not in instance.data:
            instance.data["files"] = list()

        instance.data["files"].extend([cache_files,
                                       "yeti_rig.ma",
                                       "yeti_settings.json"])

        self.log.info("Extracted {} to {}".format(instance, dirname))
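
The "assumed destination" block above formats the publish template with assumedTemplateData to predict where resources will land after integration. A sketch with a hypothetical template and data (the real template and keys come from the project configuration):

import os

template = "{root}/{project}/{asset}/publish/{subset}/v{version:0>3}/{subset}.{representation}"
assumed_temp_data = {"root": "/projects", "project": "demo", "asset": "hero",
                     "subset": "yetiRigDefault", "version": 1,
                     "representation": "fur"}

template_formatted = template.format(**assumed_temp_data)
destination_folder = os.path.dirname(template_formatted)
image_search_path = os.path.normpath(os.path.join(destination_folder, "resources"))
print(image_search_path)  # /projects/demo/hero/publish/yetiRigDefault/v001/resources (on POSIX)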

@@ -1,11 +1,13 @@
import os
import json

from maya import cmds

import colorbleed.api
from cb.utils.maya import context
reload(context)


class ExtractYetiProcedural(colorbleed.api.Extractor):
class ExtractYetiRig(colorbleed.api.Extractor):
    """Produce an alembic of just point positions and normals.

    Positions and normals are preserved, but nothing more,

@@ -13,12 +15,11 @@ class ExtractYetiProcedural(colorbleed.api.Extractor):
    """

    label = "Extract Yeti"
    label = "Extract Yeti Rig"
    hosts = ["maya"]
    families = ["colorbleed.yetiprocedural"]
    families = ["colorbleed.yetiRig", "colorbleed.yeticache"]

    def process(self, instance):
        print instance

        yeti_nodes = cmds.ls(instance, type="pgYetiMaya")
        if not yeti_nodes:

@@ -26,37 +27,44 @@ class ExtractYetiProcedural(colorbleed.api.Extractor):
        # Define extract output file path
        dirname = self.staging_dir(instance)
        data_file = os.path.join(dirname, "{}.json".format(instance.name))

        start = instance.data.get("startFrame")
        end = instance.data.get("endFrame")
        preroll = instance.data.get("preroll")
        if preroll > 1:
            start -= preroll  # caching supports negative frames

        self.log.info("Writing out cache")
        # Start writing the files
        # <NAME> will be replaced by the yeti node name
        filename = "{0}_<NAME>.%04d.fur".format(instance.name)
        path = os.path.join(dirname, filename)
        cache_files = cmds.pgYetiCommand(yeti_nodes,
                                         writeCache=path,
                                         range=(start, end),
                                         sampleTimes="0.0 1.0",
                                         updateViewport=False,
                                         generatePreview=False)
        # Yeti related staging dirs
        maya_path = os.path.join(dirname, "yeti_rig.ma")

        self.log.info("Writing metadata file")
        image_search_path = ""
        settings = instance.data.get("settings", None)
        if settings is not None:
            with open(data_file, "w") as fp:
                json.dump(settings, fp, ensure_ascii=False)

            # Create assumed destination folder for imageSearchPath
            assumed_temp_data = instance.data["assumedTemplateData"]
            template = instance.data["template"]
            template_formatted = template.format(**assumed_temp_data)

            destination_folder = os.path.dirname(template_formatted)
            image_search_path = os.path.join(destination_folder, "resources")
            image_search_path = os.path.normpath(image_search_path)

        attr_value = {"%s.imageSearchPath" % n: image_search_path for
                      n in yeti_nodes}

        with context.attribute_values(attr_value):
            cmds.select(instance.data["setMembers"], noExpand=True)
            cmds.file(maya_path,
                      force=True,
                      exportSelected=True,
                      typ="mayaAscii",
                      preserveReferences=False,
                      constructionHistory=False,
                      shader=False)

        # Ensure files can be stored
        if "files" not in instance.data:
            instance.data["files"] = list()

        instance.data["files"].append(cache_files)
        instance.data["files"].append(data_file)
        instance.data["files"].extend(["yeti_rig.ma",
                                       "yeti_settings.json"])

        self.log.info("Extracted {} to {}".format(instance, dirname))

        cmds.select(clear=True)
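
cb.utils.maya.context is not part of this diff. For readers outside the colorbleed codebase, a minimal sketch of what an attribute_values() context manager used this way typically does (apply plug values for the duration of the block, then restore the originals); the implementation below is an assumption based purely on how it is called above:

from contextlib import contextmanager

from maya import cmds


@contextmanager
def attribute_values(attr_values):
    """Temporarily set plug values, restoring the originals afterwards (sketch)."""
    original = {plug: cmds.getAttr(plug) for plug in attr_values}
    try:
        for plug, value in attr_values.items():
            if isinstance(value, str):
                cmds.setAttr(plug, value, type="string")
            else:
                cmds.setAttr(plug, value)
        yield
    finally:
        for plug, value in original.items():
            if isinstance(value, str):
                cmds.setAttr(plug, value, type="string")
            else:
                cmds.setAttr(plug, value)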

@@ -1,5 +1,6 @@
import os
import logging
import pprint
import shutil

import errno

@@ -31,7 +32,8 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
                "colorbleed.model",
                "colorbleed.pointcache",
                "colorbleed.setdress",
                "colorbleed.rig"]
                "colorbleed.rig",
                "colorbleed.yetiRig"]

    def process(self, instance):

@@ -149,6 +151,9 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
        # Find the representations to transfer amongst the files
        # Each should be a single representation (as such, a single extension)
        representations = []

        print 'files', instance.data['files']

        for files in instance.data["files"]:

            # Collection

@@ -162,7 +167,6 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
            #
            if isinstance(files, list):
                collection = files

                # Assert that each member has identical suffix
                _, ext = os.path.splitext(collection[0])
                assert all(ext == os.path.splitext(name)[1]

@@ -170,10 +174,18 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
                           "Files had varying suffixes, this is a bug"
                           )

                assert not any(os.path.isabs(name) for name in collection)

                template_data["representation"] = ext[1:]

                for fname in collection:
                    src = os.path.join(stagingdir, fname)

                    if os.path.isabs(fname):
                        src = fname
                        fname = os.path.basename(src)
                    else:
                        src = os.path.join(stagingdir, fname)

                    dst = os.path.join(
                        template_publish.format(**template_data),
                        fname
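
The new branch above lets a collection contain absolute paths (for example the collected Yeti textures) alongside staging-relative names: absolute entries keep their source location but are published under their base name. A small illustration with made-up paths:

import os

stagingdir = "/tmp/pyblish_staging"  # hypothetical staging directory
for fname in ["cache_body.0001.fur", "/textures/fur_mask.1001.tif"]:
    if os.path.isabs(fname):
        src = fname
        fname = os.path.basename(src)
    else:
        src = os.path.join(stagingdir, fname)
    print("%s -> %s" % (src, fname))
# /tmp/pyblish_staging/cache_body.0001.fur -> cache_body.0001.fur
# /textures/fur_mask.1001.tif -> fur_mask.1001.tif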

@@ -191,6 +203,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
                # |_______|
                #
                fname = files
                assert not os.path.isabs(fname)
                _, ext = os.path.splitext(fname)

                template_data["representation"] = ext[1:]

@@ -221,6 +234,8 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
            }
            representations.append(representation)

        pprint.pprint(instance.data["transfers"])

        self.log.info("Registering {} items".format(len(representations)))

        io.insert_many(representations)

@@ -287,10 +302,13 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
    def create_version(self, subset, version_number, locations, data=None):
        """ Copy given source to destination

        Arguments:
        Args:
            subset (dict): the registered subset of the asset
            version_number (int): the version number
            locations (list): the currently registered locations

        Returns:
            dict: collection of data to create a version
        """
        # Imprint currently registered location
        version_locations = [location for location in locations if