Mirror of https://github.com/ynput/ayon-core.git (synced 2025-12-25 21:32:15 +01:00)

Merge branch 'develop' into feature/PYPE-124_hierarchical_attributes

Commit 28d8768456: 10 changed files with 542 additions and 61 deletions
@@ -9,10 +9,15 @@ from avalon.vendor import toml
from bson.objectid import ObjectId

from pype.ftrack import ftrack_utils


+class ExpectedError(Exception):
+    def __init__(self, *args, **kwargs):
+        super().__init__(self, *args, **kwargs)
+
+
class Sync_to_Avalon(BaseEvent):

    def launch(self, session, entities, event):

        self.ca_mongoid = 'avalon_mongo_id'
        # If mongo_id textfield has changed: RETURN!
        # - infinite loop
@@ -21,7 +26,7 @@ class Sync_to_Avalon(BaseEvent):
            if self.ca_mongoid in ent['keys']:
                return

        self.proj = None

        self.errors = []
        # get project
        for entity in entities:
            try:
@@ -32,10 +37,12 @@ class Sync_to_Avalon(BaseEvent):
                break

        # check if project is set to auto-sync
-        if (self.proj is None or
-                'avalon_auto_sync' not in self.proj['custom_attributes'] or
-                self.proj['custom_attributes']['avalon_auto_sync'] is False):
-            return
+        if (
+            self.proj is None or
+            'avalon_auto_sync' not in self.proj['custom_attributes'] or
+            self.proj['custom_attributes']['avalon_auto_sync'] is False
+        ):
+            return

        # check if project have Custom Attribute 'avalon_mongo_id'
        if self.ca_mongoid not in self.proj['custom_attributes']:
@@ -51,13 +58,18 @@ class Sync_to_Avalon(BaseEvent):
        # get avalon project if possible
        io.install()
        try:
-            self.avalon_project = io.find_one({"_id": ObjectId(self.projectId)})
+            self.avalon_project = io.find_one({
+                "_id": ObjectId(self.projectId)
+            })
        except:
            self.avalon_project = None

        importEntities = []
        if self.avalon_project is None:
-            self.avalon_project = io.find_one({"type": "project", "name": self.proj["full_name"]})
+            self.avalon_project = io.find_one({
+                "type": "project",
+                "name": self.proj["full_name"]
+            })
            if self.avalon_project is None:
                importEntities.append(self.proj)
        else:
@@ -69,9 +81,10 @@ class Sync_to_Avalon(BaseEvent):
            if entity.entity_type.lower() in ['task']:
                entity = entity['parent']

-            try:
-                mongo_id = entity['custom_attributes'][self.ca_mongoid]
-            except:
+            if (
+                'custom_attributes' not in entity or
+                self.ca_mongoid not in entity['custom_attributes']
+            ):
                message = "Custom attribute '{}' for '{}' is not created or don't have set permissions for API".format(self.ca_mongoid, entity.entity_type)
                self.log.warning(message)
                self.show_message(event, message, False)
@@ -88,25 +101,39 @@ class Sync_to_Avalon(BaseEvent):
        io.install()
        try:
            for entity in importEntities:
-                self.importToAvalon(session, entity)
+                self.importToAvalon(session, event, entity)
                session.commit()

        except ValueError as ve:
            message = str(ve)
            self.show_message(event, message, False)
            self.log.warning(message)
+        except ExpectedError as ee:
+            items = []
+            for error in self.errors:
+                info = {
+                    'label': 'Error',
+                    'type': 'textarea',
+                    'name': 'error',
+                    'value': error
+                }
+                items.append(info)
+                self.log.warning(error)
+            self.show_interface(event, items)
+
        except Exception as e:
            message = str(e)
            ftrack_message = "SyncToAvalon event ended with unexpected error please check log file for more information."
            self.show_message(event, ftrack_message, False)
            items = [{
                'label': 'Error',
                'type': 'textarea',
                'name': 'error',
                'value': ftrack_message
            }]
            self.show_interface(event, items)
            self.log.error(message)

        io.uninstall()

        return

-    def importToAvalon(self, session, entity):
+    def importToAvalon(self, session, event, entity):
        if self.ca_mongoid not in entity['custom_attributes']:
            raise ValueError("Custom attribute '{}' for '{}' is not created or don't have set permissions for API".format(self.ca_mongoid, entity['name']))
@@ -122,21 +149,26 @@ class Sync_to_Avalon(BaseEvent):

        if self.avalon_project is None:
            inventory.save(name, config, template)
-            self.avalon_project = io.find_one({'type': 'project', 'name': name})
+            self.avalon_project = io.find_one({'type': type, 'name': name})

        elif self.avalon_project['name'] != name:
-            raise ValueError('You can\'t change name {} to {}, avalon DB won\'t work properly!'.format(self.avalon_project['name'], name))
+            entity['name'] = self.avalon_project['name']
+            session.commit()
+
+            msg = 'You can\'t change name {} to {}, avalon wouldn\'t work properly!\nName was changed back!'.format(self.avalon_project['name'], name)
+            self.errors.append(msg)
+            return

        self.projectId = self.avalon_project['_id']

-        data = ftrack_utils.get_data(self, entity, session,self.custom_attributes)
+        data = ftrack_utils.get_data(self, entity, session, self.custom_attributes)

        io.update_many(
            {"_id": ObjectId(self.projectId)},
-            {'$set':{
-                'name':name,
-                'config':config,
-                'data':data,
+            {'$set': {
+                'name': name,
+                'config': config,
+                'data': data,
            }})

        entity['custom_attributes'][self.ca_mongoid] = str(self.projectId)
@@ -144,12 +176,14 @@ class Sync_to_Avalon(BaseEvent):
            return

        if self.avalon_project is None:
-            self.importToAvalon(session, self.proj)
+            self.importToAvalon(session, event, self.proj)

-        data = ftrack_utils.get_data(self, entity, session,self.custom_attributes)
+        data = ftrack_utils.get_data(self, entity, session, self.custom_attributes)

-        # return if entity is silo
+        # only check name if entity is silo
        if len(data['parents']) == 0:
+            if self.checkSilo(entity, event, session) is False:
+                raise ExpectedError
            return
        else:
            silo = data['parents'][0]
@@ -171,30 +205,104 @@ class Sync_to_Avalon(BaseEvent):
        if avalon_asset is None:
            mongo_id = inventory.create_asset(name, silo, data, ObjectId(self.projectId))
        # Raise error if it seems to be different ent. with same name
-        elif (avalon_asset['data']['parents'] != data['parents'] or
-                avalon_asset['silo'] != silo):
-            raise ValueError('In Avalon DB already exists entity with name "{0}"'.format(name))
-        elif avalon_asset['name'] != entity['name']:
-            raise ValueError('You can\'t change name {} to {}, avalon DB won\'t work properly - please set name back'.format(avalon_asset['name'], name))
-        elif avalon_asset['silo'] != silo or avalon_asset['data']['parents'] != data['parents']:
-            old_path = "/".join(avalon_asset['data']['parents'])
-            new_path = "/".join(data['parents'])
-            raise ValueError('You can\'t move with entities. Entity "{}" was moved from "{}" to "{}" , avalon DB won\'t work properly'.format(avalon_asset['name'], old_path, new_path))
+        elif (
+            avalon_asset['data']['parents'] != data['parents'] or
+            avalon_asset['silo'] != silo
+        ):
+            msg = 'In Avalon DB already exists entity with name "{0}"'.format(name)
+            self.errors.append(msg)
+            return
+        else:
+            if avalon_asset['name'] != entity['name']:
+                if self.checkChilds(entity) is False:
+                    msg = 'You can\'t change name {} to {}, avalon wouldn\'t work properly!\n\nName was changed back!\n\nCreate new entity if you want to change name.'.format(avalon_asset['name'], entity['name'])
+                    entity['name'] = avalon_asset['name']
+                    session.commit()
+                    self.errors.append(msg)
+
+            if avalon_asset['silo'] != silo or avalon_asset['data']['parents'] != data['parents']:
+                old_path = "/".join(avalon_asset['data']['parents'])
+                new_path = "/".join(data['parents'])
+                msg = 'You can\'t move with entities.\nEntity "{}" was moved from "{}" to "{}"\n\nAvalon won\'t work properly, please move them back!'.format(avalon_asset['name'], old_path, new_path)
+                self.errors.append(msg)
+
+        if len(self.errors) > 0:
+            raise ExpectedError

        io.update_many(
            {"_id": ObjectId(mongo_id)},
-            {'$set':{
-                'name':name,
-                'silo':silo,
-                'data':data,
+            {'$set': {
+                'name': name,
+                'silo': silo,
+                'data': data,
                'parent': ObjectId(self.projectId)}})

        entity['custom_attributes'][self.ca_mongoid] = str(mongo_id)

+    def checkChilds(self, entity):
+        if (entity.entity_type.lower() != 'task' and 'children' not in entity):
+            return True
+        childs = entity['children']
+        for child in childs:
+            if child.entity_type.lower() == 'task':
+                config = ftrack_utils.get_config_data()
+                if 'sync_to_avalon' in config:
+                    config = config['sync_to_avalon']
+                if 'statuses_name_change' in config:
+                    available_statuses = config['statuses_name_change']
+                else:
+                    available_statuses = []
+                ent_status = child['status']['name'].lower()
+                if ent_status not in available_statuses:
+                    return False
+            # If not task go deeper
+            elif self.checkChilds(child) is False:
+                return False
+        # If everything is allright
+        return True
+
+    def checkSilo(self, entity, event, session):
+        changes = event['data']['entities'][0]['changes']
+        if 'name' not in changes:
+            return True
+        new_name = changes['name']['new']
+        old_name = changes['name']['old']
+
+        if 'children' not in entity or len(entity['children']) < 1:
+            return True
+
+        if self.checkChilds(entity) is True:
+            self.updateSilo(old_name, new_name)
+            return True
+
+        new_found = 0
+        old_found = 0
+        for asset in io.find({'silo': new_name}):
+            new_found += 1
+        for asset in io.find({'silo': old_name}):
+            old_found += 1
+
+        if new_found > 0 or old_found == 0:
+            return True
+
+        # If any condition is possible, show error to user and change name back
+        msg = 'You can\'t change name {} to {}, avalon wouldn\'t work properly!\n\nName was changed back!\n\nCreate new entity if you want to change name.'.format(old_name, new_name)
+        self.errors.append(msg)
+        entity['name'] = old_name
+        session.commit()
+
+        return False
+
+    def updateSilo(self, old, new):
+        io.update_many(
+            {'silo': old},
+            {'$set': {'silo': new}}
+        )
+
    def setAvalonAttributes(self):
        self.custom_attributes = []
-        all_avalon_attr = self.session.query('CustomAttributeGroup where name is "avalon"').one()
+        query = 'CustomAttributeGroup where name is "avalon"'
+        all_avalon_attr = self.session.query(query).one()
        for cust_attr in all_avalon_attr['custom_attribute_configurations']:
            if 'avalon_' not in cust_attr['key']:
                self.custom_attributes.append(cust_attr)
@@ -210,10 +318,13 @@ class Sync_to_Avalon(BaseEvent):
            self.session, *args
        )
        return


    def _translate_event(self, session, event):
-        exceptions = ['assetversion', 'job', 'user', 'reviewsessionobject', 'timer', 'socialfeed', 'timelog']
-        _selection = event['data'].get('entities',[])
+        exceptions = [
+            'assetversion', 'job', 'user', 'reviewsessionobject', 'timer',
+            'socialfeed', 'timelog'
+        ]
+        _selection = event['data'].get('entities', [])

        _entities = list()
        for entity in _selection:
@@ -227,6 +338,7 @@ class Sync_to_Avalon(BaseEvent):

        return [_entities, event]


def register(session, **kw):
    '''Register plugin. Called when used as an plugin.'''
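Taken together, the guards at the top of launch() do two things: they ignore events whose changed keys include the avalon_mongo_id text field (writing that attribute back would retrigger the handler forever), and they skip any project not explicitly flagged with avalon_auto_sync. A minimal, standalone sketch of that decision follows; the dictionary shapes mimic only the payload fields referenced in this diff rather than the full ftrack_api schema, and should_sync() is a hypothetical helper, not part of the plugin.

    # Standalone sketch of the two launch() guards, using plain dicts that
    # mimic the event payload fields referenced in this diff. Not ftrack_api.
    CA_MONGOID = 'avalon_mongo_id'
    CA_AUTO_SYNC = 'avalon_auto_sync'


    def should_sync(event, project):
        """Return True when the event should be pushed to Avalon."""
        # 1) If the mongo_id text field itself changed, do nothing; writing
        #    it back would retrigger this handler in an infinite loop.
        for ent in event['data'].get('entities', []):
            if CA_MONGOID in ent.get('keys', []):
                return False

        # 2) Only projects explicitly marked for auto-sync are processed.
        if project is None:
            return False
        custom_attributes = project.get('custom_attributes', {})
        if custom_attributes.get(CA_AUTO_SYNC) is not True:
            return False

        return True


    if __name__ == '__main__':
        event = {'data': {'entities': [{'keys': ['avalon_mongo_id']}]}}
        project = {'custom_attributes': {'avalon_auto_sync': True}}
        print(should_sync(event, project))  # False: only the id field changed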
@@ -1,19 +1,7 @@
# :coding: utf-8
# :copyright: Copyright (c) 2017 ftrack
import os
import logging
import getpass
# import platform
import ftrack_api
import toml
from avalon import io, lib, pipeline
from avalon import session as sess
import acre

-from app.api import (
-    Templates,
-    Logger
-)
+from app.api import Logger


class BaseEvent(object):
@@ -47,7 +35,7 @@ class BaseEvent(object):

    def _translate_event(self, session, event):
        '''Return *event* translated structure to be used with the API.'''
-        _selection = event['data'].get('entities',[])
+        _selection = event['data'].get('entities', [])

        _entities = list()
        for entity in _selection:
@@ -119,7 +107,7 @@ class BaseEvent(object):
        '''
        raise NotImplementedError()

-    def show_message(self, event, input_message, result = False):
+    def show_message(self, event, input_message, result=False):
        """
        Shows message to user who triggered event
        - event - just source of user id
@@ -137,6 +125,8 @@ class BaseEvent(object):
            return

        user_id = event['source']['user']['id']
+        target = 'applicationId=ftrack.client.web and user.id="{0}"'.format(user_id)

        self.session.event_hub.publish(
            ftrack_api.event.base.Event(
                topic='ftrack.action.trigger-user-interface',
@@ -145,7 +135,27 @@ class BaseEvent(object):
                    success=result,
                    message=message
                ),
-                target='applicationId=ftrack.client.web and user.id="{0}"'.format(user_id)
+                target=target
            ),
            on_error='ignore'
        )

+    def show_interface(self, event, items):
+        """
+        Shows interface to user who triggered event
+        - 'items' must be list containing Ftrack interface items
+        """
+        user_id = event['source']['user']['id']
+        target = 'applicationId=ftrack.client.web and user.id="{0}"'.format(user_id)
+
+        self.session.event_hub.publish(
+            ftrack_api.event.base.Event(
+                topic='ftrack.action.trigger-user-interface',
+                data=dict(
+                    type='widget',
+                    items=items
+                ),
+                target=target
+            ),
+            on_error='ignore'
+        )
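show_interface() above publishes a ftrack.action.trigger-user-interface event carrying a list of widget items, and the new ExpectedError handler in the sync plugin builds those items from collected error strings. A small sketch of that item-building step, detached from ftrack_api; errors_to_items() is a hypothetical helper, and the dict keys mirror the ones used in the diff.

    def errors_to_items(errors):
        """Turn collected error strings into ftrack 'textarea' widget items,
        matching the dicts assembled in the ExpectedError handler above."""
        items = []
        for error in errors:
            items.append({
                'label': 'Error',
                'type': 'textarea',
                'name': 'error',
                'value': error
            })
        return items


    if __name__ == '__main__':
        msgs = ["You can't change name shot010 to shot_010 ..."]
        for item in errors_to_items(msgs):
            print(item['label'], '->', item['value'])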
@@ -1,6 +1,7 @@
import os
import sys
import re
+import json
from pprint import *

import ftrack_api

@@ -14,6 +15,21 @@ from app.api import Logger
log = Logger.getLogger(__name__)


+def get_config_data():
+    templates = os.environ['PYPE_STUDIO_TEMPLATES']
+    path_items = [templates, 'presets', 'ftrack', 'ftrack_config.json']
+    filepath = os.path.sep.join(path_items)
+    data = dict()
+    try:
+        with open(filepath) as data_file:
+            data = json.load(data_file)
+
+    except Exception as e:
+        msg = 'Loading "Ftrack Config file" Failed. Please check log for more information. Times are set to default.'
+        log.warning("{} - {}".format(msg, str(e)))
+
+    return data
+
+
def get_data(parent, entity, session, custom_attributes):
    entity_type = entity.entity_type
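get_config_data() reads presets/ftrack/ftrack_config.json from the PYPE_STUDIO_TEMPLATES directory, and checkChilds() in the sync plugin later looks up config['sync_to_avalon']['statuses_name_change'] as a list of lowercase status names. Below is a sketch of a config file consistent with those two lookups; the status values are invented and any other keys the real preset may contain are not shown.

    import json

    # Hypothetical ftrack_config.json content, shaped after the two keys this
    # diff actually reads: sync_to_avalon -> statuses_name_change.
    EXAMPLE_CONFIG = '''
    {
        "sync_to_avalon": {
            "statuses_name_change": ["not ready", "omitted"]
        }
    }
    '''

    config = json.loads(EXAMPLE_CONFIG)
    sync_cfg = config.get('sync_to_avalon', {})
    available_statuses = sync_cfg.get('statuses_name_change', [])

    # checkChilds() allows a rename only when every child task status is listed.
    print('ready' in available_statuses)      # False: rename would be blocked
    print('not ready' in available_statuses)  # True: rename allowed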
@@ -37,7 +37,8 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
                  "yeticache",
                  "nukescript",
                  "review",
-                 "scene"]
+                 "scene",
+                 "ass"]

    def process(self, instance):
pype/plugins/maya/create/create_ass.py (new file, 32 lines)
@@ -0,0 +1,32 @@
from collections import OrderedDict

import avalon.maya

from maya import cmds


class CreateAss(avalon.maya.Creator):
    """Arnold Archive"""

    name = "ass"
    label = "Ass StandIn"
    family = "ass"
    icon = "cube"

    def process(self):
        instance = super(CreateAss, self).process()

        data = OrderedDict(**self.data)

        nodes = list()

        if (self.options or {}).get("useSelection"):
            nodes = cmds.ls(selection=True)

        cmds.sets(nodes, rm=instance)

        assContent = cmds.sets(name="content_SET")
        assProxy = cmds.sets(name="proxy_SET", empty=True)
        cmds.sets([assContent, assProxy], forceElement=instance)

        self.data = data
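CreateAss nests two objectSets inside the publish instance: content_SET for the geometry that ends up in the .ass file and proxy_SET for the single stand-in preview mesh. A rough sketch of how those sets could be inspected and filled from the script editor; it only runs inside a Maya session and the node names are examples only.

    # To run inside an interactive Maya session after creating an "ass"
    # instance with CreateAss; node names below are examples only.
    from maya import cmds

    # The creator nests these two sets inside the instance objectSet.
    print(cmds.sets("content_SET", query=True))  # geometry exported as .ass
    print(cmds.sets("proxy_SET", query=True))    # single proxy mesh for the .ma

    # Assign example geometry to the sets the same way an artist would.
    cmds.sets("pCube1", forceElement="content_SET")
    cmds.sets("pCube1_proxy", forceElement="proxy_SET")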
pype/plugins/maya/load/load_ass.py (new file, 148 lines)
@@ -0,0 +1,148 @@
from avalon import api
import pype.maya.plugin
import os


class AssProxyLoader(pype.maya.plugin.ReferenceLoader):
    """Load the Proxy"""

    families = ["ass"]
    representations = ["ass"]

    label = "Reference .ASS standin with Proxy"
    order = -10
    icon = "code-fork"
    color = "orange"

    def process_reference(self, context, name, namespace, data):

        import maya.cmds as cmds
        from avalon import maya
        import pymel.core as pm

        with maya.maintained_selection():

            groupName = "{}:{}".format(namespace, name)
            path = self.fname
            proxyPath = os.path.splitext(path)[0] + ".ma"

            nodes = cmds.file(proxyPath,
                              namespace=namespace,
                              reference=True,
                              returnNewNodes=True,
                              groupReference=True,
                              groupName=groupName)

            cmds.makeIdentity(groupName, apply=False, rotate=True, translate=True, scale=True)

            # Set attributes
            proxyShape = pm.ls(nodes, type="mesh")[0]

            proxyShape.aiTranslator.set('procedural')
            proxyShape.dso.set(path)
            proxyShape.aiOverrideShaders.set(0)

        self[:] = nodes

        return nodes

    def switch(self, container, representation):
        self.update(container, representation)


class AssStandinLoader(api.Loader):
    """Load .ASS file as standin"""

    families = ["ass"]
    representations = ["ass"]

    label = "Load .ASS file as standin"
    order = -5
    icon = "code-fork"
    color = "orange"

    def load(self, context, name, namespace, data):

        import maya.cmds as cmds
        import avalon.maya.lib as lib
        from avalon.maya.pipeline import containerise
        import mtoa.ui.arnoldmenu
        import pymel.core as pm

        asset = context['asset']['name']
        namespace = namespace or lib.unique_namespace(
            asset + "_",
            prefix="_" if asset[0].isdigit() else "",
            suffix="_",
        )

        # cmds.loadPlugin("gpuCache", quiet=True)

        # Root group
        label = "{}:{}".format(namespace, name)
        root = pm.group(name=label, empty=True)

        # Create transform with shape
        transform_name = label + "_ASS"
        # transform = pm.createNode("transform", name=transform_name,
        #                           parent=root)

        standinShape = pm.PyNode(mtoa.ui.arnoldmenu.createStandIn())
        standin = standinShape.getParent()
        standin.rename(transform_name)

        pm.parent(standin, root)

        # Set the standin filepath
        standinShape.dso.set(self.fname)

        # Lock parenting of the transform and standin
        cmds.lockNode([root, standin], lock=True)

        nodes = [root, standin]
        self[:] = nodes

        return containerise(
            name=name,
            namespace=namespace,
            nodes=nodes,
            context=context,
            loader=self.__class__.__name__)

    def update(self, container, representation):

        import pymel.core as pm

        path = api.get_representation_path(representation)

        # Update the standin
        members = pm.sets(container['objectName'], query=True)
        standins = pm.ls(members, type="AiStandIn", long=True)

        assert len(caches) == 1, "This is a bug"

        for standin in standins:
            standin.cacheFileName.set(path)

        container = pm.PyNode(container["objectName"])
        container.representation.set(str(representation["_id"]))

    def switch(self, container, representation):
        self.update(container, representation)

    def remove(self, container):
        import maya.cmds as cmds
        members = cmds.sets(container['objectName'], query=True)
        cmds.lockNode(members, lock=False)
        cmds.delete([container['objectName']] + members)

        # Clean up the namespace
        try:
            cmds.namespace(removeNamespace=container['namespace'],
                           deleteNamespaceContent=True)
        except RuntimeError:
            pass
pype/plugins/maya/publish/collect_ass.py (new file, 35 lines)
@@ -0,0 +1,35 @@
from maya import cmds
import pymel.core as pm

import pyblish.api
import avalon.api


class CollectAssData(pyblish.api.InstancePlugin):
    """Collect Ass data

    """

    order = pyblish.api.CollectorOrder + 0.2
    label = 'Collect Ass'
    families = ["ass"]

    def process(self, instance):

        context = instance.context

        objsets = instance.data['setMembers']

        for objset in objsets:
            members = cmds.sets(objset, query=True)
            if members is None:
                self.log.warning("Skipped empty instance: \"%s\" " % objset)
                continue
            if objset == "content_SET":
                instance.data['setMembers'] = members
            elif objset == "proxy_SET":
                assert len(members) == 1, "You have multiple proxy meshes, please only use one"
                instance.data['proxy'] = members

        self.log.debug("data: {}".format(instance.data))
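CollectAssData reshapes instance.data: the members of content_SET replace the raw setMembers list and the single proxy_SET member is stored under 'proxy', which the two extractors below then read. A plain-Python illustration of that reshaping, with a hard-coded mapping standing in for the cmds.sets() queries and invented node names.

    # Plain-Python illustration of what CollectAssData does to instance.data.
    # The cmds.sets() queries are replaced by a hard-coded mapping; node names
    # are invented for the example.
    set_members = {
        "content_SET": ["pCube1", "pSphere1"],
        "proxy_SET": ["pCube1_proxy"],
    }

    instance_data = {"setMembers": list(set_members)}

    for objset in list(instance_data["setMembers"]):
        members = set_members.get(objset)
        if not members:
            continue
        if objset == "content_SET":
            instance_data["setMembers"] = members
        elif objset == "proxy_SET":
            assert len(members) == 1, "You have multiple proxy meshes, please only use one"
            instance_data["proxy"] = members

    print(instance_data)
    # {'setMembers': ['pCube1', 'pSphere1'], 'proxy': ['pCube1_proxy']}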
pype/plugins/maya/publish/extract_ass.py (new file, 47 lines)
@@ -0,0 +1,47 @@
import os

import avalon.maya
import pype.api

from maya import cmds


class ExtractAssStandin(pype.api.Extractor):
    """Extract the content of the instance to a ass file

    Things to pay attention to:
        - If animation is toggled, are the frames correct
        -
    """

    label = "Ass Standin (.ass)"
    hosts = ["maya"]
    families = ["ass"]

    def process(self, instance):

        staging_dir = self.staging_dir(instance)
        file_name = "{}.ass".format(instance.name)
        file_path = os.path.join(staging_dir, file_name)

        # Write out .ass file
        self.log.info("Writing: '%s'" % file_path)
        with avalon.maya.maintained_selection():
            self.log.info("Writing: {}".format(instance.data["setMembers"]))
            cmds.select(instance.data["setMembers"], noExpand=True)
            cmds.arnoldExportAss(filename=file_path,
                                 selected=True,
                                 asciiAss=True,
                                 shadowLinks=True,
                                 lightLinks=True,
                                 boundingBox=True
                                 )

        if "files" not in instance.data:
            instance.data["files"] = list()

        instance.data["files"].append(file_name)

        self.log.info("Extracted instance '%s' to: %s"
                      % (instance.name, staging_dir))
pype/plugins/maya/publish/extract_assproxy.py (new file, 73 lines)
@@ -0,0 +1,73 @@
import os

from maya import cmds
import contextlib

import avalon.maya
import pype.api
import pype.maya.lib as lib


class ExtractAssProxy(pype.api.Extractor):
    """Extract proxy model as Maya Ascii to use as arnold standin


    """

    order = pype.api.Extractor.order + 0.2
    label = "Ass Proxy (Maya ASCII)"
    hosts = ["maya"]
    families = ["ass"]

    def process(self, instance):

        @contextlib.contextmanager
        def unparent(root):
            """Temporarily unparent `root`"""
            parent = cmds.listRelatives(root, parent=True)
            if parent:
                cmds.parent(root, world=True)
                yield
                self.log.info("{} - {}".format(root, parent))
                cmds.parent(root, parent)
            else:
                yield

        # Define extract output file path
        stagingdir = self.staging_dir(instance)
        filename = "{0}.ma".format(instance.name)
        path = os.path.join(stagingdir, filename)

        # Perform extraction
        self.log.info("Performing extraction..")

        # Get only the shape contents we need in such a way that we avoid
        # taking along intermediateObjects
        members = instance.data['proxy']
        members = cmds.ls(members,
                          dag=True,
                          transforms=True,
                          noIntermediate=True)
        self.log.info(members)

        with avalon.maya.maintained_selection():
            with unparent(members[0]):
                cmds.select(members, noExpand=True)
                cmds.file(path,
                          force=True,
                          typ="mayaAscii",
                          exportSelected=True,
                          preserveReferences=False,
                          channels=False,
                          constraints=False,
                          expressions=False,
                          constructionHistory=False)

        if "files" not in instance.data:
            instance.data["files"] = list()

        instance.data["files"].append(filename)

        self.log.info("Extracted instance '%s' to: %s" % (instance.name, path))
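The inline unparent() helper temporarily moves the proxy root to world space so the exported Maya Ascii file does not drag the rest of the hierarchy along, then restores the original parent. The same pattern is shown below Maya-free, with a toy Node class standing in for a DAG transform so the sketch can run anywhere; the try/finally is added for illustration and is not part of the plugin.

    import contextlib


    class Node(object):
        """Toy stand-in for a DAG transform; purely illustrative."""
        def __init__(self, name, parent=None):
            self.name = name
            self.parent = parent


    @contextlib.contextmanager
    def unparented(node):
        """Temporarily detach `node` from its parent, restoring it afterwards."""
        parent = node.parent
        if parent is None:
            yield node
            return
        node.parent = None
        try:
            yield node
        finally:
            node.parent = parent


    if __name__ == '__main__':
        root = Node('ROOT')
        proxy = Node('proxy_GRP', parent=root)
        with unparented(proxy):
            print(proxy.parent)       # None while "exporting"
        print(proxy.parent.name)      # ROOT again afterwards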
@@ -70,6 +70,13 @@ class ValidateLookSets(pyblish.api.InstancePlugin):

        # check if any objectSets are not present ion the relationships
        missing_sets = [s for s in sets if s not in relationships]

+        for set in missing_sets:
+            if set.endswith("_SET"):
+                missing_sets.remove(set)
+                cls.log.info("Missing Sets "
+                             "'{}'".format(missing_sets))
+
        if missing_sets:
            # A set of this node is not coming along, this is wrong!
            cls.log.error("Missing sets '{}' for node "
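The added loop drops any *_SET objectSets from missing_sets before the error check and logs what it skipped. The same filter can also be written without removing items from the list while iterating over it; the standalone sketch below shows that equivalent form with invented set names, purely as an illustration.

    # Equivalent filter expressed without mutating the list during iteration.
    # The set names are invented; the "relationships" filtering happened earlier.
    missing_sets = ["proxy_SET", "content_SET", "lambert1SG"]

    ignored = [s for s in missing_sets if s.endswith("_SET")]
    missing_sets = [s for s in missing_sets if not s.endswith("_SET")]

    print("Ignored sets:", ignored)        # ['proxy_SET', 'content_SET']
    print("Missing sets:", missing_sets)   # ['lambert1SG']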