Merge branch 'develop' into feature/PYPE-92-pr-basic-pipeline-linking

# Conflicts:
#	pype/api.py
#	pype/templates.py
This commit is contained in:
Jakub Jezek 2019-01-11 10:37:09 +01:00
commit 2a1f637880
43 changed files with 1595 additions and 1293 deletions

View file

@ -9,10 +9,15 @@ from avalon.vendor import toml
from bson.objectid import ObjectId
from pype.ftrack import ftrack_utils
class ExpectedError(Exception):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
class Sync_to_Avalon(BaseEvent):
def launch(self, session, entities, event):
self.ca_mongoid = 'avalon_mongo_id'
# If mongo_id textfield has changed: RETURN!
# - infinite loop
@ -21,7 +26,7 @@ class Sync_to_Avalon(BaseEvent):
if self.ca_mongoid in ent['keys']:
return
self.proj = None
self.errors = []
# get project
for entity in entities:
try:
@ -32,10 +37,12 @@ class Sync_to_Avalon(BaseEvent):
break
# check if project is set to auto-sync
if (self.proj is None or
if (
self.proj is None or
'avalon_auto_sync' not in self.proj['custom_attributes'] or
self.proj['custom_attributes']['avalon_auto_sync'] is False):
return
self.proj['custom_attributes']['avalon_auto_sync'] is False
):
return
# check if project has Custom Attribute 'avalon_mongo_id'
if self.ca_mongoid not in self.proj['custom_attributes']:
@ -51,13 +58,18 @@ class Sync_to_Avalon(BaseEvent):
# get avalon project if possible
io.install()
try:
self.avalon_project = io.find_one({"_id": ObjectId(self.projectId)})
self.avalon_project = io.find_one({
"_id": ObjectId(self.projectId)
})
except:
self.avalon_project = None
importEntities = []
if self.avalon_project is None:
self.avalon_project = io.find_one({"type": "project", "name": self.proj["full_name"]})
self.avalon_project = io.find_one({
"type": "project",
"name": self.proj["full_name"]
})
if self.avalon_project is None:
importEntities.append(self.proj)
else:
@ -69,9 +81,10 @@ class Sync_to_Avalon(BaseEvent):
if entity.entity_type.lower() in ['task']:
entity = entity['parent']
try:
mongo_id = entity['custom_attributes'][self.ca_mongoid]
except:
if (
'custom_attributes' not in entity or
self.ca_mongoid not in entity['custom_attributes']
):
message = "Custom attribute '{}' for '{}' is not created or don't have set permissions for API".format(self.ca_mongoid, entity.entity_type)
self.log.warning(message)
self.show_message(event, message, False)
@ -88,25 +101,39 @@ class Sync_to_Avalon(BaseEvent):
io.install()
try:
for entity in importEntities:
self.importToAvalon(session, entity)
self.importToAvalon(session, event, entity)
session.commit()
except ValueError as ve:
message = str(ve)
self.show_message(event, message, False)
self.log.warning(message)
except ExpectedError as ee:
items = []
for error in self.errors:
info = {
'label': 'Error',
'type': 'textarea',
'name': 'error',
'value': error
}
items.append(info)
self.log.warning(error)
self.show_interface(event, items)
except Exception as e:
message = str(e)
ftrack_message = "SyncToAvalon event ended with an unexpected error. Please check the log file for more information."
self.show_message(event, ftrack_message, False)
items = [{
'label': 'Error',
'type': 'textarea',
'name': 'error',
'value': ftrack_message
}]
self.show_interface(event, items)
self.log.error(message)
io.uninstall()
return
def importToAvalon(self, session, entity):
def importToAvalon(self, session, event, entity):
if self.ca_mongoid not in entity['custom_attributes']:
raise ValueError("Custom attribute '{}' for '{}' is not created or don't have set permissions for API".format(self.ca_mongoid, entity['name']))
@ -122,21 +149,26 @@ class Sync_to_Avalon(BaseEvent):
if self.avalon_project is None:
inventory.save(name, config, template)
self.avalon_project = io.find_one({'type': 'project', 'name': name})
self.avalon_project = io.find_one({'type': type, 'name': name})
elif self.avalon_project['name'] != name:
raise ValueError('You can\'t change name {} to {}, avalon DB won\'t work properly!'.format(self.avalon_project['name'], name))
entity['name'] = self.avalon_project['name']
session.commit()
msg = 'You can\'t change name {} to {}, avalon wouldn\'t work properly!\nName was changed back!'.format(self.avalon_project['name'], name)
self.errors.append(msg)
return
self.projectId = self.avalon_project['_id']
data = ftrack_utils.get_data(self, entity, session,self.custom_attributes)
data = ftrack_utils.get_data(self, entity, session, self.custom_attributes)
io.update_many(
{"_id": ObjectId(self.projectId)},
{'$set':{
'name':name,
'config':config,
'data':data,
{'$set': {
'name': name,
'config': config,
'data': data,
}})
entity['custom_attributes'][self.ca_mongoid] = str(self.projectId)
@ -144,12 +176,14 @@ class Sync_to_Avalon(BaseEvent):
return
if self.avalon_project is None:
self.importToAvalon(session, self.proj)
self.importToAvalon(session, event, self.proj)
data = ftrack_utils.get_data(self, entity, session,self.custom_attributes)
data = ftrack_utils.get_data(self, entity, session, self.custom_attributes)
# return if entity is silo
# only check name if entity is silo
if len(data['parents']) == 0:
if self.checkSilo(entity, event, session) is False:
raise ExpectedError
return
else:
silo = data['parents'][0]
@ -171,30 +205,104 @@ class Sync_to_Avalon(BaseEvent):
if avalon_asset is None:
mongo_id = inventory.create_asset(name, silo, data, ObjectId(self.projectId))
# Raise error if it seems to be different ent. with same name
elif (avalon_asset['data']['parents'] != data['parents'] or
avalon_asset['silo'] != silo):
raise ValueError('In Avalon DB already exists entity with name "{0}"'.format(name))
elif avalon_asset['name'] != entity['name']:
raise ValueError('You can\'t change name {} to {}, avalon DB won\'t work properly - please set name back'.format(avalon_asset['name'], name))
elif avalon_asset['silo'] != silo or avalon_asset['data']['parents'] != data['parents']:
old_path = "/".join(avalon_asset['data']['parents'])
new_path = "/".join(data['parents'])
raise ValueError('You can\'t move with entities. Entity "{}" was moved from "{}" to "{}" , avalon DB won\'t work properly'.format(avalon_asset['name'], old_path, new_path))
elif (
avalon_asset['data']['parents'] != data['parents'] or
avalon_asset['silo'] != silo
):
msg = 'Entity with name "{0}" already exists in Avalon DB'.format(name)
self.errors.append(msg)
return
else:
if avalon_asset['name'] != entity['name']:
if self.checkChilds(entity) is False:
msg = 'You can\'t change name {} to {}, avalon wouldn\'t work properly!\n\nName was changed back!\n\nCreate a new entity if you want to change the name.'.format(avalon_asset['name'], entity['name'])
entity['name'] = avalon_asset['name']
session.commit()
self.errors.append(msg)
if avalon_asset['silo'] != silo or avalon_asset['data']['parents'] != data['parents']:
old_path = "/".join(avalon_asset['data']['parents'])
new_path = "/".join(data['parents'])
msg = 'You can\'t move entities.\nEntity "{}" was moved from "{}" to "{}"\n\nAvalon won\'t work properly, please move it back!'.format(avalon_asset['name'], old_path, new_path)
self.errors.append(msg)
if len(self.errors) > 0:
raise ExpectedError
io.update_many(
{"_id": ObjectId(mongo_id)},
{'$set':{
'name':name,
'silo':silo,
'data':data,
{'$set': {
'name': name,
'silo': silo,
'data': data,
'parent': ObjectId(self.projectId)}})
entity['custom_attributes'][self.ca_mongoid] = str(mongo_id)
def checkChilds(self, entity):
if (entity.entity_type.lower() != 'task' and 'children' not in entity):
return True
childs = entity['children']
for child in childs:
if child.entity_type.lower() == 'task':
config = ftrack_utils.get_config_data()
if 'sync_to_avalon' in config:
config = config['sync_to_avalon']
if 'statuses_name_change' in config:
available_statuses = config['statuses_name_change']
else:
available_statuses = []
ent_status = child['status']['name'].lower()
if ent_status not in available_statuses:
return False
# If not task go deeper
elif self.checkChilds(child) is False:
return False
# If everything is allright
return True
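# e.g. (hypothetical) a rename is only allowed when every task found
# anywhere under the entity has a status listed in the
# "statuses_name_change" preset (such as "omitted"); any task with a
# different status makes checkChilds return False and blocks the rename.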
def checkSilo(self, entity, event, session):
changes = event['data']['entities'][0]['changes']
if 'name' not in changes:
return True
new_name = changes['name']['new']
old_name = changes['name']['old']
if 'children' not in entity or len(entity['children']) < 1:
return True
if self.checkChilds(entity) is True:
self.updateSilo(old_name, new_name)
return True
new_found = 0
old_found = 0
for asset in io.find({'silo': new_name}):
new_found += 1
for asset in io.find({'silo': old_name}):
old_found += 1
if new_found > 0 or old_found == 0:
return True
# If any condition is possible, show error to user and change name back
msg = 'You can\'t change name {} to {}, avalon wouldn\'t work properly!\n\nName was changed back!\n\nCreate a new entity if you want to change the name.'.format(old_name, new_name)
self.errors.append(msg)
entity['name'] = old_name
session.commit()
return False
def updateSilo(self, old, new):
io.update_many(
{'silo': old},
{'$set': {'silo': new}}
)
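# e.g. (hypothetical names) updateSilo('assets', 'asset') rewrites the
# 'silo' field of every asset document that still points at the old name.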
def setAvalonAttributes(self):
self.custom_attributes = []
all_avalon_attr = self.session.query('CustomAttributeGroup where name is "avalon"').one()
query = 'CustomAttributeGroup where name is "avalon"'
all_avalon_attr = self.session.query(query).one()
for cust_attr in all_avalon_attr['custom_attribute_configurations']:
if 'avalon_' not in cust_attr['key']:
self.custom_attributes.append(cust_attr)
@ -210,10 +318,13 @@ class Sync_to_Avalon(BaseEvent):
self.session, *args
)
return
def _translate_event(self, session, event):
exceptions = ['assetversion', 'job', 'user', 'reviewsessionobject', 'timer', 'socialfeed', 'timelog']
_selection = event['data'].get('entities',[])
exceptions = [
'assetversion', 'job', 'user', 'reviewsessionobject', 'timer',
'socialfeed', 'timelog'
]
_selection = event['data'].get('entities', [])
_entities = list()
for entity in _selection:
@ -227,6 +338,7 @@ class Sync_to_Avalon(BaseEvent):
return [_entities, event]
def register(session, **kw):
'''Register plugin. Called when used as a plugin.'''

View file

@ -1,19 +1,7 @@
# :coding: utf-8
# :copyright: Copyright (c) 2017 ftrack
import os
import logging
import getpass
# import platform
import ftrack_api
import toml
from avalon import io, lib, pipeline
from avalon import session as sess
import acre
from app.api import (
Templates,
Logger
)
from app.api import Logger
class BaseEvent(object):
@ -47,7 +35,7 @@ class BaseEvent(object):
def _translate_event(self, session, event):
'''Return *event* translated structure to be used with the API.'''
_selection = event['data'].get('entities',[])
_selection = event['data'].get('entities', [])
_entities = list()
for entity in _selection:
@ -119,7 +107,7 @@ class BaseEvent(object):
'''
raise NotImplementedError()
def show_message(self, event, input_message, result = False):
def show_message(self, event, input_message, result=False):
"""
Shows message to user who triggered event
- event - just source of user id
@ -137,6 +125,8 @@ class BaseEvent(object):
return
user_id = event['source']['user']['id']
target = 'applicationId=ftrack.client.web and user.id="{0}"'.format(user_id)
self.session.event_hub.publish(
ftrack_api.event.base.Event(
topic='ftrack.action.trigger-user-interface',
@ -145,7 +135,27 @@ class BaseEvent(object):
success=result,
message=message
),
target='applicationId=ftrack.client.web and user.id="{0}"'.format(user_id)
target=target
),
on_error='ignore'
)
def show_interface(self, event, items):
"""
Shows interface to user who triggered event
- 'items' must be list containing Ftrack interface items
"""
user_id = event['source']['user']['id']
target = 'applicationId=ftrack.client.web and user.id="{0}"'.format(user_id)
self.session.event_hub.publish(
ftrack_api.event.base.Event(
topic='ftrack.action.trigger-user-interface',
data=dict(
type='widget',
items=items
),
target=target
),
on_error='ignore'
)
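# A minimal (hypothetical) items payload, matching the shape used in the
# launch() error handling of the sync event above:
#
#     items = [{
#         'label': 'Error',
#         'type': 'textarea',
#         'name': 'error',
#         'value': 'Something went wrong'
#     }]
#     self.show_interface(event, items)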

View file

@ -1,7 +1,6 @@
import sys
import os
import argparse
import subprocess
import json
import threading
import time
import ftrack_api
@ -168,7 +167,6 @@ class FtrackRunner:
if self.bool_timer_event is False:
self.start_timer_thread()
def start_timer_thread(self):
if self.thread_timer is None:
self.thread_timer = FtrackEventsThread(self)
@ -197,7 +195,7 @@ class FtrackRunner:
def stop_countdown_thread(self):
if self.thread_timer_coundown is not None:
self.thread_timer_coundown.runs=False
self.thread_timer_coundown.runs = False
self.thread_timer_coundown.terminate()
self.thread_timer_coundown.wait()
self.thread_timer_coundown = None
@ -209,7 +207,8 @@ class FtrackRunner:
# self.widget_timer.activateWindow()
def change_count_widget(self, time):
self.widget_timer.lbl_rest_time.setText(str(time))
str_time = str(time).replace(".0", "")
self.widget_timer.lbl_rest_time.setText(str_time)
def timer_started(self):
self.start_countdown_thread()
@ -225,22 +224,33 @@ class FtrackRunner:
if self.thread_timer_coundown is not None:
self.stop_countdown_thread()
def timer_restart(self):
if self.thread_timer is not None:
self.thread_timer.signal_restart_timer.emit()
self.timer_started()
def timer_continue(self):
if self.thread_timer_coundown is not None:
self.thread_timer_coundown.signal_continue_timer.emit()
class FtrackEventsThread(QtCore.QThread):
# Senders
signal_timer_started = QtCore.Signal()
signal_timer_stopped = QtCore.Signal()
# Listeners
signal_stop_timer = QtCore.Signal()
signal_restart_timer = QtCore.Signal()
def __init__(self, parent):
super(FtrackEventsThread, self).__init__()
cred = credentials._get_credentials()
self.username = cred['username']
self.signal_stop_timer.connect(self.ftrack_stop_timer)
self.signal_restart_timer.connect(self.ftrack_restart_timer)
self.user = None
self.last_task = None
def run(self):
self.timer_session = ftrack_api.Session(auto_connect_event_hub=True)
@ -248,6 +258,15 @@ class FtrackEventsThread(QtCore.QThread):
'topic=ftrack.update and source.user.username={}'.format(self.username),
self.event_handler)
user_query = 'User where username is "{}"'.format(self.username)
self.user = self.timer_session.query(user_query).one()
timer_query = 'Timer where user.username is "{}"'.format(self.username)
timer = self.timer_session.query(timer_query).first()
if timer is not None:
self.last_task = timer['context']
self.signal_timer_started.emit()
self.timer_session.event_hub.wait()
def event_handler(self, event):
@ -256,24 +275,44 @@ class FtrackEventsThread(QtCore.QThread):
return
except:
return
new = event['data']['entities'][0]['changes']['start']['new']
old = event['data']['entities'][0]['changes']['start']['old']
self.userId = event['source']['user']['id']
if old is None and new is None:
return
elif old is None:
timer_query = 'Timer where user.username is "{}"'.format(self.username)
timer = self.timer_session.query(timer_query).first()
if timer is not None:
self.last_task = timer['context']
if old is None:
self.signal_timer_started.emit()
elif new is None:
self.signal_timer_stopped.emit()
def ftrack_stop_timer(self):
try:
user = self.timer_session.query('User where id is ' + self.userId).one()
user.stop_timer()
self.user.stop_timer()
self.timer_session.commit()
except Exception as e:
log.debug("Timer stop had issues: {}".format(e))
def ftrack_restart_timer(self):
try:
last_task = None
if "FTRACK_LAST_TASK_ID" in os.environ:
task_id = os.environ["FTRACK_LAST_TASK_ID"]
query = 'Task where id is {}'.format(task_id)
last_task = self.timer_session.query(query).one()
if (self.last_task is not None) and (self.user is not None):
self.user.start_timer(self.last_task)
self.timer_session.commit()
except Exception as e:
log.debug("Timer stop had issues: {}".format(e))
class CountdownThread(QtCore.QThread):
# Senders
@ -287,10 +326,12 @@ class CountdownThread(QtCore.QThread):
def __init__(self, parent):
super(CountdownThread, self).__init__()
self.runs = True
self.over_line = False
self.count_length = 60*5 # 5 minutes
self.border_line = 31
config_data = self.load_timer_values()
self.count_length = config_data['full_time']*60
self.border_line = config_data['message_time']*60 + 1
self.reset_count()
self.signal_reset_timer.connect(self.reset_count)
self.signal_continue_timer.connect(self.continue_timer)
@ -335,6 +376,38 @@ class CountdownThread(QtCore.QThread):
thread_keyboard.terminate()
thread_keyboard.wait()
def load_timer_values(self):
templates = os.environ['PYPE_STUDIO_TEMPLATES']
path_items = [templates, 'presets', 'ftrack', 'ftrack_config.json']
filepath = os.path.sep.join(path_items)
data = dict()
try:
with open(filepath) as data_file:
json_dict = json.load(data_file)
data = json_dict['timer']
except Exception as e:
msg = 'Loading "Ftrack Config file" Failed. Please check log for more information. Times are set to default.'
log.warning("{} - {}".format(msg, str(e)))
data = self.validate_timer_values(data)
return data
def validate_timer_values(self, data):
# default values
if 'full_time' not in data:
data['full_time'] = 15
if 'message_time' not in data:
data['message_time'] = 0.5
# minimum values
if data['full_time'] < 2:
data['full_time'] = 2
# message time must be earlier than full time
if data['message_time'] > data['full_time']:
data['message_time'] = data['full_time'] - 0.5
return data
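# Worked example (hypothetical input): validate_timer_values({'full_time': 1})
# returns {'full_time': 2, 'message_time': 0.5} - the missing 'message_time'
# falls back to its default and 'full_time' is raised to the 2 minute minimum.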
class MouseThread(QtCore.QThread):
signal_stop = QtCore.Signal()
@ -377,6 +450,7 @@ class KeyboardThread(QtCore.QThread):
self.k_listener = keyboard.Listener(on_press=self.on_press)
self.k_listener.start()
class StopTimer(QtWidgets.QWidget):
SIZE_W = 300
@ -419,31 +493,31 @@ class StopTimer(QtWidgets.QWidget):
msg_info = "You didn't work for a long time."
msg_question = "Would you like to stop Ftrack timer?"
msg_stopped = "Your Ftrack timer was stopped!"
msg_stopped = "Your Ftrack timer was stopped. Do you want to start again?"
self.lbl_info = QtWidgets.QLabel(msg_info)
self.lbl_info.setFont(self.font)
self.lbl_info.setTextFormat(QtCore.Qt.RichText)
self.lbl_info.setObjectName("lbl_info")
self.lbl_info.setWordWrap(True);
self.lbl_info.setWordWrap(True)
self.lbl_question = QtWidgets.QLabel(msg_question)
self.lbl_question.setFont(self.font)
self.lbl_question.setTextFormat(QtCore.Qt.RichText)
self.lbl_question.setObjectName("lbl_question")
self.lbl_question.setWordWrap(True);
self.lbl_question.setWordWrap(True)
self.lbl_stopped = QtWidgets.QLabel(msg_stopped)
self.lbl_stopped.setFont(self.font)
self.lbl_stopped.setTextFormat(QtCore.Qt.RichText)
self.lbl_stopped.setObjectName("lbl_stopped")
self.lbl_stopped.setWordWrap(True);
self.lbl_stopped.setWordWrap(True)
self.lbl_rest_time = QtWidgets.QLabel("")
self.lbl_rest_time.setFont(self.font)
self.lbl_rest_time.setTextFormat(QtCore.Qt.RichText)
self.lbl_rest_time.setObjectName("lbl_rest_time")
self.lbl_rest_time.setWordWrap(True);
self.lbl_rest_time.setWordWrap(True)
self.lbl_rest_time.setAlignment(QtCore.Qt.AlignCenter)
self.form.addRow(self.lbl_info)
@ -463,13 +537,18 @@ class StopTimer(QtWidgets.QWidget):
self.btn_continue.setToolTip('Timer will continue')
self.btn_continue.clicked.connect(self.continue_timer)
self.btn_ok = QtWidgets.QPushButton("OK")
self.btn_ok.setToolTip('Close window')
self.btn_ok.clicked.connect(self.close_widget)
self.btn_close = QtWidgets.QPushButton("Close")
self.btn_close.setToolTip('Close window')
self.btn_close.clicked.connect(self.close_widget)
self.btn_restart = QtWidgets.QPushButton("Start timer")
self.btn_restart.setToolTip('Timer will be started again')
self.btn_restart.clicked.connect(self.restart_timer)
self.group_btn.addWidget(self.btn_continue)
self.group_btn.addWidget(self.btn_stop)
self.group_btn.addWidget(self.btn_ok)
self.group_btn.addWidget(self.btn_restart)
self.group_btn.addWidget(self.btn_close)
self.main.addLayout(self.form)
self.main.addLayout(self.group_btn)
@ -483,12 +562,17 @@ class StopTimer(QtWidgets.QWidget):
self.btn_continue.setVisible(self.main_context)
self.btn_stop.setVisible(self.main_context)
self.btn_ok.setVisible(not self.main_context)
self.btn_restart.setVisible(not self.main_context)
self.btn_close.setVisible(not self.main_context)
def stop_timer(self):
self.parent.timer_stop()
self.close_widget()
def restart_timer(self):
self.parent.timer_restart()
self.close_widget()
def continue_timer(self):
self.parent.timer_continue()
self.close_widget()

View file

@ -1,6 +1,7 @@
import os
import sys
import re
import json
from pprint import *
import ftrack_api
@ -13,6 +14,22 @@ from app.api import Logger
log = Logger.getLogger(__name__)
def get_config_data():
templates = os.environ['PYPE_STUDIO_TEMPLATES']
path_items = [templates, 'presets', 'ftrack', 'ftrack_config.json']
filepath = os.path.sep.join(path_items)
data = dict()
try:
with open(filepath) as data_file:
data = json.load(data_file)
except Exception as e:
msg = 'Loading "Ftrack Config file" Failed. Please check log for more information. Times are set to default.'
log.warning("{} - {}".format(msg, str(e)))
return data
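# The assumed shape of presets/ftrack/ftrack_config.json, combining the
# keys read here and by the timer code:
#
#     {
#         "timer": {"full_time": 15, "message_time": 0.5},
#         "sync_to_avalon": {"statuses_name_change": ["omitted"]}
#     }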
def get_data(parent, entity, session, custom_attributes):
entity_type = entity.entity_type

View file

@ -125,4 +125,4 @@ class SelectInvalidAction(pyblish.api.Action):
cmds.select(invalid, replace=True, noExpand=True)
else:
self.log.info("No invalid nodes found.")
cmds.select(deselect=True)
cmds.select(deselect=True)

View file

@ -7,9 +7,9 @@
},
{
"type": "action",
"command": "$PYPE_SCRIPTS\\others\\show_current_scene_in_explorer.py",
"command": "$PYPE_SCRIPTS\\others\\open_current_folder.py",
"sourcetype": "file",
"title": "# Explore current scene..",
"title": "Open working folder..",
"tooltip": "Show current scene in Explorer"
},
{
@ -25,47 +25,7 @@
{
"type": "menu",
"title": "# Modeling",
"items": [{
"type": "action",
"command": "$PYPE_SCRIPTS\\modeling\\duplicate_normalized.py",
"sourcetype": "file",
"tags": ["modeling",
"duplicate",
"normalized"],
"title": "# # Duplicate Normalized",
"tooltip": ""
},
{
"type": "action",
"command": "$PYPE_SCRIPTS\\modeling\\transferUVs.py",
"sourcetype": "file",
"tags": ["modeling",
"transfer",
"uv"],
"title": "# Transfer UVs",
"tooltip": ""
},
{
"type": "action",
"command": "$PYPE_SCRIPTS\\modeling\\mirrorSymmetry.py",
"sourcetype": "file",
"tags": ["modeling",
"mirror",
"symmetry"],
"title": "# Mirror Symmetry",
"tooltip": ""
},
{
"type": "action",
"command": "$PYPE_SCRIPTS\\modeling\\selectOutlineUI.py",
"sourcetype": "file",
"tags": ["modeling",
"select",
"outline",
"ui"],
"title": "# Select Outline UI",
"tooltip": ""
},
"items": [
{
"type": "action",
"command": "$PYPE_SCRIPTS\\modeling\\polyDeleteOtherUVSets.py",
@ -77,17 +37,6 @@
"title": "# Polygon Delete Other UV Sets",
"tooltip": ""
},
{
"type": "action",
"command": "$PYPE_SCRIPTS\\modeling\\polyCombineQuick.py",
"sourcetype": "file",
"tags": ["modeling",
"combine",
"polygon",
"quick"],
"title": "# Polygon Combine Quick",
"tooltip": ""
},
{
"type": "action",
"command": "$PYPE_SCRIPTS\\modeling\\separateMeshPerShader.py",
@ -108,16 +57,6 @@
"title": "# Polygon Detach and Separate",
"tooltip": ""
},
{
"type": "action",
"command": "$PYPE_SCRIPTS\\modeling\\polyRelaxVerts.py",
"sourcetype": "file",
"tags": ["modeling",
"relax",
"verts"],
"title": "# Polygon Relax Vertices",
"tooltip": ""
},
{
"type": "action",
"command": "$PYPE_SCRIPTS\\modeling\\polySelectEveryNthEdgeUI.py",
@ -142,322 +81,16 @@
},
{
"type": "menu",
"title": "# Rigging",
"items": [{
"type": "action",
"command": "$PYPE_SCRIPTS\\rigging\\addCurveBetween.py",
"sourcetype": "file",
"tags": ["rigging",
"addCurveBetween",
"file"],
"title": "# Add Curve Between"
},
"title": "Rigging",
"items": [
{
"type": "action",
"command": "$PYPE_SCRIPTS\\rigging\\averageSkinWeights.py",
"command": "$PYPE_SCRIPTS\\rigging\\advancedSkeleton.py",
"sourcetype": "file",
"tags": ["rigging",
"average",
"skin weights",
"autorigger", "advanced", "skeleton", "advancedskeleton",
"file"],
"title": "# Average Skin Weights"
},
{
"type": "action",
"command": "$PYPE_SCRIPTS\\rigging\\cbSmoothSkinWeightUI.py",
"sourcetype": "file",
"tags": ["rigging",
"cbSmoothSkinWeightUI",
"file"],
"title": "# CB Smooth Skin Weight UI"
},
{
"type": "action",
"command": "$PYPE_SCRIPTS\\rigging\\channelBoxManagerUI.py",
"sourcetype": "file",
"tags": ["rigging",
"channelBoxManagerUI",
"file"],
"title": "# Channel Box Manager UI"
},
{
"type": "action",
"command": "$PYPE_SCRIPTS\\rigging\\characterAutorigger.py",
"sourcetype": "file",
"tags": ["rigging",
"characterAutorigger",
"file"],
"title": "# Character Auto Rigger"
},
{
"type": "action",
"command": "$PYPE_SCRIPTS\\rigging\\connectUI.py",
"sourcetype": "file",
"tags": ["rigging",
"connectUI",
"file"],
"title": "# Connect UI"
},
{
"type": "action",
"command": "$PYPE_SCRIPTS\\rigging\\copySkinWeightsLocal.py",
"sourcetype": "file",
"tags": ["rigging",
"copySkinWeightsLocal",
"file"],
"title": "# Copy Skin Weights Local"
},
{
"type": "action",
"command": "$PYPE_SCRIPTS\\rigging\\createCenterLocator.py",
"sourcetype": "file",
"tags": ["rigging",
"createCenterLocator",
"file"],
"title": "# Create Center Locator"
},
{
"type": "action",
"command": "$PYPE_SCRIPTS\\rigging\\freezeTransformToGroup.py",
"sourcetype": "file",
"tags": ["rigging",
"freezeTransformToGroup",
"file"],
"title": "# Freeze Transform To Group"
},
{
"type": "action",
"command": "$PYPE_SCRIPTS\\rigging\\groupSelected.py",
"sourcetype": "file",
"tags": ["rigging",
"groupSelected",
"file"],
"title": "# Group Selected"
},
{
"type": "action",
"command": "$PYPE_SCRIPTS\\rigging\\ikHandlePoleVectorLocator.py",
"sourcetype": "file",
"tags": ["rigging",
"ikHandlePoleVectorLocator",
"file"],
"title": "# IK Handle Pole Vector Locator"
},
{
"type": "action",
"command": "$PYPE_SCRIPTS\\rigging\\jointOrientUI.py",
"sourcetype": "file",
"tags": ["rigging",
"jointOrientUI",
"file"],
"title": "# Joint Orient UI"
},
{
"type": "action",
"command": "$PYPE_SCRIPTS\\rigging\\jointsOnCurve.py",
"sourcetype": "file",
"tags": ["rigging",
"jointsOnCurve",
"file"],
"title": "# Joints On Curve"
},
{
"type": "action",
"command": "$PYPE_SCRIPTS\\rigging\\resetBindSelectedSkinJoints.py",
"sourcetype": "file",
"tags": ["rigging",
"resetBindSelectedSkinJoints",
"file"],
"title": "# Reset Bind Selected Skin Joints"
},
{
"type": "action",
"command": "$PYPE_SCRIPTS\\rigging\\selectSkinclusterJointsFromSelectedComponents.py",
"sourcetype": "file",
"tags": ["rigging",
"selectSkinclusterJointsFromSelectedComponents",
"file"],
"title": "# Select Skincluster Joints From Selected Components"
},
{
"type": "action",
"command": "$PYPE_SCRIPTS\\rigging\\selectSkinclusterJointsFromSelectedMesh.py",
"sourcetype": "file",
"tags": ["rigging",
"selectSkinclusterJointsFromSelectedMesh",
"file"],
"title": "# Select Skincluster Joints From Selected Mesh"
},
{
"type": "action",
"command": "$PYPE_SCRIPTS\\rigging\\setJointLabels.py",
"sourcetype": "file",
"tags": ["rigging",
"setJointLabels",
"file"],
"title": "# Set Joint Labels"
},
{
"type": "action",
"command": "$PYPE_SCRIPTS\\rigging\\setJointOrientationFromCurrentRotation.py",
"sourcetype": "file",
"tags": ["rigging",
"setJointOrientationFromCurrentRotation",
"file"],
"title": "# Set Joint Orientation From Current Rotation"
},
{
"type": "action",
"command": "$PYPE_SCRIPTS\\rigging\\setSelectedJointsOrientationZero.py",
"sourcetype": "file",
"tags": ["rigging",
"setSelectedJointsOrientationZero",
"file"],
"title": "# Set Selected Joints Orientation Zero"
},
{
"type": "action",
"command": "$PYPE_SCRIPTS\\rigging\\mirrorCurveShape.py",
"sourcetype": "file",
"tags": ["rigging",
"mirrorCurveShape",
"file"],
"title": "# Mirror Curve Shape"
},
{
"type": "action",
"command": "$PYPE_SCRIPTS\\rigging\\setRotationOrderUI.py",
"sourcetype": "file",
"tags": ["rigging",
"setRotationOrderUI",
"file"],
"title": "# Set Rotation Order UI"
},
{
"type": "action",
"command": "$PYPE_SCRIPTS\\rigging\\paintItNowUI.py",
"sourcetype": "file",
"tags": ["rigging",
"paintItNowUI",
"file"],
"title": "# Paint It Now UI"
},
{
"type": "action",
"command": "$PYPE_SCRIPTS\\rigging\\parentScaleConstraint.py",
"sourcetype": "file",
"tags": ["rigging",
"parentScaleConstraint",
"file"],
"title": "# Parent Scale Constraint"
},
{
"type": "action",
"command": "$PYPE_SCRIPTS\\rigging\\quickSetWeightsUI.py",
"sourcetype": "file",
"tags": ["rigging",
"quickSetWeightsUI",
"file"],
"title": "# Quick Set Weights UI"
},
{
"type": "action",
"command": "$PYPE_SCRIPTS\\rigging\\rapidRig.py",
"sourcetype": "file",
"tags": ["rigging",
"rapidRig",
"file"],
"title": "# Rapid Rig"
},
{
"type": "action",
"command": "$PYPE_SCRIPTS\\rigging\\regenerate_blendshape_targets.py",
"sourcetype": "file",
"tags": ["rigging",
"regenerate_blendshape_targets",
"file"],
"title": "# Regenerate Blendshape Targets"
},
{
"type": "action",
"command": "$PYPE_SCRIPTS\\rigging\\removeRotationAxis.py",
"sourcetype": "file",
"tags": ["rigging",
"removeRotationAxis",
"file"],
"title": "# Remove Rotation Axis"
},
{
"type": "action",
"command": "$PYPE_SCRIPTS\\rigging\\resetBindSelectedMeshes.py",
"sourcetype": "file",
"tags": ["rigging",
"resetBindSelectedMeshes",
"file"],
"title": "# Reset Bind Selected Meshes"
},
{
"type": "action",
"command": "$PYPE_SCRIPTS\\rigging\\simpleControllerOnSelection.py",
"sourcetype": "file",
"tags": ["rigging",
"simpleControllerOnSelection",
"file"],
"title": "# Simple Controller On Selection"
},
{
"type": "action",
"command": "$PYPE_SCRIPTS\\rigging\\simpleControllerOnSelectionHierarchy.py",
"sourcetype": "file",
"tags": ["rigging",
"simpleControllerOnSelectionHierarchy",
"file"],
"title": "# Simple Controller On Selection Hierarchy"
},
{
"type": "action",
"command": "$PYPE_SCRIPTS\\rigging\\superRelativeCluster.py",
"sourcetype": "file",
"tags": ["rigging",
"superRelativeCluster",
"file"],
"title": "# Super Relative Cluster"
},
{
"type": "action",
"command": "$PYPE_SCRIPTS\\rigging\\tfSmoothSkinWeight.py",
"sourcetype": "file",
"tags": ["rigging",
"tfSmoothSkinWeight",
"file"],
"title": "# TF Smooth Skin Weight"
},
{
"type": "action",
"command": "$PYPE_SCRIPTS\\rigging\\toggleIntermediates.py",
"sourcetype": "file",
"tags": ["rigging",
"toggleIntermediates",
"file"],
"title": "# Toggle Intermediates"
},
{
"type": "action",
"command": "$PYPE_SCRIPTS\\rigging\\toggleSegmentScaleCompensate.py",
"sourcetype": "file",
"tags": ["rigging",
"toggleSegmentScaleCompensate",
"file"],
"title": "# Toggle Segment Scale Compensate"
},
{
"type": "action",
"command": "$PYPE_SCRIPTS\\rigging\\toggleSkinclusterDeformNormals.py",
"sourcetype": "file",
"tags": ["rigging",
"toggleSkinclusterDeformNormals",
"file"],
"title": "# Toggle Skincluster Deform Normals"
"title": "Advanced Skeleton"
}]
},
{
@ -841,7 +474,7 @@
},
{
"type": "menu",
"title": "# Animation",
"title": "Animation",
"items": [{
"type": "menu",
"title": "# Attributes",
@ -1060,10 +693,10 @@
},
{
"sourcetype": "file",
"command": "$PYPE_SCRIPTS\\animation\\poseLibrary.py",
"command": "$PYPE_SCRIPTS\\animation\\animLibrary.py",
"tags": ["animation",
"poseLibrary.py"],
"title": "# Pose Library",
"studiolibrary.py"],
"title": "Anim Library",
"type": "action"
}]
},
@ -1220,51 +853,6 @@
"title": "# Instancer To Objects Instances",
"tooltip": ""
},
{
"type": "action",
"command": "$PYPE_SCRIPTS\\particles\\objectsToParticlesAndInstancerCleanSource.py",
"sourcetype": "file",
"tags": ["particles",
"objects",
"Particles",
"Instancer",
"Clean",
"Source"],
"title": "# Objects To Particles & Instancer - Clean Source",
"tooltip": ""
},
{
"type": "action",
"command": "$PYPE_SCRIPTS\\particles\\particleComponentsToLocators.py",
"sourcetype": "file",
"tags": ["particles",
"components",
"locators"],
"title": "# Particle Components To Locators",
"tooltip": ""
},
{
"type": "action",
"command": "$PYPE_SCRIPTS\\particles\\objectsToParticlesAndInstancer.py",
"sourcetype": "file",
"tags": ["particles",
"objects",
"particles",
"instancer"],
"title": "# Objects To Particles And Instancer",
"tooltip": ""
},
{
"type": "action",
"command": "$PYPE_SCRIPTS\\particles\\spawnParticlesOnMesh.py",
"sourcetype": "file",
"tags": ["particles",
"spawn",
"on",
"mesh"],
"title": "# Spawn Particles On Mesh",
"tooltip": ""
},
{
"type": "action",
"command": "$PYPE_SCRIPTS\\particles\\instancerToObjectsInstancesWithAnimation.py",
@ -1274,42 +862,6 @@
"title": "# Instancer To Objects Instances With Animation",
"tooltip": ""
},
{
"type": "action",
"command": "$PYPE_SCRIPTS\\particles\\objectsToParticles.py",
"sourcetype": "file",
"tags": ["particles",
"objectsToParticles"],
"title": "# Objects To Particles",
"tooltip": ""
},
{
"type": "action",
"command": "$PYPE_SCRIPTS\\particles\\add_particle_cacheFile_attrs.py",
"sourcetype": "file",
"tags": ["particles",
"add_particle_cacheFile_attrs"],
"title": "# Add Particle CacheFile Attributes",
"tooltip": ""
},
{
"type": "action",
"command": "$PYPE_SCRIPTS\\particles\\mergeParticleSystems.py",
"sourcetype": "file",
"tags": ["particles",
"mergeParticleSystems"],
"title": "# Merge Particle Systems",
"tooltip": ""
},
{
"type": "action",
"command": "$PYPE_SCRIPTS\\particles\\particlesToLocators.py",
"sourcetype": "file",
"tags": ["particles",
"particlesToLocators"],
"title": "# Particles To Locators",
"tooltip": ""
},
{
"type": "action",
"command": "$PYPE_SCRIPTS\\particles\\instancerToObjectsWithAnimation.py",
@ -1318,61 +870,11 @@
"instancerToObjectsWithAnimation"],
"title": "# Instancer To Objects With Animation",
"tooltip": ""
},
{
"type": "separator"
},
{
"type": "action",
"command": "$PYPE_SCRIPTS\\particles\\mayaReplicateHoudiniTool.py",
"sourcetype": "file",
"tags": ["particles",
"houdini",
"houdiniTool",
"houdiniEngine"],
"title": "# Replicate Houdini Tool",
"tooltip": ""
},
{
"type": "separator"
},
{
"type": "action",
"command": "$PYPE_SCRIPTS\\particles\\clearInitialState.py",
"sourcetype": "file",
"tags": ["particles",
"clearInitialState"],
"title": "# Clear Initial State",
"tooltip": ""
},
{
"type": "action",
"command": "$PYPE_SCRIPTS\\particles\\killSelectedParticles.py",
"sourcetype": "file",
"tags": ["particles",
"killSelectedParticles"],
"title": "# Kill Selected Particles",
"tooltip": ""
}]
},
{
"type": "menu",
"title": "# Yeti",
"items": [{
"type": "action",
"command": "$PYPE_SCRIPTS\\yeti\\yeti_rig_manager.py",
"sourcetype": "file",
"tags": ["yeti",
"rig",
"fur",
"manager"],
"title": "# Open Yeti Rig Manager",
"tooltip": ""
}]
},
{
"type": "menu",
"title": "# Cleanup",
"title": "Cleanup",
"items": [{
"type": "action",
"command": "$PYPE_SCRIPTS\\cleanup\\repair_faulty_containers.py",
@ -1383,35 +885,6 @@
"title": "# Find and Repair Containers",
"tooltip": ""
},
{
"type": "action",
"command": "$PYPE_SCRIPTS\\cleanup\\selectByType.py",
"sourcetype": "file",
"tags": ["cleanup",
"selectByType"],
"title": "# Select By Type",
"tooltip": ""
},
{
"type": "action",
"command": "$PYPE_SCRIPTS\\cleanup\\selectIntermediateObjects.py",
"sourcetype": "file",
"tags": ["cleanup",
"selectIntermediateObjects"],
"title": "# Select Intermediate Objects",
"tooltip": ""
},
{
"type": "action",
"command": "$PYPE_SCRIPTS\\cleanup\\selectNonUniqueNames.py",
"sourcetype": "file",
"tags": ["cleanup",
"select",
"non unique",
"names"],
"title": "# Select Non Unique Names",
"tooltip": ""
},
{
"type": "separator"
},
@ -1470,29 +943,9 @@
"title": "# Remove Unused Looks",
"tooltip": "Remove all loaded yet unused Avalon look containers"
},
{
"type": "action",
"command": "$PYPE_SCRIPTS\\cleanup\\deleteGhostIntermediateObjects.py",
"sourcetype": "file",
"tags": ["cleanup",
"deleteGhostIntermediateObjects"],
"title": "# Delete Ghost Intermediate Objects",
"tooltip": ""
},
{
"type": "separator"
},
{
"type": "action",
"command": "$PYPE_SCRIPTS\\cleanup\\resetViewportCache.py",
"sourcetype": "file",
"tags": ["cleanup",
"reset",
"viewport",
"cache"],
"title": "# Reset Viewport Cache",
"tooltip": ""
},
{
"type": "action",
"command": "$PYPE_SCRIPTS\\cleanup\\uniqifyNodeNames.py",
@ -1527,13 +980,13 @@
},
{
"type": "action",
"command": "$PYPE_SCRIPTS\\cleanup\\colorbleedRename.py",
"command": "$PYPE_SCRIPTS\\cleanup\\ccRenameReplace.py",
"sourcetype": "file",
"tags": ["cleanup",
"rename",
"ui"],
"title": "# Colorbleed Renamer",
"tooltip": "Colorbleed Rename UI"
"title": "Renamer",
"tooltip": "Rename UI"
},
{
"type": "action",
@ -1543,225 +996,5 @@
"renameShapesToTransform"],
"title": "# Rename Shapes To Transform",
"tooltip": ""
},
{
"type": "action",
"command": "$PYPE_SCRIPTS\\cleanup\\reorderUI.py",
"sourcetype": "file",
"tags": ["cleanup",
"reorderUI"],
"title": "# Reorder UI",
"tooltip": ""
},
{
"type": "action",
"command": "$PYPE_SCRIPTS\\cleanup\\pastedCleaner.py",
"sourcetype": "file",
"tags": ["cleanup",
"pastedCleaner"],
"title": "# Pasted Cleaner",
"tooltip": ""
}]
},
{
"type": "menu",
"title": "# Others",
"items": [{
"type": "menu",
"sourcetype": "file",
"title": "# Yeti",
"items": [{
"type": "action",
"command": "$PYPE_SCRIPTS\\others\\yeti\\cache_selected_yeti_nodes.py",
"sourcetype": "file",
"tags": ["others",
"yeti",
"cache",
"selected"],
"title": "# Cache Selected Yeti Nodes",
"tooltip": ""
}]
},
{
"type": "menu",
"title": "# Hair",
"tooltip": "",
"items": [{
"type": "action",
"command": "$PYPE_SCRIPTS\\others\\hair\\recolorHairCurrentCurve",
"sourcetype": "file",
"tags": ["others",
"selectSoftSelection"],
"title": "# Select Soft Selection",
"tooltip": ""
}]
},
{
"type": "menu",
"command": "$PYPE_SCRIPTS\\others\\display",
"sourcetype": "file",
"tags": ["others",
"display"],
"title": "# Display",
"items": [{
"type": "action",
"command": "$PYPE_SCRIPTS\\others\\display\\wireframeSelectedObjects.py",
"sourcetype": "file",
"tags": ["others",
"wireframe",
"selected",
"objects"],
"title": "# Wireframe Selected Objects",
"tooltip": ""
}]
},
{
"type": "action",
"command": "$PYPE_SCRIPTS\\others\\archiveSceneUI.py",
"sourcetype": "file",
"tags": ["others",
"archiveSceneUI"],
"title": "# Archive Scene UI",
"tooltip": ""
},
{
"type": "action",
"command": "$PYPE_SCRIPTS\\others\\getSimilarMeshes.py",
"sourcetype": "file",
"tags": ["others",
"getSimilarMeshes"],
"title": "# Get Similar Meshes",
"tooltip": ""
},
{
"type": "action",
"command": "$PYPE_SCRIPTS\\others\\createBoundingBoxEachSelected.py",
"sourcetype": "file",
"tags": ["others",
"createBoundingBoxEachSelected"],
"title": "# Create BoundingBox Each Selected",
"tooltip": ""
},
{
"type": "action",
"command": "$PYPE_SCRIPTS\\others\\curveFromPositionEveryFrame.py",
"sourcetype": "file",
"tags": ["others",
"curveFromPositionEveryFrame"],
"title": "# Curve From Position",
"tooltip": ""
},
{
"type": "action",
"command": "$PYPE_SCRIPTS\\others\\instanceLeafSmartTransform.py",
"sourcetype": "file",
"tags": ["others",
"instance",
"leaf",
"smart",
"transform"],
"title": "# Instance Leaf Smart Transform",
"tooltip": ""
},
{
"type": "action",
"command": "$PYPE_SCRIPTS\\others\\instanceSmartTransform.py",
"sourcetype": "file",
"tags": ["others",
"instance",
"smart",
"transform"],
"title": "# Instance Smart Transform",
"tooltip": ""
},
{
"type": "action",
"command": "$PYPE_SCRIPTS\\others\\randomizeUVShellsSelectedObjects.py",
"sourcetype": "file",
"tags": ["others",
"randomizeUVShellsSelectedObjects"],
"title": "# Randomize UV Shells",
"tooltip": "Select objects before running action"
},
{
"type": "action",
"command": "$PYPE_SCRIPTS\\others\\centerPivotGroup.py",
"sourcetype": "file",
"tags": ["others",
"centerPivotGroup"],
"title": "# Center Pivot Group",
"tooltip": ""
},
{
"type": "separator"
},
{
"type": "action",
"command": "$PYPE_SCRIPTS\\others\\locatorsOnSelectedFaces.py",
"sourcetype": "file",
"tags": ["others",
"locatorsOnSelectedFaces"],
"title": "# Locators On Selected Faces",
"tooltip": ""
},
{
"type": "action",
"command": "$PYPE_SCRIPTS\\others\\locatorsOnEdgeSelectionPrompt.py",
"sourcetype": "file",
"tags": ["others",
"locatorsOnEdgeSelectionPrompt"],
"title": "# Locators On Edge Selection Prompt",
"tooltip": ""
},
{
"type": "separator"
},
{
"type": "action",
"command": "$PYPE_SCRIPTS\\others\\copyDeformers.py",
"sourcetype": "file",
"tags": ["others",
"copyDeformers"],
"title": "# Copy Deformers",
"tooltip": ""
},
{
"type": "action",
"command": "$PYPE_SCRIPTS\\others\\selectInReferenceEditor.py",
"sourcetype": "file",
"tags": ["others",
"selectInReferenceEditor"],
"title": "# Select In Reference Editor",
"tooltip": ""
},
{
"type": "action",
"command": "$PYPE_SCRIPTS\\others\\selectConstrainingObject.py",
"sourcetype": "file",
"tags": ["others",
"selectConstrainingObject"],
"title": "# Select Constraining Object",
"tooltip": ""
},
{
"type": "action",
"command": "$PYPE_SCRIPTS\\others\\deformerSetRelationsUI.py",
"sourcetype": "file",
"tags": ["others",
"deformerSetRelationsUI"],
"title": "# Deformer Set Relations UI",
"tooltip": ""
},
{
"type": "action",
"command": "$PYPE_SCRIPTS\\others\\recreateBaseNodesForAllLatticeNodes.py",
"sourcetype": "file",
"tags": ["others",
"recreate",
"base",
"nodes",
"lattice"],
"title": "# Recreate Base Nodes For Lattice Nodes",
"tooltip": ""
}]
}]

View file

@ -5,13 +5,13 @@
"title": "Version Up",
"tooltip": "Incremental save with a specific format"
},
/* {
{
"type": "action",
"command": "$PYPE_SCRIPTS\\others\\show_current_scene_in_explorer.py",
"sourcetype": "file",
"title": "Explore current scene..",
"tooltip": "Show current scene in Explorer"
}, */
},
{
"type": "action",
"command": "$PYPE_SCRIPTS\\avalon\\launch_manager.py",

View file

@ -53,7 +53,7 @@ class NukeHandler(api.Logger.logging.Handler):
msg = self.format(record)
if record.levelname.lower() in [
"warning",
# "warning",
"critical",
"fatal",
"error"
@ -67,6 +67,7 @@ if nuke_handler.get_name() \
not in [handler.get_name()
for handler in api.Logger.logging.root.handlers[:]]:
api.Logger.logging.getLogger().addHandler(nuke_handler)
api.Logger.logging.getLogger().setLevel(api.Logger.logging.INFO)
if not self.nLogger:
self.nLogger = api.Logger
@ -86,6 +87,7 @@ def reload_config():
"app.api",
"{}.api".format(AVALON_CONFIG),
"{}.templates".format(AVALON_CONFIG),
"{}.nuke.actions".format(AVALON_CONFIG),
"{}.nuke.templates".format(AVALON_CONFIG),
"{}.nuke.menu".format(AVALON_CONFIG)
):
@ -113,12 +115,7 @@ def install():
# Disable all families except for the ones we explicitly want to see
family_states = [
"write",
"lifeGroup",
"backdrop",
"imagesequence",
"mov"
"camera",
"pointcache",
"review"
]
avalon.data["familiesStateDefault"] = False

58
pype/nuke/actions.py Normal file
View file

@ -0,0 +1,58 @@
# absolute_import is needed to counter the `module has no cmds` error in Maya
from __future__ import absolute_import
import pyblish.api
from avalon.nuke.lib import (
reset_selection,
select_nodes
)
from ..action import get_errored_instances_from_context
class SelectInvalidAction(pyblish.api.Action):
"""Select invalid nodes in Maya when plug-in failed.
To retrieve the invalid nodes this assumes a static `get_invalid()`
method is available on the plugin.
"""
label = "Select invalid nodes"
on = "failed" # This action is only available on a failed plug-in
icon = "search" # Icon from Awesome Icon
def process(self, context, plugin):
try:
import nuke
except ImportError:
raise ImportError("Current host is not Nuke")
errored_instances = get_errored_instances_from_context(context)
# Apply pyblish.logic to get the instances for the plug-in
instances = pyblish.api.instances_by_plugin(errored_instances, plugin)
# Get the invalid nodes for the plug-ins
self.log.info("Finding invalid nodes..")
invalid = list()
for instance in instances:
invalid_nodes = plugin.get_invalid(instance)
if invalid_nodes:
if isinstance(invalid_nodes, (list, tuple)):
invalid.append(invalid_nodes[0])
else:
self.log.warning("Plug-in returned to be invalid, "
"but has no selectable nodes.")
# Ensure unique (process each node only once)
invalid = list(set(invalid))
if invalid:
self.log.info("Selecting invalid nodes: {}".format(invalid))
reset_selection()
select_nodes(invalid)
else:
self.log.info("No invalid nodes found.")

View file

@ -12,6 +12,50 @@ self = sys.modules[__name__]
self._project = None
def onScriptLoad():
if nuke.env['LINUX']:
nuke.tcl('load ffmpegReader')
nuke.tcl('load ffmpegWriter')
else:
nuke.tcl('load movReader')
nuke.tcl('load movWriter')
def writes_version_sync():
try:
rootVersion = pype.get_version_from_path(nuke.root().name())
padding = len(rootVersion)
new_version = str("{" + ":0>{}".format(padding) + "}").format(
int(rootVersion)
)
log.info("new_version: {}".format(new_version))
except Exception:
return
for each in nuke.allNodes():
if each.Class() == 'Write':
avalon_knob_data = get_avalon_knob_data(each)
if avalon_knob_data['families'] not in ["render"]:
log.info(avalon_knob_data['families'])
continue
try:
node_file = each['file'].value()
log.info("node_file: {}".format(node_file))
node_version = pype.get_version_from_path(node_file, None)
log.info("node_version: {}".format(node_version))
node_new_file = node_file.replace(node_version, new_version)
each['file'].setValue(node_new_file)
except Exception as e:
log.debug("Write node: `{}` has no version in path: {}".format(each.name(), e))
def version_up_script():
import nukescripts
nukescripts.script_and_write_nodes_version_up()
def format_anatomy(data):
from .templates import (
get_anatomy
@ -26,7 +70,7 @@ def format_anatomy(data):
data.update({
"hierarchy": pype.get_hierarchy(),
"frame": "#"*padding,
"VERSION": pype.get_version_from_workfile(file)
"VERSION": pype.get_version_from_path(file)
})
# log.info("format_anatomy:anatomy: {}".format(anatomy))

View file

@ -283,7 +283,7 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin):
# Inform user about no changes to the database.
if (component_entity and not component_overwrite and
not new_component):
not new_component):
data["component"] = component_entity
self.log.info(
"Found existing component, and no request to overwrite. "

View file

@ -1,6 +1,5 @@
import pyblish.api
import os
import clique
import json
@ -25,9 +24,14 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin):
'pointcache': 'cache',
'write': 'img',
'render': 'render',
'nukescript': 'comp',
'review': 'mov'}
exclude = []
def process(self, instance):
for ex in self.exclude:
if ex in instance.data['families']:
return
self.log.debug('instance {}'.format(instance))
@ -59,21 +63,21 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin):
component_data = {
"name": "ftrackreview-mp4", # Default component name is "main".
"metadata": {'ftr_meta': json.dumps({
'frameIn': int(instance.data["startFrame"]),
'frameOut': int(instance.data["startFrame"]),
'frameRate': 25})}
}
'frameIn': int(instance.data["startFrame"]),
'frameOut': int(instance.data["endFrame"]),
'frameRate': 25})}
}
elif ext in [".jpg"]:
component_data = {
"name": "thumbnail" # Default component name is "main".
}
}
thumbnail = True
location = ft_session.query(
'Location where name is "ftrack.server"').one()
else:
component_data = {
"name": ext[1:] # Default component name is "main".
}
}
location = ft_session.query(
'Location where name is "ftrack.unmanaged"').one()

View file

@ -0,0 +1,153 @@
import os
import subprocess
import pyblish.api
import filelink
class ExtractTranscode(pyblish.api.InstancePlugin):
"""Extracts review movie from image sequence.
Offset to get images to transcode from.
"""
order = pyblish.api.ExtractorOrder + 0.1
label = "Transcode"
optional = True
families = ["review"]
def find_previous_index(self, index, indexes):
"""Finds the closest previous value in a list from a value."""
data = []
for i in indexes:
if i >= index:
continue
data.append(index - i)
return indexes[data.index(min(data))]
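# Worked example (hypothetical values): with indexes [1001, 1002, 1005] and
# index 1004, the distances to smaller indexes are [3, 2], so the closest
# previous index returned is 1002.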
def process(self, instance):
if "collection" in instance.data.keys():
self.process_image(instance)
if "output_path" in instance.data.keys():
self.process_movie(instance)
def process_image(self, instance):
collection = instance.data.get("collection", [])
if not list(collection):
msg = "Skipping \"{0}\" because no frames was found."
self.log.warning(msg.format(instance.data["name"]))
return
# Temporary fill the missing frames.
missing = collection.holes()
if not collection.is_contiguous():
pattern = collection.format("{head}{padding}{tail}")
for index in missing.indexes:
dst = pattern % index
src_index = self.find_previous_index(
index, list(collection.indexes)
)
src = pattern % src_index
filelink.create(src, dst)
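# e.g. (hypothetical frames) if only 1001, 1002 and 1005 exist on disk,
# holes 1003 and 1004 are linked from 1002, the closest previous frame,
# and removed again once the transcode below has finished.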
# Generate args.
# Has to be yuv420p for compatibility with older players and smooth
# playback. This does come with a sacrifice of more visible banding
# issues.
# -crf 18 is visually lossless.
args = [
"ffmpeg", "-y",
"-start_number", str(min(collection.indexes)),
"-framerate", str(instance.context.data["framerate"]),
"-i", collection.format("{head}{padding}{tail}"),
"-pix_fmt", "yuv420p",
"-crf", "18",
"-timecode", "00:00:00:01",
"-vframes",
str(max(collection.indexes) - min(collection.indexes) + 1),
"-vf",
"scale=trunc(iw/2)*2:trunc(ih/2)*2",
]
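# With hypothetical values this builds roughly:
#   ffmpeg -y -start_number 1001 -framerate 24 -i shot.%04d.exr
#          -pix_fmt yuv420p -crf 18 -timecode 00:00:00:01
#          -vframes 24 -vf scale=trunc(iw/2)*2:trunc(ih/2)*2 shot.mov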
if instance.data.get("baked_colorspace_movie"):
args = [
"ffmpeg", "-y",
"-i", instance.data["baked_colorspace_movie"],
"-pix_fmt", "yuv420p",
"-crf", "18",
"-timecode", "00:00:00:01",
]
args.append(collection.format("{head}.mov"))
self.log.debug("Executing args: {0}".format(args))
# Can't use subprocess.check_output, cause Houdini doesn't like that.
p = subprocess.Popen(
args,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
stdin=subprocess.PIPE,
cwd=os.path.dirname(args[-1])
)
output = p.communicate()[0]
# Remove temporary frame fillers
for f in missing:
os.remove(f)
if p.returncode != 0:
raise ValueError(output)
self.log.debug(output)
def process_movie(self, instance):
# Generate args.
# Has to be yuv420p for compatibility with older players and smooth
# playback. This does come with a sacrifice of more visible banding
# issues.
args = [
"ffmpeg", "-y",
"-i", instance.data["output_path"],
"-pix_fmt", "yuv420p",
"-crf", "18",
"-timecode", "00:00:00:01",
]
if instance.data.get("baked_colorspace_movie"):
args = [
"ffmpeg", "-y",
"-i", instance.data["baked_colorspace_movie"],
"-pix_fmt", "yuv420p",
"-crf", "18",
"-timecode", "00:00:00:01",
]
split = os.path.splitext(instance.data["output_path"])
args.append(split[0] + "_review.mov")
self.log.debug("Executing args: {0}".format(args))
# Can't use subprocess.check_output, cause Houdini doesn't like that.
p = subprocess.Popen(
args,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
stdin=subprocess.PIPE,
cwd=os.path.dirname(args[-1])
)
output = p.communicate()[0]
if p.returncode != 0:
raise ValueError(output)
self.log.debug(output)

View file

@ -1,7 +1,5 @@
from app.api import (
Templates
)
import pype.api as pype
import pyblish.api
@ -13,8 +11,6 @@ class CollectTemplates(pyblish.api.ContextPlugin):
label = "Collect Templates"
def process(self, context):
"""Inject the current working file"""
templates = Templates(
type=["anatomy"]
)
context.data['anatomy'] = templates.anatomy
pype.load_data_from_templates()
context.data['anatomy'] = pype.Anatomy
self.log.info("Anatomy templates collected...")

View file

@ -35,8 +35,10 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
"vrayproxy",
"yetiRig",
"yeticache",
"nukescript",
"review",
"scene"]
"scene",
"ass"]
def process(self, instance):
@ -46,7 +48,6 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
self.integrate(instance)
def register(self, instance):
# Required environment variables
PROJECT = api.Session["AVALON_PROJECT"]
ASSET = instance.data.get("asset") or api.Session["AVALON_ASSET"]
@ -136,7 +137,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
# \|________|
#
root = api.registered_root()
hierarchy = io.find_one({"type":'asset', "name":ASSET})['data']['parents']
hierarchy = io.find_one({"type": 'asset', "name": ASSET})['data']['parents']
if hierarchy:
# hierarchy = os.path.sep.join(hierarchy)
hierarchy = os.path.join(*hierarchy)
@ -171,7 +172,6 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
# |_______|
#
if isinstance(files, list):
collection = files
# Assert that each member has identical suffix
@ -229,17 +229,17 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
# Imprint shortcut to context
# for performance reasons.
"context": {
"root": root,
"project": {"name": PROJECT,
"code": project['data']['code']},
'task': api.Session["AVALON_TASK"],
"silo": asset['silo'],
"asset": ASSET,
"family": instance.data['family'],
"subset": subset["name"],
"version": version["name"],
"hierarchy": hierarchy,
"representation": ext[1:]
"root": root,
"project": {"name": PROJECT,
"code": project['data']['code']},
'task': api.Session["AVALON_TASK"],
"silo": asset['silo'],
"asset": ASSET,
"family": instance.data['family'],
"subset": subset["name"],
"version": version["name"],
"hierarchy": hierarchy,
"representation": ext[1:]
}
}

View file

@ -24,13 +24,23 @@ class IntegrateFrames(pyblish.api.InstancePlugin):
label = "Integrate Frames"
order = pyblish.api.IntegratorOrder
families = ["prerendered.frames", "imagesequence", "render"]
families = ["imagesequence", "render", "write", "source"]
family_targets = [".frames", ".local", ".review", "imagesequence", "render"]
def process(self, instance):
families = [f for f in instance.data["families"]
for search in self.family_targets
if search in f]
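# e.g. (hypothetical) an instance with families ["render.local"] matches
# the ".local" target and is integrated; one with only ["workfile"]
# matches nothing and is skipped below.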
if not families:
return
self.register(instance)
self.log.info("Integrating Asset in to the database ...")
self.log.info("instance.data: {}".format(instance.data))
if instance.data.get('transfer', True):
self.integrate(instance)
@ -111,11 +121,9 @@ class IntegrateFrames(pyblish.api.InstancePlugin):
locations=[LOCATION],
data=version_data)
self.log.debug("version: {}".format(version))
self.log.debug("Creating version ...")
version_id = io.insert_one(version).inserted_id
self.log.debug("version_id: {}".format(version_id))
# Write to disk
# _
# | |
@ -128,11 +136,10 @@ class IntegrateFrames(pyblish.api.InstancePlugin):
#
root = api.registered_root()
hierarchy = io.find_one({"type": 'asset', "name": ASSET})['data']['parents']
if hierarchy:
# hierarchy = os.path.sep.join(hierarchy)
hierarchy = os.path.join(*hierarchy)
self.log.debug("hierarchy: {}".format(hierarchy))
template_data = {"root": root,
"project": {"name": PROJECT,
"code": project['data']['code']},
@ -151,7 +158,8 @@ class IntegrateFrames(pyblish.api.InstancePlugin):
# Each should be a single representation (as such, a single extension)
representations = []
destination_list = []
self.log.debug("integrate_frames:instance.data[files]: {}".format(
instance.data["files"]))
for files in instance.data["files"]:
# Collection
# _______
@ -164,30 +172,35 @@ class IntegrateFrames(pyblish.api.InstancePlugin):
#
if isinstance(files, list):
collection = files
src_collections, remainder = clique.assemble(files)
src_collection = src_collections[0]
# Assert that each member has identical suffix
src_head = src_collection.format("{head}")
src_tail = ext = src_collection.format("{tail}")
dst_collection = []
for fname in collection:
filename, ext = os.path.splitext(fname)
_, frame = os.path.splitext(filename)
template_data["representation"] = ext[1:]
template_data["frame"] = frame[1:]
src = os.path.join(stagingdir, fname)
test_dest_files = list()
for i in [1, 2]:
template_data["representation"] = src_tail[1:]
template_data["frame"] = src_collection.format(
"{padding}") % i
anatomy_filled = anatomy.format(template_data)
dst = anatomy_filled.render.path
test_dest_files.append(anatomy_filled.render.path)
dst_collection.append(dst)
dst_collections, remainder = clique.assemble(test_dest_files)
dst_collection = dst_collections[0]
dst_head = dst_collection.format("{head}")
dst_tail = dst_collection.format("{tail}")
for i in src_collection.indexes:
src_padding = src_collection.format("{padding}") % i
src_file_name = "{0}{1}{2}".format(src_head, src_padding, src_tail)
dst_padding = dst_collection.format("{padding}") % i
dst = "{0}{1}{2}".format(dst_head, dst_padding, dst_tail)
src = os.path.join(stagingdir, src_file_name)
instance.data["transfers"].append([src, dst])
template = anatomy.render.path
collections, remainder = clique.assemble(dst_collection)
dst = collections[0].format('{head}{padding}{tail}')
else:
# Single file
# _______
@ -197,7 +210,14 @@ class IntegrateFrames(pyblish.api.InstancePlugin):
# | |
# |_______|
#
template_data.pop("frame", None)
anatomy.pop("frame", None)
fname = files
self.log.info("fname: {}".format(fname))
assert not os.path.isabs(fname), (
"Given file name is a full path"
)
@ -206,11 +226,12 @@ class IntegrateFrames(pyblish.api.InstancePlugin):
template_data["representation"] = ext[1:]
src = os.path.join(stagingdir, fname)
anatomy_filled = anatomy.format(template_data)
dst = anatomy_filled.render.path
template = anatomy.render.path
instance.data["transfers"].append([src, dst])
instance.data["transfers"].append([src, dst])
representation = {
"schema": "pype:representation-2.0",
@ -236,12 +257,12 @@ class IntegrateFrames(pyblish.api.InstancePlugin):
"representation": ext[1:]
}
}
destination_list.append(dst)
instance.data['destination_list'] = destination_list
representations.append(representation)
self.log.info("Registering {} items".format(len(representations)))
io.insert_many(representations)
def integrate(self, instance):
@ -256,6 +277,11 @@ class IntegrateFrames(pyblish.api.InstancePlugin):
transfers = instance.data["transfers"]
for src, dest in transfers:
src = os.path.normpath(src)
dest = os.path.normpath(dest)
# skip transfers where source and destination are the same file
if src == dest:
continue
self.log.info("Copying file .. {} -> {}".format(src, dest))
self.copy_file(src, dest)

View file

@ -0,0 +1,32 @@
from collections import OrderedDict
import avalon.maya
from maya import cmds
class CreateAss(avalon.maya.Creator):
"""Arnold Archive"""
name = "ass"
label = "Ass StandIn"
family = "ass"
icon = "cube"
def process(self):
instance = super(CreateAss, self).process()
data = OrderedDict(**self.data)
nodes = list()
if (self.options or {}).get("useSelection"):
nodes = cmds.ls(selection=True)
cmds.sets(nodes, rm=instance)
assContent = cmds.sets(name="content_SET")
assProxy = cmds.sets(name="proxy_SET", empty=True)
cmds.sets([assContent, assProxy], forceElement=instance)
self.data = data

View file

@ -0,0 +1,148 @@
from avalon import api
import pype.maya.plugin
import os
class AssProxyLoader(pype.maya.plugin.ReferenceLoader):
"""Load the Proxy"""
families = ["ass"]
representations = ["ass"]
label = "Reference .ASS standin with Proxy"
order = -10
icon = "code-fork"
color = "orange"
def process_reference(self, context, name, namespace, data):
import maya.cmds as cmds
from avalon import maya
import pymel.core as pm
with maya.maintained_selection():
groupName = "{}:{}".format(namespace, name)
path = self.fname
proxyPath = os.path.splitext(path)[0] + ".ma"
nodes = cmds.file(proxyPath,
namespace=namespace,
reference=True,
returnNewNodes=True,
groupReference=True,
groupName=groupName)
cmds.makeIdentity(groupName, apply=False, rotate=True, translate=True, scale=True)
# Set attributes
proxyShape = pm.ls(nodes, type="mesh")[0]
proxyShape.aiTranslator.set('procedural')
proxyShape.dso.set(path)
proxyShape.aiOverrideShaders.set(0)
self[:] = nodes
return nodes
def switch(self, container, representation):
self.update(container, representation)
class AssStandinLoader(api.Loader):
"""Load .ASS file as standin"""
families = ["ass"]
representations = ["ass"]
label = "Load .ASS file as standin"
order = -5
icon = "code-fork"
color = "orange"
def load(self, context, name, namespace, data):
import maya.cmds as cmds
import avalon.maya.lib as lib
from avalon.maya.pipeline import containerise
import mtoa.ui.arnoldmenu
import pymel.core as pm
asset = context['asset']['name']
namespace = namespace or lib.unique_namespace(
asset + "_",
prefix="_" if asset[0].isdigit() else "",
suffix="_",
)
# cmds.loadPlugin("gpuCache", quiet=True)
# Root group
label = "{}:{}".format(namespace, name)
root = pm.group(name=label, empty=True)
# Create transform with shape
transform_name = label + "_ASS"
# transform = pm.createNode("transform", name=transform_name,
# parent=root)
standinShape = pm.PyNode(mtoa.ui.arnoldmenu.createStandIn())
standin = standinShape.getParent()
standin.rename(transform_name)
pm.parent(standin, root)
# Set the standin filepath
standinShape.dso.set(self.fname)
# Lock parenting of the transform and standin
cmds.lockNode([root, standin], lock=True)
nodes = [root, standin]
self[:] = nodes
return containerise(
name=name,
namespace=namespace,
nodes=nodes,
context=context,
loader=self.__class__.__name__)
def update(self, container, representation):
import pymel.core as pm
path = api.get_representation_path(representation)
# Update the standin
members = pm.sets(container['objectName'], query=True)
standins = pm.ls(members, type="AiStandIn", long=True)
assert len(standins) == 1, "This is a bug"
for standin in standins:
    # aiStandIn nodes reference the archive through their `dso` attribute
    standin.dso.set(path)
container = pm.PyNode(container["objectName"])
container.representation.set(str(representation["_id"]))
def switch(self, container, representation):
self.update(container, representation)
def remove(self, container):
import maya.cmds as cmds
members = cmds.sets(container['objectName'], query=True)
cmds.lockNode(members, lock=False)
cmds.delete([container['objectName']] + members)
# Clean up the namespace
try:
cmds.namespace(removeNamespace=container['namespace'],
deleteNamespaceContent=True)
except RuntimeError:
pass

View file

@ -0,0 +1,35 @@
from maya import cmds
import pymel.core as pm
import pyblish.api
import avalon.api
class CollectAssData(pyblish.api.InstancePlugin):
"""Collect Ass data
"""
order = pyblish.api.CollectorOrder + 0.2
label = 'Collect Ass'
families = ["ass"]
def process(self, instance):
context = instance.context
objsets = instance.data['setMembers']
for objset in objsets:
members = cmds.sets(objset, query=True)
if members is None:
self.log.warning("Skipped empty instance: \"%s\" " % objset)
continue
if objset == "content_SET":
instance.data['setMembers'] = members
elif objset == "proxy_SET":
assert len(members) == 1, "You have multiple proxy meshes, please only use one"
instance.data['proxy'] = members
self.log.debug("data: {}".format(instance.data))

View file

@ -0,0 +1,47 @@
import os
import avalon.maya
import pype.api
from maya import cmds
class ExtractAssStandin(pype.api.Extractor):
"""Extract the content of the instance to a ass file
Things to pay attention to:
- If animation is toggled, are the frames correct
-
"""
label = "Ass Standin (.ass)"
hosts = ["maya"]
families = ["ass"]
def process(self, instance):
staging_dir = self.staging_dir(instance)
file_name = "{}.ass".format(instance.name)
file_path = os.path.join(staging_dir, file_name)
# Write out .ass file
self.log.info("Writing: '%s'" % file_path)
with avalon.maya.maintained_selection():
self.log.info("Writing: {}".format(instance.data["setMembers"]))
cmds.select(instance.data["setMembers"], noExpand=True)
cmds.arnoldExportAss(filename=file_path,
selected=True,
asciiAss=True,
shadowLinks=True,
lightLinks=True,
boundingBox=True
)
if "files" not in instance.data:
instance.data["files"] = list()
instance.data["files"].append(file_name)
self.log.info("Extracted instance '%s' to: %s"
% (instance.name, staging_dir))

View file

@ -0,0 +1,73 @@
import os
from maya import cmds
import contextlib
import avalon.maya
import pype.api
import pype.maya.lib as lib
class ExtractAssProxy(pype.api.Extractor):
"""Extract proxy model as Maya Ascii to use as arnold standin
"""
order = pype.api.Extractor.order + 0.2
label = "Ass Proxy (Maya ASCII)"
hosts = ["maya"]
families = ["ass"]
def process(self, instance):
@contextlib.contextmanager
def unparent(root):
"""Temporarily unparent `root`"""
parent = cmds.listRelatives(root, parent=True)
if parent:
cmds.parent(root, world=True)
yield
self.log.info("{} - {}".format(root, parent))
cmds.parent(root, parent)
else:
yield
# Define extract output file path
stagingdir = self.staging_dir(instance)
filename = "{0}.ma".format(instance.name)
path = os.path.join(stagingdir, filename)
# Perform extraction
self.log.info("Performing extraction..")
# Get only the shape contents we need in such a way that we avoid
# taking along intermediateObjects
members = instance.data['proxy']
members = cmds.ls(members,
dag=True,
transforms=True,
noIntermediate=True)
self.log.info(members)
with avalon.maya.maintained_selection():
with unparent(members[0]):
cmds.select(members, noExpand=True)
cmds.file(path,
force=True,
typ="mayaAscii",
exportSelected=True,
preserveReferences=False,
channels=False,
constraints=False,
expressions=False,
constructionHistory=False)
if "files" not in instance.data:
instance.data["files"] = list()
instance.data["files"].append(filename)
self.log.info("Extracted instance '%s' to: %s" % (instance.name, path))

View file

@ -228,6 +228,7 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin):
# have accesss to these paths, such as if slaves are
# running Linux and the submitter is on Windows.
"PYTHONPATH",
"PATH",
"MTOA_EXTENSIONS_PATH",
"MTOA_EXTENSIONS",
@ -254,33 +255,28 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin):
if path.lower().startswith('pype_'):
environment[path] = os.environ[path]
PATHS = os.environ["PATH"].split(";")
environment["PATH"] = ";".join([p for p in PATHS
if p.startswith("P:")])
environment["PATH"] = os.environ["PATH"]
clean_pythonpath = ''
for path in environment['PYTHONPATH'].split(os.pathsep):
# self.log.debug('checking path for UTF: {}'.format(path))
try:
path.decode('UTF-8', 'strict')
# path = path.lower().replace("k:/", r"\\kre-c01\\share\\").replace("p:/", r"\\kre-p01\\share\\")
clean_pythonpath += path + os.pathsep
except UnicodeDecodeError:
self.log.debug('path contains non UTF characters')
environment['PYTHONPATH'] = clean_pythonpath
for key in environment:
remapped_key = ''
list_paths = environment[key].split(os.pathsep)
if len(list_paths) > 1:
for path in list_paths:
path = path.replace("K:/", "\\\\kre-c01\\share\\").replace("P:/", "\\\\kre-p01\\share\\")
path = path.replace("K:\\", "\\\\kre-c01\\share\\").replace("P:\\", "\\\\kre-p01\\share\\")
remapped_key += path + os.pathsep
else:
path = list_paths[0].replace("K:/", "\\\\kre-c01\\share\\").replace("P:/", "\\\\kre-p01\\share\\")
path = path.replace("K:\\", "\\\\kre-c01\\share\\").replace("P:\\", "\\\\kre-p01\\share\\")
remapped_key = path
environment[key] = remapped_key
clean_path = ''
for path in environment['PATH'].split(os.pathsep):
clean_path += os.path.normpath(path) + os.pathsep
environment['PATH'] = clean_path
for path in environment:
environment[path] = environment[path].replace(
os.path.normpath(environment['PYPE_STUDIO_CORE_MOUNT']),
environment['PYPE_STUDIO_CORE'])
payload["JobInfo"].update({
"EnvironmentKeyValue%d" % index: "{key}={value}".format(

View file

@ -70,6 +70,13 @@ class ValidateLookSets(pyblish.api.InstancePlugin):
# check if any objectSets are not present in the relationships
missing_sets = [s for s in sets if s not in relationships]
# publish-specific "*_SET" sets never carry look relationships;
# filter them out instead of mutating the list while iterating
missing_sets = [s for s in missing_sets if not s.endswith("_SET")]
cls.log.info("Missing Sets "
             "'{}'".format(missing_sets))
if missing_sets:
# A set of this node is not coming along, this is wrong!
cls.log.error("Missing sets '{}' for node "

View file

@ -1,47 +0,0 @@
import pyblish.api
class CollectNukeRenderMode(pyblish.api.InstancePlugin):
# TODO: rewrite docstring to nuke
"""Collect current comp's render Mode
Options:
local
deadline
Note that this value is set for each comp separately. When you save the
comp this information will be stored in that file. If for some reason the
available tool does not visualize which render mode is set for the
current comp, please run the following line in the console (Py2)
comp.GetData("rendermode")
This will return the name of the current render mode as seen above under
Options.
"""
order = pyblish.api.CollectorOrder + 0.4
label = "Collect Render Mode"
hosts = ["nuke"]
families = ["write", "render.local"]
def process(self, instance):
"""Collect all image sequence tools"""
options = ["local", "deadline"]
node = instance[0]
if bool(node["render_local"].getValue()):
rendermode = "local"
else:
rendermode = "deadline"
assert rendermode in options, "Must be supported render mode"
# Append family
instance.data["families"].remove("render")
family = "render.{0}".format(rendermode)
instance.data["families"].append(family)
self.log.info("Render mode: {0}".format(rendermode))

View file

@ -0,0 +1,27 @@
import pyblish.api
import shutil
import os
class CopyStagingDir(pyblish.api.InstancePlugin):
"""Copy data rendered into temp local directory
"""
order = pyblish.api.IntegratorOrder - 2
label = "Copy data from temp dir"
hosts = ["nuke", "nukeassist"]
families = ["render.local"]
def process(self, instance):
temp_dir = instance.data.get("stagingDir")
output_dir = instance.data.get("outputDir")
# copy data to correct dir
if not os.path.exists(output_dir):
os.makedirs(output_dir)
self.log.info("output dir has been created")
for f in os.listdir(temp_dir):
self.log.info("copy file to correct destination: {}".format(f))
shutil.copy(os.path.join(temp_dir, f),
            os.path.join(output_dir, f))

View file

@ -0,0 +1,46 @@
import pyblish.api
@pyblish.api.log
class CollectInstanceFamilies(pyblish.api.ContextPlugin):
"""Collect families for all instances"""
order = pyblish.api.CollectorOrder + 0.2
label = "Collect Families"
hosts = ["nuke", "nukeassist"]
def process(self, context):
for instance in context.data["instances"]:
if not instance.data["publish"]:
continue
# set for ftrack to accept
instance.data["families"] = ["ftrack"]
if "write" in instance.data["family"]:
node = instance[0]
if not node["render"].value():
families = ["{}.frames".format(
instance.data["avalonKnob"]["families"])]
# to ignore staging dir op in integrate
instance.data['transfer'] = False
else:
# dealing with local/farm rendering
if node["render_farm"].value():
families = ["{}.farm".format(
instance.data["avalonKnob"]["families"])]
else:
families = ["{}.local".format(
instance.data["avalonKnob"]["families"])]
instance.data["families"].extend(families)
# Sort/grouped by family (preserving local index)
context[:] = sorted(context, key=self.sort_by_family)
def sort_by_family(self, instance):
"""Sort by family"""
return instance.data.get("families", instance.data.get("family"))

View file

@ -2,6 +2,7 @@ import os
import nuke
import pyblish.api
from avalon import io, api
from pype.nuke.lib import get_avalon_knob_data
@ -9,11 +10,14 @@ from pype.nuke.lib import get_avalon_knob_data
class CollectNukeInstances(pyblish.api.ContextPlugin):
"""Collect all nodes with Avalon knob."""
order = pyblish.api.CollectorOrder
order = pyblish.api.CollectorOrder + 0.01
label = "Collect Instances"
hosts = ["nuke", "nukeassist"]
def process(self, context):
asset_data = io.find_one({"type": "asset",
"name": api.Session["AVALON_ASSET"]})
self.log.debug("asset_data: {}".format(asset_data["data"]))
instances = []
# creating instances per write node
for node in nuke.allNodes():
@ -44,10 +48,12 @@ class CollectNukeInstances(pyblish.api.ContextPlugin):
"label": node.name(),
"name": node.name(),
"subset": subset,
"families": [avalon_knob_data["families"]],
"family": avalon_knob_data["family"],
"avalonKnob": avalon_knob_data,
"publish": node.knob('publish')
"publish": node.knob('publish').value(),
"handles": int(asset_data["data"].get("handles", 0)),
"step": 1,
"fps": int(nuke.root()['fps'].value())
})
self.log.info("collected instance: {}".format(instance.data))
@ -55,11 +61,4 @@ class CollectNukeInstances(pyblish.api.ContextPlugin):
context.data["instances"] = instances
# Sort/grouped by family (preserving local index)
context[:] = sorted(context, key=self.sort_by_family)
self.log.debug("context: {}".format(context))
def sort_by_family(self, instance):
"""Sort by family"""
return instance.data.get("families", instance.data.get("family"))

View file

@ -0,0 +1,27 @@
import pyblish.api
class CollectReview(pyblish.api.InstancePlugin):
"""Collect review instance from rendered frames
"""
order = pyblish.api.CollectorOrder + 0.3
family = "review"
label = "Collect Review"
hosts = ["nuke"]
families = ["write"]
family_targets = [".local", ".frames"]
def process(self, instance):
matches = [(f, search) for f in instance.data["families"]
           for search in self.family_targets
           if search in f]
if matches:
    family, search = matches[0]
    root_family = family.replace(search, "")
    instance.data["families"].append(".".join([
        root_family,
        self.family
    ]))
self.log.info("Review collected: `{}`".format(instance))

View file

@ -0,0 +1,56 @@
from avalon import api, io
import nuke
import pyblish.api
import os
from avalon.nuke.lib import (
add_publish_knob,
add_avalon_tab_knob
)
class CollectScript(pyblish.api.ContextPlugin):
"""Publish current script version."""
order = pyblish.api.CollectorOrder + 0.1
label = "Collect Script to publish"
hosts = ['nuke']
def process(self, context):
asset_data = io.find_one({"type": "asset",
"name": api.Session["AVALON_ASSET"]})
self.log.info("asset_data: {}".format(asset_data["data"]))
root = nuke.root()
add_avalon_tab_knob(root)
add_publish_knob(root)
family = "nukescript"
# creating instances per write node
file_path = root['name'].value()
base_name = os.path.basename(file_path)
subset = "{0}_{1}".format(os.getenv("AVALON_TASK", None), family)
# Get frame range
first_frame = int(root["first_frame"].getValue())
last_frame = int(root["last_frame"].getValue())
# Create instance
instance = context.create_instance(subset)
instance.add(root)
instance.data.update({
"subset": subset,
"asset": os.getenv("AVALON_ASSET", None),
"label": base_name,
"name": base_name,
"startFrame": first_frame,
"endFrame": last_frame,
"publish": root.knob('publish').value(),
"family": family,
"representation": "nk",
"handles": int(asset_data["data"].get("handles", 0)),
"step": 1,
"fps": int(root['fps'].value()),
})
self.log.info('Publishing script version')
context.data["instances"].append(instance)

View file

@ -1,9 +1,9 @@
import os
import tempfile
import nuke
import pyblish.api
import logging
from avalon import io, api
log = logging.getLogger(__name__)
@ -17,16 +17,18 @@ class CollectNukeWrites(pyblish.api.ContextPlugin):
hosts = ["nuke", "nukeassist"]
def process(self, context):
asset_data = io.find_one({"type": "asset",
"name": api.Session["AVALON_ASSET"]})
self.log.debug("asset_data: {}".format(asset_data["data"]))
for instance in context.data["instances"]:
self.log.debug("checking instance: {}".format(instance))
if not instance.data["publish"]:
continue
node = instance[0]
if node.Class() != "Write":
continue
self.log.debug("checking instance: {}".format(instance))
# Determine defined file type
ext = node["file_type"].value()
@ -47,9 +49,10 @@ class CollectNukeWrites(pyblish.api.ContextPlugin):
path = nuke.filename(node)
output_dir = os.path.dirname(path)
self.log.debug('output dir: {}'.format(output_dir))
# Include start and end render frame in label
name = node.name()
# create label
name = node.name()
# Include start and end render frame in label
label = "{0} ({1}-{2})".format(
name,
int(first_frame),
@ -57,42 +60,30 @@ class CollectNukeWrites(pyblish.api.ContextPlugin):
)
# preredered frames
if not node["render"].value():
families = "prerendered.frames"
# try to collect already-rendered frames from the output directory;
# families are refined by the later collectors
if "files" not in instance.data:
instance.data["files"] = list()
try:
collected_frames = os.listdir(output_dir)
self.log.debug("collected_frames: {}".format(label))
if "files" not in instance.data:
instance.data["files"] = list()
instance.data["files"].append(collected_frames)
instance.data['transfer'] = False
else:
# dealing with local/farm rendering
if node["render_farm"].value():
families = "{}.farm".format(instance.data["avalonKnob"]["families"][0])
else:
families = "{}.local".format(instance.data["avalonKnob"]["families"][0])
self.log.debug("checking for error: {}".format(label))
except Exception:
pass
instance.data.update({
"path": path,
"outputDir": output_dir,
"ext": ext,
"label": label,
"families": [families, 'ftrack'],
"startFrame": first_frame,
"endFrame": last_frame,
"outputType": output_type,
"stagingDir": output_dir,
"colorspace": node["colorspace"].value(),
"handles": int(asset_data["data"].get("handles", 0)),
"step": 1,
"fps": int(nuke.root()['fps'].value())
})
self.log.debug("instance.data: {}".format(instance.data))
self.log.debug("context: {}".format(context))
def sort_by_family(self, instance):
"""Sort by family"""
return instance.data.get("families", instance.data.get("family"))

View file

@ -0,0 +1,21 @@
import pyblish
class ExtractFramesToIntegrate(pyblish.api.InstancePlugin):
"""Extract rendered frames for integrator
"""
order = pyblish.api.ExtractorOrder
label = "Extract rendered frames"
hosts = ["nuke"]
families = ["render.frames", "prerender.frames", "still.frames"]
def process(self, instance):
staging_dir = instance.data.get('stagingDir', None)
output_dir = instance.data.get('outputDir', None)
if not staging_dir:
staging_dir = output_dir
instance.data['stagingDir'] = staging_dir
instance.data['transfer'] = False

View file

@ -6,7 +6,7 @@ import pyblish.api
class ExtractOutputDirectory(pyblish.api.InstancePlugin):
"""Extracts the output path for any collection or single output_path."""
order = pyblish.api.ExtractorOrder - 0.1
order = pyblish.api.ExtractorOrder - 0.05
label = "Output Directory"
optional = True
@ -16,9 +16,6 @@ class ExtractOutputDirectory(pyblish.api.InstancePlugin):
path = None
if "collection" in instance.data.keys():
path = instance.data["collection"].format()
if "output_path" in instance.data.keys():
path = instance.data["path"]

View file

@ -1,8 +1,10 @@
import pyblish.api
import nuke
import os
import pype
class NukeRenderLocal(pyblish.api.InstancePlugin):
class NukeRenderLocal(pype.api.Extractor):
# TODO: rewrite docstring to nuke
"""Render the current Fusion composition locally.
@ -17,7 +19,7 @@ class NukeRenderLocal(pyblish.api.InstancePlugin):
families = ["render.local", "prerender.local", "still.local"]
def process(self, instance):
node = instance[0]
# This should be a ContextPlugin, but this is a workaround
# for a bug in pyblish to run once for a family: issue #250
context = instance.context
@ -33,6 +35,12 @@ class NukeRenderLocal(pyblish.api.InstancePlugin):
last_frame = instance.data.get("endFrame", None)
node_subset_name = instance.data.get("name", None)
# swap the write path to the temporary staging dir
temp_dir = self.staging_dir(instance).replace("\\", "/")
output_dir = instance.data.get("outputDir")
path = node['file'].value()
node['file'].setValue(path.replace(output_dir, temp_dir))
self.log.info("Starting render")
self.log.info("Start frame: {}".format(first_frame))
self.log.info("End frame: {}".format(last_frame))
@ -43,6 +51,20 @@ class NukeRenderLocal(pyblish.api.InstancePlugin):
int(first_frame),
int(last_frame)
)
# switch the write node to prerendered frames for the next publish
instance[0]["render"].setValue(False)
# swap path back to publish path
path = node['file'].value()
node['file'].setValue(path.replace(temp_dir, output_dir))
if "files" not in instance.data:
    instance.data["files"] = list()
instance.data["files"].append(os.listdir(temp_dir))
self.log.info("Extracted instance '{0}' to: {1}".format(
instance.name,
output_dir
))
self.log.info('Finished render')
return

View file

@ -0,0 +1,189 @@
import os
import nuke
import pyblish.api
import pype
class ExtractDataForReview(pype.api.Extractor):
"""Extracts movie and thumbnail with baked in luts
must be run after extract_render_local.py
"""
order = pyblish.api.ExtractorOrder + 0.01
label = "Data for review"
optional = True
families = ["write"]
hosts = ["nuke"]
family_targets = [".local", ".review"]
def process(self, instance):
families = [f for f in instance.data["families"]
for search in self.family_targets
if search in f]
if not families:
return
self.log.debug("here:")
# Store selection
selection = [i for i in nuke.allNodes() if i["selected"].getValue()]
self.log.debug("here:")
# Deselect all nodes to prevent external connections
[i["selected"].setValue(False) for i in nuke.allNodes()]
self.log.debug("here:")
self.log.debug("creating staging dir:")
self.staging_dir(instance)
self.render_review_representation(instance,
representation="mov")
self.log.debug("review mov:")
self.transcode_mov(instance)
self.render_review_representation(instance,
representation="jpeg")
# Restore selection
[i["selected"].setValue(False) for i in nuke.allNodes()]
[i["selected"].setValue(True) for i in selection]
def transcode_mov(self, instance):
import subprocess
collection = instance.data["collection"]
staging_dir = instance.data["stagingDir"]
file_name = collection.format("{head}mov")
review_mov = os.path.join(staging_dir, file_name)
if instance.data.get("baked_colorspace_movie"):
args = [
    "ffmpeg", "-y",  # overwrite output without prompting
    "-i", instance.data["baked_colorspace_movie"],
    "-pix_fmt", "yuv420p",  # widely playable pixel format
    "-crf", "18",  # near-lossless h264 quality
    "-timecode", "00:00:00:01",
]
args.append(review_mov)
self.log.debug("Executing args: {0}".format(args))
self.log.info("transcoding review mov: {0}".format(review_mov))
p = subprocess.Popen(
args,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
stdin=subprocess.PIPE,
cwd=os.path.dirname(args[-1])
)
output = p.communicate()[0]
if p.returncode != 0:
raise ValueError(output)
self.log.debug("Removing `{0}`...".format(
instance.data["baked_colorspace_movie"]))
os.remove(instance.data["baked_colorspace_movie"])
instance.data["files"].append(file_name)
def render_review_representation(self,
instance,
representation="mov"):
assert instance.data['files'], "Instance data files shouldn't be empty!"
import clique
import nuke
temporary_nodes = []
staging_dir = instance.data["stagingDir"]
collection = instance.data.get("collection", None)
self.log.warning("instance.data['files']: {}".format(instance.data['files']))
if not collection:
collections, remainder = clique.assemble(*instance.data['files'])
collection = collections[0]
instance.data["collection"] = collection
# Create nodes
first_frame = min(collection.indexes)
last_frame = max(collection.indexes)
self.log.warning("first_frame: {}".format(first_frame))
self.log.warning("last_frame: {}".format(last_frame))
node = previous_node = nuke.createNode("Read")
node["file"].setValue(
os.path.join(staging_dir,
os.path.basename(collection.format(
"{head}{padding}{tail}"))).replace("\\", "/"))
node["first"].setValue(first_frame)
node["origfirst"].setValue(first_frame)
node["last"].setValue(last_frame)
node["origlast"].setValue(last_frame)
temporary_nodes.append(node)
reformat_node = nuke.createNode("Reformat")
reformat_node["format"].setValue("HD_1080")
reformat_node["resize"].setValue("fit")
reformat_node["filter"].setValue("Lanczos6")
reformat_node["black_outside"].setValue(True)
reformat_node.setInput(0, previous_node)
previous_node = reformat_node
temporary_nodes.append(reformat_node)
viewer_process_node = nuke.ViewerProcess.node()
dag_node = None
if viewer_process_node:
dag_node = nuke.createNode(viewer_process_node.Class())
dag_node.setInput(0, previous_node)
previous_node = dag_node
temporary_nodes.append(dag_node)
# Copy viewer process values
excludedKnobs = ["name", "xpos", "ypos"]
for item in viewer_process_node.knobs().keys():
if item not in excludedKnobs and item in dag_node.knobs():
x1 = viewer_process_node[item]
x2 = dag_node[item]
x2.fromScript(x1.toScript(False))
else:
self.log.warning("No viewer node found.")
# create write node
write_node = nuke.createNode("Write")
if representation in "mov":
file = collection.format("{head}baked.mov")
path = os.path.join(staging_dir, file).replace("\\", "/")
instance.data["baked_colorspace_movie"] = path
write_node["file"].setValue(path)
write_node["file_type"].setValue("mov")
write_node["raw"].setValue(1)
write_node.setInput(0, previous_node)
temporary_nodes.append(write_node)
elif representation in "jpeg":
file = collection.format("{head}jpeg")
path = os.path.join(staging_dir, file).replace("\\", "/")
instance.data["thumbnail"] = path
write_node["file"].setValue(path)
write_node["file_type"].setValue("jpeg")
write_node["raw"].setValue(1)
write_node.setInput(0, previous_node)
temporary_nodes.append(write_node)
# render only the middle frame for the thumbnail
first_frame = int(last_frame) / 2
last_frame = first_frame
# add into files for integration as representation
instance.data["files"].append(file)
# Render frames
nuke.execute(write_node.name(), int(first_frame), int(last_frame))
# Clean up
for node in temporary_nodes:
nuke.delete(node)

View file

@ -0,0 +1,34 @@
import pyblish.api
import os
import pype
import shutil
class ExtractScript(pype.api.Extractor):
"""Publish script
"""
label = 'Extract Script'
order = pyblish.api.ExtractorOrder - 0.05
optional = True
hosts = ['nuke']
families = ["nukescript"]
def process(self, instance):
self.log.debug("instance extracting: {}".format(instance.data))
current_script = instance.context.data["currentFile"]
# Define extract output file path
dir_path = self.staging_dir(instance)
filename = "{0}".format(instance.data["name"])
path = os.path.join(dir_path, filename)
self.log.info("Performing extraction..")
shutil.copy(current_script, path)
if "files" not in instance.data:
instance.data["files"] = list()
instance.data["files"].append(filename)
self.log.info("Extracted instance '%s' to: %s" % (instance.name, path))

View file

@ -6,7 +6,7 @@ class ExtractScriptSave(pyblish.api.Extractor):
"""
"""
label = 'Script Save'
order = pyblish.api.Extractor.order - 0.45
order = pyblish.api.Extractor.order - 0.1
hosts = ['nuke']
def process(self, instance):

View file

@ -0,0 +1,23 @@
import pyblish.api
class WriteToRender(pyblish.api.InstancePlugin):
"""Swith Render knob on write instance to on,
so next time publish will be set to render
"""
order = pyblish.api.ExtractorOrder + 0.1
label = "Write to render next"
optional = True
hosts = ["nuke", "nukeassist"]
families = ["write"]
def process(self, instance):
if [f for f in instance.data["families"]
if ".frames" in f]:
instance[0]["render"].setValue(True)
self.log.info("Swith write node render to `on`")
else:
# switch render off until new frames are needed
instance[0]["render"].setValue(False)
self.log.info("Switched write node render to `off`")

View file

@ -0,0 +1,19 @@
import nuke
import pyblish.api
class IncrementScriptVersion(pyblish.api.ContextPlugin):
"""Increment current script version."""
order = pyblish.api.IntegratorOrder + 0.9
label = "Increment Current Script Version"
optional = True
hosts = ['nuke']
families = ["nukescript", "render.local", "render.frames"]
def process(self, context):
from pype.lib import version_up
path = context.data["currentFile"]
nuke.scriptSaveAs(version_up(path))
self.log.info('Incrementing script version')

View file

@ -23,39 +23,37 @@ class RepairCollectionAction(pyblish.api.Action):
class ValidateCollection(pyblish.api.InstancePlugin):
""" Validates file output. """
order = pyblish.api.ValidatorOrder
# optional = True
families = ['prerendered.frames']
order = pyblish.api.ValidatorOrder + 0.1
families = ["render.frames", "still.frames", "prerender.frames"]
label = "Check prerendered frames"
hosts = ["nuke"]
actions = [RepairCollectionAction]
def process(self, instance):
self.log.debug('instance.data["files"]: {}'.format(instance.data['files']))
if not instance.data["files"]:
return
collections, remainder = clique.assemble(*instance.data['files'])
self.log.info('collections: {}'.format(str(collections)))
collection = collections[0]
frame_length = instance.data["endFrame"] \
- instance.data["startFrame"] + 1
if frame_length != 1:
assert len(collections) == 1, self.log.info(
"There are multiple collections in the folder")
assert collections[0].is_contiguous(), self.log.info("Some frames appear to be missing")
assert len(collections) == 1, "There are multiple collections in the folder"
assert collection.is_contiguous(), "Some frames appear to be missing"
assert remainder is not None, self.log.info("There are some extra files in folder")
basename, ext = os.path.splitext(list(collections[0])[0])
assert all(ext == os.path.splitext(name)[1]
for name in collections[0]), self.log.info(
"Files had varying suffixes"
)
assert not any(os.path.isabs(name) for name in collections[0]), self.log.info("some file name are absolute")
assert not remainder, "There are some extra files in folder"
self.log.info('frame_length: {}'.format(frame_length))
self.log.info('len(list(instance.data["files"])): {}'.format(
len(list(instance.data["files"][0]))))
self.log.info('len(collection.indexes): {}'.format(
len(collection.indexes)))
assert len(list(instance.data["files"][0])) is frame_length, self.log.info(
"{} missing frames. Use repair to render all frames".format(__name__))
assert len(collection.indexes) == frame_length, (
    "{} missing frames. Use repair to render "
    "all frames".format(__name__))

View file

@ -0,0 +1,51 @@
import pyblish.api
import pype.api
import pype.nuke.actions
class RepairWriteFamiliesAction(pyblish.api.Action):
label = "Fix Write's render attributes"
on = "failed"
icon = "wrench"
def process(self, instance, plugin):
self.log.info("instance {}".format(instance))
instance["render"].setValue(True)
self.log.info("Rendering toggled ON")
@pyblish.api.log
class ValidateWriteFamilies(pyblish.api.InstancePlugin):
""" Validates write families. """
order = pyblish.api.ValidatorOrder
label = "Check correct writes families"
hosts = ["nuke"]
families = ["write"]
actions = [pype.nuke.actions.SelectInvalidAction, pype.api.RepairAction]
@staticmethod
def get_invalid(instance):
if not [f for f in instance.data["families"]
if ".frames" in f]:
return
if not instance.data["files"]:
return instance
def process(self, instance):
self.log.debug('instance.data["files"]: {}'.format(instance.data['files']))
invalid = self.get_invalid(instance)
if invalid:
raise ValueError("`{}`: Switch `Render` on! "
                 "> {}".format(__name__, invalid))
self.log.info("Checked correct writes families")
@classmethod
def repair(cls, instance):
cls.log.info("instance {}".format(instance))
instance[0]["render"].setValue(True)
cls.log.info("Rendering toggled ON")

View file


@ -1,203 +0,0 @@
import re
import tempfile
import json
import os
import sys
import pyblish.api
print 'pyblish_utils loaded'
def save_preset(path, preset):
"""Save options to path"""
with open(path, "w") as f:
json.dump(preset, f)
def load_preset(path):
"""Load options json from path"""
with open(path, "r") as f:
return json.load(f)
def temp_dir(context):
"""Provide a temporary directory in which to store extracted files"""
extract_dir = context.data('extractDir')
if not extract_dir:
extract_dir = tempfile.mkdtemp()
context.set_data('extractDir', value=extract_dir)
return extract_dir
def version_get(string, prefix, suffix=None):
"""Extract version information from filenames. Code from Foundry's nukescripts.version_get()"""
if string is None:
raise ValueError, "Empty version string - no match"
regex = "[/_.]" + prefix + "\d+"
matches = re.findall(regex, string, re.IGNORECASE)
if not len(matches):
msg = "No \"_" + prefix + "#\" found in \"" + string + "\""
raise ValueError, msg
return (matches[-1:][0][1], re.search("\d+", matches[-1:][0]).group())
def version_set(string, prefix, oldintval, newintval):
"""Changes version information from filenames. Code from Foundry's nukescripts.version_set()"""
regex = "[/_.]" + prefix + "\d+"
matches = re.findall(regex, string, re.IGNORECASE)
if not len(matches):
return ""
# Filter to retain only version strings with matching numbers
matches = filter(lambda s: int(s[2:]) == oldintval, matches)
# Replace all version strings with matching numbers
for match in matches:
# use expression instead of expr so 0 prefix does not make octal
fmt = "%%(#)0%dd" % (len(match) - 2)
newfullvalue = match[0] + prefix + str(fmt % {"#": newintval})
string = re.sub(match, newfullvalue, string)
return string
def version_up(string):
try:
(prefix, v) = version_get(string, 'v')
v = int(v)
file = version_set(string, prefix, v, v + 1)
except:
raise ValueError, 'Unable to version up File'
return file
def open_folder(path):
"""Provide a temporary directory in which to store extracted files"""
import subprocess
path = os.path.abspath(path)
if sys.platform == 'win32':
subprocess.Popen('explorer "%s"' % path)
elif sys.platform == 'darwin': # macOS
subprocess.Popen(['open', path])
else: # linux
try:
subprocess.Popen(['xdg-open', path])
except OSError:
raise OSError('unsupported xdg-open call??')
def filter_instances(context, plugin):
"""Provide a temporary directory in which to store extracted files"""
# Get the errored instances
allInstances = []
for result in context.data["results"]:
if (result["instance"] is not None and
result["instance"] not in allInstances):
allInstances.append(result["instance"])
# Apply pyblish.logic to get the instances for the plug-in
instances = pyblish.api.instances_by_plugin(allInstances, plugin)
return instances
def load_capture_preset(path):
import capture_gui
import capture
path = path
preset = capture_gui.lib.load_json(path)
print preset
options = dict()
# CODEC
id = 'Codec'
for key in preset[id]:
options[str(key)] = preset[id][key]
# GENERIC
id = 'Generic'
for key in preset[id]:
if key.startswith('isolate'):
pass
# options['isolate'] = preset[id][key]
else:
options[str(key)] = preset[id][key]
# RESOLUTION
id = 'Resolution'
options['height'] = preset[id]['height']
options['width'] = preset[id]['width']
# DISPLAY OPTIONS
id = 'Display Options'
disp_options = {}
for key in preset['Display Options']:
if key.startswith('background'):
disp_options[key] = preset['Display Options'][key]
else:
disp_options['displayGradient'] = True
options['display_options'] = disp_options
# VIEWPORT OPTIONS
temp_options = {}
id = 'Renderer'
for key in preset[id]:
temp_options[str(key)] = preset[id][key]
temp_options2 = {}
id = 'Viewport Options'
light_options = {0: "default",
1: 'all',
2: 'selected',
3: 'flat',
4: 'nolights'}
for key in preset[id]:
if key == 'high_quality':
temp_options2['multiSampleEnable'] = True
temp_options2['multiSampleCount'] = 4
temp_options2['textureMaxResolution'] = 512
temp_options2['enableTextureMaxRes'] = True
if key == 'alphaCut':
temp_options2['transparencyAlgorithm'] = 5
temp_options2['transparencyQuality'] = 1
if key == 'headsUpDisplay':
temp_options['headsUpDisplay'] = True
if key == 'displayLights':
temp_options[str(key)] = light_options[preset[id][key]]
else:
temp_options[str(key)] = preset[id][key]
for key in ['override_viewport_options', 'high_quality', 'alphaCut']:
temp_options.pop(key, None)
options['viewport_options'] = temp_options
options['viewport2_options'] = temp_options2
# use active sound track
scene = capture.parse_active_scene()
options['sound'] = scene['sound']
cam_options = dict()
cam_options['overscan'] = 1.0
cam_options['displayFieldChart'] = False
cam_options['displayFilmGate'] = False
cam_options['displayFilmOrigin'] = False
cam_options['displayFilmPivot'] = False
cam_options['displayGateMask'] = False
cam_options['displayResolution'] = False
cam_options['displaySafeAction'] = False
cam_options['displaySafeTitle'] = False
# options['display_options'] = temp_options
return options