Mirror of https://github.com/ynput/ayon-core.git (synced 2025-12-25 05:14:40 +01:00)

commit 7b2fb79612: "resolving conflicts"
53 changed files with 970 additions and 691 deletions

@@ -33,7 +33,7 @@ from .templates import (
     get_asset,
     get_task,
     set_avalon_workdir,
-    get_version_from_workfile,
+    get_version_from_path,
     get_workdir_template,
     set_hierarchy,
     set_project_code

@@ -77,7 +77,7 @@ __all__ = [
     "get_asset",
     "get_task",
     "set_avalon_workdir",
-    "get_version_from_workfile",
+    "get_version_from_path",
     "get_workdir_template",
     "modified_environ",
     "add_tool_to_environment",

pype/ftrack/actions/action_delete_asset.py (new file, 192 lines)
@@ -0,0 +1,192 @@
import sys
import logging
import random
import string
import argparse
import ftrack_api
from pype.ftrack import BaseAction
from avalon.tools.libraryloader.io_nonsingleton import DbConnector


class DeleteEntity(BaseAction):
    '''Edit meta data action.'''

    #: Action identifier.
    identifier = 'delete.entity'
    #: Action label.
    label = 'Delete entity'
    #: Action description.
    description = 'Removes assets from Ftrack and Avalon db with all childs'
    icon = "https://www.iconsdb.com/icons/preview/white/full-trash-xxl.png"
    #: Db
    db = DbConnector()

    def discover(self, session, entities, event):
        ''' Validation '''
        selection = event["data"].get("selection", None)
        if selection is None or len(selection) > 1:
            return False

        valid = ["task"]
        entityType = selection[0].get("entityType", "")
        if entityType.lower() not in valid:
            return False

        discover = False
        roleList = ['Pypeclub', 'Administrator']
        userId = event['source']['user']['id']
        user = session.query('User where id is ' + userId).one()

        for role in user['user_security_roles']:
            if role['security_role']['name'] in roleList:
                discover = True
                break

        return discover

    def interface(self, session, entities, event):
        if not event['data'].get('values', {}):
            entity = entities[0]
            title = 'Going to delete "{}"'.format(entity['name'])

            items = []
            item = {
                'label': 'Enter "DELETE" to confirm',
                'name': 'key',
                'type': 'text',
                'value': ''
            }
            items.append(item)

            return {
                'items': items,
                'title': title
            }

    def launch(self, session, entities, event):
        if 'values' not in event['data']:
            return

        values = event['data']['values']
        if len(values) <= 0:
            return {
                'success': True,
                'message': 'No Assets to delete!'
            }
        elif values.get('key', '').lower() != 'delete':
            return {
                'success': False,
                'message': 'Entered key does not match'
            }
        entity = entities[0]
        project = entity['project']

        self.db.install()
        self.db.Session['AVALON_PROJECT'] = project["full_name"]

        av_entity = self.db.find_one({
            'type': 'asset',
            'name': entity['name']
        })

        if av_entity is not None:
            all_ids = []
            all_ids.append(av_entity['_id'])
            all_ids.extend(self.find_child(av_entity))

            if len(all_ids) == 0:
                self.db.uninstall()
                return {
                    'success': True,
                    'message': 'None of assets'
                }

            or_subquery = []
            for id in all_ids:
                or_subquery.append({'_id': id})
            delete_query = {'$or': or_subquery}
            self.db.delete_many(delete_query)

        session.delete(entity)
        session.commit()
        self.db.uninstall()

        return {
            'success': True,
            'message': 'All assets were deleted!'
        }

    def find_child(self, entity):
        output = []
        id = entity['_id']
        visuals = [x for x in self.db.find({'data.visualParent': id})]
        assert len(visuals) == 0, 'This asset has another asset as child'
        childs = self.db.find({'parent': id})
        for child in childs:
            output.append(child['_id'])
            output.extend(self.find_child(child))
        return output

    def find_assets(self, asset_names):
        assets = []
        for name in asset_names:
            entity = self.db.find_one({
                'type': 'asset',
                'name': name
            })
            if entity is not None and entity not in assets:
                assets.append(entity)
        return assets


def register(session, **kw):
    '''Register plugin. Called when used as an plugin.'''

    # Validate that session is an instance of ftrack_api.Session. If not,
    # assume that register is being called from an old or incompatible API and
    # return without doing anything.
    if not isinstance(session, ftrack_api.session.Session):
        return

    action_handler = DeleteEntity(session)
    action_handler.register()


def main(arguments=None):
    '''Set up logging and register action.'''
    if arguments is None:
        arguments = []

    parser = argparse.ArgumentParser()
    # Allow setting of logging level from arguments.
    loggingLevels = {}
    for level in (
        logging.NOTSET, logging.DEBUG, logging.INFO, logging.WARNING,
        logging.ERROR, logging.CRITICAL
    ):
        loggingLevels[logging.getLevelName(level).lower()] = level

    parser.add_argument(
        '-v', '--verbosity',
        help='Set the logging output verbosity.',
        choices=loggingLevels.keys(),
        default='info'
    )
    namespace = parser.parse_args(arguments)

    # Set up basic logging
    logging.basicConfig(level=loggingLevels[namespace.verbosity])

    session = ftrack_api.Session()

    register(session)

    # Wait for events
    logging.info(
        'Registered actions and listening for events. Use Ctrl-C to abort.'
    )
    session.event_hub.wait()


if __name__ == '__main__':
    raise SystemExit(main(sys.argv[1:]))
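
Note: the '$or' subquery built in launch() above selects exactly the documents
whose _id is in all_ids, which MongoDB can express as a single '$in' match. A
minimal equivalent sketch, assuming DbConnector.delete_many forwards to pymongo's
Collection.delete_many (the helper name is invented for illustration):

    def delete_by_ids(db, all_ids):
        # {'_id': {'$in': [...]}} matches every document whose _id is in the
        # list, the same set selected by {'$or': [{'_id': id}, ...]} above.
        return db.delete_many({'_id': {'$in': all_ids}})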

pype/ftrack/actions/action_delete_asset_byname.py (new file, 189 lines)
@@ -0,0 +1,189 @@
import sys
import logging
import argparse
import ftrack_api
from pype.ftrack import BaseAction
from avalon.tools.libraryloader.io_nonsingleton import DbConnector


class AssetsRemover(BaseAction):
    '''Edit meta data action.'''

    #: Action identifier.
    identifier = 'remove.assets'
    #: Action label.
    label = 'Delete Assets by Name'
    #: Action description.
    description = 'Removes assets from Ftrack and Avalon db with all childs'
    #: Db
    db = DbConnector()

    def discover(self, session, entities, event):
        ''' Validation '''
        selection = event["data"].get("selection", None)
        if selection is None:
            return False

        valid = ["show", "task"]
        entityType = selection[0].get("entityType", "")
        if entityType.lower() not in valid:
            return False

        discover = False
        roleList = ['Pypeclub', 'Administrator']
        userId = event['source']['user']['id']
        user = session.query('User where id is ' + userId).one()

        for role in user['user_security_roles']:
            if role['security_role']['name'] in roleList:
                discover = True
                break

        return discover

    def interface(self, session, entities, event):
        if not event['data'].get('values', {}):
            title = 'Enter Asset names to delete'

            items = []
            for i in range(15):

                item = {
                    'label': 'Asset {}'.format(i+1),
                    'name': 'asset_{}'.format(i+1),
                    'type': 'text',
                    'value': ''
                }
                items.append(item)

            return {
                'items': items,
                'title': title
            }

    def launch(self, session, entities, event):
        entity = entities[0]
        if entity.entity_type.lower() != 'Project':
            project = entity['project']
        else:
            project = entity

        if 'values' not in event['data']:
            return

        values = event['data']['values']
        if len(values) <= 0:
            return {
                'success': True,
                'message': 'No Assets to delete!'
            }

        asset_names = []

        for k, v in values.items():
            if v.replace(' ', '') != '':
                asset_names.append(v)

        self.db.install()
        self.db.Session['AVALON_PROJECT'] = project["full_name"]

        assets = self.find_assets(asset_names)

        all_ids = []
        for asset in assets:
            all_ids.append(asset['_id'])
            all_ids.extend(self.find_child(asset))

        if len(all_ids) == 0:
            self.db.uninstall()
            return {
                'success': True,
                'message': 'None of assets'
            }

        or_subquery = []
        for id in all_ids:
            or_subquery.append({'_id': id})
        delete_query = {'$or': or_subquery}
        self.db.delete_many(delete_query)

        self.db.uninstall()
        return {
            'success': True,
            'message': 'All assets were deleted!'
        }

    def find_child(self, entity):
        output = []
        id = entity['_id']
        visuals = [x for x in self.db.find({'data.visualParent': id})]
        assert len(visuals) == 0, 'This asset has another asset as child'
        childs = self.db.find({'parent': id})
        for child in childs:
            output.append(child['_id'])
            output.extend(self.find_child(child))
        return output

    def find_assets(self, asset_names):
        assets = []
        for name in asset_names:
            entity = self.db.find_one({
                'type': 'asset',
                'name': name
            })
            if entity is not None and entity not in assets:
                assets.append(entity)
        return assets


def register(session, **kw):
    '''Register plugin. Called when used as an plugin.'''

    # Validate that session is an instance of ftrack_api.Session. If not,
    # assume that register is being called from an old or incompatible API and
    # return without doing anything.
    if not isinstance(session, ftrack_api.session.Session):
        return

    action_handler = AssetsRemover(session)
    action_handler.register()


def main(arguments=None):
    '''Set up logging and register action.'''
    if arguments is None:
        arguments = []

    parser = argparse.ArgumentParser()
    # Allow setting of logging level from arguments.
    loggingLevels = {}
    for level in (
        logging.NOTSET, logging.DEBUG, logging.INFO, logging.WARNING,
        logging.ERROR, logging.CRITICAL
    ):
        loggingLevels[logging.getLevelName(level).lower()] = level

    parser.add_argument(
        '-v', '--verbosity',
        help='Set the logging output verbosity.',
        choices=loggingLevels.keys(),
        default='info'
    )
    namespace = parser.parse_args(arguments)

    # Set up basic logging
    logging.basicConfig(level=loggingLevels[namespace.verbosity])

    session = ftrack_api.Session()

    register(session)

    # Wait for events
    logging.info(
        'Registered actions and listening for events. Use Ctrl-C to abort.'
    )
    session.event_hub.wait()


if __name__ == '__main__':
    raise SystemExit(main(sys.argv[1:]))

@@ -114,6 +114,18 @@ def import_to_avalon(
            output['errors'] = errors
            return output

+    else:
+        # not override existing templates!
+        templates = av_project['config'].get('template', None)
+        if templates is not None:
+            for key, value in config['template'].items():
+                if (
+                    key in templates and
+                    templates[key] is not None and
+                    templates[key] != value
+                ):
+                    config['template'][key] = templates[key]
+
    projectId = av_project['_id']

    data = get_data(

@@ -406,7 +418,9 @@ def get_data(entity, session, custom_attributes):
            {'type': 'asset', 'name': parName}
        )['_id']

-    hierarchy = os.path.sep.join(folderStruct)
+    hierarchy = ""
+    if len(folderStruct) > 0:
+        hierarchy = os.path.sep.join(folderStruct)

    data['visualParent'] = parentId
    data['parents'] = folderStruct

@@ -180,13 +180,14 @@ class AppAction(BaseHandler):
        os.environ["AVALON_APP_NAME"] = self.identifier

        anatomy = pype.Anatomy
-        hierarchy = database[project_name].find_one({
+        hierarchy = ""
+        parents = database[project_name].find_one({
            "type": 'asset',
            "name": entity['parent']['name']
        })['data']['parents']

-        if hierarchy:
-            hierarchy = os.path.join(*hierarchy)
+        if parents:
+            hierarchy = os.path.join(*parents)

        data = {"project": {"name": entity['project']['full_name'],
                            "code": entity['project']['name']},

@@ -305,24 +306,27 @@ class AppAction(BaseHandler):
        # Change status of task to In progress
        config = get_config_data()

-        if (
-            'status_on_app_launch' in config and
-            'sync_to_avalon' in config and
-            'statuses_name_change' in config['sync_to_avalon']
-        ):
-            statuses = config['sync_to_avalon']['statuses_name_change']
-            if entity['status']['name'].lower() in statuses:
-                status_name = config['status_on_app_launch']
+        if 'status_update' in config:
+            statuses = config['status_update']
+
+            actual_status = entity['status']['name'].lower()
+            next_status_name = None
+            for key, value in statuses.items():
+                if actual_status in value or '_any_' in value:
+                    if key != '_ignore_':
+                        next_status_name = key
+                    break
+
+            if next_status_name is not None:
                try:
-                    query = 'Status where name is "{}"'.format(status_name)
+                    query = 'Status where name is "{}"'.format(next_status_name)
                    status = session.query(query).one()
-                    task['status'] = status
+                    entity['status'] = status
                    session.commit()
-                except Exception as e:
+                except Exception:
                    msg = (
                        'Status "{}" in config wasn\'t found on Ftrack'
-                    ).format(status_name)
+                    ).format(next_status_name)
                    self.log.warning(msg)

        # Set origin avalon environments
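
Note: the new loop implies that config['status_update'] maps a target status name
to the list of current statuses it applies from, with '_any_' as a wildcard and
'_ignore_' as a key that matches but applies nothing. A hypothetical example of
such a block (the status names are invented for illustration):

    config['status_update'] = {
        'In Progress': ['not ready', 'ready'],  # move these to In Progress
        '_ignore_': ['omitted', 'on hold'],     # matched, but never changed
        'Ready': ['_any_']                      # wildcard: any other status
    }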

pype/lib.py (11 changed lines)
@@ -424,14 +424,9 @@ def get_avalon_project_template():
    """
    template = Templates(type=["anatomy"])
    proj_template = {}
-    # TODO this down should work but it can't be in default.toml:
-    # - Raises error when App (e.g. Nuke) is started
-    # proj_template['workfile'] = template.anatomy.avalon.workfile
-    # proj_template['work'] = template.anatomy.avalon.work
-    # proj_template['publish'] = template.anatomy.avalon.publish
-    proj_template['workfile'] = "{asset[name]}_{task[name]}_v{version:0>3}<_{comment}>"
-    proj_template['work'] = "{root}/{project}/{hierarchy}/{asset}/work/{task}"
-    proj_template['publish'] = "{root}/{project}/{hierarchy}/{asset}/publish/{family}/{subset}/v{version}/{projectcode}_{asset}_{subset}_v{version}.{representation}"
+    proj_template['workfile'] = template.anatomy.avalon.workfile
+    proj_template['work'] = template.anatomy.avalon.work
+    proj_template['publish'] = template.anatomy.avalon.publish
    return proj_template

@@ -107,6 +107,9 @@ def on_init(_):
    # Force load objExport plug-in (requested by artists)
    cmds.loadPlugin("objExport", quiet=True)

+    # Force load objExport plug-in (requested by artists)
+    cmds.loadPlugin("spore", quiet=True)
+
    from .customize import (
        override_component_mask_commands,
        override_toolbox_ui

@@ -131,8 +134,8 @@ def on_save(_):

    avalon.logger.info("Running callback on save..")

-    # Update current task for the current scene
-    update_task_from_path(cmds.file(query=True, sceneName=True))
+    # # Update current task for the current scene
+    # update_task_from_path(cmds.file(query=True, sceneName=True))

    # Generate ids of the current context on nodes in the scene
    nodes = lib.get_id_required_nodes(referenced_nodes=False)

@@ -146,8 +149,8 @@ def on_open(_):
    from avalon.vendor.Qt import QtWidgets
    from ..widgets import popup

-    # Update current task for the current scene
-    update_task_from_path(cmds.file(query=True, sceneName=True))
+    # # Update current task for the current scene
+    # update_task_from_path(cmds.file(query=True, sceneName=True))

    # Validate FPS after update_task_from_path to
    # ensure it is using correct FPS for the asset

@@ -78,6 +78,8 @@ def override_toolbox_ui():
    import avalon.tools.cbsceneinventory as inventory
    import avalon.tools.cbloader as loader
    from avalon.maya.pipeline import launch_workfiles_app
+    import mayalookassigner
+

    # Ensure the maya web icon on toolbox exists
    web_button = "ToolBox|MainToolboxLayout|mayaWebButton"

@@ -98,6 +100,18 @@ def override_toolbox_ui():
    background_color = (0.267, 0.267, 0.267)
    controls = []

+    control = mc.iconTextButton(
+        "pype_toolbox_lookmanager",
+        annotation="Look Manager",
+        label="Look Manager",
+        image=os.path.join(icons, "lookmanager.png"),
+        command=lambda: mayalookassigner.show(),
+        bgc=background_color,
+        width=icon_size,
+        height=icon_size,
+        parent=parent)
+    controls.append(control)
+
    control = mc.iconTextButton(
        "pype_toolbox_workfiles",
        annotation="Work Files",

@@ -134,16 +148,16 @@ def override_toolbox_ui():
        parent=parent)
    controls.append(control)

-    control = mc.iconTextButton(
-        "pype_toolbox",
-        annotation="Colorbleed",
-        label="Colorbleed",
-        image=os.path.join(icons, "pype_logo_36x36.png"),
-        bgc=background_color,
-        width=icon_size,
-        height=icon_size,
-        parent=parent)
-    controls.append(control)
+    # control = mc.iconTextButton(
+    #     "pype_toolbox",
+    #     annotation="Kredenc",
+    #     label="Kredenc",
+    #     image=os.path.join(icons, "kredenc_logo.png"),
+    #     bgc=background_color,
+    #     width=icon_size,
+    #     height=icon_size,
+    #     parent=parent)
+    # controls.append(control)

    # Add the buttons on the bottom and stack
    # them above each other with side padding

@@ -42,7 +42,7 @@ def writes_version_sync():
        node_file = each['file'].value()
        log.info("node_file: {}".format(node_file))

-        node_version = pype.get_version_from_path(node_file, None)
+        node_version = pype.get_version_from_path(node_file)
        log.info("node_version: {}".format(node_version))

        node_new_file = node_file.replace(node_version, new_version)

@@ -70,7 +70,7 @@ def format_anatomy(data):
    data.update({
        "hierarchy": pype.get_hierarchy(),
        "frame": "#"*padding,
-        "VERSION": pype.get_version_from_path(file)
+        "version": pype.get_version_from_path(file)
    })

    # log.info("format_anatomy:anatomy: {}".format(anatomy))

@@ -1,4 +1,5 @@
 import tempfile
+import os
 import pyblish.api

 ValidatePipelineOrder = pyblish.api.ValidatorOrder + 0.05

@@ -28,7 +29,9 @@ class Extractor(pyblish.api.InstancePlugin):
        staging_dir = instance.data.get('stagingDir', None)

        if not staging_dir:
-            staging_dir = tempfile.mkdtemp(prefix="pyblish_tmp_")
+            staging_dir = os.path.normpath(
+                tempfile.mkdtemp(prefix="pyblish_tmp_")
+            )
            instance.data['stagingDir'] = staging_dir

        return staging_dir

@@ -1,8 +1,11 @@
 import os

-import ftrack_api_old as ftrack_api
 import pyblish.api

+try:
+    import ftrack_api_old as ftrack_api
+except Exception:
+    import ftrack_api
+

 class CollectFtrackApi(pyblish.api.ContextPlugin):
     """ Collects an ftrack session and the current task id. """

@@ -36,7 +36,7 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin):
        self.log.debug('instance {}'.format(instance))

        assumed_data = instance.data["assumedTemplateData"]
-        assumed_version = assumed_data["VERSION"]
+        assumed_version = assumed_data["version"]
        version_number = int(assumed_version)
        family = instance.data['family'].lower()
        asset_type = ''

@@ -67,7 +67,7 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin):
                    'frameOut': int(instance.data["startFrame"]),
                    'frameRate': 25})}
            }
-        elif ext in [".jpg"]:
+        elif ext in [".jpg", ".jpeg"]:
            component_data = {
                "name": "thumbnail"  # Default component name is "main".
            }

@@ -12,7 +12,7 @@ class ExtractJSON(pyblish.api.ContextPlugin):

    order = pyblish.api.IntegratorOrder
    label = "JSON"
-    hosts = ['nuke', 'maya']
+    hosts = ['maya']

    def process(self, context):

@@ -25,6 +25,7 @@ class ExtractJSON(pyblish.api.ContextPlugin):

        output_data = []
        for instance in context:
+            self.log.debug(instance['data'])

            data = {}
            for key, value in instance.data.iteritems():

@@ -108,7 +108,7 @@ class CollectAssumedDestination(pyblish.api.InstancePlugin):

        # if there is a subset there ought to be version
        if version is not None:
-            version_number += version["name"]
+            version_number += int(version["name"])

        hierarchy = asset['data']['parents']
        if hierarchy:

@@ -122,7 +122,7 @@ class CollectAssumedDestination(pyblish.api.InstancePlugin):
                "family": instance.data['family'],
                "asset": asset_name,
                "subset": subset_name,
-                "VERSION": version_number,
+                "version": version_number,
                "hierarchy": hierarchy,
                "representation": "TEMP"}

@@ -11,6 +11,7 @@ class CollectJSON(pyblish.api.ContextPlugin):

    label = "JSON"
    order = pyblish.api.CollectorOrder
+    hosts = ['maya']

    def version_get(self, string, prefix):
        """ Extract version information from filenames. Code from Foundry"s

@@ -26,7 +27,7 @@ class CollectJSON(pyblish.api.ContextPlugin):
        return matches[-1:][0][1], re.search(r"\d+", matches[-1:][0]).group()

    def process(self, context):
-        current_file = context.data("currentFile")
+        current_file = context.data.get("currentFile", '')
        # Skip if current file is not a directory
        if not os.path.isdir(current_file):
            return

pype/plugins/global/publish/collect_scene_version.py (new file, 23 lines)
@@ -0,0 +1,23 @@
import os
import pyblish.api
import os
import pype.api as pype


class CollectSceneVersion(pyblish.api.ContextPlugin):
    """Finds version in the filename or passes the one found in the context

    Arguments:
        version (int, optional): version number of the publish
    """

    order = pyblish.api.CollectorOrder
    label = 'Collect Version'

    def process(self, context):

        filename = os.path.basename(context.data.get('currentFile'))

        rootVersion = pype.get_version_from_path(filename)

        context.data['version'] = rootVersion

        self.log.info('Scene Version: %s' % context.data('version'))

@@ -112,7 +112,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
        self.log.info("Verifying version from assumed destination")

        assumed_data = instance.data["assumedTemplateData"]
-        assumed_version = assumed_data["VERSION"]
+        assumed_version = assumed_data["version"]
        if assumed_version != next_version:
            raise AttributeError("Assumed version 'v{0:03d}' does not match"
                                 "next version in database "

@@ -141,10 +141,14 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
        # \|________|
        #
        root = api.registered_root()
-        hierarchy = io.find_one({"type": 'asset', "name": ASSET})['data']['parents']
-        if hierarchy:
+        hierarchy = ""
+        parents = io.find_one({
+            "type": 'asset',
+            "name": ASSET
+        })['data']['parents']
+        if parents and len(parents) > 0:
            # hierarchy = os.path.sep.join(hierarchy)
-            hierarchy = os.path.join(*hierarchy)
+            hierarchy = os.path.join(*parents)

        template_data = {"root": root,
                         "project": {"name": PROJECT,

@@ -153,7 +157,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
                         "asset": ASSET,
                         "family": instance.data['family'],
                         "subset": subset["name"],
-                         "VERSION": version["name"],
+                         "version": int(version["name"]),
                         "hierarchy": hierarchy}

        template_publish = project["config"]["template"]["publish"]

@@ -43,8 +43,8 @@ class IntegrateFrames(pyblish.api.InstancePlugin):

        self.register(instance)

-        self.log.info("Integrating Asset in to the database ...")
-        self.log.info("instance.data: {}".format(instance.data))
+        # self.log.info("Integrating Asset in to the database ...")
+        # self.log.info("instance.data: {}".format(instance.data))
        if instance.data.get('transfer', True):
            self.integrate(instance)

@@ -110,13 +110,16 @@ class IntegrateFrames(pyblish.api.InstancePlugin):
        self.log.info("Verifying version from assumed destination")

        assumed_data = instance.data["assumedTemplateData"]
-        assumed_version = assumed_data["VERSION"]
+        assumed_version = assumed_data["version"]
        if assumed_version != next_version:
            raise AttributeError("Assumed version 'v{0:03d}' does not match"
                                 "next version in database "
                                 "('v{1:03d}')".format(assumed_version,
                                                       next_version))

+        if instance.data.get('version'):
+            next_version = int(instance.data.get('version'))
+
        self.log.debug("Next version: v{0:03d}".format(next_version))

        version_data = self.create_version_data(context, instance)

@@ -139,11 +142,12 @@ class IntegrateFrames(pyblish.api.InstancePlugin):
        # \|________|
        #
        root = api.registered_root()
-        hierarchy = io.find_one({"type": 'asset', "name": ASSET})[
+        hierarchy = ""
+        parents = io.find_one({"type": 'asset', "name": ASSET})[
            'data']['parents']
-        if hierarchy:
+        if parents and len(parents) > 0:
            # hierarchy = os.path.sep.join(hierarchy)
-            hierarchy = os.path.join(*hierarchy)
+            hierarchy = os.path.join(*parents)

        template_data = {"root": root,
                         "project": {"name": PROJECT,

@@ -153,7 +157,7 @@ class IntegrateFrames(pyblish.api.InstancePlugin):
                         "asset": ASSET,
                         "family": instance.data['family'],
                         "subset": subset["name"],
-                         "VERSION": version["name"],
+                         "version": int(version["name"]),
                         "hierarchy": hierarchy}

        # template_publish = project["config"]["template"]["publish"]

@@ -163,8 +167,7 @@ class IntegrateFrames(pyblish.api.InstancePlugin):
        # Each should be a single representation (as such, a single extension)
        representations = []
        destination_list = []
-        self.log.debug("integrate_frames:instance.data[files]: {}".format(
-            instance.data["files"]))
+
        for files in instance.data["files"]:
            # Collection
            #   _______

@@ -205,7 +208,6 @@ class IntegrateFrames(pyblish.api.InstancePlugin):

                src = os.path.join(stagingdir, src_file_name)
                instance.data["transfers"].append([src, dst])
-                template = anatomy.render.path

            else:
                # Single file

@@ -235,30 +237,37 @@ class IntegrateFrames(pyblish.api.InstancePlugin):

                anatomy_filled = anatomy.format(template_data)
                dst = anatomy_filled.render.path
-                template = anatomy.render.path

                instance.data["transfers"].append([src, dst])

+            template_data["frame"] = "#####"
+            anatomy_filled = anatomy.format(template_data)
+            path_to_save = anatomy_filled.render.path
+            template = anatomy.render.fullpath
+            self.log.debug('ext[1:]: {}'.format(ext[1:]))
+
            representation = {
                "schema": "pype:representation-2.0",
                "type": "representation",
                "parent": version_id,
                "name": ext[1:],
-                "data": {'path': dst, 'template': template},
+                "data": {'path': path_to_save, 'template': template},
                "dependencies": instance.data.get("dependencies", "").split(),

                # Imprint shortcut to context
                # for performance reasons.
                "context": {
                    "root": root,
-                    "project": PROJECT,
-                    "projectcode": project['data']['code'],
-                    'task': api.Session["AVALON_TASK"],
+                    "project": {
+                        "name": PROJECT,
+                        "code": project['data']['code']
+                    },
+                    "task": api.Session["AVALON_TASK"],
                    "silo": asset['silo'],
                    "asset": ASSET,
                    "family": instance.data['family'],
                    "subset": subset["name"],
-                    "VERSION": version["name"],
+                    "version": int(version["name"]),
                    "hierarchy": hierarchy,
                    "representation": ext[1:]
                }

@@ -133,14 +133,14 @@ class SubmitDependentImageSequenceJobDeadline(pyblish.api.InstancePlugin):

    def process(self, instance):

-        # AVALON_DEADLINE = api.Session.get("AVALON_DEADLINE",
-        #                                   "http://localhost:8082")
-        # assert AVALON_DEADLINE, "Requires AVALON_DEADLINE"
+        AVALON_DEADLINE = api.Session.get("AVALON_DEADLINE",
+                                          "http://localhost:8082")
+        assert AVALON_DEADLINE, "Requires AVALON_DEADLINE"

-        try:
-            deadline_url = os.environ["DEADLINE_REST_URL"]
-        except KeyError:
-            self.log.error("Deadline REST API url not found.")
+        # try:
+        #     deadline_url = os.environ["DEADLINE_REST_URL"]
+        # except KeyError:
+        #     self.log.error("Deadline REST API url not found.")

        # Get a submission job
        job = instance.data.get("deadlineSubmissionJob")

@@ -326,7 +326,7 @@ class SubmitDependentImageSequenceJobDeadline(pyblish.api.InstancePlugin):
        self.log.info("Submitting..")
        self.log.info(json.dumps(payload, indent=4, sort_keys=True))

-        url = "{}/api/jobs".format(deadline_url)
+        url = "{}/api/jobs".format(AVALON_DEADLINE)
        response = requests.post(url, json=payload)
        if not response.ok:
            raise Exception(response.text)

pype/plugins/global/publish/validate_ffmpeg_installed.py (new file, 28 lines)
@@ -0,0 +1,28 @@
import pyblish.api
import os
import subprocess


class ValidateFfmpegInstallef(pyblish.api.Validator):
    """Validate availability of ffmpeg tool in PATH"""

    order = pyblish.api.ValidatorOrder
    label = 'Validate ffmpeg installation'
    families = ['review']
    optional = True

    def is_tool(self, name):
        try:
            devnull = open(os.devnull, "w")
            subprocess.Popen(
                [name], stdout=devnull, stderr=devnull
            ).communicate()
        except OSError as e:
            if e.errno == os.errno.ENOENT:
                return False
        return True

    def process(self, instance):
        if self.is_tool('ffmpeg') is False:
            self.log.error("ffmpeg not found in PATH")
            raise RuntimeError('ffmpeg not installed.')
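
Note: is_tool() above detects a missing binary by spawning it and treating ENOENT
as "not installed"; the os.errno lookup works only because Python 2's os module
re-exports errno. A sketch of an equivalent check that resolves the name against
PATH without spawning a process:

    from distutils.spawn import find_executable

    def is_tool(name):
        # True when `name` resolves to an executable somewhere on PATH.
        return find_executable(name) is not None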

@@ -19,8 +19,7 @@ class ValidateTemplates(pyblish.api.ContextPlugin):
        data = { "project": {"name": "D001_projectsx",
                             "code": "prjX"},
                 "representation": "exr",
-                 "VERSION": 3,
-                 "SUBVERSION": 10,
+                 "version": 3,
                 "task": "animation",
                 "asset": "sh001",
                 "hierarchy": "ep101/sq01/sh010"}

@@ -32,8 +31,7 @@ class ValidateTemplates(pyblish.api.ContextPlugin):
        data = { "project": {"name": "D001_projectsy",
                             "code": "prjY"},
                 "representation": "abc",
-                 "VERSION": 1,
-                 "SUBVERSION": 5,
+                 "version": 1,
                 "task": "lookdev",
                 "asset": "bob",
                 "hierarchy": "ep101/sq01/bob"}

@@ -4,6 +4,7 @@ import pype.maya.lib as lib

 from avalon.vendor import requests
 import avalon.maya
+import os


 class CreateRenderGlobals(avalon.maya.Creator):

@@ -32,13 +33,14 @@ class CreateRenderGlobals(avalon.maya.Creator):
        else:
            pools = response.json()
        self.data["primaryPool"] = pools
-        # We add a string "-" to allow the user to not set any secondary pools
+        # We add a string "-" to allow the user to not
+        # set any secondary pools
        self.data["secondaryPool"] = ["-"] + pools

        # We don't need subset or asset attributes
-        self.data.pop("subset", None)
-        self.data.pop("asset", None)
-        self.data.pop("active", None)
+        # self.data.pop("subset", None)
+        # self.data.pop("asset", None)
+        # self.data.pop("active", None)

        self.data["suspendPublishJob"] = False
        self.data["extendFrames"] = False

@@ -1,6 +1,7 @@
 from avalon import api
 import pype.maya.plugin
+import os
 import pymel.core as pm


 class AssProxyLoader(pype.maya.plugin.ReferenceLoader):

@@ -37,7 +38,6 @@ class AssProxyLoader(pype.maya.plugin.ReferenceLoader):

        # Set attributes
        proxyShape = pm.ls(nodes, type="mesh")[0]
-        proxyShape = pm.ls(nodes, type="mesh")[0]

        proxyShape.aiTranslator.set('procedural')
        proxyShape.dso.set(path)

@@ -51,6 +51,67 @@ class AssProxyLoader(pype.maya.plugin.ReferenceLoader):
    def switch(self, container, representation):
        self.update(container, representation)

+    def update(self, container, representation):
+
+        import os
+        from maya import cmds
+
+        node = container["objectName"]
+
+        path = api.get_representation_path(representation)
+        # path = self.fname
+        proxyPath = os.path.splitext(path)[0] + ".ma"
+
+        # Get reference node from container members
+        members = cmds.sets(node, query=True, nodesOnly=True)
+        reference_node = self._get_reference_node(members)
+
+        assert os.path.exists(path), "%s does not exist." % proxyPath
+
+        try:
+            content = cmds.file(proxyPath,
+                                loadReference=reference_node,
+                                type="mayaAscii",
+                                returnNewNodes=True)
+
+            # Set attributes
+            proxyShape = pm.ls(content, type="mesh")[0]
+
+            proxyShape.aiTranslator.set('procedural')
+            proxyShape.dso.set(path)
+            proxyShape.aiOverrideShaders.set(0)
+
+        except RuntimeError as exc:
+            # When changing a reference to a file that has load errors the
+            # command will raise an error even if the file is still loaded
+            # correctly (e.g. when raising errors on Arnold attributes)
+            # When the file is loaded and has content, we consider it's fine.
+            if not cmds.referenceQuery(reference_node, isLoaded=True):
+                raise
+
+            content = cmds.referenceQuery(reference_node,
+                                          nodes=True,
+                                          dagPath=True)
+            if not content:
+                raise
+
+            self.log.warning("Ignoring file read error:\n%s", exc)
+
+        # Add new nodes of the reference to the container
+        cmds.sets(content, forceElement=node)
+
+        # Remove any placeHolderList attribute entries from the set that
+        # are remaining from nodes being removed from the referenced file.
+        members = cmds.sets(node, query=True)
+        invalid = [x for x in members if ".placeHolderList" in x]
+        if invalid:
+            cmds.sets(invalid, remove=node)
+
+        # Update metadata
+        cmds.setAttr("{}.representation".format(node),
+                     str(representation["_id"]),
+                     type="string")
+
+
 class AssStandinLoader(api.Loader):
     """Load .ASS file as standin"""

@@ -99,10 +160,6 @@ class AssStandinLoader(api.Loader):
        # Set the standin filepath
        standinShape.dso.set(self.fname)

-        # Lock parenting of the transform and standin
-        cmds.lockNode([root, standin], lock=True)
-
        nodes = [root, standin]
        self[:] = nodes

@@ -15,7 +15,7 @@ class CollectModelData(pyblish.api.InstancePlugin):

    """

-    order = pyblish.api.CollectorOrder + 0.499
+    order = pyblish.api.CollectorOrder + 0.4
    label = 'Collect Model Data'
    families = ["model"]

@@ -26,4 +26,7 @@ class CollectModelData(pyblish.api.InstancePlugin):
        instance.data['endFrame'] = frame

        # make ftrack publishable
-        instance.data["families"] = ['ftrack']
+        if instance.data.get('families'):
+            instance.data['families'].append('ftrack')
+        else:
+            instance.data['families'] = ['ftrack']

pype/plugins/maya/publish/collect_remove_marked.py (new file, 24 lines)
@@ -0,0 +1,24 @@
import pyblish.api


class CollectRemoveMarked(pyblish.api.ContextPlugin):
    """Collect model data

    Ensures always only a single frame is extracted (current frame).

    Note:
        This is a workaround so that the `pype.model` family can use the
        same pointcache extractor implementation as animation and pointcaches.
        This always enforces the "current" frame to be published.

    """

    order = pyblish.api.CollectorOrder + 0.499
    label = 'Remove Marked Instances'

    def process(self, context):

        # make ftrack publishable
        for instance in context:
            if instance.data.get('remove'):
                context.remove(instance)

@@ -4,7 +4,8 @@ import pymel.core as pm
 import pyblish.api
 import avalon.api

-class CollectReviewData(pyblish.api.InstancePlugin):
+
+class CollectReview(pyblish.api.InstancePlugin):
     """Collect Review data

     """

@@ -15,12 +16,9 @@ class CollectReviewData(pyblish.api.InstancePlugin):

    def process(self, instance):

-        # make ftrack publishable
-        instance.data["families"] = ['ftrack']
-        context = instance.context
+        self.log.debug('instance: {}'.format(instance))

        task = avalon.api.Session["AVALON_TASK"]
-        # pseudo code

        # get cameras
        members = instance.data['setMembers']

@@ -33,7 +31,7 @@ class CollectReviewData(pyblish.api.InstancePlugin):
        camera = cameras[0]
        self.log.debug('camera: {}'.format(camera))

-        objectset = context.data['objectsets']
+        objectset = instance.context.data['objectsets']

        reviewable_subset = None
        reviewable_subset = list(set(members) & set(objectset))

@@ -41,14 +39,37 @@ class CollectReviewData(pyblish.api.InstancePlugin):
        assert len(reviewable_subset) <= 1, "Multiple subsets for review"
        self.log.debug('subset for review: {}'.format(reviewable_subset))

-        for inst in context:
-            self.log.debug('instance: {}'.format(instance))
+        i = 0
+        for inst in instance.context:

            self.log.debug('processing {}'.format(inst))
+            self.log.debug('processing2 {}'.format(instance.context[i]))
+            data = instance.context[i].data

            if inst.name == reviewable_subset[0]:
-                inst.data['families'].append('review')
-                inst.data['review_camera'] = camera
-                self.log.info('adding review family to {}'.format(reviewable_subset))
+                if data.get('families'):
+                    data['families'].append('review')
+                else:
+                    data['families'] = ['review']
+                self.log.debug('adding review family to {}'.format(reviewable_subset))
+                data['review_camera'] = camera
+                data["publish"] = False
+                data['startFrameReview'] = instance.data['startFrame']
+                data['endFrameReview'] = instance.data['endFrame']
+                data['handles'] = instance.data['handles']
+                data['step'] = instance.data['step']
+                data['fps'] = instance.data['fps']
-                cmds.setAttr(str(instance) + '.active', 0)
-                inst.data['publish'] = 0
+                instance.context[i].data.update(data)
+                instance.data['remove'] = True
+            i += 1
+        else:
+            instance.data['subset'] = task + 'Review'
+            instance.data['review_camera'] = camera
+            instance.data['startFrameReview'] = instance.data['startFrame']
+            instance.data['endFrameReview'] = instance.data['endFrame']
+
+            # make ftrack publishable
+            instance.data["families"] = ['ftrack']
+
+            cmds.setAttr(str(instance) + '.active', 1)

@@ -1,38 +1,26 @@
 import os
-import subprocess
 import contextlib
-import time
-import sys

 import capture_gui
 import clique

 import pype.maya.lib as lib
 import pype.api
-import avalon.maya

-from maya import cmds
+from maya import cmds, mel
 import pymel.core as pm
 from pype.vendor import ffmpeg
 reload(ffmpeg)

+import avalon.maya

-# import maya_utils as mu
-
-# from tweakHUD import master
-# from tweakHUD import draft_hud as dHUD
-# from tweakHUD import ftrackStrings as fStrings
-
-#
-# def soundOffsetFunc(oSF, SF, H):
-#     tmOff = (oSF - H) - SF
-#     return tmOff


+# TODO: move codec settings to presets
 class ExtractQuicktime(pype.api.Extractor):
-    """Extract a Camera as Alembic.
+    """Extract Quicktime from viewport capture.

-    The cameras gets baked to world space by default. Only when the instance's
-    `bakeToWorldSpace` is set to False it will include its full hierarchy.
+    Takes review camera and creates review Quicktime video based on viewport
+    capture.

     """

@@ -44,8 +32,17 @@ class ExtractQuicktime(pype.api.Extractor):
    def process(self, instance):
        self.log.info("Extracting capture..")

-        start = instance.data.get("startFrame", 1)
-        end = instance.data.get("endFrame", 25)
+        # get scene fps
+        fps = mel.eval('currentTimeUnitToFPS()')
+
+        # if start and end frames cannot be determined, get them
+        # from Maya timeline
+        start = instance.data.get("startFrameReview")
+        end = instance.data.get("endFrameReview")
+        if start is None:
+            start = cmds.playbackOptions(query=True, animationStartTime=True)
+        if end is None:
+            end = cmds.playbackOptions(query=True, animationEndTime=True)
+        self.log.info("start: {}, end: {}".format(start, end))
        handles = instance.data.get("handles", 0)
        if handles:

@@ -53,46 +50,7 @@ class ExtractQuicktime(pype.api.Extractor):
            end += handles

        # get cameras
-        members = instance.data['setMembers']
+        camera = instance.data['review_camera']
-        # cameras = cmds.ls(members, leaf=True, shapes=True, long=True,
-        #                   dag=True, type="camera")
-
-        # # validate required settings
-        # assert len(cameras) == 1, "Not a single camera found in extraction"
-        # camera = cameras[0]
-
-
-        # project_code = ftrack_data['Project']['code']
-        # task_type = ftrack_data['Task']['type']
-        #
-        # # load Preset
-        # studio_repos = os.path.abspath(os.environ.get('studio_repos'))
-        # shot_preset_path = os.path.join(studio_repos, 'maya',
-        #                                 'capture_gui_presets',
-        #                                 (project_code + '_' + task_type + '_' + asset + '.json'))
-        #
-        # task_preset_path = os.path.join(studio_repos, 'maya',
-        #                                 'capture_gui_presets',
-        #                                 (project_code + '_' + task_type + '.json'))
-        #
-        # project_preset_path = os.path.join(studio_repos, 'maya',
-        #                                    'capture_gui_presets',
-        #                                    (project_code + '.json'))
-        #
-        # default_preset_path = os.path.join(studio_repos, 'maya',
-        #                                    'capture_gui_presets',
-        #                                    'default.json')
-        #
-        # if os.path.isfile(shot_preset_path):
-        #     preset_to_use = shot_preset_path
-        # elif os.path.isfile(task_preset_path):
-        #     preset_to_use = task_preset_path
-        # elif os.path.isfile(project_preset_path):
-        #     preset_to_use = project_preset_path
-        # else:
-        #     preset_to_use = default_preset_path

        capture_preset = ""
        try:
            preset = lib.load_capture_preset(capture_preset)

@@ -100,15 +58,13 @@ class ExtractQuicktime(pype.api.Extractor):
            preset = {}
        self.log.info('using viewport preset: {}'.format(capture_preset))

        #preset["off_screen"] = False

        preset['camera'] = camera
        preset['format'] = "image"
        # preset['compression'] = "qt"
        preset['quality'] = 50
        preset['compression'] = "jpg"
-        preset['start_frame'] = 1
-        preset['end_frame'] = 25
+        preset['start_frame'] = start
+        preset['end_frame'] = end
        preset['camera_options'] = {
            "displayGateMask": False,
            "displayResolution": False,

@@ -143,50 +99,34 @@ class ExtractQuicktime(pype.api.Extractor):
        self.log.info("file list {}".format(playblast))
-        # self.log.info("Calculating HUD data overlay")

-        # stagingdir = "C:/Users/milan.kolar/AppData/Local/Temp/pyblish_tmp_ucsymm"
        collected_frames = os.listdir(stagingdir)
        collections, remainder = clique.assemble(collected_frames)
-        input_path = os.path.join(stagingdir, collections[0].format('{head}{padding}{tail}'))
+        input_path = os.path.join(
+            stagingdir, collections[0].format('{head}{padding}{tail}'))
        self.log.info("input {}".format(input_path))

        movieFile = filename + ".mov"
        full_movie_path = os.path.join(stagingdir, movieFile)
        self.log.info("output {}".format(full_movie_path))
-        # fls = [os.path.join(stagingdir, filename).replace("\\","/") for f in os.listdir( dir_path ) if f.endswith(preset['compression'])]
-        # self.log.info("file list {}}".format(fls[0]))

-        out, err = (
-            ffmpeg
-            .input(input_path, framerate=25)
-            .output(full_movie_path)
-            .run(overwrite_output=True)
-        )
+        with avalon.maya.suspended_refresh():
+            try:
+                (
+                    ffmpeg
+                    .input(input_path, framerate=fps, start_number=int(start))
+                    .output(full_movie_path)
+                    .run(overwrite_output=True,
+                         capture_stdout=True,
+                         capture_stderr=True)
+                )
+            except ffmpeg.Error as e:
+                ffmpeg_error = 'ffmpeg error: {}'.format(e.stderr)
+                self.log.error(ffmpeg_error)
+                raise RuntimeError(ffmpeg_error)

        if "files" not in instance.data:
            instance.data["files"] = list()
        instance.data["files"].append(movieFile)

-        # ftrackStrings = fStrings.annotationData()
-        # nData = ftrackStrings.niceData
-        # nData['version'] = instance.context.data('version')
-        # fFrame = int(pm.playbackOptions( q = True, minTime = True))
-        # eFrame = int(pm.playbackOptions( q = True, maxTime = True))
-        # nData['frame'] = [(str("{0:05d}".format(f))) for f in range(fFrame, eFrame + 1)]
-        # soundOfst = int(float(nData['oFStart'])) - int(float(nData['handle'])) - fFrame
-        # soundFile = mu.giveMePublishedAudio()
-        # self.log.info("SOUND offset %s" % str(soundOfst))
-        # self.log.info("SOUND source video to %s" % str(soundFile))
-        # ann = dHUD.draftAnnotate()
-        # if soundFile:
-        #     ann.addAnotation(seqFls = fls, outputMoviePth = movieFullPth, annotateDataArr = nData, soundFile = soundFile, soundOffset = soundOfst)
-        # else:
-        #     ann.addAnotation(seqFls = fls, outputMoviePth = movieFullPth, annotateDataArr = nData)
-
-        # for f in fls:
-        #     os.remove(f)
-
-        # playblast = (ann.expPth).replace("\\","/")


 @contextlib.contextmanager
 def maintained_time():
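
Note: in the ffmpeg-python API used above, e.stderr is populated only because the
call passes capture_stderr=True to .run(). A standalone sketch of the same
transcode, with the sequence name and frame numbers invented for illustration:

    import ffmpeg  # ffmpeg-python

    # Encode an image sequence (shot.1001.jpg, shot.1002.jpg, ...) into a .mov,
    # matching the capture framerate and the first frame number.
    (
        ffmpeg
        .input('shot.%04d.jpg', framerate=25, start_number=1001)
        .output('shot.mov')
        .run(overwrite_output=True, capture_stdout=True, capture_stderr=True)
    )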
@ -1,323 +0,0 @@
|
|||
import os
|
||||
import json
|
||||
import getpass
|
||||
|
||||
from maya import cmds
|
||||
|
||||
from avalon import api
|
||||
from avalon.vendor import requests
|
||||
|
||||
import pyblish.api
|
||||
|
||||
import pype.maya.lib as lib
|
||||
|
||||
|
||||
def get_renderer_variables(renderlayer=None):
|
||||
"""Retrieve the extension which has been set in the VRay settings
|
||||
|
||||
Will return None if the current renderer is not VRay
|
||||
For Maya 2016.5 and up the renderSetup creates renderSetupLayer node which
|
||||
start with `rs`. Use the actual node name, do NOT use the `nice name`
|
||||
|
||||
Args:
|
||||
renderlayer (str): the node name of the renderlayer.
|
||||
|
||||
Returns:
|
||||
dict
|
||||
"""
|
||||
|
||||
renderer = lib.get_renderer(renderlayer or lib.get_current_renderlayer())
|
||||
render_attrs = lib.RENDER_ATTRS.get(renderer, lib.RENDER_ATTRS["default"])
|
||||
|
||||
padding = cmds.getAttr("{}.{}".format(render_attrs["node"],
|
||||
render_attrs["padding"]))
|
||||
|
||||
filename_0 = cmds.renderSettings(fullPath=True, firstImageName=True)[0]
|
||||
|
||||
if renderer == "vray":
|
||||
# Maya's renderSettings function does not return V-Ray file extension
|
||||
# so we get the extension from vraySettings
|
||||
extension = cmds.getAttr("vraySettings.imageFormatStr")
|
||||
|
||||
# When V-Ray image format has not been switched once from default .png
|
||||
# the getAttr command above returns None. As such we explicitly set
|
||||
# it to `.png`
|
||||
if extension is None:
|
||||
extension = "png"
|
||||
|
||||
filename_prefix = "<Scene>/<Scene>_<Layer>/<Layer>"
|
||||
else:
|
||||
# Get the extension, getAttr defaultRenderGlobals.imageFormat
|
||||
# returns an index number.
|
||||
filename_base = os.path.basename(filename_0)
|
||||
extension = os.path.splitext(filename_base)[-1].strip(".")
|
||||
filename_prefix = "<Scene>/<RenderLayer>/<RenderLayer>"
|
||||
|
||||
return {"ext": extension,
|
||||
"filename_prefix": filename_prefix,
|
||||
"padding": padding,
|
||||
"filename_0": filename_0}
|
||||
|
||||
|
||||
def preview_fname(folder, scene, layer, padding, ext):
|
||||
"""Return output file path with #### for padding.
|
||||
|
||||
Deadline requires the path to be formatted with # in place of numbers.
|
||||
For example `/path/to/render.####.png`
|
||||
|
||||
Args:
|
||||
folder (str): The root output folder (image path)
|
||||
scene (str): The scene name
|
||||
layer (str): The layer name to be rendered
|
||||
padding (int): The padding length
|
||||
ext(str): The output file extension
|
||||
|
||||
Returns:
|
||||
str
|
||||
|
||||
"""
|
||||
|
||||
# Following hardcoded "<Scene>/<Scene>_<Layer>/<Layer>"
|
||||
output = "{scene}/{layer}/{layer}.{number}.{ext}".format(
|
||||
scene=scene,
|
||||
layer=layer,
|
||||
number="#" * padding,
|
||||
ext=ext
|
||||
)
|
||||
|
||||
return os.path.join(folder, output)
|
||||
|
||||
|
||||
class MayaSubmitDeadline(pyblish.api.InstancePlugin):
|
||||
"""Submit available render layers to Deadline
|
||||
|
||||
Renders are submitted to a Deadline Web Service as
|
||||
supplied via the environment variable DEADLINE_REST_URL
|
||||
|
||||
"""
|
||||
|
||||
label = "Submit to Deadline"
|
||||
order = pyblish.api.IntegratorOrder + 0.1
|
||||
hosts = ["maya"]
|
||||
families = ["renderlayer"]
|
||||
|
||||
def process(self, instance):
|
||||
|
||||
self.log.debug('Starting deadline submitter')
|
||||
|
||||
try:
|
||||
deadline_url = os.environ["DEADLINE_REST_URL"]
|
||||
except KeyError:
|
||||
self.log.error("Deadline REST API url not found.")
|
||||
|
||||
# AVALON_DEADLINE = api.Session.get("AVALON_DEADLINE",
|
||||
# "http://localhost:8082")
|
||||
# assert AVALON_DEADLINE, "Requires AVALON_DEADLINE
|
||||
|
||||
context = instance.context
|
||||
|
||||
filepath = None
|
||||
|
||||
allInstances = []
for result in context.data["results"]:
    if (result["instance"] is not None and
            result["instance"] not in allInstances):
        allInstances.append(result["instance"])

for inst in allInstances:
    print(inst)
    if inst.data['family'] == 'scene':
        filepath = inst.data['destination_list'][0]

if not filepath:
    filepath = context.data["currentFile"]

self.log.debug(filepath)

workspace = context.data["workspaceDir"]
filename = os.path.basename(filepath)
comment = context.data.get("comment", "")
scene = os.path.splitext(filename)[0]
dirname = os.path.join(workspace, "renders")
renderlayer = instance.data['setMembers']          # rs_beauty
renderlayer_name = instance.data['subset']         # beauty
renderlayer_globals = instance.data["renderGlobals"]
legacy_layers = renderlayer_globals["UseLegacyRenderLayers"]
deadline_user = context.data.get("deadlineUser", getpass.getuser())
jobname = "%s - %s" % (filename, instance.name)

# Get the variables depending on the renderer
render_variables = get_renderer_variables(renderlayer)
output_filename_0 = preview_fname(folder=dirname,
                                  scene=scene,
                                  layer=renderlayer_name,
                                  padding=render_variables["padding"],
                                  ext=render_variables["ext"])

try:
    # Ensure render folder exists
    os.makedirs(dirname)
except OSError:
    pass

# Documentation for keys available at:
# https://docs.thinkboxsoftware.com
#    /products/deadline/8.0/1_User%20Manual/manual
#    /manual-submission.html#job-info-file-options
payload = {
    "JobInfo": {
        # Top-level group name
        "BatchName": filename,

        # Job name, as seen in Monitor
        "Name": jobname,

        # Arbitrary username, for visualisation in Monitor
        "UserName": deadline_user,

        "Plugin": instance.data.get("mayaRenderPlugin", "MayaBatch"),
        "Frames": "{start}-{end}x{step}".format(
            start=int(instance.data["startFrame"]),
            end=int(instance.data["endFrame"]),
            step=int(instance.data["byFrameStep"]),
        ),

        "Comment": comment,

        # Optional, enable double-click to preview rendered
        # frames from Deadline Monitor
        "OutputFilename0": output_filename_0.replace("\\", "/"),
    },
    "PluginInfo": {
        # Input
        "SceneFile": filepath,

        # Output directory and filename
        "OutputFilePath": dirname.replace("\\", "/"),
        "OutputFilePrefix": render_variables["filename_prefix"],

        # Mandatory for Deadline
        "Version": cmds.about(version=True),

        # Only render layers are considered renderable in this pipeline
        "UsingRenderLayers": True,

        # Use legacy Render Layer system
        "UseLegacyRenderLayers": legacy_layers,

        # Render only this layer
        "RenderLayer": renderlayer,

        # Determine which renderer to use from the file itself
        "Renderer": instance.data["renderer"],

        # Resolve relative references
        "ProjectPath": workspace,
    },

    # Mandatory for Deadline, may be empty
    "AuxFiles": []
}

# Include critical environment variables with submission
keys = [
    # This will trigger `userSetup.py` on the slave
    # such that proper initialisation happens the same
    # way as it does on a local machine.
    # TODO(marcus): This won't work if the slaves don't
    # have access to these paths, such as if slaves are
    # running Linux and the submitter is on Windows.
    "PYTHONPATH",
    "PATH",

    "MTOA_EXTENSIONS_PATH",
    "MTOA_EXTENSIONS",
    "DYLD_LIBRARY_PATH",
    "MAYA_RENDER_DESC_PATH",
    "MAYA_MODULE_PATH",
    "ARNOLD_PLUGIN_PATH",
    "AVALON_SCHEMA",

    # todo: This is a temporary fix for yeti variables
    "PEREGRINEL_LICENSE",
    "REDSHIFT_MAYAEXTENSIONSPATH",
    # comma added here: without it, implicit string concatenation
    # would silently merge this key with the next one
    "REDSHIFT_DISABLEOUTPUTLOCKFILES",
    "VRAY_FOR_MAYA2018_PLUGINS_X64",
    "VRAY_PLUGINS_X64",
    "VRAY_USE_THREAD_AFFINITY",
    "MAYA_MODULE_PATH",
    "TOOL_ENV"
]
environment = dict({key: os.environ[key] for key in keys
                    if key in os.environ}, **api.Session)

for path in os.environ:
    if path.lower().startswith('pype_'):
        environment[path] = os.environ[path]

environment["PATH"] = os.environ["PATH"]

clean_pythonpath = ''
for path in environment['PYTHONPATH'].split(os.pathsep):
    try:
        path.decode('UTF-8', 'strict')
        clean_pythonpath += path + os.pathsep
    except UnicodeDecodeError:
        self.log.debug('path contains non UTF characters')
environment['PYTHONPATH'] = clean_pythonpath

clean_path = ''
for path in environment['PATH'].split(os.pathsep):
    clean_path += os.path.normpath(path) + os.pathsep

environment['PATH'] = clean_path

for path in environment:
    environment[path] = environment[path].replace(
        os.path.normpath(environment['PYPE_STUDIO_CORE_MOUNT']),
        environment['PYPE_STUDIO_CORE'])

payload["JobInfo"].update({
    "EnvironmentKeyValue%d" % index: "{key}={value}".format(
        key=key,
        value=environment[key]
    ) for index, key in enumerate(environment)
})
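
For reference, the dict comprehension above expands every environment variable into Deadline's indexed JobInfo keys. A minimal sketch of the resulting mapping, assuming a hypothetical two-variable environment:

# Minimal sketch, assuming a hypothetical two-variable environment.
environment = {"PYTHONPATH": "/pipeline/lib", "AVALON_SCHEMA": "/pipeline/schema"}

job_info = {
    "EnvironmentKeyValue%d" % index: "{key}={value}".format(key=key, value=value)
    for index, (key, value) in enumerate(environment.items())
}
# -> {"EnvironmentKeyValue0": "PYTHONPATH=/pipeline/lib",
#     "EnvironmentKeyValue1": "AVALON_SCHEMA=/pipeline/schema"}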

# Include optional render globals
render_globals = instance.data.get("renderGlobals", {})
payload["JobInfo"].update(render_globals)

plugin = payload["JobInfo"]["Plugin"]
self.log.info("using render plugin : {}".format(plugin))

self.preflight_check(instance)

self.log.info("Submitting..")
self.log.info(json.dumps(payload, indent=4, sort_keys=True))

# E.g. http://192.168.0.1:8082/api/jobs
url = "{}/api/jobs".format(deadline_url)
response = requests.post(url, json=payload)
if not response.ok:
    raise Exception(response.text)

# Store output dir for unified publisher (filesequence)
instance.data['source'] = filepath
instance.data["outputDir"] = os.path.dirname(output_filename_0)
instance.data["deadlineSubmissionJob"] = response.json()

def preflight_check(self, instance):
    """Ensure the startFrame, endFrame and byFrameStep are integers"""

    for key in ("startFrame", "endFrame", "byFrameStep"):
        value = instance.data[key]

        if int(value) == value:
            continue

        self.log.warning(
            "%f=%d was rounded off to nearest integer"
            % (value, int(value))
        )

@@ -51,7 +51,7 @@ def get_renderer_variables(renderlayer=None):
    # returns an index number.
    filename_base = os.path.basename(filename_0)
    extension = os.path.splitext(filename_base)[-1].strip(".")
    filename_prefix = "<Scene>/<Scene>_<RenderLayer>/<RenderLayer>"
    filename_prefix = "<Scene>/<RenderLayer>/<RenderLayer>"

    return {"ext": extension,
            "filename_prefix": filename_prefix,

@@ -78,7 +78,7 @@ def preview_fname(folder, scene, layer, padding, ext):
    """

    # Following hardcoded "<Scene>/<Scene>_<Layer>/<Layer>"
    output = "{scene}/{scene}_{layer}/{layer}.{number}.{ext}".format(
    output = "{scene}/{layer}/{layer}.{number}.{ext}".format(
        scene=scene,
        layer=layer,
        number="#" * padding,
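
To make the layout change concrete, here is a quick sketch of what the new flat pattern produces; the values are hypothetical:

# Quick sketch of the new flat pattern; values are hypothetical.
scene, layer, padding, ext = "shot010_v001", "beauty", 4, "exr"

output = "{scene}/{layer}/{layer}.{number}.{ext}".format(
    scene=scene, layer=layer, number="#" * padding, ext=ext)

print(output)  # shot010_v001/beauty/beauty.####.exr
# The old pattern would have yielded:
# shot010_v001/shot010_v001_beauty/beauty.####.exr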

@@ -97,9 +97,10 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin):
    """

    label = "Submit to Deadline"
    order = pyblish.api.IntegratorOrder
    order = pyblish.api.IntegratorOrder + 0.1
    hosts = ["maya"]
    families = ["renderlayer"]
    optional = True

    def process(self, instance):

@@ -109,7 +110,25 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin):

        context = instance.context
        workspace = context.data["workspaceDir"]
        filepath = context.data["currentFile"]

        filepath = None

        allInstances = []
        for result in context.data["results"]:
            if (result["instance"] is not None and
                    result["instance"] not in allInstances):
                allInstances.append(result["instance"])

        for inst in allInstances:
            print(inst)
            if inst.data['family'] == 'scene':
                filepath = inst.data['destination_list'][0]

        if not filepath:
            filepath = context.data["currentFile"]

        self.log.debug(filepath)

        filename = os.path.basename(filepath)
        comment = context.data.get("comment", "")
        scene = os.path.splitext(filename)[0]

@@ -203,22 +222,64 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin):
            # have access to these paths, such as if slaves are
            # running Linux and the submitter is on Windows.
            "PYTHONPATH",
            "PATH",

            "MTOA_EXTENSIONS_PATH",
            "MTOA_EXTENSIONS",
            "DYLD_LIBRARY_PATH",
            "MAYA_RENDER_DESC_PATH",
            "MAYA_MODULE_PATH",
            "ARNOLD_PLUGIN_PATH",
            "AVALON_SCHEMA",

            # todo: This is a temporary fix for yeti variables
            "PEREGRINEL_LICENSE",
            "REDSHIFT_MAYAEXTENSIONSPATH",
            "REDSHIFT_DISABLEOUTPUTLOCKFILES",
            "VRAY_FOR_MAYA2018_PLUGINS",
            "VRAY_PLUGINS",
            "REDSHIFT_DISABLEOUTPUTLOCKFILES"
            "VRAY_FOR_MAYA2018_PLUGINS_X64",
            "VRAY_PLUGINS_X64",
            "VRAY_USE_THREAD_AFFINITY",
            "MAYA_MODULE_PATH"
            "MAYA_MODULE_PATH",
            "TOOL_ENV"
        ]
        environment = dict({key: os.environ[key] for key in keys
                            if key in os.environ}, **api.Session)
        # self.log.debug("enviro: {}".format(pprint(environment)))
        for path in os.environ:
            if path.lower().startswith('pype_'):
                environment[path] = os.environ[path]

        PATHS = os.environ["PATH"].split(";")
        environment["PATH"] = ";".join([p for p in PATHS
                                        if p.startswith("P:")])
        environment["PATH"] = os.environ["PATH"]
        self.log.debug("enviro: {}".format(environment['PYPE_SCRIPTS']))
        clean_environment = {}
        for key in environment:
            clean_path = ""
            self.log.debug("key: {}".format(key))
            to_process = environment[key]
            if key == "PYPE_STUDIO_CORE_MOUNT":
                clean_path = environment[key]
            elif "://" in environment[key]:
                clean_path = environment[key]
            elif os.pathsep not in to_process:
                try:
                    path = environment[key]
                    path.decode('UTF-8', 'strict')
                    clean_path = os.path.normpath(path)
                except UnicodeDecodeError:
                    print('path contains non UTF characters')
            else:
                for path in environment[key].split(os.pathsep):
                    try:
                        path.decode('UTF-8', 'strict')
                        clean_path += os.path.normpath(path) + os.pathsep
                    except UnicodeDecodeError:
                        print('path contains non UTF characters')
            clean_path = clean_path.replace(
                os.path.normpath(environment['PYPE_STUDIO_CORE_MOUNT']),
                os.path.normpath(environment['PYPE_STUDIO_CORE']))
            clean_environment[key] = clean_path

        environment = clean_environment

        payload["JobInfo"].update({
            "EnvironmentKeyValue%d" % index: "{key}={value}".format(
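
The decode-based filtering above relies on Python 2's str.decode. A version-agnostic sketch of the same idea, assuming paths may arrive as bytes or text:

# Minimal sketch of UTF-8-safe path filtering that works on both
# Python 2 and Python 3; `raw_paths` is a hypothetical input list.
import os

def clean_paths(raw_paths):
    cleaned = []
    for path in raw_paths:
        try:
            if isinstance(path, bytes):
                path = path.decode("utf-8")  # strict decoding by default
            cleaned.append(os.path.normpath(path))
        except UnicodeDecodeError:
            # skip entries that are not valid UTF-8
            continue
    return os.pathsep.join(cleaned)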

@@ -2,7 +2,7 @@ import pyblish.api

from avalon.vendor import requests
from pype.plugin import contextplugin_should_run

import os


class ValidateDeadlineConnection(pyblish.api.ContextPlugin):
    """Validate Deadline Web Service is running"""

@@ -19,7 +19,7 @@ class ValidateDeadlineConnection(pyblish.api.ContextPlugin):
            return

        try:
            deadline_url = os.environ["DEADLINE_REST_URL"]
            AVALON_DEADLINE = os.environ["AVALON_DEADLINE"]
        except KeyError:
            self.log.error("Deadline REST API url not found.")
            raise ValueError("Deadline REST API url not found.")
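
A minimal sketch of the kind of check this validator performs, assuming the web service answers plain GET requests on its root URL (the exact endpoint is an assumption, not confirmed by the diff):

# Hedged sketch: ping the Deadline Web Service before submitting.
# `AVALON_DEADLINE` is assumed to hold e.g. "http://192.168.0.1:8082".
import os
import requests

url = os.environ["AVALON_DEADLINE"]
response = requests.get(url, timeout=5)
assert response.ok, "Deadline Web Service is not responding: %s" % url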

@@ -23,6 +23,7 @@ class ValidateMeshNonZeroEdgeLength(pyblish.api.InstancePlugin):
    version = (0, 1, 0)
    label = 'Mesh Edge Length Non Zero'
    actions = [pype.maya.action.SelectInvalidAction]
    optional = True

    __tolerance = 1e-5

@@ -9,7 +9,7 @@ def get_file_rule(rule):
    return mel.eval('workspace -query -fileRuleEntry "{}"'.format(rule))


class ValidateRenderImageRule(pyblish.api.ContextPlugin):
class ValidateRenderImageRule(pyblish.api.InstancePlugin):
    """Validates "images" file rule is set to "renders/"

    """

@@ -19,7 +19,7 @@ class ValidateRenderImageRule(pyblish.api.ContextPlugin):
    hosts = ["maya"]
    families = ["renderlayer"]

    def process(self, context):
    def process(self, instance):

        assert get_file_rule("images") == "renders", (
            "Workspace's `images` file rule must be set to: renders"
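
For context, the mel query above maps directly onto Maya's workspace file rules. A short sketch of how the rule can be read and repaired from Python (requires a running Maya session; the repair step is an assumption, not part of this plugin):

# Sketch: reading and setting Maya's "images" file rule.
import maya.mel as mel

current = mel.eval('workspace -query -fileRuleEntry "images"')
if current != "renders":
    # point rendered images at the "renders/" folder of the workspace
    mel.eval('workspace -fileRule "images" "renders"')
    mel.eval('workspace -saveWorkspace')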

@@ -13,6 +13,7 @@ class WriteToRender(pyblish.api.InstancePlugin):
    families = ["write"]

    def process(self, instance):
        return
        if [f for f in instance.data["families"]
                if ".frames" in f]:
            instance[0]["render"].setValue(True)

@@ -20,29 +20,37 @@ class ValidateWriteFamilies(pyblish.api.InstancePlugin):
    """ Validates write families. """

    order = pyblish.api.ValidatorOrder
    label = "Check correct writes families"
    label = "Validate writes families"
    hosts = ["nuke"]
    families = ["write"]
    actions = [pype.nuke.actions.SelectInvalidAction, pype.api.RepairAction]

    @staticmethod
    def get_invalid(instance):
    def get_invalid(self, instance):
        if not [f for f in instance.data["families"]
                if ".frames" in f]:
            return

        if not instance.data["files"]:
        if not instance.data.get('files'):
            return (instance)

    def process(self, instance):
        self.log.debug('instance.data["files"]: {}'.format(instance.data['files']))
        invalid = self.get_invalid(instance)

        invalid = self.get_invalid(self, instance)

        if invalid:
            raise ValueError(str("`{}`: Switch `Render` on! "
                                 "> {}".format(__name__, invalid)))

        self.log.info("Checked correct writes families")
        # if any(".frames" in f for f in instance.data["families"]):
        #     if not instance.data["files"]:
        #         raise ValueError("instance {} is set to publish frames
        #             but no files were collected, render the frames first or
        #             check 'render' checkbox on the node to 'ON'".format(instance))
        #
        #
        # self.log.info("Checked correct writes families")

    @classmethod
    def repair(cls, instance):
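
Side note: the new lines keep `@staticmethod` while adding a `self` parameter and then call `self.get_invalid(self, instance)`, which only works by accident. A cleaner sketch of the same validator pattern, using names from the plugin above:

# Sketch: the conventional pyblish get_invalid pattern.
class ValidateWriteFamiliesSketch(object):

    @classmethod
    def get_invalid(cls, instance):
        # only instances publishing pre-rendered frames are checked
        if not any(".frames" in f for f in instance.data["families"]):
            return None
        if not instance.data.get("files"):
            return instance
        return None

    def process(self, instance):
        invalid = self.get_invalid(instance)
        if invalid:
            raise ValueError("`{}`: Switch `Render` on!".format(invalid))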

@@ -25,7 +25,7 @@ class CrateWriteRender(avalon.nuke.Creator):
    name = "WriteRender"
    label = "Create Write Render"
    hosts = ["nuke"]
    family = "{}_write".format(preset)
    family = "write"
    families = preset
    icon = "sign-out"

@@ -34,7 +34,7 @@ class CrateWriteRender(avalon.nuke.Creator):

        data = OrderedDict()

        data["family"] = self.family.split("_")[1]
        data["family"] = self.family
        data["families"] = self.families

        {data.update({k: v}) for k, v in self.data.items()

@@ -44,15 +44,15 @@ class CrateWriteRender(avalon.nuke.Creator):
    def process(self):
        self.name = self.data["subset"]

        family = self.family.split("_")[0]
        node = self.family.split("_")[1]
        family = self.family
        node = 'write'

        instance = nuke.toNode(self.data["subset"])

        if not instance:
            write_data = {
                "class": node,
                "preset": family,
                "preset": self.preset,
                "avalon": self.data
            }

@@ -68,7 +68,7 @@ class CrateWritePrerender(avalon.nuke.Creator):
    name = "WritePrerender"
    label = "Create Write Prerender"
    hosts = ["nuke"]
    family = "{}_write".format(preset)
    family = "write"
    families = preset
    icon = "sign-out"

@@ -89,13 +89,13 @@ class CrateWritePrerender(avalon.nuke.Creator):

        instance = nuke.toNode(self.data["subset"])

        family = self.family.split("_")[0]
        node = self.family.split("_")[1]
        family = self.family
        node = 'write'

        if not instance:
            write_data = {
                "class": node,
                "preset": family,
                "preset": self.preset,
                "avalon": self.data
            }

@@ -111,7 +111,7 @@ class CrateWriteStill(avalon.nuke.Creator):
    name = "WriteStill"
    label = "Create Write Still"
    hosts = ["nuke"]
    family = "{}_write".format(preset)
    family = "write"
    families = preset
    icon = "image"

@@ -120,7 +120,7 @@ class CrateWriteStill(avalon.nuke.Creator):

        data = OrderedDict()

        data["family"] = self.family.split("_")[1]
        data["family"] = self.family
        data["families"] = self.families

        {data.update({k: v}) for k, v in self.data.items()

@@ -132,14 +132,14 @@ class CrateWriteStill(avalon.nuke.Creator):

        instance = nuke.toNode(self.data["subset"])

        family = self.family.split("_")[0]
        node = self.family.split("_")[1]
        family = self.family
        node = 'write'

        if not instance:
            write_data = {
                "frame_range": [nuke.frame(), nuke.frame()],
                "class": node,
                "preset": family,
                "preset": self.preset,
                "avalon": self.data
            }
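
To illustrate the family change across all three creators: with `family = "write"` and `families = preset`, the instance data now looks roughly like this (a sketch, the preset value is hypothetical):

# Sketch of the creator's instance data after the change.
from collections import OrderedDict

family = "write"
families = "render"          # hypothetical preset value

data = OrderedDict()
data["family"] = family      # was: self.family.split("_")[1]
data["families"] = families
# Before the change, family was e.g. "render_write" and had to be
# split; now the node class is hardcoded to 'write' instead.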

@@ -36,13 +36,13 @@ def preserve_trim(node):
    if start_at_frame:
        node['frame_mode'].setValue("start at")
        node['frame'].setValue(str(script_start))
        log.info("start frame of reader was set to"
        log.info("start frame of Read was set to"
                 "{}".format(script_start))

    if offset_frame:
        node['frame_mode'].setValue("offset")
        node['frame'].setValue(str((script_start + offset_frame)))
        log.info("start frame of reader was set to"
        log.info("start frame of Read was set to"
                 "{}".format(script_start))


@@ -67,7 +67,7 @@ def loader_shift(node, frame, relative=True):

    if relative:
        node['frame_mode'].setValue("start at")
        node['frame'].setValue(str(script_start))
        node['frame'].setValue(str(frame))

    return int(script_start)
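
A short usage sketch of `loader_shift` as changed above, assuming a freshly created Read node and a start frame taken from version data:

# Sketch: shift a Read node so the sequence starts at the shot's
# first frame. Assumes a Nuke session and the loader_shift above.
import nuke

read = nuke.createNode("Read")
start = 1001  # hypothetical startFrame from version data

loader_shift(read, start, relative=True)
# with relative=True the node is switched to "start at" mode and its
# 'frame' knob now holds "1001"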

@@ -75,8 +75,8 @@ def loader_shift(node, frame, relative=True):
class LoadSequence(api.Loader):
    """Load image sequence into Nuke"""

    families = ["write"]
    representations = ["*"]
    families = ["write", "source"]
    representations = ["exr", "dpx"]

    label = "Load sequence"
    order = -10

@@ -86,44 +86,33 @@ class LoadSequence(api.Loader):
    def load(self, context, name, namespace, data):
        from avalon.nuke import (
            containerise,
            ls_img_sequence,
            viewer_update_and_undo_stop
        )
        for k, v in context.items():
            log.info("key: `{}`, value: {}\n".format(k, v))
        # for k, v in context.items():
        #     log.info("key: `{}`, value: {}\n".format(k, v))

        version = context['version']
        version_data = version.get("data", {})

        first = version_data.get("startFrame", None)
        last = version_data.get("endFrame", None)

        # Fallback to asset name when namespace is None
        if namespace is None:
            namespace = context['asset']['name']

        # Use the first file for now
        # TODO: fix path fname
        file = ls_img_sequence(os.path.dirname(self.fname), one=True)
        log.info("file: {}\n".format(file))
        file = self.fname
        log.info("file: {}\n".format(self.fname))

        read_name = "Read_" + context["representation"]["context"]["subset"]

        # Create the Loader with the filename path set
        with viewer_update_and_undo_stop():
            # TODO: it might be universal read to img/geo/camera
            r = nuke.createNode(
                "Read",
                "name {}".format(read_name))
            r["file"].setValue(file['path'])
            if len(file['frames']) is 1:
                first = file['frames'][0][0]
                last = file['frames'][0][1]
                r["origfirst"].setValue(first)
                r["first"].setValue(first)
                r["origlast"].setValue(last)
                r["last"].setValue(last)
            else:
                first = file['frames'][0][0]
                last = file['frames'][:-1][1]
                r["origfirst"].setValue(first)
                r["first"].setValue(first)
                r["origlast"].setValue(last)
                r["last"].setValue(last)
                log.warning("Missing frames in image sequence")
            r["file"].setValue(self.fname)

        # Set colorspace defined in version data
        colorspace = context["version"]["data"].get("colorspace", None)

@@ -134,6 +123,10 @@ class LoadSequence(api.Loader):
        start = context["version"]["data"].get("startFrame", None)
        if start is not None:
            loader_shift(r, start, relative=True)
            r["origfirst"].setValue(first)
            r["first"].setValue(first)
            r["origlast"].setValue(last)
            r["last"].setValue(last)

        # add additional metadata from the version to imprint to Avalon knob
        add_keys = ["startFrame", "endFrame", "handles",

@@ -142,8 +135,9 @@ class LoadSequence(api.Loader):
        data_imprint = {}
        for k in add_keys:
            data_imprint.update({k: context["version"]['data'][k]})
        data_imprint.update({"objectName": read_name})

        containerise(r,
        return containerise(r,
                     name=name,
                     namespace=namespace,
                     context=context,

@@ -168,9 +162,9 @@ class LoadSequence(api.Loader):
            update_container
        )
        log.info("this i can see")
        node = container["_tool"]
        # TODO: prepare also for other readers img/geo/camera
        assert node.Class() == "Reader", "Must be Reader"
        node = nuke.toNode(container['objectName'])
        # TODO: prepare also for other Read img/geo/camera
        assert node.Class() == "Read", "Must be Read"

        root = api.get_representation_path(representation)
        file = ls_img_sequence(os.path.dirname(root), one=True)

@@ -189,7 +183,7 @@ class LoadSequence(api.Loader):

        # Update the loader's path whilst preserving some values
        with preserve_trim(node):
            node["file"] = file["path"]
            node["file"].setValue(file["path"])

        # Set the global in to the start frame of the sequence
        global_in_changed = loader_shift(node, start, relative=False)

@@ -208,8 +202,8 @@ class LoadSequence(api.Loader):

        from avalon.nuke import viewer_update_and_undo_stop

        node = container["_tool"]
        assert node.Class() == "Reader", "Must be Reader"
        node = nuke.toNode(container['objectName'])
        assert node.Class() == "Read", "Must be Read"

        with viewer_update_and_undo_stop():
            nuke.delete(node)

@@ -4,7 +4,7 @@ import pyblish.api
class SelectCurrentFile(pyblish.api.ContextPlugin):
    """Inject the current working file into context"""

    order = pyblish.api.CollectorOrder
    order = pyblish.api.CollectorOrder - 0.5
    hosts = ["nuke"]

    def process(self, context):

@@ -2,46 +2,46 @@ import pyblish.api


@pyblish.api.log
class CollectInstanceFamilies(pyblish.api.ContextPlugin):
class CollectInstanceFamilies(pyblish.api.InstancePlugin):
    """Collect families for all instances"""

    order = pyblish.api.CollectorOrder + 0.2
    label = "Collect Families"
    hosts = ["nuke", "nukeassist"]
    families = ['write']

    def process(self, context):
        for instance in context.data["instances"]:
    def process(self, instance):

        if "write" in instance.data["family"]:
            node = instance[0]
            node = instance[0]

            # set for ftrack to accept
            instance.data["families"] = ["ftrack"]
            self.log.info('processing {}'.format(node))

            if not node["render"].value():
                families = ["{}.frames".format(
                    instance.data["avalonKnob"]["families"])]
                # to ignore staging dir op in integrate
                instance.data['transfer'] = False
            else:
                # dealing with local/farm rendering
                if node["render_farm"].value():
                    families = ["{}.farm".format(
                        instance.data["avalonKnob"]["families"])]
                else:
                    families = ["{}.local".format(
                        instance.data["avalonKnob"]["families"])]
            families = []
            if instance.data.get('families'):
                families.append(instance.data['families'])

            instance.data["families"].extend(families)
            # set for ftrack to accept
            # instance.data["families"] = ["ftrack"]

        elif "source" in instance.data["family"]:
            families = []
            families.append(instance.data["avalonKnob"]["families"])
            if node["render"].value():
                # dealing with local/farm rendering
                if node["render_farm"].value():
                    families.append("render.farm")
                else:
                    families.append("render.local")
            else:
                families.append("render.frames")
                # to ignore staging dir op in integrate
                instance.data['transfer'] = False

            families.append('ftrack')

            instance.data["families"] = families

        instance.data["families"] = families

        # Sort/grouped by family (preserving local index)
        context[:] = sorted(context, key=self.sort_by_family)
        instance.context[:] = sorted(instance.context, key=self.sort_by_family)

    def sort_by_family(self, instance):
        """Sort by family"""
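
The branching above reduces to a small decision table. A sketch of the resulting family list for a write node (knob names taken from the plugin, values hypothetical):

# Sketch: how the collector derives families from a write node's knobs.
def build_families(render, render_farm, base="render"):
    families = [base]
    if render:
        families.append("render.farm" if render_farm else "render.local")
    else:
        # frames are pre-rendered; staging-dir transfer is skipped
        families.append("render.frames")
    families.append("ftrack")
    return families

print(build_families(render=True, render_farm=False))
# ['render', 'render.local', 'ftrack']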

@@ -56,8 +56,8 @@ class CollectNukeInstances(pyblish.api.ContextPlugin):
                "fps": int(nuke.root()['fps'].value())
            })
            if node.Class() == "Write":
                instance.data["families"] = [avalon_knob_data["families"]]
            # if node.Class() == "Write":
            #     instance.data["families"] = [avalon_knob_data["families"]]
            self.log.info("collected instance: {}".format(instance.data))
            instances.append(instance)

@@ -14,14 +14,16 @@ class CollectReview(pyblish.api.InstancePlugin):
    family_targets = [".local", ".frames"]

    def process(self, instance):
        pass
        families = [(f, search) for f in instance.data["families"]
                    for search in self.family_targets
                    if search in f][0]

        if families:
            root_femilies = families[0].replace(families[1], "")
            instance.data["families"].append(".".join([
                root_femilies,
                self.family
            ]))
            root_families = families[0].replace(families[1], "")
            # instance.data["families"].append(".".join([
            #     root_families,
            #     self.family
            # ]))
            instance.data["families"].append("render.review")
            self.log.info("Review collected: `{}`".format(instance))

@@ -3,6 +3,7 @@ import tempfile
import nuke
import pyblish.api
import logging
import pype.api as pype


log = logging.getLogger(__name__)

@@ -50,6 +51,11 @@ class CollectNukeWrites(pyblish.api.ContextPlugin):
            output_dir = os.path.dirname(path)
            self.log.debug('output dir: {}'.format(output_dir))

            # get version
            version = pype.get_version_from_path(path)
            instance.data['version'] = version
            self.log.debug('Write Version: %s' % instance.data('version'))

            # create label
            name = node.name()
            # Include start and end render frame in label
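
The renamed helper pulls a version number out of any path. A minimal sketch of what `get_version_from_path` is expected to do; the regex is an assumption, not necessarily the pype implementation:

# Sketch of a version-from-path helper; the pattern is an assumption.
import re

def get_version_from_path(path):
    """Return the last vNNN number found in `path`, or None."""
    matches = re.findall(r"v(\d+)", path)
    return int(matches[-1]) if matches else None

print(get_version_from_path("shots/sh010/comp/sh010_comp_v012.nk"))  # 12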

@@ -64,14 +70,13 @@ class CollectNukeWrites(pyblish.api.ContextPlugin):
            # collect families in next file
            if "files" not in instance.data:
                instance.data["files"] = list()

            try:
                collected_frames = os.listdir(output_dir)
                self.log.debug("collected_frames: {}".format(label))
                instance.data["files"].append(collected_frames)
            except Exception:
                pass
                self.log.debug("couldn't collect frames: {}".format(label))

            instance.data.update({
                "path": path,

@@ -84,6 +89,8 @@ class CollectNukeWrites(pyblish.api.ContextPlugin):
                "colorspace": node["colorspace"].value(),
            })

            self.log.debug("instance.data: {}".format(instance.data))

        self.log.debug("context: {}".format(context))

@@ -2,6 +2,7 @@ import pyblish.api
import nuke
import os
import pype
import clique


class NukeRenderLocal(pype.api.Extractor):

@@ -66,5 +67,11 @@ class NukeRenderLocal(pype.api.Extractor):
            output_dir
        ))

        collections, remainder = clique.assemble(*instance.data['files'])
        self.log.info('collections: {}'.format(str(collections)))

        collection = collections[0]
        instance.data['collection'] = collection

        self.log.info('Finished render')
        return
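
For reference, `clique.assemble` groups file names into padded sequences; the `*` above is needed because `instance.data['files']` holds a list of lists. A small sketch with hypothetical frame names:

# Sketch: grouping rendered frames into a sequence with clique.
import clique

files = ["beauty.1001.exr", "beauty.1002.exr", "beauty.1003.exr"]
collections, remainder = clique.assemble(files)

collection = collections[0]
print(collection)                  # roughly: beauty.%04d.exr [1001-1003]
print(sorted(collection.indexes))  # [1001, 1002, 1003]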

@@ -2,6 +2,7 @@ import os
import nuke
import pyblish.api
import pype
from pype.vendor import ffmpeg


class ExtractDataForReview(pype.api.Extractor):

@@ -12,29 +13,21 @@ class ExtractDataForReview(pype.api.Extractor):
    """

    order = pyblish.api.ExtractorOrder + 0.01
    label = "Data for review"
    label = "Extract Review"
    optional = True

    families = ["write"]
    families = ["render.review"]
    hosts = ["nuke"]
    family_targets = [".local", ".review"]

    def process(self, instance):

        families = [f for f in instance.data["families"]
                    for search in self.family_targets
                    if search in f]
        if not families:
            return
        self.log.debug("here:")
        # Store selection
        selection = [i for i in nuke.allNodes() if i["selected"].getValue()]
        self.log.debug("here:")
        # Deselect all nodes to prevent external connections
        [i["selected"].setValue(False) for i in nuke.allNodes()]
        self.log.debug("here:")
        self.log.debug("creating staging dir:")
        self.staging_dir(instance)

        self.render_review_representation(instance,
                                          representation="mov")
        self.log.debug("review mov:")

@@ -52,34 +45,20 @@ class ExtractDataForReview(pype.api.Extractor):
        staging_dir = instance.data["stagingDir"]
        file_name = collection.format("{head}mov")

        review_mov = os.path.join(staging_dir, file_name)

        if instance.data.get("baked_colorspace_movie"):
            args = [
                "ffmpeg", "-y",
                "-i", instance.data["baked_colorspace_movie"],
                "-pix_fmt", "yuv420p",
                "-crf", "18",
                "-timecode", "00:00:00:01",
            ]

            args.append(review_mov)

            self.log.debug("Executing args: {0}".format(args))
        review_mov = os.path.join(staging_dir, file_name).replace("\\", "/")

        self.log.info("transcoding review mov: {0}".format(review_mov))
            p = subprocess.Popen(
                args,
                stdout=subprocess.PIPE,
                stderr=subprocess.STDOUT,
                stdin=subprocess.PIPE,
                cwd=os.path.dirname(args[-1])
            )
        if instance.data.get("baked_colorspace_movie"):
            input_movie = instance.data["baked_colorspace_movie"]
            out, err = (
                ffmpeg
                .input(input_movie)
                .output(review_mov, pix_fmt='yuv420p', crf=18, timecode="00:00:00:01")
                .overwrite_output()
                .run()
            )

            output = p.communicate()[0]

            if p.returncode != 0:
                raise ValueError(output)

            self.log.debug("Removing `{0}`...".format(
                instance.data["baked_colorspace_movie"]))
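
The ffmpeg-python chain added here builds the same invocation the removed subprocess code spelled out by hand, roughly:

# Roughly the command the ffmpeg-python chain above assembles:
args = [
    "ffmpeg", "-y",
    "-i", input_movie,
    "-pix_fmt", "yuv420p",
    "-crf", "18",
    "-timecode", "00:00:00:01",
    review_mov,
]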

@@ -100,19 +79,10 @@ class ExtractDataForReview(pype.api.Extractor):

        collection = instance.data.get("collection", None)

        self.log.warning("instance.data['files']: {}".format(instance.data['files']))
        if not collection:
            collections, remainder = clique.assemble(*instance.data['files'])
            collection = collections[0]
            instance.data["collection"] = collection

        # Create nodes
        first_frame = min(collection.indexes)
        last_frame = max(collection.indexes)

        self.log.warning("first_frame: {}".format(first_frame))
        self.log.warning("last_frame: {}".format(last_frame))

        node = previous_node = nuke.createNode("Read")

        node["file"].setValue(

@@ -158,6 +128,7 @@ class ExtractDataForReview(pype.api.Extractor):
        if representation in "mov":
            file = collection.format("{head}baked.mov")
            path = os.path.join(staging_dir, file).replace("\\", "/")
            self.log.debug("Path: {}".format(path))
            instance.data["baked_colorspace_movie"] = path
            write_node["file"].setValue(path)
            write_node["file_type"].setValue("mov")

@@ -7,12 +7,15 @@ class IncrementScriptVersion(pyblish.api.ContextPlugin):
    """Increment current script version."""

    order = pyblish.api.IntegratorOrder + 0.9
    label = "Increment Current Script Version"
    label = "Increment Script Version"
    optional = True
    hosts = ['nuke']
    families = ["nukescript", "render.local", "render.frames"]

    def process(self, context):

        assert all(result["success"] for result in context.data["results"]), (
            "Atomicity not held, aborting.")

        from pype.lib import version_up
        path = context.data["currentFile"]
        nuke.scriptSaveAs(version_up(path))

@@ -20,20 +20,21 @@ class RepairCollectionAction(pyblish.api.Action):
        self.log.info("Rendering toggled ON")


class ValidateCollection(pyblish.api.InstancePlugin):
class ValidatePrerenderedFrames(pyblish.api.InstancePlugin):
    """ Validates file output. """

    order = pyblish.api.ValidatorOrder + 0.1
    families = ["render.frames", "still.frames", "prerender.frames"]

    label = "Check prerendered frames"
    label = "Validate prerendered frame"
    hosts = ["nuke"]
    actions = [RepairCollectionAction]


    def process(self, instance):
        self.log.debug('instance.data["files"]: {}'.format(instance.data['files']))
        if not instance.data["files"]:
            return

        assert instance.data.get('files'), "no frames were collected, you need to render them"

        collections, remainder = clique.assemble(*instance.data['files'])
        self.log.info('collections: {}'.format(str(collections)))

@@ -57,3 +58,5 @@ class ValidateCollection(pyblish.api.InstancePlugin):
            collection.indexes
        ) is frame_length, "{} missing frames. Use "
        "repair to render all frames".format(__name__)

        instance.data['collection'] = collection
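
One caveat in the context lines: `len(...) is frame_length` compares by identity, which only behaves like equality for small integers in CPython. A safer sketch of the same missing-frame check; the expected range is an assumption:

# Sketch: verify a clique collection covers the expected frame range.
collection = collections[0]               # from clique.assemble above
first, last = 1001, 1100                  # hypothetical expected range
frame_length = last - first + 1

missing = set(range(first, last + 1)) - set(collection.indexes)
assert not missing, (
    "{} missing frames. Use repair to render all frames".format(sorted(missing)))
assert len(collection.indexes) == frame_length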

15  pype/plugins/nuke/publish/validate_version_match.py  Normal file

@@ -0,0 +1,15 @@
import pyblish.api


class ValidateVersionMatch(pyblish.api.InstancePlugin):
    """Checks if write version matches workfile version"""

    label = "Validate Version Match"
    order = pyblish.api.ValidatorOrder
    hosts = ["nuke"]
    families = ['render.frames']

    def process(self, instance):

        assert instance.data['version'] == instance.context.data['version'], "\
            Version in write doesn't match version of the workfile"

@@ -58,7 +58,7 @@ def reset_data_from_templates():
    log.info("Data from templates were Unloaded...")


def get_version_from_workfile(file):
def get_version_from_path(file):
    """
    Finds version number in file path string


@@ -87,7 +87,7 @@ def get_project_code():
        string: project code
    """

    return io.find_one({"type": "project"})["data"]["code"]
    return io.find_one({"type": "project"})["data"].get("code", '')


def set_project_code(code):

@@ -167,14 +167,16 @@ def get_hierarchy():
        string: asset hierarchy path

    """
    hierarchy = io.find_one({
    parents = io.find_one({
        "type": 'asset',
        "name": get_asset()}
    )['data']['parents']

    if hierarchy:
    hierarchy = ""
    if parents and len(parents) > 0:
        # hierarchy = os.path.sep.join(hierarchy)
        return os.path.join(*hierarchy).replace("\\", "/")
        hierarchy = os.path.join(*parents).replace("\\", "/")
    return hierarchy


def set_hierarchy(hierarchy):
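
A quick sketch of what the reworked `get_hierarchy` returns for a typical `parents` list; the values are hypothetical:

# Sketch: parents list -> forward-slash hierarchy string.
import os

parents = ["ep01", "sq010"]       # hypothetical asset parents

hierarchy = ""
if parents:
    hierarchy = os.path.join(*parents).replace("\\", "/")

print(hierarchy)  # ep01/sq010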

@@ -266,10 +268,9 @@ def get_workdir_template(data=None):
    anatomy = api.Anatomy

    try:
        anatomy = anatomy.format(data or get_context_data())
        work = anatomy.work.format(data or get_context_data())
    except Exception as e:
        log.error("{0} Error in "
                  "get_workdir_template(): {1}".format(__name__, e))

    return os.path.join(anatomy.work.root,
                        anatomy.work.folder)
    return os.path.join(work.root, work.folder)

BIN  res/icons/Thumbs.db  Normal file (binary file not shown; before: 20 KiB)
BIN  res/icons/lookmanager.png  Normal file (binary file not shown; after: 2.4 KiB)

@@ -6,7 +6,7 @@ from pype.api import Logger
log = Logger.getLogger(__name__, "nuke")


nuke.addOnScriptSave(writes_version_sync)
nuke.addOnScriptSave(onScriptLoad)
# nuke.addOnScriptSave(writes_version_sync)
# nuke.addOnScriptSave(onScriptLoad)

log.info('Automatic syncing of write file knob to script version')