Merge remote-tracking branch 'refs/remotes/origin/develop' into feature/PYPE-570-maya-renderlayer-creator

Ondrej Samohel 2019-12-16 10:31:00 +01:00
commit 68b16b55d1
41 changed files with 1808 additions and 1404 deletions


@@ -1,354 +1,606 @@
import os
import sys
import logging
import collections
import uuid
from datetime import datetime
from queue import Queue
from bson.objectid import ObjectId
import argparse
import ftrack_api
from pype.ftrack import BaseAction
from pype.ftrack.lib.io_nonsingleton import DbConnector
class DeleteAsset(BaseAction):
class DeleteAssetSubset(BaseAction):
'''Delete asset/subset action.'''
#: Action identifier.
identifier = 'delete.asset'
identifier = "delete.asset.subset"
#: Action label.
label = 'Delete Asset/Subsets'
label = "Delete Asset/Subsets"
#: Action description.
description = 'Removes the entity with all children from Avalon and the asset from Ftrack'
icon = '{}/ftrack/action_icons/DeleteAsset.svg'.format(
os.environ.get('PYPE_STATICS_SERVER', '')
description = "Removes the entity with all children from Avalon and the asset from Ftrack"
icon = "{}/ftrack/action_icons/DeleteAsset.svg".format(
os.environ.get("PYPE_STATICS_SERVER", "")
)
#: roles that are allowed to register this action
role_list = ['Pypeclub', 'Administrator']
#: Db
db = DbConnector()
role_list = ["Pypeclub", "Administrator", "Project Manager"]
#: Db connection
dbcon = DbConnector()
value = None
splitter = {"type": "label", "value": "---"}
action_data_by_id = {}
asset_prefix = "asset:"
subset_prefix = "subset:"
def discover(self, session, entities, event):
''' Validation '''
if len(entities) != 1:
return False
""" Validation """
task_ids = []
for ent_info in event["data"]["selection"]:
entType = ent_info.get("entityType", "")
if entType == "task":
task_ids.append(ent_info["entityId"])
valid = ["task"]
entityType = event["data"]["selection"][0].get("entityType", "")
if entityType.lower() not in valid:
return False
return True
for entity in entities:
ftrack_id = entity["id"]
if ftrack_id not in task_ids:
continue
if entity.entity_type.lower() != "task":
return True
return False
def _launch(self, event):
self.reset_session()
try:
self.db.install()
args = self._translate_event(
self.session, event
)
if "values" not in event["data"]:
self.dbcon.install()
return self._interface(self.session, *args)
interface = self._interface(
self.session, *args
)
confirmation = self.confirm_delete(
True, *args
)
if interface:
return interface
confirmation = self.confirm_delete(*args)
if confirmation:
return confirmation
self.dbcon.install()
response = self.launch(
self.session, *args
)
finally:
self.db.uninstall()
self.dbcon.uninstall()
return self._handle_result(
self.session, response, *args
)
def interface(self, session, entities, event):
if not event['data'].get('values', {}):
self.attempt = 1
items = []
entity = entities[0]
title = 'Choose items to delete from "{}"'.format(entity['name'])
project = entity['project']
self.show_message(event, "Preparing data...", True)
items = []
title = "Choose items to delete"
self.db.Session['AVALON_PROJECT'] = project["full_name"]
# Filter selection and get ftrack ids
selection = event["data"].get("selection") or []
ftrack_ids = []
project_in_selection = False
for entity in selection:
entity_type = (entity.get("entityType") or "").lower()
if entity_type != "task":
if entity_type == "show":
project_in_selection = True
continue
av_entity = self.db.find_one({
'type': 'asset',
'name': entity['name']
ftrack_id = entity.get("entityId")
if not ftrack_id:
continue
ftrack_ids.append(ftrack_id)
if project_in_selection:
msg = "It is not possible to use this action on project entity."
self.show_message(event, msg, True)
# Filter event even more (skip task entities)
# - task entities are not relevant for avalon
for entity in entities:
ftrack_id = entity["id"]
if ftrack_id not in ftrack_ids:
continue
if entity.entity_type.lower() == "task":
ftrack_ids.remove(ftrack_id)
if not ftrack_ids:
# It is a bug if this happens!
return {
"success": False,
"message": "Invalid selection for this action (Bug)"
}
if entities[0].entity_type.lower() == "project":
project = entities[0]
else:
project = entities[0]["project"]
project_name = project["full_name"]
self.dbcon.Session["AVALON_PROJECT"] = project_name
selected_av_entities = self.dbcon.find({
"type": "asset",
"data.ftrackId": {"$in": ftrack_ids}
})
selected_av_entities = [ent for ent in selected_av_entities]
if not selected_av_entities:
return {
"success": False,
"message": "Didn't find entities in avalon"
}
# Remove cached action older than 2 minutes
old_action_ids = []
for id, data in self.action_data_by_id.items():
created_at = data.get("created_at")
if not created_at:
old_action_ids.append(id)
continue
cur_time = datetime.now()
existing_in_sec = (cur_time - created_at).total_seconds()
if existing_in_sec > 60 * 2:
old_action_ids.append(id)
for id in old_action_ids:
self.action_data_by_id.pop(id, None)
# Store data for action id
action_id = str(uuid.uuid1())
self.action_data_by_id[action_id] = {
"attempt": 1,
"created_at": datetime.now(),
"project_name": project_name,
"subset_ids_by_name": {},
"subset_ids_by_parent": {}
}
id_item = {
"type": "hidden",
"name": "action_id",
"value": action_id
}
items.append(id_item)
asset_ids = [ent["_id"] for ent in selected_av_entities]
subsets_for_selection = self.dbcon.find({
"type": "subset",
"parent": {"$in": asset_ids}
})
asset_ending = ""
if len(selected_av_entities) > 1:
asset_ending = "s"
asset_title = {
"type": "label",
"value": "# Delete asset{}:".format(asset_ending)
}
asset_note = {
"type": "label",
"value": (
"<p><i>NOTE: Action will delete checked entities"
" in Ftrack and Avalon with all children entities and"
" published content.</i></p>"
)
}
items.append(asset_title)
items.append(asset_note)
asset_items = collections.defaultdict(list)
for asset in selected_av_entities:
ent_path_items = [project_name]
ent_path_items.extend(asset.get("data", {}).get("parents") or [])
ent_path_to_parent = "/".join(ent_path_items) + "/"
asset_items[ent_path_to_parent].append(asset)
for asset_parent_path, assets in sorted(asset_items.items()):
items.append({
"type": "label",
"value": "## <b>- {}</b>".format(asset_parent_path)
})
if av_entity is None:
return {
'success': False,
'message': 'Didn\'t find assets in avalon'
}
asset_label = {
'type': 'label',
'value': '## Delete whole asset: ##'
}
asset_item = {
'label': av_entity['name'],
'name': 'whole_asset',
'type': 'boolean',
'value': False
}
splitter = {
'type': 'label',
'value': '{}'.format(200*"-")
}
subset_label = {
'type': 'label',
'value': '## Subsets: ##'
}
if av_entity is not None:
items.append(asset_label)
items.append(asset_item)
items.append(splitter)
all_subsets = self.db.find({
'type': 'subset',
'parent': av_entity['_id']
for asset in assets:
items.append({
"label": asset["name"],
"name": "{}{}".format(
self.asset_prefix, str(asset["_id"])
),
"type": 'boolean',
"value": False
})
subset_items = []
for subset in all_subsets:
item = {
'label': subset['name'],
'name': str(subset['_id']),
'type': 'boolean',
'value': False
}
subset_items.append(item)
if len(subset_items) > 0:
items.append(subset_label)
items.extend(subset_items)
else:
return {
'success': False,
'message': 'Didn\'t find assets in avalon'
}
subset_ids_by_name = collections.defaultdict(list)
subset_ids_by_parent = collections.defaultdict(list)
for subset in subsets_for_selection:
subset_id = subset["_id"]
name = subset["name"]
parent_id = subset["parent"]
subset_ids_by_name[name].append(subset_id)
subset_ids_by_parent[parent_id].append(subset_id)
if not subset_ids_by_name:
return {
'items': items,
'title': title
"items": items,
"title": title
}
def confirm_delete(self, first_attempt, entities, event):
if first_attempt is True:
if 'values' not in event['data']:
return
subset_ending = ""
if len(subset_ids_by_name.keys()) > 1:
subset_ending = "s"
values = event['data']['values']
subset_title = {
"type": "label",
"value": "# Subset{} to delete:".format(subset_ending)
}
subset_note = {
"type": "label",
"value": (
"<p><i>WARNING: Subset{} will be removed"
" for all <b>selected</b> entities.</i></p>"
).format(subset_ending)
}
if len(values) <= 0:
return
if 'whole_asset' not in values:
return
else:
values = self.values
items.append(self.splitter)
items.append(subset_title)
items.append(subset_note)
title = 'Confirmation of deleting {}'
if values['whole_asset'] is True:
title = title.format(
'whole asset {}'.format(
entities[0]['name']
)
)
else:
subsets = []
for key, value in values.items():
if value is True:
subsets.append(key)
len_subsets = len(subsets)
if len_subsets == 0:
for name in subset_ids_by_name:
items.append({
"label": "<b>{}</b>".format(name),
"name": "{}{}".format(self.subset_prefix, name),
"type": "boolean",
"value": False
})
self.action_data_by_id[action_id]["subset_ids_by_parent"] = (
subset_ids_by_parent
)
self.action_data_by_id[action_id]["subset_ids_by_name"] = (
subset_ids_by_name
)
return {
"items": items,
"title": title
}
def confirm_delete(self, entities, event):
values = event["data"]["values"]
action_id = values.get("action_id")
spec_data = self.action_data_by_id.get(action_id)
if not spec_data:
# it is a bug if this happens!
return {
"success": False,
"message": "Something bad has happened. Please try again."
}
# Process Delete confirmation
delete_key = values.get("delete_key")
if delete_key:
delete_key = delete_key.lower().strip()
# Go to launch part if user entered `delete`
if delete_key == "delete":
return
# Skip whole process if user didn't enter any text
elif delete_key == "":
self.action_data_by_id.pop(action_id, None)
return {
'success': True,
'message': 'Nothing was selected to delete'
"success": True,
"message": "Deleting cancelled (delete entry was empty)"
}
elif len_subsets == 1:
title = title.format(
'{} subset'.format(len_subsets)
)
else:
title = title.format(
'{} subsets'.format(len_subsets)
)
# Get data to show again
to_delete = spec_data["to_delete"]
else:
to_delete = collections.defaultdict(list)
for key, value in values.items():
if not value:
continue
if key.startswith(self.asset_prefix):
_key = key.replace(self.asset_prefix, "")
to_delete["assets"].append(_key)
elif key.startswith(self.subset_prefix):
_key = key.replace(self.subset_prefix, "")
to_delete["subsets"].append(_key)
self.action_data_by_id[action_id]["to_delete"] = to_delete
asset_to_delete = len(to_delete.get("assets") or []) > 0
subset_to_delete = len(to_delete.get("subsets") or []) > 0
if not asset_to_delete and not subset_to_delete:
self.action_data_by_id.pop(action_id, None)
return {
"success": True,
"message": "Nothing was selected to delete"
}
attempt = spec_data["attempt"]
if attempt > 3:
self.action_data_by_id.pop(action_id, None)
return {
"success": False,
"message": "You didn't enter \"DELETE\" properly 3 times!"
}
self.action_data_by_id[action_id]["attempt"] += 1
title = "Confirmation of deleting"
if asset_to_delete:
asset_len = len(to_delete["assets"])
asset_ending = ""
if asset_len > 1:
asset_ending = "s"
title += " {} Asset{}".format(asset_len, asset_ending)
if subset_to_delete:
title += " and"
if subset_to_delete:
sub_len = len(to_delete["subsets"])
type_ending = ""
sub_ending = ""
if sub_len == 1:
subset_ids_by_name = spec_data["subset_ids_by_name"]
if len(subset_ids_by_name[to_delete["subsets"][0]]) > 1:
sub_ending = "s"
elif sub_len > 1:
type_ending = "s"
sub_ending = "s"
title += " {} type{} of subset{}".format(
sub_len, type_ending, sub_ending
)
self.values = values
items = []
id_item = {"type": "hidden", "name": "action_id", "value": action_id}
delete_label = {
'type': 'label',
'value': '# Please enter "DELETE" to confirm #'
}
delete_item = {
'name': 'delete_key',
'type': 'text',
'value': '',
'empty_text': 'Type Delete here...'
"name": "delete_key",
"type": "text",
"value": "",
"empty_text": "Type Delete here..."
}
items.append(id_item)
items.append(delete_label)
items.append(delete_item)
return {
'items': items,
'title': title
"items": items,
"title": title
}
def launch(self, session, entities, event):
if 'values' not in event['data']:
return
values = event['data']['values']
if len(values) <= 0:
return
if 'delete_key' not in values:
return
if values['delete_key'].lower() != 'delete':
if values['delete_key'].lower() == '':
return {
'success': False,
'message': 'Deleting cancelled'
}
if self.attempt < 3:
self.attempt += 1
return_dict = self.confirm_delete(False, entities, event)
return_dict['title'] = '{} ({} attempt)'.format(
return_dict['title'], self.attempt
)
return return_dict
self.show_message(event, "Processing...", True)
values = event["data"]["values"]
action_id = values.get("action_id")
spec_data = self.action_data_by_id.get(action_id)
if not spec_data:
# it is a bug if this happens!
return {
'success': False,
'message': 'You didn\'t enter "DELETE" properly 3 times!'
"success": False,
"message": "Something bad has happened. Please try again."
}
entity = entities[0]
project = entity['project']
report_messages = collections.defaultdict(list)
self.db.Session['AVALON_PROJECT'] = project["full_name"]
project_name = spec_data["project_name"]
to_delete = spec_data["to_delete"]
self.dbcon.Session["AVALON_PROJECT"] = project_name
all_ids = []
if self.values.get('whole_asset', False) is True:
av_entity = self.db.find_one({
'type': 'asset',
'name': entity['name']
assets_to_delete = to_delete.get("assets") or []
subsets_to_delete = to_delete.get("subsets") or []
# Convert asset ids to ObjectId obj
assets_to_delete = [ObjectId(id) for id in assets_to_delete if id]
subset_ids_by_parent = spec_data["subset_ids_by_parent"]
subset_ids_by_name = spec_data["subset_ids_by_name"]
subset_ids_to_archive = []
asset_ids_to_archive = []
ftrack_ids_to_delete = []
if len(assets_to_delete) > 0:
# Prepare data when deleting whole avalon asset
avalon_assets = self.dbcon.find({"type": "asset"})
avalon_assets_by_parent = collections.defaultdict(list)
for asset in avalon_assets:
parent_id = asset["data"]["visualParent"]
avalon_assets_by_parent[parent_id].append(asset)
if asset["_id"] in assets_to_delete:
ftrack_id = asset["data"]["ftrackId"]
ftrack_ids_to_delete.append(ftrack_id)
children_queue = Queue()
for mongo_id in assets_to_delete:
children_queue.put(mongo_id)
while not children_queue.empty():
mongo_id = children_queue.get()
if mongo_id in asset_ids_to_archive:
continue
asset_ids_to_archive.append(mongo_id)
for subset_id in subset_ids_by_parent.get(mongo_id, []):
if subset_id not in subset_ids_to_archive:
subset_ids_to_archive.append(subset_id)
children = avalon_assets_by_parent.get(mongo_id)
if not children:
continue
for child in children:
child_id = child["_id"]
if child_id not in asset_ids_to_archive:
children_queue.put(child_id)
# Prepare names of assets in ftrack and ids of subsets in mongo
asset_names_to_delete = []
if len(subsets_to_delete) > 0:
for name in subsets_to_delete:
asset_names_to_delete.append(name)
for subset_id in subset_ids_by_name[name]:
if subset_id in subset_ids_to_archive:
continue
subset_ids_to_archive.append(subset_id)
# Get ftrack ids of entities where only the asset will be deleted
not_deleted_entities_id = []
ftrack_id_name_map = {}
if asset_names_to_delete:
for entity in entities:
ftrack_id = entity["id"]
ftrack_id_name_map[ftrack_id] = entity["name"]
if ftrack_id in ftrack_ids_to_delete:
continue
not_deleted_entities_id.append(ftrack_id)
mongo_proc_txt = "MongoProcessing: "
ftrack_proc_txt = "Ftrack processing: "
if asset_ids_to_archive:
self.log.debug("{}Archivation of assets <{}>".format(
mongo_proc_txt,
", ".join([str(id) for id in asset_ids_to_archive])
))
self.dbcon.update_many(
{
"_id": {"$in": asset_ids_to_archive},
"type": "asset"
},
{"$set": {"type": "archived_asset"}}
)
if subset_ids_to_archive:
self.log.debug("{}Archivation of subsets <{}>".format(
mongo_proc_txt,
", ".join([str(id) for id in subset_ids_to_archive])
))
self.dbcon.update_many(
{
"_id": {"$in": subset_ids_to_archive},
"type": "subset"
},
{"$set": {"type": "archived_subset"}}
)
if ftrack_ids_to_delete:
self.log.debug("{}Deleting Ftrack Entities <{}>".format(
ftrack_proc_txt, ", ".join(ftrack_ids_to_delete)
))
joined_ids_to_delete = ", ".join(
["\"{}\"".format(id) for id in ftrack_ids_to_delete]
)
ftrack_ents_to_delete = self.session.query(
"select id, link from TypedContext where id in ({})".format(
joined_ids_to_delete
)
).all()
for entity in ftrack_ents_to_delete:
self.session.delete(entity)
try:
self.session.commit()
except Exception:
ent_path = "/".join(
[ent["name"] for ent in entity["link"]]
)
msg = "Failed to delete entity"
report_messages[msg].append(ent_path)
self.session.rollback()
self.log.warning(
"{} <{}>".format(msg, ent_path),
exc_info=True
)
if not_deleted_entities_id:
joined_not_deleted = ", ".join([
"\"{}\"".format(ftrack_id)
for ftrack_id in not_deleted_entities_id
])
joined_asset_names = ", ".join([
"\"{}\"".format(name)
for name in asset_names_to_delete
])
# Find assets of selected entities with names of checked subsets
assets = self.session.query((
"select id from Asset where"
" context_id in ({}) and name in ({})"
).format(joined_not_deleted, joined_asset_names)).all()
self.log.debug("{}Deleting Ftrack Assets <{}>".format(
ftrack_proc_txt,
", ".join([asset["id"] for asset in assets])
))
for asset in assets:
self.session.delete(asset)
try:
self.session.commit()
except Exception:
self.session.rollback()
msg = "Failed to delete asset"
report_messages[msg].append(asset["id"])
self.log.warning(
"{} <{}>".format(msg, asset["id"]),
exc_info=True
)
return self.report_handle(report_messages, project_name, event)
def report_handle(self, report_messages, project_name, event):
if not report_messages:
return {
"success": True,
"message": "Deletion was successful!"
}
title = "Delete report ({}):".format(project_name)
items = []
items.append({
"type": "label",
"value": "# Deleting was not completely successful"
})
items.append({
"type": "label",
"value": "<p><i>Check logs for more information</i></p>"
})
for msg, _items in report_messages.items():
if not _items or not msg:
continue
items.append({
"type": "label",
"value": "# {}".format(msg)
})
if av_entity is not None:
all_ids.append(av_entity['_id'])
all_ids.extend(self.find_child(av_entity))
if isinstance(_items, str):
_items = [_items]
items.append({
"type": "label",
"value": '<p>{}</p>'.format("<br>".join(_items))
})
items.append(self.splitter)
session.delete(entity)
session.commit()
else:
subset_names = []
for key, value in self.values.items():
if key == 'delete_key' or value is False:
continue
entity_id = ObjectId(key)
av_entity = self.db.find_one({'_id': entity_id})
subset_names.append(av_entity['name'])
if av_entity is None:
continue
all_ids.append(entity_id)
all_ids.extend(self.find_child(av_entity))
for ft_asset in entity['assets']:
if ft_asset['name'] in subset_names:
session.delete(ft_asset)
session.commit()
if len(all_ids) == 0:
return {
'success': True,
'message': 'No entities to delete in avalon'
}
delete_query = {'_id': {'$in': all_ids}}
self.db.delete_many(delete_query)
self.show_interface(items, title, event)
return {
'success': True,
'message': 'All assets were deleted!'
"success": False,
"message": "Deleting finished. Read report messages."
}
def find_child(self, entity):
output = []
id = entity['_id']
visuals = [x for x in self.db.find({'data.visualParent': id})]
assert len(visuals) == 0, 'This asset has another asset as child'
childs = self.db.find({'parent': id})
for child in childs:
output.append(child['_id'])
output.extend(self.find_child(child))
return output
def find_assets(self, asset_names):
assets = []
for name in asset_names:
entity = self.db.find_one({
'type': 'asset',
'name': name
})
if entity is not None and entity not in assets:
assets.append(entity)
return assets
def register(session, plugins_presets={}):
'''Register plugin. Called when used as a plugin.'''
DeleteAsset(session, plugins_presets).register()
def main(arguments=None):
'''Set up logging and register action.'''
if arguments is None:
arguments = []
parser = argparse.ArgumentParser()
# Allow setting of logging level from arguments.
loggingLevels = {}
for level in (
logging.NOTSET, logging.DEBUG, logging.INFO, logging.WARNING,
logging.ERROR, logging.CRITICAL
):
loggingLevels[logging.getLevelName(level).lower()] = level
parser.add_argument(
'-v', '--verbosity',
help='Set the logging output verbosity.',
choices=loggingLevels.keys(),
default='info'
)
namespace = parser.parse_args(arguments)
# Set up basic logging
logging.basicConfig(level=loggingLevels[namespace.verbosity])
session = ftrack_api.Session()
register(session)
# Wait for events
logging.info(
'Registered actions and listening for events. Use Ctrl-C to abort.'
)
session.event_hub.wait()
if __name__ == '__main__':
raise SystemExit(main(sys.argv[1:]))
DeleteAssetSubset(session, plugins_presets).register()
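A minimal standalone sketch (not part of this commit) of the two-minute cache expiry used by interface(), with the elapsed time computed as now minus created_at:

# Sketch only: per-run form data cached by uuid and purged after two minutes.
import collections
import uuid
from datetime import datetime

action_data_by_id = {}

def store_action_data(project_name):
    action_id = str(uuid.uuid1())
    action_data_by_id[action_id] = {
        "attempt": 1,
        "created_at": datetime.now(),
        "project_name": project_name,
        "subset_ids_by_name": collections.defaultdict(list),
        "subset_ids_by_parent": collections.defaultdict(list)
    }
    return action_id

def purge_old_action_data(max_age_sec=120):
    now = datetime.now()
    old_ids = [
        _id for _id, data in action_data_by_id.items()
        if (now - data["created_at"]).total_seconds() > max_age_sec
    ]
    for _id in old_ids:
        action_data_by_id.pop(_id, None)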


@@ -1,175 +0,0 @@
import os
import sys
import logging
import argparse
import ftrack_api
from pype.ftrack import BaseAction
from pype.ftrack.lib.io_nonsingleton import DbConnector
class AssetsRemover(BaseAction):
'''Delete assets by name action.'''
#: Action identifier.
identifier = 'remove.assets'
#: Action label.
label = "Pype Admin"
variant = '- Delete Assets by Name'
#: Action description.
description = 'Removes assets from Ftrack and Avalon db with all children'
#: roles that are allowed to register this action
role_list = ['Pypeclub', 'Administrator']
icon = '{}/ftrack/action_icons/PypeAdmin.svg'.format(
os.environ.get('PYPE_STATICS_SERVER', '')
)
#: Db
db = DbConnector()
def discover(self, session, entities, event):
''' Validation '''
if len(entities) != 1:
return False
valid = ["show", "task"]
entityType = event["data"]["selection"][0].get("entityType", "")
if entityType.lower() not in valid:
return False
return True
def interface(self, session, entities, event):
if not event['data'].get('values', {}):
title = 'Enter Asset names to delete'
items = []
for i in range(15):
item = {
'label': 'Asset {}'.format(i+1),
'name': 'asset_{}'.format(i+1),
'type': 'text',
'value': ''
}
items.append(item)
return {
'items': items,
'title': title
}
def launch(self, session, entities, event):
entity = entities[0]
if entity.entity_type.lower() != 'Project':
project = entity['project']
else:
project = entity
if 'values' not in event['data']:
return
values = event['data']['values']
if len(values) <= 0:
return {
'success': True,
'message': 'No Assets to delete!'
}
asset_names = []
for k, v in values.items():
if v.replace(' ', '') != '':
asset_names.append(v)
self.db.install()
self.db.Session['AVALON_PROJECT'] = project["full_name"]
assets = self.find_assets(asset_names)
all_ids = []
for asset in assets:
all_ids.append(asset['_id'])
all_ids.extend(self.find_child(asset))
if len(all_ids) == 0:
self.db.uninstall()
return {
'success': True,
'message': 'No assets were found to delete'
}
delete_query = {'_id': {'$in': all_ids}}
self.db.delete_many(delete_query)
self.db.uninstall()
return {
'success': True,
'message': 'All assets were deleted!'
}
def find_child(self, entity):
output = []
id = entity['_id']
visuals = [x for x in self.db.find({'data.visualParent': id})]
assert len(visuals) == 0, 'This asset has another asset as child'
childs = self.db.find({'parent': id})
for child in childs:
output.append(child['_id'])
output.extend(self.find_child(child))
return output
def find_assets(self, asset_names):
assets = []
for name in asset_names:
entity = self.db.find_one({
'type': 'asset',
'name': name
})
if entity is not None and entity not in assets:
assets.append(entity)
return assets
def register(session, plugins_presets={}):
'''Register plugin. Called when used as a plugin.'''
AssetsRemover(session, plugins_presets).register()
def main(arguments=None):
'''Set up logging and register action.'''
if arguments is None:
arguments = []
parser = argparse.ArgumentParser()
# Allow setting of logging level from arguments.
loggingLevels = {}
for level in (
logging.NOTSET, logging.DEBUG, logging.INFO, logging.WARNING,
logging.ERROR, logging.CRITICAL
):
loggingLevels[logging.getLevelName(level).lower()] = level
parser.add_argument(
'-v', '--verbosity',
help='Set the logging output verbosity.',
choices=loggingLevels.keys(),
default='info'
)
namespace = parser.parse_args(arguments)
# Set up basic logging
logging.basicConfig(level=loggingLevels[namespace.verbosity])
session = ftrack_api.Session()
register(session)
# Wait for events
logging.info(
'Registered actions and listening for events. Use Ctrl-C to abort.'
)
session.event_hub.wait()
if __name__ == '__main__':
raise SystemExit(main(sys.argv[1:]))


@@ -4,6 +4,7 @@ from pypeapp import config
class VersionToTaskStatus(BaseEvent):
# Presets usage
default_status_mapping = {}
def launch(self, session, event):
@@ -11,69 +12,124 @@ class VersionToTaskStatus(BaseEvent):
# start of event procedure ----------------------------------
for entity in event['data'].get('entities', []):
# Filter non-assetversions
if (
entity['entityType'] == 'assetversion' and
'statusid' in (entity.get('keys') or [])
):
# Filter AssetVersions
if entity["entityType"] != "assetversion":
continue
version = session.get('AssetVersion', entity['entityId'])
try:
version_status = session.get(
'Status', entity['changes']['statusid']['new']
)
except Exception:
# Skip if statusid not in keys (in changes)
keys = entity.get("keys")
if not keys or "statusid" not in keys:
continue
# Get the new status id of the AssetVersion
version_status_id = (
entity
.get("changes", {})
.get("statusid", {})
.get("new", {})
)
# Just check that `new` is set to any value
if not version_status_id:
continue
try:
version_status = session.get("Status", version_status_id)
except Exception:
self.log.warning(
"Troubles with query status id [ {} ]".format(
version_status_id
),
exc_info=True
)
if not version_status:
continue
version_status_orig = version_status["name"]
# Load status mapping from presets
status_mapping = (
config.get_presets()
.get("ftrack", {})
.get("ftrack_config", {})
.get("status_version_to_task")
) or self.default_status_mapping
# Skip if mapping is empty
if not status_mapping:
continue
# Lowercase version status name and check if it has a mapping
version_status = version_status_orig.lower()
new_status_names = status_mapping.get(version_status)
if not new_status_names:
continue
self.log.debug(
"Processing AssetVersion status change: [ {} ]".format(
version_status_orig
)
)
# Backwards compatibility (convert string to list)
if isinstance(new_status_names, str):
new_status_names = [new_status_names]
# Lower all names from presets
new_status_names = [name.lower() for name in new_status_names]
# Get entities necessary for processing
version = session.get("AssetVersion", entity["entityId"])
task = version.get("task")
if not task:
continue
project_schema = task["project"]["project_schema"]
# Get all available statuses for Task
statuses = project_schema.get_statuses("Task", task["type_id"])
# map lowercased status name to its status object
stat_names_low = {
status["name"].lower(): status for status in statuses
}
new_status = None
for status_name in new_status_names:
if status_name not in stat_names_low:
continue
task_status = version_status
task = version['task']
self.log.info('>>> version status: [ {} ]'.format(
version_status['name']))
version_name_low = version_status['name'].lower()
# store object of found status
new_status = stat_names_low[status_name]
self.log.debug("Status to set: [ {} ]".format(
new_status["name"]
))
break
status_mapping = (
config.get_presets()
.get("ftrack", {})
.get("ftrack_config", {})
.get("status_version_to_task")
) or self.default_status_mapping
# Skip if no matching status was found for the particular entity
if not new_status:
self.log.warning(
"None of the statuses from presets could be set: {}".format(
str(new_status_names)
)
)
continue
status_to_set = status_mapping.get(version_name_low)
# Get full path to task for logging
ent_path = "/".join([ent["name"] for ent in task["link"]])
self.log.info(
'>>> status to set: [ {} ]'.format(status_to_set))
if status_to_set is not None:
query = 'Status where name is "{}"'.format(status_to_set)
try:
task_status = session.query(query).one()
except Exception:
self.log.info(
'!!! status was not found in Ftrack [ {} ]'.format(
status_to_set
)
)
continue
# Proceed if the task status was set
if task_status is not None:
# Get path to task
path = task['name']
for p in task['ancestors']:
path = p['name'] + '/' + path
# Setting task status
try:
task['status'] = task_status
session.commit()
except Exception as e:
session.rollback()
self.log.warning('!!! [ {} ] status couldnt be set:\
[ {} ]'.format(path, e))
session.rollback()
else:
self.log.info('>>> [ {} ] updated to [ {} ]'.format(
path, task_status['name']))
# Setting task status
try:
task["status"] = new_status
session.commit()
self.log.debug("[ {} ] Status updated to [ {} ]".format(
ent_path, new_status['name']
))
except Exception:
session.rollback()
self.log.warning(
"[ {} ]Status couldn't be set".format(ent_path),
exc_info=True
)
def register(session, plugins_presets):
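The handler looks up a mapping from lowered AssetVersion status names to candidate Task status names in presets. The preset values below are hypothetical; a small sketch of the lookup logic, assuming statuses are plain name strings:

# Hypothetical mapping; real values come from
# config.get_presets()["ftrack"]["ftrack_config"]["status_version_to_task"].
status_mapping = {
    "approved": ["approved", "done"],
    "needs attention": ["in progress"]
}

def pick_task_status(version_status_name, available_status_names):
    """Return the first available Task status matching the mapping, or None."""
    names = status_mapping.get(version_status_name.lower())
    if not names:
        return None
    if isinstance(names, str):
        # Backwards compatibility with single-string preset values
        names = [names]
    by_lowered = {name.lower(): name for name in available_status_names}
    for name in names:
        if name.lower() in by_lowered:
            return by_lowered[name.lower()]
    return None

# Example: pick_task_status("Approved", ["In Progress", "Done"]) returns "Done"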


@ -1,10 +1,32 @@
import os
import sys
import logging
import getpass
import atexit
import tempfile
import threading
import datetime
import time
import queue
import pymongo
import requests
import ftrack_api
import ftrack_api.session
import ftrack_api.cache
import ftrack_api.operation
import ftrack_api._centralized_storage_scenario
import ftrack_api.event
from ftrack_api.logging import LazyLogMessage as L
try:
from urllib.parse import urlparse, parse_qs
except ImportError:
from urlparse import urlparse, parse_qs
from pypeapp import Logger
from pype.ftrack.lib.custom_db_connector import DbConnector
def ftrack_events_mongo_settings():
host = None
@@ -49,7 +71,9 @@ def ftrack_events_mongo_settings():
def get_ftrack_event_mongo_info():
host, port, database, username, password, collection, auth_db = ftrack_events_mongo_settings()
host, port, database, username, password, collection, auth_db = (
ftrack_events_mongo_settings()
)
user_pass = ""
if username and password:
user_pass = "{}:{}@".format(username, password)
@@ -97,3 +121,303 @@ def check_ftrack_url(url, log_errors=True):
print('DEBUG: Ftrack server {} is accessible.'.format(url))
return url
class StorerEventHub(ftrack_api.event.hub.EventHub):
def __init__(self, *args, **kwargs):
self.sock = kwargs.pop("sock")
super(StorerEventHub, self).__init__(*args, **kwargs)
def _handle_packet(self, code, packet_identifier, path, data):
"""Override `_handle_packet` to extend heartbeat handling"""
code_name = self._code_name_mapping[code]
if code_name == "heartbeat":
# Reply with heartbeat.
self.sock.sendall(b"storer")
return self._send_packet(self._code_name_mapping['heartbeat'])
elif code_name == "connect":
event = ftrack_api.event.base.Event(
topic="pype.storer.started",
data={},
source={
"id": self.id,
"user": {"username": self._api_user}
}
)
self._event_queue.put(event)
return super(StorerEventHub, self)._handle_packet(
code, packet_identifier, path, data
)
class ProcessEventHub(ftrack_api.event.hub.EventHub):
url, database, table_name = get_ftrack_event_mongo_info()
is_table_created = False
pypelog = Logger().get_logger("Session Processor")
def __init__(self, *args, **kwargs):
self.dbcon = DbConnector(
mongo_url=self.url,
database_name=self.database,
table_name=self.table_name
)
self.sock = kwargs.pop("sock")
super(ProcessEventHub, self).__init__(*args, **kwargs)
def prepare_dbcon(self):
try:
self.dbcon.install()
self.dbcon._database.list_collection_names()
except pymongo.errors.AutoReconnect:
self.pypelog.error(
"Mongo server \"{}\" is not responding, exiting.".format(
os.environ["AVALON_MONGO"]
)
)
sys.exit(0)
except pymongo.errors.OperationFailure:
self.pypelog.error((
"Error with Mongo access, probably permissions."
"Check if exist database with name \"{}\""
" and collection \"{}\" inside."
).format(self.database, self.table_name))
self.sock.sendall(b"MongoError")
sys.exit(0)
def wait(self, duration=None):
"""Overridden wait.
Events are loaded from MongoDB when the queue is empty. A handled event
is marked as processed in MongoDB.
"""
started = time.time()
self.prepare_dbcon()
while True:
try:
event = self._event_queue.get(timeout=0.1)
except queue.Empty:
if not self.load_events():
time.sleep(0.5)
else:
try:
self._handle(event)
self.dbcon.update_one(
{"id": event["id"]},
{"$set": {"pype_data.is_processed": True}}
)
except pymongo.errors.AutoReconnect:
self.pypelog.error((
"Mongo server \"{}\" is not responding, exiting."
).format(os.environ["AVALON_MONGO"]))
sys.exit(0)
# Additional special processing of events.
if event['topic'] == 'ftrack.meta.disconnected':
break
if duration is not None:
if (time.time() - started) > duration:
break
def load_events(self):
"""Load unprocessed events sorted by stored date"""
ago_date = datetime.datetime.now() - datetime.timedelta(days=3)
result = self.dbcon.delete_many({
"pype_data.stored": {"$lte": ago_date},
"pype_data.is_processed": True
})
not_processed_events = self.dbcon.find(
{"pype_data.is_processed": False}
).sort(
[("pype_data.stored", pymongo.ASCENDING)]
)
found = False
for event_data in not_processed_events:
new_event_data = {
k: v for k, v in event_data.items()
if k not in ["_id", "pype_data"]
}
try:
event = ftrack_api.event.base.Event(**new_event_data)
except Exception:
self.logger.exception(L(
'Failed to convert payload into event: {0}',
event_data
))
continue
found = True
self._event_queue.put(event)
return found
def _handle_packet(self, code, packet_identifier, path, data):
"""Override `_handle_packet` to skip events and extend heartbeat handling"""
code_name = self._code_name_mapping[code]
if code_name == "event":
return
if code_name == "heartbeat":
self.sock.sendall(b"processor")
return self._send_packet(self._code_name_mapping["heartbeat"])
return super()._handle_packet(code, packet_identifier, path, data)
class SocketSession(ftrack_api.session.Session):
'''An isolated session for interaction with an ftrack server.'''
def __init__(
self, server_url=None, api_key=None, api_user=None, auto_populate=True,
plugin_paths=None, cache=None, cache_key_maker=None,
auto_connect_event_hub=None, schema_cache_path=None,
plugin_arguments=None, sock=None, Eventhub=None
):
super(ftrack_api.session.Session, self).__init__()
self.logger = logging.getLogger(
__name__ + '.' + self.__class__.__name__
)
self._closed = False
if server_url is None:
server_url = os.environ.get('FTRACK_SERVER')
if not server_url:
raise TypeError(
'Required "server_url" not specified. Pass as argument or set '
'in environment variable FTRACK_SERVER.'
)
self._server_url = server_url
if api_key is None:
api_key = os.environ.get(
'FTRACK_API_KEY',
# Backwards compatibility
os.environ.get('FTRACK_APIKEY')
)
if not api_key:
raise TypeError(
'Required "api_key" not specified. Pass as argument or set in '
'environment variable FTRACK_API_KEY.'
)
self._api_key = api_key
if api_user is None:
api_user = os.environ.get('FTRACK_API_USER')
if not api_user:
try:
api_user = getpass.getuser()
except Exception:
pass
if not api_user:
raise TypeError(
'Required "api_user" not specified. Pass as argument, set in '
'environment variable FTRACK_API_USER or one of the standard '
'environment variables used by Python\'s getpass module.'
)
self._api_user = api_user
# Currently pending operations.
self.recorded_operations = ftrack_api.operation.Operations()
self.record_operations = True
self.cache_key_maker = cache_key_maker
if self.cache_key_maker is None:
self.cache_key_maker = ftrack_api.cache.StringKeyMaker()
# Enforce always having a memory cache at top level so that the same
# in-memory instance is returned from session.
self.cache = ftrack_api.cache.LayeredCache([
ftrack_api.cache.MemoryCache()
])
if cache is not None:
if callable(cache):
cache = cache(self)
if cache is not None:
self.cache.caches.append(cache)
self._managed_request = None
self._request = requests.Session()
self._request.auth = ftrack_api.session.SessionAuthentication(
self._api_key, self._api_user
)
self.auto_populate = auto_populate
# Fetch server information and in doing so also check credentials.
self._server_information = self._fetch_server_information()
# Now check compatibility of server based on retrieved information.
self.check_server_compatibility()
# Construct event hub and load plugins.
if Eventhub is None:
Eventhub = ftrack_api.event.hub.EventHub
self._event_hub = Eventhub(
self._server_url,
self._api_user,
self._api_key,
sock=sock
)
self._auto_connect_event_hub_thread = None
if auto_connect_event_hub in (None, True):
# Connect to event hub in background thread so as not to block main
# session usage waiting for event hub connection.
self._auto_connect_event_hub_thread = threading.Thread(
target=self._event_hub.connect
)
self._auto_connect_event_hub_thread.daemon = True
self._auto_connect_event_hub_thread.start()
# To help with migration from auto_connect_event_hub default changing
# from True to False.
self._event_hub._deprecation_warning_auto_connect = (
auto_connect_event_hub is None
)
# Register to auto-close session on exit.
atexit.register(self.close)
self._plugin_paths = plugin_paths
if self._plugin_paths is None:
self._plugin_paths = os.environ.get(
'FTRACK_EVENT_PLUGIN_PATH', ''
).split(os.pathsep)
self._discover_plugins(plugin_arguments=plugin_arguments)
# TODO: Make schemas read-only and non-mutable (or at least without
# rebuilding types)?
if schema_cache_path is not False:
if schema_cache_path is None:
schema_cache_path = os.environ.get(
'FTRACK_API_SCHEMA_CACHE_PATH', tempfile.gettempdir()
)
schema_cache_path = os.path.join(
schema_cache_path, 'ftrack_api_schema_cache.json'
)
self.schemas = self._load_schemas(schema_cache_path)
self.types = self._build_entity_type_classes(self.schemas)
ftrack_api._centralized_storage_scenario.register(self)
self._configure_locations()
self.event_hub.publish(
ftrack_api.event.base.Event(
topic='ftrack.api.session.ready',
data=dict(
session=self
)
),
synchronous=True
)
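Downstream scripts build this session with an injected hub class and a socket connected to the parent SocketThread; a usage sketch follows (the port is a placeholder for the value passed in by the parent process):

# Usage sketch based on the processor/storer scripts in this commit.
import socket

from pype.ftrack.ftrack_server.lib import (
    SocketSession, ProcessEventHub, StorerEventHub
)

sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.connect(("localhost", 8001))  # hypothetical port from the parent thread
sock.sendall(b"CreatedProcess")

# The injected Eventhub class decides how heartbeats and events are handled;
# the storer process would pass Eventhub=StorerEventHub instead.
session = SocketSession(
    auto_connect_event_hub=True, sock=sock, Eventhub=ProcessEventHub
)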


@@ -1,292 +0,0 @@
import logging
import os
import atexit
import datetime
import tempfile
import threading
import time
import requests
import queue
import pymongo
import ftrack_api
import ftrack_api.session
import ftrack_api.cache
import ftrack_api.operation
import ftrack_api._centralized_storage_scenario
import ftrack_api.event
from ftrack_api.logging import LazyLogMessage as L
from pype.ftrack.lib.custom_db_connector import DbConnector
from pype.ftrack.ftrack_server.lib import get_ftrack_event_mongo_info
from pypeapp import Logger
log = Logger().get_logger("Session processor")
class ProcessEventHub(ftrack_api.event.hub.EventHub):
url, database, table_name = get_ftrack_event_mongo_info()
is_table_created = False
def __init__(self, *args, **kwargs):
self.dbcon = DbConnector(
mongo_url=self.url,
database_name=self.database,
table_name=self.table_name
)
self.sock = kwargs.pop("sock")
super(ProcessEventHub, self).__init__(*args, **kwargs)
def prepare_dbcon(self):
try:
self.dbcon.install()
self.dbcon._database.list_collection_names()
except pymongo.errors.AutoReconnect:
log.error("Mongo server \"{}\" is not responding, exiting.".format(
os.environ["AVALON_MONGO"]
))
sys.exit(0)
except pymongo.errors.OperationFailure:
log.error((
"Error with Mongo access, probably permissions."
"Check if exist database with name \"{}\""
" and collection \"{}\" inside."
).format(self.database, self.table_name))
self.sock.sendall(b"MongoError")
sys.exit(0)
def wait(self, duration=None):
"""Overridden wait.
Events are loaded from MongoDB when the queue is empty. A handled event
is marked as processed in MongoDB.
"""
started = time.time()
self.prepare_dbcon()
while True:
try:
event = self._event_queue.get(timeout=0.1)
except queue.Empty:
if not self.load_events():
time.sleep(0.5)
else:
try:
self._handle(event)
self.dbcon.update_one(
{"id": event["id"]},
{"$set": {"pype_data.is_processed": True}}
)
except pymongo.errors.AutoReconnect:
log.error((
"Mongo server \"{}\" is not responding, exiting."
).format(os.environ["AVALON_MONGO"]))
sys.exit(0)
# Additional special processing of events.
if event['topic'] == 'ftrack.meta.disconnected':
break
if duration is not None:
if (time.time() - started) > duration:
break
def load_events(self):
"""Load unprocessed events sorted by stored date"""
ago_date = datetime.datetime.now() - datetime.timedelta(days=3)
result = self.dbcon.delete_many({
"pype_data.stored": {"$lte": ago_date},
"pype_data.is_processed": True
})
not_processed_events = self.dbcon.find(
{"pype_data.is_processed": False}
).sort(
[("pype_data.stored", pymongo.ASCENDING)]
)
found = False
for event_data in not_processed_events:
new_event_data = {
k: v for k, v in event_data.items()
if k not in ["_id", "pype_data"]
}
try:
event = ftrack_api.event.base.Event(**new_event_data)
except Exception:
self.logger.exception(L(
'Failed to convert payload into event: {0}',
event_data
))
continue
found = True
self._event_queue.put(event)
return found
def _handle_packet(self, code, packet_identifier, path, data):
"""Override `_handle_packet` to skip events and extend heartbeat handling"""
code_name = self._code_name_mapping[code]
if code_name == "event":
return
if code_name == "heartbeat":
self.sock.sendall(b"processor")
return self._send_packet(self._code_name_mapping["heartbeat"])
return super()._handle_packet(code, packet_identifier, path, data)
class ProcessSession(ftrack_api.session.Session):
'''An isolated session for interaction with an ftrack server.'''
def __init__(
self, server_url=None, api_key=None, api_user=None, auto_populate=True,
plugin_paths=None, cache=None, cache_key_maker=None,
auto_connect_event_hub=None, schema_cache_path=None,
plugin_arguments=None, sock=None
):
super(ftrack_api.session.Session, self).__init__()
self.logger = logging.getLogger(
__name__ + '.' + self.__class__.__name__
)
self._closed = False
if server_url is None:
server_url = os.environ.get('FTRACK_SERVER')
if not server_url:
raise TypeError(
'Required "server_url" not specified. Pass as argument or set '
'in environment variable FTRACK_SERVER.'
)
self._server_url = server_url
if api_key is None:
api_key = os.environ.get(
'FTRACK_API_KEY',
# Backwards compatibility
os.environ.get('FTRACK_APIKEY')
)
if not api_key:
raise TypeError(
'Required "api_key" not specified. Pass as argument or set in '
'environment variable FTRACK_API_KEY.'
)
self._api_key = api_key
if api_user is None:
api_user = os.environ.get('FTRACK_API_USER')
if not api_user:
try:
api_user = getpass.getuser()
except Exception:
pass
if not api_user:
raise TypeError(
'Required "api_user" not specified. Pass as argument, set in '
'environment variable FTRACK_API_USER or one of the standard '
'environment variables used by Python\'s getpass module.'
)
self._api_user = api_user
# Currently pending operations.
self.recorded_operations = ftrack_api.operation.Operations()
self.record_operations = True
self.cache_key_maker = cache_key_maker
if self.cache_key_maker is None:
self.cache_key_maker = ftrack_api.cache.StringKeyMaker()
# Enforce always having a memory cache at top level so that the same
# in-memory instance is returned from session.
self.cache = ftrack_api.cache.LayeredCache([
ftrack_api.cache.MemoryCache()
])
if cache is not None:
if callable(cache):
cache = cache(self)
if cache is not None:
self.cache.caches.append(cache)
self._managed_request = None
self._request = requests.Session()
self._request.auth = ftrack_api.session.SessionAuthentication(
self._api_key, self._api_user
)
self.auto_populate = auto_populate
# Fetch server information and in doing so also check credentials.
self._server_information = self._fetch_server_information()
# Now check compatibility of server based on retrieved information.
self.check_server_compatibility()
# Construct event hub and load plugins.
self._event_hub = ProcessEventHub(
self._server_url,
self._api_user,
self._api_key,
sock=sock
)
self._auto_connect_event_hub_thread = None
if auto_connect_event_hub in (None, True):
# Connect to event hub in background thread so as not to block main
# session usage waiting for event hub connection.
self._auto_connect_event_hub_thread = threading.Thread(
target=self._event_hub.connect
)
self._auto_connect_event_hub_thread.daemon = True
self._auto_connect_event_hub_thread.start()
# To help with migration from auto_connect_event_hub default changing
# from True to False.
self._event_hub._deprecation_warning_auto_connect = (
auto_connect_event_hub is None
)
# Register to auto-close session on exit.
atexit.register(self.close)
self._plugin_paths = plugin_paths
if self._plugin_paths is None:
self._plugin_paths = os.environ.get(
'FTRACK_EVENT_PLUGIN_PATH', ''
).split(os.pathsep)
self._discover_plugins(plugin_arguments=plugin_arguments)
# TODO: Make schemas read-only and non-mutable (or at least without
# rebuilding types)?
if schema_cache_path is not False:
if schema_cache_path is None:
schema_cache_path = os.environ.get(
'FTRACK_API_SCHEMA_CACHE_PATH', tempfile.gettempdir()
)
schema_cache_path = os.path.join(
schema_cache_path, 'ftrack_api_schema_cache.json'
)
self.schemas = self._load_schemas(schema_cache_path)
self.types = self._build_entity_type_classes(self.schemas)
ftrack_api._centralized_storage_scenario.register(self)
self._configure_locations()
self.event_hub.publish(
ftrack_api.event.base.Event(
topic='ftrack.api.session.ready',
data=dict(
session=self
)
),
synchronous=True
)


@@ -1,269 +0,0 @@
import logging
import os
import atexit
import tempfile
import threading
import requests
import ftrack_api
import ftrack_api.session
import ftrack_api.cache
import ftrack_api.operation
import ftrack_api._centralized_storage_scenario
import ftrack_api.event
from ftrack_api.logging import LazyLogMessage as L
class StorerEventHub(ftrack_api.event.hub.EventHub):
def __init__(self, *args, **kwargs):
self.sock = kwargs.pop("sock")
super(StorerEventHub, self).__init__(*args, **kwargs)
def _handle_packet(self, code, packet_identifier, path, data):
"""Override `_handle_packet` to extend heartbeat handling"""
code_name = self._code_name_mapping[code]
if code_name == "heartbeat":
# Reply with heartbeat.
self.sock.sendall(b"storer")
return self._send_packet(self._code_name_mapping['heartbeat'])
elif code_name == "connect":
event = ftrack_api.event.base.Event(
topic="pype.storer.started",
data={},
source={
"id": self.id,
"user": {"username": self._api_user}
}
)
self._event_queue.put(event)
return super(StorerEventHub, self)._handle_packet(
code, packet_identifier, path, data
)
class StorerSession(ftrack_api.session.Session):
'''An isolated session for interaction with an ftrack server.'''
def __init__(
self, server_url=None, api_key=None, api_user=None, auto_populate=True,
plugin_paths=None, cache=None, cache_key_maker=None,
auto_connect_event_hub=None, schema_cache_path=None,
plugin_arguments=None, sock=None
):
'''Initialise session.
*server_url* should be the URL of the ftrack server to connect to
including any port number. If not specified attempt to look up from
:envvar:`FTRACK_SERVER`.
*api_key* should be the API key to use for authentication whilst
*api_user* should be the username of the user in ftrack to record
operations against. If not specified, *api_key* should be retrieved
from :envvar:`FTRACK_API_KEY` and *api_user* from
:envvar:`FTRACK_API_USER`.
If *auto_populate* is True (the default), then accessing entity
attributes will cause them to be automatically fetched from the server
if they are not already. This flag can be changed on the session
directly at any time.
*plugin_paths* should be a list of paths to search for plugins. If not
specified, default to looking up :envvar:`FTRACK_EVENT_PLUGIN_PATH`.
*cache* should be an instance of a cache that fulfils the
:class:`ftrack_api.cache.Cache` interface and will be used as the cache
for the session. It can also be a callable that will be called with the
session instance as sole argument. The callable should return ``None``
if a suitable cache could not be configured, but session instantiation
can continue safely.
.. note::
The session will add the specified cache to a pre-configured layered
cache that specifies the top level cache as a
:class:`ftrack_api.cache.MemoryCache`. Therefore, it is unnecessary
to construct a separate memory cache for typical behaviour. Working
around this behaviour or removing the memory cache can lead to
unexpected behaviour.
*cache_key_maker* should be an instance of a key maker that fulfils the
:class:`ftrack_api.cache.KeyMaker` interface and will be used to
generate keys for objects being stored in the *cache*. If not specified,
a :class:`~ftrack_api.cache.StringKeyMaker` will be used.
If *auto_connect_event_hub* is True then embedded event hub will be
automatically connected to the event server and allow for publishing and
subscribing to **non-local** events. If False, then only publishing and
subscribing to **local** events will be possible until the hub is
manually connected using :meth:`EventHub.connect
<ftrack_api.event.hub.EventHub.connect>`.
.. note::
The event hub connection is performed in a background thread to
improve session startup time. If a registered plugin requires a
connected event hub then it should check the event hub connection
status explicitly. Subscribing to events does *not* require a
connected event hub.
Enable schema caching by setting *schema_cache_path* to a folder path.
If not set, :envvar:`FTRACK_API_SCHEMA_CACHE_PATH` will be used to
determine the path to store cache in. If the environment variable is
also not specified then a temporary directory will be used. Set to
`False` to disable schema caching entirely.
*plugin_arguments* should be an optional mapping (dict) of keyword
arguments to pass to plugin register functions upon discovery. If a
discovered plugin has a signature that is incompatible with the passed
arguments, the discovery mechanism will attempt to reduce the passed
arguments to only those that the plugin accepts. Note that a warning
will be logged in this case.
'''
super(ftrack_api.session.Session, self).__init__()
self.logger = logging.getLogger(
__name__ + '.' + self.__class__.__name__
)
self._closed = False
if server_url is None:
server_url = os.environ.get('FTRACK_SERVER')
if not server_url:
raise TypeError(
'Required "server_url" not specified. Pass as argument or set '
'in environment variable FTRACK_SERVER.'
)
self._server_url = server_url
if api_key is None:
api_key = os.environ.get(
'FTRACK_API_KEY',
# Backwards compatibility
os.environ.get('FTRACK_APIKEY')
)
if not api_key:
raise TypeError(
'Required "api_key" not specified. Pass as argument or set in '
'environment variable FTRACK_API_KEY.'
)
self._api_key = api_key
if api_user is None:
api_user = os.environ.get('FTRACK_API_USER')
if not api_user:
try:
api_user = getpass.getuser()
except Exception:
pass
if not api_user:
raise TypeError(
'Required "api_user" not specified. Pass as argument, set in '
'environment variable FTRACK_API_USER or one of the standard '
'environment variables used by Python\'s getpass module.'
)
self._api_user = api_user
# Currently pending operations.
self.recorded_operations = ftrack_api.operation.Operations()
self.record_operations = True
self.cache_key_maker = cache_key_maker
if self.cache_key_maker is None:
self.cache_key_maker = ftrack_api.cache.StringKeyMaker()
# Enforce always having a memory cache at top level so that the same
# in-memory instance is returned from session.
self.cache = ftrack_api.cache.LayeredCache([
ftrack_api.cache.MemoryCache()
])
if cache is not None:
if callable(cache):
cache = cache(self)
if cache is not None:
self.cache.caches.append(cache)
self._managed_request = None
self._request = requests.Session()
self._request.auth = ftrack_api.session.SessionAuthentication(
self._api_key, self._api_user
)
self.auto_populate = auto_populate
# Fetch server information and in doing so also check credentials.
self._server_information = self._fetch_server_information()
# Now check compatibility of server based on retrieved information.
self.check_server_compatibility()
# Construct event hub and load plugins.
self._event_hub = StorerEventHub(
self._server_url,
self._api_user,
self._api_key,
sock=sock
)
self._auto_connect_event_hub_thread = None
if auto_connect_event_hub in (None, True):
# Connect to event hub in background thread so as not to block main
# session usage waiting for event hub connection.
self._auto_connect_event_hub_thread = threading.Thread(
target=self._event_hub.connect
)
self._auto_connect_event_hub_thread.daemon = True
self._auto_connect_event_hub_thread.start()
# To help with migration from auto_connect_event_hub default changing
# from True to False.
self._event_hub._deprecation_warning_auto_connect = (
auto_connect_event_hub is None
)
# Register to auto-close session on exit.
atexit.register(self.close)
self._plugin_paths = plugin_paths
if self._plugin_paths is None:
self._plugin_paths = os.environ.get(
'FTRACK_EVENT_PLUGIN_PATH', ''
).split(os.pathsep)
self._discover_plugins(plugin_arguments=plugin_arguments)
# TODO: Make schemas read-only and non-mutable (or at least without
# rebuilding types)?
if schema_cache_path is not False:
if schema_cache_path is None:
schema_cache_path = os.environ.get(
'FTRACK_API_SCHEMA_CACHE_PATH', tempfile.gettempdir()
)
schema_cache_path = os.path.join(
schema_cache_path, 'ftrack_api_schema_cache.json'
)
self.schemas = self._load_schemas(schema_cache_path)
self.types = self._build_entity_type_classes(self.schemas)
ftrack_api._centralized_storage_scenario.register(self)
self._configure_locations()
self.event_hub.publish(
ftrack_api.event.base.Event(
topic='ftrack.api.session.ready',
data=dict(
session=self
)
),
synchronous=True
)


@@ -1,7 +1,5 @@
import os
import sys
import time
import signal
import socket
import threading
import subprocess
@@ -10,7 +8,9 @@ from pypeapp import Logger
class SocketThread(threading.Thread):
"""Thread that checks the subprocess of the storer or processor of events"""
MAX_TIMEOUT = 35
def __init__(self, name, port, filepath):
super(SocketThread, self).__init__()
self.log = Logger().get_logger("SocketThread", "Event Thread")
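The hubs above answer every heartbeat by writing b"storer" or b"processor" to this socket. The body of SocketThread is not shown in this diff; the following is only an assumed sketch of what a MAX_TIMEOUT watchdog around such a subprocess might look like:

# Assumed sketch, not the committed implementation: the restart condition is
# "no heartbeat bytes received within MAX_TIMEOUT seconds".
import socket
import subprocess
import sys

MAX_TIMEOUT = 35

def watch_subprocess(filepath, port):
    server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    server.bind(("localhost", port))
    server.listen(1)
    process = subprocess.Popen([sys.executable, filepath, str(port)])
    connection, _ = server.accept()
    connection.settimeout(MAX_TIMEOUT)
    try:
        while True:
            data = connection.recv(16)  # heartbeat replies from the hub
            if not data:
                break  # subprocess closed the socket
    except socket.timeout:
        pass  # no heartbeat within MAX_TIMEOUT, treat the subprocess as hung
    finally:
        process.kill()
        connection.close()
        server.close()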


@@ -1,12 +1,9 @@
import os
import sys
import datetime
import signal
import socket
import pymongo
from ftrack_server import FtrackServer
from pype.ftrack.ftrack_server.session_processor import ProcessSession
from pype.ftrack.ftrack_server.lib import SocketSession, ProcessEventHub
from pypeapp import Logger
log = Logger().get_logger("Event processor")
@@ -24,12 +21,14 @@ def main(args):
sock.sendall(b"CreatedProcess")
try:
session = ProcessSession(auto_connect_event_hub=True, sock=sock)
server = FtrackServer('event')
session = SocketSession(
auto_connect_event_hub=True, sock=sock, Eventhub=ProcessEventHub
)
server = FtrackServer("event")
log.debug("Launched Ftrack Event processor")
server.run_server(session)
except Exception as exc:
except Exception:
log.error("Event server crashed. See traceback below", exc_info=True)
finally:


@@ -7,22 +7,22 @@ import pymongo
import ftrack_api
from ftrack_server import FtrackServer
from pype.ftrack.ftrack_server.lib import get_ftrack_event_mongo_info
from pype.ftrack.ftrack_server.lib import (
get_ftrack_event_mongo_info,
SocketSession,
StorerEventHub
)
from pype.ftrack.lib.custom_db_connector import DbConnector
from session_storer import StorerSession
from pypeapp import Logger
log = Logger().get_logger("Event storer")
class SessionFactory:
session = None
url, database, table_name = get_ftrack_event_mongo_info()
class SessionClass:
def __init__(self):
self.session = None
session_obj = SessionClass()
dbcon = DbConnector(
mongo_url=url,
database_name=database,
@@ -75,7 +75,11 @@ def launch(event):
def trigger_sync(event):
session = session_obj.session
session = SessionFactory.session
source_id = event.get("source", {}).get("id")
if not source_id or source_id != session.event_hub.id:
return
if session is None:
log.warning("Session is not set. Can't trigger Sync to avalon action.")
return True
@@ -93,7 +97,7 @@ def trigger_sync(event):
"$set": {"pype_data.is_processed": True}
}
dbcon.update_many(query, set_dict)
selections = []
for project in projects:
if project["status"] != "active":
@@ -154,8 +158,10 @@ def main(args):
sock.sendall(b"CreatedStore")
try:
session = StorerSession(auto_connect_event_hub=True, sock=sock)
session_obj.session = session
session = SocketSession(
auto_connect_event_hub=True, sock=sock, Eventhub=StorerEventHub
)
SessionFactory.session = session
register(session)
server = FtrackServer("event")
log.debug("Launched Ftrack Event storer")


@@ -1,4 +1,3 @@
import os
import sys
import time
import datetime
@@ -7,7 +6,6 @@ import threading
from ftrack_server import FtrackServer
import ftrack_api
from ftrack_api.event.hub import EventHub
from pypeapp import Logger
log = Logger().get_logger("Event Server Legacy")
@@ -37,7 +35,10 @@ class TimerChecker(threading.Thread):
if not self.session.event_hub.connected:
if not connected:
if (datetime.datetime.now() - start).seconds > self.max_time_out:
if (
(datetime.datetime.now() - start).seconds >
self.max_time_out
):
log.error((
"Exiting event server. Session was not connected"
" to ftrack server in {} seconds."
@ -61,7 +62,7 @@ class TimerChecker(threading.Thread):
def main(args):
check_thread = None
try:
server = FtrackServer('event')
server = FtrackServer("event")
session = ftrack_api.Session(auto_connect_event_hub=True)
check_thread = TimerChecker(server, session)

View file

@ -2,7 +2,7 @@ import functools
import time
from pypeapp import Logger
import ftrack_api
from pype.ftrack.ftrack_server import session_processor
from pype.ftrack.ftrack_server.lib import SocketSession
class MissingPermision(Exception):
@ -41,7 +41,7 @@ class BaseHandler(object):
self.log = Logger().get_logger(self.__class__.__name__)
if not(
isinstance(session, ftrack_api.session.Session) or
isinstance(session, session_processor.ProcessSession)
isinstance(session, SocketSession)
):
raise Exception((
"Session object entered with args is instance of \"{}\""

View file

@ -6,6 +6,7 @@ from collections import OrderedDict
from avalon import api, io, lib
import avalon.nuke
from avalon.nuke import lib as anlib
import pype.api as pype
import nuke
@ -1195,6 +1196,176 @@ class BuildWorkfile(WorkfileSettings):
def position_up(self, multiply=1):
self.ypos -= (self.ypos_size * multiply) + self.ypos_gap
class Exporter_review_lut:
"""
Generator object for review lut from Nuke
Args:
klass (pyblish.plugin): pyblish plugin parent
"""
_temp_nodes = []
data = dict({
"representations": list()
})
def __init__(self,
klass,
instance,
name=None,
ext=None,
cube_size=None,
lut_size=None,
lut_style=None):
self.log = klass.log
self.instance = instance
self.name = name or "baked_lut"
self.ext = ext or "cube"
self.cube_size = cube_size or 32
self.lut_size = lut_size or 1024
self.lut_style = lut_style or "linear"
self.stagingDir = self.instance.data["stagingDir"]
self.collection = self.instance.data.get("collection", None)
# set frame start / end and file name to self
self.get_file_info()
self.log.info("File info was set...")
self.file = self.fhead + self.name + ".{}".format(self.ext)
self.path = os.path.join(self.stagingDir, self.file).replace("\\", "/")
def generate_lut(self):
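# Builds a temporary chain CMSTestPattern -> (optional viewer input process)
# -> OCIODisplay -> GenerateLUT, renders one frame to write the lut file,
# fills the representation data and finally deletes the temporary nodes.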
# ---------- start nodes creation
# CMSTestPattern
cms_node = nuke.createNode("CMSTestPattern")
cms_node["cube_size"].setValue(self.cube_size)
# connect
self._temp_nodes.append(cms_node)
self.previous_node = cms_node
self.log.debug("CMSTestPattern... `{}`".format(self._temp_nodes))
# Node View Process
ipn = self.get_view_process_node()
if ipn is not None:
# connect
ipn.setInput(0, self.previous_node)
self._temp_nodes.append(ipn)
self.previous_node = ipn
self.log.debug("ViewProcess... `{}`".format(self._temp_nodes))
# OCIODisplay
dag_node = nuke.createNode("OCIODisplay")
# connect
dag_node.setInput(0, self.previous_node)
self._temp_nodes.append(dag_node)
self.previous_node = dag_node
self.log.debug("OCIODisplay... `{}`".format(self._temp_nodes))
# GenerateLUT
gen_lut_node = nuke.createNode("GenerateLUT")
gen_lut_node["file"].setValue(self.path)
gen_lut_node["file_type"].setValue(".{}".format(self.ext))
gen_lut_node["lut1d"].setValue(self.lut_size)
gen_lut_node["style1d"].setValue(self.lut_style)
# connect
gen_lut_node.setInput(0, self.previous_node)
self._temp_nodes.append(gen_lut_node)
self.log.debug("GenerateLUT... `{}`".format(self._temp_nodes))
# ---------- end nodes creation
# Export lut file
nuke.execute(
gen_lut_node.name(),
int(self.first_frame),
int(self.first_frame))
self.log.info("Exported...")
# ---------- generate representation data
self.get_representation_data()
self.log.debug("Representation... `{}`".format(self.data))
# ---------- Clean up
for node in self._temp_nodes:
nuke.delete(node)
self.log.info("Deleted nodes...")
return self.data
def get_file_info(self):
if self.collection:
self.log.debug("Collection: `{}`".format(self.collection))
# get path
self.fname = os.path.basename(self.collection.format(
"{head}{padding}{tail}"))
self.fhead = self.collection.format("{head}")
# get first and last frame
self.first_frame = min(self.collection.indexes)
self.last_frame = max(self.collection.indexes)
else:
self.fname = os.path.basename(self.instance.data.get("path", None))
self.fhead = os.path.splitext(self.fname)[0] + "."
self.first_frame = self.instance.data.get("frameStart", None)
self.last_frame = self.instance.data.get("frameEnd", None)
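# strip '#' frame-padding tokens (and the duplicate separator they leave)
# so the lut file name carries no frame padding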
if "#" in self.fhead:
self.fhead = self.fhead.replace("#", "")[:-1]
def get_representation_data(self):
repre = {
'name': self.name,
'ext': self.ext,
'files': self.file,
"stagingDir": self.stagingDir,
"anatomy_template": "publish",
"tags": [self.name.replace("_", "-")]
}
self.data["representations"].append(repre)
def get_view_process_node(self):
"""
Will get any active view process.
Returns:
nuke.Node: copy of the active Viewer's Input Process node, if any
"""
anlib.reset_selection()
ipn_orig = None
for v in [n for n in nuke.allNodes()
if "Viewer" in n.Class()]:
ip = v['input_process'].getValue()
ipn = v['input_process_node'].getValue()
if "VIEWER_INPUT" not in ipn and ip:
ipn_orig = nuke.toNode(ipn)
ipn_orig.setSelected(True)
if ipn_orig:
# copy selected to clipboard
nuke.nodeCopy('%clipboard%')
# reset selection
anlib.reset_selection()
# paste node and selection is on it only
nuke.nodePaste('%clipboard%')
# assign to variable
ipn = nuke.selectedNode()
return ipn
def get_dependent_nodes(nodes):
"""Get all dependent nodes connected to the list of nodes.

View file

@ -100,6 +100,8 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin):
label = "RenderedFrames"
def process(self, context):
pixel_aspect = 1
lut_path = None
if os.environ.get("PYPE_PUBLISH_PATHS"):
paths = os.environ["PYPE_PUBLISH_PATHS"].split(os.pathsep)
self.log.info("Collecting paths: {}".format(paths))
@ -144,6 +146,12 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin):
self.log.info("setting session using metadata")
api.Session.update(session)
os.environ.update(session)
instance = metadata.get("instance")
if instance:
instance_family = instance.get("family")
pixel_aspect = instance.get("pixelAspect", 1)
lut_path = instance.get("lutPath", None)
else:
# Search in directory
@ -181,6 +189,8 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin):
families.append("ftrack")
if "review" not in families:
families.append("review")
if "write" in instance_family:
families.append("write")
for collection in collections:
instance = context.create_instance(str(collection))
@ -197,6 +207,11 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin):
start = data.get("frameStart", indices[0])
end = data.get("frameEnd", indices[-1])
self.log.debug("Collected pixel_aspect:\n"
"{}".format(pixel_aspect))
self.log.debug("type pixel_aspect:\n"
"{}".format(type(pixel_aspect)))
# root = os.path.normpath(root)
# self.log.info("Source: {}}".format(data.get("source", "")))
@ -212,8 +227,11 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin):
"frameStart": start,
"frameEnd": end,
"fps": fps,
"source": data.get('source', '')
"source": data.get('source', ''),
"pixelAspect": pixel_aspect,
})
if lut_path:
instance.data.update({"lutPath": lut_path})
instance.append(collection)
instance.context.data['fps'] = fps

View file

@ -85,3 +85,6 @@ class CollectTemplates(pyblish.api.InstancePlugin):
instance.data["assumedDestination"] = os.path.dirname(
(anatomy.format(template_data))["publish"]["path"]
)
self.log.info("Assumed Destination has been created...")
self.log.debug("__ assumedTemplateData: `{}`".format(instance.data["assumedTemplateData"]))
self.log.debug("__ template: `{}`".format(instance.data["template"]))

View file

@ -1,9 +1,8 @@
import os
import math
import pyblish.api
import clique
import pype.api
from pypeapp import config
class ExtractReview(pyblish.api.InstancePlugin):
@ -22,16 +21,19 @@ class ExtractReview(pyblish.api.InstancePlugin):
families = ["review"]
hosts = ["nuke", "maya", "shell"]
outputs = {}
ext_filter = []
def process(self, instance):
# adding plugin attributes from presets
publish_presets = config.get_presets()["plugins"]["global"]["publish"]
plugin_attrs = publish_presets[self.__class__.__name__]
output_profiles = plugin_attrs.get("outputs", {})
output_profiles = self.outputs or {}
inst_data = instance.data
fps = inst_data.get("fps")
start_frame = inst_data.get("frameStart")
resolution_height = instance.data.get("resolutionHeight", 1080)
resolution_width = instance.data.get("resolutionWidth", 1920)
pixel_aspect = instance.data.get("pixelAspect", 1)
self.log.debug("Families In: `{}`".format(instance.data["families"]))
# get representation and loop them
@ -40,7 +42,7 @@ class ExtractReview(pyblish.api.InstancePlugin):
# filter out mov and img sequences
representations_new = representations[:]
for repre in representations:
if repre['ext'] in plugin_attrs["ext_filter"]:
if repre['ext'] in self.ext_filter:
tags = repre.get("tags", [])
self.log.info("Try repre: {}".format(repre))
@ -92,8 +94,9 @@ class ExtractReview(pyblish.api.InstancePlugin):
self.log.info("p_tags: `{}`".format(p_tags))
# add families
[instance.data["families"].append(t)
for t in p_tags
if t not in instance.data["families"]]
for t in p_tags
if t not in instance.data["families"]]
# add to
[new_tags.append(t) for t in p_tags
if t not in new_tags]
@ -147,22 +150,83 @@ class ExtractReview(pyblish.api.InstancePlugin):
)
output_args = []
output_args.extend(profile.get('codec', []))
codec_args = profile.get('codec', [])
output_args.extend(codec_args)
# preset's output data
output_args.extend(profile.get('output', []))
# letter_box
# TODO: add to documentation
lb = profile.get('letter_box', None)
if lb:
lb = profile.get('letter_box', 0)
if lb != 0:
if "reformat" not in p_tags:
lb /= pixel_aspect
output_args.append(
"-filter:v drawbox=0:0:iw:round((ih-(iw*(1/{0})))/2):t=fill:c=black,drawbox=0:ih-round((ih-(iw*(1/{0})))/2):iw:round((ih-(iw*(1/{0})))/2):t=fill:c=black".format(lb))
"-filter:v scale=1920x1080:flags=lanczos,setsar=1,drawbox=0:0:iw:round((ih-(iw*(1/{0})))/2):t=fill:c=black,drawbox=0:ih-round((ih-(iw*(1/{0})))/2):iw:round((ih-(iw*(1/{0})))/2):t=fill:c=black".format(lb))
# In case audio is longer than video.
output_args.append("-shortest")
# output filename
output_args.append(full_output_path)
self.log.debug("__ pixel_aspect: `{}`".format(pixel_aspect))
self.log.debug("__ resolution_width: `{}`".format(resolution_width))
self.log.debug("__ resolution_height: `{}`".format(resolution_height))
# scaling non-square pixels and fitting to 1920 width
if "reformat" in p_tags:
width_scale = 1920
width_half_pad = 0
res_w = int(float(resolution_width) * pixel_aspect)
height_half_pad = int((
(res_w - 1920) / (
res_w * .01) * (
1080 * .01)) / 2
)
height_scale = 1080 - (height_half_pad * 2)
if height_scale > 1080:
height_half_pad = 0
height_scale = 1080
width_half_pad = (1920 - (float(resolution_width) * (1080 / float(resolution_height)))) / 2
width_scale = int(1920 - (width_half_pad * 2))
self.log.debug("__ width_scale: `{}`".format(width_scale))
self.log.debug("__ width_half_pad: `{}`".format(width_half_pad))
self.log.debug("__ height_scale: `{}`".format(height_scale))
self.log.debug("__ height_half_pad: `{}`".format(height_half_pad))
scaling_arg = "scale={0}x{1}:flags=lanczos,pad=1920:1080:{2}:{3}:black,setsar=1".format(
width_scale, height_scale, width_half_pad, height_half_pad
)
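# scaling_arg scales the source to width_scale x height_scale with a lanczos
# filter, pads it onto a 1920x1080 canvas offset by the computed half-pads so
# the image stays centred, and forces square pixels with setsar=1.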
vf_back = self.add_video_filter_args(
output_args, scaling_arg)
# add it to output_args
output_args.insert(0, vf_back)
# baking lut file application
lut_path = instance.data.get("lutPath")
if lut_path and ("bake-lut" in p_tags):
# removing gamma info as it is all baked into the lut
gamma = next((g for g in input_args
if "-gamma" in g), None)
if gamma:
input_args.remove(gamma)
# create lut argument
lut_arg = "lut3d=file='{}'".format(
lut_path.replace(
"\\", "/").replace(":/", "\\:/")
)
lut_arg += ",colormatrix=bt601:bt709"
vf_back = self.add_video_filter_args(
output_args, lut_arg)
# add it to output_args
output_args.insert(0, vf_back)
self.log.info("Added Lut to ffmpeg command")
self.log.debug("_ output_args: `{}`".format(output_args))
mov_args = [
os.path.join(
os.environ.get(
@ -185,7 +249,7 @@ class ExtractReview(pyblish.api.InstancePlugin):
'files': repr_file,
"tags": new_tags,
"outputName": name,
"codec": profile.get('codec', [])
"codec": codec_args
})
if repre_new.get('preview'):
repre_new.pop("preview")
@ -209,3 +273,40 @@ class ExtractReview(pyblish.api.InstancePlugin):
instance.data["representations"] = representations_new
self.log.debug("Families Out: `{}`".format(instance.data["families"]))
def add_video_filter_args(self, args, inserting_arg):
"""
Fix video filter arguments into one long string
Args:
args (list): list of string arguments
inserting_arg (str): string argument we want to add
(without flag `-vf`)
Returns:
str: long joined argument to be added back to list of arguments
"""
# find all existing video filter settings
vf_settings = [p for p in args
for v in ["-filter:v", "-vf"]
if v in p]
self.log.debug("_ vf_settings: `{}`".format(vf_settings))
# remove them from output args list
for p in vf_settings:
self.log.debug("_ remove p: `{}`".format(p))
args.remove(p)
self.log.debug("_ args: `{}`".format(args))
# strip them from all flags
vf_fixed = [p.replace("-vf ", "").replace("-filter:v ", "")
for p in vf_settings]
self.log.debug("_ vf_fixed: `{}`".format(vf_fixed))
vf_fixed.insert(0, inserting_arg)
self.log.debug("_ vf_fixed: `{}`".format(vf_fixed))
# create new video filter setting
vf_back = "-vf " + ",".join(vf_fixed)
return vf_back
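# Example (hypothetical values): with args holding "-vf scale=1920:1080" and
# inserting_arg "lut3d=file='x.cube'", the old entry is removed and
# "-vf lut3d=file='x.cube',scale=1920:1080" is returned, so ffmpeg never
# receives two competing -vf/-filter:v flags.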

View file

@ -71,7 +71,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
"yetiRig",
"yeticache",
"nukenodes",
"gizmo"
"gizmo",
"source",
"matchmove",
"image"
@ -414,7 +414,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
}
if sequence_repre and repre.get("frameStart"):
representation['context']['frame'] = src_padding_exp % repre.get("frameStart")
representation['context']['frame'] = src_padding_exp % int(repre.get("frameStart"))
self.log.debug("__ representation: {}".format(representation))
destination_list.append(dst)

View file

@ -27,6 +27,8 @@ class ValidateFfmpegInstallef(pyblish.api.Validator):
return True
def process(self, instance):
self.log.info("ffmpeg path: `{}`".format(
os.environ.get("FFMPEG_PATH", "")))
if self.is_tool(
os.path.join(
os.environ.get("FFMPEG_PATH", ""), "ffmpeg")) is False:

View file

@ -219,10 +219,6 @@ class CollectLook(pyblish.api.InstancePlugin):
with lib.renderlayer(instance.data["renderlayer"]):
self.collect(instance)
# make ftrack publishable
self.maketx = instance.data.get('maketx', True)
instance.data['maketx'] = self.maketx
self.log.info('maketx: {}'.format(self.maketx))
def collect(self, instance):

View file

@ -74,6 +74,8 @@ def maketx(source, destination, *args):
cmd.extend(args)
cmd.extend(["-o", destination, source])
cmd = " ".join(cmd)
CREATE_NO_WINDOW = 0x08000000
kwargs = dict(args=cmd, stderr=subprocess.STDOUT)
@ -183,6 +185,7 @@ class ExtractLook(pype.api.Extractor):
transfers = list()
hardlinks = list()
hashes = dict()
forceCopy = instance.data.get("forceCopy", False)
self.log.info(files)
for filepath in files_metadata:
@ -195,20 +198,26 @@ class ExtractLook(pype.api.Extractor):
files_metadata[filepath]["color_space"] = "raw"
source, mode, hash = self._process_texture(
filepath, do_maketx, staging=dir_path, linearise=linearise
filepath,
do_maketx,
staging=dir_path,
linearise=linearise,
force=forceCopy
)
destination = self.resource_destination(instance,
source,
do_maketx)
# Force copy is specified.
if instance.data.get("forceCopy", False):
if forceCopy:
mode = COPY
if mode == COPY:
transfers.append((source, destination))
self.log.info('copying')
elif mode == HARDLINK:
hardlinks.append((source, destination))
self.log.info('hardlinking')
# Store the hashes from hash to destination to include in the
# database
@ -231,11 +240,12 @@ class ExtractLook(pype.api.Extractor):
# ensure after context it's still the original value.
color_space_attr = resource["node"] + ".colorSpace"
color_space = cmds.getAttr(color_space_attr)
if files_metadata[source]["color_space"] == "raw":
# set colorspace to raw if we linearized it
color_space = "Raw"
# Remap file node filename to destination
attr = resource["attribute"]
remap[attr] = destinations[source]
remap[color_space_attr] = color_space
self.log.info("Finished remapping destinations ...")
@ -310,6 +320,12 @@ class ExtractLook(pype.api.Extractor):
# Source hash for the textures
instance.data["sourceHashes"] = hashes
"""
self.log.info("Returning colorspaces to their original values ...")
for attr, value in remap.items():
self.log.info(" - {}: {}".format(attr, value))
cmds.setAttr(attr, value, type="string")
"""
self.log.info("Extracted instance '%s' to: %s" % (instance.name,
maya_path))
@ -330,7 +346,7 @@ class ExtractLook(pype.api.Extractor):
instance.data["assumedDestination"], "resources", basename + ext
)
def _process_texture(self, filepath, do_maketx, staging, linearise):
def _process_texture(self, filepath, do_maketx, staging, linearise, force):
"""Process a single texture file on disk for publishing.
This will:
1. Check whether it's already published, if so it will do hardlink
@ -352,7 +368,7 @@ class ExtractLook(pype.api.Extractor):
# If source has been published before with the same settings,
# then don't reprocess but hardlink from the original
existing = find_paths_by_hash(texture_hash)
if existing:
if existing and not force:
self.log.info("Found hash in database, preparing hardlink..")
source = next((p for p in existing if os.path.exists(p)), None)
if filepath:

View file

@ -34,6 +34,7 @@ class CreateWriteRender(plugin.PypeCreator):
data.update({k: v})
self.data = data
self.nodes = nuke.selectedNodes()
self.log.info("self.data: '{}'".format(self.data))
def process(self):
@ -46,9 +47,9 @@ class CreateWriteRender(plugin.PypeCreator):
# use selection
if (self.options or {}).get("useSelection"):
nodes = nuke.selectedNodes()
nodes = self.nodes
assert len(nodes) == 1, self.log.error("Select only one node. The node you want to connect to, or tick off `Use selection`")
assert len(nodes) < 2, self.log.error("Select only one node. The node you want to connect to, or tick off `Use selection`")
selected_node = nodes[0]
inputs = [selected_node]

View file

@ -22,6 +22,8 @@ class CollectNukeInstances(pyblish.api.ContextPlugin):
self.log.debug("asset_data: {}".format(asset_data["data"]))
instances = []
root = nuke.root()
self.log.debug("nuke.allNodes(): {}".format(nuke.allNodes()))
for node in nuke.allNodes():
@ -30,11 +32,11 @@ class CollectNukeInstances(pyblish.api.ContextPlugin):
continue
except Exception as E:
self.log.warning(E)
continue
# get data from avalon knob
self.log.debug("node[name]: {}".format(node['name'].value()))
avalon_knob_data = get_avalon_knob_data(node)
avalon_knob_data = get_avalon_knob_data(node, ["avalon:", "ak:"])
self.log.debug("avalon_knob_data: {}".format(avalon_knob_data))
@ -84,10 +86,22 @@ class CollectNukeInstances(pyblish.api.ContextPlugin):
node.end()
family = avalon_knob_data["family"]
families = [avalon_knob_data["families"]]
families = avalon_knob_data.get("families")
if families:
families = [families]
else:
families = [family]
# Get format
format = root['format'].value()
resolution_width = format.width()
resolution_height = format.height()
pixel_aspect = format.pixelAspect()
if node.Class() not in "Read":
if node["render"].value():
if "render" not in node.knobs().keys():
families.insert(0, family)
elif node["render"].value():
self.log.info("flagged for render")
add_family = "render.local"
# dealing with local/farm rendering
@ -111,7 +125,10 @@ class CollectNukeInstances(pyblish.api.ContextPlugin):
"avalonKnob": avalon_knob_data,
"publish": node.knob('publish').value(),
"step": 1,
"fps": nuke.root()['fps'].value()
"fps": nuke.root()['fps'].value(),
"resolutionWidth": resolution_width,
"resolutionHeight": resolution_height,
"pixelAspect": pixel_aspect,
})
@ -119,5 +136,4 @@ class CollectNukeInstances(pyblish.api.ContextPlugin):
instances.append(instance)
context.data["instances"] = instances
self.log.debug("context: {}".format(context))

View file

@ -11,7 +11,7 @@ class CollectNukeWrites(pyblish.api.InstancePlugin):
order = pyblish.api.CollectorOrder + 0.1
label = "Collect Writes"
hosts = ["nuke", "nukeassist"]
families = ["render", "render.local", "render.farm"]
families = ["write"]
def process(self, instance):
@ -76,7 +76,8 @@ class CollectNukeWrites(pyblish.api.InstancePlugin):
}
try:
collected_frames = os.listdir(output_dir)
collected_frames = [f for f in os.listdir(output_dir)
if ext in f]
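# only keep files that contain the expected extension; other files in
# the output dir are ignored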
if collected_frames:
representation['frameStart'] = "%0{}d".format(
len(str(last_frame))) % first_frame
@ -95,11 +96,11 @@ class CollectNukeWrites(pyblish.api.InstancePlugin):
"frameEnd": last_frame - handle_end,
"version": int(version),
"colorspace": node["colorspace"].value(),
"families": [instance.data["family"]] + instance.data["families"],
"families": [instance.data["family"]],
"subset": instance.data["subset"],
"fps": instance.context.data["fps"]
}
instance.data["family"] = "write"
group_node = [x for x in instance if x.Class() == "Group"][0]
deadlineChunkSize = 1
if "deadlineChunkSize" in group_node.knobs():
@ -109,6 +110,7 @@ class CollectNukeWrites(pyblish.api.InstancePlugin):
if "deadlinePriority" in group_node.knobs():
deadlinePriority = group_node["deadlinePriority"].value()
families = [f for f in instance.data["families"] if "write" not in f]
instance.data.update({
"versionData": version_data,
"path": path,
@ -119,10 +121,13 @@ class CollectNukeWrites(pyblish.api.InstancePlugin):
"frameStart": first_frame,
"frameEnd": last_frame,
"outputType": output_type,
"family": "write",
"families": families,
"colorspace": node["colorspace"].value(),
"deadlineChunkSize": deadlineChunkSize,
"deadlinePriority": deadlinePriority,
"subsetGroup": "renders"
})
self.log.debug("instance.data: {}".format(instance.data))

View file

@ -1,187 +0,0 @@
import os
import nuke
import pyblish.api
import pype
class ExtractReviewData(pype.api.Extractor):
"""Extracts movie and thumbnail with baked in luts
must be run after extract_render_local.py
"""
order = pyblish.api.ExtractorOrder + 0.01
label = "Extract Review Data"
families = ["review"]
hosts = ["nuke"]
def process(self, instance):
# Store selection
selection = [i for i in nuke.allNodes() if i["selected"].getValue()]
# Deselect all nodes to prevent external connections
[i["selected"].setValue(False) for i in nuke.allNodes()]
self.log.debug("creating staging dir:")
self.staging_dir(instance)
self.log.debug("instance: {}".format(instance))
self.log.debug("instance.data[families]: {}".format(
instance.data["families"]))
if "still" not in instance.data["families"]:
self.render_review_representation(instance,
representation="mov")
self.render_review_representation(instance,
representation="jpeg")
else:
self.render_review_representation(instance, representation="jpeg")
# Restore selection
[i["selected"].setValue(False) for i in nuke.allNodes()]
[i["selected"].setValue(True) for i in selection]
def render_review_representation(self,
instance,
representation="mov"):
assert instance.data['representations'][0]['files'], "Instance data files should't be empty!"
temporary_nodes = []
stagingDir = instance.data[
'representations'][0]["stagingDir"].replace("\\", "/")
self.log.debug("StagingDir `{0}`...".format(stagingDir))
collection = instance.data.get("collection", None)
if collection:
# get path
fname = os.path.basename(collection.format(
"{head}{padding}{tail}"))
fhead = collection.format("{head}")
# get first and last frame
first_frame = min(collection.indexes)
last_frame = max(collection.indexes)
else:
fname = os.path.basename(instance.data.get("path", None))
fhead = os.path.splitext(fname)[0] + "."
first_frame = instance.data.get("frameStart", None)
last_frame = instance.data.get("frameEnd", None)
rnode = nuke.createNode("Read")
rnode["file"].setValue(
os.path.join(stagingDir, fname).replace("\\", "/"))
rnode["first"].setValue(first_frame)
rnode["origfirst"].setValue(first_frame)
rnode["last"].setValue(last_frame)
rnode["origlast"].setValue(last_frame)
temporary_nodes.append(rnode)
previous_node = rnode
# get input process and connect it to baking
ipn = self.get_view_process_node()
if ipn is not None:
ipn.setInput(0, previous_node)
previous_node = ipn
temporary_nodes.append(ipn)
reformat_node = nuke.createNode("Reformat")
ref_node = self.nodes.get("Reformat", None)
if ref_node:
for k, v in ref_node:
self.log.debug("k,v: {0}:{1}".format(k,v))
if isinstance(v, unicode):
v = str(v)
reformat_node[k].setValue(v)
reformat_node.setInput(0, previous_node)
previous_node = reformat_node
temporary_nodes.append(reformat_node)
dag_node = nuke.createNode("OCIODisplay")
dag_node.setInput(0, previous_node)
previous_node = dag_node
temporary_nodes.append(dag_node)
# create write node
write_node = nuke.createNode("Write")
if representation in "mov":
file = fhead + "baked.mov"
name = "baked"
path = os.path.join(stagingDir, file).replace("\\", "/")
self.log.debug("Path: {}".format(path))
instance.data["baked_colorspace_movie"] = path
write_node["file"].setValue(path)
write_node["file_type"].setValue("mov")
write_node["raw"].setValue(1)
write_node.setInput(0, previous_node)
temporary_nodes.append(write_node)
tags = ["review", "delete"]
elif representation in "jpeg":
file = fhead + "jpeg"
name = "thumbnail"
path = os.path.join(stagingDir, file).replace("\\", "/")
instance.data["thumbnail"] = path
write_node["file"].setValue(path)
write_node["file_type"].setValue("jpeg")
write_node["raw"].setValue(1)
write_node.setInput(0, previous_node)
temporary_nodes.append(write_node)
tags = ["thumbnail"]
# retime for
first_frame = int(last_frame) / 2
last_frame = int(last_frame) / 2
repre = {
'name': name,
'ext': representation,
'files': file,
"stagingDir": stagingDir,
"frameStart": first_frame,
"frameEnd": last_frame,
"anatomy_template": "render",
"tags": tags
}
instance.data["representations"].append(repre)
# Render frames
nuke.execute(write_node.name(), int(first_frame), int(last_frame))
self.log.debug("representations: {}".format(instance.data["representations"]))
# Clean up
for node in temporary_nodes:
nuke.delete(node)
def get_view_process_node(self):
# Select only the target node
if nuke.selectedNodes():
[n.setSelected(False) for n in nuke.selectedNodes()]
ipn_orig = None
for v in [n for n in nuke.allNodes()
if "Viewer" in n.Class()]:
ip = v['input_process'].getValue()
ipn = v['input_process_node'].getValue()
if "VIEWER_INPUT" not in ipn and ip:
ipn_orig = nuke.toNode(ipn)
ipn_orig.setSelected(True)
if ipn_orig:
nuke.nodeCopy('%clipboard%')
[n.setSelected(False) for n in nuke.selectedNodes()] # Deselect all
nuke.nodePaste('%clipboard%')
ipn = nuke.selectedNode()
return ipn

View file

@ -0,0 +1,58 @@
import os
import pyblish.api
from avalon.nuke import lib as anlib
from pype.nuke import lib as pnlib
import pype
reload(pnlib)
class ExtractReviewLutData(pype.api.Extractor):
"""Extracts movie and thumbnail with baked in luts
must be run after extract_render_local.py
"""
order = pyblish.api.ExtractorOrder + 0.005
label = "Extract Review Data Lut"
families = ["review"]
hosts = ["nuke"]
def process(self, instance):
families = instance.data["families"]
self.log.info("Creating staging dir...")
if "representations" in instance.data:
staging_dir = instance.data[
"representations"][0]["stagingDir"].replace("\\", "/")
instance.data["stagingDir"] = staging_dir
instance.data["representations"][0]["tags"] = ["review"]
else:
instance.data["representations"] = []
# get output path
render_path = instance.data['path']
staging_dir = os.path.normpath(os.path.dirname(render_path))
instance.data["stagingDir"] = staging_dir
self.log.info(
"StagingDir `{0}`...".format(instance.data["stagingDir"]))
with anlib.maintained_selection():
exporter = pnlib.Exporter_review_lut(
self, instance
)
data = exporter.generate_lut()
# assign to representations
instance.data["lutPath"] = os.path.join(
exporter.stagingDir, exporter.file).replace("\\", "/")
instance.data["representations"] += data["representations"]
if "render.farm" in families:
instance.data["families"].remove("review")
instance.data["families"].remove("ftrack")
self.log.debug(
"_ lutPath: {}".format(instance.data["lutPath"]))
self.log.debug(
"_ representations: {}".format(instance.data["representations"]))

View file

@ -0,0 +1,174 @@
import os
import nuke
from avalon.nuke import lib as anlib
import pyblish.api
import pype
class ExtractThumbnail(pype.api.Extractor):
"""Extracts movie and thumbnail with baked in luts
must be run after extract_render_local.py
"""
order = pyblish.api.ExtractorOrder + 0.01
label = "Extract Thumbnail"
families = ["review", "render.farm"]
hosts = ["nuke"]
def process(self, instance):
with anlib.maintained_selection():
self.log.debug("instance: {}".format(instance))
self.log.debug("instance.data[families]: {}".format(
instance.data["families"]))
self.render_thumbnail(instance)
def render_thumbnail(self, instance):
node = instance[0] # group node
self.log.info("Creating staging dir...")
if "representations" in instance.data:
staging_dir = instance.data[
"representations"][0]["stagingDir"].replace("\\", "/")
instance.data["stagingDir"] = staging_dir
instance.data["representations"][0]["tags"] = ["review"]
else:
instance.data["representations"] = []
# get output path
render_path = instance.data['path']
staging_dir = os.path.normpath(os.path.dirname(render_path))
instance.data["stagingDir"] = staging_dir
self.log.info(
"StagingDir `{0}`...".format(instance.data["stagingDir"]))
temporary_nodes = []
collection = instance.data.get("collection", None)
if collection:
# get path
fname = os.path.basename(collection.format(
"{head}{padding}{tail}"))
fhead = collection.format("{head}")
# get first and last frame
first_frame = min(collection.indexes)
last_frame = max(collection.indexes)
else:
fname = os.path.basename(instance.data.get("path", None))
fhead = os.path.splitext(fname)[0] + "."
first_frame = instance.data.get("frameStart", None)
last_frame = instance.data.get("frameEnd", None)
if "#" in fhead:
fhead = fhead.replace("#", "")[:-1]
path_render = os.path.join(staging_dir, fname).replace("\\", "/")
# check if file exist otherwise connect to write node
if os.path.isfile(path_render):
rnode = nuke.createNode("Read")
rnode["file"].setValue(path_render)
rnode["first"].setValue(first_frame)
rnode["origfirst"].setValue(first_frame)
rnode["last"].setValue(last_frame)
rnode["origlast"].setValue(last_frame)
temporary_nodes.append(rnode)
previous_node = rnode
else:
previous_node = node
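# no rendered file on disk yet - bake the thumbnail straight from the
# instance's group node instead of a Read node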
# get input process and connect it to baking
ipn = self.get_view_process_node()
if ipn is not None:
ipn.setInput(0, previous_node)
previous_node = ipn
temporary_nodes.append(ipn)
reformat_node = nuke.createNode("Reformat")
ref_node = self.nodes.get("Reformat", None)
if ref_node:
for k, v in ref_node:
self.log.debug("k, v: {0}:{1}".format(k, v))
if isinstance(v, unicode):
v = str(v)
reformat_node[k].setValue(v)
reformat_node.setInput(0, previous_node)
previous_node = reformat_node
temporary_nodes.append(reformat_node)
dag_node = nuke.createNode("OCIODisplay")
dag_node.setInput(0, previous_node)
previous_node = dag_node
temporary_nodes.append(dag_node)
# create write node
write_node = nuke.createNode("Write")
file = fhead + "jpeg"
name = "thumbnail"
path = os.path.join(staging_dir, file).replace("\\", "/")
instance.data["thumbnail"] = path
write_node["file"].setValue(path)
write_node["file_type"].setValue("jpeg")
write_node["raw"].setValue(1)
write_node.setInput(0, previous_node)
temporary_nodes.append(write_node)
tags = ["thumbnail"]
# retime to a single (roughly middle) frame for the thumbnail
first_frame = int(last_frame) / 2
last_frame = int(last_frame) / 2
repre = {
'name': name,
'ext': "jpeg",
'files': file,
"stagingDir": staging_dir,
"frameStart": first_frame,
"frameEnd": last_frame,
"anatomy_template": "render",
"tags": tags
}
instance.data["representations"].append(repre)
# Render frames
nuke.execute(write_node.name(), int(first_frame), int(last_frame))
self.log.debug(
"representations: {}".format(instance.data["representations"]))
# Clean up
for node in temporary_nodes:
nuke.delete(node)
def get_view_process_node(self):
# Select only the target node
if nuke.selectedNodes():
[n.setSelected(False) for n in nuke.selectedNodes()]
ipn_orig = None
for v in [n for n in nuke.allNodes()
if "Viewer" in n.Class()]:
ip = v['input_process'].getValue()
ipn = v['input_process_node'].getValue()
if "VIEWER_INPUT" not in ipn and ip:
ipn_orig = nuke.toNode(ipn)
ipn_orig.setSelected(True)
if ipn_orig:
nuke.nodeCopy('%clipboard%')
[n.setSelected(False) for n in nuke.selectedNodes()] # Deselect all
nuke.nodePaste('%clipboard%')
ipn = nuke.selectedNode()
return ipn

View file

@ -1,9 +1,7 @@
import os
import json
import getpass
import nuke
from avalon import api
from avalon.vendor import requests
import re
@ -27,40 +25,36 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin):
def process(self, instance):
node = None
for x in instance:
if x.Class() == "Write":
node = x
if node is None:
return
node = instance[0]
# for x in instance:
# if x.Class() == "Write":
# node = x
#
# if node is None:
# return
DEADLINE_REST_URL = os.environ.get("DEADLINE_REST_URL",
"http://localhost:8082")
assert DEADLINE_REST_URL, "Requires DEADLINE_REST_URL"
context = instance.context
workspace = os.path.dirname(context.data["currentFile"])
filepath = None
# get path
path = nuke.filename(node)
output_dir = instance.data['outputDir']
# get output path
render_path = instance.data['path']
render_dir = os.path.normpath(os.path.dirname(render_path))
filepath = context.data["currentFile"]
script_path = context.data["currentFile"]
self.log.debug(filepath)
filename = os.path.basename(filepath)
script_name = os.path.basename(script_path)
comment = context.data.get("comment", "")
dirname = os.path.join(workspace, "renders")
deadline_user = context.data.get("deadlineUser", getpass.getuser())
jobname = "%s - %s" % (filename, instance.name)
jobname = "%s - %s" % (script_name, instance.name)
ver = re.search(r"\d+\.\d+", context.data.get("hostVersion"))
try:
# Ensure render folder exists
os.makedirs(dirname)
os.makedirs(render_dir)
except OSError:
pass
@ -71,7 +65,7 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin):
payload = {
"JobInfo": {
# Top-level group name
"BatchName": filename,
"BatchName": script_name,
# Job name, as seen in Monitor
"Name": jobname,
@ -95,20 +89,20 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin):
},
"PluginInfo": {
# Input
"SceneFile": filepath,
"SceneFile": script_path,
# Output directory and filename
"OutputFilePath": dirname.replace("\\", "/"),
"OutputFilePath": render_dir.replace("\\", "/"),
# "OutputFilePrefix": render_variables["filename_prefix"],
# Mandatory for Deadline
"Version": ver.group(),
# Resolve relative references
"ProjectPath": workspace,
"ProjectPath": script_path,
"AWSAssetFile0": render_path,
# Only the specific write node is rendered.
"WriteNode": instance[0].name()
"WriteNode": node.name()
},
# Mandatory for Deadline, may be empty

View file

@ -28,7 +28,7 @@ class ValidateRenderedFrames(pyblish.api.InstancePlugin):
""" Validates file output. """
order = pyblish.api.ValidatorOrder + 0.1
families = ["render.no"]
families = ["render"]
label = "Validate rendered frame"
hosts = ["nuke", "nukestudio"]

View file

@ -1,6 +1,6 @@
import time
import collections
from Qt import QtCore, QtGui, QtWidgets
from Qt import QtCore
from pynput import mouse, keyboard
from pypeapp import Logger
@ -29,6 +29,13 @@ class IdleManager(QtCore.QThread):
def tray_start(self):
self.start()
def tray_exit(self):
self.stop()
try:
self.time_signals = {}
except Exception:
pass
def add_time_signal(self, emit_time, signal):
""" If any module want to use IdleManager, need to use add_time_signal
:param emit_time: time when signal will be emitted

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.6 KiB

View file

@ -1,12 +0,0 @@
<?xml version="1.0" encoding="iso-8859-1"?>
<!-- Generator: Adobe Illustrator 19.0.0, SVG Export Plug-In . SVG Version: 6.00 Build 0) -->
<svg version="1.1" id="Capa_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px"
viewBox="0 0 56 56" style="enable-background:new 0 0 56 56;" xml:space="preserve">
<g>
<path d="M28,0C12.561,0,0,12.561,0,28s12.561,28,28,28s28-12.561,28-28S43.439,0,28,0z M28,54C13.663,54,2,42.336,2,28
S13.663,2,28,2s26,11.664,26,26S42.337,54,28,54z"/>
<path d="M40,16H16c-0.553,0-1,0.448-1,1s0.447,1,1,1h24c0.553,0,1-0.448,1-1S40.553,16,40,16z"/>
<path d="M40,27H16c-0.553,0-1,0.448-1,1s0.447,1,1,1h24c0.553,0,1-0.448,1-1S40.553,27,40,27z"/>
<path d="M40,38H16c-0.553,0-1,0.448-1,1s0.447,1,1,1h24c0.553,0,1-0.448,1-1S40.553,38,40,38z"/>
</g>
</svg>

Before

Width:  |  Height:  |  Size: 823 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.6 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.6 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.6 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.5 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.2 KiB

View file

@ -1,23 +0,0 @@
<?xml version="1.0" encoding="iso-8859-1"?>
<!-- Generator: Adobe Illustrator 16.0.0, SVG Export Plug-In . SVG Version: 6.00 Build 0) -->
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
<svg version="1.1" id="Capa_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px"
width="482.428px" height="482.429px" viewBox="0 0 482.428 482.429" fill="#ffffff" style="enable-background:new 0 0 482.428 482.429;"
xml:space="preserve">
<g>
<path d="M381.163,57.799h-75.094C302.323,25.316,274.686,0,241.214,0c-33.471,0-61.104,25.315-64.85,57.799h-75.098
c-30.39,0-55.111,24.728-55.111,55.117v2.828c0,23.223,14.46,43.1,34.83,51.199v260.369c0,30.39,24.724,55.117,55.112,55.117
h210.236c30.389,0,55.111-24.729,55.111-55.117V166.944c20.369-8.1,34.83-27.977,34.83-51.199v-2.828
C436.274,82.527,411.551,57.799,381.163,57.799z M241.214,26.139c19.037,0,34.927,13.645,38.443,31.66h-76.879
C206.293,39.783,222.184,26.139,241.214,26.139z M375.305,427.312c0,15.978-13,28.979-28.973,28.979H136.096
c-15.973,0-28.973-13.002-28.973-28.979V170.861h268.182V427.312z M410.135,115.744c0,15.978-13,28.979-28.973,28.979H101.266
c-15.973,0-28.973-13.001-28.973-28.979v-2.828c0-15.978,13-28.979,28.973-28.979h279.897c15.973,0,28.973,13.001,28.973,28.979
V115.744z"/>
<path d="M171.144,422.863c7.218,0,13.069-5.853,13.069-13.068V262.641c0-7.216-5.852-13.07-13.069-13.07
c-7.217,0-13.069,5.854-13.069,13.07v147.154C158.074,417.012,163.926,422.863,171.144,422.863z"/>
<path d="M241.214,422.863c7.218,0,13.07-5.853,13.07-13.068V262.641c0-7.216-5.854-13.07-13.07-13.07
c-7.217,0-13.069,5.854-13.069,13.07v147.154C228.145,417.012,233.996,422.863,241.214,422.863z"/>
<path d="M311.284,422.863c7.217,0,13.068-5.853,13.068-13.068V262.641c0-7.216-5.852-13.07-13.068-13.07
c-7.219,0-13.07,5.854-13.07,13.07v147.154C298.213,417.012,304.067,422.863,311.284,422.863z"/>
</g>
</svg>

Before

Width:  |  Height:  |  Size: 1.9 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.2 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.2 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.2 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.1 KiB

View file

@ -1,15 +1,10 @@
import os
from . import QtCore, QtGui, QtWidgets
from . import SvgButton
from . import get_resource
from pypeapp import style
class ComponentItem(QtWidgets.QFrame):
C_NORMAL = '#777777'
C_HOVER = '#ffffff'
C_ACTIVE = '#4BB543'
C_ACTIVE_HOVER = '#4BF543'
signal_remove = QtCore.Signal(object)
signal_thumbnail = QtCore.Signal(object)
@ -58,10 +53,8 @@ class ComponentItem(QtWidgets.QFrame):
self.icon.setText("")
self.icon.setScaledContents(True)
self.btn_action_menu = SvgButton(
get_resource('menu.svg'), 22, 22,
[self.C_NORMAL, self.C_HOVER],
frame_image_info, False
self.btn_action_menu = PngButton(
name="menu", size=QtCore.QSize(22, 22)
)
self.action_menu = QtWidgets.QMenu()
@ -88,7 +81,9 @@ class ComponentItem(QtWidgets.QFrame):
self.file_info.setStyleSheet('padding-left:3px;')
expanding_sizePolicy.setHeightForWidth(self.name.sizePolicy().hasHeightForWidth())
expanding_sizePolicy.setHeightForWidth(
self.name.sizePolicy().hasHeightForWidth()
)
frame_name_repre = QtWidgets.QFrame(frame)
@ -104,7 +99,8 @@ class ComponentItem(QtWidgets.QFrame):
layout.addWidget(self.ext, alignment=QtCore.Qt.AlignRight)
frame_name_repre.setSizePolicy(
QtWidgets.QSizePolicy.MinimumExpanding, QtWidgets.QSizePolicy.MinimumExpanding
QtWidgets.QSizePolicy.MinimumExpanding,
QtWidgets.QSizePolicy.MinimumExpanding
)
# Repre + icons
@ -156,12 +152,7 @@ class ComponentItem(QtWidgets.QFrame):
layout_main.addWidget(frame_middle)
self.remove = SvgButton(
get_resource('trash.svg'), 22, 22,
[self.C_NORMAL, self.C_HOVER],
frame, False
)
self.remove = PngButton(name="trash", size=QtCore.QSize(22, 22))
layout_main.addWidget(self.remove)
layout = QtWidgets.QVBoxLayout(self)
@ -351,6 +342,7 @@ class LightingButton(QtWidgets.QPushButton):
color: #4BF543;
}
"""
def __init__(self, text, *args, **kwargs):
super().__init__(text, *args, **kwargs)
self.setStyleSheet(self.lightingbtnstyle)
@ -362,3 +354,173 @@ class LightingButton(QtWidgets.QPushButton):
height = preview_font_metrics.height() + 5
self.setMaximumWidth(width)
self.setMaximumHeight(height)
class PngFactory:
png_names = {
"trash": {
"normal": QtGui.QIcon(get_resource("trash.png")),
"hover": QtGui.QIcon(get_resource("trash_hover.png")),
"pressed": QtGui.QIcon(get_resource("trash_pressed.png")),
"pressed_hover": QtGui.QIcon(
get_resource("trash_pressed_hover.png")
),
"disabled": QtGui.QIcon(get_resource("trash_disabled.png"))
},
"menu": {
"normal": QtGui.QIcon(get_resource("menu.png")),
"hover": QtGui.QIcon(get_resource("menu_hover.png")),
"pressed": QtGui.QIcon(get_resource("menu_pressed.png")),
"pressed_hover": QtGui.QIcon(
get_resource("menu_pressed_hover.png")
),
"disabled": QtGui.QIcon(get_resource("menu_disabled.png"))
}
}
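# PngFactory is a plain lookup of pre-loaded QIcons per button state;
# a new button only needs its five state icons registered in png_names.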
class PngButton(QtWidgets.QPushButton):
png_button_style = """
QPushButton {
border: none;
background-color: transparent;
padding-top: 0px;
padding-bottom: 0px;
padding-left: 0px;
padding-right: 0px;
}
QPushButton:hover {}
QPushButton:pressed {}
QPushButton:disabled {}
QPushButton:checked {}
QPushButton:checked:hover {}
QPushButton:checked:pressed {}
"""
def __init__(
self, name=None, path=None, hover_path=None, pressed_path=None,
hover_pressed_path=None, disabled_path=None,
size=None, *args, **kwargs
):
self._hovered = False
self._pressed = False
super(PngButton, self).__init__(*args, **kwargs)
self.setStyleSheet(self.png_button_style)
png_dict = {}
if name:
png_dict = PngFactory.png_names.get(name) or {}
if not png_dict:
print((
"WARNING: There is not set icon with name \"{}\""
"in PngFactory!"
).format(name))
ico_normal = png_dict.get("normal")
ico_hover = png_dict.get("hover")
ico_pressed = png_dict.get("pressed")
ico_hover_pressed = png_dict.get("pressed_hover")
ico_disabled = png_dict.get("disabled")
if path:
ico_normal = QtGui.QIcon(path)
if hover_path:
ico_hover = QtGui.QIcon(hover_path)
if pressed_path:
ico_pressed = QtGui.QIcon(pressed_path)
if hover_pressed_path:
ico_hover_pressed = QtGui.QIcon(hover_pressed_path)
if disabled_path:
ico_disabled = QtGui.QIcon(disabled_path)
self.setIcon(ico_normal)
if size:
self.setIconSize(size)
self.setMaximumSize(size)
self.ico_normal = ico_normal
self.ico_hover = ico_hover
self.ico_pressed = ico_pressed
self.ico_hover_pressed = ico_hover_pressed
self.ico_disabled = ico_disabled
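# The event overrides below keep _hovered/_pressed in sync with the mouse
# and swap the icon to the matching state (hover, pressed, pressed+hover,
# disabled), falling back to the normal icon when a state icon is missing.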
def setDisabled(self, in_bool):
super(PngButton, self).setDisabled(in_bool)
icon = self.ico_normal
if in_bool and self.ico_disabled:
icon = self.ico_disabled
self.setIcon(icon)
def enterEvent(self, event):
self._hovered = True
if not self.isEnabled():
return
icon = self.ico_normal
if self.ico_hover:
icon = self.ico_hover
if self._pressed and self.ico_hover_pressed:
icon = self.ico_hover_pressed
if self.icon() != icon:
self.setIcon(icon)
def mouseMoveEvent(self, event):
super(PngButton, self).mouseMoveEvent(event)
if self._pressed:
mouse_pos = event.pos()
hovering = self.rect().contains(mouse_pos)
if hovering and not self._hovered:
self.enterEvent(event)
elif not hovering and self._hovered:
self.leaveEvent(event)
def leaveEvent(self, event):
self._hovered = False
if not self.isEnabled():
return
icon = self.ico_normal
if self._pressed and self.ico_pressed:
icon = self.ico_pressed
if self.icon() != icon:
self.setIcon(icon)
def mousePressEvent(self, event):
self._pressed = True
if not self.isEnabled():
return
icon = self.ico_hover
if self.ico_pressed:
icon = self.ico_pressed
if self.ico_hover_pressed:
mouse_pos = event.pos()
if self.rect().contains(mouse_pos):
icon = self.ico_hover_pressed
if icon is None:
icon = self.ico_normal
if self.icon() != icon:
self.setIcon(icon)
def mouseReleaseEvent(self, event):
if not self.isEnabled():
return
if self._pressed:
self._pressed = False
mouse_pos = event.pos()
if self.rect().contains(mouse_pos):
self.clicked.emit()
icon = self.ico_normal
if self._hovered and self.ico_hover:
icon = self.ico_hover
if self.icon() != icon:
self.setIcon(icon)