Mirror of https://github.com/ynput/ayon-core.git (synced 2025-12-25 05:14:40 +01:00)

Commit 59260feeb9: Merge branch 'release/2.4.0'
807 changed files with 11103 additions and 170887 deletions
LICENSE (2 changes)
@@ -1,6 +1,6 @@
 MIT License

-Copyright (c) 2018 pype club
+Copyright (c) 2018 orbi tools s.r.o


 Permission is hereby granted, free of charge, to any person obtaining a copy
@@ -14,6 +14,11 @@ class AvalonApps:
         self.parent = parent
         self.app_launcher = None

+    def process_modules(self, modules):
+        if "RestApiServer" in modules:
+            from .rest_api import AvalonRestApi
+            self.rest_api_obj = AvalonRestApi()
+
     # Definition of Tray menu
     def tray_menu(self, parent_menu=None):
         # Actions
pype/avalon_apps/rest_api.py (new file, 86 lines)
@@ -0,0 +1,86 @@
import os
import re
import json
import bson
import bson.json_util
from pype.services.rest_api import RestApi, abort, CallbackResult
from pype.ftrack.lib.custom_db_connector import DbConnector


class AvalonRestApi(RestApi):
    dbcon = DbConnector(
        os.environ["AVALON_MONGO"],
        os.environ["AVALON_DB"]
    )

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.dbcon.install()

    @RestApi.route("/projects/<project_name>", url_prefix="/avalon", methods="GET")
    def get_project(self, request):
        project_name = request.url_data["project_name"]
        if not project_name:
            output = {}
            for project_name in self.dbcon.tables():
                project = self.dbcon[project_name].find_one({"type": "project"})
                output[project_name] = project

            return CallbackResult(data=self.result_to_json(output))

        project = self.dbcon[project_name].find_one({"type": "project"})

        if project:
            return CallbackResult(data=self.result_to_json(project))

        abort(404, "Project \"{}\" was not found in database".format(
            project_name
        ))

    @RestApi.route("/projects/<project_name>/assets/<asset>", url_prefix="/avalon", methods="GET")
    def get_assets(self, request):
        _project_name = request.url_data["project_name"]
        _asset = request.url_data["asset"]

        if not self.dbcon.exist_table(_project_name):
            abort(404, "Project \"{}\" was not found in database".format(
                _project_name
            ))

        if not _asset:
            assets = self.dbcon[_project_name].find({"type": "asset"})
            output = self.result_to_json(assets)
            return CallbackResult(data=output)

        # identificator can be specified with url query (default is `name`)
        identificator = request.query.get("identificator", "name")

        asset = self.dbcon[_project_name].find_one({
            "type": "asset",
            identificator: _asset
        })
        if asset:
            id = asset["_id"]
            asset["_id"] = str(id)
            return asset

        abort(404, "Asset \"{}\" with {} was not found in project {}".format(
            _asset, identificator, _project_name
        ))

    def result_to_json(self, result):
        """Convert result of MongoDB query to a dict without $oid (ObjectId)
        keys, with help of regex matching.

        ..note:
            This will convert object type entries similar to ObjectId.
        """
        bson_json = bson.json_util.dumps(result)
        # Replace "{$oid: "{entity id}"}" with "{entity id}"
        regex1 = '(?P<id>{\"\$oid\": \"[^\"]+\"})'
        regex2 = '{\"\$oid\": (?P<id>\"[^\"]+\")}'
        for value in re.findall(regex1, bson_json):
            for substr in re.findall(regex2, value):
                bson_json = bson_json.replace(value, substr)

        return json.loads(bson_json)
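For reference, a small standalone sketch of the `$oid` flattening that `result_to_json` performs. The document and its id are made up; only `bson` (shipped with pymongo) and the standard library are assumed:

    import re
    import json
    import bson.json_util
    from bson.objectid import ObjectId

    doc = {"_id": ObjectId("5d505646cf6d4fe581014ab2"), "type": "asset"}

    # bson.json_util.dumps renders ObjectId as Extended JSON:
    #   {"_id": {"$oid": "5d505646cf6d4fe581014ab2"}, "type": "asset"}
    dumped = bson.json_util.dumps(doc)

    # Collapse {"$oid": "<hex>"} wrappers to the plain id string,
    # the same effect the two regexes in result_to_json achieve
    flattened = re.sub(r'\{"\$oid": ("[^"]+")\}', r'\1', dumped)

    print(json.loads(flattened))
    # {'_id': '5d505646cf6d4fe581014ab2', 'type': 'asset'}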
@@ -3,7 +3,7 @@ import sys
 import argparse
 import logging
 import json
-from pype.vendor import ftrack_api
+import ftrack_api
 from pype.ftrack import BaseAction, MissingPermision
 from pype.clockify import ClockifyAPI

@@ -1,2 +1,2 @@
 from .lib import *
-from .ftrack_server import FtrackServer
+from .ftrack_server import FtrackServer, check_ftrack_url
@@ -1,6 +1,6 @@
 import os

-from pype.vendor import ftrack_api
+import ftrack_api
 from pype.ftrack import BaseAction
 from pype.ftrack.lib.io_nonsingleton import DbConnector

@@ -2,7 +2,7 @@ import sys
 import argparse
 import logging

-from pype.vendor import ftrack_api
+import ftrack_api
 from pype.ftrack import BaseAction

@@ -3,7 +3,7 @@ import sys
 import argparse
 import logging
 import subprocess
-from pype.vendor import ftrack_api
+import ftrack_api
 from pype.ftrack import BaseAction

@@ -4,8 +4,9 @@ import argparse
 import json
 import arrow
 import logging
-from pype.vendor import ftrack_api
-from pype.ftrack import BaseAction, get_ca_mongoid
+import ftrack_api
+from pype.ftrack import BaseAction
+from pype.ftrack.lib.avalon_sync import CustAttrIdKey
 from pypeapp import config
 from ftrack_api.exception import NoResultFoundError

@@ -171,7 +172,6 @@ class CustomAttributes(BaseAction):

     def avalon_mongo_id_attributes(self, session):
         # Attribute Name and Label
-        cust_attr_name = get_ca_mongoid()
         cust_attr_label = 'Avalon/Mongo Id'

         # Types that don't need object_type_id
@@ -207,7 +207,7 @@ class CustomAttributes(BaseAction):
         group = self.get_group('avalon')

         data = {}
-        data['key'] = cust_attr_name
+        data['key'] = CustAttrIdKey
         data['label'] = cust_attr_label
         data['type'] = custom_attribute_type
         data['default'] = ''

@@ -4,7 +4,7 @@ import logging
 import argparse
 import re

-from pype.vendor import ftrack_api
+import ftrack_api
 from pype.ftrack import BaseAction
 from avalon import lib as avalonlib
 from pype.ftrack.lib.io_nonsingleton import DbConnector
@@ -4,7 +4,7 @@ import re
 import argparse
 import logging

-from pype.vendor import ftrack_api
+import ftrack_api
 from pype.ftrack import BaseAction
 from pypeapp import config

@@ -142,6 +142,13 @@ class CreateProjectFolders(BaseAction):
         else:
             data['project_id'] = parent['project']['id']

+        existing_entity = self.session.query((
+            "TypedContext where name is \"{}\" and "
+            "parent_id is \"{}\" and project_id is \"{}\""
+        ).format(name, data['parent_id'], data['project_id'])).first()
+        if existing_entity:
+            return existing_entity
+
         new_ent = self.session.create(ent_type, data)
         self.session.commit()
         return new_ent
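The block added above is a find-or-create guard: query first so that re-running the action does not duplicate entities. A condensed sketch of the same pattern against an ftrack_api session (the entity type and field values here are illustrative, not the module's actual inputs):

    import ftrack_api

    session = ftrack_api.Session()  # credentials read from FTRACK_* env vars

    def get_or_create(session, name, parent_id, project_id, ent_type="Folder"):
        # Query first so repeated runs do not create duplicates
        existing = session.query(
            'TypedContext where name is "{}" and parent_id is "{}"'
            ' and project_id is "{}"'.format(name, parent_id, project_id)
        ).first()
        if existing:
            return existing

        new_ent = session.create(ent_type, {
            "name": name,
            "parent_id": parent_id,
            "project_id": project_id
        })
        session.commit()
        return new_ent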
@@ -4,7 +4,7 @@ import json
 import argparse
 import logging

-from pype.vendor import ftrack_api
+import ftrack_api
 from pype.ftrack import BaseAction

@@ -1,357 +1,606 @@
import os
import sys
import logging
import collections
import uuid
from datetime import datetime
from queue import Queue

from bson.objectid import ObjectId
import argparse
from pype.vendor import ftrack_api
from pype.ftrack import BaseAction
from pype.ftrack.lib.io_nonsingleton import DbConnector


class DeleteAsset(BaseAction):
class DeleteAssetSubset(BaseAction):
    '''Edit meta data action.'''

    #: Action identifier.
    identifier = 'delete.asset'
    identifier = "delete.asset.subset"
    #: Action label.
    label = 'Delete Asset/Subsets'
    label = "Delete Asset/Subsets"
    #: Action description.
    description = 'Removes from Avalon with all childs and asset from Ftrack'
    icon = '{}/ftrack/action_icons/DeleteAsset.svg'.format(
        os.environ.get('PYPE_STATICS_SERVER', '')
    description = "Removes from Avalon with all childs and asset from Ftrack"
    icon = "{}/ftrack/action_icons/DeleteAsset.svg".format(
        os.environ.get("PYPE_STATICS_SERVER", "")
    )
    #: roles that are allowed to register this action
    role_list = ['Pypeclub', 'Administrator']
    #: Db
    db = DbConnector()
    role_list = ["Pypeclub", "Administrator", "Project Manager"]
    #: Db connection
    dbcon = DbConnector()

    value = None
    splitter = {"type": "label", "value": "---"}
    action_data_by_id = {}
    asset_prefix = "asset:"
    subset_prefix = "subset:"

    def discover(self, session, entities, event):
        ''' Validation '''
        if len(entities) != 1:
            return False
        """ Validation """
        task_ids = []
        for ent_info in event["data"]["selection"]:
            entType = ent_info.get("entityType", "")
            if entType == "task":
                task_ids.append(ent_info["entityId"])

        valid = ["task"]
        entityType = event["data"]["selection"][0].get("entityType", "")
        if entityType.lower() not in valid:
            return False

        return True
        for entity in entities:
            ftrack_id = entity["id"]
            if ftrack_id not in task_ids:
                continue
            if entity.entity_type.lower() != "task":
                return True
        return False

    def _launch(self, event):
        self.reset_session()
        try:
            self.db.install()
            args = self._translate_event(
                self.session, event
            )
            if "values" not in event["data"]:
                self.dbcon.install()
                return self._interface(self.session, *args)

            interface = self._interface(
                self.session, *args
            )

            confirmation = self.confirm_delete(
                True, *args
            )

            if interface:
                return interface

            confirmation = self.confirm_delete(*args)
            if confirmation:
                return confirmation

            self.dbcon.install()
            response = self.launch(
                self.session, *args
            )
        finally:
            self.db.uninstall()
            self.dbcon.uninstall()

        return self._handle_result(
            self.session, response, *args
        )
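The `_launch` override above brackets every code path with `dbcon.install()` / `dbcon.uninstall()` via try/finally. The same guarantee can be expressed as a context manager; a minimal sketch, assuming a `DbConnector`-like object as used in this module:

    from contextlib import contextmanager

    @contextmanager
    def installed(dbcon):
        # Ensure the Mongo connection is torn down even if the action raises
        dbcon.install()
        try:
            yield dbcon
        finally:
            dbcon.uninstall()

    # Hypothetical usage inside an action:
    # with installed(self.dbcon):
    #     response = self.launch(self.session, *args)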
    def interface(self, session, entities, event):
        if not event['data'].get('values', {}):
            self.attempt = 1
            items = []
            entity = entities[0]
            title = 'Choose items to delete from "{}"'.format(entity['name'])
            project = entity['project']
        self.show_message(event, "Preparing data...", True)
        items = []
        title = "Choose items to delete"

            self.db.Session['AVALON_PROJECT'] = project["full_name"]
        # Filter selection and get ftrack ids
        selection = event["data"].get("selection") or []
        ftrack_ids = []
        project_in_selection = False
        for entity in selection:
            entity_type = (entity.get("entityType") or "").lower()
            if entity_type != "task":
                if entity_type == "show":
                    project_in_selection = True
                continue

            av_entity = self.db.find_one({
                'type': 'asset',
                'name': entity['name']
            ftrack_id = entity.get("entityId")
            if not ftrack_id:
                continue

            ftrack_ids.append(ftrack_id)

        if project_in_selection:
            msg = "It is not possible to use this action on project entity."
            self.show_message(event, msg, True)

        # Filter event even more (skip task entities)
        # - task entities are not relevant for avalon
        for entity in entities:
            ftrack_id = entity["id"]
            if ftrack_id not in ftrack_ids:
                continue

            if entity.entity_type.lower() == "task":
                ftrack_ids.remove(ftrack_id)

        if not ftrack_ids:
            # It is a bug if this happens!
            return {
                "success": False,
                "message": "Invalid selection for this action (Bug)"
            }

        if entities[0].entity_type.lower() == "project":
            project = entities[0]
        else:
            project = entities[0]["project"]

        project_name = project["full_name"]
        self.dbcon.Session["AVALON_PROJECT"] = project_name

        selected_av_entities = self.dbcon.find({
            "type": "asset",
            "data.ftrackId": {"$in": ftrack_ids}
        })
        selected_av_entities = [ent for ent in selected_av_entities]
        if not selected_av_entities:
            return {
                "success": False,
                "message": "Didn't found entities in avalon"
            }

        # Remove cached action older than 2 minutes
        old_action_ids = []
        for id, data in self.action_data_by_id.items():
            created_at = data.get("created_at")
            if not created_at:
                old_action_ids.append(id)
                continue
            cur_time = datetime.now()
            existing_in_sec = (cur_time - created_at).total_seconds()
            if existing_in_sec > 60 * 2:
                old_action_ids.append(id)

        for id in old_action_ids:
            self.action_data_by_id.pop(id, None)

        # Store data for action id
        action_id = str(uuid.uuid1())
        self.action_data_by_id[action_id] = {
            "attempt": 1,
            "created_at": datetime.now(),
            "project_name": project_name,
            "subset_ids_by_name": {},
            "subset_ids_by_parent": {}
        }

        id_item = {
            "type": "hidden",
            "name": "action_id",
            "value": action_id
        }

        items.append(id_item)
        asset_ids = [ent["_id"] for ent in selected_av_entities]
        subsets_for_selection = self.dbcon.find({
            "type": "subset",
            "parent": {"$in": asset_ids}
        })

        asset_ending = ""
        if len(selected_av_entities) > 1:
            asset_ending = "s"

        asset_title = {
            "type": "label",
            "value": "# Delete asset{}:".format(asset_ending)
        }
        asset_note = {
            "type": "label",
            "value": (
                "<p><i>NOTE: Action will delete checked entities"
                " in Ftrack and Avalon with all children entities and"
                " published content.</i></p>"
            )
        }

        items.append(asset_title)
        items.append(asset_note)

        asset_items = collections.defaultdict(list)
        for asset in selected_av_entities:
            ent_path_items = [project_name]
            ent_path_items.extend(asset.get("data", {}).get("parents") or [])
            ent_path_to_parent = "/".join(ent_path_items) + "/"
            asset_items[ent_path_to_parent].append(asset)

        for asset_parent_path, assets in sorted(asset_items.items()):
            items.append({
                "type": "label",
                "value": "## <b>- {}</b>".format(asset_parent_path)
            })

            if av_entity is None:
                return {
                    'success': False,
                    'message': 'Didn\'t found assets in avalon'
                }

            asset_label = {
                'type': 'label',
                'value': '## Delete whole asset: ##'
            }
            asset_item = {
                'label': av_entity['name'],
                'name': 'whole_asset',
                'type': 'boolean',
                'value': False
            }
            splitter = {
                'type': 'label',
                'value': '{}'.format(200*"-")
            }
            subset_label = {
                'type': 'label',
                'value': '## Subsets: ##'
            }
            if av_entity is not None:
                items.append(asset_label)
                items.append(asset_item)
                items.append(splitter)

                all_subsets = self.db.find({
                    'type': 'subset',
                    'parent': av_entity['_id']
            for asset in assets:
                items.append({
                    "label": asset["name"],
                    "name": "{}{}".format(
                        self.asset_prefix, str(asset["_id"])
                    ),
                    "type": 'boolean',
                    "value": False
                })

            subset_items = []
            for subset in all_subsets:
                item = {
                    'label': subset['name'],
                    'name': str(subset['_id']),
                    'type': 'boolean',
                    'value': False
                }
                subset_items.append(item)
            if len(subset_items) > 0:
                items.append(subset_label)
                items.extend(subset_items)
            else:
                return {
                    'success': False,
                    'message': 'Didn\'t found assets in avalon'
                }
        subset_ids_by_name = collections.defaultdict(list)
        subset_ids_by_parent = collections.defaultdict(list)
        for subset in subsets_for_selection:
            subset_id = subset["_id"]
            name = subset["name"]
            parent_id = subset["parent"]
            subset_ids_by_name[name].append(subset_id)
            subset_ids_by_parent[parent_id].append(subset_id)

        if not subset_ids_by_name:
            return {
                'items': items,
                'title': title
                "items": items,
                "title": title
            }
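The parent-path grouping above relies on `collections.defaultdict`. Stripped to its essentials (the sample asset documents and project name are made up):

    import collections

    assets = [
        {"name": "Bear", "data": {"parents": ["Assets"]}},
        {"name": "sh0010", "data": {"parents": ["Shots", "sq001"]}},
    ]

    asset_items = collections.defaultdict(list)
    for asset in assets:
        # Join project name and parent folders into a display path
        path = "/".join(["MyProject"] + (asset["data"].get("parents") or [])) + "/"
        asset_items[path].append(asset)

    for parent_path, grouped in sorted(asset_items.items()):
        print(parent_path, [a["name"] for a in grouped])
    # MyProject/Assets/ ['Bear']
    # MyProject/Shots/sq001/ ['sh0010']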
    def confirm_delete(self, first_attempt, entities, event):
        if first_attempt is True:
            if 'values' not in event['data']:
                return
        subset_ending = ""
        if len(subset_ids_by_name.keys()) > 1:
            subset_ending = "s"

            values = event['data']['values']
        subset_title = {
            "type": "label",
            "value": "# Subset{} to delete:".format(subset_ending)
        }
        subset_note = {
            "type": "label",
            "value": (
                "<p><i>WARNING: Subset{} will be removed"
                " for all <b>selected</b> entities.</i></p>"
            ).format(subset_ending)
        }

            if len(values) <= 0:
                return
            if 'whole_asset' not in values:
                return
        else:
            values = self.values
        items.append(self.splitter)
        items.append(subset_title)
        items.append(subset_note)

        title = 'Confirmation of deleting {}'
        if values['whole_asset'] is True:
            title = title.format(
                'whole asset {}'.format(
                    entities[0]['name']
                )
            )
        else:
            subsets = []
            for key, value in values.items():
                if value is True:
                    subsets.append(key)
            len_subsets = len(subsets)
            if len_subsets == 0:
        for name in subset_ids_by_name:
            items.append({
                "label": "<b>{}</b>".format(name),
                "name": "{}{}".format(self.subset_prefix, name),
                "type": "boolean",
                "value": False
            })

        self.action_data_by_id[action_id]["subset_ids_by_parent"] = (
            subset_ids_by_parent
        )
        self.action_data_by_id[action_id]["subset_ids_by_name"] = (
            subset_ids_by_name
        )

        return {
            "items": items,
            "title": title
        }

    def confirm_delete(self, entities, event):
        values = event["data"]["values"]
        action_id = values.get("action_id")
        spec_data = self.action_data_by_id.get(action_id)
        if not spec_data:
            # it is a bug if this happens!
            return {
                "success": False,
                "message": "Something bad has happened. Please try again."
            }

        # Process Delete confirmation
        delete_key = values.get("delete_key")
        if delete_key:
            delete_key = delete_key.lower().strip()
            # Go to launch part if user entered `delete`
            if delete_key == "delete":
                return
            # Skip whole process if user didn't enter any text
            elif delete_key == "":
                self.action_data_by_id.pop(action_id, None)
                return {
                    'success': True,
                    'message': 'Nothing was selected to delete'
                    "success": True,
                    "message": "Deleting cancelled (delete entry was empty)"
                }
            elif len_subsets == 1:
                title = title.format(
                    '{} subset'.format(len_subsets)
                )
            else:
                title = title.format(
                    '{} subsets'.format(len_subsets)
                )
            # Get data to show again
            to_delete = spec_data["to_delete"]

        else:
            to_delete = collections.defaultdict(list)
            for key, value in values.items():
                if not value:
                    continue
                if key.startswith(self.asset_prefix):
                    _key = key.replace(self.asset_prefix, "")
                    to_delete["assets"].append(_key)

                elif key.startswith(self.subset_prefix):
                    _key = key.replace(self.subset_prefix, "")
                    to_delete["subsets"].append(_key)

            self.action_data_by_id[action_id]["to_delete"] = to_delete

        asset_to_delete = len(to_delete.get("assets") or []) > 0
        subset_to_delete = len(to_delete.get("subsets") or []) > 0

        if not asset_to_delete and not subset_to_delete:
            self.action_data_by_id.pop(action_id, None)
            return {
                "success": True,
                "message": "Nothing was selected to delete"
            }

        attempt = spec_data["attempt"]
        if attempt > 3:
            self.action_data_by_id.pop(action_id, None)
            return {
                "success": False,
                "message": "You didn't enter \"DELETE\" properly 3 times!"
            }

        self.action_data_by_id[action_id]["attempt"] += 1

        title = "Confirmation of deleting"

        if asset_to_delete:
            asset_len = len(to_delete["assets"])
            asset_ending = ""
            if asset_len > 1:
                asset_ending = "s"
            title += " {} Asset{}".format(asset_len, asset_ending)
            if subset_to_delete:
                title += " and"

        if subset_to_delete:
            sub_len = len(to_delete["subsets"])
            type_ending = ""
            sub_ending = ""
            if sub_len == 1:
                subset_ids_by_name = spec_data["subset_ids_by_name"]
                if len(subset_ids_by_name[to_delete["subsets"][0]]) > 1:
                    sub_ending = "s"

            elif sub_len > 1:
                type_ending = "s"
                sub_ending = "s"

            title += " {} type{} of subset{}".format(
                sub_len, type_ending, sub_ending
            )

        self.values = values
        items = []

        id_item = {"type": "hidden", "name": "action_id", "value": action_id}
        delete_label = {
            'type': 'label',
            'value': '# Please enter "DELETE" to confirm #'
        }

        delete_item = {
            'name': 'delete_key',
            'type': 'text',
            'value': '',
            'empty_text': 'Type Delete here...'
            "name": "delete_key",
            "type": "text",
            "value": "",
            "empty_text": "Type Delete here..."
        }

        items.append(id_item)
        items.append(delete_label)
        items.append(delete_item)

        return {
            'items': items,
            'title': title
            "items": items,
            "title": title
        }
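A minimal standalone sketch of the three-strike "DELETE" confirmation implemented above, with the state kept in a plain dict instead of `action_data_by_id` (function and dict names are hypothetical):

    def check_confirmation(state, entered_text, max_attempts=3):
        """Return None to proceed with deletion, or a response dict otherwise."""
        entered = (entered_text or "").lower().strip()
        if entered == "delete":
            return None  # confirmed, caller may continue to launch
        if entered == "":
            return {"success": True, "message": "Deleting cancelled"}
        if state["attempt"] > max_attempts:
            return {
                "success": False,
                "message": "You didn't enter \"DELETE\" properly 3 times!"
            }
        # Wrong text: count the attempt and ask again
        state["attempt"] += 1
        return {"success": False, "message": "Please type DELETE to confirm"}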
    def launch(self, session, entities, event):
        if 'values' not in event['data']:
            return

        values = event['data']['values']
        if len(values) <= 0:
            return
        if 'delete_key' not in values:
            return

        if values['delete_key'].lower() != 'delete':
            if values['delete_key'].lower() == '':
                return {
                    'success': False,
                    'message': 'Deleting cancelled'
                }
            if self.attempt < 3:
                self.attempt += 1
                return_dict = self.confirm_delete(False, entities, event)
                return_dict['title'] = '{} ({} attempt)'.format(
                    return_dict['title'], self.attempt
                )
                return return_dict
        self.show_message(event, "Processing...", True)
        values = event["data"]["values"]
        action_id = values.get("action_id")
        spec_data = self.action_data_by_id.get(action_id)
        if not spec_data:
            # it is a bug if this happens!
            return {
                'success': False,
                'message': 'You didn\'t enter "DELETE" properly 3 times!'
                "success": False,
                "message": "Something bad has happened. Please try again."
            }

        entity = entities[0]
        project = entity['project']
        report_messages = collections.defaultdict(list)

        self.db.Session['AVALON_PROJECT'] = project["full_name"]
        project_name = spec_data["project_name"]
        to_delete = spec_data["to_delete"]
        self.dbcon.Session["AVALON_PROJECT"] = project_name

        all_ids = []
        if self.values.get('whole_asset', False) is True:
            av_entity = self.db.find_one({
                'type': 'asset',
                'name': entity['name']
        assets_to_delete = to_delete.get("assets") or []
        subsets_to_delete = to_delete.get("subsets") or []

        # Convert asset ids to ObjectId obj
        assets_to_delete = [ObjectId(id) for id in assets_to_delete if id]

        subset_ids_by_parent = spec_data["subset_ids_by_parent"]
        subset_ids_by_name = spec_data["subset_ids_by_name"]

        subset_ids_to_archive = []
        asset_ids_to_archive = []
        ftrack_ids_to_delete = []
        if len(assets_to_delete) > 0:
            # Prepare data when deleting whole avalon asset
            avalon_assets = self.dbcon.find({"type": "asset"})
            avalon_assets_by_parent = collections.defaultdict(list)
            for asset in avalon_assets:
                parent_id = asset["data"]["visualParent"]
                avalon_assets_by_parent[parent_id].append(asset)
                if asset["_id"] in assets_to_delete:
                    ftrack_id = asset["data"]["ftrackId"]
                    ftrack_ids_to_delete.append(ftrack_id)

            children_queue = Queue()
            for mongo_id in assets_to_delete:
                children_queue.put(mongo_id)

            while not children_queue.empty():
                mongo_id = children_queue.get()
                if mongo_id in asset_ids_to_archive:
                    continue

                asset_ids_to_archive.append(mongo_id)
                for subset_id in subset_ids_by_parent.get(mongo_id, []):
                    if subset_id not in subset_ids_to_archive:
                        subset_ids_to_archive.append(subset_id)

                children = avalon_assets_by_parent.get(mongo_id)
                if not children:
                    continue

                for child in children:
                    child_id = child["_id"]
                    if child_id not in asset_ids_to_archive:
                        children_queue.put(child_id)

        # Prepare names of assets in ftrack and ids of subsets in mongo
        asset_names_to_delete = []
        if len(subsets_to_delete) > 0:
            for name in subsets_to_delete:
                asset_names_to_delete.append(name)
                for subset_id in subset_ids_by_name[name]:
                    if subset_id in subset_ids_to_archive:
                        continue
                    subset_ids_to_archive.append(subset_id)

        # Get ftrack ids of entities where will be delete only asset
        not_deleted_entities_id = []
        ftrack_id_name_map = {}
        if asset_names_to_delete:
            for entity in entities:
                ftrack_id = entity["id"]
                ftrack_id_name_map[ftrack_id] = entity["name"]
                if ftrack_id in ftrack_ids_to_delete:
                    continue
                not_deleted_entities_id.append(ftrack_id)

        mongo_proc_txt = "MongoProcessing: "
        ftrack_proc_txt = "Ftrack processing: "
        if asset_ids_to_archive:
            self.log.debug("{}Archivation of assets <{}>".format(
                mongo_proc_txt,
                ", ".join([str(id) for id in asset_ids_to_archive])
            ))
            self.dbcon.update_many(
                {
                    "_id": {"$in": asset_ids_to_archive},
                    "type": "asset"
                },
                {"$set": {"type": "archived_asset"}}
            )

        if subset_ids_to_archive:
            self.log.debug("{}Archivation of subsets <{}>".format(
                mongo_proc_txt,
                ", ".join([str(id) for id in subset_ids_to_archive])
            ))
            self.dbcon.update_many(
                {
                    "_id": {"$in": subset_ids_to_archive},
                    "type": "subset"
                },
                {"$set": {"type": "archived_subset"}}
            )

        if ftrack_ids_to_delete:
            self.log.debug("{}Deleting Ftrack Entities <{}>".format(
                ftrack_proc_txt, ", ".join(ftrack_ids_to_delete)
            ))

            joined_ids_to_delete = ", ".join(
                ["\"{}\"".format(id) for id in ftrack_ids_to_delete]
            )
            ftrack_ents_to_delete = self.session.query(
                "select id, link from TypedContext where id in ({})".format(
                    joined_ids_to_delete
                )
            ).all()
            for entity in ftrack_ents_to_delete:
                self.session.delete(entity)
                try:
                    self.session.commit()
                except Exception:
                    ent_path = "/".join(
                        [ent["name"] for ent in entity["link"]]
                    )
                    msg = "Failed to delete entity"
                    report_messages[msg].append(ent_path)
                    self.session.rollback()
                    self.log.warning(
                        "{} <{}>".format(msg, ent_path),
                        exc_info=True
                    )

        if not_deleted_entities_id:
            joined_not_deleted = ", ".join([
                "\"{}\"".format(ftrack_id)
                for ftrack_id in not_deleted_entities_id
            ])
            joined_asset_names = ", ".join([
                "\"{}\"".format(name)
                for name in asset_names_to_delete
            ])
            # Find assets of selected entities with names of checked subsets
            assets = self.session.query((
                "select id from Asset where"
                " context_id in ({}) and name in ({})"
            ).format(joined_not_deleted, joined_asset_names)).all()

            self.log.debug("{}Deleting Ftrack Assets <{}>".format(
                ftrack_proc_txt,
                ", ".join([asset["id"] for asset in assets])
            ))
            for asset in assets:
                self.session.delete(asset)
                try:
                    self.session.commit()
                except Exception:
                    self.session.rollback()
                    msg = "Failed to delete asset"
                    report_messages[msg].append(asset["id"])
                    self.log.warning(
                        "{} <{}>".format(msg, asset["id"]),
                        exc_info=True
                    )

        return self.report_handle(report_messages, project_name, event)
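The `children_queue` walk above is a breadth-first traversal over the asset hierarchy. The same idea in isolation, over a made-up parent-to-children map:

    from queue import Queue

    children_by_parent = {
        "A": ["B", "C"],
        "B": ["D"],
    }

    def collect_descendants(root_ids, children_by_parent):
        # Visit every node reachable from root_ids exactly once
        seen = []
        queue = Queue()
        for _id in root_ids:
            queue.put(_id)
        while not queue.empty():
            current = queue.get()
            if current in seen:
                continue
            seen.append(current)
            for child in children_by_parent.get(current, []):
                queue.put(child)
        return seen

    print(collect_descendants(["A"], children_by_parent))  # ['A', 'B', 'C', 'D']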
    def report_handle(self, report_messages, project_name, event):
        if not report_messages:
            return {
                "success": True,
                "message": "Deletion was successful!"
            }

        title = "Delete report ({}):".format(project_name)
        items = []
        items.append({
            "type": "label",
            "value": "# Deleting was not completely successful"
        })
        items.append({
            "type": "label",
            "value": "<p><i>Check logs for more information</i></p>"
        })
        for msg, _items in report_messages.items():
            if not _items or not msg:
                continue

            items.append({
                "type": "label",
                "value": "# {}".format(msg)
            })

            if av_entity is not None:
                all_ids.append(av_entity['_id'])
                all_ids.extend(self.find_child(av_entity))
            if isinstance(_items, str):
                _items = [_items]
            items.append({
                "type": "label",
                "value": '<p>{}</p>'.format("<br>".join(_items))
            })
            items.append(self.splitter)

            session.delete(entity)
            session.commit()
        else:
            subset_names = []
            for key, value in self.values.items():
                if key == 'delete_key' or value is False:
                    continue

                entity_id = ObjectId(key)
                av_entity = self.db.find_one({'_id': entity_id})
                subset_names.append(av_entity['name'])
                if av_entity is None:
                    continue
                all_ids.append(entity_id)
                all_ids.extend(self.find_child(av_entity))

            for ft_asset in entity['assets']:
                if ft_asset['name'] in subset_names:
                    session.delete(ft_asset)
                    session.commit()

        if len(all_ids) == 0:
            return {
                'success': True,
                'message': 'No entities to delete in avalon'
            }

        or_subquery = []
        for id in all_ids:
            or_subquery.append({'_id': id})
        delete_query = {'$or': or_subquery}
        self.db.delete_many(delete_query)
        self.show_interface(items, title, event)

        return {
            'success': True,
            'message': 'All assets were deleted!'
            "success": False,
            "message": "Deleting finished. Read report messages."
        }

    def find_child(self, entity):
        output = []
        id = entity['_id']
        visuals = [x for x in self.db.find({'data.visualParent': id})]
        assert len(visuals) == 0, 'This asset has another asset as child'
        childs = self.db.find({'parent': id})
        for child in childs:
            output.append(child['_id'])
            output.extend(self.find_child(child))
        return output

    def find_assets(self, asset_names):
        assets = []
        for name in asset_names:
            entity = self.db.find_one({
                'type': 'asset',
                'name': name
            })
            if entity is not None and entity not in assets:
                assets.append(entity)
        return assets


def register(session, plugins_presets={}):
    '''Register plugin. Called when used as an plugin.'''

    DeleteAsset(session, plugins_presets).register()


def main(arguments=None):
    '''Set up logging and register action.'''
    if arguments is None:
        arguments = []

    parser = argparse.ArgumentParser()
    # Allow setting of logging level from arguments.
    loggingLevels = {}
    for level in (
        logging.NOTSET, logging.DEBUG, logging.INFO, logging.WARNING,
        logging.ERROR, logging.CRITICAL
    ):
        loggingLevels[logging.getLevelName(level).lower()] = level

    parser.add_argument(
        '-v', '--verbosity',
        help='Set the logging output verbosity.',
        choices=loggingLevels.keys(),
        default='info'
    )
    namespace = parser.parse_args(arguments)

    # Set up basic logging
    logging.basicConfig(level=loggingLevels[namespace.verbosity])

    session = ftrack_api.Session()

    register(session)

    # Wait for events
    logging.info(
        'Registered actions and listening for events. Use Ctrl-C to abort.'
    )
    session.event_hub.wait()


if __name__ == '__main__':
    raise SystemExit(main(sys.argv[1:]))
    DeleteAssetSubset(session, plugins_presets).register()
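The archiving step above flips the `type` field instead of deleting documents, so content stays recoverable. With plain pymongo, the equivalent call looks like this (the connection string, database/collection names, and ids are placeholders):

    from bson.objectid import ObjectId
    from pymongo import MongoClient

    client = MongoClient("mongodb://localhost:27017")  # placeholder URI
    col = client["avalon"]["my_project"]               # placeholder db/collection

    asset_ids_to_archive = [ObjectId("5d505646cf6d4fe581014ab2")]  # placeholder

    result = col.update_many(
        {"_id": {"$in": asset_ids_to_archive}, "type": "asset"},
        {"$set": {"type": "archived_asset"}}
    )
    print(result.modified_count)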
@@ -1,178 +0,0 @@
import os
import sys
import logging
import argparse
from pype.vendor import ftrack_api
from pype.ftrack import BaseAction
from pype.ftrack.lib.io_nonsingleton import DbConnector


class AssetsRemover(BaseAction):
    '''Edit meta data action.'''

    #: Action identifier.
    identifier = 'remove.assets'
    #: Action label.
    label = "Pype Admin"
    variant = '- Delete Assets by Name'
    #: Action description.
    description = 'Removes assets from Ftrack and Avalon db with all childs'
    #: roles that are allowed to register this action
    role_list = ['Pypeclub', 'Administrator']
    icon = '{}/ftrack/action_icons/PypeAdmin.svg'.format(
        os.environ.get('PYPE_STATICS_SERVER', '')
    )
    #: Db
    db = DbConnector()

    def discover(self, session, entities, event):
        ''' Validation '''
        if len(entities) != 1:
            return False

        valid = ["show", "task"]
        entityType = event["data"]["selection"][0].get("entityType", "")
        if entityType.lower() not in valid:
            return False

        return True

    def interface(self, session, entities, event):
        if not event['data'].get('values', {}):
            title = 'Enter Asset names to delete'

            items = []
            for i in range(15):

                item = {
                    'label': 'Asset {}'.format(i+1),
                    'name': 'asset_{}'.format(i+1),
                    'type': 'text',
                    'value': ''
                }
                items.append(item)

            return {
                'items': items,
                'title': title
            }

    def launch(self, session, entities, event):
        entity = entities[0]
        if entity.entity_type.lower() != 'Project':
            project = entity['project']
        else:
            project = entity

        if 'values' not in event['data']:
            return

        values = event['data']['values']
        if len(values) <= 0:
            return {
                'success': True,
                'message': 'No Assets to delete!'
            }

        asset_names = []

        for k, v in values.items():
            if v.replace(' ', '') != '':
                asset_names.append(v)

        self.db.install()
        self.db.Session['AVALON_PROJECT'] = project["full_name"]

        assets = self.find_assets(asset_names)

        all_ids = []
        for asset in assets:
            all_ids.append(asset['_id'])
            all_ids.extend(self.find_child(asset))

        if len(all_ids) == 0:
            self.db.uninstall()
            return {
                'success': True,
                'message': 'None of assets'
            }

        or_subquery = []
        for id in all_ids:
            or_subquery.append({'_id': id})
        delete_query = {'$or': or_subquery}
        self.db.delete_many(delete_query)

        self.db.uninstall()
        return {
            'success': True,
            'message': 'All assets were deleted!'
        }

    def find_child(self, entity):
        output = []
        id = entity['_id']
        visuals = [x for x in self.db.find({'data.visualParent': id})]
        assert len(visuals) == 0, 'This asset has another asset as child'
        childs = self.db.find({'parent': id})
        for child in childs:
            output.append(child['_id'])
            output.extend(self.find_child(child))
        return output

    def find_assets(self, asset_names):
        assets = []
        for name in asset_names:
            entity = self.db.find_one({
                'type': 'asset',
                'name': name
            })
            if entity is not None and entity not in assets:
                assets.append(entity)
        return assets


def register(session, plugins_presets={}):
    '''Register plugin. Called when used as an plugin.'''

    AssetsRemover(session, plugins_presets).register()


def main(arguments=None):
    '''Set up logging and register action.'''
    if arguments is None:
        arguments = []

    parser = argparse.ArgumentParser()
    # Allow setting of logging level from arguments.
    loggingLevels = {}
    for level in (
        logging.NOTSET, logging.DEBUG, logging.INFO, logging.WARNING,
        logging.ERROR, logging.CRITICAL
    ):
        loggingLevels[logging.getLevelName(level).lower()] = level

    parser.add_argument(
        '-v', '--verbosity',
        help='Set the logging output verbosity.',
        choices=loggingLevels.keys(),
        default='info'
    )
    namespace = parser.parse_args(arguments)

    # Set up basic logging
    logging.basicConfig(level=loggingLevels[namespace.verbosity])

    session = ftrack_api.Session()

    register(session)

    # Wait for events
    logging.info(
        'Registered actions and listening for events. Use Ctrl-C to abort.'
    )
    session.event_hub.wait()


if __name__ == '__main__':
    raise SystemExit(main(sys.argv[1:]))
@@ -4,7 +4,7 @@ import json
 import logging
 import subprocess
 from operator import itemgetter
-from pype.vendor import ftrack_api
+import ftrack_api
 from pype.ftrack import BaseAction
 from pypeapp import Logger, config

@@ -36,12 +36,13 @@ class DJVViewAction(BaseAction):
             'file_ext', ["img", "mov", "exr"]
         )

-    def register(self):
-        assert (self.djv_path is not None), (
-            'DJV View is not installed'
-            ' or paths in presets are not set correctly'
-        )
-        super().register()
+    def preregister(self):
+        if self.djv_path is None:
+            return (
+                'DJV View is not installed'
+                ' or paths in presets are not set correctly'
+            )
+        return True

     def discover(self, session, entities, event):
         """Return available actions based on *event*. """
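The switch from `register` with an `assert` to `preregister` turns a hard crash into a skippable error string. A sketch of how a registry loop might consume such a method (the loop itself is illustrative, not the module's actual code):

    def try_register(action):
        problem = action.preregister()
        if problem is not True:
            # preregister returns an error string when the action can't run
            print("Skipping {}: {}".format(type(action).__name__, problem))
            return False
        action.register()
        return True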
@@ -4,7 +4,7 @@ import argparse
 import logging
 import json

-from pype.vendor import ftrack_api
+import ftrack_api
 from pype.ftrack import BaseAction

@@ -108,6 +108,7 @@ class JobKiller(BaseAction):
                     'Changing Job ({}) status: {} -> failed'
                 ).format(job['id'], origin_status))
             except Exception:
+                session.rollback()
                 self.log.warning((
                     'Changing Job ({}) has failed'
                 ).format(job['id']))
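The added `session.rollback()` matters because a failed `commit()` leaves pending operations in the ftrack session; rolling back resets it so later commits can succeed. The surrounding pattern, reduced to its core:

    try:
        job["status"] = "failed"
        session.commit()
    except Exception:
        # Reset pending operations so later commits on this session can succeed
        session.rollback()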
@@ -2,7 +2,7 @@ import os
 import sys
 import argparse
 import logging
-from pype.vendor import ftrack_api
+import ftrack_api

 from pype.ftrack import BaseAction

@@ -2,12 +2,9 @@ import os
 import json

 from ruamel import yaml
-from pype.vendor import ftrack_api
 from pype.ftrack import BaseAction
 from pypeapp import config
-from pype.ftrack.lib import get_avalon_attr
-
-from pype.vendor.ftrack_api import session as fa_session
+from pype.ftrack.lib.avalon_sync import get_avalon_attr


 class PrepareProject(BaseAction):
@@ -55,6 +52,8 @@ class PrepareProject(BaseAction):
         attributes_to_set = {}
         for attr in hier_cust_attrs:
             key = attr["key"]
+            if key.startswith("avalon_"):
+                continue
             attributes_to_set[key] = {
                 "label": attr["label"],
                 "object": attr,
@@ -65,6 +64,8 @@ class PrepareProject(BaseAction):
             if attr["entity_type"].lower() != "show":
                 continue
             key = attr["key"]
+            if key.startswith("avalon_"):
+                continue
             attributes_to_set[key] = {
                 "label": attr["label"],
                 "object": attr,
@@ -7,7 +7,7 @@ import json

 from pypeapp import Logger, config
 from pype.ftrack import BaseAction
-from pype.vendor import ftrack_api
+import ftrack_api
 from avalon import io, api

 log = Logger().get_logger(__name__)
@@ -61,12 +61,12 @@ class RVAction(BaseAction):
     def set_rv_path(self):
         self.rv_path = self.config_data.get("rv_path")

-    def register(self):
-        assert (self.rv_path is not None), (
-            'RV is not installed'
-            ' or paths in presets are not set correctly'
-        )
-        super().register()
+    def preregister(self):
+        if self.rv_path is None:
+            return (
+                'RV is not installed or paths in presets are not set correctly'
+            )
+        return True

     def get_components_from_entity(self, session, entity, components):
         """Get components from various entity types.
pype/ftrack/actions/action_seed.py (new file, 347 lines)
@@ -0,0 +1,347 @@
import os
from operator import itemgetter
from pype.ftrack import BaseAction


class SeedDebugProject(BaseAction):
    '''Edit meta data action.'''

    #: Action identifier.
    identifier = "seed.debug.project"
    #: Action label.
    label = "SeedDebugProject"
    #: Action description.
    description = "Description"
    #: priority
    priority = 100
    #: roles that are allowed to register this action
    role_list = ["Pypeclub"]
    icon = "{}/ftrack/action_icons/SeedProject.svg".format(
        os.environ.get("PYPE_STATICS_SERVER", "")
    )

    # Asset names which will be created in `Assets` entity
    assets = [
        "Addax", "Alpaca", "Ant", "Antelope", "Aye", "Badger", "Bear", "Bee",
        "Beetle", "Bluebird", "Bongo", "Bontebok", "Butterflie", "Caiman",
        "Capuchin", "Capybara", "Cat", "Caterpillar", "Coyote", "Crocodile",
        "Cuckoo", "Deer", "Dragonfly", "Duck", "Eagle", "Egret", "Elephant",
        "Falcon", "Fossa", "Fox", "Gazelle", "Gecko", "Gerbil",
        "GiantArmadillo", "Gibbon", "Giraffe", "Goose", "Gorilla",
        "Grasshoper", "Hare", "Hawk", "Hedgehog", "Heron", "Hog",
        "Hummingbird", "Hyena", "Chameleon", "Cheetah", "Iguana", "Jackal",
        "Jaguar", "Kingfisher", "Kinglet", "Kite", "Komodo", "Lemur",
        "Leopard", "Lion", "Lizard", "Macaw", "Malachite", "Mandrill",
        "Mantis", "Marmoset", "Meadowlark", "Meerkat", "Mockingbird",
        "Mongoose", "Monkey", "Nyal", "Ocelot", "Okapi", "Oribi", "Oriole",
        "Otter", "Owl", "Panda", "Parrot", "Pelican", "Pig", "Porcupine",
        "Reedbuck", "Rhinocero", "Sandpiper", "Servil", "Skink", "Sloth",
        "Snake", "Spider", "Squirrel", "Sunbird", "Swallow", "Swift", "Tiger",
        "Sylph", "Tanager", "Vulture", "Warthog", "Waterbuck", "Woodpecker",
        "Zebra"
    ]

    # Tasks which will be created for Assets
    asset_tasks = [
        "Modeling", "Lookdev", "Rigging"
    ]
    # Tasks which will be created for Shots
    shot_tasks = [
        "Animation", "Lighting", "Compositing", "FX"
    ]

    # Define how many sequences will be created
    default_seq_count = 5
    # Define how many shots will be created for each sequence
    default_shots_count = 10

    existing_projects = None
    new_project_item = "< New Project >"
    current_project_item = "< Current Project >"

    def discover(self, session, entities, event):
        ''' Validation '''
        return True

    def interface(self, session, entities, event):
        if event["data"].get("values", {}):
            return

        title = "Select Project where you want to create seed data"

        items = []
        item_splitter = {"type": "label", "value": "---"}

        description_label = {
            "type": "label",
            "value": (
                "WARNING: Action does NOT check if entities already exist !!!"
            )
        }
        items.append(description_label)

        all_projects = session.query("select full_name from Project").all()
        self.existing_projects = [proj["full_name"] for proj in all_projects]
        projects_items = [
            {"label": proj, "value": proj} for proj in self.existing_projects
        ]

        data_items = []

        data_items.append({
            "label": self.new_project_item,
            "value": self.new_project_item
        })

        data_items.append({
            "label": self.current_project_item,
            "value": self.current_project_item
        })

        data_items.extend(sorted(
            projects_items,
            key=itemgetter("label"),
            reverse=False
        ))
        projects_item = {
            "label": "Choose Project",
            "type": "enumerator",
            "name": "project_name",
            "data": data_items,
            "value": self.current_project_item
        }
        items.append(projects_item)
        items.append(item_splitter)

        items.append({
            "label": "Number of assets",
            "type": "number",
            "name": "asset_count",
            "value": len(self.assets)
        })
        items.append({
            "label": "Number of sequences",
            "type": "number",
            "name": "seq_count",
            "value": self.default_seq_count
        })
        items.append({
            "label": "Number of shots",
            "type": "number",
            "name": "shots_count",
            "value": self.default_shots_count
        })
        items.append(item_splitter)

        note_label = {
            "type": "label",
            "value": (
                "<p><i>NOTE: Enter project name and choose schema if you "
                "chose `\"< New Project >\"`(code is optional)</i><p>"
            )
        }
        items.append(note_label)
        items.append({
            "label": "Project name",
            "name": "new_project_name",
            "type": "text",
            "value": ""
        })

        project_schemas = [
            sch["name"] for sch in self.session.query("ProjectSchema").all()
        ]
        schemas_item = {
            "label": "Choose Schema",
            "type": "enumerator",
            "name": "new_schema_name",
            "data": [
                {"label": sch, "value": sch} for sch in project_schemas
            ],
            "value": project_schemas[0]
        }
        items.append(schemas_item)

        items.append({
            "label": "*Project code",
            "name": "new_project_code",
            "type": "text",
            "value": "",
            "empty_text": "Optional..."
        })

        return {
            "items": items,
            "title": title
        }

    def launch(self, session, in_entities, event):
        if "values" not in event["data"]:
            return

        # THIS IS THE PROJECT PART
        values = event["data"]["values"]
        selected_project = values["project_name"]
        if selected_project == self.new_project_item:
            project_name = values["new_project_name"]
            if project_name in self.existing_projects:
                msg = "Project \"{}\" already exist".format(project_name)
                self.log.error(msg)
                return {"success": False, "message": msg}

            project_code = values["new_project_code"]
            project_schema_name = values["new_schema_name"]
            if not project_code:
                project_code = project_name
            project_code = project_code.lower().replace(" ", "_").strip()
            _project = session.query(
                "Project where name is \"{}\"".format(project_code)
            ).first()
            if _project:
                msg = "Project with code \"{}\" already exist".format(
                    project_code
                )
                self.log.error(msg)
                return {"success": False, "message": msg}

            project_schema = session.query(
                "ProjectSchema where name is \"{}\"".format(
                    project_schema_name
                )
            ).one()
            # Create the project with the chosen schema.
            self.log.debug((
                "*** Creating Project: name <{}>, code <{}>, schema <{}>"
            ).format(project_name, project_code, project_schema_name))
            project = session.create("Project", {
                "name": project_code,
                "full_name": project_name,
                "project_schema": project_schema
            })
            session.commit()

        elif selected_project == self.current_project_item:
            entity = in_entities[0]
            if entity.entity_type.lower() == "project":
                project = entity
            else:
                if "project" in entity:
                    project = entity["project"]
                else:
                    project = entity["parent"]["project"]
            project_schema = project["project_schema"]
            self.log.debug((
                "*** Using Project: name <{}>, code <{}>, schema <{}>"
            ).format(
                project["full_name"], project["name"], project_schema["name"]
            ))
        else:
            project = session.query("Project where full_name is \"{}\"".format(
                selected_project
            )).one()
            project_schema = project["project_schema"]
            self.log.debug((
                "*** Using Project: name <{}>, code <{}>, schema <{}>"
            ).format(
                project["full_name"], project["name"], project_schema["name"]
            ))

        # THIS IS THE MAGIC PART
        task_types = {}
        for _type in project_schema["_task_type_schema"]["types"]:
            if _type["name"] not in task_types:
                task_types[_type["name"]] = _type
        self.task_types = task_types

        asset_count = values.get("asset_count") or len(self.assets)
        seq_count = values.get("seq_count") or self.default_seq_count
        shots_count = values.get("shots_count") or self.default_shots_count

        self.create_assets(project, asset_count)
        self.create_shots(project, seq_count, shots_count)

        return True

    def create_assets(self, project, asset_count):
        self.log.debug("*** Creating assets:")

        main_entity = self.session.create("Folder", {
            "name": "Assets",
            "parent": project
        })
        self.log.debug("- Assets")
        available_assets = len(self.assets)
        repetitive_times = (
            int(asset_count / available_assets) +
            (asset_count % available_assets > 0)
        )
        created_assets = 0
        for _asset_name in self.assets:
            if created_assets >= asset_count:
                break
            for asset_num in range(1, repetitive_times + 1):
                if created_assets >= asset_count:
                    break
                asset_name = "%s_%02d" % (_asset_name, asset_num)
                asset = self.session.create("AssetBuild", {
                    "name": asset_name,
                    "parent": main_entity
                })
                created_assets += 1
                self.log.debug("- Assets/{}".format(asset_name))

                for task_name in self.asset_tasks:
                    self.session.create("Task", {
                        "name": task_name,
                        "parent": asset,
                        "type": self.task_types[task_name]
                    })
                    self.log.debug("- Assets/{}/{}".format(
                        asset_name, task_name
                    ))

        self.log.debug("*** Commiting Assets")
        self.session.commit()

    def create_shots(self, project, seq_count, shots_count):
        self.log.debug("*** Creating shots:")
        main_entity = self.session.create("Folder", {
            "name": "Shots",
            "parent": project
        })
        self.log.debug("- Shots")

        for seq_num in range(1, seq_count+1):
            seq_name = "sq%03d" % seq_num
            seq = self.session.create("Sequence", {
                "name": seq_name,
                "parent": main_entity
            })
            self.log.debug("- Shots/{}".format(seq_name))

            for shot_num in range(1, shots_count+1):
                shot_name = "%ssh%04d" % (seq_name, (shot_num*10))
                shot = self.session.create("Shot", {
                    "name": shot_name,
                    "parent": seq
                })
                self.log.debug("- Shots/{}/{}".format(seq_name, shot_name))

                for task_name in self.shot_tasks:
                    self.session.create("Task", {
                        "name": task_name,
                        "parent": shot,
                        "type": self.task_types[task_name]
                    })
                    self.log.debug("- Shots/{}/{}/{}".format(
                        seq_name, shot_name, task_name
                    ))

        self.log.debug("*** Commiting Shots")
        self.session.commit()


def register(session, plugins_presets={}):
    '''Register plugin. Called when used as an plugin.'''

    SeedDebugProject(session, plugins_presets).register()
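In `create_assets`, `repetitive_times` is an integer ceiling division written out long-hand; the usual shorthand gives the same result:

    asset_count, available_assets = 105, 100

    long_hand = int(asset_count / available_assets) + (asset_count % available_assets > 0)
    short_hand = -(-asset_count // available_assets)  # ceiling division via negation

    assert long_hand == short_hand == 2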
@@ -1,4 +1,4 @@
-from pype.vendor import ftrack_api
+import ftrack_api
 from pype.ftrack import BaseAction

@@ -1,351 +0,0 @@
import os
import sys
import json
import argparse
import logging
import collections

from pype.vendor import ftrack_api
from pype.ftrack import BaseAction, lib
from pype.ftrack.lib.io_nonsingleton import DbConnector
from bson.objectid import ObjectId


class SyncHierarchicalAttrs(BaseAction):

    db_con = DbConnector()
    ca_mongoid = lib.get_ca_mongoid()

    #: Action identifier.
    identifier = 'sync.hierarchical.attrs.local'
    #: Action label.
    label = "Pype Admin"
    variant = '- Sync Hier Attrs (Local)'
    #: Action description.
    description = 'Synchronize hierarchical attributes'
    #: Icon
    icon = '{}/ftrack/action_icons/PypeAdmin.svg'.format(
        os.environ.get('PYPE_STATICS_SERVER', '')
    )

    #: roles that are allowed to register this action
    role_list = ['Pypeclub', 'Administrator', 'Project Manager']

    def discover(self, session, entities, event):
        ''' Validation '''
        for entity in entities:
            if (
                entity.get('context_type', '').lower() in ('show', 'task') and
                entity.entity_type.lower() != 'task'
            ):
                return True
        return False

    def launch(self, session, entities, event):
        self.interface_messages = {}
        user = session.query(
            'User where id is "{}"'.format(event['source']['user']['id'])
        ).one()

        job = session.create('Job', {
            'user': user,
            'status': 'running',
            'data': json.dumps({
                'description': 'Sync Hierarchical attributes'
            })
        })
        session.commit()
        self.log.debug('Job with id "{}" created'.format(job['id']))

        process_session = ftrack_api.Session(
            server_url=session.server_url,
            api_key=session.api_key,
            api_user=session.api_user,
            auto_connect_event_hub=True
        )

        try:
            # Collect hierarchical attrs
            self.log.debug('Collecting Hierarchical custom attributes started')
            custom_attributes = {}
            all_avalon_attr = process_session.query(
                'CustomAttributeGroup where name is "avalon"'
            ).one()

            error_key = (
                'Hierarchical attributes with set "default" value (not allowed)'
            )

            for cust_attr in all_avalon_attr['custom_attribute_configurations']:
                if 'avalon_' in cust_attr['key']:
                    continue

                if not cust_attr['is_hierarchical']:
                    continue

                if cust_attr['default']:
                    if error_key not in self.interface_messages:
                        self.interface_messages[error_key] = []
                    self.interface_messages[error_key].append(
                        cust_attr['label']
                    )

                    self.log.warning((
                        'Custom attribute "{}" has set default value.'
                        ' This attribute can\'t be synchronized'
                    ).format(cust_attr['label']))
                    continue

                custom_attributes[cust_attr['key']] = cust_attr

            self.log.debug(
                'Collecting Hierarchical custom attributes has finished'
            )

            if not custom_attributes:
                msg = 'No hierarchical attributes to sync.'
                self.log.debug(msg)
                return {
                    'success': True,
                    'message': msg
                }

            entity = entities[0]
            if entity.entity_type.lower() == 'project':
                project_name = entity['full_name']
            else:
                project_name = entity['project']['full_name']

            self.db_con.install()
            self.db_con.Session['AVALON_PROJECT'] = project_name

            _entities = self._get_entities(event, process_session)

            for entity in _entities:
                self.log.debug(30 * '-')
                self.log.debug(
                    'Processing entity "{}"'.format(entity.get('name', entity))
                )

                ent_name = entity.get('name', entity)
                if entity.entity_type.lower() == 'project':
                    ent_name = entity['full_name']

                for key in custom_attributes:
                    self.log.debug(30 * '*')
                    self.log.debug(
                        'Processing Custom attribute key "{}"'.format(key)
                    )
                    # check if entity has that attribute
                    if key not in entity['custom_attributes']:
                        error_key = 'Missing key on entities'
                        if error_key not in self.interface_messages:
                            self.interface_messages[error_key] = []

                        self.interface_messages[error_key].append(
                            '- key: "{}" - entity: "{}"'.format(key, ent_name)
                        )

                        self.log.error((
                            '- key "{}" not found on "{}"'
                        ).format(key, ent_name))
                        continue

                    value = self.get_hierarchical_value(key, entity)
                    if value is None:
                        error_key = (
                            'Missing value for key on entity'
                            ' and its parents (synchronization was skipped)'
                        )
                        if error_key not in self.interface_messages:
                            self.interface_messages[error_key] = []

                        self.interface_messages[error_key].append(
                            '- key: "{}" - entity: "{}"'.format(key, ent_name)
                        )

                        self.log.warning((
                            '- key "{}" not set on "{}" or its parents'
                        ).format(key, ent_name))
                        continue

                    self.update_hierarchical_attribute(entity, key, value)

            job['status'] = 'done'
            session.commit()

        except Exception:
            self.log.error(
                'Action "{}" failed'.format(self.label),
                exc_info=True
            )

        finally:
            self.db_con.uninstall()

            if job['status'] in ('queued', 'running'):
                job['status'] = 'failed'
                session.commit()

            if self.interface_messages:
                title = "Errors during SyncHierarchicalAttrs"
                self.show_interface_from_dict(
                    messages=self.interface_messages, title=title, event=event
                )

        return True

    def get_hierarchical_value(self, key, entity):
        value = entity['custom_attributes'][key]
        if (
            value is not None or
            entity.entity_type.lower() == 'project'
        ):
            return value

        return self.get_hierarchical_value(key, entity['parent'])

    def update_hierarchical_attribute(self, entity, key, value):
        if (
            entity['context_type'].lower() not in ('show', 'task') or
            entity.entity_type.lower() == 'task'
        ):
            return

        ent_name = entity.get('name', entity)
        if entity.entity_type.lower() == 'project':
            ent_name = entity['full_name']

        hierarchy = '/'.join(
            [a['name'] for a in entity.get('ancestors', [])]
        )
        if hierarchy:
            hierarchy = '/'.join(
                [entity['project']['full_name'], hierarchy, entity['name']]
            )
        elif entity.entity_type.lower() == 'project':
            hierarchy = entity['full_name']
        else:
            hierarchy = '/'.join(
                [entity['project']['full_name'], entity['name']]
            )

        self.log.debug('- updating entity "{}"'.format(hierarchy))

        # collect entity's custom attributes
        custom_attributes = entity.get('custom_attributes')
        if not custom_attributes:
            return

        mongoid = custom_attributes.get(self.ca_mongoid)
        if not mongoid:
            error_key = 'Missing MongoID on entities (try SyncToAvalon first)'
            if error_key not in self.interface_messages:
                self.interface_messages[error_key] = []

            if ent_name not in self.interface_messages[error_key]:
                self.interface_messages[error_key].append(ent_name)

            self.log.warning(
                '-- entity "{}" is not synchronized to avalon. Skipping'.format(
                    ent_name
                )
            )
            return

        try:
            mongoid = ObjectId(mongoid)
        except Exception:
            error_key = 'Invalid MongoID on entities (try SyncToAvalon)'
            if error_key not in self.interface_messages:
                self.interface_messages[error_key] = []

            if ent_name not in self.interface_messages[error_key]:
                self.interface_messages[error_key].append(ent_name)

            self.log.warning(
                '-- entity "{}" has stored invalid MongoID. Skipping'.format(
                    ent_name
                )
            )
            return

        # Find entity in Mongo DB
        mongo_entity = self.db_con.find_one({'_id': mongoid})
        if not mongo_entity:
            error_key = 'Entities not found in Avalon DB (try SyncToAvalon)'
            if error_key not in self.interface_messages:
                self.interface_messages[error_key] = []

            if ent_name not in self.interface_messages[error_key]:
                self.interface_messages[error_key].append(ent_name)

            self.log.warning(
                '-- entity "{}" was not found in DB by id "{}". Skipping'.format(
                    ent_name, str(mongoid)
                )
            )
            return

        # Change value if entity has set its own
        entity_value = custom_attributes[key]
        if entity_value is not None:
            value = entity_value

        data = mongo_entity.get('data') or {}

        data[key] = value
        self.db_con.update_many(
            {'_id': mongoid},
            {'$set': {'data': data}}
        )

        self.log.debug(
            '-- stored value "{}"'.format(value)
        )

        for child in entity.get('children', []):
            self.update_hierarchical_attribute(child, key, value)


def register(session, plugins_presets={}):
    '''Register plugin. Called when used as a plugin.'''
    SyncHierarchicalAttrs(session, plugins_presets).register()


def main(arguments=None):
    '''Set up logging and register action.'''
    if arguments is None:
        arguments = []

    parser = argparse.ArgumentParser()
    # Allow setting of logging level from arguments.
    loggingLevels = {}
    for level in (
        logging.NOTSET, logging.DEBUG, logging.INFO, logging.WARNING,
        logging.ERROR, logging.CRITICAL
    ):
        loggingLevels[logging.getLevelName(level).lower()] = level

    parser.add_argument(
        '-v', '--verbosity',
        help='Set the logging output verbosity.',
        choices=loggingLevels.keys(),
        default='info'
    )
    namespace = parser.parse_args(arguments)

    # Set up basic logging
    logging.basicConfig(level=loggingLevels[namespace.verbosity])

    session = ftrack_api.Session()
    register(session)

    # Wait for events
    logging.info(
        'Registered actions and listening for events. Use Ctrl-C to abort.'
    )
    session.event_hub.wait()


if __name__ == '__main__':
    raise SystemExit(main(sys.argv[1:]))
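The heart of the deleted action above is get_hierarchical_value, which climbs the parent chain until it finds a non-None value or reaches the project. A minimal sketch of the same lookup over plain dicts (the entity layout here is an assumption for illustration, not the ftrack API):

# Hedged sketch: hierarchical attribute lookup over nested dicts.
# Parent chains end at an entity whose entity_type is "Project".
def get_hierarchical_value(key, entity):
    value = entity["custom_attributes"].get(key)
    if value is not None or entity["entity_type"].lower() == "project":
        return value
    return get_hierarchical_value(key, entity["parent"])

project = {"entity_type": "Project", "custom_attributes": {"fps": 25}}
shot = {"entity_type": "Shot", "custom_attributes": {"fps": None}, "parent": project}
print(get_hierarchical_value("fps", shot))  # -> 25, inherited from the project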
193 pype/ftrack/actions/action_sync_to_avalon.py Normal file
@@ -0,0 +1,193 @@
import os
import time
import traceback

from pype.ftrack import BaseAction
from pype.ftrack.lib.avalon_sync import SyncEntitiesFactory


class SyncToAvalonLocal(BaseAction):
    """
    Synchronizing data action - from Ftrack to Avalon DB

    Stores all information about an entity.
    - Name(string) - Most important information = identifier of entity
    - Parent(ObjectId) - Avalon Project Id, if entity is not project itself
    - Data(dictionary):
        - VisualParent(ObjectId) - Avalon Id of parent asset
        - Parents(array of string) - All parent names except project
        - Tasks(array of string) - Tasks on asset
        - FtrackId(string)
        - entityType(string) - entity's type on Ftrack
        * All Custom attributes in group 'Avalon'
            - custom attributes that start with 'avalon_' are skipped

    * This information is stored for all entities in the whole project.

    Avalon ID of asset is stored to Ftrack
    - Custom attribute 'avalon_mongo_id'.
    - the action DOES NOT create this custom attribute if it doesn't exist
        - run 'Create Custom Attributes' action
        - or do it manually (Not recommended)
    """

    #: Action identifier.
    identifier = "sync.to.avalon.local"
    #: Action label.
    label = "Pype Admin"
    #: Action variant
    variant = "- Sync To Avalon (Local)"
    #: Action description.
    description = "Send data from Ftrack to Avalon"
    #: priority
    priority = 200
    #: roles that are allowed to register this action
    role_list = ["Pypeclub"]
    icon = '{}/ftrack/action_icons/PypeAdmin.svg'.format(
        os.environ.get('PYPE_STATICS_SERVER', '')
    )

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.entities_factory = SyncEntitiesFactory(self.log, self.session)

    def discover(self, session, entities, event):
        ''' Validation '''
        for ent in event["data"]["selection"]:
            # Ignore entities that are not tasks or projects
            if ent["entityType"].lower() in ["show", "task"]:
                return True
        return False

    def launch(self, session, in_entities, event):
        time_start = time.time()

        self.show_message(event, "Synchronization - Preparing data", True)
        # Get ftrack project
        if in_entities[0].entity_type.lower() == "project":
            ft_project_name = in_entities[0]["full_name"]
        else:
            ft_project_name = in_entities[0]["project"]["full_name"]

        try:
            self.entities_factory.launch_setup(ft_project_name)
            time_1 = time.time()

            self.entities_factory.set_cutom_attributes()
            time_2 = time.time()

            # This must happen before all filtering!!!
            self.entities_factory.prepare_avalon_entities(ft_project_name)
            time_3 = time.time()

            self.entities_factory.filter_by_ignore_sync()
            time_4 = time.time()

            self.entities_factory.duplicity_regex_check()
            time_5 = time.time()

            self.entities_factory.prepare_ftrack_ent_data()
            time_6 = time.time()

            self.entities_factory.synchronize()
            time_7 = time.time()

            self.log.debug(
                "*** Synchronization finished ***"
            )
            self.log.debug(
                "preparation <{}>".format(time_1 - time_start)
            )
            self.log.debug(
                "set_cutom_attributes <{}>".format(time_2 - time_1)
            )
            self.log.debug(
                "prepare_avalon_entities <{}>".format(time_3 - time_2)
            )
            self.log.debug(
                "filter_by_ignore_sync <{}>".format(time_4 - time_3)
            )
            self.log.debug(
                "duplicity_regex_check <{}>".format(time_5 - time_4)
            )
            self.log.debug(
                "prepare_ftrack_ent_data <{}>".format(time_6 - time_5)
            )
            self.log.debug(
                "synchronize <{}>".format(time_7 - time_6)
            )
            self.log.debug(
                "* Total time: {}".format(time_7 - time_start)
            )

            report = self.entities_factory.report()
            if report and report.get("items"):
                default_title = "Synchronization report ({}):".format(
                    ft_project_name
                )
                self.show_interface(
                    items=report["items"],
                    title=report.get("title", default_title),
                    event=event
                )
            return {
                "success": True,
                "message": "Synchronization Finished"
            }

        except Exception:
            self.log.error(
                "Synchronization failed due to code error", exc_info=True
            )
            msg = "An error occurred during synchronization"
            title = "Synchronization report ({}):".format(ft_project_name)
            items = []
            items.append({
                "type": "label",
                "value": "# {}".format(msg)
            })
            items.append({
                "type": "label",
                "value": "## Traceback of the error"
            })
            items.append({
                "type": "label",
                "value": "<p>{}</p>".format(
                    str(traceback.format_exc()).replace(
                        "\n", "<br>").replace(
                        " ", "&nbsp;"
                    )
                )
            })

            report = {"items": []}
            try:
                report = self.entities_factory.report()
            except Exception:
                pass

            _items = report.get("items", [])
            if _items:
                items.append(self.entities_factory.report_splitter)
                items.extend(_items)

            self.show_interface(items, title, event)

            return {"success": True, "message": msg}

        finally:
            try:
                self.entities_factory.dbcon.uninstall()
            except Exception:
                pass

            try:
                self.entities_factory.session.close()
            except Exception:
                pass


def register(session, plugins_presets={}):
    '''Register plugin. Called when used as a plugin.'''
    SyncToAvalonLocal(session, plugins_presets).register()
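One detail worth isolating from launch() above: when synchronization fails, the traceback is rendered into ftrack interface "label" items as HTML. A hedged standalone sketch of that pattern (the item dicts mirror the ones built above; the helper name is made up):

# Hedged sketch of the error-report pattern used in launch() above.
import traceback

def traceback_items(msg):
    # Render the current traceback as HTML for an ftrack "label" widget.
    tb = traceback.format_exc().replace("\n", "<br>").replace(" ", "&nbsp;")
    return [
        {"type": "label", "value": "# {}".format(msg)},
        {"type": "label", "value": "## Traceback of the error"},
        {"type": "label", "value": "<p>{}</p>".format(tb)},
    ]

try:
    1 / 0
except Exception:
    for item in traceback_items("An error occurred during synchronization"):
        print(item["value"][:60])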
@@ -1,266 +0,0 @@
import os
import sys
import time
import argparse
import logging
import json
import collections

from pype.vendor import ftrack_api
from pype.ftrack import BaseAction
from pype.ftrack.lib import avalon_sync as ftracklib
from pype.vendor.ftrack_api import session as fa_session


class SyncToAvalon(BaseAction):
    '''
    Synchronizing data action - from Ftrack to Avalon DB

    Stores all information about an entity.
    - Name(string) - Most important information = identifier of entity
    - Parent(ObjectId) - Avalon Project Id, if entity is not project itself
    - Silo(string) - Last parent except project
    - Data(dictionary):
        - VisualParent(ObjectId) - Avalon Id of parent asset
        - Parents(array of string) - All parent names except project
        - Tasks(array of string) - Tasks on asset
        - FtrackId(string)
        - entityType(string) - entity's type on Ftrack
    * All Custom attributes in group 'Avalon' whose name doesn't start with 'avalon_'

    * This information is also stored for all parent and child entities.

    Avalon ID of asset is stored to Ftrack -> Custom attribute 'avalon_mongo_id'.
    - the action DOES NOT create this custom attribute if it doesn't exist
    - run 'Create Custom Attributes' action or do it manually (Not recommended)

    If Ftrack entity already has Custom Attribute 'avalon_mongo_id' that stores ID:
    - name, parents and silo are checked -> shows an error if they are not exactly the same
    - after sync it is not allowed to change names or move entities

    If ID in 'avalon_mongo_id' is an empty string or is not found in DB:
    - tries to find entity by name
    - found:
        - raises an error if ftrackId/visual parent/parents are not the same
    - not found:
        - Creates asset/project

    '''

    #: Action identifier.
    identifier = 'sync.to.avalon.local'
    #: Action label.
    label = "Pype Admin"
    variant = '- Sync To Avalon (Local)'
    #: Action description.
    description = 'Send data from Ftrack to Avalon'
    #: Action icon.
    icon = '{}/ftrack/action_icons/PypeAdmin.svg'.format(
        os.environ.get('PYPE_STATICS_SERVER', '')
    )
    #: roles that are allowed to register this action
    role_list = ['Pypeclub']
    #: Action priority
    priority = 200

    project_query = (
        "select full_name, name, custom_attributes"
        ", project_schema._task_type_schema.types.name"
        " from Project where full_name is \"{}\""
    )

    entities_query = (
        "select id, name, parent_id, link, custom_attributes"
        " from TypedContext where project.full_name is \"{}\""
    )

    # Entity type names (lowered) that won't be synchronized with their children
    ignore_entity_types = ["task", "milestone"]

    def __init__(self, session, plugins_presets):
        super(SyncToAvalon, self).__init__(session)
        # reload utils on initialize (in case of server restart)

    def discover(self, session, entities, event):
        ''' Validation '''
        for entity in entities:
            if entity.entity_type.lower() not in ['task', 'assetversion']:
                return True

        return False

    def launch(self, session, entities, event):
        time_start = time.time()
        message = ""

        # JOB SETTINGS
        userId = event['source']['user']['id']
        user = session.query('User where id is ' + userId).one()

        job = session.create('Job', {
            'user': user,
            'status': 'running',
            'data': json.dumps({
                'description': 'Sync Ftrack to Avalon.'
            })
        })
        session.commit()
        try:
            self.log.debug("Preparing entities for synchronization")

            if entities[0].entity_type.lower() == "project":
                ft_project_name = entities[0]["full_name"]
            else:
                ft_project_name = entities[0]["project"]["full_name"]

            project_entities = session.query(
                self.entities_query.format(ft_project_name)
            ).all()

            ft_project = session.query(
                self.project_query.format(ft_project_name)
            ).one()

            entities_by_id = {}
            entities_by_parent = collections.defaultdict(list)

            entities_by_id[ft_project["id"]] = ft_project
            for ent in project_entities:
                entities_by_id[ent["id"]] = ent
                entities_by_parent[ent["parent_id"]].append(ent)

            importable = []
            for ent_info in event["data"]["selection"]:
                ent = entities_by_id[ent_info["entityId"]]
                for link_ent_info in ent["link"]:
                    link_ent = entities_by_id[link_ent_info["id"]]
                    if (
                        ent.entity_type.lower() in self.ignore_entity_types or
                        link_ent in importable
                    ):
                        continue

                    importable.append(link_ent)

            def add_children(parent_id):
                ents = entities_by_parent[parent_id]
                for ent in ents:
                    if ent.entity_type.lower() in self.ignore_entity_types:
                        continue

                    if ent not in importable:
                        importable.append(ent)

                    add_children(ent["id"])

            # add children of selection to importable
            for ent_info in event["data"]["selection"]:
                add_children(ent_info["entityId"])

            # Check names: REGEX in schema/duplicates - raise error if found
            all_names = []
            duplicates = []

            for entity in importable:
                ftracklib.avalon_check_name(entity)
                if entity.entity_type.lower() == "project":
                    continue

                if entity['name'] in all_names:
                    duplicates.append("'{}'".format(entity['name']))
                else:
                    all_names.append(entity['name'])

            if len(duplicates) > 0:
                # TODO Show information to user and return False
                raise ValueError(
                    "Entity name duplication: {}".format(", ".join(duplicates))
                )

            # ----- PROJECT ------
            avalon_project = ftracklib.get_avalon_project(ft_project)
            custom_attributes = ftracklib.get_avalon_attr(session)

            # Import all entities to Avalon DB
            for entity in importable:
                result = ftracklib.import_to_avalon(
                    session=session,
                    entity=entity,
                    ft_project=ft_project,
                    av_project=avalon_project,
                    custom_attributes=custom_attributes
                )
                # TODO better error handling
                # maybe split into critical, warnings and messages?
                if 'errors' in result and len(result['errors']) > 0:
                    job['status'] = 'failed'
                    session.commit()

                    ftracklib.show_errors(self, event, result['errors'])

                    return {
                        'success': False,
                        'message': "Sync to avalon FAILED"
                    }

                if avalon_project is None:
                    if 'project' in result:
                        avalon_project = result['project']

            job['status'] = 'done'

        except ValueError as ve:
            # TODO remove this part!!!!
            job['status'] = 'failed'
            message = str(ve)
            self.log.error(
                'Error during syncToAvalon: {}'.format(message),
                exc_info=True
            )

        except Exception as e:
            job['status'] = 'failed'
            exc_type, exc_obj, exc_tb = sys.exc_info()
            fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
            log_message = "{}/{}/Line: {}".format(
                exc_type, fname, exc_tb.tb_lineno
            )
            self.log.error(
                'Error during syncToAvalon: {}'.format(log_message),
                exc_info=True
            )
            # TODO add traceback to message and show to user
            message = (
                'Unexpected Error'
                ' - Please check Log for more information'
            )
        finally:
            if job['status'] in ['queued', 'running']:
                job['status'] = 'failed'
            session.commit()

            time_end = time.time()
            self.log.debug("Synchronization took \"{}\"".format(
                str(time_end - time_start)
            ))

            if job["status"] != "failed":
                self.log.debug("Triggering Sync hierarchical attributes")
                self.trigger_action("sync.hierarchical.attrs.local", event)

        if len(message) > 0:
            message = "Unable to sync: {}".format(message)
            return {
                'success': False,
                'message': message
            }

        return {
            'success': True,
            'message': "Synchronization was successful"
        }


def register(session, plugins_presets={}):
    '''Register plugin. Called when used as a plugin.'''
    SyncToAvalon(session, plugins_presets).register()
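The selection traversal in the deleted launch() above is worth isolating: entities are indexed by parent id, then a recursive add_children collects the subtree while skipping ignored types. A minimal sketch over plain dicts (the sample entities are invented):

# Hedged sketch of the subtree collection above, using plain dicts.
import collections

entities = [
    {"id": "sq1", "parent_id": "proj", "type": "sequence"},
    {"id": "sh1", "parent_id": "sq1", "type": "shot"},
    {"id": "t1", "parent_id": "sh1", "type": "task"},
]
entities_by_parent = collections.defaultdict(list)
for ent in entities:
    entities_by_parent[ent["parent_id"]].append(ent)

ignore_entity_types = {"task", "milestone"}
importable = []

def add_children(parent_id):
    for ent in entities_by_parent[parent_id]:
        if ent["type"] in ignore_entity_types:
            continue
        if ent not in importable:
            importable.append(ent)
        add_children(ent["id"])

add_children("proj")
print([ent["id"] for ent in importable])  # -> ['sq1', 'sh1']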
@@ -6,7 +6,7 @@ import collections
import json
import re

from pype.vendor import ftrack_api
import ftrack_api
from pype.ftrack import BaseAction
from avalon import io, inventory, schema
@@ -4,7 +4,7 @@ import argparse
import logging
import json

from pype.vendor import ftrack_api
import ftrack_api
from pype.ftrack import BaseAction
@@ -43,7 +43,7 @@ class ThumbToChildren(BaseAction):
                'description': 'Push thumbnails to Childrens'
            })
        })

        session.commit()
        try:
            for entity in entities:
                thumbid = entity['thumbnail_id']
@@ -53,10 +53,11 @@ class ThumbToChildren(BaseAction):

            # inform the user that the job is done
            job['status'] = 'done'
        except Exception:
        except Exception as exc:
            session.rollback()
            # fail the job if something goes wrong
            job['status'] = 'failed'
            raise
            raise exc
        finally:
            session.commit()
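The hunk above settles into a pattern shared by these actions: mark the job done on success, roll back and mark it failed on error, and always commit in finally so the user never sees a job stuck in 'running'. A hedged sketch with a stub session (names invented for illustration):

# Hedged sketch of the job lifecycle shown above, with a stub session.
class StubSession:
    def rollback(self):
        print("rolled back")

    def commit(self):
        print("committed")

def run_with_job(session, job, work):
    try:
        work()
        job["status"] = "done"
    except Exception:
        session.rollback()
        job["status"] = "failed"
        raise
    finally:
        # The status is committed no matter how "work" ended.
        session.commit()

job = {"status": "running"}
run_with_job(StubSession(), job, lambda: None)
print(job["status"])  # -> done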
@@ -3,7 +3,7 @@ import sys
import argparse
import logging
import json
from pype.vendor import ftrack_api
import ftrack_api
from pype.ftrack import BaseAction
@@ -40,9 +40,9 @@ class ThumbToParent(BaseAction):
            'status': 'running',
            'data': json.dumps({
                'description': 'Push thumbnails to parents'
            })
        })

        })
        session.commit()
        try:
            for entity in entities:
                parent = None
@@ -74,10 +74,11 @@ class ThumbToParent(BaseAction):
            # inform the user that the job is done
            job['status'] = status or 'done'

        except Exception as e:
        except Exception as exc:
            session.rollback()
            # fail the job if something goes wrong
            job['status'] = 'failed'
            raise e
            raise exc

        finally:
            session.commit()
@@ -1,14 +1,6 @@
import os
import sys
import argparse
import logging
import collections
import json
import re

from pype.vendor import ftrack_api
from pype.ftrack import BaseAction
from avalon import io, inventory, schema
from pype.ftrack.lib.io_nonsingleton import DbConnector
@@ -134,7 +126,6 @@ class PypeUpdateFromV2_2_0(BaseAction):
            "title": title
        }

    def launch(self, session, entities, event):
        if 'values' not in event['data']:
            return
@@ -182,7 +173,7 @@ class PypeUpdateFromV2_2_0(BaseAction):
            {"type": "asset"},
            {"$unset": {"silo": ""}}
        )

        self.log.debug("- setting schema of assets to v.3")
        self.db_con.update_many(
            {"type": "asset"},
@@ -191,10 +182,8 @@ class PypeUpdateFromV2_2_0(BaseAction):

        return True


def register(session, plugins_presets={}):
    """Register plugin. Called when used as a plugin."""

    if not isinstance(session, ftrack_api.session.Session):
        return

    PypeUpdateFromV2_2_0(session, plugins_presets).register()
@@ -1,7 +1,5 @@
import os
from pype.vendor import ftrack_api
from pype.ftrack import BaseAction
from pype.vendor.ftrack_api import session as fa_session


class ActionAskWhereIRun(BaseAction):
@@ -1,7 +1,7 @@
import platform
import socket
import getpass
from pype.vendor import ftrack_api
import ftrack_api
from pype.ftrack import BaseAction
@@ -1,383 +0,0 @@
import os
import sys
import json
import argparse
import logging
import collections

from pypeapp import config
from pype.vendor import ftrack_api
from pype.ftrack import BaseAction, lib
from pype.ftrack.lib.io_nonsingleton import DbConnector
from bson.objectid import ObjectId


class SyncHierarchicalAttrs(BaseAction):

    db_con = DbConnector()
    ca_mongoid = lib.get_ca_mongoid()

    #: Action identifier.
    identifier = 'sync.hierarchical.attrs'
    #: Action label.
    label = "Pype Admin"
    variant = '- Sync Hier Attrs (Server)'
    #: Action description.
    description = 'Synchronize hierarchical attributes'
    #: Icon
    icon = '{}/ftrack/action_icons/PypeAdmin.svg'.format(
        os.environ.get(
            'PYPE_STATICS_SERVER',
            'http://localhost:{}'.format(
                config.get_presets().get('services', {}).get(
                    'statics_server', {}
                ).get('default_port', 8021)
            )
        )
    )

    def register(self):
        self.session.event_hub.subscribe(
            'topic=ftrack.action.discover',
            self._discover
        )

        self.session.event_hub.subscribe(
            'topic=ftrack.action.launch and data.actionIdentifier={}'.format(
                self.identifier
            ),
            self._launch
        )

    def discover(self, session, entities, event):
        ''' Validation '''
        role_check = False
        discover = False
        role_list = ['Pypeclub', 'Administrator', 'Project Manager']
        user = session.query(
            'User where id is "{}"'.format(event['source']['user']['id'])
        ).one()

        for role in user['user_security_roles']:
            if role['security_role']['name'] in role_list:
                role_check = True
                break

        if role_check is True:
            for entity in entities:
                context_type = entity.get('context_type', '').lower()
                if (
                    context_type in ('show', 'task') and
                    entity.entity_type.lower() != 'task'
                ):
                    discover = True
                    break

        return discover

    def launch(self, session, entities, event):
        self.interface_messages = {}

        user = session.query(
            'User where id is "{}"'.format(event['source']['user']['id'])
        ).one()

        job = session.create('Job', {
            'user': user,
            'status': 'running',
            'data': json.dumps({
                'description': 'Sync Hierarchical attributes'
            })
        })
        session.commit()
        self.log.debug('Job with id "{}" created'.format(job['id']))

        process_session = ftrack_api.Session(
            server_url=session.server_url,
            api_key=session.api_key,
            api_user=session.api_user,
            auto_connect_event_hub=True
        )
        try:
            # Collect hierarchical attrs
            self.log.debug('Collecting Hierarchical custom attributes started')
            custom_attributes = {}
            all_avalon_attr = process_session.query(
                'CustomAttributeGroup where name is "avalon"'
            ).one()

            error_key = (
                'Hierarchical attributes with set "default" value (not allowed)'
            )

            for cust_attr in all_avalon_attr['custom_attribute_configurations']:
                if 'avalon_' in cust_attr['key']:
                    continue

                if not cust_attr['is_hierarchical']:
                    continue

                if cust_attr['default']:
                    if error_key not in self.interface_messages:
                        self.interface_messages[error_key] = []
                    self.interface_messages[error_key].append(
                        cust_attr['label']
                    )

                    self.log.warning((
                        'Custom attribute "{}" has set default value.'
                        ' This attribute can\'t be synchronized'
                    ).format(cust_attr['label']))
                    continue

                custom_attributes[cust_attr['key']] = cust_attr

            self.log.debug(
                'Collecting Hierarchical custom attributes has finished'
            )

            if not custom_attributes:
                msg = 'No hierarchical attributes to sync.'
                self.log.debug(msg)
                return {
                    'success': True,
                    'message': msg
                }

            entity = entities[0]
            if entity.entity_type.lower() == 'project':
                project_name = entity['full_name']
            else:
                project_name = entity['project']['full_name']

            self.db_con.install()
            self.db_con.Session['AVALON_PROJECT'] = project_name

            _entities = self._get_entities(event, process_session)

            for entity in _entities:
                self.log.debug(30 * '-')
                self.log.debug(
                    'Processing entity "{}"'.format(entity.get('name', entity))
                )

                ent_name = entity.get('name', entity)
                if entity.entity_type.lower() == 'project':
                    ent_name = entity['full_name']

                for key in custom_attributes:
                    self.log.debug(30 * '*')
                    self.log.debug(
                        'Processing Custom attribute key "{}"'.format(key)
                    )
                    # check if entity has that attribute
                    if key not in entity['custom_attributes']:
                        error_key = 'Missing key on entities'
                        if error_key not in self.interface_messages:
                            self.interface_messages[error_key] = []

                        self.interface_messages[error_key].append(
                            '- key: "{}" - entity: "{}"'.format(key, ent_name)
                        )

                        self.log.error((
                            '- key "{}" not found on "{}"'
                        ).format(key, entity.get('name', entity)))
                        continue

                    value = self.get_hierarchical_value(key, entity)
                    if value is None:
                        error_key = (
                            'Missing value for key on entity'
                            ' and its parents (synchronization was skipped)'
                        )
                        if error_key not in self.interface_messages:
                            self.interface_messages[error_key] = []

                        self.interface_messages[error_key].append(
                            '- key: "{}" - entity: "{}"'.format(key, ent_name)
                        )

                        self.log.warning((
                            '- key "{}" not set on "{}" or its parents'
                        ).format(key, ent_name))
                        continue

                    self.update_hierarchical_attribute(entity, key, value)

            job['status'] = 'done'
            session.commit()

        except Exception:
            self.log.error(
                'Action "{}" failed'.format(self.label),
                exc_info=True
            )

        finally:
            self.db_con.uninstall()

            if job['status'] in ('queued', 'running'):
                job['status'] = 'failed'
                session.commit()

            if self.interface_messages:
                self.show_interface_from_dict(
                    messages=self.interface_messages,
                    title="something went wrong",
                    event=event
                )

        return True

    def get_hierarchical_value(self, key, entity):
        value = entity['custom_attributes'][key]
        if (
            value is not None or
            entity.entity_type.lower() == 'project'
        ):
            return value

        return self.get_hierarchical_value(key, entity['parent'])

    def update_hierarchical_attribute(self, entity, key, value):
        if (
            entity['context_type'].lower() not in ('show', 'task') or
            entity.entity_type.lower() == 'task'
        ):
            return

        ent_name = entity.get('name', entity)
        if entity.entity_type.lower() == 'project':
            ent_name = entity['full_name']

        hierarchy = '/'.join(
            [a['name'] for a in entity.get('ancestors', [])]
        )
        if hierarchy:
            hierarchy = '/'.join(
                [entity['project']['full_name'], hierarchy, entity['name']]
            )
        elif entity.entity_type.lower() == 'project':
            hierarchy = entity['full_name']
        else:
            hierarchy = '/'.join(
                [entity['project']['full_name'], entity['name']]
            )

        self.log.debug('- updating entity "{}"'.format(hierarchy))

        # collect entity's custom attributes
        custom_attributes = entity.get('custom_attributes')
        if not custom_attributes:
            return

        mongoid = custom_attributes.get(self.ca_mongoid)
        if not mongoid:
            error_key = 'Missing MongoID on entities (try SyncToAvalon first)'
            if error_key not in self.interface_messages:
                self.interface_messages[error_key] = []

            if ent_name not in self.interface_messages[error_key]:
                self.interface_messages[error_key].append(ent_name)

            self.log.warning(
                '-- entity "{}" is not synchronized to avalon. Skipping'.format(
                    ent_name
                )
            )
            return

        try:
            mongoid = ObjectId(mongoid)
        except Exception:
            error_key = 'Invalid MongoID on entities (try SyncToAvalon)'
            if error_key not in self.interface_messages:
                self.interface_messages[error_key] = []

            if ent_name not in self.interface_messages[error_key]:
                self.interface_messages[error_key].append(ent_name)

            self.log.warning(
                '-- entity "{}" has stored invalid MongoID. Skipping'.format(
                    ent_name
                )
            )
            return

        # Find entity in Mongo DB
        mongo_entity = self.db_con.find_one({'_id': mongoid})
        if not mongo_entity:
            error_key = 'Entities not found in Avalon DB (try SyncToAvalon)'
            if error_key not in self.interface_messages:
                self.interface_messages[error_key] = []

            if ent_name not in self.interface_messages[error_key]:
                self.interface_messages[error_key].append(ent_name)

            self.log.warning(
                '-- entity "{}" was not found in DB by id "{}". Skipping'.format(
                    ent_name, str(mongoid)
                )
            )
            return

        # Change value if entity has set its own
        entity_value = custom_attributes[key]
        if entity_value is not None:
            value = entity_value

        data = mongo_entity.get('data') or {}

        data[key] = value
        self.db_con.update_many(
            {'_id': mongoid},
            {'$set': {'data': data}}
        )

        for child in entity.get('children', []):
            self.update_hierarchical_attribute(child, key, value)


def register(session, plugins_presets):
    '''Register plugin. Called when used as a plugin.'''
    SyncHierarchicalAttrs(session, plugins_presets).register()


def main(arguments=None):
    '''Set up logging and register action.'''
    if arguments is None:
        arguments = []

    parser = argparse.ArgumentParser()
    # Allow setting of logging level from arguments.
    loggingLevels = {}
    for level in (
        logging.NOTSET, logging.DEBUG, logging.INFO, logging.WARNING,
        logging.ERROR, logging.CRITICAL
    ):
        loggingLevels[logging.getLevelName(level).lower()] = level

    parser.add_argument(
        '-v', '--verbosity',
        help='Set the logging output verbosity.',
        choices=loggingLevels.keys(),
        default='info'
    )
    namespace = parser.parse_args(arguments)

    # Set up basic logging
    logging.basicConfig(level=loggingLevels[namespace.verbosity])

    session = ftrack_api.Session()
    register(session)

    # Wait for events
    logging.info(
        'Registered actions and listening for events. Use Ctrl-C to abort.'
    )
    session.event_hub.wait()


if __name__ == '__main__':
    raise SystemExit(main(sys.argv[1:]))
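On the Mongo side, update_hierarchical_attribute above merges the value into the asset's existing data dict and writes it back with $set. A hedged pymongo sketch of just that write (connection and collection names are assumptions, not taken from the action):

# Hedged sketch: the Mongo write performed by update_hierarchical_attribute.
from bson.objectid import ObjectId
# from pymongo import MongoClient  # only needed for the usage example below

def store_hierarchical_value(collection, mongoid, key, value):
    doc = collection.find_one({"_id": ObjectId(mongoid)})
    if not doc:
        return
    data = doc.get("data") or {}
    data[key] = value
    # Mirrors the action: the whole "data" dict is replaced via $set.
    collection.update_many({"_id": ObjectId(mongoid)}, {"$set": {"data": data}})

# Usage (assumed local MongoDB and database/collection names):
# collection = MongoClient("mongodb://localhost:27017")["avalon"]["project"]
# store_hierarchical_value(collection, "5d2f0fd8...", "fps", 25)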
@@ -1,338 +1,227 @@
import os
import sys
import argparse
import logging
import json
import collections
import time
import traceback

from pype.ftrack import BaseAction
from pype.ftrack.lib.avalon_sync import SyncEntitiesFactory
from pypeapp import config
from pype.vendor import ftrack_api
from pype.ftrack import BaseAction, lib
from pype.vendor.ftrack_api import session as fa_session


class SyncToAvalon(BaseAction):
    '''
class SyncToAvalonServer(BaseAction):
    """
    Synchronizing data action - from Ftrack to Avalon DB

    Stores all information about an entity.
    - Name(string) - Most important information = identifier of entity
    - Parent(ObjectId) - Avalon Project Id, if entity is not project itself
    - Silo(string) - Last parent except project
    - Data(dictionary):
        - VisualParent(ObjectId) - Avalon Id of parent asset
        - Parents(array of string) - All parent names except project
        - Tasks(array of string) - Tasks on asset
        - FtrackId(string)
        - entityType(string) - entity's type on Ftrack
    * All Custom attributes in group 'Avalon' whose name doesn't start with 'avalon_'
    * All Custom attributes in group 'Avalon'
        - custom attributes that start with 'avalon_' are skipped

    * This information is also stored for all parent and child entities.
    * This information is stored for all entities in the whole project.

    Avalon ID of asset is stored to Ftrack -> Custom attribute 'avalon_mongo_id'.
    Avalon ID of asset is stored to Ftrack
    - Custom attribute 'avalon_mongo_id'.
    - the action DOES NOT create this custom attribute if it doesn't exist
    - run 'Create Custom Attributes' action or do it manually (Not recommended)

    If Ftrack entity already has Custom Attribute 'avalon_mongo_id' that stores ID:
    - name, parents and silo are checked -> shows an error if they are not exactly the same
    - after sync it is not allowed to change names or move entities

    If ID in 'avalon_mongo_id' is an empty string or is not found in DB:
    - tries to find entity by name
    - found:
        - raises an error if ftrackId/visual parent/parents are not the same
    - not found:
        - Creates asset/project
    '''

        - run 'Create Custom Attributes' action
        - or do it manually (Not recommended)
    """
    #: Action identifier.
    identifier = 'sync.to.avalon'
    identifier = "sync.to.avalon.server"
    #: Action label.
    label = "Pype Admin"
    variant = "- Sync To Avalon (Server)"
    #: Action description.
    description = 'Send data from Ftrack to Avalon'
    description = "Send data from Ftrack to Avalon"
    #: Action icon.
    icon = '{}/ftrack/action_icons/PypeAdmin.svg'.format(
    icon = "{}/ftrack/action_icons/PypeAdmin.svg".format(
        os.environ.get(
            'PYPE_STATICS_SERVER',
            'http://localhost:{}'.format(
                config.get_presets().get('services', {}).get(
                    'statics_server', {}
                ).get('default_port', 8021)
            "PYPE_STATICS_SERVER",
            "http://localhost:{}".format(
                config.get_presets().get("services", {}).get(
                    "rest_api", {}
                ).get("default_port", 8021)
            )
        )
    )

    project_query = (
        "select full_name, name, custom_attributes"
        ", project_schema._task_type_schema.types.name"
        " from Project where full_name is \"{}\""
    )

    entities_query = (
        "select id, name, parent_id, link, custom_attributes"
        " from TypedContext where project.full_name is \"{}\""
    )

    # Entity type names (lowered) that won't be synchronized with their children
    ignore_entity_types = ["task", "milestone"]
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.entities_factory = SyncEntitiesFactory(self.log, self.session)

    def register(self):
        self.session.event_hub.subscribe(
            'topic=ftrack.action.discover',
            self._discover
            "topic=ftrack.action.discover",
            self._discover,
            priority=self.priority
        )

        self.session.event_hub.subscribe(
            'topic=ftrack.action.launch and data.actionIdentifier={0}'.format(
                self.identifier
            ),
            self._launch
        )
        launch_subscription = (
            "topic=ftrack.action.launch and data.actionIdentifier={0}"
        ).format(self.identifier)
        self.session.event_hub.subscribe(launch_subscription, self._launch)

    def discover(self, session, entities, event):
        ''' Validation '''
        roleCheck = False
        discover = False
        roleList = ['Pypeclub', 'Administrator', 'Project Manager']
        userId = event['source']['user']['id']
        user = session.query('User where id is ' + userId).one()

        for role in user['user_security_roles']:
            if role['security_role']['name'] in roleList:
                roleCheck = True
        """ Validation """
        # Check if selection is valid
        valid_selection = False
        for ent in event["data"]["selection"]:
            # Ignore entities that are not tasks or projects
            if ent["entityType"].lower() in ["show", "task"]:
                valid_selection = True
                break
        if roleCheck is True:
            for entity in entities:
                if entity.entity_type.lower() not in ['task', 'assetversion']:
                    discover = True
                    break

        return discover
        if not valid_selection:
            return False

    def launch(self, session, entities, event):
        # Get user and check his roles
        user_id = event.get("source", {}).get("user", {}).get("id")
        if not user_id:
            return False

        user = session.query("User where id is \"{}\"".format(user_id)).first()
        if not user:
            return False

        role_list = ["Pypeclub", "Administrator", "Project Manager"]
        for role in user["user_security_roles"]:
            if role["security_role"]["name"] in role_list:
                return True
        return False

    def launch(self, session, in_entities, event):
        time_start = time.time()
        message = ""

        # JOB SETTINGS
        userId = event['source']['user']['id']
        user = session.query('User where id is ' + userId).one()
        self.show_message(event, "Synchronization - Preparing data", True)
        # Get ftrack project
        if in_entities[0].entity_type.lower() == "project":
            ft_project_name = in_entities[0]["full_name"]
        else:
            ft_project_name = in_entities[0]["project"]["full_name"]

        job = session.create('Job', {
            'user': user,
            'status': 'running',
            'data': json.dumps({
                'description': 'Sync Ftrack to Avalon.'
            })
        })
        session.commit()
        try:
            self.log.debug("Preparing entities for synchronization")
            self.entities_factory.launch_setup(ft_project_name)
            time_1 = time.time()

            if entities[0].entity_type.lower() == "project":
                ft_project_name = entities[0]["full_name"]
            else:
                ft_project_name = entities[0]["project"]["full_name"]
            self.entities_factory.set_cutom_attributes()
            time_2 = time.time()

            project_entities = session.query(
                self.entities_query.format(ft_project_name)
            ).all()
            # This must happen before all filtering!!!
            self.entities_factory.prepare_avalon_entities(ft_project_name)
            time_3 = time.time()

            ft_project = session.query(
                self.project_query.format(ft_project_name)
            ).one()
            self.entities_factory.filter_by_ignore_sync()
            time_4 = time.time()

            entities_by_id = {}
            entities_by_parent = collections.defaultdict(list)
            self.entities_factory.duplicity_regex_check()
            time_5 = time.time()

            entities_by_id[ft_project["id"]] = ft_project
            for ent in project_entities:
                entities_by_id[ent["id"]] = ent
                entities_by_parent[ent["parent_id"]].append(ent)
            self.entities_factory.prepare_ftrack_ent_data()
            time_6 = time.time()

            importable = []
            for ent_info in event["data"]["selection"]:
                ent = entities_by_id[ent_info["entityId"]]
                for link_ent_info in ent["link"]:
                    link_ent = entities_by_id[link_ent_info["id"]]
                    if (
                        ent.entity_type.lower() in self.ignore_entity_types or
                        link_ent in importable
                    ):
                        continue
            self.entities_factory.synchronize()
            time_7 = time.time()

                    importable.append(link_ent)
            self.log.debug(
                "*** Synchronization finished ***"
            )
            self.log.debug(
                "preparation <{}>".format(time_1 - time_start)
            )
            self.log.debug(
                "set_cutom_attributes <{}>".format(time_2 - time_1)
            )
            self.log.debug(
                "prepare_avalon_entities <{}>".format(time_3 - time_2)
            )
            self.log.debug(
                "filter_by_ignore_sync <{}>".format(time_4 - time_3)
            )
            self.log.debug(
                "duplicity_regex_check <{}>".format(time_5 - time_4)
            )
            self.log.debug(
                "prepare_ftrack_ent_data <{}>".format(time_6 - time_5)
            )
            self.log.debug(
                "synchronize <{}>".format(time_7 - time_6)
            )
            self.log.debug(
                "* Total time: {}".format(time_7 - time_start)
            )

            def add_children(parent_id):
                ents = entities_by_parent[parent_id]
                for ent in ents:
                    if ent.entity_type.lower() in self.ignore_entity_types:
                        continue

                    if ent not in importable:
                        importable.append(ent)

                    add_children(ent["id"])

            # add children of selection to importable
            for ent_info in event["data"]["selection"]:
                add_children(ent_info["entityId"])

            # Check names: REGEX in schema/duplicates - raise error if found
            all_names = []
            duplicates = []

            for entity in importable:
                lib.avalon_check_name(entity)
                if entity.entity_type.lower() == "project":
                    continue

                if entity['name'] in all_names:
                    duplicates.append("'{}'".format(entity['name']))
                else:
                    all_names.append(entity['name'])

            if len(duplicates) > 0:
                # TODO Show information to user and return False
                raise ValueError(
                    "Entity name duplication: {}".format(", ".join(duplicates))
                )
            report = self.entities_factory.report()
            if report and report.get("items"):
                default_title = "Synchronization report ({}):".format(
                    ft_project_name
                )

            # ----- PROJECT ------
            avalon_project = lib.get_avalon_project(ft_project)
            custom_attributes = lib.get_avalon_attr(session)

            # Import all entities to Avalon DB
            for entity in importable:
                result = lib.import_to_avalon(
                    session=session,
                    entity=entity,
                    ft_project=ft_project,
                    av_project=avalon_project,
                    custom_attributes=custom_attributes
                )
                self.show_interface(
                    items=report["items"],
                    title=report.get("title", default_title),
                    event=event
                )
                # TODO better error handling
                # maybe split into critical, warnings and messages?
                if 'errors' in result and len(result['errors']) > 0:
                    job['status'] = 'failed'
                    session.commit()

                    lib.show_errors(self, event, result['errors'])

                    return {
                        'success': False,
                        'message': "Sync to avalon FAILED"
                    }

                if avalon_project is None:
                    if 'project' in result:
                        avalon_project = result['project']

            job['status'] = 'done'
            session.commit()

        except ValueError as ve:
            # TODO remove this part!!!!
            job['status'] = 'failed'
            session.commit()
            message = str(ve)
            self.log.error(
                'Error during syncToAvalon: {}'.format(message),
                exc_info=True
            )

        except Exception as e:
            job['status'] = 'failed'
            session.commit()
            exc_type, exc_obj, exc_tb = sys.exc_info()
            fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
            log_message = "{}/{}/Line: {}".format(
                exc_type, fname, exc_tb.tb_lineno
            )
            self.log.error(
                'Error during syncToAvalon: {}'.format(log_message),
                exc_info=True
            )
            # TODO add traceback to message and show to user
            message = (
                'Unexpected Error'
                ' - Please check Log for more information'
            )

        finally:
            if job['status'] in ['queued', 'running']:
                job['status'] = 'failed'

            session.commit()

            time_end = time.time()
            self.log.debug("Synchronization took \"{}\"".format(
                str(time_end - time_start)
            ))

            if job["status"] != "failed":
                self.log.debug("Triggering Sync hierarchical attributes")
                self.trigger_action("sync.hierarchical.attrs", event)

        if len(message) > 0:
            message = "Unable to sync: {}".format(message)
            return {
                'success': False,
                'message': message
                "success": True,
                "message": "Synchronization Finished"
            }

        return {
            'success': True,
            'message': "Synchronization was successful"
        }
        except Exception:
            self.log.error(
                "Synchronization failed due to code error", exc_info=True
            )
            msg = "An error occurred during synchronization"
            title = "Synchronization report ({}):".format(ft_project_name)
            items = []
            items.append({
                "type": "label",
                "value": "# {}".format(msg)
            })
            items.append({
                "type": "label",
                "value": "## Traceback of the error"
            })
            items.append({
                "type": "label",
                "value": "<p>{}</p>".format(
                    str(traceback.format_exc()).replace(
                        "\n", "<br>").replace(
                        " ", "&nbsp;"
                    )
                )
            })

            report = {"items": []}
            try:
                report = self.entities_factory.report()
            except Exception:
                pass

            _items = report.get("items", [])
            if _items:
                items.append(self.entities_factory.report_splitter)
                items.extend(_items)

            self.show_interface(items, title, event)

            return {"success": True, "message": msg}

        finally:
            try:
                self.entities_factory.dbcon.uninstall()
            except Exception:
                pass

            try:
                self.entities_factory.session.close()
            except Exception:
                pass


def register(session, plugins_presets):
def register(session, plugins_presets={}):
    '''Register plugin. Called when used as a plugin.'''

    # Validate that session is an instance of ftrack_api.Session. If not,
    # assume that register is being called from an old or incompatible API and
    # return without doing anything.
    SyncToAvalon(session, plugins_presets).register()


def main(arguments=None):
    '''Set up logging and register action.'''
    if arguments is None:
        arguments = []

    parser = argparse.ArgumentParser()
    # Allow setting of logging level from arguments.
    loggingLevels = {}
    for level in (
        logging.NOTSET, logging.DEBUG, logging.INFO, logging.WARNING,
        logging.ERROR, logging.CRITICAL
    ):
        loggingLevels[logging.getLevelName(level).lower()] = level

    parser.add_argument(
        '-v', '--verbosity',
        help='Set the logging output verbosity.',
        choices=loggingLevels.keys(),
        default='info'
    )
    namespace = parser.parse_args(arguments)

    # Set up basic logging
    logging.basicConfig(level=loggingLevels[namespace.verbosity])

    session = ftrack_api.Session()
    register(session)

    # Wait for events
    logging.info(
        'Registered actions and listening for events. Use Ctrl-C to abort.'
    )
    session.event_hub.wait()


if __name__ == '__main__':
    raise SystemExit(main(sys.argv[1:]))
    SyncToAvalonServer(session, plugins_presets).register()
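Unlike the local variant, the server action registers its handlers directly on the event hub, passing a priority for discovery. A hedged sketch of that subscription pattern (session is assumed to be an ftrack_api.Session and the handlers plain callables):

# Hedged sketch of the event-hub registration pattern used above.
def register_action(session, identifier, on_discover, on_launch, priority=100):
    session.event_hub.subscribe(
        "topic=ftrack.action.discover",
        on_discover,
        priority=priority
    )
    launch_subscription = (
        "topic=ftrack.action.launch and data.actionIdentifier={0}"
    ).format(identifier)
    session.event_hub.subscribe(launch_subscription, on_launch)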
|
||||
|
|
|
|||
|
|
@ -1,6 +1,6 @@
|
|||
from pype.vendor import ftrack_api
|
||||
from pype.ftrack import BaseEvent, get_ca_mongoid
|
||||
from pype.ftrack.events.event_sync_to_avalon import Sync_to_Avalon
|
||||
from pype.ftrack.lib import BaseEvent
|
||||
from pype.ftrack.lib.avalon_sync import CustAttrIdKey
|
||||
from pype.ftrack.events.event_sync_to_avalon import SyncToAvalonEvent
|
||||
|
||||
|
||||
class DelAvalonIdFromNew(BaseEvent):
|
||||
|
|
@ -11,7 +11,8 @@ class DelAvalonIdFromNew(BaseEvent):
|
|||
|
||||
Priority of this event must be less than SyncToAvalon event
|
||||
'''
|
||||
priority = Sync_to_Avalon.priority - 1
|
||||
priority = SyncToAvalonEvent.priority - 1
|
||||
ignore_me = True
|
||||
|
||||
def launch(self, session, event):
|
||||
created = []
|
||||
|
|
@ -28,7 +29,7 @@ class DelAvalonIdFromNew(BaseEvent):
|
|||
|
||||
elif (
|
||||
entity.get('action', None) == 'update' and
|
||||
get_ca_mongoid() in entity['keys'] and
|
||||
CustAttrIdKey in entity['keys'] and
|
||||
entity_id in created
|
||||
):
|
||||
ftrack_entity = session.get(
|
||||
|
|
@ -37,13 +38,11 @@ class DelAvalonIdFromNew(BaseEvent):
|
|||
)
|
||||
|
||||
cust_attr = ftrack_entity['custom_attributes'][
|
||||
get_ca_mongoid()
|
||||
CustAttrIdKey
|
||||
]
|
||||
|
||||
if cust_attr != '':
|
||||
ftrack_entity['custom_attributes'][
|
||||
get_ca_mongoid()
|
||||
] = ''
|
||||
ftrack_entity['custom_attributes'][CustAttrIdKey] = ''
|
||||
session.commit()
|
||||
|
||||
except Exception:
|
||||
|
|
@@ -53,5 +52,4 @@ class DelAvalonIdFromNew(BaseEvent):
def register(session, plugins_presets):
    '''Register plugin. Called when used as an plugin.'''

    DelAvalonIdFromNew(session, plugins_presets).register()

@@ -1,4 +1,4 @@
-from pype.vendor import ftrack_api
+import ftrack_api
from pype.ftrack import BaseEvent
import operator

@@ -80,10 +80,10 @@ class NextTaskUpdate(BaseEvent):
                        '>>> [ {} ] updated to [ Ready ]'
                    ).format(path))
                except Exception as e:
+                    session.rollback()
                    self.log.warning((
                        '!!! [ {} ] status couldnt be set: [ {} ]'
-                    ).format(path, e))
-                    session.rollback()
+                    ).format(path, str(e)), exc_info=True)


def register(session, plugins_presets):

@@ -1,8 +1,8 @@
-from pype.vendor import ftrack_api
-from pype.ftrack import BaseEvent
+import ftrack_api
+from pype.ftrack.lib import BaseEvent


-class Radio_buttons(BaseEvent):
+class RadioButtons(BaseEvent):

    ignore_me = True
@@ -37,4 +37,4 @@ class Radio_buttons(BaseEvent):
def register(session, plugins_presets):
    '''Register plugin. Called when used as an plugin.'''

-    Radio_buttons(session, plugins_presets).register()
+    RadioButtons(session, plugins_presets).register()

@@ -1,213 +0,0 @@
import os
import sys

from pype.ftrack.lib.io_nonsingleton import DbConnector

from pype.vendor import ftrack_api
from pype.ftrack import BaseEvent, lib
from bson.objectid import ObjectId


class SyncHierarchicalAttrs(BaseEvent):
    # After sync to avalon event!
    priority = 101
    db_con = DbConnector()
    ca_mongoid = lib.get_ca_mongoid()

    def launch(self, session, event):
        # Filter entities and changed values if it makes sence to run script
        processable = []
        processable_ent = {}
        for ent in event['data']['entities']:
            # Ignore entities that are not tasks or projects
            if ent['entityType'].lower() not in ['task', 'show']:
                continue

            action = ent.get("action")
            # skip if remove (Entity does not exist in Ftrack)
            if action == "remove":
                continue

            # When entity was add we don't care about keys
            if action != "add":
                keys = ent.get('keys')
                if not keys:
                    continue

            entity = session.get(self._get_entity_type(ent), ent['entityId'])
            processable.append(ent)

            processable_ent[ent['entityId']] = {
                "entity": entity,
                "action": action,
                "link": entity["link"]
            }

        if not processable:
            return True

        # Find project of entities
        ft_project = None
        for entity_dict in processable_ent.values():
            try:
                base_proj = entity_dict['link'][0]
            except Exception:
                continue
            ft_project = session.get(base_proj['type'], base_proj['id'])
            break

        # check if project is set to auto-sync
        if (
            ft_project is None or
            'avalon_auto_sync' not in ft_project['custom_attributes'] or
            ft_project['custom_attributes']['avalon_auto_sync'] is False
        ):
            return True

        # Get hierarchical custom attributes from "avalon" group
        custom_attributes = {}
        query = 'CustomAttributeGroup where name is "avalon"'
        all_avalon_attr = session.query(query).one()
        for cust_attr in all_avalon_attr['custom_attribute_configurations']:
            if 'avalon_' in cust_attr['key']:
                continue
            if not cust_attr['is_hierarchical']:
                continue
            custom_attributes[cust_attr['key']] = cust_attr

        if not custom_attributes:
            return True

        self.db_con.install()
        self.db_con.Session['AVALON_PROJECT'] = ft_project['full_name']

        for ent in processable:
            entity_dict = processable_ent[ent['entityId']]

            entity = entity_dict["entity"]
            ent_path = "/".join([ent["name"] for ent in entity_dict['link']])
            action = entity_dict["action"]

            keys_to_process = {}
            if action == "add":
                # Store all custom attributes when entity was added
                for key in custom_attributes:
                    keys_to_process[key] = entity['custom_attributes'][key]
            else:
                # Update only updated keys
                for key in ent['keys']:
                    if key in custom_attributes:
                        keys_to_process[key] = entity['custom_attributes'][key]

            processed_keys = self.get_hierarchical_values(
                keys_to_process, entity
            )
            # Do the processing of values
            self.update_hierarchical_attribute(entity, processed_keys, ent_path)

        self.db_con.uninstall()

        return True

    def get_hierarchical_values(self, keys_dict, entity):
        # check already set values
        _set_keys = []
        for key, value in keys_dict.items():
            if value is not None:
                _set_keys.append(key)

        # pop set values from keys_dict
        set_keys = {}
        for key in _set_keys:
            set_keys[key] = keys_dict.pop(key)

        # find if entity has set values and pop them out
        keys_to_pop = []
        for key in keys_dict.keys():
            _val = entity["custom_attributes"][key]
            if _val:
                keys_to_pop.append(key)
                set_keys[key] = _val

        for key in keys_to_pop:
            keys_dict.pop(key)

        # if there are not keys to find value return found
        if not keys_dict:
            return set_keys

        # end recursion if entity is project
        if entity.entity_type.lower() == "project":
            for key, value in keys_dict.items():
                set_keys[key] = value

        else:
            result = self.get_hierarchical_values(keys_dict, entity["parent"])
            for key, value in result.items():
                set_keys[key] = value

        return set_keys

    def update_hierarchical_attribute(self, entity, keys_dict, ent_path):
        # TODO store all keys at once for entity
        custom_attributes = entity.get('custom_attributes')
        if not custom_attributes:
            return

        mongoid = custom_attributes.get(self.ca_mongoid)
        if not mongoid:
            return

        try:
            mongoid = ObjectId(mongoid)
        except Exception:
            return

        mongo_entity = self.db_con.find_one({'_id': mongoid})
        if not mongo_entity:
            return

        changed_keys = {}
        data = mongo_entity.get('data') or {}
        for key, value in keys_dict.items():
            cur_value = data.get(key)
            if cur_value:
                if cur_value == value:
                    continue
            changed_keys[key] = value
            data[key] = value

        if not changed_keys:
            return

        self.log.debug(
            "{} - updated hierarchical attributes: {}".format(
                ent_path, str(changed_keys)
            )
        )

        self.db_con.update_many(
            {'_id': mongoid},
            {'$set': {'data': data}}
        )

        for child in entity.get('children', []):
            _keys_dict = {}
            for key, value in keys_dict.items():
                if key not in child.get('custom_attributes', {}):
                    continue
                child_value = child['custom_attributes'][key]
                if child_value is not None:
                    continue
                _keys_dict[key] = value

            if not _keys_dict:
                continue
            child_path = "/".join([ent["name"] for ent in child['link']])
            self.update_hierarchical_attribute(child, _keys_dict, child_path)


def register(session, plugins_presets):
    '''Register plugin. Called when used as an plugin.'''

    SyncHierarchicalAttrs(session, plugins_presets).register()

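For orientation: the removed SyncHierarchicalAttrs resolved unset hierarchical attributes by walking up the entity's parents (get_hierarchical_values above). A standalone sketch of that lookup order, using plain dicts rather than ftrack entities:

def resolve_hierarchical(key, entity):
    # Return the first explicitly set value found on the way from the
    # entity toward the project root; None if nothing sets it.
    while entity is not None:
        value = entity["custom_attributes"].get(key)
        if value is not None:
            return value
        entity = entity.get("parent")
    return None

shot = {
    "custom_attributes": {"fps": None},
    "parent": {"custom_attributes": {"fps": 25}, "parent": None}
}
assert resolve_hierarchical("fps", shot) == 25
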
File diff suppressed because it is too large

@@ -1,11 +1,11 @@
import os
import sys
import re
-from pype.vendor import ftrack_api
+import ftrack_api
from pype.ftrack import BaseEvent


-class Test_Event(BaseEvent):
+class TestEvent(BaseEvent):

    ignore_me = True
@@ -23,4 +23,4 @@ class Test_Event(BaseEvent):
def register(session, plugins_presets):
    '''Register plugin. Called when used as an plugin.'''

-    Test_Event(session, plugins_presets).register()
+    TestEvent(session, plugins_presets).register()

@@ -1,4 +1,3 @@
-from pype.vendor import ftrack_api
from pype.ftrack import BaseEvent

@@ -26,33 +25,41 @@ class ThumbnailEvents(BaseEvent):
            # Update task thumbnail from published version
            # if (entity['entityType'] == 'assetversion' and
            #         entity['action'] == 'encoded'):
-            if (
-                entity['entityType'] == 'assetversion'
-                and 'thumbid' in (entity.get('keys') or [])
+            elif (
+                entity['entityType'] == 'assetversion' and
+                entity['action'] != 'remove' and
+                'thumbid' in (entity.get('keys') or [])
            ):
                version = session.get('AssetVersion', entity['entityId'])
+                if not version:
+                    continue

                thumbnail = version.get('thumbnail')
-                if thumbnail:
-                    parent = version['asset']['parent']
-                    task = version['task']
-                    parent['thumbnail_id'] = version['thumbnail_id']
-                    if parent.entity_type.lower() == "project":
-                        name = parent["full_name"]
-                    else:
-                        name = parent["name"]
-                    msg = '>>> Updating thumbnail for shot [ {} ]'.format(name)
+                if not thumbnail:
+                    continue

-                    if task:
-                        task['thumbnail_id'] = version['thumbnail_id']
-                        msg += " and task [ {} ]".format(task["name"])
+                parent = version['asset']['parent']
+                task = version['task']
+                parent['thumbnail_id'] = version['thumbnail_id']
+                if parent.entity_type.lower() == "project":
+                    name = parent["full_name"]
+                else:
+                    name = parent["name"]
+                msg = '>>> Updating thumbnail for shot [ {} ]'.format(name)

-                    self.log.info(msg)
+                if task:
+                    task['thumbnail_id'] = version['thumbnail_id']
+                    msg += " and task [ {} ]".format(task["name"])

-                    session.commit()
+                self.log.info(msg)

+                try:
+                    session.commit()
+                except Exception:
+                    session.rollback()


def register(session, plugins_presets):
    '''Register plugin. Called when used as an plugin.'''

    ThumbnailEvents(session, plugins_presets).register()

@@ -1,12 +1,15 @@
-from pype.vendor import ftrack_api
-from pype.ftrack import BaseEvent, lib
-from pype.ftrack.lib.io_nonsingleton import DbConnector
-from bson.objectid import ObjectId
-from pypeapp import config
-from pypeapp import Anatomy
-import subprocess
import os
import re
+import subprocess
+
+from pype.ftrack import BaseEvent
+from pype.ftrack.lib.avalon_sync import CustAttrIdKey
+from pype.ftrack.lib.io_nonsingleton import DbConnector
+
+from bson.objectid import ObjectId
+
+from pypeapp import config
+from pypeapp import Anatomy


class UserAssigmentEvent(BaseEvent):
@@ -36,7 +39,6 @@ class UserAssigmentEvent(BaseEvent):
    """

    db_con = DbConnector()
-    ca_mongoid = lib.get_ca_mongoid()

    def error(self, *err):
        for e in err:
@@ -105,7 +107,7 @@ class UserAssigmentEvent(BaseEvent):
        self.db_con.Session['AVALON_PROJECT'] = task['project']['full_name']

        avalon_entity = None
-        parent_id = parent['custom_attributes'].get(self.ca_mongoid)
+        parent_id = parent['custom_attributes'].get(CustAttrIdKey)
        if parent_id:
            parent_id = ObjectId(parent_id)
            avalon_entity = self.db_con.find_one({

@@ -1,72 +1,135 @@
-from pype.vendor import ftrack_api
from pype.ftrack import BaseEvent
from pypeapp import config


class VersionToTaskStatus(BaseEvent):

+    # Presets usage
+    default_status_mapping = {}

    def launch(self, session, event):
        '''Propagates status from version to task when changed'''
        session.commit()

-        # start of event procedure ----------------------------------
        for entity in event['data'].get('entities', []):
-            # Filter non-assetversions
-            if (
-                entity['entityType'] == 'assetversion' and
-                'statusid' in (entity.get('keys') or [])
-            ):
+            # Filter AssetVersions
+            if entity["entityType"] != "assetversion":
+                continue

-                version = session.get('AssetVersion', entity['entityId'])
-                try:
-                    version_status = session.get(
-                        'Status', entity['changes']['statusid']['new']
-                    )
-                except Exception:
+            # Skip if statusid not in keys (in changes)
+            keys = entity.get("keys")
+            if not keys or "statusid" not in keys:
                continue

+            # Get new version task name
+            version_status_id = (
+                entity
+                .get("changes", {})
+                .get("statusid", {})
+                .get("new", {})
+            )

+            # Just check that `new` is set to any value
+            if not version_status_id:
+                continue

+            try:
+                version_status = session.get("Status", version_status_id)
+            except Exception:
+                self.log.warning(
+                    "Troubles with query status id [ {} ]".format(
+                        version_status_id
+                    ),
+                    exc_info=True
+                )

+            if not version_status:
+                continue

+            version_status_orig = version_status["name"]

+            # Load status mapping from presets
+            status_mapping = (
+                config.get_presets()
+                .get("ftrack", {})
+                .get("ftrack_config", {})
+                .get("status_version_to_task")
+            ) or self.default_status_mapping

+            # Skip if mapping is empty
+            if not status_mapping:
+                continue

+            # Lower version status name and check if has mapping
+            version_status = version_status_orig.lower()
+            new_status_names = status_mapping.get(version_status)
+            if not new_status_names:
+                continue

+            self.log.debug(
+                "Processing AssetVersion status change: [ {} ]".format(
+                    version_status_orig
+                )
+            )

+            # Backwards compatibility (convert string to list)
+            if isinstance(new_status_names, str):
+                new_status_names = [new_status_names]

+            # Lower all names from presets
+            new_status_names = [name.lower() for name in new_status_names]

+            # Get entities necessary for processing
+            version = session.get("AssetVersion", entity["entityId"])
+            task = version.get("task")
+            if not task:
+                continue

+            project_schema = task["project"]["project_schema"]
+            # Get all available statuses for Task
+            statuses = project_schema.get_statuses("Task", task["type_id"])
+            # map lowered status name with it's object
+            stat_names_low = {
+                status["name"].lower(): status for status in statuses
+            }

+            new_status = None
+            for status_name in new_status_names:
+                if status_name not in stat_names_low:
+                    continue
-                task_status = version_status
-                task = version['task']
-                self.log.info('>>> version status: [ {} ]'.format(
-                    version_status['name']))

-                status_to_set = None
-                # Filter to versions with status change to "render complete"
-                if version_status['name'].lower() == 'reviewed':
-                    status_to_set = 'Change requested'
+                # store object of found status
+                new_status = stat_names_low[status_name]
+                self.log.debug("Status to set: [ {} ]".format(
+                    new_status["name"]
+                ))
+                break

-                if version_status['name'].lower() == 'approved':
-                    status_to_set = 'Complete'
+            # Skip if status names were not found for paticulat entity
+            if not new_status:
+                self.log.warning(
+                    "Any of statuses from presets can be set: {}".format(
+                        str(new_status_names)
+                    )
+                )
+                continue

-                self.log.info(
-                    '>>> status to set: [ {} ]'.format(status_to_set))
+            # Get full path to task for logging
+            ent_path = "/".join([ent["name"] for ent in task["link"]])

-                if status_to_set is not None:
-                    query = 'Status where name is "{}"'.format(status_to_set)
-                    try:
-                        task_status = session.query(query).one()
-                    except Exception:
-                        self.log.info(
-                            '!!! status was not found in Ftrack [ {} ]'.format(
-                                status_to_set
-                            ))
-                        continue

-                # Proceed if the task status was set
-                if task_status is not None:
-                    # Get path to task
-                    path = task['name']
-                    for p in task['ancestors']:
-                        path = p['name'] + '/' + path

-                    # Setting task status
-                    try:
-                        task['status'] = task_status
-                        session.commit()
-                    except Exception as e:
-                        self.log.warning('!!! [ {} ] status couldnt be set:\
-                            [ {} ]'.format(path, e))
-                    else:
-                        self.log.info('>>> [ {} ] updated to [ {} ]'.format(
-                            path, task_status['name']))
+            # Setting task status
+            try:
+                task["status"] = new_status
+                session.commit()
+                self.log.debug("[ {} ] Status updated to [ {} ]".format(
+                    ent_path, new_status['name']
+                ))
+            except Exception:
+                session.rollback()
+                self.log.warning(
+                    "[ {} ]Status couldn't be set".format(ent_path),
+                    exc_info=True
+                )


def register(session, plugins_presets):

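The rewrite above swaps two hard-coded rules (version "reviewed" set the task to "Change requested", "approved" set it to "Complete") for a preset lookup under ftrack/ftrack_config/status_version_to_task. A preset reproducing the old behaviour would presumably look like this (the list form is the new style; a plain string is still accepted for backwards compatibility, and all names are lower-cased before matching):

status_version_to_task = {
    "reviewed": "Change requested",
    "approved": ["Complete"]
}
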
@@ -1 +1,2 @@
from .ftrack_server import FtrackServer
+from .lib import check_ftrack_url

@@ -7,13 +7,12 @@ import socket
import argparse
import atexit
import time
-from urllib.parse import urlparse

import requests
-from pype.vendor import ftrack_api
+import ftrack_api
from pype.ftrack.lib import credentials
from pype.ftrack.ftrack_server import FtrackServer
-from pype.ftrack.ftrack_server.lib import ftrack_events_mongo_settings
+from pype.ftrack.ftrack_server.lib import (
+    ftrack_events_mongo_settings, check_ftrack_url
+)
import socket_thread

@@ -25,36 +24,6 @@ class MongoPermissionsError(Exception):
        super().__init__(message)


-def check_ftrack_url(url, log_errors=True):
-    """Checks if Ftrack server is responding"""
-    if not url:
-        print('ERROR: Ftrack URL is not set!')
-        return None
-
-    url = url.strip('/ ')
-
-    if 'http' not in url:
-        if url.endswith('ftrackapp.com'):
-            url = 'https://' + url
-        else:
-            url = 'https://{0}.ftrackapp.com'.format(url)
-    try:
-        result = requests.get(url, allow_redirects=False)
-    except requests.exceptions.RequestException:
-        if log_errors:
-            print('ERROR: Entered Ftrack URL is not accesible!')
-        return False
-
-    if (result.status_code != 200 or 'FTRACK_VERSION' not in result.headers):
-        if log_errors:
-            print('ERROR: Entered Ftrack URL is not accesible!')
-        return False
-
-    print('DEBUG: Ftrack server {} is accessible.'.format(url))
-
-    return url
-
-
def check_mongo_url(host, port, log_error=False):
    """Checks if mongo server is responding"""
    sock = None
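Note that check_ftrack_url (moved into ftrack_server/lib.py by this commit, see below) normalizes the URL as well as probing it, so callers should keep the returned value. Expected results based on the body above, assuming the normalized server answers 200 with a FTRACK_VERSION header:

# check_ftrack_url("mystudio")               -> "https://mystudio.ftrackapp.com"
# check_ftrack_url("mystudio.ftrackapp.com") -> "https://mystudio.ftrackapp.com"
# check_ftrack_url("")                       -> None (URL not set)
# unreachable or non-Ftrack server           -> False
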
@@ -96,9 +65,8 @@ def validate_credentials(url, user, api):
    except Exception as e:
        print(
            'ERROR: Can\'t log into Ftrack with used credentials:'
-            ' Ftrack server: "{}" // Username: {} // API key: {}'.format(
-                url, user, api
-            ))
+            ' Ftrack server: "{}" // Username: {} // API key: {}'
+        ).format(url, user, api)
        return False

    print('DEBUG: Credentials Username: "{}", API key: "{}" are valid.'.format(
@@ -176,9 +144,9 @@ def legacy_server(ftrack_url):
            ).format(str(max_fail_count), str(wait_time_after_max_fail)))
            subproc_failed_count += 1
        elif ((
-                datetime.datetime.now() - subproc_last_failed
-        ).seconds > wait_time_after_max_fail):
-            subproc_failed_count = 0
+            datetime.datetime.now() - subproc_last_failed
+        ).seconds > wait_time_after_max_fail):
+            subproc_failed_count = 0

        # If thread failed test Ftrack and Mongo connection
        elif subproc.poll() is not None:
@@ -268,6 +236,7 @@ def main_loop(ftrack_url):
        # Run threads only if Ftrack is accessible
        if not ftrack_accessible or not mongo_accessible:
            if not mongo_accessible and not printed_mongo_error:
+                mongo_url = mongo_hostname + ":" + mongo_port
                print("Can't access Mongo {}".format(mongo_url))

            if not ftrack_accessible and not printed_ftrack_error:
@@ -305,9 +274,9 @@ def main_loop(ftrack_url):
            ).format(str(max_fail_count), str(wait_time_after_max_fail)))
            storer_failed_count += 1
        elif ((
-                datetime.datetime.now() - storer_last_failed
-        ).seconds > wait_time_after_max_fail):
-            storer_failed_count = 0
+            datetime.datetime.now() - storer_last_failed
+        ).seconds > wait_time_after_max_fail):
+            storer_failed_count = 0

        # If thread failed test Ftrack and Mongo connection
        elif not storer_thread.isAlive():
@@ -341,13 +310,13 @@ def main_loop(ftrack_url):
            processor_failed_count += 1

        elif ((
-                datetime.datetime.now() - processor_last_failed
-        ).seconds > wait_time_after_max_fail):
-            processor_failed_count = 0
+            datetime.datetime.now() - processor_last_failed
+        ).seconds > wait_time_after_max_fail):
+            processor_failed_count = 0

        # If thread failed test Ftrack and Mongo connection
        elif not processor_thread.isAlive():
-            if storer_thread.mongo_error:
+            if processor_thread.mongo_error:
                raise Exception(
                    "Exiting because have issue with acces to MongoDB"
                )

@@ -2,7 +2,7 @@ import os
import sys
import types
import importlib
-from pype.vendor import ftrack_api
+import ftrack_api
import time
import logging
import inspect
@@ -100,7 +100,10 @@ class FtrackServer:
                log.warning(msg, exc_info=e)

        if len(register_functions_dict) < 1:
-            raise Exception
+            raise Exception((
+                "There are no events with register function."
+                " Registered paths: \"{}\""
+            ).format("| ".join(paths)))

        # Load presets for setting plugins
        key = "user"

@@ -1,9 +1,32 @@
import os
import sys
import logging
import getpass
import atexit
import tempfile
import threading
import datetime
import time
import queue
import pymongo

import requests
import ftrack_api
import ftrack_api.session
import ftrack_api.cache
import ftrack_api.operation
import ftrack_api._centralized_storage_scenario
import ftrack_api.event
from ftrack_api.logging import LazyLogMessage as L
try:
    from urllib.parse import urlparse, parse_qs
except ImportError:
    from urlparse import urlparse, parse_qs

from pypeapp import Logger

from pype.ftrack.lib.custom_db_connector import DbConnector


def ftrack_events_mongo_settings():
    host = None
@@ -48,7 +71,9 @@ def ftrack_events_mongo_settings():


def get_ftrack_event_mongo_info():
-    host, port, database, username, password, collection, auth_db = ftrack_events_mongo_settings()
+    host, port, database, username, password, collection, auth_db = (
+        ftrack_events_mongo_settings()
+    )
    user_pass = ""
    if username and password:
        user_pass = "{}:{}@".format(username, password)
@@ -66,3 +91,333 @@ def get_ftrack_event_mongo_info():
    url = "mongodb://{}{}{}{}".format(user_pass, socket_path, dab, auth)

    return url, database, collection


def check_ftrack_url(url, log_errors=True):
    """Checks if Ftrack server is responding"""
    if not url:
        print('ERROR: Ftrack URL is not set!')
        return None

    url = url.strip('/ ')

    if 'http' not in url:
        if url.endswith('ftrackapp.com'):
            url = 'https://' + url
        else:
            url = 'https://{0}.ftrackapp.com'.format(url)
    try:
        result = requests.get(url, allow_redirects=False)
    except requests.exceptions.RequestException:
        if log_errors:
            print('ERROR: Entered Ftrack URL is not accesible!')
        return False

    if (result.status_code != 200 or 'FTRACK_VERSION' not in result.headers):
        if log_errors:
            print('ERROR: Entered Ftrack URL is not accesible!')
        return False

    print('DEBUG: Ftrack server {} is accessible.'.format(url))

    return url


class StorerEventHub(ftrack_api.event.hub.EventHub):
    def __init__(self, *args, **kwargs):
        self.sock = kwargs.pop("sock")
        super(StorerEventHub, self).__init__(*args, **kwargs)

    def _handle_packet(self, code, packet_identifier, path, data):
        """Override `_handle_packet` which extend heartbeat"""
        code_name = self._code_name_mapping[code]
        if code_name == "heartbeat":
            # Reply with heartbeat.
            self.sock.sendall(b"storer")
            return self._send_packet(self._code_name_mapping['heartbeat'])

        elif code_name == "connect":
            event = ftrack_api.event.base.Event(
                topic="pype.storer.started",
                data={},
                source={
                    "id": self.id,
                    "user": {"username": self._api_user}
                }
            )
            self._event_queue.put(event)

        return super(StorerEventHub, self)._handle_packet(
            code, packet_identifier, path, data
        )


class ProcessEventHub(ftrack_api.event.hub.EventHub):
    url, database, table_name = get_ftrack_event_mongo_info()

    is_table_created = False
    pypelog = Logger().get_logger("Session Processor")

    def __init__(self, *args, **kwargs):
        self.dbcon = DbConnector(
            mongo_url=self.url,
            database_name=self.database,
            table_name=self.table_name
        )
        self.sock = kwargs.pop("sock")
        super(ProcessEventHub, self).__init__(*args, **kwargs)

    def prepare_dbcon(self):
        try:
            self.dbcon.install()
            self.dbcon._database.list_collection_names()
        except pymongo.errors.AutoReconnect:
            self.pypelog.error(
                "Mongo server \"{}\" is not responding, exiting.".format(
                    os.environ["AVALON_MONGO"]
                )
            )
            sys.exit(0)

        except pymongo.errors.OperationFailure:
            self.pypelog.error((
                "Error with Mongo access, probably permissions."
                "Check if exist database with name \"{}\""
                " and collection \"{}\" inside."
            ).format(self.database, self.table_name))
            self.sock.sendall(b"MongoError")
            sys.exit(0)

    def wait(self, duration=None):
        """Overriden wait

        Event are loaded from Mongo DB when queue is empty. Handled event is
        set as processed in Mongo DB.
        """
        started = time.time()
        self.prepare_dbcon()
        while True:
            try:
                event = self._event_queue.get(timeout=0.1)
            except queue.Empty:
                if not self.load_events():
                    time.sleep(0.5)
            else:
                try:
                    self._handle(event)
                    self.dbcon.update_one(
                        {"id": event["id"]},
                        {"$set": {"pype_data.is_processed": True}}
                    )
                except pymongo.errors.AutoReconnect:
                    self.pypelog.error((
                        "Mongo server \"{}\" is not responding, exiting."
                    ).format(os.environ["AVALON_MONGO"]))
                    sys.exit(0)
                # Additional special processing of events.
                if event['topic'] == 'ftrack.meta.disconnected':
                    break

            if duration is not None:
                if (time.time() - started) > duration:
                    break

    def load_events(self):
        """Load not processed events sorted by stored date"""
        ago_date = datetime.datetime.now() - datetime.timedelta(days=3)
        result = self.dbcon.delete_many({
            "pype_data.stored": {"$lte": ago_date},
            "pype_data.is_processed": True
        })

        not_processed_events = self.dbcon.find(
            {"pype_data.is_processed": False}
        ).sort(
            [("pype_data.stored", pymongo.ASCENDING)]
        )

        found = False
        for event_data in not_processed_events:
            new_event_data = {
                k: v for k, v in event_data.items()
                if k not in ["_id", "pype_data"]
            }
            try:
                event = ftrack_api.event.base.Event(**new_event_data)
            except Exception:
                self.logger.exception(L(
                    'Failed to convert payload into event: {0}',
                    event_data
                ))
                continue
            found = True
            self._event_queue.put(event)

        return found

    def _handle_packet(self, code, packet_identifier, path, data):
        """Override `_handle_packet` which skip events and extend heartbeat"""
        code_name = self._code_name_mapping[code]
        if code_name == "event":
            return
        if code_name == "heartbeat":
            self.sock.sendall(b"processor")
            return self._send_packet(self._code_name_mapping["heartbeat"])

        return super()._handle_packet(code, packet_identifier, path, data)


class SocketSession(ftrack_api.session.Session):
    '''An isolated session for interaction with an ftrack server.'''
    def __init__(
        self, server_url=None, api_key=None, api_user=None, auto_populate=True,
        plugin_paths=None, cache=None, cache_key_maker=None,
        auto_connect_event_hub=None, schema_cache_path=None,
        plugin_arguments=None, sock=None, Eventhub=None
    ):
        super(ftrack_api.session.Session, self).__init__()
        self.logger = logging.getLogger(
            __name__ + '.' + self.__class__.__name__
        )
        self._closed = False

        if server_url is None:
            server_url = os.environ.get('FTRACK_SERVER')

        if not server_url:
            raise TypeError(
                'Required "server_url" not specified. Pass as argument or set '
                'in environment variable FTRACK_SERVER.'
            )

        self._server_url = server_url

        if api_key is None:
            api_key = os.environ.get(
                'FTRACK_API_KEY',
                # Backwards compatibility
                os.environ.get('FTRACK_APIKEY')
            )

        if not api_key:
            raise TypeError(
                'Required "api_key" not specified. Pass as argument or set in '
                'environment variable FTRACK_API_KEY.'
            )

        self._api_key = api_key

        if api_user is None:
            api_user = os.environ.get('FTRACK_API_USER')
            if not api_user:
                try:
                    api_user = getpass.getuser()
                except Exception:
                    pass

        if not api_user:
            raise TypeError(
                'Required "api_user" not specified. Pass as argument, set in '
                'environment variable FTRACK_API_USER or one of the standard '
                'environment variables used by Python\'s getpass module.'
            )

        self._api_user = api_user

        # Currently pending operations.
        self.recorded_operations = ftrack_api.operation.Operations()
        self.record_operations = True

        self.cache_key_maker = cache_key_maker
        if self.cache_key_maker is None:
            self.cache_key_maker = ftrack_api.cache.StringKeyMaker()

        # Enforce always having a memory cache at top level so that the same
        # in-memory instance is returned from session.
        self.cache = ftrack_api.cache.LayeredCache([
            ftrack_api.cache.MemoryCache()
        ])

        if cache is not None:
            if callable(cache):
                cache = cache(self)

            if cache is not None:
                self.cache.caches.append(cache)

        self._managed_request = None
        self._request = requests.Session()
        self._request.auth = ftrack_api.session.SessionAuthentication(
            self._api_key, self._api_user
        )

        self.auto_populate = auto_populate

        # Fetch server information and in doing so also check credentials.
        self._server_information = self._fetch_server_information()

        # Now check compatibility of server based on retrieved information.
        self.check_server_compatibility()

        # Construct event hub and load plugins.
        if Eventhub is None:
            Eventhub = ftrack_api.event.hub.EventHub
        self._event_hub = Eventhub(
            self._server_url,
            self._api_user,
            self._api_key,
            sock=sock
        )

        self._auto_connect_event_hub_thread = None
        if auto_connect_event_hub in (None, True):
            # Connect to event hub in background thread so as not to block main
            # session usage waiting for event hub connection.
            self._auto_connect_event_hub_thread = threading.Thread(
                target=self._event_hub.connect
            )
            self._auto_connect_event_hub_thread.daemon = True
            self._auto_connect_event_hub_thread.start()

        # To help with migration from auto_connect_event_hub default changing
        # from True to False.
        self._event_hub._deprecation_warning_auto_connect = (
            auto_connect_event_hub is None
        )

        # Register to auto-close session on exit.
        atexit.register(self.close)

        self._plugin_paths = plugin_paths
        if self._plugin_paths is None:
            self._plugin_paths = os.environ.get(
                'FTRACK_EVENT_PLUGIN_PATH', ''
            ).split(os.pathsep)

        self._discover_plugins(plugin_arguments=plugin_arguments)

        # TODO: Make schemas read-only and non-mutable (or at least without
        # rebuilding types)?
        if schema_cache_path is not False:
            if schema_cache_path is None:
                schema_cache_path = os.environ.get(
                    'FTRACK_API_SCHEMA_CACHE_PATH', tempfile.gettempdir()
                )

            schema_cache_path = os.path.join(
                schema_cache_path, 'ftrack_api_schema_cache.json'
            )

        self.schemas = self._load_schemas(schema_cache_path)
        self.types = self._build_entity_type_classes(self.schemas)

        ftrack_api._centralized_storage_scenario.register(self)

        self._configure_locations()
        self.event_hub.publish(
            ftrack_api.event.base.Event(
                topic='ftrack.api.session.ready',
                data=dict(
                    session=self
                )
            ),
            synchronous=True
        )

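Both hubs above answer the server heartbeat and additionally push an identifying token (b"storer" / b"processor") to the watchdog socket, which is how the parent process detects a stalled subprocess. A minimal sketch of the receiving side, assuming the watchdog only cares that some bytes arrive within the timeout (the real implementation is the SocketThread further below, with MAX_TIMEOUT = 35):

import socket

def watch_heartbeats(port, timeout=35):
    # Accept one subprocess connection and require a token at least
    # every `timeout` seconds; a silent peer counts as stalled.
    server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    server.bind(("localhost", port))
    server.listen(1)
    conn, _addr = server.accept()
    conn.settimeout(timeout)
    try:
        while True:
            data = conn.recv(16)
            if not data:
                return "closed"
    except socket.timeout:
        return "stalled"
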
@@ -1,292 +0,0 @@
import logging
import os
import atexit
import datetime
import tempfile
import threading
import time
import requests
import queue
import pymongo

import ftrack_api
import ftrack_api.session
import ftrack_api.cache
import ftrack_api.operation
import ftrack_api._centralized_storage_scenario
import ftrack_api.event
from ftrack_api.logging import LazyLogMessage as L

from pype.ftrack.lib.custom_db_connector import DbConnector
from pype.ftrack.ftrack_server.lib import get_ftrack_event_mongo_info
from pypeapp import Logger

log = Logger().get_logger("Session processor")


class ProcessEventHub(ftrack_api.event.hub.EventHub):
    url, database, table_name = get_ftrack_event_mongo_info()

    is_table_created = False

    def __init__(self, *args, **kwargs):
        self.dbcon = DbConnector(
            mongo_url=self.url,
            database_name=self.database,
            table_name=self.table_name
        )
        self.sock = kwargs.pop("sock")
        super(ProcessEventHub, self).__init__(*args, **kwargs)

    def prepare_dbcon(self):
        try:
            self.dbcon.install()
            self.dbcon._database.collection_names()
        except pymongo.errors.AutoReconnect:
            log.error("Mongo server \"{}\" is not responding, exiting.".format(
                os.environ["AVALON_MONGO"]
            ))
            sys.exit(0)

        except pymongo.errors.OperationFailure:
            log.error((
                "Error with Mongo access, probably permissions."
                "Check if exist database with name \"{}\""
                " and collection \"{}\" inside."
            ).format(self.database, self.table_name))
            self.sock.sendall(b"MongoError")
            sys.exit(0)

    def wait(self, duration=None):
        """Overriden wait

        Event are loaded from Mongo DB when queue is empty. Handled event is
        set as processed in Mongo DB.
        """
        started = time.time()
        self.prepare_dbcon()
        while True:
            try:
                event = self._event_queue.get(timeout=0.1)
            except queue.Empty:
                if not self.load_events():
                    time.sleep(0.5)
            else:
                try:
                    self._handle(event)
                    self.dbcon.update_one(
                        {"id": event["id"]},
                        {"$set": {"pype_data.is_processed": True}}
                    )
                except pymongo.errors.AutoReconnect:
                    log.error((
                        "Mongo server \"{}\" is not responding, exiting."
                    ).format(os.environ["AVALON_MONGO"]))
                    sys.exit(0)
                # Additional special processing of events.
                if event['topic'] == 'ftrack.meta.disconnected':
                    break

            if duration is not None:
                if (time.time() - started) > duration:
                    break

    def load_events(self):
        """Load not processed events sorted by stored date"""
        ago_date = datetime.datetime.now() - datetime.timedelta(days=3)
        result = self.dbcon.delete_many({
            "pype_data.stored": {"$lte": ago_date},
            "pype_data.is_processed": True
        })

        not_processed_events = self.dbcon.find(
            {"pype_data.is_processed": False}
        ).sort(
            [("pype_data.stored", pymongo.ASCENDING)]
        )

        found = False
        for event_data in not_processed_events:
            new_event_data = {
                k: v for k, v in event_data.items()
                if k not in ["_id", "pype_data"]
            }
            try:
                event = ftrack_api.event.base.Event(**new_event_data)
            except Exception:
                self.logger.exception(L(
                    'Failed to convert payload into event: {0}',
                    event_data
                ))
                continue
            found = True
            self._event_queue.put(event)

        return found

    def _handle_packet(self, code, packet_identifier, path, data):
        """Override `_handle_packet` which skip events and extend heartbeat"""
        code_name = self._code_name_mapping[code]
        if code_name == "event":
            return
        if code_name == "heartbeat":
            self.sock.sendall(b"processor")
            return self._send_packet(self._code_name_mapping["heartbeat"])

        return super()._handle_packet(code, packet_identifier, path, data)


class ProcessSession(ftrack_api.session.Session):
    '''An isolated session for interaction with an ftrack server.'''
    def __init__(
        self, server_url=None, api_key=None, api_user=None, auto_populate=True,
        plugin_paths=None, cache=None, cache_key_maker=None,
        auto_connect_event_hub=None, schema_cache_path=None,
        plugin_arguments=None, sock=None
    ):
        super(ftrack_api.session.Session, self).__init__()
        self.logger = logging.getLogger(
            __name__ + '.' + self.__class__.__name__
        )
        self._closed = False

        if server_url is None:
            server_url = os.environ.get('FTRACK_SERVER')

        if not server_url:
            raise TypeError(
                'Required "server_url" not specified. Pass as argument or set '
                'in environment variable FTRACK_SERVER.'
            )

        self._server_url = server_url

        if api_key is None:
            api_key = os.environ.get(
                'FTRACK_API_KEY',
                # Backwards compatibility
                os.environ.get('FTRACK_APIKEY')
            )

        if not api_key:
            raise TypeError(
                'Required "api_key" not specified. Pass as argument or set in '
                'environment variable FTRACK_API_KEY.'
            )

        self._api_key = api_key

        if api_user is None:
            api_user = os.environ.get('FTRACK_API_USER')
            if not api_user:
                try:
                    api_user = getpass.getuser()
                except Exception:
                    pass

        if not api_user:
            raise TypeError(
                'Required "api_user" not specified. Pass as argument, set in '
                'environment variable FTRACK_API_USER or one of the standard '
                'environment variables used by Python\'s getpass module.'
            )

        self._api_user = api_user

        # Currently pending operations.
        self.recorded_operations = ftrack_api.operation.Operations()
        self.record_operations = True

        self.cache_key_maker = cache_key_maker
        if self.cache_key_maker is None:
            self.cache_key_maker = ftrack_api.cache.StringKeyMaker()

        # Enforce always having a memory cache at top level so that the same
        # in-memory instance is returned from session.
        self.cache = ftrack_api.cache.LayeredCache([
            ftrack_api.cache.MemoryCache()
        ])

        if cache is not None:
            if callable(cache):
                cache = cache(self)

            if cache is not None:
                self.cache.caches.append(cache)

        self._managed_request = None
        self._request = requests.Session()
        self._request.auth = ftrack_api.session.SessionAuthentication(
            self._api_key, self._api_user
        )

        self.auto_populate = auto_populate

        # Fetch server information and in doing so also check credentials.
        self._server_information = self._fetch_server_information()

        # Now check compatibility of server based on retrieved information.
        self.check_server_compatibility()

        # Construct event hub and load plugins.
        self._event_hub = ProcessEventHub(
            self._server_url,
            self._api_user,
            self._api_key,
            sock=sock
        )

        self._auto_connect_event_hub_thread = None
        if auto_connect_event_hub in (None, True):
            # Connect to event hub in background thread so as not to block main
            # session usage waiting for event hub connection.
            self._auto_connect_event_hub_thread = threading.Thread(
                target=self._event_hub.connect
            )
            self._auto_connect_event_hub_thread.daemon = True
            self._auto_connect_event_hub_thread.start()

        # To help with migration from auto_connect_event_hub default changing
        # from True to False.
        self._event_hub._deprecation_warning_auto_connect = (
            auto_connect_event_hub is None
        )

        # Register to auto-close session on exit.
        atexit.register(self.close)

        self._plugin_paths = plugin_paths
        if self._plugin_paths is None:
            self._plugin_paths = os.environ.get(
                'FTRACK_EVENT_PLUGIN_PATH', ''
            ).split(os.pathsep)

        self._discover_plugins(plugin_arguments=plugin_arguments)

        # TODO: Make schemas read-only and non-mutable (or at least without
        # rebuilding types)?
        if schema_cache_path is not False:
            if schema_cache_path is None:
                schema_cache_path = os.environ.get(
                    'FTRACK_API_SCHEMA_CACHE_PATH', tempfile.gettempdir()
                )

            schema_cache_path = os.path.join(
                schema_cache_path, 'ftrack_api_schema_cache.json'
            )

        self.schemas = self._load_schemas(schema_cache_path)
        self.types = self._build_entity_type_classes(self.schemas)

        ftrack_api._centralized_storage_scenario.register(self)

        self._configure_locations()
        self.event_hub.publish(
            ftrack_api.event.base.Event(
                topic='ftrack.api.session.ready',
                data=dict(
                    session=self
                )
            ),
            synchronous=True
        )

@@ -1,257 +0,0 @@
import logging
import os
import atexit
import tempfile
import threading
import requests

import ftrack_api
import ftrack_api.session
import ftrack_api.cache
import ftrack_api.operation
import ftrack_api._centralized_storage_scenario
import ftrack_api.event
from ftrack_api.logging import LazyLogMessage as L


class StorerEventHub(ftrack_api.event.hub.EventHub):
    def __init__(self, *args, **kwargs):
        self.sock = kwargs.pop("sock")
        super(StorerEventHub, self).__init__(*args, **kwargs)

    def _handle_packet(self, code, packet_identifier, path, data):
        """Override `_handle_packet` which extend heartbeat"""
        if self._code_name_mapping[code] == "heartbeat":
            # Reply with heartbeat.
            self.sock.sendall(b"storer")
            return self._send_packet(self._code_name_mapping['heartbeat'])

        return super(StorerEventHub, self)._handle_packet(
            code, packet_identifier, path, data
        )


class StorerSession(ftrack_api.session.Session):
    '''An isolated session for interaction with an ftrack server.'''
    def __init__(
        self, server_url=None, api_key=None, api_user=None, auto_populate=True,
        plugin_paths=None, cache=None, cache_key_maker=None,
        auto_connect_event_hub=None, schema_cache_path=None,
        plugin_arguments=None, sock=None
    ):
        '''Initialise session.

        *server_url* should be the URL of the ftrack server to connect to
        including any port number. If not specified attempt to look up from
        :envvar:`FTRACK_SERVER`.

        *api_key* should be the API key to use for authentication whilst
        *api_user* should be the username of the user in ftrack to record
        operations against. If not specified, *api_key* should be retrieved
        from :envvar:`FTRACK_API_KEY` and *api_user* from
        :envvar:`FTRACK_API_USER`.

        If *auto_populate* is True (the default), then accessing entity
        attributes will cause them to be automatically fetched from the server
        if they are not already. This flag can be changed on the session
        directly at any time.

        *plugin_paths* should be a list of paths to search for plugins. If not
        specified, default to looking up :envvar:`FTRACK_EVENT_PLUGIN_PATH`.

        *cache* should be an instance of a cache that fulfils the
        :class:`ftrack_api.cache.Cache` interface and will be used as the cache
        for the session. It can also be a callable that will be called with the
        session instance as sole argument. The callable should return ``None``
        if a suitable cache could not be configured, but session instantiation
        can continue safely.

        .. note::

            The session will add the specified cache to a pre-configured layered
            cache that specifies the top level cache as a
            :class:`ftrack_api.cache.MemoryCache`. Therefore, it is unnecessary
            to construct a separate memory cache for typical behaviour. Working
            around this behaviour or removing the memory cache can lead to
            unexpected behaviour.

        *cache_key_maker* should be an instance of a key maker that fulfils the
        :class:`ftrack_api.cache.KeyMaker` interface and will be used to
        generate keys for objects being stored in the *cache*. If not specified,
        a :class:`~ftrack_api.cache.StringKeyMaker` will be used.

        If *auto_connect_event_hub* is True then embedded event hub will be
        automatically connected to the event server and allow for publishing and
        subscribing to **non-local** events. If False, then only publishing and
        subscribing to **local** events will be possible until the hub is
        manually connected using :meth:`EventHub.connect
        <ftrack_api.event.hub.EventHub.connect>`.

        .. note::

            The event hub connection is performed in a background thread to
            improve session startup time. If a registered plugin requires a
            connected event hub then it should check the event hub connection
            status explicitly. Subscribing to events does *not* require a
            connected event hub.

        Enable schema caching by setting *schema_cache_path* to a folder path.
        If not set, :envvar:`FTRACK_API_SCHEMA_CACHE_PATH` will be used to
        determine the path to store cache in. If the environment variable is
        also not specified then a temporary directory will be used. Set to
        `False` to disable schema caching entirely.

        *plugin_arguments* should be an optional mapping (dict) of keyword
        arguments to pass to plugin register functions upon discovery. If a
        discovered plugin has a signature that is incompatible with the passed
        arguments, the discovery mechanism will attempt to reduce the passed
        arguments to only those that the plugin accepts. Note that a warning
        will be logged in this case.

        '''
        super(ftrack_api.session.Session, self).__init__()
        self.logger = logging.getLogger(
            __name__ + '.' + self.__class__.__name__
        )
        self._closed = False

        if server_url is None:
            server_url = os.environ.get('FTRACK_SERVER')

        if not server_url:
            raise TypeError(
                'Required "server_url" not specified. Pass as argument or set '
                'in environment variable FTRACK_SERVER.'
            )

        self._server_url = server_url

        if api_key is None:
            api_key = os.environ.get(
                'FTRACK_API_KEY',
                # Backwards compatibility
                os.environ.get('FTRACK_APIKEY')
            )

        if not api_key:
            raise TypeError(
                'Required "api_key" not specified. Pass as argument or set in '
                'environment variable FTRACK_API_KEY.'
            )

        self._api_key = api_key

        if api_user is None:
            api_user = os.environ.get('FTRACK_API_USER')
            if not api_user:
                try:
                    api_user = getpass.getuser()
                except Exception:
                    pass

        if not api_user:
            raise TypeError(
                'Required "api_user" not specified. Pass as argument, set in '
                'environment variable FTRACK_API_USER or one of the standard '
                'environment variables used by Python\'s getpass module.'
            )

        self._api_user = api_user

        # Currently pending operations.
        self.recorded_operations = ftrack_api.operation.Operations()
        self.record_operations = True

        self.cache_key_maker = cache_key_maker
        if self.cache_key_maker is None:
            self.cache_key_maker = ftrack_api.cache.StringKeyMaker()

        # Enforce always having a memory cache at top level so that the same
        # in-memory instance is returned from session.
        self.cache = ftrack_api.cache.LayeredCache([
            ftrack_api.cache.MemoryCache()
        ])

        if cache is not None:
            if callable(cache):
                cache = cache(self)

            if cache is not None:
                self.cache.caches.append(cache)

        self._managed_request = None
        self._request = requests.Session()
        self._request.auth = ftrack_api.session.SessionAuthentication(
            self._api_key, self._api_user
        )

        self.auto_populate = auto_populate

        # Fetch server information and in doing so also check credentials.
        self._server_information = self._fetch_server_information()

        # Now check compatibility of server based on retrieved information.
        self.check_server_compatibility()

        # Construct event hub and load plugins.
        self._event_hub = StorerEventHub(
            self._server_url,
            self._api_user,
            self._api_key,
            sock=sock
        )

        self._auto_connect_event_hub_thread = None
        if auto_connect_event_hub in (None, True):
            # Connect to event hub in background thread so as not to block main
            # session usage waiting for event hub connection.
            self._auto_connect_event_hub_thread = threading.Thread(
                target=self._event_hub.connect
            )
            self._auto_connect_event_hub_thread.daemon = True
            self._auto_connect_event_hub_thread.start()

        # To help with migration from auto_connect_event_hub default changing
        # from True to False.
        self._event_hub._deprecation_warning_auto_connect = (
            auto_connect_event_hub is None
        )

        # Register to auto-close session on exit.
        atexit.register(self.close)

        self._plugin_paths = plugin_paths
        if self._plugin_paths is None:
            self._plugin_paths = os.environ.get(
                'FTRACK_EVENT_PLUGIN_PATH', ''
            ).split(os.pathsep)

        self._discover_plugins(plugin_arguments=plugin_arguments)

        # TODO: Make schemas read-only and non-mutable (or at least without
        # rebuilding types)?
        if schema_cache_path is not False:
            if schema_cache_path is None:
                schema_cache_path = os.environ.get(
                    'FTRACK_API_SCHEMA_CACHE_PATH', tempfile.gettempdir()
                )

            schema_cache_path = os.path.join(
                schema_cache_path, 'ftrack_api_schema_cache.json'
            )

        self.schemas = self._load_schemas(schema_cache_path)
        self.types = self._build_entity_type_classes(self.schemas)

        ftrack_api._centralized_storage_scenario.register(self)

        self._configure_locations()
        self.event_hub.publish(
            ftrack_api.event.base.Event(
                topic='ftrack.api.session.ready',
                data=dict(
                    session=self
                )
            ),
            synchronous=True
        )

@@ -1,7 +1,5 @@
import os
-import sys
-import time
import signal
import socket
import threading
import subprocess
@@ -10,7 +8,9 @@ from pypeapp import Logger

class SocketThread(threading.Thread):
    """Thread that checks suprocess of storer of processor of events"""

+    MAX_TIMEOUT = 35

    def __init__(self, name, port, filepath):
        super(SocketThread, self).__init__()
        self.log = Logger().get_logger("SocketThread", "Event Thread")

@@ -1,12 +1,9 @@
import os
import sys
import datetime
import signal
import socket
import pymongo

from ftrack_server import FtrackServer
-from pype.ftrack.ftrack_server.session_processor import ProcessSession
+from pype.ftrack.ftrack_server.lib import SocketSession, ProcessEventHub
from pypeapp import Logger

log = Logger().get_logger("Event processor")
@@ -24,14 +21,15 @@ def main(args):
    sock.sendall(b"CreatedProcess")
    try:
-        session = ProcessSession(auto_connect_event_hub=True, sock=sock)
-        server = FtrackServer('event')
+        session = SocketSession(
+            auto_connect_event_hub=True, sock=sock, Eventhub=ProcessEventHub
+        )
+        server = FtrackServer("event")
        log.debug("Launched Ftrack Event processor")
        server.run_server(session)

-    except Exception as exc:
-        import traceback
-        traceback.print_tb(exc.__traceback__)
+    except Exception:
+        log.error("Event server crashed. See traceback below", exc_info=True)

    finally:
        log.debug("First closing socket")

@@ -5,16 +5,24 @@ import signal
import socket
import pymongo

+import ftrack_api
from ftrack_server import FtrackServer
-from pype.ftrack.ftrack_server.lib import get_ftrack_event_mongo_info
+from pype.ftrack.ftrack_server.lib import (
+    get_ftrack_event_mongo_info,
+    SocketSession,
+    StorerEventHub
+)
from pype.ftrack.lib.custom_db_connector import DbConnector
-from session_storer import StorerSession
from pypeapp import Logger

log = Logger().get_logger("Event storer")

-url, database, table_name = get_ftrack_event_mongo_info()

+class SessionFactory:
+    session = None


+url, database, table_name = get_ftrack_event_mongo_info()
dbcon = DbConnector(
    mongo_url=url,
    database_name=database,
@ -24,10 +32,11 @@ dbcon = DbConnector(
# ignore_topics = ["ftrack.meta.connected"]
ignore_topics = []


def install_db():
    try:
        dbcon.install()
        dbcon._database.collection_names()
        dbcon._database.list_collection_names()
    except pymongo.errors.AutoReconnect:
        log.error("Mongo server \"{}\" is not responding, exiting.".format(
            os.environ["AVALON_MONGO"]
@ -49,7 +58,7 @@ def launch(event):

    try:
        # dbcon.insert_one(event_data)
        dbcon.update({"id": event_id}, event_data, upsert=True)
        dbcon.replace_one({"id": event_id}, event_data, upsert=True)
        log.debug("Event: {} stored".format(event_id))

    except pymongo.errors.AutoReconnect:
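A note on the `update` → `replace_one` change above: PyMongo deprecated the generic `update` method, and `replace_one` with `upsert=True` keeps event storage idempotent — a re-delivered event overwrites the stored copy instead of failing on a duplicate key. A minimal sketch of the same pattern, assuming a hypothetical `pymongo` collection (names are illustrative only):

    from pymongo import MongoClient

    collection = MongoClient()["pype"]["events"]  # hypothetical database/collection

    def store_event(event_data):
        # Overwrite any previously stored copy of the same event, or insert
        # it if it has not been seen yet (idempotent thanks to upsert=True).
        collection.replace_one({"id": event_data["id"]}, event_data, upsert=True)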
@ -65,10 +74,75 @@ def launch(event):
    )


def trigger_sync(event):
    session = SessionFactory.session
    if session is None:
        log.warning("Session is not set. Can't trigger Sync to avalon action.")
        return True

    source_id = event.get("source", {}).get("id")
    if not source_id or source_id != session.event_hub.id:
        return

    projects = session.query("Project").all()
    if not projects:
        return True

    query = {
        "pype_data.is_processed": False,
        "topic": "ftrack.action.launch",
        "data.actionIdentifier": "sync.to.avalon.server"
    }
    set_dict = {
        "$set": {"pype_data.is_processed": True}
    }
    dbcon.update_many(query, set_dict)

    selections = []
    for project in projects:
        if project["status"] != "active":
            continue

        auto_sync = project["custom_attributes"].get("avalon_auto_sync")
        if not auto_sync:
            continue

        selections.append({
            "entityId": project["id"],
            "entityType": "show"
        })

    if not selections:
        return

    user = session.query(
        "User where username is \"{}\"".format(session.api_user)
    ).one()
    user_data = {
        "username": user["username"],
        "id": user["id"]
    }

    for selection in selections:
        event_data = {
            "actionIdentifier": "sync.to.avalon.server",
            "selection": [selection]
        }
        session.event_hub.publish(
            ftrack_api.event.base.Event(
                topic="ftrack.action.launch",
                data=event_data,
                source=dict(user=user_data)
            ),
            on_error="ignore"
        )


def register(session):
    '''Registers the event, subscribing the discover and launch topics.'''
    install_db()
    session.event_hub.subscribe("topic=*", launch)
    session.event_hub.subscribe("topic=pype.storer.started", trigger_sync)


def main(args):
@ -84,7 +158,10 @@ def main(args):
    sock.sendall(b"CreatedStore")

    try:
        session = StorerSession(auto_connect_event_hub=True, sock=sock)
        session = SocketSession(
            auto_connect_event_hub=True, sock=sock, Eventhub=StorerEventHub
        )
        SessionFactory.session = session
        register(session)
        server = FtrackServer("event")
        log.debug("Launched Ftrack Event storer")
@ -1,4 +1,3 @@
import os
import sys
import time
import datetime

@ -6,8 +5,7 @@ import signal
import threading

from ftrack_server import FtrackServer
from pype.vendor import ftrack_api
from pype.vendor.ftrack_api.event.hub import EventHub
import ftrack_api
from pypeapp import Logger

log = Logger().get_logger("Event Server Legacy")
@ -37,7 +35,10 @@ class TimerChecker(threading.Thread):

        if not self.session.event_hub.connected:
        if not connected:
            if (datetime.datetime.now() - start).seconds > self.max_time_out:
            if (
                (datetime.datetime.now() - start).seconds >
                self.max_time_out
            ):
                log.error((
                    "Exiting event server. Session was not connected"
                    " to ftrack server in {} seconds."
@ -61,7 +62,7 @@ class TimerChecker(threading.Thread):
def main(args):
    check_thread = None
    try:
        server = FtrackServer('event')
        server = FtrackServer("event")
        session = ftrack_api.Session(auto_connect_event_hub=True)

        check_thread = TimerChecker(server, session)
@ -1,4 +1,4 @@
from .avalon_sync import *
from . import avalon_sync
from .credentials import *
from .ftrack_app_handler import *
from .ftrack_event_handler import *

File diff suppressed because it is too large
@ -1,6 +1,6 @@
import os
import json
from pype.vendor import ftrack_api
import ftrack_api
import appdirs
@ -22,7 +22,12 @@ import pymongo
from pymongo.client_session import ClientSession

class NotActiveTable(Exception):
    pass
    def __init__(self, *args, **kwargs):
        msg = "Active table is not set. (This is a bug)"
        if not (args or kwargs):
            args = (msg,)
        super().__init__(*args, **kwargs)


def auto_reconnect(func):
    """Handle automatic reconnect with 3 retries."""
@ -37,7 +42,16 @@ def auto_reconnect(func):
                time.sleep(0.1)
            else:
                raise
    return decorated


def check_active_table(func):
    """Check that DbConnector has an active table before the db method is called."""
    @functools.wraps(func)
    def decorated(obj, *args, **kwargs):
        if not obj.active_table:
            raise NotActiveTable()
        return func(obj, *args, **kwargs)
    return decorated
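Only the tail of `auto_reconnect` survives in the hunk above; a sketch of what the full decorator likely looks like, assuming three retries with a short sleep (the retry count and delay match the visible tail, but the surrounding loop is an assumption, not confirmed by this diff):

    import functools
    import time

    import pymongo

    def auto_reconnect(func):
        """Retry a DB call a few times while Mongo is reconnecting."""
        @functools.wraps(func)
        def decorated(*args, **kwargs):
            for attempt in range(3):  # assumed retry count
                try:
                    return func(*args, **kwargs)
                except pymongo.errors.AutoReconnect:
                    if attempt < 2:
                        time.sleep(0.1)
                    else:
                        raise
        return decorated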
@ -53,7 +67,6 @@ def check_active_table(func):


class DbConnector:

    log = logging.getLogger(__name__)
    timeout = 1000
@ -68,10 +81,18 @@ class DbConnector:

        self.active_table = table_name

    def __getitem__(self, key):
        # gives direct access to a collection without setting `active_table`
        return self._database[key]

    def __getattribute__(self, attr):
        # not all methods of the PyMongo database are implemented here;
        # with this fallback it is possible to use them too
        try:
            return super().__getattribute__(attr)
            return super(DbConnector, self).__getattribute__(attr)
        except AttributeError:
            if self.active_table is None:
                raise NotActiveTable()
            return self._database[self.active_table].__getattribute__(attr)

    def install(self):
@ -131,6 +152,15 @@ class DbConnector:
    def exist_table(self, table_name):
        return table_name in self.tables()

    def create_table(self, name, **options):
        if self.exist_table(name):
            return

        return self._database.create_collection(name, **options)

    def exist_table(self, table_name):
        return table_name in self.tables()

    def tables(self):
        """List available tables
        Returns:
@ -166,18 +196,21 @@ class DbConnector:
    @check_active_table
    @auto_reconnect
    def find(self, filter, projection=None, sort=None, **options):
        options["projection"] = projection
        options["sort"] = sort
        return self._database[self.active_table].find(filter, **options)
        return self._database[self.active_table].find(
            filter, projection, **options
        )

    @check_active_table
    @auto_reconnect
    def find_one(self, filter, projection=None, sort=None, **options):
        assert isinstance(filter, dict), "filter must be <dict>"

        options["projection"] = projection
        options["sort"] = sort
        return self._database[self.active_table].find_one(filter, **options)
        return self._database[self.active_table].find_one(
            filter,
            projection,
            **options
        )

    @check_active_table
    @auto_reconnect
@ -202,8 +235,8 @@ class DbConnector:

    @check_active_table
    @auto_reconnect
    def distinct(self, *args, **kwargs):
        return self._database[self.active_table].distinct(*args, **kwargs)
    def distinct(self, key, **options):
        return self._database[self.active_table].distinct(key, **options)

    @check_active_table
    @auto_reconnect
@ -216,10 +249,14 @@ class DbConnector:
    @auto_reconnect
    def delete_one(self, filter, collation=None, **options):
        options["collation"] = collation
        return self._database[self.active_table].delete_one(filter, **options)
        return self._database[self.active_table].delete_one(
            filter, **options
        )

    @check_active_table
    @auto_reconnect
    def delete_many(self, filter, collation=None, **options):
        options["collation"] = collation
        return self._database[self.active_table].delete_many(filter, **options)
        return self._database[self.active_table].delete_many(
            filter, **options
        )
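Taken together, these methods let callers treat the connector like a single active collection, with automatic reconnects and a guard against use before a table is set. A hedged usage sketch (the connection string and table name are made up for illustration):

    dbcon = DbConnector(
        mongo_url="mongodb://localhost:27017",  # hypothetical URL
        database_name="pype"                    # hypothetical database
    )
    dbcon.install()
    dbcon.active_table = "ftrack_events"        # hypothetical table name

    # Decorated methods reconnect automatically and raise NotActiveTable
    # when no active table has been set.
    doc = dbcon.find_one({"id": "some-event-id"})
    dbcon.delete_many({"pype_data.is_processed": True})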
@ -345,25 +345,44 @@ class AppAction(BaseHandler):
        statuses = presets['status_update']

        actual_status = entity['status']['name'].lower()
        next_status_name = None
        for key, value in statuses.items():
            if actual_status in value or '_any_' in value:
                if key != '_ignore_':
                    next_status_name = key
        already_tested = []
        ent_path = "/".join(
            [ent["name"] for ent in entity['link']]
        )
        while True:
            next_status_name = None
            for key, value in statuses.items():
                if key in already_tested:
                    continue
                if actual_status in value or '_any_' in value:
                    if key != '_ignore_':
                        next_status_name = key
                        already_tested.append(key)
                    break
                already_tested.append(key)

            if next_status_name is None:
                break

            if next_status_name is not None:
                try:
                    query = 'Status where name is "{}"'.format(
                        next_status_name
                    )
                    status = session.query(query).one()

                    entity['status'] = status
                    session.commit()
                    self.log.debug("Changing status to \"{}\" <{}>".format(
                        next_status_name, ent_path
                    ))
                    break

                except Exception:
                    session.rollback()
                    msg = (
                        'Status "{}" in presets wasn\'t found on Ftrack'
                    ).format(next_status_name)
                        'Status "{}" in presets wasn\'t found'
                        ' on Ftrack entity type "{}"'
                    ).format(next_status_name, entity.entity_type)
                    self.log.warning(msg)

        # Set origin avalon environments
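The loop above walks the `status_update` preset mapping until no untested status matches. A plausible shape for that preset — the concrete status names here are illustrative assumptions, not taken from this diff:

    status_update = {
        # next status         statuses it may be applied from
        "In Progress": ["not started", "ready"],
        "_ignore_":    ["omitted", "on hold"],
        "Ready":       ["_any_"]
    }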
@ -1,9 +1,8 @@
import functools
import time
from pypeapp import Logger
from pype.vendor import ftrack_api
from pype.vendor.ftrack_api import session as fa_session
from pype.ftrack.ftrack_server import session_processor
import ftrack_api
from pype.ftrack.ftrack_server.lib import SocketSession


class MissingPermision(Exception):
@ -13,6 +12,13 @@ class MissingPermision(Exception):
        super().__init__(message)


class PreregisterException(Exception):
    def __init__(self, message=None):
        if not message:
            message = "Pre-registration conditions were not met"
        super().__init__(message)


class BaseHandler(object):
    '''Custom Action base class
@ -35,7 +41,7 @@ class BaseHandler(object):
        self.log = Logger().get_logger(self.__class__.__name__)
        if not (
            isinstance(session, ftrack_api.session.Session) or
            isinstance(session, session_processor.ProcessSession)
            isinstance(session, SocketSession)
        ):
            raise Exception((
                "Session object entered with args is instance of \"{}\""
@ -89,15 +95,17 @@ class BaseHandler(object):
                '!{} "{}" - You\'re missing required {} permissions'
            ).format(self.type, label, str(MPE)))
        except AssertionError as ae:
            self.log.info((
            self.log.warning((
                '!{} "{}" - {}'
            ).format(self.type, label, str(ae)))
        except NotImplementedError:
            self.log.error((
                '{} "{}" - Register method is not implemented'
            ).format(
                self.type, label)
            )
            ).format(self.type, label))
        except PreregisterException as exc:
            self.log.warning((
                '{} "{}" - {}'
            ).format(self.type, label, str(exc)))
        except Exception as e:
            self.log.error('{} "{}" - Registration failed ({})'.format(
                self.type, label, str(e))
@ -119,6 +127,7 @@ class BaseHandler(object):
            try:
                return func(*args, **kwargs)
            except Exception as exc:
                self.session.rollback()
                msg = '{} "{}": Failed ({})'.format(self.type, label, str(exc))
                self.log.error(msg, exc_info=True)
                return {
@ -163,10 +172,10 @@ class BaseHandler(object):

        if result is True:
            return
        msg = "Pre-register conditions were not met"
        msg = None
        if isinstance(result, str):
            msg = result
        raise Exception(msg)
        raise PreregisterException(msg)

    def preregister(self):
        '''
@ -233,7 +242,7 @@ class BaseHandler(object):
            _entities is None or
            _entities[0].get(
                'link', None
            ) == fa_session.ftrack_api.symbol.NOT_SET
            ) == ftrack_api.symbol.NOT_SET
        ):
            _entities = self._get_entities(event)
@ -437,7 +446,7 @@ class BaseHandler(object):
            'applicationId=ftrack.client.web and user.id="{0}"'
        ).format(user_id)
        self.session.event_hub.publish(
            fa_session.ftrack_api.event.base.Event(
            ftrack_api.event.base.Event(
                topic='ftrack.action.trigger-user-interface',
                data=dict(
                    type='message',
@ -485,8 +494,8 @@ class BaseHandler(object):

        if not user:
            raise TypeError((
                'Ftrack user with {} "{}" was not found!'.format(key, value)
            ))
                'Ftrack user with {} "{}" was not found!'
            ).format(key, value))

        user_id = user['id']
@ -495,7 +504,7 @@ class BaseHandler(object):
        ).format(user_id)

        self.session.event_hub.publish(
            fa_session.ftrack_api.event.base.Event(
            ftrack_api.event.base.Event(
                topic='ftrack.action.trigger-user-interface',
                data=dict(
                    type='widget',
@ -523,7 +532,7 @@ class BaseHandler(object):
        else:
            first = False

            subtitle = {'type': 'label', 'value':'<h3>{}</h3>'.format(key)}
            subtitle = {'type': 'label', 'value': '<h3>{}</h3>'.format(key)}
            items.append(subtitle)
            if isinstance(value, list):
                for item in value:
@ -583,7 +592,7 @@ class BaseHandler(object):

        # Create and trigger event
        session.event_hub.publish(
            fa_session.ftrack_api.event.base.Event(
            ftrack_api.event.base.Event(
                topic=topic,
                data=_event_data,
                source=dict(user=_user_data)
@ -593,3 +602,24 @@ class BaseHandler(object):
        self.log.debug(
            "Action \"{}\" Triggered successfully".format(action_name)
        )

    def trigger_event(
        self, topic, event_data={}, session=None, source=None,
        event=None, on_error="ignore"
    ):
        if session is None:
            session = self.session

        if not source and event:
            source = event.get("source")
        # Create and trigger event
        event = ftrack_api.event.base.Event(
            topic=topic,
            data=event_data,
            source=source
        )
        session.event_hub.publish(event, on_error=on_error)

        self.log.debug((
            "Publishing event: {}"
        ).format(str(event.__dict__)))
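The new `trigger_event` helper wraps the publish boilerplate shown in the earlier hunks. A usage sketch — the topic name and payload are illustrative only, not part of this changeset:

    # Inside a handler method; `self` is a BaseHandler subclass and
    # `source_event` is assumed to be the incoming ftrack event.
    self.trigger_event(
        "pype.custom.topic",              # hypothetical topic
        event_data={"project": "demo"},   # hypothetical payload
        event=source_event                # reuse the incoming event's source
    )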
@ -26,6 +26,7 @@ class BaseEvent(BaseHandler):
            try:
                func(*args, **kwargs)
            except Exception as exc:
                self.session.rollback()
                self.log.error(
                    'Event "{}" Failed: {}'.format(
                        self.__class__.__name__, str(exc)
@ -50,6 +50,19 @@ class DbConnector(object):
        self._database = None
        self._is_installed = False

    def __getitem__(self, key):
        # gives direct access to a collection without setting `active_table`
        return self._database[key]

    def __getattribute__(self, attr):
        # not all methods of the PyMongo database are implemented here;
        # with this fallback it is possible to use them too
        try:
            return super(DbConnector, self).__getattribute__(attr)
        except AttributeError:
            cur_proj = self.Session["AVALON_PROJECT"]
            return self._database[cur_proj].__getattribute__(attr)

    def install(self):
        """Establish a persistent connection to the database"""
        if self._is_installed:
@ -4,9 +4,9 @@ import threading
import time
from Qt import QtCore, QtGui, QtWidgets

from pype.vendor import ftrack_api
import ftrack_api
from pypeapp import style
from pype.ftrack import FtrackServer, credentials
from pype.ftrack import FtrackServer, check_ftrack_url, credentials
from . import login_dialog

from pype import api as pype
@ -24,7 +24,8 @@ class FtrackModule:
        self.thread_timer = None

        self.bool_logged = False
        self.bool_action_server = False
        self.bool_action_server_running = False
        self.bool_action_thread_running = False
        self.bool_timer_event = False

    def show_login_widget(self):
@ -74,28 +75,50 @@ class FtrackModule:

    # Actions part
    def start_action_server(self):
        self.bool_action_thread_running = True
        self.set_menu_visibility()
        if (
            self.thread_action_server is not None and
            self.bool_action_thread_running is False
        ):
            self.stop_action_server()

        if self.thread_action_server is None:
            self.thread_action_server = threading.Thread(
                target=self.set_action_server
            )
            self.thread_action_server.daemon = True
            self.thread_action_server.start()

        log.info("Ftrack action server launched")
        self.bool_action_server = True
        self.set_menu_visibility()

    def set_action_server(self):
        try:
            self.action_server.run_server()
        except Exception as exc:
            log.error(
                "Ftrack Action server crashed! Please try to start again.",
                exc_info=True
        first_check = True
        while self.bool_action_thread_running is True:
            if not check_ftrack_url(os.environ['FTRACK_SERVER']):
                if first_check:
                    log.warning(
                        "Could not connect to Ftrack server"
                    )
                    first_check = False
                time.sleep(1)
                continue
            log.info(
                "Connected to Ftrack server. Running actions session"
            )
            # TODO show message to user
            self.bool_action_server = False
            try:
                self.bool_action_server_running = True
                self.set_menu_visibility()
                self.action_server.run_server()
                if self.bool_action_thread_running:
                    log.debug("Ftrack action server has stopped")
            except Exception:
                log.warning(
                    "Ftrack Action server crashed. Trying to connect again",
                    exc_info=True
                )
                self.bool_action_server_running = False
                self.set_menu_visibility()
                first_check = True

        self.bool_action_thread_running = False

    def reset_action_server(self):
        self.stop_action_server()
@ -103,16 +126,21 @@ class FtrackModule:

    def stop_action_server(self):
        try:
            self.bool_action_thread_running = False
            self.action_server.stop_session()
            if self.thread_action_server is not None:
                self.thread_action_server.join()
                self.thread_action_server = None

            log.info("Ftrack action server stopped")
            self.bool_action_server = False
            log.info("Ftrack action server was forced to stop")

            self.bool_action_server_running = False
            self.set_menu_visibility()
        except Exception as e:
            log.error("During Killing action server: {0}".format(e))
        except Exception:
            log.warning(
                "Error has happened during killing action server",
                exc_info=True
            )

    # Definition of Tray menu
    def tray_menu(self, parent_menu):
@ -158,6 +186,9 @@ class FtrackModule:
    def tray_start(self):
        self.validate()

    def tray_exit(self):
        self.stop_action_server()

    # Definition of visibility of each menu action
    def set_menu_visibility(self):
@ -170,9 +201,9 @@ class FtrackModule:
            self.stop_timer_thread()
            return

        self.aRunActionS.setVisible(not self.bool_action_server)
        self.aResetActionS.setVisible(self.bool_action_server)
        self.aStopActionS.setVisible(self.bool_action_server)
        self.aRunActionS.setVisible(not self.bool_action_thread_running)
        self.aResetActionS.setVisible(self.bool_action_thread_running)
        self.aStopActionS.setVisible(self.bool_action_thread_running)

        if self.bool_timer_event is False:
            self.start_timer_thread()
53 pype/lib.py
@ -1,16 +1,12 @@
import os
import re
import logging
import importlib
import itertools
import contextlib
import subprocess
import inspect

from .vendor import pather
from .vendor.pather.error import ParseError

import avalon.io as io
from avalon import io
import avalon.api
import avalon
@ -21,12 +17,15 @@ log = logging.getLogger(__name__)
def _subprocess(args):
    """Convenience method for getting output errors for subprocess."""

    # make sure environment contains only strings
    env = {k: str(v) for k, v in os.environ.items()}

    proc = subprocess.Popen(
        args,
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT,
        stdin=subprocess.PIPE,
        env=os.environ
        env=env
    )

    output = proc.communicate()[0]
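The `env` change above matters on platforms where `subprocess.Popen` rejects environment values that are not plain strings (for example integers injected into `os.environ` by tooling). A quick check of the coercion, assuming nothing about the rest of the module:

    import os

    env = {k: str(v) for k, v in os.environ.items()}
    assert all(isinstance(v, str) for v in env.values())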
@ -562,7 +561,7 @@ def get_subsets(asset_name,
    find_dict = {"type": "representation",
                 "parent": version_sel["_id"]}

    filter_repr = {"$or": [{"name": repr} for repr in representations]}
    filter_repr = {"name": {"$in": representations}}

    find_dict.update(filter_repr)
    repres_out = [i for i in io.find(find_dict)]
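The `$or` list of single-field matches and the `$in` operator are equivalent here, but `$in` is shorter and lets MongoDB satisfy the filter with a single index lookup. For representations `["exr", "mov"]` both filters match the same documents:

    {"$or": [{"name": "exr"}, {"name": "mov"}]}
    {"name": {"$in": ["exr", "mov"]}}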
@ -572,3 +571,43 @@ def get_subsets(asset_name,
        "representaions": repres_out}

    return output_dict


class CustomNone:
    """Created object can be used as custom None (not equal to None).

    WARNING: Multiple created objects are not equal either.
    Example:
        >>> a = CustomNone()
        >>> a == None
        False
        >>> b = CustomNone()
        >>> a == b
        False
        >>> a == a
        True
    """

    def __init__(self):
        """Create uuid as identifier for custom None."""
        import uuid
        self.identifier = str(uuid.uuid4())

    def __bool__(self):
        """Return False (like default None)."""
        return False

    def __eq__(self, other):
        """Equality is compared by identifier value."""
        if type(other) == type(self):
            if other.identifier == self.identifier:
                return True
        return False

    def __str__(self):
        """Return value of identifier when converted to string."""
        return self.identifier

    def __repr__(self):
        """Representation of custom None."""
        return "<CustomNone-{}>".format(str(self.identifier))
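A typical use of such a sentinel is distinguishing "argument not passed" from "argument explicitly None". A minimal sketch using the class above (the helper name is hypothetical):

    _NOT_SET = CustomNone()

    def get_value(data, key, default=_NOT_SET):
        value = data.get(key, default)
        if value is _NOT_SET:
            # the key was missing and no explicit default was given
            raise KeyError("'{}' is required".format(key))
        return value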
@ -33,5 +33,7 @@ class LogsWindow(QtWidgets.QWidget):

    def on_selection_changed(self):
        index = self.logs_widget.selected_log()
        if not index or not index.isValid():
            return
        node = index.data(self.logs_widget.model.NodeRole)
        self.log_detail.set_detail(node)
@ -1,11 +1,7 @@
import datetime
import inspect
import getpass
from Qt import QtCore, QtWidgets, QtGui
from PyQt5.QtCore import QVariant
from .models import LogModel

from .lib import preserve_states


class SearchComboBox(QtWidgets.QComboBox):
    """Searchable ComboBox with empty placeholder value as first value"""

@ -53,6 +49,7 @@ class SearchComboBox(QtWidgets.QComboBox):

        return text


class CheckableComboBox2(QtWidgets.QComboBox):
    def __init__(self, parent=None):
        super(CheckableComboBox2, self).__init__(parent)
@ -96,9 +93,11 @@ class SelectableMenu(QtWidgets.QMenu):
        else:
            super(SelectableMenu, self).mouseReleaseEvent(event)


class CustomCombo(QtWidgets.QWidget):

    selection_changed = QtCore.Signal()
    checked_changed = QtCore.Signal(bool)

    def __init__(self, title, parent=None):
        super(CustomCombo, self).__init__(parent)
@ -127,12 +126,27 @@ class CustomCombo(QtWidgets.QWidget):
        self.toolmenu.clear()
        self.addItems(items)

    def select_items(self, items, ignore_input=False):
        if not isinstance(items, list):
            items = [items]

        for action in self.toolmenu.actions():
            check = True
            if (
                action.text() in items and ignore_input or
                action.text() not in items and not ignore_input
            ):
                check = False

            action.setChecked(check)

    def addItems(self, items):
        for item in items:
            action = self.toolmenu.addAction(item)
            action.setCheckable(True)
            action.setChecked(True)
            self.toolmenu.addAction(action)
            action.setChecked(True)
            action.triggered.connect(self.checked_changed)

    def items(self):
        for action in self.toolmenu.actions():
@ -186,15 +200,42 @@ class CheckableComboBox(QtWidgets.QComboBox):
        for text, checked in items:
            text_item = QtGui.QStandardItem(text)
            checked_item = QtGui.QStandardItem()
            checked_item.setData(QVariant(checked), QtCore.Qt.CheckStateRole)
            checked_item.setData(
                QtCore.QVariant(checked), QtCore.Qt.CheckStateRole
            )
            self.model.appendRow([text_item, checked_item])


class FilterLogModel(QtCore.QSortFilterProxyModel):
    sub_dict = ["$gt", "$lt", "$not"]
    def __init__(self, key_values, parent=None):
        super(FilterLogModel, self).__init__(parent)
        self.allowed_key_values = key_values

    def filterAcceptsRow(self, row, parent):
        """Reimplemented from the base class."""
        model = self.sourceModel()
        for key, values in self.allowed_key_values.items():
            col_indx = model.COLUMNS.index(key)
            value = model.index(row, col_indx, parent).data(
                QtCore.Qt.DisplayRole
            )
            if value not in values:
                return False
        return True


class LogsWidget(QtWidgets.QWidget):
    """A widget that lists the published subsets for an asset"""

    active_changed = QtCore.Signal()

    _level_order = [
        "DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"
    ]

    def __init__(self, parent=None):
        super(LogsWidget, self).__init__(parent=parent)
@ -202,47 +243,45 @@ class LogsWidget(QtWidgets.QWidget):

        filter_layout = QtWidgets.QHBoxLayout()

        # user_filter = SearchComboBox(self, "Users")
        user_filter = CustomCombo("Users", self)
        users = model.dbcon.distinct("user")
        user_filter.populate(users)
        user_filter.selection_changed.connect(self.user_changed)
        user_filter.checked_changed.connect(self.user_changed)
        user_filter.select_items(getpass.getuser())

        level_filter = CustomCombo("Levels", self)
        # levels = [(level, True) for level in model.dbcon.distinct("level")]
        levels = model.dbcon.distinct("level")
        level_filter.addItems(levels)
        _levels = []
        for level in self._level_order:
            if level in levels:
                _levels.append(level)
        level_filter.populate(_levels)
        level_filter.checked_changed.connect(self.level_changed)

        date_from_label = QtWidgets.QLabel("From:")
        date_filter_from = QtWidgets.QDateTimeEdit()

        date_from_layout = QtWidgets.QVBoxLayout()
        date_from_layout.addWidget(date_from_label)
        date_from_layout.addWidget(date_filter_from)

        # now = datetime.datetime.now()
        # QtCore.QDateTime(now.year, now.month, now.day, now.hour, now.minute, second=0, msec=0, timeSpec=0)
        date_to_label = QtWidgets.QLabel("To:")
        date_filter_to = QtWidgets.QDateTimeEdit()

        date_to_layout = QtWidgets.QVBoxLayout()
        date_to_layout.addWidget(date_to_label)
        date_to_layout.addWidget(date_filter_to)
        # date_from_label = QtWidgets.QLabel("From:")
        # date_filter_from = QtWidgets.QDateTimeEdit()
        #
        # date_from_layout = QtWidgets.QVBoxLayout()
        # date_from_layout.addWidget(date_from_label)
        # date_from_layout.addWidget(date_filter_from)
        #
        # date_to_label = QtWidgets.QLabel("To:")
        # date_filter_to = QtWidgets.QDateTimeEdit()
        #
        # date_to_layout = QtWidgets.QVBoxLayout()
        # date_to_layout.addWidget(date_to_label)
        # date_to_layout.addWidget(date_filter_to)

        filter_layout.addWidget(user_filter)
        filter_layout.addWidget(level_filter)
        filter_layout.setAlignment(QtCore.Qt.AlignLeft)

        filter_layout.addLayout(date_from_layout)
        filter_layout.addLayout(date_to_layout)
        # filter_layout.addLayout(date_from_layout)
        # filter_layout.addLayout(date_to_layout)

        view = QtWidgets.QTreeView(self)
        view.setAllColumnsShowFocus(True)

        # # Set view delegates
        # time_delegate = PrettyTimeDelegate()
        # column = model.COLUMNS.index("time")
        # view.setItemDelegateForColumn(column, time_delegate)

        layout = QtWidgets.QVBoxLayout(self)
        layout.setContentsMargins(0, 0, 0, 0)
        layout.addLayout(filter_layout)
@ -255,34 +294,54 @@ class LogsWidget(QtWidgets.QWidget):
            QtCore.Qt.AscendingOrder
        )

        view.setModel(model)
        key_val = {
            "user": users,
            "level": levels
        }
        proxy_model = FilterLogModel(key_val, view)
        proxy_model.setSourceModel(model)
        view.setModel(proxy_model)

        view.customContextMenuRequested.connect(self.on_context_menu)
        view.selectionModel().selectionChanged.connect(self.active_changed)
        # user_filter.connect()

        # TODO remove if nothing will affect...
        # header = self.view.header()
        # WARNING this is cool but slows down widget a lot
        # header = view.header()
        # # Enforce the columns to fit the data (purely cosmetic)
        # if Qt.__binding__ in ("PySide2", "PyQt5"):
        #     header.setSectionResizeMode(QtWidgets.QHeaderView.ResizeToContents)
        # else:
        #     header.setResizeMode(QtWidgets.QHeaderView.ResizeToContents)

        # Set signals

        # prepare
        model.refresh()

        # Store to memory
        self.model = model
        self.proxy_model = proxy_model
        self.view = view

        self.user_filter = user_filter
        self.level_filter = level_filter

    def user_changed(self):
        valid_actions = []
        for action in self.user_filter.items():
            print(action)
            if action.isChecked():
                valid_actions.append(action.text())

        self.proxy_model.allowed_key_values["user"] = valid_actions
        self.proxy_model.invalidate()

    def level_changed(self):
        valid_actions = []
        for action in self.level_filter.items():
            if action.isChecked():
                valid_actions.append(action.text())

        self.proxy_model.allowed_key_values["level"] = valid_actions
        self.proxy_model.invalidate()


    def on_context_menu(self, point):
        # TODO will there be any actions? it's ready
@ -3,6 +3,7 @@
import re
import os
import uuid
import math

import bson
import json
|
|||
# pull from mapping
|
||||
# this should convert float string to float and int to int
|
||||
# so 25.0 is converted to 25, but 23.98 will be still float.
|
||||
decimals = int(str(fps-int(fps))[2:])
|
||||
if decimals == 0:
|
||||
fps = int(fps)
|
||||
dec, ipart = math.modf(fps)
|
||||
if dec == 0.0:
|
||||
fps = int(ipart)
|
||||
|
||||
unit = fps_mapping.get(str(fps), None)
|
||||
if unit is None:
|
||||
raise ValueError("Unsupported FPS value: `%s`" % fps)
|
||||
|
|
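`math.modf` splits a float into its fractional and integral parts, which makes the integer-FPS coercion robust for whole values like `25.0` while leaving `23.98` untouched:

    >>> import math
    >>> math.modf(25.0)
    (0.0, 25.0)
    >>> dec, ipart = math.modf(23.98)
    >>> dec == 0.0   # non-zero fractional part, so fps stays a float
    False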
@ -1861,6 +1863,7 @@ def set_context_settings():

    # Set project fps
    fps = asset_data.get("fps", project_data.get("fps", 25))
    api.Session["AVALON_FPS"] = fps
    set_scene_fps(fps)

    # Set project resolution
@ -43,8 +43,10 @@ class MusterModule:
            self.aShowLogin.trigger()

        if "RestApiServer" in modules:
            def api_show_login():
                self.aShowLogin.trigger()
            modules["RestApiServer"].register_callback(
                "muster/show_login", api_callback, "post"
                "/show_login", api_show_login, "muster", "post"
            )

    # Definition of Tray menu
@ -112,7 +112,9 @@ def install():
    # Disable all families except for the ones we explicitly want to see
    family_states = [
        "write",
        "review"
        "review",
        "nukenodes",
        "gizmo"
    ]

    avalon.data["familiesStateDefault"] = False
211 pype/nuke/lib.py
@ -6,6 +6,7 @@ from collections import OrderedDict

from avalon import api, io, lib
import avalon.nuke
from avalon.nuke import lib as anlib
import pype.api as pype

import nuke
@ -105,6 +106,10 @@ def writes_version_sync():

    for each in nuke.allNodes():
        if each.Class() == 'Write':
            # check if the node is avalon tracked
            if "AvalonTab" not in each.knobs():
                continue

            avalon_knob_data = avalon.nuke.get_avalon_knob_data(
                each, ['avalon:', 'ak:'])
@ -1190,3 +1195,209 @@ class BuildWorkfile(WorkfileSettings):

    def position_up(self, multiply=1):
        self.ypos -= (self.ypos_size * multiply) + self.ypos_gap


class Exporter_review_lut:
    """
    Generator object for review lut from Nuke

    Args:
        klass (pyblish.plugin): pyblish plugin parent

    """
    _temp_nodes = []
    data = dict({
        "representations": list()
    })

    def __init__(self,
                 klass,
                 instance,
                 name=None,
                 ext=None,
                 cube_size=None,
                 lut_size=None,
                 lut_style=None):

        self.log = klass.log
        self.instance = instance

        self.name = name or "baked_lut"
        self.ext = ext or "cube"
        self.cube_size = cube_size or 32
        self.lut_size = lut_size or 1024
        self.lut_style = lut_style or "linear"

        self.stagingDir = self.instance.data["stagingDir"]
        self.collection = self.instance.data.get("collection", None)

        # set frame start / end and file name to self
        self.get_file_info()

        self.log.info("File info was set...")

        self.file = self.fhead + self.name + ".{}".format(self.ext)
        self.path = os.path.join(self.stagingDir, self.file).replace("\\", "/")

    def generate_lut(self):
        # ---------- start nodes creation

        # CMSTestPattern
        cms_node = nuke.createNode("CMSTestPattern")
        cms_node["cube_size"].setValue(self.cube_size)
        # connect
        self._temp_nodes.append(cms_node)
        self.previous_node = cms_node
        self.log.debug("CMSTestPattern... `{}`".format(self._temp_nodes))

        # Node View Process
        ipn = self.get_view_process_node()
        if ipn is not None:
            # connect
            ipn.setInput(0, self.previous_node)
            self._temp_nodes.append(ipn)
            self.previous_node = ipn
            self.log.debug("ViewProcess... `{}`".format(self._temp_nodes))

        # OCIODisplay
        dag_node = nuke.createNode("OCIODisplay")
        # connect
        dag_node.setInput(0, self.previous_node)
        self._temp_nodes.append(dag_node)
        self.previous_node = dag_node
        self.log.debug("OCIODisplay... `{}`".format(self._temp_nodes))

        # GenerateLUT
        gen_lut_node = nuke.createNode("GenerateLUT")
        gen_lut_node["file"].setValue(self.path)
        gen_lut_node["file_type"].setValue(".{}".format(self.ext))
        gen_lut_node["lut1d"].setValue(self.lut_size)
        gen_lut_node["style1d"].setValue(self.lut_style)
        # connect
        gen_lut_node.setInput(0, self.previous_node)
        self._temp_nodes.append(gen_lut_node)
        self.log.debug("GenerateLUT... `{}`".format(self._temp_nodes))

        # ---------- end nodes creation

        # Export lut file
        nuke.execute(
            gen_lut_node.name(),
            int(self.first_frame),
            int(self.first_frame))

        self.log.info("Exported...")

        # ---------- generate representation data
        self.get_representation_data()

        self.log.debug("Representation... `{}`".format(self.data))

        # ---------- Clean up
        for node in self._temp_nodes:
            nuke.delete(node)
        self.log.info("Deleted nodes...")

        return self.data

    def get_file_info(self):
        if self.collection:
            self.log.debug("Collection: `{}`".format(self.collection))
            # get path
            self.fname = os.path.basename(self.collection.format(
                "{head}{padding}{tail}"))
            self.fhead = self.collection.format("{head}")

            # get first and last frame
            self.first_frame = min(self.collection.indexes)
            self.last_frame = max(self.collection.indexes)
        else:
            self.fname = os.path.basename(self.instance.data.get("path", None))
            self.fhead = os.path.splitext(self.fname)[0] + "."
            self.first_frame = self.instance.data.get("frameStart", None)
            self.last_frame = self.instance.data.get("frameEnd", None)

        if "#" in self.fhead:
            self.fhead = self.fhead.replace("#", "")[:-1]

    def get_representation_data(self):

        repre = {
            'name': self.name,
            'ext': self.ext,
            'files': self.file,
            "stagingDir": self.stagingDir,
            "anatomy_template": "publish",
            "tags": [self.name.replace("_", "-")]
        }

        self.data["representations"].append(repre)

    def get_view_process_node(self):
        """
        Will get any active view process.

        Arguments:
            self (class): in object definition

        Returns:
            nuke.Node: copy node of Input Process node
        """
        anlib.reset_selection()
        ipn_orig = None
        for v in [n for n in nuke.allNodes()
                  if "Viewer" in n.Class()]:
            ip = v['input_process'].getValue()
            ipn = v['input_process_node'].getValue()
            if "VIEWER_INPUT" not in ipn and ip:
                ipn_orig = nuke.toNode(ipn)
                ipn_orig.setSelected(True)

        if ipn_orig:
            # copy selected to clipboard
            nuke.nodeCopy('%clipboard%')
            # reset selection
            anlib.reset_selection()
            # paste node and selection is on it only
            nuke.nodePaste('%clipboard%')
            # assign to variable
            ipn = nuke.selectedNode()

            return ipn


def get_dependent_nodes(nodes):
    """Get all dependent nodes connected to the list of nodes.

    Looking for connections outside of the nodes in incoming argument.

    Arguments:
        nodes (list): list of nuke.Node objects

    Returns:
        connections_in: dictionary of nodes and its dependencies
        connections_out: dictionary of nodes and its dependency
    """

    connections_in = dict()
    connections_out = dict()
    node_names = [n.name() for n in nodes]
    for node in nodes:
        inputs = node.dependencies()
        outputs = node.dependent()
        # collect all inputs outside
        test_in = [(i, n) for i, n in enumerate(inputs)
                   if n.name() not in node_names]
        if test_in:
            connections_in.update({
                node: test_in
            })
        # collect all outputs outside
        test_out = [i for i in outputs if i.name() not in node_names]
        if test_out:
            # only one dependent node is allowed
            connections_out.update({
                node: test_out[-1]
            })

    return connections_in, connections_out
64 pype/nuke/utils.py Normal file
@ -0,0 +1,64 @@
import os
import nuke
from avalon.nuke import lib as anlib


def get_node_outputs(node):
    '''
    Return a dictionary of the nodes and pipes that are connected to node
    '''
    dep_dict = {}
    dependencies = node.dependent(nuke.INPUTS | nuke.HIDDEN_INPUTS)
    for d in dependencies:
        dep_dict[d] = []
        for i in range(d.inputs()):
            if d.input(i) == node:
                dep_dict[d].append(i)
    return dep_dict


def is_node_gizmo(node):
    '''
    Return True if node is a gizmo
    '''
    return 'gizmo_file' in node.knobs()


def gizmo_is_nuke_default(gizmo):
    '''Check if gizmo is in the default install path'''
    plug_dir = os.path.join(os.path.dirname(
        nuke.env['ExecutablePath']), 'plugins')
    return gizmo.filename().startswith(plug_dir)


def bake_gizmos_recursively(in_group=nuke.Root()):
    """Convert gizmos to groups.

    Arguments:
        in_group (nuke.Node)[optional]: group node or all nodes
    """
    # preserve selection after all is done
    with anlib.maintained_selection():
        # jump to the group
        with in_group:
            for node in nuke.allNodes():
                if is_node_gizmo(node) and not gizmo_is_nuke_default(node):
                    with node:
                        outputs = get_node_outputs(node)
                        group = node.makeGroup()
                    # Reconnect inputs and outputs if any
                    if outputs:
                        for n, pipes in outputs.items():
                            for i in pipes:
                                n.setInput(i, group)
                    for i in range(node.inputs()):
                        group.setInput(i, node.input(i))
                    # set node position and name
                    group.setXYpos(node.xpos(), node.ypos())
                    name = node.name()
                    nuke.delete(node)
                    group.setName(name)
                    node = group

                if node.Class() == "Group":
                    bake_gizmos_recursively(node)
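A typical call site would bake every non-default gizmo in the current script before publishing; a hedged sketch (the import path assumes the new module location shown above):

    import nuke
    from pype.nuke.utils import bake_gizmos_recursively

    # Convert all user gizmos in the whole script (and nested groups) to
    # plain groups, so downstream machines do not need the gizmo files.
    bake_gizmos_recursively(nuke.Root())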
@ -1,9 +1,6 @@
import os
import pyblish.api
from avalon import (
    io,
    api as avalon
)
from avalon import api as avalon
from pype import api as pype
import json
from pathlib import Path
@ -1,5 +1,6 @@
import os
import sys
import six
import pyblish.api
import clique
@ -125,6 +126,12 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin):
                metadata=asset_metadata
            )
        )
        try:
            session.commit()
        except Exception:
            tp, value, tb = sys.exc_info()
            session.rollback()
            six.reraise(tp, value, tb)

        # Adding metadata
        existing_asset_metadata = asset_entity["metadata"]
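This commit/rollback/re-raise block is repeated around every `session.commit()` in the hunks below; it could be factored into a tiny helper. A sketch that is not present in this changeset (the helper name is hypothetical):

    import sys

    import six

    def commit_or_reraise(session):
        """Commit the ftrack session; on failure roll back and re-raise
        with the original traceback (Python 2/3 compatible via six)."""
        try:
            session.commit()
        except Exception:
            tp, value, tb = sys.exc_info()
            session.rollback()
            six.reraise(tp, value, tb)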
@ -137,8 +144,6 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin):
            "version": 0,
            "asset": asset_entity,
        }
        if task:
            assetversion_data['task'] = task

        assetversion_data.update(data.get("assetversion_data", {}))
@ -150,6 +155,9 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin):
        # due to a ftrack_api bug where you can't add metadata on creation.
        assetversion_metadata = assetversion_data.pop("metadata", {})

        if task:
            assetversion_data['task'] = task

        # Create a new entity if none exists.
        if not assetversion_entity:
            assetversion_entity = session.create(
@ -162,6 +170,12 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin):
                metadata=assetversion_metadata
            )
        )
        try:
            session.commit()
        except Exception:
            tp, value, tb = sys.exc_info()
            session.rollback()
            six.reraise(tp, value, tb)

        # Adding metadata
        existing_assetversion_metadata = assetversion_entity["metadata"]
@ -170,7 +184,12 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin):

        # Have to commit the version and asset, because location can't
        # determine the final location without.
        session.commit()
        try:
            session.commit()
        except Exception:
            tp, value, tb = sys.exc_info()
            session.rollback()
            six.reraise(tp, value, tb)

        # Component
        # Get existing entity.
@ -209,7 +228,12 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin):
                session.delete(member)
                del(member)

            session.commit()
            try:
                session.commit()
            except Exception:
                tp, value, tb = sys.exc_info()
                session.rollback()
                six.reraise(tp, value, tb)

            # Reset members in memory
            if "members" in component_entity.keys():
@ -320,4 +344,9 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin):
            )
        else:
            # Commit changes.
            session.commit()
            try:
                session.commit()
            except Exception:
                tp, value, tb = sys.exc_info()
                session.rollback()
                six.reraise(tp, value, tb)
31 pype/plugins/ftrack/publish/integrate_ftrack_comments.py Normal file
@ -0,0 +1,31 @@
import sys
import pyblish.api
import six


class IntegrateFtrackComments(pyblish.api.InstancePlugin):
    """Create comments in Ftrack."""

    order = pyblish.api.IntegratorOrder
    label = "Integrate Comments to Ftrack."
    families = ["shot"]

    def process(self, instance):
        session = instance.context.data["ftrackSession"]

        entity = session.query(
            "Shot where name is \"{}\"".format(instance.data["item"].name())
        ).one()

        notes = []
        for comment in instance.data["comments"]:
            notes.append(session.create("Note", {"content": comment}))

        entity["notes"].extend(notes)

        try:
            session.commit()
        except Exception:
            tp, value, tb = sys.exc_info()
            session.rollback()
            six.reraise(tp, value, tb)
@ -28,7 +28,8 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin):
        'plate': 'img',
        'audio': 'audio',
        'workfile': 'scene',
        'animation': 'cache'
        'animation': 'cache',
        'image': 'img'
    }

    def process(self, instance):
@ -37,6 +38,8 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin):

        if instance.data.get('version'):
            version_number = int(instance.data.get('version'))
        else:
            raise ValueError("Instance version not set")

        family = instance.data['family'].lower()
@ -113,6 +116,7 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin):
            },
            "assetversion_data": {
                "version": version_number,
                "comment": instance.context.data.get("comment", "")
            },
            "component_data": component_data,
            "component_path": comp['published_path'],
@ -1,3 +1,6 @@
import sys

import six
import pyblish.api
from avalon import io
@ -66,9 +69,10 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):

        # try to find if entity already exists
        else:
            query = 'TypedContext where name is "{0}" and project.full_name is "{1}"'.format(
                entity_name, self.ft_project["full_name"]
            )
            query = (
                'TypedContext where name is "{0}" and '
                'project_id is "{1}"'
            ).format(entity_name, self.ft_project["id"])
            try:
                entity = self.session.query(query).one()
            except Exception:
@ -98,7 +102,12 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):
        for instance in instances:
            instance.data['ftrackEntity'] = entity

        self.session.commit()
        try:
            self.session.commit()
        except Exception:
            tp, value, tb = sys.exc_info()
            self.session.rollback()
            six.reraise(tp, value, tb)

        # TASKS
        tasks = entity_data.get('tasks', [])
@ -121,11 +130,21 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):
                task_type=task,
                parent=entity
            )
            self.session.commit()
            try:
                self.session.commit()
            except Exception:
                tp, value, tb = sys.exc_info()
                self.session.rollback()
                six.reraise(tp, value, tb)

        # Incoming links.
        self.create_links(entity_data, entity)
        self.session.commit()
        try:
            self.session.commit()
        except Exception:
            tp, value, tb = sys.exc_info()
            self.session.rollback()
            six.reraise(tp, value, tb)

        if 'childs' in entity_data:
            self.import_to_ftrack(
@ -135,7 +154,12 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):
        # Clear existing links.
        for link in entity.get("incoming_links", []):
            self.session.delete(link)
        self.session.commit()
        try:
            self.session.commit()
        except Exception:
            tp, value, tb = sys.exc_info()
            self.session.rollback()
            six.reraise(tp, value, tb)

        # Create new links.
        for input in entity_data.get("inputs", []):
@ -171,7 +195,12 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):
            self.log.info(self.task_types)
            task['type'] = self.task_types[task_type]

        self.session.commit()
        try:
            self.session.commit()
        except Exception:
            tp, value, tb = sys.exc_info()
            self.session.rollback()
            six.reraise(tp, value, tb)

        return task
@ -180,6 +209,11 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):
            'name': name,
            'parent': parent
        })
        self.session.commit()
        try:
            self.session.commit()
        except Exception:
            tp, value, tb = sys.exc_info()
            self.session.rollback()
            six.reraise(tp, value, tb)

        return entity
@ -3,7 +3,7 @@ import json
import re

import pyblish.api
from pype.vendor import clique
import clique


class CollectJSON(pyblish.api.ContextPlugin):
@ -4,7 +4,7 @@ import datetime
import time

import pyblish.api
from pype.vendor import clique
import clique


class ExtractJSON(pyblish.api.ContextPlugin):
@ -1,7 +1,7 @@
import os
import pyblish.api
import subprocess
from pype.vendor import clique
import clique


class ExtractQuicktimeEXR(pyblish.api.InstancePlugin):
@ -40,6 +40,15 @@ class CleanUp(pyblish.api.InstancePlugin):
    active = True

    def process(self, instance):
        # Get the errored instances
        failed = []
        for result in instance.context.data["results"]:
            if (result["error"] is not None and result["instance"] is not None
                    and result["instance"] not in failed):
                failed.append(result["instance"])
        assert instance not in failed, ("Result of '{}' instance "
            "was not successful".format(instance.data["name"]))

        if [ef for ef in self.exclude_families
                if instance.data["family"] in ef]:
            return
@ -100,6 +100,8 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin):
    label = "RenderedFrames"

    def process(self, context):
        pixel_aspect = 1
        lut_path = None
        if os.environ.get("PYPE_PUBLISH_PATHS"):
            paths = os.environ["PYPE_PUBLISH_PATHS"].split(os.pathsep)
            self.log.info("Collecting paths: {}".format(paths))
@ -144,6 +146,12 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin):
                self.log.info("setting session using metadata")
                api.Session.update(session)
                os.environ.update(session)
                instance = metadata.get("instance")
                if instance:
                    instance_family = instance.get("family")
                    pixel_aspect = instance.get("pixelAspect", 1)
                    lut_path = instance.get("lutPath", None)


            else:
                # Search in directory
@ -181,6 +189,8 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin):
            families.append("ftrack")
        if "review" not in families:
            families.append("review")
        if "write" in instance_family:
            families.append("write")

        for collection in collections:
            instance = context.create_instance(str(collection))
@ -197,6 +207,11 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin):
            start = data.get("frameStart", indices[0])
            end = data.get("frameEnd", indices[-1])

            self.log.debug("Collected pixel_aspect:\n"
                           "{}".format(pixel_aspect))
            self.log.debug("type pixel_aspect:\n"
                           "{}".format(type(pixel_aspect)))

            # root = os.path.normpath(root)
            # self.log.info("Source: {}".format(data.get("source", "")))
|
@ -212,8 +227,11 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin):
|
|||
"frameStart": start,
|
||||
"frameEnd": end,
|
||||
"fps": fps,
|
||||
"source": data.get('source', '')
|
||||
"source": data.get('source', ''),
|
||||
"pixelAspect": pixel_aspect,
|
||||
})
|
||||
if lut_path:
|
||||
instance.data.update({"lutPath": lut_path})
|
||||
instance.append(collection)
|
||||
instance.context.data['fps'] = fps
|
||||
|
||||
|
|
|
|||
|
|
@ -24,4 +24,4 @@ class CollectSceneVersion(pyblish.api.ContextPlugin):
|
|||
rootVersion = pype.get_version_from_path(filename)
|
||||
context.data['version'] = rootVersion
|
||||
|
||||
self.log.info('Scene Version: %s' % context.data('version'))
|
||||
self.log.info('Scene Version: %s' % context.data.get('version'))
|
||||
|
|
|
|||
|
|
@ -85,3 +85,6 @@ class CollectTemplates(pyblish.api.InstancePlugin):
        instance.data["assumedDestination"] = os.path.dirname(
            (anatomy.format(template_data))["publish"]["path"]
        )
        self.log.info("Assumed Destination has been created...")
        self.log.debug("__ assumedTemplateData: `{}`".format(instance.data["assumedTemplateData"]))
        self.log.debug("__ template: `{}`".format(instance.data["template"]))
@@ -29,11 +29,16 @@ class ExtractBurnin(pype.api.Extractor):

        if instance.context.data.get('version'):
            version = "v" + str(instance.context.data['version'])

        frame_start = int(instance.data.get("frameStart") or 0)
        frame_end = int(instance.data.get("frameEnd") or 1)
        duration = frame_end - frame_start + 1
        prep_data = {
            "username": instance.context.data['user'],
            "asset": os.environ['AVALON_ASSET'],
            "task": os.environ['AVALON_TASK'],
            "start_frame": int(instance.data["frameStart"]),
            "frame_start": frame_start,
            "frame_end": frame_end,
            "duration": duration,
            "version": version
        }
        self.log.debug("__ prep_data: {}".format(prep_data))

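A quick worked example of the duration computed above (frame values are illustrative):

    # e.g. frameStart=1001, frameEnd=1100 (an assumed shot range):
    # duration = 1100 - 1001 + 1 = 100 frames; the range is inclusive
    # at both ends, which is why the +1 is needed.
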
@@ -49,12 +54,17 @@ class ExtractBurnin(pype.api.Extractor):

            name = "_burnin"
            movieFileBurnin = filename.replace(".mov", "") + name + ".mov"

            full_movie_path = os.path.join(os.path.normpath(stagingdir), repre["files"])
            full_burnin_path = os.path.join(os.path.normpath(stagingdir), movieFileBurnin)
            full_movie_path = os.path.join(
                os.path.normpath(stagingdir), repre["files"]
            )
            full_burnin_path = os.path.join(
                os.path.normpath(stagingdir), movieFileBurnin
            )
            self.log.debug("__ full_burnin_path: {}".format(full_burnin_path))

            burnin_data = {
                "input": full_movie_path.replace("\\", "/"),
                "codec": repre.get("codec", []),
                "output": full_burnin_path.replace("\\", "/"),
                "burnin_data": prep_data
            }

@@ -1,7 +1,7 @@
import os

import pyblish.api
from pype.vendor import clique
import clique
import pype.api


@@ -1,9 +1,8 @@
import os

import math
import pyblish.api
from pype.vendor import clique
import clique
import pype.api
from pypeapp import config


class ExtractReview(pyblish.api.InstancePlugin):

@@ -22,16 +21,19 @@ class ExtractReview(pyblish.api.InstancePlugin):

    families = ["review"]
    hosts = ["nuke", "maya", "shell"]

    outputs = {}
    ext_filter = []

    def process(self, instance):
        # adding plugin attributes from presets
        publish_presets = config.get_presets()["plugins"]["global"]["publish"]
        plugin_attrs = publish_presets[self.__class__.__name__]
        output_profiles = plugin_attrs.get("outputs", {})

        output_profiles = self.outputs or {}

        inst_data = instance.data
        fps = inst_data.get("fps")
        start_frame = inst_data.get("frameStart")

        resolution_height = instance.data.get("resolutionHeight", 1080)
        resolution_width = instance.data.get("resolutionWidth", 1920)
        pixel_aspect = instance.data.get("pixelAspect", 1)
        self.log.debug("Families In: `{}`".format(instance.data["families"]))

        # get representation and loop them

@@ -40,7 +42,7 @@ class ExtractReview(pyblish.api.InstancePlugin):

        # filter out mov and img sequences
        representations_new = representations[:]
        for repre in representations:
            if repre['ext'] in plugin_attrs["ext_filter"]:
            if repre['ext'] in self.ext_filter:
                tags = repre.get("tags", [])

                self.log.info("Try repre: {}".format(repre))

@@ -92,8 +94,9 @@ class ExtractReview(pyblish.api.InstancePlugin):

                self.log.info("p_tags: `{}`".format(p_tags))
                # add families
                [instance.data["families"].append(t)
                    for t in p_tags
                    if t not in instance.data["families"]]
                 for t in p_tags
                 if t not in instance.data["families"]]

                # add to
                [new_tags.append(t) for t in p_tags
                 if t not in new_tags]

@@ -147,21 +150,83 @@ class ExtractReview(pyblish.api.InstancePlugin):

                    )

                    output_args = []
                    codec_args = profile.get('codec', [])
                    output_args.extend(codec_args)
                    # preset's output data
                    output_args.extend(profile.get('output', []))

                    # letter_box
                    # TODO: add to documentation
                    lb = profile.get('letter_box', None)
                    if lb:
                    lb = profile.get('letter_box', 0)
                    if lb is not 0:
                        if "reformat" not in p_tags:
                            lb /= pixel_aspect
                        output_args.append(
                            "-filter:v drawbox=0:0:iw:round((ih-(iw*(1/{0})))/2):t=fill:c=black,drawbox=0:ih-round((ih-(iw*(1/{0})))/2):iw:round((ih-(iw*(1/{0})))/2):t=fill:c=black".format(lb))
                            "-filter:v scale=1920x1080:flags=lanczos,setsar=1,drawbox=0:0:iw:round((ih-(iw*(1/{0})))/2):t=fill:c=black,drawbox=0:ih-round((ih-(iw*(1/{0})))/2):iw:round((ih-(iw*(1/{0})))/2):t=fill:c=black".format(lb))

                    # In case audio is longer than video.
                    output_args.append("-shortest")

                    # output filename
                    output_args.append(full_output_path)

                    self.log.debug("__ pixel_aspect: `{}`".format(pixel_aspect))
                    self.log.debug("__ resolution_width: `{}`".format(resolution_width))
                    self.log.debug("__ resolution_height: `{}`".format(resolution_height))
                    # scaling non-square pixels and 1920 width
                    if "reformat" in p_tags:
                        width_scale = 1920
                        width_half_pad = 0
                        res_w = int(float(resolution_width) * pixel_aspect)
                        height_half_pad = int((
                            (res_w - 1920) / (
                                res_w * .01) * (
                                    1080 * .01)) / 2
                        )
                        height_scale = 1080 - (height_half_pad * 2)
                        if height_scale > 1080:
                            height_half_pad = 0
                            height_scale = 1080
                            width_half_pad = (1920 - (float(resolution_width) * (1080 / float(resolution_height)))) / 2
                            width_scale = int(1920 - (width_half_pad * 2))

                        self.log.debug("__ width_scale: `{}`".format(width_scale))
                        self.log.debug("__ width_half_pad: `{}`".format(width_half_pad))
                        self.log.debug("__ height_scale: `{}`".format(height_scale))
                        self.log.debug("__ height_half_pad: `{}`".format(height_half_pad))

                        scaling_arg = "scale={0}x{1}:flags=lanczos,pad=1920:1080:{2}:{3}:black,setsar=1".format(
                            width_scale, height_scale, width_half_pad, height_half_pad
                        )

                        vf_back = self.add_video_filter_args(
                            output_args, scaling_arg)
                        # add it to output_args
                        output_args.insert(0, vf_back)

                    # baking lut file application
                    lut_path = instance.data.get("lutPath")
                    if lut_path and ("bake-lut" in p_tags):
                        # removing gamma info as it is all baked in lut
                        gamma = next((g for g in input_args
                                      if "-gamma" in g), None)
                        if gamma:
                            input_args.remove(gamma)

                        # create lut argument
                        lut_arg = "lut3d=file='{}'".format(
                            lut_path.replace(
                                "\\", "/").replace(":/", "\\:/")
                        )
                        lut_arg += ",colormatrix=bt601:bt709"

                        vf_back = self.add_video_filter_args(
                            output_args, lut_arg)
                        # add it to output_args
                        output_args.insert(0, vf_back)
                        self.log.info("Added Lut to ffmpeg command")
                    self.log.debug("_ output_args: `{}`".format(output_args))

                    mov_args = [
                        os.path.join(
                            os.environ.get(

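To make the letterbox math above concrete, here is a small standalone sketch; the value of `lb` is illustrative (the target aspect ratio the drawbox expression letterboxes to):

    # Minimal sketch of the filter chain built above, assuming lb=2.35,
    # i.e. letterboxing a full-height image to 2.35:1 with black bars.
    lb = 2.35
    drawbox = (
        "drawbox=0:0:iw:round((ih-(iw*(1/{0})))/2):t=fill:c=black,"
        "drawbox=0:ih-round((ih-(iw*(1/{0})))/2):iw:"
        "round((ih-(iw*(1/{0})))/2):t=fill:c=black"
    ).format(lb)
    # Each drawbox paints one bar: the bar height is (ih - iw/lb) / 2,
    # the difference between the frame height and the target-aspect height.
    print("-filter:v scale=1920x1080:flags=lanczos,setsar=1," + drawbox)
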
@@ -183,7 +248,8 @@ class ExtractReview(pyblish.api.InstancePlugin):

                        'ext': ext,
                        'files': repr_file,
                        "tags": new_tags,
                        "outputName": name
                        "outputName": name,
                        "codec": codec_args
                    })
                    if repre_new.get('preview'):
                        repre_new.pop("preview")

@@ -207,3 +273,40 @@ class ExtractReview(pyblish.api.InstancePlugin):

        instance.data["representations"] = representations_new

        self.log.debug("Families Out: `{}`".format(instance.data["families"]))

    def add_video_filter_args(self, args, inserting_arg):
        """
        Fixing video filter arguments to be one long string

        Args:
            args (list): list of string arguments
            inserting_arg (str): string argument we want to add
                                 (without flag `-vf`)

        Returns:
            str: long joined argument to be added back to list of arguments

        """
        # find all video format settings
        vf_settings = [p for p in args
                       for v in ["-filter:v", "-vf"]
                       if v in p]
        self.log.debug("_ vf_settings: `{}`".format(vf_settings))

        # remove them from output args list
        for p in vf_settings:
            self.log.debug("_ remove p: `{}`".format(p))
            args.remove(p)
            self.log.debug("_ args: `{}`".format(args))

        # strip them from all flags
        vf_fixed = [p.replace("-vf ", "").replace("-filter:v ", "")
                    for p in vf_settings]

        self.log.debug("_ vf_fixed: `{}`".format(vf_fixed))
        vf_fixed.insert(0, inserting_arg)
        self.log.debug("_ vf_fixed: `{}`".format(vf_fixed))
        # create new video filter setting
        vf_back = "-vf " + ",".join(vf_fixed)

        return vf_back

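A short usage sketch of the helper above, shown as a free function for brevity (argument values are illustrative):

    # Existing "-vf"/"-filter:v" entries are pulled out of the list and
    # merged with the new filter into a single "-vf" string:
    output_args = ["-codec:v libx264", "-vf scale=1920:-2", "-crf 18"]
    vf_back = add_video_filter_args(output_args, "setsar=1")
    # output_args is now ["-codec:v libx264", "-crf 18"]
    # vf_back == "-vf setsar=1,scale=1920:-2"
    output_args.insert(0, vf_back)
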
@@ -1,18 +1,23 @@
import os
from os.path import getsize
import logging
import speedcopy
import sys
import clique
import errno
import pyblish.api
from avalon import api, io
from avalon.vendor import filelink
# this is needed until speedcopy for linux is fixed
if sys.platform == "win32":
    from speedcopy import copyfile
else:
    from shutil import copyfile

log = logging.getLogger(__name__)


class IntegrateAssetNew(pyblish.api.InstancePlugin):
    """Resolve any dependency issius
    """Resolve any dependency issues

    This plug-in resolves any paths which, if not updated, might break
    the published file.

@@ -57,7 +62,6 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):

        "render",
        "imagesequence",
        "review",
        "render",
        "rendersetup",
        "rig",
        "plate",

@@ -66,7 +70,11 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):

        "audio",
        "yetiRig",
        "yeticache",
        "source"
        "nukenodes",
        "gizmo",
        "source",
        "matchmove",
        "image"
    ]
    exclude_families = ["clip"]

@@ -406,7 +414,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):

            }

            if sequence_repre and repre.get("frameStart"):
                representation['context']['frame'] = repre.get("frameStart")
                representation['context']['frame'] = src_padding_exp % int(repre.get("frameStart"))

            self.log.debug("__ representation: {}".format(representation))
            destination_list.append(dst)

@@ -476,7 +484,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):

        # copy file with speedcopy and check if sizes of files are symmetrical
        while True:
            speedcopy.copyfile(src, dst)
            copyfile(src, dst)
            if str(getsize(src)) in str(getsize(dst)):
                break

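The verification loop above never gives up; a bounded variant (a sketch of an alternative, not what the commit does) avoids spinning forever when the sizes never match, and compares exact byte sizes instead of the substring check:

    from os.path import getsize
    from shutil import copyfile

    def copy_verified(src, dst, attempts=3):
        # Retry the copy a few times and compare exact byte sizes.
        for _ in range(attempts):
            copyfile(src, dst)
            if getsize(src) == getsize(dst):
                return
        raise IOError("Copy verification failed: {} -> {}".format(src, dst))
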
@@ -494,7 +502,6 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):

        filelink.create(src, dst, filelink.HARDLINK)

    def get_subset(self, asset, instance):

        subset = io.find_one({"type": "subset",
                              "parent": asset["_id"],
                              "name": instance.data["subset"]})

@@ -503,7 +510,8 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):

            subset_name = instance.data["subset"]
            self.log.info("Subset '%s' not found, creating.." % subset_name)
            self.log.debug("families. %s" % instance.data.get('families'))
            self.log.debug("families. %s" % type(instance.data.get('families')))
            self.log.debug(
                "families. %s" % type(instance.data.get('families')))

            _id = io.insert_one({
                "schema": "pype:subset-3.0",

@@ -517,6 +525,15 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):

        subset = io.find_one({"_id": _id})

        # add group if available
        if instance.data.get("subsetGroup"):
            io.update_many({
                'type': 'subset',
                '_id': io.ObjectId(subset["_id"])
            }, {'$set': {'data.subsetGroup':
                         instance.data.get('subsetGroup')}}
            )

        return subset

    def create_version(self, subset, version_number, locations, data=None):

28
pype/plugins/global/publish/validate_containers.py
Normal file

@@ -0,0 +1,28 @@
import pyblish.api

import pype.lib
from avalon.tools import cbsceneinventory


class ShowInventory(pyblish.api.Action):

    label = "Show Inventory"
    icon = "briefcase"
    on = "failed"

    def process(self, context, plugin):
        cbsceneinventory.show()


class ValidateContainers(pyblish.api.ContextPlugin):
    """Containers must be updated to the latest version on publish."""

    label = "Validate Containers"
    order = pyblish.api.ValidatorOrder
    hosts = ["maya", "houdini", "nuke"]
    optional = True
    actions = [ShowInventory]

    def process(self, context):
        if pype.lib.any_outdated():
            raise ValueError("There are outdated containers in the scene.")

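For completeness, this is how such a plug-in is typically made available to pyblish (a sketch; pype normally discovers plugins from its configured plugin paths rather than registering them by hand):

    import pyblish.api

    # Manual registration, e.g. from a host integration's install() step:
    pyblish.api.register_plugin(ValidateContainers)

    # Or point pyblish at the directory that contains this file:
    # pyblish.api.register_plugin_path("/path/to/pype/plugins/global/publish")
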
@@ -27,6 +27,8 @@ class ValidateFfmpegInstallef(pyblish.api.Validator):

        return True

    def process(self, instance):
        self.log.info("ffmpeg path: `{}`".format(
            os.environ.get("FFMPEG_PATH", "")))
        if self.is_tool(
                os.path.join(
                    os.environ.get("FFMPEG_PATH", ""), "ffmpeg")) is False:

@@ -1,8 +1,9 @@
import pyblish.api
import os


class ValidateTemplates(pyblish.api.ContextPlugin):
    """Check if all templates were filed"""
    """Check if all templates were filled"""

    label = "Validate Templates"
    order = pyblish.api.ValidatorOrder - 0.1

@@ -18,12 +19,12 @@ class ValidateTemplates(pyblish.api.ContextPlugin):

                "root": os.environ["PYPE_STUDIO_PROJECTS_PATH"],
                "project": {"name": "D001_projectsx",
                            "code": "prjX"},
                "ext": "exr",
                "version": 3,
                "task": "animation",
                "asset": "sh001",
                "hierarchy": "ep101/sq01/sh010"}

                "ext": "exr",
                "version": 3,
                "task": "animation",
                "asset": "sh001",
                "app": "maya",
                "hierarchy": "ep101/sq01/sh010"}

        anatomy_filled = anatomy.format(data)
        self.log.info(anatomy_filled)

@@ -31,11 +32,12 @@ class ValidateTemplates(pyblish.api.ContextPlugin):

        data = {"root": os.environ["PYPE_STUDIO_PROJECTS_PATH"],
                "project": {"name": "D001_projectsy",
                            "code": "prjY"},
                "ext": "abc",
                "version": 1,
                "task": "lookdev",
                "asset": "bob",
                "hierarchy": "ep101/sq01/bob"}
                "ext": "abc",
                "version": 1,
                "task": "lookdev",
                "asset": "bob",
                "app": "maya",
                "hierarchy": "ep101/sq01/bob"}

        anatomy_filled = context.data["anatomy"].format(data)
        self.log.info(anatomy_filled["work"]["folder"])

@@ -1,7 +1,4 @@
import os
import sys
from avalon import io
from pprint import pprint
import acre

from avalon import api, lib

@@ -1,10 +1,9 @@
import os
import sys
from pprint import pprint
import acre

from avalon import api, lib, io
import pype.api as pype
from pypeapp import Anatomy


class PremierePro(api.Action):

@@ -1,6 +1,7 @@
from collections import OrderedDict

import avalon.maya
from pype.maya import lib

from maya import cmds

@@ -14,10 +15,21 @@ class CreateAss(avalon.maya.Creator):

    icon = "cube"
    defaults = ['Main']

    def __init__(self, *args, **kwargs):
        super(CreateAss, self).__init__(*args, **kwargs)

        # Add animation data
        self.data.update(lib.collect_animation_data())

        # Vertex colors with the geometry
        self.data["exportSequence"] = False

    def process(self):
        instance = super(CreateAss, self).process()

        data = OrderedDict(**self.data)
        # data = OrderedDict(**self.data)


        nodes = list()

@@ -30,4 +42,6 @@ class CreateAss(avalon.maya.Creator):

        assProxy = cmds.sets(name="proxy_SET", empty=True)
        cmds.sets([assContent, assProxy], forceElement=instance)

        self.data = data
        # self.log.info(data)
        #
        # self.data = data

@@ -18,3 +18,6 @@ class CreateLook(avalon.maya.Creator):

        # Whether to automatically convert the textures to .tx upon publish.
        self.data["maketx"] = True

        # Enable users to force a copy.
        self.data["forceCopy"] = False

@@ -38,7 +38,7 @@ class CreateRenderGlobals(avalon.maya.Creator):

            self.log.warning("Deadline REST API url not found.")
        else:
            argument = "{}/api/pools?NamesOnly=true".format(deadline_url)
            response = requests.get(argument)
            response = self._requests_get(argument)
            if not response.ok:
                self.log.warning("No pools retrieved")
            else:

@@ -135,7 +135,7 @@ class CreateRenderGlobals(avalon.maya.Creator):

                'authToken': self._token
            }
            api_entry = '/api/pools/list'
            response = requests.get(
            response = self._requests_get(
                self.MUSTER_REST_URL + api_entry, params=params)
            if response.status_code != 200:
                if response.status_code == 401:

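The `_requests_get` wrapper replacing the bare `requests.get` calls is not shown in these hunks; a plausible minimal sketch (the SSL-verification rationale is an assumption, not confirmed by the diff) would be:

    import requests

    def _requests_get(self, *args, **kwargs):
        # Single choke point for farm API calls, e.g. to allow
        # self-signed certificates on internal render-farm servers.
        kwargs.setdefault("verify", False)
        return requests.get(*args, **kwargs)

Routing every call through one method also makes it easy to add retries or auth headers later without touching each call site.
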
@@ -2,6 +2,7 @@ from avalon import api

import pype.maya.plugin
import os
from pypeapp import config
import clique


class AssProxyLoader(pype.maya.plugin.ReferenceLoader):

@@ -21,6 +22,13 @@ class AssProxyLoader(pype.maya.plugin.ReferenceLoader):

        from avalon import maya
        import pymel.core as pm

        version = context['version']
        version_data = version.get("data", {})

        self.log.info("version_data: {}\n".format(version_data))

        frameStart = version_data.get("frameStart", None)

        try:
            family = context["representation"]["context"]["family"]
        except ValueError:

@@ -30,7 +38,24 @@ class AssProxyLoader(pype.maya.plugin.ReferenceLoader):

        groupName = "{}:{}".format(namespace, name)
        path = self.fname
        proxyPath = os.path.splitext(path)[0] + ".ma"
        proxyPath_base = os.path.splitext(path)[0]

        if frameStart is not None:
            proxyPath_base = os.path.splitext(proxyPath_base)[0]

            publish_folder = os.path.split(path)[0]
            files_in_folder = os.listdir(publish_folder)
            collections, remainder = clique.assemble(files_in_folder)

            if collections:
                hashes = collections[0].padding * '#'
                coll = collections[0].format('{head}[index]{tail}')
                filename = coll.replace('[index]', hashes)

                path = os.path.join(publish_folder, filename)

        proxyPath = proxyPath_base + ".ma"
        self.log.info

        nodes = cmds.file(proxyPath,
                          namespace=namespace,

@@ -147,6 +172,13 @@ class AssStandinLoader(api.Loader):

        import mtoa.ui.arnoldmenu
        import pymel.core as pm

        version = context['version']
        version_data = version.get("data", {})

        self.log.info("version_data: {}\n".format(version_data))

        frameStart = version_data.get("frameStart", None)

        asset = context['asset']['name']
        namespace = namespace or lib.unique_namespace(
            asset + "_",

@@ -182,6 +214,8 @@ class AssStandinLoader(api.Loader):

        # Set the standin filepath
        standinShape.dso.set(self.fname)
        if frameStart is not None:
            standinShape.useFrameExtension.set(1)

        nodes = [root, standin]
        self[:] = nodes

@@ -199,14 +233,23 @@ class AssStandinLoader(api.Loader):

        path = api.get_representation_path(representation)

        # Update the standin
        members = pm.sets(container['objectName'], query=True)
        standins = pm.ls(members, type="AiStandIn", long=True)
        files_in_path = os.listdir(os.path.split(path)[0])
        sequence = 0
        collections, remainder = clique.assemble(files_in_path)
        if collections:
            sequence = 1

        assert len(caches) == 1, "This is a bug"
        # Update the standin
        standins = list()
        members = pm.sets(container['objectName'], query=True)
        for member in members:
            shape = member.getShape()
            if (shape and shape.type() == "aiStandIn"):
                standins.append(shape)

        for standin in standins:
            standin.cacheFileName.set(path)
            standin.dso.set(path)
            standin.useFrameExtension.set(sequence)

        container = pm.PyNode(container["objectName"])
        container.representation.set(str(representation["_id"]))

30
pype/plugins/maya/load/load_matchmove.py
Normal file

@@ -0,0 +1,30 @@
from avalon import api
from maya import mel


class MatchmoveLoader(api.Loader):
    """
    This will run matchmove script to create track in scene.

    Supported script types are .py and .mel
    """

    families = ["matchmove"]
    representations = ["py", "mel"]
    defaults = ["Camera", "Object", "Mocap"]

    label = "Run matchmove script"
    icon = "empire"
    color = "orange"

    def load(self, context, name, namespace, data):
        if self.fname.lower().endswith(".py"):
            exec(open(self.fname).read())

        elif self.fname.lower().endswith(".mel"):
            mel.eval('source "{}"'.format(self.fname))

        else:
            self.log.error("Unsupported script type")

        return True

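Note that the bare `exec(...)` above runs the script in the loader's own namespace. A slightly safer sketch (an alternative, not what the commit does) isolates the script's globals and attaches the filename so tracebacks point at the script:

    # Run the matchmove script in its own namespace so its globals
    # cannot collide with the loader's:
    script_globals = {"__name__": "__main__", "__file__": self.fname}
    with open(self.fname) as f:
        exec(compile(f.read(), self.fname, "exec"), script_globals)
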
@@ -1,9 +1,7 @@
import pype.maya.plugin
import os
from pypeapp import config
reload(config)
import pype.maya.plugin
reload(pype.maya.plugin)


class ReferenceLoader(pype.maya.plugin.ReferenceLoader):
    """Load the model"""

@@ -22,7 +20,6 @@ class ReferenceLoader(pype.maya.plugin.ReferenceLoader):

        from avalon import maya
        import pymel.core as pm


        try:
            family = context["representation"]["context"]["family"]
        except ValueError:

@@ -219,10 +219,6 @@ class CollectLook(pyblish.api.InstancePlugin):

        with lib.renderlayer(instance.data["renderlayer"]):
            self.collect(instance)

        # make ftrack publishable
        self.maketx = instance.data.get('maketx', True)
        instance.data['maketx'] = self.maketx
        self.log.info('maketx: {}'.format(self.maketx))

    def collect(self, instance):

@@ -297,9 +293,11 @@ class CollectLook(pyblish.api.InstancePlugin):

        self.log.info("Collected file nodes:\n{}".format(files))
        # Collect textures if any file nodes are found
        instance.data["resources"] = [self.collect_resource(n)
                                      for n in files]
        self.log.info("Collected resources:\n{}".format(instance.data["resources"]))
        instance.data["resources"] = []
        for n in files:
            instance.data["resources"].append(self.collect_resource(n))

        self.log.info("Collected resources: {}".format(instance.data["resources"]))

        # Log a warning when no relevant sets were retrieved for the look.
        if not instance.data["lookData"]["relationships"]:

@@ -423,7 +421,7 @@ class CollectLook(pyblish.api.InstancePlugin):

        self.log.debug("processing: {}".format(node))
        if cmds.nodeType(node) == 'file':
            self.log.debug("file node")
            self.log.debug(" - file node")
            attribute = "{}.fileTextureName".format(node)
            computed_attribute = "{}.computedFileTextureNamePattern".format(node)
        elif cmds.nodeType(node) == 'aiImage':

@@ -431,7 +429,7 @@ class CollectLook(pyblish.api.InstancePlugin):

            attribute = "{}.filename".format(node)
            computed_attribute = attribute
        source = cmds.getAttr(attribute)

        self.log.info(" - file source: {}".format(source))
        color_space_attr = "{}.colorSpace".format(node)
        color_space = cmds.getAttr(color_space_attr)
        # Compare with the computed file path, e.g. the one with the <UDIM>

@@ -455,6 +453,13 @@ class CollectLook(pyblish.api.InstancePlugin):

        if len(files) == 0:
            self.log.error("No valid files found from node `%s`" % node)

        self.log.info("collection of resource done:")
        self.log.info(" - node: {}".format(node))
        self.log.info(" - attribute: {}".format(attribute))
        self.log.info(" - source: {}".format(source))
        self.log.info(" - file: {}".format(files))
        self.log.info(" - color space: {}".format(color_space))

        # Define the resource
        return {"node": node,
                "attribute": attribute,

@@ -20,8 +20,11 @@ class ExtractAssStandin(pype.api.Extractor):

    def process(self, instance):

        sequence = instance.data.get("exportSequence", False)

        staging_dir = self.staging_dir(instance)
        filename = "{}.ass".format(instance.name)
        filenames = list()
        file_path = os.path.join(staging_dir, filename)

        # Write out .ass file

@@ -29,13 +32,47 @@ class ExtractAssStandin(pype.api.Extractor):

        with avalon.maya.maintained_selection():
            self.log.info("Writing: {}".format(instance.data["setMembers"]))
            cmds.select(instance.data["setMembers"], noExpand=True)
            cmds.arnoldExportAss( filename=file_path,
                                  selected=True,
                                  asciiAss=True,
                                  shadowLinks=True,
                                  lightLinks=True,
                                  boundingBox=True
                                  )

            if sequence:
                self.log.info("Extracting ass sequence")

                # Collect the start and end including handles
                start = instance.data.get("frameStart", 1)
                end = instance.data.get("frameEnd", 1)
                handles = instance.data.get("handles", 0)
                step = instance.data.get("step", 0)
                if handles:
                    start -= handles
                    end += handles

                exported_files = cmds.arnoldExportAss(filename=file_path,
                                                      selected=True,
                                                      asciiAss=True,
                                                      shadowLinks=True,
                                                      lightLinks=True,
                                                      boundingBox=True,
                                                      startFrame=start,
                                                      endFrame=end,
                                                      frameStep=step
                                                      )
                for file in exported_files:
                    filenames.append(os.path.split(file)[1])
                self.log.info("Exported: {}".format(filenames))
            else:
                cmds.arnoldExportAss(filename=file_path,
                                     selected=True,
                                     asciiAss=True,
                                     shadowLinks=True,
                                     lightLinks=True,
                                     boundingBox=True
                                     )
                filenames = filename
        optionals = [
            "frameStart", "frameEnd", "step", "handles",
            "handleEnd", "handleStart"
        ]
        for key in optionals:
            instance.data.pop(key, None)

        if "representations" not in instance.data:
            instance.data["representations"] = []

@@ -43,9 +80,13 @@ class ExtractAssStandin(pype.api.Extractor):

        representation = {
            'name': 'ass',
            'ext': 'ass',
            'files': filename,
            'files': filenames,
            "stagingDir": staging_dir
        }

        if sequence:
            representation['frameStart'] = start

        instance.data["representations"].append(representation)

        self.log.info("Extracted instance '%s' to: %s"

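For orientation, the sequence branch above yields a list of files rather than a single name; for example (illustrative names, assuming Arnold's default frame suffixing):

    # Single frame:          'files': "aiStandinMain.ass"
    # Sequence (1001-1003):  'files': ["aiStandinMain.1001.ass",
    #                                  "aiStandinMain.1002.ass",
    #                                  "aiStandinMain.1003.ass"]
    # plus representation['frameStart'] = 1001, so the integrator can
    # format padded frame numbers into the publish template.
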
@@ -43,8 +43,13 @@ class ExtractAssProxy(pype.api.Extractor):

        # Get only the shape contents we need in such a way that we avoid
        # taking along intermediateObjects
        members = instance.data['proxy']
        members = cmds.ls(members,
        proxy = instance.data.get('proxy', None)

        if not proxy:
            self.log.info("no proxy mesh")
            return

        members = cmds.ls(proxy,
                          dag=True,
                          transforms=True,
                          noIntermediate=True)

@@ -38,11 +38,7 @@ def source_hash(filepath, *args):

    file_name = os.path.basename(filepath)
    time = str(os.path.getmtime(filepath))
    size = str(os.path.getsize(filepath))
    return "|".join([
        file_name,
        time,
        size
    ] + list(args)).replace(".", ",")
    return "|".join([file_name, time, size] + list(args)).replace(".", ",")


def find_paths_by_hash(texture_hash):

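A quick illustration of the hash format produced above (path, timestamp, and size are made up):

    # source_hash("/textures/wood.exr", "sRGB") builds a pipe-separated
    # identity string and swaps "." for "," so it is safe as a key:
    #   basename | mtime | size | extra args
    #   "wood,exr|1556281200,0|2048576|sRGB"
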
@@ -64,36 +60,33 @@ def maketx(source, destination, *args):

    """

    cmd = [
            "maketx",
            "-v",  # verbose
            "-u",  # update mode
            # unpremultiply before conversion (recommended when alpha present)
            "--unpremult",
            "--checknan",
            # use oiio-optimized settings for tile-size, planarconfig, metadata
            "--oiio",
            "--filter lanczos3"
    ]
        "maketx",
        "-v",  # verbose
        "-u",  # update mode
        # unpremultiply before conversion (recommended when alpha present)
        "--unpremult",
        "--checknan",
        # use oiio-optimized settings for tile-size, planarconfig, metadata
        "--oiio",
        "--filter lanczos3",
    ]

    cmd.extend(args)
    cmd.extend([
        "-o", destination,
        source
    ])
    cmd.extend(["-o", destination, source])

    cmd = " ".join(cmd)

    CREATE_NO_WINDOW = 0x08000000
    kwargs = dict(
        args=cmd,
        stderr=subprocess.STDOUT
    )
    kwargs = dict(args=cmd, stderr=subprocess.STDOUT)

    if sys.platform == "win32":
        kwargs["creationflags"] = CREATE_NO_WIDOW
        kwargs["creationflags"] = CREATE_NO_WINDOW
    try:
        out = subprocess.check_output(**kwargs)
    except subprocess.CalledProcessError as exc:
        print(exc)
        import traceback

        traceback.print_exc()
        raise

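A usage sketch matching the call made later in this file (paths and the hash value are illustrative):

    # Convert an EXR to a tiled, mip-mapped .tx, embedding the source
    # hash as string metadata so an unchanged texture can be skipped
    # on the next publish:
    maketx(
        "/textures/wood.exr",
        "/staging/resources/wood.tx",
        "-sattrib", "sourceHash", "wood,exr|1556281200,0|2048576",
    )
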
@@ -180,36 +173,51 @@ class ExtractLook(pype.api.Extractor):

            # Preserve color space values (force value after filepath change)
            # This will also trigger in the same order at end of context to
            # ensure after context it's still the original value.
            color_space = resource.get('color_space')
            color_space = resource.get("color_space")

            for f in resource["files"]:

                files_metadata[os.path.normpath(f)] = {'color_space': color_space}
                files_metadata[os.path.normpath(f)] = {
                    "color_space": color_space}
                # files.update(os.path.normpath(f))

        # Process the resource files
        transfers = list()
        hardlinks = list()
        hashes = dict()
        forceCopy = instance.data.get("forceCopy", False)

        self.log.info(files)
        for filepath in files_metadata:

            cspace = files_metadata[filepath]['color_space']
            cspace = files_metadata[filepath]["color_space"]
            linearise = False
            if cspace == 'sRGB':
            if cspace == "sRGB":
                linearise = True
                # set its file node to 'raw' as tx will be linearized
                files_metadata[filepath]["color_space"] = "raw"

            source, mode, hash = self._process_texture(
                filepath, do_maketx, staging=dir_path, linearise=linearise
            )
            destination = self.resource_destination(
                instance, source, do_maketx
                filepath,
                do_maketx,
                staging=dir_path,
                linearise=linearise,
                force=forceCopy
            )
            destination = self.resource_destination(instance,
                                                    source,
                                                    do_maketx)

            # Force copy is specified.
            if forceCopy:
                mode = COPY

            if mode == COPY:
                transfers.append((source, destination))
                self.log.info('copying')
            elif mode == HARDLINK:
                hardlinks.append((source, destination))
                self.log.info('hardlinking')

            # Store the hashes from hash to destination to include in the
            # database

@@ -230,13 +238,14 @@ class ExtractLook(pype.api.Extractor):

            # Preserve color space values (force value after filepath change)
            # This will also trigger in the same order at end of context to
            # ensure after context it's still the original value.
            color_space_attr = resource['node'] + ".colorSpace"
            color_space_attr = resource["node"] + ".colorSpace"
            color_space = cmds.getAttr(color_space_attr)

            if files_metadata[source]["color_space"] == "raw":
                # set colorspace to raw if we linearized it
                color_space = "Raw"
            # Remap file node filename to destination
            attr = resource['attribute']
            attr = resource["attribute"]
            remap[attr] = destinations[source]

            remap[color_space_attr] = color_space

        self.log.info("Finished remapping destinations ...")

@@ -263,13 +272,15 @@ class ExtractLook(pype.api.Extractor):

            channels=True,
            constraints=True,
            expressions=True,
            constructionHistory=True
            constructionHistory=True,
        )

        # Write the JSON data
        self.log.info("Extract json..")
        data = {"attributes": lookdata["attributes"],
                "relationships": relationships}
        data = {
            "attributes": lookdata["attributes"],
            "relationships": relationships
        }

        with open(json_path, "w") as f:
            json.dump(data, f)

@@ -288,7 +299,7 @@ class ExtractLook(pype.api.Extractor):

        instance.data["representations"].append(
            {
                "name": "ma",
                "ext": 'ma',
                "ext": "ma",
                "files": os.path.basename(maya_fname),
                "stagingDir": os.path.dirname(maya_fname),
            }

@@ -296,7 +307,7 @@ class ExtractLook(pype.api.Extractor):

        instance.data["representations"].append(
            {
                "name": "json",
                "ext": 'json',
                "ext": "json",
                "files": os.path.basename(json_fname),
                "stagingDir": os.path.dirname(json_fname),
            }

@@ -309,13 +320,18 @@ class ExtractLook(pype.api.Extractor):

        # Source hash for the textures
        instance.data["sourceHashes"] = hashes

        self.log.info("Extracted instance '%s' to: %s" % (
            instance.name, maya_path)
        )
        """
        self.log.info("Returning colorspaces to their original values ...")
        for attr, value in remap.items():
            self.log.info(" - {}: {}".format(attr, value))
            cmds.setAttr(attr, value, type="string")
        """
        self.log.info("Extracted instance '%s' to: %s" % (instance.name,
                                                          maya_path))

    def resource_destination(self, instance, filepath, do_maketx):

        anatomy = instance.context.data['anatomy']
        anatomy = instance.context.data["anatomy"]

        self.create_destination_template(instance, anatomy)

@@ -327,12 +343,10 @@ class ExtractLook(pype.api.Extractor):

            ext = ".tx"

        return os.path.join(
            instance.data["assumedDestination"],
            "resources",
            basename + ext
            instance.data["assumedDestination"], "resources", basename + ext
        )

    def _process_texture(self, filepath, do_maketx, staging, linearise):
    def _process_texture(self, filepath, do_maketx, staging, linearise, force):
        """Process a single texture file on disk for publishing.
        This will:
        1. Check whether it's already published, if so it will do hardlink

@@ -354,24 +368,20 @@ class ExtractLook(pype.api.Extractor):

        # If source has been published before with the same settings,
        # then don't reprocess but hardlink from the original
        existing = find_paths_by_hash(texture_hash)
        if existing:
        if existing and not force:
            self.log.info("Found hash in database, preparing hardlink..")
            source = next((p for p in existing if os.path.exists(p)), None)
            if filepath:
                return source, HARDLINK, texture_hash
            else:
                self.log.warning(
                    "Paths not found on disk, "
                    "skipping hardlink: %s" % (existing,)
                    ("Paths not found on disk, "
                     "skipping hardlink: %s") % (existing,)
                )

        if do_maketx and ext != ".tx":
            # Produce .tx file in staging if source file is not .tx
            converted = os.path.join(
                staging,
                "resources",
                fname + ".tx"
            )
            converted = os.path.join(staging, "resources", fname + ".tx")

            if linearise:
                self.log.info("tx: converting sRGB -> linear")

@@ -384,9 +394,15 @@ class ExtractLook(pype.api.Extractor):

                os.makedirs(os.path.dirname(converted))

            self.log.info("Generating .tx file for %s .." % filepath)
            maketx(filepath, converted,
                   # Include `source-hash` as string metadata
                   "-sattrib", "sourceHash", texture_hash, colorconvert)
            maketx(
                filepath,
                converted,
                # Include `source-hash` as string metadata
                "-sattrib",
                "sourceHash",
                texture_hash,
                colorconvert,
            )

            return converted, COPY, texture_hash

@@ -412,58 +428,62 @@ class ExtractLook(pype.api.Extractor):

        project_name = api.Session["AVALON_PROJECT"]
        a_template = anatomy.templates

        project = io.find_one({"type": "project",
                               "name": project_name},
                              projection={"config": True, "data": True})
        project = io.find_one(
            {"type": "project", "name": project_name},
            projection={"config": True, "data": True},
        )

        template = a_template['publish']['path']
        template = a_template["publish"]["path"]
        # anatomy = instance.context.data['anatomy']

        asset = io.find_one({"type": "asset",
                             "name": asset_name,
                             "parent": project["_id"]})
        asset = io.find_one(
            {"type": "asset", "name": asset_name, "parent": project["_id"]}
        )

        assert asset, ("No asset found by the name '{}' "
                       "in project '{}'".format(asset_name, project_name))
        silo = asset.get('silo')
                       "in project '{}'").format(asset_name, project_name)
        silo = asset.get("silo")

        subset = io.find_one({"type": "subset",
                              "name": subset_name,
                              "parent": asset["_id"]})
        subset = io.find_one(
            {"type": "subset", "name": subset_name, "parent": asset["_id"]}
        )

        # assume there is no version yet, we start at `1`
        version = None
        version_number = 1
        if subset is not None:
            version = io.find_one({"type": "version",
                                   "parent": subset["_id"]},
                                  sort=[("name", -1)])
            version = io.find_one(
                {"type": "version",
                 "parent": subset["_id"]
                 }, sort=[("name", -1)]
            )

        # if there is a subset there ought to be version
        if version is not None:
            version_number += version["name"]

        if instance.data.get('version'):
            version_number = int(instance.data.get('version'))
        if instance.data.get("version"):
            version_number = int(instance.data.get("version"))

        padding = int(a_template['render']['padding'])
        padding = int(a_template["render"]["padding"])

        hierarchy = asset['data']['parents']
        hierarchy = asset["data"]["parents"]
        if hierarchy:
            # hierarchy = os.path.sep.join(hierarchy)
            hierarchy = "/".join(hierarchy)

        template_data = {"root": api.Session["AVALON_PROJECTS"],
                         "project": {"name": project_name,
                                     "code": project['data']['code']},
                         "silo": silo,
                         "family": instance.data['family'],
                         "asset": asset_name,
                         "subset": subset_name,
                         "frame": ('#' * padding),
                         "version": version_number,
                         "hierarchy": hierarchy,
                         "representation": "TEMP"}
        template_data = {
            "root": api.Session["AVALON_PROJECTS"],
            "project": {"name": project_name, "code": project["data"]["code"]},
            "silo": silo,
            "family": instance.data["family"],
            "asset": asset_name,
            "subset": subset_name,
            "frame": ("#" * padding),
            "version": version_number,
            "hierarchy": hierarchy,
            "representation": "TEMP",
        }

        instance.data["assumedTemplateData"] = template_data
        self.log.info(template_data)

@@ -1,19 +1,16 @@
import os
import subprocess
import contextlib
import json
import capture_gui
import clique

#
import pype.maya.lib as lib
import pype.api
import avalon.maya

#
from maya import cmds, mel
import pymel.core as pm
from pype.vendor import ffmpeg
# from pype.scripts import otio_burnin
reload(ffmpeg)
# import ffmpeg
# # from pype.scripts import otio_burnin
# reload(ffmpeg)


# TODO: move codec settings to presets

Some files were not shown because too many files have changed in this diff.