Merge branch 'develop' into feature/merge_PR187_toke_nukestudio_improvements

This commit is contained in:
Jakub Jezek 2019-07-10 09:01:34 +02:00
commit 639db74f0c
16 changed files with 1409 additions and 28 deletions

15
.gitignore vendored
View file

@@ -12,3 +12,18 @@ __pycache__/
# Editor backup files #
#######################
*~
# Unit test / coverage reports
##############################
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
/coverage
.cache
nosetests.xml
coverage.xml
*.cover
.hypothesis/
.pytest_cache/

0
pype/.coveragerc Normal file
View file

View file

@@ -2,6 +2,7 @@ import os
from pyblish import api as pyblish
from avalon import api as avalon
from .lib import filter_pyblish_plugins
import logging
log = logging.getLogger(__name__)
@@ -22,10 +23,12 @@ LOAD_PATH = os.path.join(PLUGINS_DIR, "global", "load")
def install():
log.info("Registering global plug-ins..")
pyblish.register_plugin_path(PUBLISH_PATH)
pyblish.register_discovery_filter(filter_pyblish_plugins)
avalon.register_plugin_path(avalon.Loader, LOAD_PATH)
def uninstall():
log.info("Deregistering global plug-ins..")
pyblish.deregister_plugin_path(PUBLISH_PATH)
pyblish.deregister_discovery_filter(filter_pyblish_plugins)
avalon.deregister_plugin_path(avalon.Loader, LOAD_PATH)
log.info("Global plug-ins unregistred")

View file

@@ -0,0 +1,705 @@
import os
import sys
import argparse
import json
import logging
import collections
import tempfile
import requests
import clique  # used below for sequence parsing; may live in avalon.vendor
from pype.vendor import ftrack_api
from pype.ftrack import BaseAction
from pypeapp import config
class SyncAssetVersions(BaseAction):
#: Action identifier.
identifier = 'sync.asset.versions'
#: Action label.
label = 'Sync Asset Versions'
#: Action description.
description = 'Synchronize Asset versions to another Ftrack'
#: roles that are allowed to register this action
role_list = ['Administrator', 'Project Manager', 'Pypeclub']
# ENTER VALUES HERE (change values based on keys)
# Custom attribute storing ftrack id of destination server
id_key_src = 'fridge_ftrackID'
# Custom attribute storing ftrack id of source server
id_key_dst = 'kredenc_ftrackID'
components_name = (
'ftrackreview-mp4_src',
'ftrackreview-image_src',
'thumbnail_src'
)
# comp name mapping
comp_name_mapping = {
'ftrackreview-mp4_src': 'ftrackreview-mp4',
'ftrackreview-image_src': 'ftrackreview-image',
'thumbnail_src': 'thumbnail'
}
comp_location_mapping = {
'ftrack.server': [
'ftrackreview-mp4',
'ftrackreview-mp4_src',
'ftrackreview-image',
'ftrackreview-image_src',
'thumbnail',
'thumbnail_src'
],
'ftrack.unmanaged': []
}
def discover(self, session, entities, event):
''' Validation '''
for entity in entities:
if entity.entity_type.lower() != 'assetversion':
return False
return True
def launch(self, session, entities, event):
self.dst_ftrack_locations = {}
self.interface_messages = {}
# stop if custom attribute for storing second ftrack id is missing
if self.id_key_src not in entities[0]['custom_attributes']:
msg = (
'Custom attribute "{}" does not exist on AssetVersion'
).format(self.id_key_src)
self.log.error(msg)
return {
'success': False,
'message': msg
}
source_credentials = config.get_presets()['ftrack'].get(
'partnership_ftrack_cred', {}
)
self.dst_session = ftrack_api.Session(
server_url=source_credentials.get('server_url'),
api_key=source_credentials.get('api_key'),
api_user=source_credentials.get('api_user'),
auto_connect_event_hub=True
)
# NOTE Shared session has issues with location definition
self.session_for_components = ftrack_api.Session(
server_url=session.server_url,
api_key=session.api_key,
api_user=session.api_user,
auto_connect_event_hub=True
)
for entity in entities:
asset = entity['asset']
parent = asset['parent']
# Check if asset version already has an entity on destination Ftrack
# TODO ? skip if yes
# ? show to user - with interface/message/note
# + or ask if user wants to override the found version?
dst_ftrack_id = entity['custom_attributes'].get(self.id_key_src)
if dst_ftrack_id:
dst_ftrack_ent = self.dst_session.query(
'AssetVersion where id = "{}"'.format(dst_ftrack_id)
).first()
if dst_ftrack_ent:
self.log.warning(
'"{}" - Already exists. Skipping'.format(asset['name'])
)
continue
# Find parent where Version will be uploaded
dst_parent_id = parent['custom_attributes'].get(self.id_key_src)
if not dst_parent_id:
self.log.warning((
'Entity: "{}" don\'t have stored Custom attribute "{}"'
).format(parent['name'], self.id_key_src))
continue
dst_parent_entity = self.dst_session.query(
'TypedContext where id = "{}"'.format(dst_parent_id)
).first()
if not dst_parent_entity:
msg = (
'Didn\'t find mirrored entity in destination Ftrack'
' for "{}"'
).format(parent['name'])
self.log.warning(msg)
continue
component_list = self.prepare_data(entity['id'])
id_stored = False
for comp_data in component_list:
dst_asset_ver_id = self.asset_version_creation(
dst_parent_entity, comp_data, entity
)
if id_stored:
continue
entity['custom_attributes'][self.id_key_src] = dst_asset_ver_id
session.commit()
id_stored = True
self.dst_session.close()
self.session_for_components.close()
self.dst_session = None
self.session_for_components = None
return True
def prepare_data(self, asset_version_id):
components_list = []
asset_version = self.session_for_components.query(
'AssetVersion where id is "{}"'.format(asset_version_id)
).one()
# Asset data
asset_type = asset_version['asset']['type'].get('short', 'upload')
assettype_data = {'short': asset_type}
asset_data = {'name': asset_version['asset']['name']}
# Asset version data
assetversion_data = {'version': asset_version['version']}
# Component data
components_of_interest = {}
for name in self.components_name:
components_of_interest[name] = False
# NOTE iterate over a copied key list - keys are swapped inside the loop
for key in list(components_of_interest):
# Find component by name
for comp in asset_version['components']:
if comp['name'] == key:
components_of_interest[key] = True
break
# NOTE if component was found then continue
if components_of_interest[key]:
continue
# Look for alternative component name set in mapping
new_key = None
if key in self.comp_name_mapping:
new_key = self.comp_name_mapping[key]
if not new_key:
self.log.warning(
    'Asset version does not have component "{}"'
    ' and no alternative name is mapped'.format(key)
)
continue
components_of_interest[new_key] = components_of_interest.pop(key)
# Try to look for alternative name
for comp in asset_version['components']:
if comp['name'] == new_key:
components_of_interest[new_key] = True
break
# Check if at least one component is transferable
have_comp_to_transfer = False
for value in components_of_interest.values():
if value:
have_comp_to_transfer = True
break
if not have_comp_to_transfer:
return components_list
thumbnail_id = asset_version.get('thumbnail_id')
temp_folder = tempfile.mkdtemp('components')
# Data for transfer components
for comp in asset_version['components']:
comp_name = comp['name']
if comp_name not in components_of_interest:
continue
if not components_of_interest[comp_name]:
continue
if comp_name in self.comp_name_mapping:
comp_name = self.comp_name_mapping[comp_name]
is_thumbnail = False
for _comp in asset_version['components']:
if _comp['name'] == comp_name:
if _comp['id'] == thumbnail_id:
is_thumbnail = True
break
src_location_name = comp['component_locations'][0]['location']['name']
location = self.session_for_components.query(
'Location where name is "{}"'.format(src_location_name)
).one()
file_path = None
if src_location_name == 'ftrack.unmanaged':
file_path = ''
try:
file_path = location.get_filesystem_path(comp)
except Exception:
pass
file_path = os.path.normpath(file_path)
if not os.path.exists(file_path):
file_path = comp['component_locations'][0][
'resource_identifier'
]
file_path = os.path.normpath(file_path)
if not os.path.exists(file_path):
self.log.warning(
'In component: "{}" can\'t access filepath: "{}"'.format(
comp['name'], file_path
)
)
continue
elif src_location_name == 'ftrack.server':
download_url = location.get_url(comp)
file_name = '{}{}{}'.format(
asset_version['asset']['name'],
comp_name,
comp['file_type']
)
file_path = os.path.sep.join([temp_folder, file_name])
self.download_file(download_url, file_path)
if not file_path:
self.log.warning(
'In component: "{}" is invalid file path'.format(
comp['name']
)
)
continue
# Default location name value is ftrack.unmanaged
location_name = 'ftrack.unmanaged'
# Try to find location where component will be created
for name, keys in self.comp_location_mapping.items():
if comp_name in keys:
location_name = name
break
dst_location = self.get_dst_location(location_name)
# Metadata
metadata = {}
metadata.update(comp.get('metadata', {}))
component_data = {
"name": comp_name,
"metadata": metadata
}
data = {
'assettype_data': assettype_data,
'asset_data': asset_data,
'assetversion_data': assetversion_data,
'component_data': component_data,
'component_overwrite': False,
'thumbnail': is_thumbnail,
'component_location': dst_location,
'component_path': file_path
}
components_list.append(data)
return components_list
def asset_version_creation(self, dst_parent_entity, data, src_entity):
assettype_data = data['assettype_data']
self.log.debug("data: {}".format(data))
assettype_entity = self.dst_session.query(
self.query("AssetType", assettype_data)
).first()
# Create a new entity if none exists.
if not assettype_entity:
assettype_entity = self.dst_session.create(
"AssetType", assettype_data
)
self.dst_session.commit()
self.log.debug(
"Created new AssetType with data: ".format(assettype_data)
)
# Asset
# Get existing entity.
asset_data = {
"name": src_entity['asset']['name'],
"type": assettype_entity,
"parent": dst_parent_entity
}
asset_data.update(data.get("asset_data", {}))
asset_entity = self.dst_session.query(
self.query("Asset", asset_data)
).first()
self.log.info("asset entity: {}".format(asset_entity))
# Extracting metadata, and adding after entity creation. This is
# due to a ftrack_api bug where you can't add metadata on creation.
asset_metadata = asset_data.pop("metadata", {})
# Create a new entity if none exists.
info_msg = (
'Created new {entity_type} with data: {data}'
", metadata: {metadata}."
)
if not asset_entity:
asset_entity = self.dst_session.create("Asset", asset_data)
self.dst_session.commit()
self.log.debug(
info_msg.format(
entity_type="Asset",
data=asset_data,
metadata=asset_metadata
)
)
# Adding metadata
existing_asset_metadata = asset_entity["metadata"]
existing_asset_metadata.update(asset_metadata)
asset_entity["metadata"] = existing_asset_metadata
# AssetVersion
assetversion_data = {
'version': 0,
'asset': asset_entity
}
# NOTE task is skipped (can't be identified in other ftrack)
# if task:
# assetversion_data['task'] = task
# NOTE assetversion_data contains version number which is not correct
assetversion_data.update(data.get("assetversion_data", {}))
assetversion_entity = self.dst_session.query(
self.query("AssetVersion", assetversion_data)
).first()
# Extracting metadata, and adding after entity creation. This is
# due to a ftrack_api bug where you can't add metadata on creation.
assetversion_metadata = assetversion_data.pop("metadata", {})
# Create a new entity if none exists.
if not assetversion_entity:
assetversion_entity = self.dst_session.create(
"AssetVersion", assetversion_data
)
self.dst_session.commit()
self.log.debug(
info_msg.format(
entity_type="AssetVersion",
data=assetversion_data,
metadata=assetversion_metadata
)
)
# Check if the custom attribute of main Ftrack can be set
if self.id_key_dst not in assetversion_entity['custom_attributes']:
self.log.warning((
'Destination Asset Version does not have key "{}" in'
' Custom attributes'
).format(self.id_key_dst))
return
assetversion_entity['custom_attributes'][self.id_key_dst] = src_entity['id']
# Adding metadata
existing_assetversion_metadata = assetversion_entity["metadata"]
existing_assetversion_metadata.update(assetversion_metadata)
assetversion_entity["metadata"] = existing_assetversion_metadata
# Have to commit the version and asset, because the location can't
# determine the final path without them.
self.dst_session.commit()
# Component
# Get existing entity.
component_data = {
"name": "main",
"version": assetversion_entity
}
component_data.update(data.get("component_data", {}))
component_entity = self.dst_session.query(
self.query("Component", component_data)
).first()
component_overwrite = data.get("component_overwrite", False)
location = None
location_name = data.get("component_location", {}).get('name')
if location_name:
location = self.dst_session.query(
'Location where name is "{}"'.format(location_name)
).first()
if not location:
location = self.dst_session.pick_location()
# Overwrite existing component data if requested.
if component_entity and component_overwrite:
origin_location = self.dst_session.query(
'Location where name is "ftrack.origin"'
).one()
# Removing existing members from location
components = list(component_entity.get("members", []))
components += [component_entity,]
for component in components:
for loc in component["component_locations"]:
if location["id"] == loc["location_id"]:
location.remove_component(
component, recursive=False
)
# Deleting existing members on component entity
for member in component_entity.get("members", []):
self.dst_session.delete(member)
del member
self.dst_session.commit()
# Reset members in memory
if "members" in component_entity.keys():
component_entity["members"] = []
# Add components to origin location
try:
collection = clique.parse(data["component_path"])
except ValueError:
# Assume it's a single file
# Changing file type
name, ext = os.path.splitext(data["component_path"])
component_entity["file_type"] = ext
origin_location.add_component(
component_entity, data["component_path"]
)
else:
# Changing file type
component_entity["file_type"] = collection.format("{tail}")
# Create member components for sequence.
for member_path in collection:
size = 0
try:
size = os.path.getsize(member_path)
except OSError:
pass
name = collection.match(member_path).group("index")
member_data = {
"name": name,
"container": component_entity,
"size": size,
"file_type": os.path.splitext(member_path)[-1]
}
component = self.dst_session.create(
"FileComponent", member_data
)
origin_location.add_component(
component, member_path, recursive=False
)
component_entity["members"].append(component)
# Add components to location.
location.add_component(
component_entity, origin_location, recursive=True
)
data["component"] = component_entity
msg = "Overwriting Component with path: {0}, data: {1}, "
msg += "location: {2}"
self.log.info(
msg.format(
data["component_path"],
component_data,
location
)
)
# Extracting metadata, and adding after entity creation. This is
# due to a ftrack_api bug where you can't add metadata on creation.
component_metadata = component_data.pop("metadata", {})
# Create new component if none exists.
new_component = False
if not component_entity:
component_entity = assetversion_entity.create_component(
data["component_path"],
data=component_data,
location=location
)
data["component"] = component_entity
msg = (
"Created new Component with path: {}, data: {}"
", metadata: {}, location: {}"
)
self.log.info(msg.format(
data["component_path"],
component_data,
component_metadata,
location['name']
))
new_component = True
# Adding metadata
existing_component_metadata = component_entity["metadata"]
existing_component_metadata.update(component_metadata)
component_entity["metadata"] = existing_component_metadata
# if component_data['name'] = 'ftrackreview-mp4-mp4':
# assetversion_entity["thumbnail_id"]
# Setting assetversion thumbnail
if data.get("thumbnail", False):
assetversion_entity["thumbnail_id"] = component_entity["id"]
# Inform user about no changes to the database.
if (
component_entity and
not component_overwrite and
not new_component
):
data["component"] = component_entity
self.log.info(
"Found existing component, and no request to overwrite. "
"Nothing has been changed."
)
return
# Commit changes.
self.dst_session.commit()
return assetversion_entity['id']
def query(self, entitytype, data):
""" Generate a query expression from data supplied.
If a value is not a string, we'll add the id of the entity to the
query.
Args:
entitytype (str): The type of entity to query.
data (dict): The data to identify the entity.
Returns:
str: String query to use with "session.query"
"""
queries = []
if sys.version_info[0] < 3:
for key, value in data.iteritems():
if not isinstance(value, (basestring, int)):
self.log.info("value: {}".format(value))
if "id" in value.keys():
queries.append(
"{0}.id is \"{1}\"".format(key, value["id"])
)
else:
queries.append("{0} is \"{1}\"".format(key, value))
else:
for key, value in data.items():
if not isinstance(value, (str, int)):
self.log.info("value: {}".format(value))
if "id" in value.keys():
queries.append(
"{0}.id is \"{1}\"".format(key, value["id"])
)
else:
queries.append("{0} is \"{1}\"".format(key, value))
query = (
entitytype + " where " + " and ".join(queries)
)
return query
def download_file(self, url, path):
# Stream the download in chunks instead of loading it all into memory
response = requests.get(url, stream=True)
with open(path, 'wb') as file_stream:
    for chunk in response.iter_content(chunk_size=8192):
        file_stream.write(chunk)
def get_dst_location(self, name):
if name in self.dst_ftrack_locations:
return self.dst_ftrack_locations[name]
location = self.dst_session.query(
'Location where name is "{}"'.format(name)
).one()
self.dst_ftrack_locations[name] = location
return location
def register(session, **kw):
'''Register plugin. Called when used as a plugin.'''
if not isinstance(session, ftrack_api.session.Session):
return
SyncAssetVersions(session).register()
def main(arguments=None):
'''Set up logging and register action.'''
if arguments is None:
arguments = []
parser = argparse.ArgumentParser()
# Allow setting of logging level from arguments.
loggingLevels = {}
for level in (
logging.NOTSET, logging.DEBUG, logging.INFO, logging.WARNING,
logging.ERROR, logging.CRITICAL
):
loggingLevels[logging.getLevelName(level).lower()] = level
parser.add_argument(
'-v', '--verbosity',
help='Set the logging output verbosity.',
choices=loggingLevels.keys(),
default='info'
)
namespace = parser.parse_args(arguments)
# Set up basic logging
logging.basicConfig(level=loggingLevels[namespace.verbosity])
session = ftrack_api.Session()
register(session)
# Wait for events
logging.info(
'Registered actions and listening for events. Use Ctrl-C to abort.'
)
session.event_hub.wait()
if __name__ == '__main__':
raise SystemExit(main(sys.argv[1:]))
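For reference, a minimal sketch of what the query helper above generates, assuming an action instance is at hand; the entity name and id below are illustrative only:

# Illustrative use of SyncAssetVersions.query
action = SyncAssetVersions(session)
asset_data = {
    "name": "some_asset",
    "parent": {"id": "0123-4567"}  # entity-like value resolves to its id
}
print(action.query("Asset", asset_data))
# Asset where name is "some_asset" and parent.id is "0123-4567"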

View file

@@ -0,0 +1,230 @@
import os
import sys
import time
import datetime
import requests
import tempfile
from pypeapp import config
from pype.vendor import ftrack_api
from pype.ftrack import BaseAction
from pype.ftrack.lib.custom_db_connector import DbConnector, ClientSession
class SynchronizeNotes(BaseAction):
#: Action identifier.
identifier = 'sync.notes'
#: Action label.
label = 'Synchronize Notes'
#: Action description.
description = 'Synchronize notes from one Ftrack to another'
#: roles that are allowed to register this action
role_list = ['Administrator', 'Project Manager', 'Pypeclub']
db_con = DbConnector(
mongo_url=os.environ["AVALON_MONGO"],
database_name='notes_database',
table_name='notes_table'
)
id_key_src = 'fridge_ftrackID'
id_key_dst = 'kredenc_ftrackID'
def discover(self, session, entities, event):
''' Validation '''
if len(entities) == 0:
return False
for entity in entities:
if entity.entity_type.lower() != 'assetversion':
return False
return True
def launch(self, session, entities, event):
source_credentials = config.get_presets()['ftrack'].get(
'partnership_ftrack_cred', {}
)
self.session_source = ftrack_api.Session(
server_url=source_credentials.get('server_url'),
api_key=source_credentials.get('api_key'),
api_user=source_credentials.get('api_user'),
auto_connect_event_hub=True
)
self.session_for_components = ftrack_api.Session(
server_url=session.server_url,
api_key=session.api_key,
api_user=session.api_user,
auto_connect_event_hub=True
)
self.user = self.session_for_components.query(
'User where username is "{}"'.format(self.session.api_user)
).one()
self.db_con.install()
missing_id_entities = []
to_sync_data = []
for dst_entity in entities:
# Ignore entities without stored id from second ftrack
from_id = dst_entity['custom_attributes'].get(self.id_key_src)
if not from_id:
missing_id_entities.append(dst_entity.get('name', dst_entity))
continue
# Keep the source id in the tuple - the loop below needs it per entity
to_sync_data.append(
    (dst_entity.entity_type, dst_entity['id'], from_id)
)
for entity_type, entity_id, from_id in to_sync_data:
    av_query = 'AssetVersion where id is "{}"'.format(from_id)
    src_entity = self.session_source.query(av_query).one()
    src_notes = src_entity['notes']
    self.sync_notes(src_notes, (entity_type, entity_id))
self.db_con.uninstall()
if missing_id_entities:
self.log.info('Entities without stored Ftrack ID:')
self.log.info(missing_id_entities)
return True
def sync_notes(self, src_notes, dst_entity_data):
# Sort notes by date time
src_notes = sorted(src_notes, key=lambda note: note['date'])
for src_note in src_notes:
# Find if exists in DB
db_note_entity = self.db_con.find_one({
self.id_key_src: src_note['id']
})
# WARNING: expr `if not db_note_entity:` does not work!
if db_note_entity is None:
# Create note if not found in DB
dst_note_id = self.create_note(
src_note, dst_entity_data
)
# Add references to DB for next sync
item = {
self.id_key_dst: dst_note_id,
self.id_key_src: src_note['id'],
'content': src_note['content'],
'entity_type': 'Note',
'sync_date': str(datetime.date.today())
}
self.db_con.insert_one(item)
else:
dst_note_id = db_note_entity[self.id_key_dst]
replies = src_note.get('replies')
if not replies:
continue
self.sync_notes(replies, ('Note', dst_note_id))
def create_note(self, src_note, dst_entity_data):
# dst_entity_data - tuple(entity type, entity id)
dst_entity = self.session.query(
'{} where id is "{}"'.format(*dst_entity_data)
).one()
is_reply = False
if dst_entity.entity_type.lower() != 'note':
# Category
category = None
cat = src_note['category']
if cat:
cat_name = cat['name']
category = self.session.query(
'NoteCategory where name is "{}"'.format(cat_name)
).first()
new_note = dst_entity.create_note(
src_note['content'], self.user, category=category
)
else:
new_note = dst_entity.create_reply(
src_note['content'], self.user
)
is_reply = True
# QUESTION Should we change date to match source Ftrack?
new_note['date'] = src_note['date']
self.session.commit()
new_note_id = new_note['id']
# Components
if src_note['note_components']:
self.reupload_components(src_note, new_note_id)
# Bug in ftrack_api: when a reply is added, the session must be reset
if is_reply:
self.session.reset()
time.sleep(0.2)
return new_note_id
def reupload_components(self, src_note, dst_note_id):
# Download and collect source components
src_server_location = self.session_source.query(
'Location where name is "ftrack.server"'
).one()
temp_folder = tempfile.mkdtemp('note_components')
# Download and store paths to upload
paths_to_upload = []
count = 0
for note_component in src_note['note_components']:
count += 1
download_url = src_server_location.get_url(
note_component['component']
)
file_name = '{}{}{}'.format(
str(src_note['date'].format('YYYYMMDDHHmmss')),
"{:0>3}".format(count),
note_component['component']['file_type']
)
path = os.path.sep.join([temp_folder, file_name])
self.download_file(download_url, path)
paths_to_upload.append(path)
# Create downloaded components and add to note
dst_server_location = self.session_for_components.query(
'Location where name is "ftrack.server"'
).one()
for path in paths_to_upload:
component = self.session_for_components.create_component(
path,
data={'name': 'My file'},
location=dst_server_location
)
# Attach the component to the note.
self.session_for_components.create(
'NoteComponent',
{'component_id': component['id'], 'note_id': dst_note_id}
)
self.session_for_components.commit()
def download_file(self, url, path):
# Stream the download in chunks instead of loading it all into memory
response = requests.get(url, stream=True)
with open(path, 'wb') as file_stream:
    for chunk in response.iter_content(chunk_size=8192):
        file_stream.write(chunk)
def register(session, **kw):
'''Register plugin. Called when used as a plugin.'''
if not isinstance(session, ftrack_api.session.Session):
return
SynchronizeNotes(session).register()
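For reference, each synchronized note leaves a mapping document in the notes_database/notes_table collection; the find_one lookup on the source id in sync_notes is what keeps repeated runs idempotent. A sketch of the stored document with illustrative ids:

# Shape of the note-mapping document written by sync_notes
note_mapping = {
    'fridge_ftrackID': 'src-note-id',   # id_key_src - note id on source Ftrack
    'kredenc_ftrackID': 'dst-note-id',  # id_key_dst - note id on destination
    'content': 'Looks good!',
    'entity_type': 'Note',
    'sync_date': '2019-07-10'
}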

View file

@@ -0,0 +1,207 @@
"""
Wrapper around interactions with the database.

Copy of the io module in avalon-core.
- In this case it does not work as a singleton with api.Session!
"""
import os
import time
import errno
import shutil
import logging
import tempfile
import functools
import contextlib
import requests
# Third-party dependencies
import pymongo
from pymongo.client_session import ClientSession
def auto_reconnect(func):
"""Handling auto reconnect in 3 retry times"""
@functools.wraps(func)
def decorated(*args, **kwargs):
object = args[0]
for retry in range(3):
try:
return func(*args, **kwargs)
except pymongo.errors.AutoReconnect:
object.log.error("Reconnecting..")
time.sleep(0.1)
else:
raise
return decorated
class DbConnector:
log = logging.getLogger(__name__)
timeout = 1000
def __init__(self, mongo_url, database_name, table_name):
self._mongo_client = None
self._sentry_client = None
self._sentry_logging_handler = None
self._database = None
self._is_installed = False
self._mongo_url = mongo_url
self._database_name = database_name
self.active_table = table_name
def install(self):
"""Establish a persistent connection to the database"""
if self._is_installed:
return
logging.basicConfig()
self._mongo_client = pymongo.MongoClient(
self._mongo_url,
serverSelectionTimeoutMS=self.timeout
)
for retry in range(3):
try:
t1 = time.time()
self._mongo_client.server_info()
except Exception:
self.log.error("Retrying..")
time.sleep(1)
else:
break
else:
raise IOError(
"ERROR: Couldn't connect to %s in "
"less than %.3f ms" % (self._mongo_url, timeout)
)
self.log.info("Connected to %s, delay %.3f s" % (
self._mongo_url, time.time() - t1
))
self._database = self._mongo_client[self._database_name]
self._is_installed = True
def uninstall(self):
"""Close any connection to the database"""
try:
self._mongo_client.close()
except AttributeError:
pass
self._mongo_client = None
self._database = None
self._is_installed = False
def tables(self):
"""List available tables
Returns:
list of table names
"""
collection_names = self.collections()
for table_name in collection_names:
if table_name in ("system.indexes",):
continue
yield table_name
@auto_reconnect
def collections(self):
return self._database.collection_names()
@auto_reconnect
def insert_one(self, item, session=None):
assert isinstance(item, dict), "item must be of type <dict>"
return self._database[self.active_table].insert_one(
item,
session=session
)
@auto_reconnect
def insert_many(self, items, ordered=True, session=None):
# check if all items are valid
assert isinstance(items, list), "`items` must be of type <list>"
for item in items:
assert isinstance(item, dict), "`item` must be of type <dict>"
return self._database[self.active_table].insert_many(
items,
ordered=ordered,
session=session
)
@auto_reconnect
def find(self, filter, projection=None, sort=None, session=None):
return self._database[self.active_table].find(
filter=filter,
projection=projection,
sort=sort,
session=session
)
@auto_reconnect
def find_one(self, filter, projection=None, sort=None, session=None):
assert isinstance(filter, dict), "filter must be <dict>"
return self._database[self.active_table].find_one(
filter=filter,
projection=projection,
sort=sort,
session=session
)
@auto_reconnect
def replace_one(self, filter, replacement, session=None):
return self._database[self.active_table].replace_one(
filter, replacement,
session=session
)
@auto_reconnect
def update_one(self, filter, update, session=None):
return self._database[self.active_table].update_one(
filter, update,
session=session
)
@auto_reconnect
def update_many(self, filter, update, session=None):
return self._database[self.active_table].update_many(
filter, update,
session=session
)
@auto_reconnect
def distinct(self, *args, **kwargs):
return self._database[self.active_table].distinct(
*args, **kwargs
)
@auto_reconnect
def drop_collection(self, name_or_collection, session=None):
return self._database.drop_collection(
    name_or_collection,
    session=session
)
@auto_reconnect
def delete_one(self, filter, collation=None, session=None):
return self._database[self.active_table].delete_one(
filter,
collation=collation,
session=session
)
@auto_reconnect
def delete_many(self, filter, collation=None, session=None):
return self._database[self.active_table].delete_many(
filter,
collation=collation,
session=session
)
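A minimal usage sketch of DbConnector, mirroring how SynchronizeNotes wires it up; the database and table names are illustrative and AVALON_MONGO must point to a reachable MongoDB instance:

import os

db_con = DbConnector(
    mongo_url=os.environ["AVALON_MONGO"],
    database_name='notes_database',  # illustrative names
    table_name='notes_table'
)
db_con.install()
db_con.insert_one({'entity_type': 'Note', 'content': 'hello'})
found = db_con.find_one({'entity_type': 'Note'})
db_con.uninstall()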

View file

@@ -478,3 +478,35 @@ def get_presets_path():
path_items = [templates, 'presets']
filepath = os.path.sep.join(path_items)
return filepath
def filter_pyblish_plugins(plugins):
"""
This serves as a plugin filter / modifier for pyblish. It will load plugin
definitions from presets and filter out those that need to be excluded.
:param plugins: List of plugins produced by the :mod:`pyblish-base`
`discover()` method.
:type plugins: list
"""
from pypeapp import config
from pyblish import api
host = api.current_host()
# iterate over plugins
for plugin in plugins[:]:
try:
config_data = config.get_presets()['plugins'][host]["publish"][plugin.__name__] # noqa: E501
except KeyError:
continue
for option, value in config_data.items():
if option == "enabled" and value is False:
log.info('removing plugin {}'.format(plugin.__name__))
plugins.remove(plugin)
else:
log.info('setting {}:{} on plugin {}'.format(
option, value, plugin.__name__))
setattr(plugin, option, value)
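The preset lookup above implies a structure keyed by plugin class name; a hypothetical value of config.get_presets()['plugins'][host]['publish'] that exercises both branches:

# Hypothetical preset data - keys mirror the config_data access above
{
    "ValidateSomething": {"enabled": False},  # plugin gets removed
    "ExtractSomething": {"optional": True}    # attribute is set on the plugin
}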

View file

@@ -30,6 +30,7 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin):
}
def process(self, instance):
self.ftrack_locations = {}
self.log.debug('instance {}'.format(instance))
if instance.data.get('version'):
@@ -49,8 +50,9 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin):
self.log.debug('component {}'.format(comp))
if comp.get('thumbnail'):
-location = ft_session.query(
-    'Location where name is "ftrack.server"').one()
+location = self.get_ftrack_location(
+    'ftrack.server', ft_session
+)
component_data = {
"name": "thumbnail" # Default component name is "main".
}
@@ -76,8 +78,9 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin):
if not comp.get('frameRate'):
comp['frameRate'] = instance.context.data['fps']
-location = ft_session.query(
-    'Location where name is "ftrack.server"').one()
+location = self.get_ftrack_location(
+    'ftrack.server', ft_session
+)
component_data = {
# Default component name is "main".
"name": "ftrackreview-mp4",
@@ -91,28 +94,70 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin):
component_data = {
"name": comp['name']
}
-location = ft_session.query(
-    'Location where name is "ftrack.unmanaged"').one()
+location = self.get_ftrack_location(
+    'ftrack.unmanaged', ft_session
+)
comp['thumbnail'] = False
self.log.debug('location {}'.format(location))
-componentList.append({"assettype_data": {
-    "short": asset_type,
-},
-    "asset_data": {
-        "name": instance.data["subset"],
-    },
-    "assetversion_data": {
-        "version": version_number,
-    },
+component_item = {
+    "assettype_data": {
+        "short": asset_type,
+    },
+    "asset_data": {
+        "name": instance.data["subset"],
+    },
+    "assetversion_data": {
+        "version": version_number,
+    },
"component_data": component_data,
"component_path": comp['published_path'],
'component_location': location,
"component_overwrite": False,
"thumbnail": comp['thumbnail']
-}
-)
+}
+componentList.append(component_item)
# Create copy with ftrack.unmanaged location if thumb or prev
if comp.get('thumbnail') or comp.get('preview'):
unmanaged_loc = self.get_ftrack_location(
'ftrack.unmanaged', ft_session
)
component_data_src = component_data.copy()
name = component_data['name'] + '_src'
component_data_src['name'] = name
component_item_src = {
"assettype_data": {
"short": asset_type,
},
"asset_data": {
"name": instance.data["subset"],
},
"assetversion_data": {
"version": version_number,
},
"component_data": component_data_src,
"component_path": comp['published_path'],
'component_location': unmanaged_loc,
"component_overwrite": False,
"thumbnail": False
}
componentList.append(component_item_src)
self.log.debug('componentList: {}'.format(str(componentList)))
instance.data["ftrackComponentsList"] = componentList
def get_ftrack_location(self, name, session):
if name in self.ftrack_locations:
return self.ftrack_locations[name]
location = session.query(
'Location where name is "{}"'.format(name)
).one()
self.ftrack_locations[name] = location
return location
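To illustrate the new branch above: a component with comp['preview'] (or comp['thumbnail']) set now yields two componentList entries, the original one plus an '_src' copy routed to ftrack.unmanaged. A sketch with illustrative values:

# Sketch of the two items appended for a previewable component;
# server_location / unmanaged_location stand in for the queried Locations
[
    {"component_data": {"name": "ftrackreview-mp4"},
     "component_location": server_location},
    {"component_data": {"name": "ftrackreview-mp4_src"},
     "component_location": unmanaged_location}
]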

View file

@@ -8,7 +8,7 @@ class CreateModel(avalon.maya.Creator):
label = "Model"
family = "model"
icon = "cube"
defaults = ["Main", "Proxy"]
defaults = [ "_MD", "_HD", "_LD", "Main", "Proxy",]
def __init__(self, *args, **kwargs):
super(CreateModel, self).__init__(*args, **kwargs)

View file

@@ -38,7 +38,7 @@ class ValidateModelContent(pyblish.api.InstancePlugin):
content_instance = list(set(content_instance + descendants))
# Ensure only valid node types
-allowed = ('mesh', 'transform', 'nurbsCurve', 'nurbsSurface')
+allowed = ('mesh', 'transform', 'nurbsCurve', 'nurbsSurface', 'locator')
nodes = cmds.ls(content_instance, long=True)
valid = cmds.ls(content_instance, long=True, type=allowed)
invalid = set(nodes) - set(valid)

View file

@@ -4,13 +4,6 @@ import pyblish.api
import pype.api
import pype.maya.action
-SUFFIX_NAMING_TABLE = {'mesh': ["_GEO", "_GES", "_GEP", "_OSD"],
-                       'nurbsCurve': ["_CRV"],
-                       'nurbsSurface': ["_NRB"],
-                       None: ['_GRP']}
-ALLOW_IF_NOT_IN_SUFFIX_TABLE = True
class ValidateTransformNamingSuffix(pyblish.api.InstancePlugin):
"""Validates transform suffix based on the type of its children shapes.
@@ -23,6 +16,7 @@ class ValidateTransformNamingSuffix(pyblish.api.InstancePlugin):
_OSD (open subdiv smooth at rendertime)
- nurbsCurve: _CRV
- nurbsSurface: _NRB
- locator: _LOC
- null/group: _GRP
.. warning::
@@ -39,9 +33,16 @@ class ValidateTransformNamingSuffix(pyblish.api.InstancePlugin):
version = (0, 1, 0)
label = 'Suffix Naming Conventions'
actions = [pype.maya.action.SelectInvalidAction]
+SUFFIX_NAMING_TABLE = {'mesh': ["_GEO", "_GES", "_GEP", "_OSD"],
+                       'nurbsCurve': ["_CRV"],
+                       'nurbsSurface': ["_NRB"],
+                       'locator': ["_LOC"],
+                       None: ['_GRP']}
+ALLOW_IF_NOT_IN_SUFFIX_TABLE = True
@staticmethod
-def is_valid_name(node_name, shape_type):
+def is_valid_name(node_name, shape_type, SUFFIX_NAMING_TABLE, ALLOW_IF_NOT_IN_SUFFIX_TABLE):
"""Return whether node's name is correct.
The correctness for a transform's suffix is dependent on what
@@ -62,7 +63,7 @@ class ValidateTransformNamingSuffix(pyblish.api.InstancePlugin):
return False
@classmethod
-def get_invalid(cls, instance):
+def get_invalid(cls, instance, SUFFIX_NAMING_TABLE, ALLOW_IF_NOT_IN_SUFFIX_TABLE):
transforms = cmds.ls(instance, type='transform', long=True)
invalid = []
@@ -73,7 +74,7 @@ class ValidateTransformNamingSuffix(pyblish.api.InstancePlugin):
noIntermediate=True)
shape_type = cmds.nodeType(shapes[0]) if shapes else None
-if not cls.is_valid_name(transform, shape_type):
+if not cls.is_valid_name(transform, shape_type, SUFFIX_NAMING_TABLE, ALLOW_IF_NOT_IN_SUFFIX_TABLE):
invalid.append(transform)
return invalid
@@ -81,7 +82,8 @@ class ValidateTransformNamingSuffix(pyblish.api.InstancePlugin):
def process(self, instance):
"""Process all the nodes in the instance"""
-invalid = self.get_invalid(instance)
+invalid = self.get_invalid(instance, self.SUFFIX_NAMING_TABLE, self.ALLOW_IF_NOT_IN_SUFFIX_TABLE)
if invalid:
raise ValueError("Incorrectly named geometry "
"transforms: {0}".format(invalid))

View file

@@ -49,6 +49,8 @@ class ValidateTransformZero(pyblish.api.Validator):
invalid = []
for transform in transforms:
+if '_LOC' in transform:
+    continue
mat = cmds.xform(transform, q=1, matrix=True, objectSpace=True)
if not all(abs(x-y) < cls._tolerance
for x, y in zip(cls._identity, mat)):

View file

@@ -294,7 +294,7 @@ def burnins_from_data(input_path, output_path, data, overwrite=True):
if (
bi_func in ['frame_numbers', 'timecode'] and
-not start_frame
+start_frame is None
):
log.error(
'start_frame is not set in entered data!'

0
pype/tests/__init__.py Normal file
View file

80
pype/tests/lib.py Normal file
View file

@@ -0,0 +1,80 @@
import os
import sys
import shutil
import tempfile
import contextlib
import pyblish
import pyblish.cli
import pyblish.plugin
from pyblish.vendor import six
# Setup
HOST = 'python'
FAMILY = 'test.family'
REGISTERED = pyblish.plugin.registered_paths()
PACKAGEPATH = pyblish.lib.main_package_path()
ENVIRONMENT = os.environ.get("PYBLISHPLUGINPATH", "")
PLUGINPATH = os.path.join(PACKAGEPATH, '..', 'tests', 'plugins')
def setup():
pyblish.plugin.deregister_all_paths()
def setup_empty():
"""Disable all plug-ins"""
setup()
pyblish.plugin.deregister_all_plugins()
pyblish.plugin.deregister_all_paths()
pyblish.plugin.deregister_all_hosts()
pyblish.plugin.deregister_all_callbacks()
pyblish.plugin.deregister_all_targets()
pyblish.api.deregister_all_discovery_filters()
def teardown():
"""Restore previously REGISTERED paths"""
pyblish.plugin.deregister_all_paths()
for path in REGISTERED:
pyblish.plugin.register_plugin_path(path)
os.environ["PYBLISHPLUGINPATH"] = ENVIRONMENT
pyblish.api.deregister_all_plugins()
pyblish.api.deregister_all_hosts()
pyblish.api.deregister_all_discovery_filters()
pyblish.api.deregister_test()
pyblish.api.__init__()
@contextlib.contextmanager
def captured_stdout():
"""Temporarily reassign stdout to a local variable"""
try:
sys.stdout = six.StringIO()
yield sys.stdout
finally:
sys.stdout = sys.__stdout__
@contextlib.contextmanager
def captured_stderr():
"""Temporarily reassign stderr to a local variable"""
try:
sys.stderr = six.StringIO()
yield sys.stderr
finally:
sys.stderr = sys.__stderr__
@contextlib.contextmanager
def tempdir():
"""Provide path to temporary directory"""
try:
tempdir = tempfile.mkdtemp()
yield tempdir
finally:
shutil.rmtree(tempdir)
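A short usage sketch of the helpers above, as they would appear inside a test body:

import os

with captured_stdout() as out:
    print("hello")
assert out.getvalue() == "hello\n"

with tempdir() as path:
    assert os.path.isdir(path)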

View file

@@ -0,0 +1,60 @@
from . import lib
import pyblish.api
import pyblish.util
import pyblish.plugin
from pype.lib import filter_pyblish_plugins
import os
def test_pyblish_plugin_filter_modifier(printer, monkeypatch):
"""
Test if pyblish filter can filter and modify plugins on-the-fly.
"""
lib.setup_empty()
monkeypatch.setitem(os.environ, 'PYBLISHPLUGINPATH', '')
plugins = pyblish.api.registered_plugins()
printer("Test if we have no registered plugins")
assert len(plugins) == 0
paths = pyblish.api.registered_paths()
printer("Test if we have no registered plugin paths")
assert len(paths) == 0
class MyTestPlugin(pyblish.api.InstancePlugin):
my_test_property = 1
label = "Collect Renderable Camera(s)"
hosts = ["test"]
families = ["default"]
pyblish.api.register_host("test")
pyblish.api.register_plugin(MyTestPlugin)
pyblish.api.register_discovery_filter(filter_pyblish_plugins)
plugins = pyblish.api.discover()
printer("Test if only one plugin was discovered")
assert len(plugins) == 1
printer("Test if properties are modified correctly")
assert plugins[0].label == "loaded from preset"
assert plugins[0].families == ["changed", "by", "preset"]
assert plugins[0].optional is True
lib.teardown()
def test_pyblish_plugin_filter_removal(monkeypatch):
""" Test that plugin can be removed by filter """
lib.setup_empty()
monkeypatch.setitem(os.environ, 'PYBLISHPLUGINPATH', '')
plugins = pyblish.api.registered_plugins()
class MyTestRemovedPlugin(pyblish.api.InstancePlugin):
my_test_property = 1
label = "Collect Renderable Camera(s)"
hosts = ["test"]
families = ["default"]
pyblish.api.register_host("test")
pyblish.api.register_plugin(MyTestRemovedPlugin)
pyblish.api.register_discovery_filter(filter_pyblish_plugins)
plugins = pyblish.api.discover()
assert len(plugins) == 0
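The assertions in these tests imply matching preset entries for host "test"; a hypothetical publish preset that would satisfy both tests:

# Hypothetical presets entry for plugins/test/publish implied by the asserts
{
    "MyTestPlugin": {
        "label": "loaded from preset",
        "families": ["changed", "by", "preset"],
        "optional": True
    },
    "MyTestRemovedPlugin": {"enabled": False}
}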