Merge branch 'develop' into bugfix/OP-2737_Ftrack-circular-dependency-error

This commit is contained in:
Jakub Trllo 2022-02-23 12:11:46 +01:00
commit 0e24653f65
328 changed files with 41935 additions and 14 deletions

View file

@@ -0,0 +1,9 @@
from .ftrack_module import (
FtrackModule,
FTRACK_MODULE_DIR
)
__all__ = (
"FtrackModule",
"FTRACK_MODULE_DIR"
)

View file

@@ -0,0 +1,130 @@
import json
from openpype_modules.ftrack.lib import ServerAction
def clone_review_session(session, entity):
    # Create a copy of the client review session.
name = entity["name"]
review_session = session.create(
"ReviewSession",
{
"name": f"Clone of {name}",
"project": entity["project"]
}
)
# Add all invitees.
for invitee in entity["review_session_invitees"]:
        # Make sure email is a string, not None
email = invitee["email"] or ""
session.create(
"ReviewSessionInvitee",
{
"name": invitee["name"],
"email": email,
"review_session": review_session
}
)
# Add all objects to new review session.
for obj in entity["review_session_objects"]:
session.create(
"ReviewSessionObject",
{
"name": obj["name"],
"version": obj["version"],
"review_session": review_session,
"asset_version": obj["asset_version"]
}
)
session.commit()
class CloneReviewSession(ServerAction):
    '''Generate Client Review action.

    `label` a descriptive string identifying your action.
    `variant` To group actions together, give them the same
        label and specify a unique variant per action.
    `identifier` a unique identifier for your action.
    `description` a verbose descriptive text for your action.
    '''
label = "Clone Review Session"
variant = None
identifier = "clone-review-session"
description = None
settings_key = "clone_review_session"
def discover(self, session, entities, event):
'''Return true if we can handle the selected entities.
*session* is a `ftrack_api.Session` instance
*entities* is a list of tuples each containing the entity type and the
entity id.
        If the entity is hierarchical you will always get the entity
        type TypedContext; once retrieved through a get operation you
        will have the "real" entity type, e.g. Shot, Sequence
        or Asset Build.
*event* the unmodified original event
'''
is_valid = (
len(entities) == 1
and entities[0].entity_type == "ReviewSession"
)
if is_valid:
is_valid = self.valid_roles(session, entities, event)
return is_valid
def launch(self, session, entities, event):
'''Callback method for the custom action.
        Return either a bool (True if successful, False if the action
        failed) or a dictionary with the keys `message` and `success`;
        the message should be a string and will be displayed as feedback
        to the user, and success should be a bool, True if successful or
        False if the action failed.
*session* is a `ftrack_api.Session` instance
*entities* is a list of tuples each containing the entity type and the
entity id.
        If the entity is hierarchical you will always get the entity
        type TypedContext; once retrieved through a get operation you
        will have the "real" entity type, e.g. Shot, Sequence
        or Asset Build.
*event* the unmodified original event
'''
        user_id = event['source']['user']['id']
        user = session.query('User where id is ' + user_id).one()
job = session.create(
'Job',
{
'user': user,
'status': 'running',
'data': json.dumps({
'description': 'Cloning Review Session.'
})
}
)
session.commit()
        try:
            clone_review_session(session, entities[0])
            job["status"] = "done"
            session.commit()
        except Exception:
            session.rollback()
            job["status"] = "failed"
            session.commit()
            self.log.error(
                "Cloning review session failed", exc_info=True
            )
            return {
                "success": False,
                "message": "Cloning review session failed"
            }

        return {
            "success": True,
            "message": "Action completed successfully"
        }
def register(session):
'''Register action. Called when used as an event plugin.'''
CloneReviewSession(session).register()
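
# A minimal usage sketch (not part of this commit), assuming the standard
# ftrack event plugin contract: ftrack discovers this file and calls
# `register(session)`. The server URL and credentials below are placeholders.
#
#   import ftrack_api
#
#   session = ftrack_api.Session(
#       server_url="https://example.ftrackapp.com",
#       api_user="event-server-user",
#       api_key="...",
#       auto_connect_event_hub=True
#   )
#   register(session)
#   session.event_hub.wait()  # block and process incoming action events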

View file

@@ -0,0 +1,167 @@
from openpype_modules.ftrack.lib import ServerAction
class MultipleNotesServer(ServerAction):
"""Action adds same note for muliple AssetVersions.
Note is added to selection of AssetVersions. Note is created with user
who triggered the action. It is possible to define note category of note.
"""
identifier = "multiple.notes.server"
label = "Multiple Notes (Server)"
description = "Add same note to multiple Asset Versions"
_none_category = "__NONE__"
def discover(self, session, entities, event):
"""Show action only on AssetVersions."""
if not entities:
return False
for entity in entities:
if entity.entity_type.lower() != "assetversion":
return False
return True
def interface(self, session, entities, event):
event_source = event["source"]
user_info = event_source.get("user") or {}
user_id = user_info.get("id")
if not user_id:
return None
values = event["data"].get("values")
if values:
return None
note_label = {
"type": "label",
"value": "# Enter note: #"
}
note_value = {
"name": "note",
"type": "textarea"
}
category_label = {
"type": "label",
"value": "## Category: ##"
}
category_data = []
category_data.append({
"label": "- None -",
"value": self._none_category
})
all_categories = session.query(
"select id, name from NoteCategory"
).all()
for cat in all_categories:
category_data.append({
"label": cat["name"],
"value": cat["id"]
})
category_value = {
"type": "enumerator",
"name": "category",
"data": category_data,
"value": self._none_category
}
splitter = {
"type": "label",
"value": "---"
}
return [
note_label,
note_value,
splitter,
category_label,
category_value
]
def launch(self, session, entities, event):
if "values" not in event["data"]:
return None
values = event["data"]["values"]
        if not values or "note" not in values:
return False
# Get Note text
note_value = values["note"]
        if not note_value.strip():
return {
"success": True,
"message": "Note was not entered. Skipping"
}
# Get User
event_source = event["source"]
user_info = event_source.get("user") or {}
user_id = user_info.get("id")
user = None
if user_id:
user = session.query(
'User where id is "{}"'.format(user_id)
).first()
if not user:
return {
"success": False,
"message": "Couldn't get user information."
}
# Logging message preparation
# - username
username = user.get("username") or "N/A"
# - AssetVersion ids
asset_version_ids_str = ",".join([entity["id"] for entity in entities])
# Base note data
note_data = {
"content": note_value,
"author": user
}
# Get category
category_id = values["category"]
if category_id == self._none_category:
category_id = None
category_name = None
if category_id is not None:
category = session.query(
"select id, name from NoteCategory where id is \"{}\"".format(
category_id
)
).first()
if category:
note_data["category"] = category
category_name = category["name"]
category_msg = ""
if category_name:
category_msg = " with category: \"{}\"".format(category_name)
self.log.warning((
"Creating note{} as User \"{}\" on "
"AssetVersions: {} with value \"{}\""
).format(category_msg, username, asset_version_ids_str, note_value))
# Create notes for entities
for entity in entities:
new_note = session.create("Note", note_data)
entity["notes"].append(new_note)
session.commit()
return True
def register(session):
    '''Register plugin. Called when used as a plugin.'''
MultipleNotesServer(session).register()
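
# Illustrative sketch (not part of this commit): the `values` dictionary that
# `launch` reads from `event["data"]` mirrors the `name` fields of the items
# returned by `interface`. The note text and category id below are made up.
#
#   event["data"]["values"] == {
#       "note": "Please fix the flicker on the last 10 frames.",
#       "category": "0a1b2c3d-note-category-id"  # or "__NONE__"
#   }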

View file

@@ -0,0 +1,414 @@
import json
from avalon.api import AvalonMongoDB
from openpype.api import ProjectSettings
from openpype.lib import create_project
from openpype.settings import SaveWarningExc
from openpype_modules.ftrack.lib import (
ServerAction,
get_openpype_attr,
CUST_ATTR_AUTO_SYNC
)
class PrepareProjectServer(ServerAction):
"""Prepare project attributes in Anatomy."""
identifier = "prepare.project.server"
label = "OpenPype Admin"
variant = "- Prepare Project (Server)"
description = "Set basic attributes on the project"
settings_key = "prepare_project"
role_list = ["Pypeclub", "Administrator", "Project Manager"]
settings_key = "prepare_project"
item_splitter = {"type": "label", "value": "---"}
_keys_order = (
"fps",
"frameStart",
"frameEnd",
"handleStart",
"handleEnd",
"clipIn",
"clipOut",
"resolutionHeight",
"resolutionWidth",
"pixelAspect",
"applications",
"tools_env",
"library_project",
)
def discover(self, session, entities, event):
"""Show only on project."""
if (
len(entities) != 1
or entities[0].entity_type.lower() != "project"
):
return False
return self.valid_roles(session, entities, event)
def interface(self, session, entities, event):
if event['data'].get('values', {}):
return
# Inform user that this may take a while
self.show_message(event, "Preparing data... Please wait", True)
self.log.debug("Preparing data which will be shown")
self.log.debug("Loading custom attributes")
project_entity = entities[0]
project_name = project_entity["full_name"]
project_settings = ProjectSettings(project_name)
project_anatom_settings = project_settings["project_anatomy"]
root_items = self.prepare_root_items(project_anatom_settings)
ca_items, multiselect_enumerators = (
self.prepare_custom_attribute_items(project_anatom_settings)
)
self.log.debug("Heavy items are ready. Preparing last items group.")
title = "Prepare Project"
items = []
# Add root items
items.extend(root_items)
items.append(self.item_splitter)
items.append({
"type": "label",
"value": "<h3>Set basic Attributes:</h3>"
})
items.extend(ca_items)
# This item will be last before enumerators
# Set value of auto synchronization
auto_sync_value = project_entity["custom_attributes"].get(
CUST_ATTR_AUTO_SYNC, False
)
auto_sync_item = {
"name": CUST_ATTR_AUTO_SYNC,
"type": "boolean",
"value": auto_sync_value,
"label": "AutoSync to Avalon"
}
# Add autosync attribute
items.append(auto_sync_item)
# Add enumerator items at the end
for item in multiselect_enumerators:
items.append(item)
return {
"items": items,
"title": title
}
def prepare_root_items(self, project_anatom_settings):
self.log.debug("Root items preparation begins.")
root_items = []
root_items.append({
"type": "label",
"value": "<h3>Check your Project root settings</h3>"
})
root_items.append({
"type": "label",
"value": (
"<p><i>NOTE: Roots are <b>crutial</b> for path filling"
" (and creating folder structure).</i></p>"
)
})
root_items.append({
"type": "label",
"value": (
"<p><i>WARNING: Do not change roots on running project,"
" that <b>will cause workflow issues</b>.</i></p>"
)
})
empty_text = "Enter root path here..."
roots_entity = project_anatom_settings["roots"]
for root_name, root_entity in roots_entity.items():
root_items.append(self.item_splitter)
root_items.append({
"type": "label",
"value": "Root: \"{}\"".format(root_name)
})
for platform_name, value_entity in root_entity.items():
root_items.append({
"label": platform_name,
"name": "__root__{}__{}".format(root_name, platform_name),
"type": "text",
"value": value_entity.value,
"empty_text": empty_text
})
root_items.append({
"type": "hidden",
"name": "__rootnames__",
"value": json.dumps(list(roots_entity.keys()))
})
self.log.debug("Root items preparation ended.")
return root_items
def _attributes_to_set(self, project_anatom_settings):
attributes_to_set = {}
attribute_values_by_key = {}
for key, entity in project_anatom_settings["attributes"].items():
attribute_values_by_key[key] = entity.value
cust_attrs, hier_cust_attrs = get_openpype_attr(self.session, True)
for attr in hier_cust_attrs:
key = attr["key"]
if key.startswith("avalon_"):
continue
attributes_to_set[key] = {
"label": attr["label"],
"object": attr,
"default": attribute_values_by_key.get(key)
}
for attr in cust_attrs:
if attr["entity_type"].lower() != "show":
continue
key = attr["key"]
if key.startswith("avalon_"):
continue
attributes_to_set[key] = {
"label": attr["label"],
"object": attr,
"default": attribute_values_by_key.get(key)
}
# Sort by label
attributes_to_set = dict(sorted(
attributes_to_set.items(),
key=lambda x: x[1]["label"]
))
return attributes_to_set
def prepare_custom_attribute_items(self, project_anatom_settings):
items = []
multiselect_enumerators = []
attributes_to_set = self._attributes_to_set(project_anatom_settings)
self.log.debug("Preparing interface for keys: \"{}\"".format(
str([key for key in attributes_to_set])
))
attribute_keys = set(attributes_to_set.keys())
keys_order = []
for key in self._keys_order:
if key in attribute_keys:
keys_order.append(key)
attribute_keys = attribute_keys - set(keys_order)
for key in sorted(attribute_keys):
keys_order.append(key)
for key in keys_order:
in_data = attributes_to_set[key]
attr = in_data["object"]
# initial item definition
item = {
"name": key,
"label": in_data["label"]
}
# cust attr type - may have different visualization
type_name = attr["type"]["name"].lower()
easy_types = ["text", "boolean", "date", "number"]
easy_type = False
if type_name in easy_types:
easy_type = True
elif type_name == "enumerator":
attr_config = json.loads(attr["config"])
attr_config_data = json.loads(attr_config["data"])
if attr_config["multiSelect"] is True:
multiselect_enumerators.append(self.item_splitter)
multiselect_enumerators.append({
"type": "label",
"value": "<h3>{}</h3>".format(in_data["label"])
})
default = in_data["default"]
names = []
for option in sorted(
attr_config_data, key=lambda x: x["menu"]
):
name = option["value"]
new_name = "__{}__{}".format(key, name)
names.append(new_name)
item = {
"name": new_name,
"type": "boolean",
"label": "- {}".format(option["menu"])
}
if default:
if isinstance(default, (list, tuple)):
if name in default:
item["value"] = True
else:
if name == default:
item["value"] = True
multiselect_enumerators.append(item)
multiselect_enumerators.append({
"type": "hidden",
"name": "__hidden__{}".format(key),
"value": json.dumps(names)
})
else:
easy_type = True
item["data"] = attr_config_data
else:
                self.log.warning((
                    "Custom attribute \"{}\" has unsupported type \"{}\"."
                    " Skipping it."
                ).format(key, type_name))
items.append({
"type": "label",
"value": (
"!!! Can't handle Custom attritubte type \"{}\""
" (key: \"{}\")"
).format(type_name, key)
})
if easy_type:
item["type"] = type_name
# default value in interface
default = in_data["default"]
if default is not None:
item["value"] = default
items.append(item)
return items, multiselect_enumerators
def launch(self, session, entities, event):
in_data = event["data"].get("values")
if not in_data:
return
root_values = {}
root_key = "__root__"
for key in tuple(in_data.keys()):
if key.startswith(root_key):
_key = key[len(root_key):]
root_values[_key] = in_data.pop(key)
root_names = in_data.pop("__rootnames__", None)
root_data = {}
for root_name in json.loads(root_names):
root_data[root_name] = {}
for key, value in tuple(root_values.items()):
prefix = "{}__".format(root_name)
if not key.startswith(prefix):
continue
_key = key[len(prefix):]
root_data[root_name][_key] = value
# Find hidden items for multiselect enumerators
keys_to_process = []
for key in in_data:
if key.startswith("__hidden__"):
keys_to_process.append(key)
self.log.debug("Preparing data for Multiselect Enumerators")
enumerators = {}
for key in keys_to_process:
new_key = key.replace("__hidden__", "")
enumerator_items = in_data.pop(key)
enumerators[new_key] = json.loads(enumerator_items)
# find values set for multiselect enumerator
for key, enumerator_items in enumerators.items():
in_data[key] = []
name = "__{}__".format(key)
for item in enumerator_items:
value = in_data.pop(item)
if value is True:
new_key = item.replace(name, "")
in_data[key].append(new_key)
self.log.debug("Setting Custom Attribute values")
project_entity = entities[0]
project_name = project_entity["full_name"]
# Try to find project document
dbcon = AvalonMongoDB()
dbcon.install()
dbcon.Session["AVALON_PROJECT"] = project_name
project_doc = dbcon.find_one({
"type": "project"
})
# Create project if is not available
# - creation is required to be able set project anatomy and attributes
if not project_doc:
project_code = project_entity["name"]
self.log.info("Creating project \"{} [{}]\"".format(
project_name, project_code
))
create_project(project_name, project_code, dbcon=dbcon)
dbcon.uninstall()
project_settings = ProjectSettings(project_name)
project_anatomy_settings = project_settings["project_anatomy"]
project_anatomy_settings["roots"] = root_data
custom_attribute_values = {}
attributes_entity = project_anatomy_settings["attributes"]
for key, value in in_data.items():
if key not in attributes_entity:
custom_attribute_values[key] = value
else:
attributes_entity[key] = value
try:
project_settings.save()
except SaveWarningExc as exc:
self.log.info("Few warnings happened during settings save:")
for warning in exc.warnings:
self.log.info(str(warning))
# Change custom attributes on project
if custom_attribute_values:
for key, value in custom_attribute_values.items():
project_entity["custom_attributes"][key] = value
self.log.debug("- Key \"{}\" set to \"{}\"".format(key, value))
session.commit()
return True
def register(session):
    '''Register plugin. Called when used as a plugin.'''
PrepareProjectServer(session).register()
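
# Illustrative sketch (not part of this commit) of the hidden-field round trip
# used above: `interface` serializes root names into the "__rootnames__" item
# and `launch` rebuilds per-root, per-platform paths from the flat form
# values. The root name and paths below are made up.
#
#   in_data = {
#       "__rootnames__": '["work"]',
#       "__root__work__windows": "P:/projects",
#       "__root__work__linux": "/mnt/projects",
#       "__root__work__darwin": "/Volumes/projects",
#   }
#   # launch() turns this into:
#   # root_data == {
#   #     "work": {
#   #         "windows": "P:/projects",
#   #         "linux": "/mnt/projects",
#   #         "darwin": "/Volumes/projects"
#   #     }
#   # }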

View file

@@ -0,0 +1,61 @@
from openpype_modules.ftrack.lib import ServerAction
class PrivateProjectDetectionAction(ServerAction):
"""Action helps to identify if does not have access to project."""
identifier = "server.missing.perm.private.project"
label = "Missing permissions"
description = (
"Main ftrack event server does not have access to this project."
)
def _discover(self, event):
"""Show action only if there is a selection in event data."""
entities = self._translate_event(event)
if entities:
return None
selection = event["data"].get("selection")
if not selection:
return None
return {
"items": [{
"label": self.label,
"variant": self.variant,
"description": self.description,
"actionIdentifier": self.discover_identifier,
"icon": self.icon,
}]
}
def _launch(self, event):
# Ignore if there are values in event data
# - somebody clicked on submit button
values = event["data"].get("values")
if values:
return None
title = "# Private project (missing permissions) #"
msg = (
"User ({}) or API Key used on Ftrack event server"
" does not have permissions to access this private project."
).format(self.session.api_user)
return {
"type": "form",
"title": "Missing permissions",
"items": [
{"type": "label", "value": title},
{"type": "label", "value": msg},
# Add hidden to be able detect if was clicked on submit
{"type": "hidden", "value": "1", "name": "hidden"}
],
"submit_button_label": "Got it"
}
def register(session):
    '''Register plugin. Called when used as a plugin.'''
PrivateProjectDetectionAction(session).register()

View file

@@ -0,0 +1,477 @@
import sys
import json
import collections
import ftrack_api
from openpype_modules.ftrack.lib import (
ServerAction,
query_custom_attributes
)
class PushHierValuesToNonHier(ServerAction):
"""Action push hierarchical custom attribute values to non hierarchical.
Hierarchical value is also pushed to their task entities.
Action has 3 configurable attributes:
- `role_list`: List of use roles that can discover the action.
- `interest_attributes`: Keys of custom attributes that will be looking
for to push values. Attribute key must have both custom attribute types
hierarchical and on specific object type (entity type).
- `interest_entity_types`: Entity types that will be in focus of pushing
hierarchical to object type's custom attribute.
EXAMPLE:
* Before action
|_ Project
|_ Shot1
- hierarchical custom attribute value: `frameStart`: 1001
- custom attribute for `Shot`: frameStart: 1
|_ Task1
- hierarchical custom attribute value: `frameStart`: 10
- custom attribute for `Task`: frameStart: 0
* After action
|_ Project
|_ Shot1
- hierarchical custom attribute value: `frameStart`: 1001
- custom attribute for `Shot`: frameStart: 1001
|_ Task1
- hierarchical custom attribute value: `frameStart`: 1001
- custom attribute for `Task`: frameStart: 1001
"""
identifier = "admin.push_hier_values_to_non_hier"
label = "OpenPype Admin"
variant = "- Push Hierarchical values To Non-Hierarchical"
entities_query_by_project = (
"select id, parent_id, object_type_id from TypedContext"
" where project_id is \"{}\""
)
    cust_attrs_query = (
        "select id, key, object_type_id, is_hierarchical, default"
        " from CustomAttributeConfiguration"
        " where key in ({}) and"
        " (object_type_id in ({}) or is_hierarchical is true)"
    )
# configurable
settings_key = "sync_hier_entity_attributes"
settings_enabled_key = "action_enabled"
def discover(self, session, entities, event):
""" Validation """
# Check if selection is valid
is_valid = False
for ent in event["data"]["selection"]:
# Ignore entities that are not tasks or projects
if ent["entityType"].lower() in ("task", "show"):
is_valid = True
break
if is_valid:
is_valid = self.valid_roles(session, entities, event)
return is_valid
def launch(self, session, entities, event):
self.log.debug("{}: Creating job".format(self.label))
user_entity = session.query(
"User where id is {}".format(event["source"]["user"]["id"])
).one()
job = session.create("Job", {
"user": user_entity,
"status": "running",
"data": json.dumps({
"description": "Propagation of Frame attribute values to task."
})
})
session.commit()
try:
result = self.propagate_values(session, event, entities)
except Exception as exc:
msg = "Pushing Custom attribute values to task Failed"
self.log.warning(msg, exc_info=True)
session.rollback()
description = "{} (Download traceback)".format(msg)
self.add_traceback_to_job(
job, session, sys.exc_info(), description
)
return {
"success": False,
"message": "Error: {}".format(str(exc))
}
job["status"] = "done"
session.commit()
return result
def attrs_configurations(self, session, object_ids, interest_attributes):
attrs = session.query(self.cust_attrs_query.format(
self.join_query_keys(interest_attributes),
self.join_query_keys(object_ids)
)).all()
        output = {}
        hierarchical = []
        for attr in attrs:
            if attr["is_hierarchical"]:
                hierarchical.append(attr)
                continue
            obj_id = attr["object_type_id"]
            if obj_id not in output:
                output[obj_id] = []
            output[obj_id].append(attr)
        return output, hierarchical
def propagate_values(self, session, event, selected_entities):
ftrack_settings = self.get_ftrack_settings(
session, event, selected_entities
)
action_settings = (
ftrack_settings[self.settings_frack_subkey][self.settings_key]
)
project_entity = self.get_project_from_entity(selected_entities[0])
selected_ids = [entity["id"] for entity in selected_entities]
self.log.debug("Querying project's entities \"{}\".".format(
project_entity["full_name"]
))
interest_entity_types = tuple(
ent_type.lower()
for ent_type in action_settings["interest_entity_types"]
)
all_object_types = session.query("ObjectType").all()
object_types_by_low_name = {
object_type["name"].lower(): object_type
for object_type in all_object_types
}
task_object_type = object_types_by_low_name["task"]
destination_object_types = [task_object_type]
for ent_type in interest_entity_types:
obj_type = object_types_by_low_name.get(ent_type)
if obj_type and obj_type not in destination_object_types:
destination_object_types.append(obj_type)
destination_object_type_ids = set(
obj_type["id"]
for obj_type in destination_object_types
)
interest_attributes = action_settings["interest_attributes"]
# Find custom attributes definitions
attrs_by_obj_id, hier_attrs = self.attrs_configurations(
session, destination_object_type_ids, interest_attributes
)
# Filter destination object types if they have any object specific
# custom attribute
for obj_id in tuple(destination_object_type_ids):
if obj_id not in attrs_by_obj_id:
destination_object_type_ids.remove(obj_id)
if not destination_object_type_ids:
# TODO report that there are not matching custom attributes
return {
"success": True,
"message": "Nothing has changed."
}
(
parent_id_by_entity_id,
filtered_entities
) = self.all_hierarchy_entities(
session,
selected_ids,
project_entity,
destination_object_type_ids
)
self.log.debug("Preparing whole project hierarchy by ids.")
entities_by_obj_id = {
obj_id: []
for obj_id in destination_object_type_ids
}
self.log.debug("Filtering Task entities.")
focus_entity_ids = []
non_task_entity_ids = []
task_entity_ids = []
for entity in filtered_entities:
entity_id = entity["id"]
focus_entity_ids.append(entity_id)
if entity.entity_type.lower() == "task":
task_entity_ids.append(entity_id)
else:
non_task_entity_ids.append(entity_id)
obj_id = entity["object_type_id"]
entities_by_obj_id[obj_id].append(entity_id)
if not non_task_entity_ids:
return {
"success": True,
"message": "Nothing to do in your selection."
}
self.log.debug("Getting Hierarchical custom attribute values parents.")
hier_values_by_entity_id = self.get_hier_values(
session,
hier_attrs,
non_task_entity_ids,
parent_id_by_entity_id
)
self.log.debug("Setting parents' values to task.")
self.set_task_attr_values(
session,
hier_attrs,
task_entity_ids,
hier_values_by_entity_id,
parent_id_by_entity_id
)
self.log.debug("Setting values to entities themselves.")
self.push_values_to_entities(
session,
entities_by_obj_id,
attrs_by_obj_id,
hier_values_by_entity_id
)
return True
def all_hierarchy_entities(
self,
session,
selected_ids,
project_entity,
destination_object_type_ids
):
selected_ids = set(selected_ids)
filtered_entities = []
parent_id_by_entity_id = {}
# Query is simple if project is in selection
if project_entity["id"] in selected_ids:
entities = session.query(
self.entities_query_by_project.format(project_entity["id"])
).all()
for entity in entities:
if entity["object_type_id"] in destination_object_type_ids:
filtered_entities.append(entity)
entity_id = entity["id"]
parent_id_by_entity_id[entity_id] = entity["parent_id"]
return parent_id_by_entity_id, filtered_entities
        # Query selection and get its link to be able to calculate
        # parent relations
entities_with_link = session.query((
"select id, parent_id, link, object_type_id"
" from TypedContext where id in ({})"
).format(self.join_query_keys(selected_ids))).all()
        # Process and store queried entities and store all lowest entities
        # to `bottom_ids`
        # - bottom_ids should not contain two ids where one is a parent of
        #   the other
bottom_ids = set(selected_ids)
for entity in entities_with_link:
if entity["object_type_id"] in destination_object_type_ids:
filtered_entities.append(entity)
children_id = None
for idx, item in enumerate(reversed(entity["link"])):
item_id = item["id"]
if idx > 0 and item_id in bottom_ids:
bottom_ids.remove(item_id)
if children_id is not None:
parent_id_by_entity_id[children_id] = item_id
children_id = item_id
# Query all children of selection per one hierarchy level and process
# their data the same way as selection but parents are already known
chunk_size = 100
while bottom_ids:
child_entities = []
# Query entities in chunks
entity_ids = list(bottom_ids)
for idx in range(0, len(entity_ids), chunk_size):
_entity_ids = entity_ids[idx:idx + chunk_size]
child_entities.extend(session.query((
"select id, parent_id, object_type_id from"
" TypedContext where parent_id in ({})"
).format(self.join_query_keys(_entity_ids))).all())
bottom_ids = set()
for entity in child_entities:
entity_id = entity["id"]
parent_id_by_entity_id[entity_id] = entity["parent_id"]
bottom_ids.add(entity_id)
if entity["object_type_id"] in destination_object_type_ids:
filtered_entities.append(entity)
return parent_id_by_entity_id, filtered_entities
def get_hier_values(
self,
session,
hier_attrs,
focus_entity_ids,
parent_id_by_entity_id
):
all_ids_with_parents = set()
for entity_id in focus_entity_ids:
all_ids_with_parents.add(entity_id)
_entity_id = entity_id
while True:
parent_id = parent_id_by_entity_id.get(_entity_id)
if (
not parent_id
or parent_id in all_ids_with_parents
):
break
all_ids_with_parents.add(parent_id)
_entity_id = parent_id
hier_attr_ids = tuple(hier_attr["id"] for hier_attr in hier_attrs)
hier_attrs_key_by_id = {
hier_attr["id"]: hier_attr["key"]
for hier_attr in hier_attrs
}
values_per_entity_id = {}
for entity_id in all_ids_with_parents:
values_per_entity_id[entity_id] = {}
for key in hier_attrs_key_by_id.values():
values_per_entity_id[entity_id][key] = None
values = query_custom_attributes(
session, all_ids_with_parents, hier_attr_ids, True
)
for item in values:
entity_id = item["entity_id"]
key = hier_attrs_key_by_id[item["configuration_id"]]
values_per_entity_id[entity_id][key] = item["value"]
output = {}
for entity_id in focus_entity_ids:
output[entity_id] = {}
for key in hier_attrs_key_by_id.values():
value = values_per_entity_id[entity_id][key]
tried_ids = set()
if value is None:
tried_ids.add(entity_id)
_entity_id = entity_id
while value is None:
parent_id = parent_id_by_entity_id.get(_entity_id)
if not parent_id:
break
value = values_per_entity_id[parent_id][key]
if value is not None:
break
_entity_id = parent_id
tried_ids.add(parent_id)
if value is not None:
for ent_id in tried_ids:
values_per_entity_id[ent_id][key] = value
output[entity_id][key] = value
return output
def set_task_attr_values(
self,
session,
hier_attrs,
task_entity_ids,
hier_values_by_entity_id,
parent_id_by_entity_id
):
hier_attr_id_by_key = {
attr["key"]: attr["id"]
for attr in hier_attrs
}
for task_id in task_entity_ids:
            parent_id = parent_id_by_entity_id.get(task_id)
parent_values = hier_values_by_entity_id.get(parent_id)
if not parent_values:
continue
hier_values_by_entity_id[task_id] = {}
for key, value in parent_values.items():
hier_values_by_entity_id[task_id][key] = value
configuration_id = hier_attr_id_by_key[key]
_entity_key = collections.OrderedDict([
("configuration_id", configuration_id),
("entity_id", task_id)
])
session.recorded_operations.push(
ftrack_api.operation.UpdateEntityOperation(
"ContextCustomAttributeValue",
_entity_key,
"value",
ftrack_api.symbol.NOT_SET,
value
)
)
if len(session.recorded_operations) > 100:
session.commit()
session.commit()
def push_values_to_entities(
self,
session,
entities_by_obj_id,
attrs_by_obj_id,
hier_values_by_entity_id
):
for object_id, entity_ids in entities_by_obj_id.items():
attrs = attrs_by_obj_id.get(object_id)
if not attrs or not entity_ids:
continue
for attr in attrs:
for entity_id in entity_ids:
value = (
hier_values_by_entity_id
.get(entity_id, {})
.get(attr["key"])
)
if value is None:
continue
_entity_key = collections.OrderedDict([
("configuration_id", attr["id"]),
("entity_id", entity_id)
])
session.recorded_operations.push(
ftrack_api.operation.UpdateEntityOperation(
"ContextCustomAttributeValue",
_entity_key,
"value",
ftrack_api.symbol.NOT_SET,
value
)
)
if len(session.recorded_operations) > 100:
session.commit()
session.commit()
def register(session):
PushHierValuesToNonHier(session).register()
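
# A minimal sketch (not part of this commit) of the batched-commit pattern the
# action uses: custom attribute values are written through low-level
# `UpdateEntityOperation`s and flushed roughly every 100 operations to keep
# commits small. The ids and the value are placeholders.
#
#   import collections
#   import ftrack_api
#
#   entity_key = collections.OrderedDict([
#       ("configuration_id", "<custom-attribute-configuration-id>"),
#       ("entity_id", "<task-or-shot-id>")
#   ])
#   session.recorded_operations.push(
#       ftrack_api.operation.UpdateEntityOperation(
#           "ContextCustomAttributeValue",
#           entity_key,
#           "value",
#           ftrack_api.symbol.NOT_SET,  # old value is unknown
#           1001                        # new value to push
#       )
#   )
#   if len(session.recorded_operations) > 100:
#       session.commit()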

View file

@@ -0,0 +1,212 @@
import time
import sys
import json
import traceback
from openpype_modules.ftrack.lib import ServerAction
from openpype_modules.ftrack.lib.avalon_sync import SyncEntitiesFactory
class SyncToAvalonServer(ServerAction):
"""
Synchronizing data action - from Ftrack to Avalon DB
Stores all information about entity.
- Name(string) - Most important information = identifier of entity
- Parent(ObjectId) - Avalon Project Id, if entity is not project itself
- Data(dictionary):
- VisualParent(ObjectId) - Avalon Id of parent asset
- Parents(array of string) - All parent names except project
- Tasks(dictionary of dictionaries) - Tasks on asset
- FtrackId(string)
- entityType(string) - entity's type on Ftrack
* All Custom attributes in group 'Avalon'
- custom attributes that start with 'avalon_' are skipped
* These information are stored for entities in whole project.
Avalon ID of asset is stored to Ftrack
- Custom attribute 'avalon_mongo_id'.
- action IS NOT creating this Custom attribute if doesn't exist
- run 'Create Custom Attributes' action
- or do it manually (Not recommended)
"""
#: Action identifier.
identifier = "sync.to.avalon.server"
#: Action label.
label = "OpenPype Admin"
variant = "- Sync To Avalon (Server)"
#: Action description.
description = "Send data from Ftrack to Avalon"
role_list = {"Pypeclub", "Administrator", "Project Manager"}
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.entities_factory = SyncEntitiesFactory(self.log, self.session)
def discover(self, session, entities, event):
""" Validation """
# Check if selection is valid
for ent in event["data"]["selection"]:
# Ignore entities that are not tasks or projects
if ent["entityType"].lower() in ["show", "task"]:
return True
return False
def launch(self, session, in_entities, event):
self.log.debug("{}: Creating job".format(self.label))
user_entity = session.query(
"User where id is {}".format(event["source"]["user"]["id"])
).one()
job_entity = session.create("Job", {
"user": user_entity,
"status": "running",
"data": json.dumps({
"description": "Sync to avalon is running..."
})
})
session.commit()
project_entity = self.get_project_from_entity(in_entities[0])
project_name = project_entity["full_name"]
try:
result = self.synchronization(event, project_name)
except Exception:
self.log.error(
"Synchronization failed due to code error", exc_info=True
)
description = "Sync to avalon Crashed (Download traceback)"
self.add_traceback_to_job(
job_entity, session, sys.exc_info(), description
)
msg = "An error has happened during synchronization"
title = "Synchronization report ({}):".format(project_name)
items = []
items.append({
"type": "label",
"value": "# {}".format(msg)
})
items.append({
"type": "label",
"value": (
"<p>Download report from job for more information.</p>"
)
})
report = {}
try:
report = self.entities_factory.report()
except Exception:
pass
_items = report.get("items") or []
if _items:
items.append(self.entities_factory.report_splitter)
items.extend(_items)
self.show_interface(items, title, event, submit_btn_label="Ok")
return {"success": True, "message": msg}
job_entity["status"] = "done"
job_entity["data"] = json.dumps({
"description": "Sync to avalon finished."
})
session.commit()
return result
def synchronization(self, event, project_name):
time_start = time.time()
self.show_message(event, "Synchronization - Preparing data", True)
try:
output = self.entities_factory.launch_setup(project_name)
if output is not None:
return output
time_1 = time.time()
self.entities_factory.set_cutom_attributes()
time_2 = time.time()
# This must happen before all filtering!!!
self.entities_factory.prepare_avalon_entities(project_name)
time_3 = time.time()
self.entities_factory.filter_by_ignore_sync()
time_4 = time.time()
self.entities_factory.duplicity_regex_check()
time_5 = time.time()
self.entities_factory.prepare_ftrack_ent_data()
time_6 = time.time()
self.entities_factory.synchronize()
time_7 = time.time()
self.log.debug(
"*** Synchronization finished ***"
)
self.log.debug(
"preparation <{}>".format(time_1 - time_start)
)
self.log.debug(
"set_cutom_attributes <{}>".format(time_2 - time_1)
)
self.log.debug(
"prepare_avalon_entities <{}>".format(time_3 - time_2)
)
self.log.debug(
"filter_by_ignore_sync <{}>".format(time_4 - time_3)
)
self.log.debug(
"duplicity_regex_check <{}>".format(time_5 - time_4)
)
self.log.debug(
"prepare_ftrack_ent_data <{}>".format(time_6 - time_5)
)
self.log.debug(
"synchronize <{}>".format(time_7 - time_6)
)
self.log.debug(
"* Total time: {}".format(time_7 - time_start)
)
report = self.entities_factory.report()
if report and report.get("items"):
default_title = "Synchronization report ({}):".format(
project_name
)
self.show_interface(
items=report["items"],
title=report.get("title", default_title),
event=event
)
return {
"success": True,
"message": "Synchronization Finished"
}
finally:
try:
self.entities_factory.dbcon.uninstall()
except Exception:
pass
try:
self.entities_factory.session.close()
except Exception:
pass
def register(session):
    '''Register plugin. Called when used as a plugin.'''
SyncToAvalonServer(session).register()

View file

@@ -0,0 +1,54 @@
from openpype_modules.ftrack.lib import BaseEvent
from openpype_modules.ftrack.lib.avalon_sync import CUST_ATTR_ID_KEY
from openpype_modules.ftrack.event_handlers_server.event_sync_to_avalon import (
SyncToAvalonEvent
)
class DelAvalonIdFromNew(BaseEvent):
    '''
    This event removes the AvalonId from custom attributes of new entities.

    Result:
    - 'Copy->Pasted' entities won't have the same AvalonID as the source
      entity.

    Priority of this event must be less than the SyncToAvalon event.
    '''
priority = SyncToAvalonEvent.priority - 1
ignore_me = True
def launch(self, session, event):
created = []
entities = event['data']['entities']
for entity in entities:
try:
entity_id = entity['entityId']
if entity.get('action', None) == 'add':
id_dict = entity['changes']['id']
if id_dict['new'] is not None and id_dict['old'] is None:
created.append(id_dict['new'])
elif (
entity.get('action', None) == 'update' and
CUST_ATTR_ID_KEY in entity['keys'] and
entity_id in created
):
ftrack_entity = session.get(
self._get_entity_type(entity),
entity_id
)
cust_attrs = ftrack_entity["custom_attributes"]
if cust_attrs[CUST_ATTR_ID_KEY]:
cust_attrs[CUST_ATTR_ID_KEY] = ""
session.commit()
except Exception:
session.rollback()
continue
def register(session):
    '''Register plugin. Called when used as a plugin.'''
DelAvalonIdFromNew(session).register()

View file

@@ -0,0 +1,188 @@
from openpype_modules.ftrack.lib import BaseEvent
class FirstVersionStatus(BaseEvent):
# WARNING Priority MUST be higher
# than handler in `event_version_to_task_statuses.py`
priority = 200
keys_enum = ["task", "task_type"]
# This should be set with presets
task_status_map = []
# EXAMPLE of `task_status_map`
__example_status_map__ = [{
# `key` specify where to look for name (is enumerator of `keys_enum`)
# By default is set to "task"
"key": "task",
        # specification of the name
"name": "compositing",
# Status to set to the asset version
"status": "Blocking"
}]
def register(self, *args, **kwargs):
result = super(FirstVersionStatus, self).register(*args, **kwargs)
valid_task_status_map = []
for item in self.task_status_map:
key = (item.get("key") or "task").lower()
name = (item.get("name") or "").lower()
status = (item.get("status") or "").lower()
if not (key and name and status):
self.log.warning((
"Invalid item in Task -> Status mapping. {}"
).format(str(item)))
continue
            if key not in self.keys_enum:
                expected_msg = ""
                last_key_idx = len(self.keys_enum) - 1
                for idx, enum_key in enumerate(self.keys_enum):
                    if idx == 0:
                        joining_part = "`{}`"
                    elif idx == last_key_idx:
                        joining_part = "or `{}`"
                    else:
                        joining_part = ", `{}`"
                    expected_msg += joining_part.format(enum_key)

                self.log.warning((
                    "Invalid key `{}`. Expected: {}."
                ).format(key, expected_msg))
                continue
valid_task_status_map.append({
"key": key,
"name": name,
"status": status
})
self.task_status_map = valid_task_status_map
if not self.task_status_map:
self.log.warning((
"Event handler `{}` don't have set presets."
).format(self.__class__.__name__))
return result
def launch(self, session, event):
"""Set task's status for first created Asset Version."""
if not self.task_status_map:
return
entities_info = self.filter_event_ents(event)
if not entities_info:
return
entity_ids = []
for entity_info in entities_info:
entity_ids.append(entity_info["entityId"])
joined_entity_ids = ",".join(
["\"{}\"".format(entity_id) for entity_id in entity_ids]
)
asset_versions = session.query(
"AssetVersion where id in ({})".format(joined_entity_ids)
).all()
asset_version_statuses = None
project_schema = None
for asset_version in asset_versions:
task_entity = asset_version["task"]
found_item = None
for item in self.task_status_map:
if (
item["key"] == "task" and
task_entity["name"].lower() != item["name"]
):
continue
elif (
item["key"] == "task_type" and
task_entity["type"]["name"].lower() != item["name"]
):
continue
found_item = item
break
if not found_item:
continue
if project_schema is None:
project_schema = task_entity["project"]["project_schema"]
# Get all available statuses for Task
if asset_version_statuses is None:
statuses = project_schema.get_statuses("AssetVersion")
# map lowered status name with it's object
asset_version_statuses = {
status["name"].lower(): status for status in statuses
}
ent_path = "/".join(
[ent["name"] for ent in task_entity["link"]] +
[
str(asset_version["asset"]["name"]),
str(asset_version["version"])
]
)
new_status = asset_version_statuses.get(found_item["status"])
            if not new_status:
                self.log.warning(
                    "AssetVersion doesn't have status `{}`.".format(
                        found_item["status"]
                    )
                )
                continue
try:
asset_version["status"] = new_status
session.commit()
self.log.debug("[ {} ] Status updated to [ {} ]".format(
ent_path, new_status['name']
))
except Exception:
session.rollback()
self.log.warning(
"[ {} ] Status couldn't be set.".format(ent_path),
exc_info=True
)
def filter_event_ents(self, event):
filtered_ents = []
for entity in event["data"].get("entities", []):
# Care only about add actions
if entity.get("action") != "add":
continue
# Filter AssetVersions
if entity["entityType"] != "assetversion":
continue
entity_changes = entity.get("changes") or {}
# Check if version of Asset Version is `1`
version_num = entity_changes.get("version", {}).get("new")
if version_num != 1:
continue
            # Skip if the Asset Version doesn't have a task
task_id = entity_changes.get("taskid", {}).get("new")
if not task_id:
continue
filtered_ents.append(entity)
return filtered_ents
def register(session):
    '''Register plugin. Called when used as a plugin.'''
FirstVersionStatus(session).register()

View file

@@ -0,0 +1,438 @@
import collections
from openpype_modules.ftrack.lib import BaseEvent
class NextTaskUpdate(BaseEvent):
"""Change status on following Task.
Handler cares about changes of status id on Task entities. When new status
has state "Done" it will try to find following task and change it's status.
It is expected following task should be marked as "Ready to work on".
By default all tasks with same task type must have state "Done" to do any
changes. And when all tasks with same task type are "done" it will change
statuses on all tasks with next task type.
# Enable
Handler is based on settings, handler can be turned on/off with "enabled"
key.
```
"enabled": True
```
# Status mappings
Must have set mappings of new statuses:
```
"mapping": {
# From -> To
"Not Ready": "Ready",
...
}
```
If current status name is not found then status change is skipped.
# Ignored statuses
    These status names are skipped as if they were in "Done" state. The best
    example is the status "Omitted", which in most cases has the "Blocked"
    state but will never change.
```
"ignored_statuses": [
"Omitted",
...
]
```
    # Change statuses sorted by task type and by name
    Changes the behaviour of task type batching. Statuses are not checked
    and set in batches of tasks per Task type but one by one. Tasks are
    sorted by Task type and then by name; if all previous tasks are "Done",
    the following one will change status.
```
"name_sorting": True
```
"""
settings_key = "next_task_update"
def launch(self, session, event):
'''Propagates status from version to task when changed'''
filtered_entities_info = self.filter_entities_info(event)
if not filtered_entities_info:
return
for project_id, entities_info in filtered_entities_info.items():
self.process_by_project(session, event, project_id, entities_info)
def filter_entities_info(self, event):
        # Filter if event contains relevant data
entities_info = event["data"].get("entities")
if not entities_info:
return
filtered_entities_info = collections.defaultdict(list)
for entity_info in entities_info:
# Care only about Task `entity_type`
if entity_info.get("entity_type") != "Task":
continue
# Care only about changes of status
changes = entity_info.get("changes") or {}
statusid_changes = changes.get("statusid") or {}
if (
statusid_changes.get("new") is None
or statusid_changes.get("old") is None
):
continue
project_id = None
for parent_info in reversed(entity_info["parents"]):
if parent_info["entityType"] == "show":
project_id = parent_info["entityId"]
break
if project_id:
filtered_entities_info[project_id].append(entity_info)
return filtered_entities_info
def process_by_project(self, session, event, project_id, _entities_info):
project_name = self.get_project_name_from_event(
session, event, project_id
)
# Load settings
project_settings = self.get_project_settings_from_event(
event, project_name
)
# Load status mapping from presets
event_settings = (
project_settings["ftrack"]["events"][self.settings_key]
)
if not event_settings["enabled"]:
self.log.debug("Project \"{}\" has disabled {}.".format(
project_name, self.__class__.__name__
))
return
statuses = session.query("Status").all()
entities_info = self.filter_by_status_state(_entities_info, statuses)
if not entities_info:
return
parent_ids = set()
event_task_ids_by_parent_id = collections.defaultdict(list)
for entity_info in entities_info:
parent_id = entity_info["parentId"]
entity_id = entity_info["entityId"]
parent_ids.add(parent_id)
event_task_ids_by_parent_id[parent_id].append(entity_id)
# From now it doesn't matter what was in event data
task_entities = session.query(
(
"select id, type_id, status_id, parent_id, link from Task"
" where parent_id in ({})"
).format(self.join_query_keys(parent_ids))
).all()
tasks_by_parent_id = collections.defaultdict(list)
for task_entity in task_entities:
tasks_by_parent_id[task_entity["parent_id"]].append(task_entity)
project_entity = session.get("Project", project_id)
self.set_next_task_statuses(
session,
tasks_by_parent_id,
event_task_ids_by_parent_id,
statuses,
project_entity,
event_settings
)
def filter_by_status_state(self, entities_info, statuses):
statuses_by_id = {
status["id"]: status
for status in statuses
}
# Care only about tasks having status with state `Done`
filtered_entities_info = []
for entity_info in entities_info:
status_id = entity_info["changes"]["statusid"]["new"]
status_entity = statuses_by_id[status_id]
if status_entity["state"]["name"].lower() == "done":
filtered_entities_info.append(entity_info)
return filtered_entities_info
def set_next_task_statuses(
self,
session,
tasks_by_parent_id,
event_task_ids_by_parent_id,
statuses,
project_entity,
event_settings
):
statuses_by_id = {
status["id"]: status
for status in statuses
}
# Lower ignored statuses
ignored_statuses = set(
status_name.lower()
for status_name in event_settings["ignored_statuses"]
)
# Lower both key and value of mapped statuses
mapping = {
status_from.lower(): status_to.lower()
for status_from, status_to in event_settings["mapping"].items()
}
# Should use name sorting or not
name_sorting = event_settings["name_sorting"]
# Collect task type ids from changed entities
task_type_ids = set()
for task_entities in tasks_by_parent_id.values():
for task_entity in task_entities:
task_type_ids.add(task_entity["type_id"])
        statuses_by_obj_id = self.statuses_for_tasks(
            task_type_ids, project_entity
        )
sorted_task_type_ids = self.get_sorted_task_type_ids(session)
for parent_id, _task_entities in tasks_by_parent_id.items():
task_entities_by_type_id = collections.defaultdict(list)
for _task_entity in _task_entities:
type_id = _task_entity["type_id"]
task_entities_by_type_id[type_id].append(_task_entity)
event_ids = set(event_task_ids_by_parent_id[parent_id])
if name_sorting:
# Sort entities by name
self.sort_by_name_task_entities_by_type(
task_entities_by_type_id
)
# Sort entities by type id
sorted_task_entities = []
for type_id in sorted_task_type_ids:
task_entities = task_entities_by_type_id.get(type_id)
if task_entities:
sorted_task_entities.extend(task_entities)
next_tasks = self.next_tasks_with_name_sorting(
sorted_task_entities,
event_ids,
statuses_by_id,
ignored_statuses
)
else:
next_tasks = self.next_tasks_with_type_sorting(
task_entities_by_type_id,
sorted_task_type_ids,
event_ids,
statuses_by_id,
ignored_statuses
)
for task_entity in next_tasks:
if task_entity["status"]["state"]["name"].lower() == "done":
continue
task_status = statuses_by_id[task_entity["status_id"]]
old_status_name = task_status["name"].lower()
if old_status_name in ignored_statuses:
continue
                new_status_name = mapping.get(old_status_name)
                if not new_status_name:
                    self.log.debug(
                        "Didn't find a mapping for status \"{}\".".format(
                            task_status["name"]
                        )
                    )
                    continue
ent_path = "/".join(
[ent["name"] for ent in task_entity["link"]]
)
type_id = task_entity["type_id"]
                new_status = statuses_by_obj_id[type_id].get(new_status_name)
if new_status is None:
self.log.warning((
"\"{}\" does not have available status name \"{}\""
                    ).format(ent_path, new_status_name))
continue
try:
task_entity["status_id"] = new_status["id"]
session.commit()
self.log.info(
"\"{}\" updated status to \"{}\"".format(
ent_path, new_status["name"]
)
)
except Exception:
session.rollback()
self.log.warning(
"\"{}\" status couldnt be set to \"{}\"".format(
ent_path, new_status["name"]
),
exc_info=True
)
def next_tasks_with_name_sorting(
self,
sorted_task_entities,
event_ids,
statuses_by_id,
ignored_statuses,
):
        # Task entities arrive already sorted by task type and name
use_next_task = False
next_tasks = []
for task_entity in sorted_task_entities:
if task_entity["id"] in event_ids:
event_ids.remove(task_entity["id"])
use_next_task = True
continue
if not use_next_task:
continue
task_status = statuses_by_id[task_entity["status_id"]]
low_status_name = task_status["name"].lower()
if low_status_name in ignored_statuses:
continue
next_tasks.append(task_entity)
use_next_task = False
if not event_ids:
break
return next_tasks
def check_statuses_done(
self, task_entities, ignored_statuses, statuses_by_id
):
all_are_done = True
for task_entity in task_entities:
task_status = statuses_by_id[task_entity["status_id"]]
low_status_name = task_status["name"].lower()
if low_status_name in ignored_statuses:
continue
low_state_name = task_status["state"]["name"].lower()
if low_state_name != "done":
all_are_done = False
break
return all_are_done
def next_tasks_with_type_sorting(
self,
task_entities_by_type_id,
sorted_task_type_ids,
event_ids,
statuses_by_id,
ignored_statuses
):
        # `use_next_tasks` marks that the next not-done batch of tasks
        # (by task type) should get a new status
next_tasks = []
use_next_tasks = False
for type_id in sorted_task_type_ids:
if type_id not in task_entities_by_type_id:
continue
task_entities = task_entities_by_type_id[type_id]
# Check if any task was in event
event_id_in_tasks = False
for task_entity in task_entities:
task_id = task_entity["id"]
if task_id in event_ids:
event_ids.remove(task_id)
event_id_in_tasks = True
if use_next_tasks:
# Check if next tasks are not done already
all_in_type_done = self.check_statuses_done(
task_entities, ignored_statuses, statuses_by_id
)
if all_in_type_done:
continue
next_tasks.extend(task_entities)
use_next_tasks = False
if not event_ids:
break
if not event_id_in_tasks:
continue
all_in_type_done = self.check_statuses_done(
task_entities, ignored_statuses, statuses_by_id
)
use_next_tasks = all_in_type_done
if all_in_type_done:
continue
if not event_ids:
break
use_next_tasks = False
return next_tasks
def statuses_for_tasks(self, task_type_ids, project_entity):
project_schema = project_entity["project_schema"]
output = {}
for task_type_id in task_type_ids:
statuses = project_schema.get_statuses("Task", task_type_id)
output[task_type_id] = {
status["name"].lower(): status
for status in statuses
}
return output
def get_sorted_task_type_ids(self, session):
types_by_order = collections.defaultdict(list)
        for _type in session.query("Type").all():
            sort_order = _type.get("sort")
            if sort_order is not None:
                types_by_order[sort_order].append(_type["id"])

        types = []
        for sort_order in sorted(types_by_order.keys()):
            types.extend(types_by_order[sort_order])
        return types
@staticmethod
def sort_by_name_task_entities_by_type(task_entities_by_type_id):
_task_entities_by_type_id = {}
for type_id, task_entities in task_entities_by_type_id.items():
# Store tasks by name
task_entities_by_name = {}
for task_entity in task_entities:
task_name = task_entity["name"]
task_entities_by_name[task_name] = task_entity
# Store task entities by sorted names
sorted_task_entities = []
for task_name in sorted(task_entities_by_name.keys()):
task_entity = task_entities_by_name[task_name]
sorted_task_entities.append(task_entity)
# Store result to temp dictionary
_task_entities_by_type_id[type_id] = sorted_task_entities
# Override values in source object
for type_id, value in _task_entities_by_type_id.items():
task_entities_by_type_id[type_id] = value
def register(session):
NextTaskUpdate(session).register()
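
# Illustrative sketch (not part of this commit): a combined settings block for
# this handler, merging the fragments shown in the class docstring. The status
# names are examples only.
#
#   "next_task_update": {
#       "enabled": True,
#       "mapping": {
#           "Not Ready": "Ready"
#       },
#       "ignored_statuses": [
#           "Omitted"
#       ],
#       "name_sorting": False
#   }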

View file

@@ -0,0 +1,704 @@
import collections
import datetime
import ftrack_api
from openpype_modules.ftrack.lib import (
BaseEvent,
query_custom_attributes
)
class PushFrameValuesToTaskEvent(BaseEvent):
# Ignore event handler by default
cust_attrs_query = (
"select id, key, object_type_id, is_hierarchical, default"
" from CustomAttributeConfiguration"
" where key in ({}) and"
" (object_type_id in ({}) or is_hierarchical is true)"
)
_cached_task_object_id = None
_cached_interest_object_ids = None
_cached_user_id = None
_cached_changes = []
_max_delta = 30
settings_key = "sync_hier_entity_attributes"
def session_user_id(self, session):
if self._cached_user_id is None:
user = session.query(
"User where username is \"{}\"".format(session.api_user)
).one()
self._cached_user_id = user["id"]
return self._cached_user_id
def launch(self, session, event):
filtered_entities_info = self.filter_entities_info(event)
if not filtered_entities_info:
return
for project_id, entities_info in filtered_entities_info.items():
self.process_by_project(session, event, project_id, entities_info)
def filter_entities_info(self, event):
        # Filter if event contains relevant data
entities_info = event["data"].get("entities")
if not entities_info:
return
entities_info_by_project_id = {}
for entity_info in entities_info:
# Care only about tasks
if entity_info.get("entityType") != "task":
continue
            # Care only about entities with changes
changes = entity_info.get("changes")
if not changes:
continue
# Get project id from entity info
project_id = None
for parent_item in reversed(entity_info["parents"]):
if parent_item["entityType"] == "show":
project_id = parent_item["entityId"]
break
if project_id is None:
continue
# Skip `Task` entity type if parent didn't change
if entity_info["entity_type"].lower() == "task":
if (
"parent_id" not in changes
or changes["parent_id"]["new"] is None
):
continue
if project_id not in entities_info_by_project_id:
entities_info_by_project_id[project_id] = []
entities_info_by_project_id[project_id].append(entity_info)
return entities_info_by_project_id
def process_by_project(self, session, event, project_id, entities_info):
project_name = self.get_project_name_from_event(
session, event, project_id
)
# Load settings
project_settings = self.get_project_settings_from_event(
event, project_name
)
# Load status mapping from presets
event_settings = (
project_settings
["ftrack"]
["events"]
["sync_hier_entity_attributes"]
)
# Skip if event is not enabled
if not event_settings["enabled"]:
self.log.debug("Project \"{}\" has disabled {}".format(
project_name, self.__class__.__name__
))
return
interest_attributes = event_settings["interest_attributes"]
if not interest_attributes:
            self.log.info((
                "Project \"{}\" does not have filled 'interest_attributes',"
                " skipping."
            ).format(project_name))
return
interest_entity_types = event_settings["interest_entity_types"]
if not interest_entity_types:
            self.log.info((
                "Project \"{}\" does not have filled 'interest_entity_types',"
                " skipping."
            ).format(project_name))
return
interest_attributes = set(interest_attributes)
interest_entity_types = set(interest_entity_types)
# Separate value changes and task parent changes
_entities_info = []
task_parent_changes = []
for entity_info in entities_info:
if entity_info["entity_type"].lower() == "task":
task_parent_changes.append(entity_info)
else:
_entities_info.append(entity_info)
entities_info = _entities_info
# Filter entities info with changes
interesting_data, changed_keys_by_object_id = self.filter_changes(
session, event, entities_info, interest_attributes
)
if not interesting_data and not task_parent_changes:
return
# Prepare object types
object_types = session.query("select id, name from ObjectType").all()
object_types_by_name = {}
for object_type in object_types:
name_low = object_type["name"].lower()
object_types_by_name[name_low] = object_type
        # NOTE it would be nice to check that `interesting_data` does not
        #   contain value changes of tasks that were created or moved
        #   - but finding that out is complex
if interesting_data:
self.process_attribute_changes(
session, object_types_by_name,
interesting_data, changed_keys_by_object_id,
interest_entity_types, interest_attributes
)
if task_parent_changes:
self.process_task_parent_change(
session, object_types_by_name, task_parent_changes,
interest_entity_types, interest_attributes
)
def process_task_parent_change(
self, session, object_types_by_name, task_parent_changes,
interest_entity_types, interest_attributes
):
"""Push custom attribute values if task parent has changed.
Parent is changed if task is created or if is moved under different
entity. We don't care about all task changes only about those that
have it's parent in interest types (from settings).
Tasks hierarchical value should be unset or set based on parents
real hierarchical value and non hierarchical custom attribute value
should be set to hierarchical value.
"""
# Store task ids which were created or moved under parent with entity
# type defined in settings (interest_entity_types).
task_ids = set()
# Store parent ids of matching task ids
matching_parent_ids = set()
# Store entity ids of all entities to be able to query hierarchical
# values.
whole_hierarchy_ids = set()
# Store parent id of each entity id
parent_id_by_entity_id = {}
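# Illustrative shape of entity_info["parents"] walked below
# (hypothetical ids; the entity itself is the first item and the
# project ("show") is the last):
# [
#     {"entityId": "<task-id>", "entityType": "task", "entity_type": "Task"},
#     {"entityId": "<shot-id>", "entityType": "task", "entity_type": "Shot"},
#     {"entityId": "<project-id>", "entityType": "show", "entity_type": "Project"}
# ]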
for entity_info in task_parent_changes:
# Ignore entities with fewer than 2 parents
# NOTE entity itself is also part of "parents" value
parents = entity_info.get("parents") or []
if len(parents) < 2:
continue
parent_info = parents[1]
# Check if parent has entity type we care about.
if parent_info["entity_type"] not in interest_entity_types:
continue
task_ids.add(entity_info["entityId"])
matching_parent_ids.add(parent_info["entityId"])
# Store the whole hierarchy of the task entity
prev_id = None
for item in parents:
item_id = item["entityId"]
whole_hierarchy_ids.add(item_id)
if prev_id is None:
prev_id = item_id
continue
parent_id_by_entity_id[prev_id] = item_id
if item["entityType"] == "show":
break
prev_id = item_id
# Just skip if nothing is interesting for our settings
if not matching_parent_ids:
return
# Query object type ids of parent ids for custom attribute
# definitions query
entities = session.query(
"select object_type_id from TypedContext where id in ({})".format(
self.join_query_keys(matching_parent_ids)
)
)
# Prepare task object id
task_object_id = object_types_by_name["task"]["id"]
# All object ids for which we're querying custom attribute definitions
object_type_ids = set()
object_type_ids.add(task_object_id)
for entity in entities:
object_type_ids.add(entity["object_type_id"])
attrs_by_obj_id, hier_attrs = self.attrs_configurations(
session, object_type_ids, interest_attributes
)
# Skip if no task attributes are available
task_attrs = attrs_by_obj_id.get(task_object_id)
if not task_attrs:
return
# Skip attributes that are not in both hierarchical and nonhierarchical
# TODO be able to push values if hierarchical is available
for key in interest_attributes:
if key not in hier_attrs:
task_attrs.pop(key, None)
elif key not in task_attrs:
hier_attrs.pop(key)
# Skip if nothing remained
if not task_attrs:
return
# Do some preparations for custom attribute values query
attr_key_by_id = {}
nonhier_id_by_key = {}
hier_attr_ids = []
for key, attr_id in hier_attrs.items():
attr_key_by_id[attr_id] = key
hier_attr_ids.append(attr_id)
conf_ids = list(hier_attr_ids)
task_conf_ids = []
for key, attr_id in task_attrs.items():
attr_key_by_id[attr_id] = key
nonhier_id_by_key[key] = attr_id
conf_ids.append(attr_id)
task_conf_ids.append(attr_id)
# Query custom attribute values
# - result does not contain values for all entities, only those
#   returned by the query to the ftrack server
result = query_custom_attributes(
session, list(hier_attr_ids), whole_hierarchy_ids, True
)
result.extend(
query_custom_attributes(
session, task_conf_ids, whole_hierarchy_ids, False
)
)
# Prepare variables where results will be stored
# - hierarchical values should not contain any attribute value by
#   default
hier_values_by_entity_id = {
entity_id: {}
for entity_id in whole_hierarchy_ids
}
# - real values of custom attributes
values_by_entity_id = {
entity_id: {
attr_id: None
for attr_id in conf_ids
}
for entity_id in whole_hierarchy_ids
}
for item in result:
attr_id = item["configuration_id"]
entity_id = item["entity_id"]
value = item["value"]
values_by_entity_id[entity_id][attr_id] = value
if attr_id in hier_attr_ids and value is not None:
hier_values_by_entity_id[entity_id][attr_id] = value
# Prepare values for all task entities
# - go through all parents and store the first value found
# - store None for those that are already known not to have any
#   value set
for task_id in tuple(task_ids):
for attr_id in hier_attr_ids:
entity_ids = []
value = None
entity_id = task_id
while value is None:
entity_value = hier_values_by_entity_id[entity_id]
if attr_id in entity_value:
value = entity_value[attr_id]
if value is None:
break
if value is None:
entity_ids.append(entity_id)
entity_id = parent_id_by_entity_id.get(entity_id)
if entity_id is None:
break
for entity_id in entity_ids:
hier_values_by_entity_id[entity_id][attr_id] = value
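# Illustrative walk (hypothetical hierarchy): for a task under
# Shot -> Episode where fps is set only on the Episode, the loop
# above climbs task -> shot -> episode, finds fps there and
# back-fills it for the task and shot entries in
# `hier_values_by_entity_id`.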
# Prepare changes to commit
changes = []
for task_id in tuple(task_ids):
parent_id = parent_id_by_entity_id[task_id]
for attr_id in hier_attr_ids:
attr_key = attr_key_by_id[attr_id]
nonhier_id = nonhier_id_by_key[attr_key]
# Real value of hierarchical attribute on parent
# - If it is None then it should be unset
real_parent_value = values_by_entity_id[parent_id][attr_id]
# Current hierarchical value of a task
# - Will be compared to real parent value
hier_value = hier_values_by_entity_id[task_id][attr_id]
# Parent value that can be inherited from its parent entity
parent_value = hier_values_by_entity_id[parent_id][attr_id]
# Task value of nonhierarchical custom attribute
nonhier_value = values_by_entity_id[task_id][nonhier_id]
if real_parent_value != hier_value:
changes.append({
"new_value": real_parent_value,
"attr_id": attr_id,
"entity_id": task_id,
"attr_key": attr_key
})
if parent_value != nonhier_value:
changes.append({
"new_value": parent_value,
"attr_id": nonhier_id,
"entity_id": task_id,
"attr_key": attr_key
})
self._commit_changes(session, changes)
def _commit_changes(self, session, changes):
uncommitted_changes = False
for idx, item in enumerate(changes):
new_value = item["new_value"]
attr_id = item["attr_id"]
entity_id = item["entity_id"]
attr_key = item["attr_key"]
entity_key = collections.OrderedDict()
entity_key["configuration_id"] = attr_id
entity_key["entity_id"] = entity_id
self._cached_changes.append({
"attr_key": attr_key,
"entity_id": entity_id,
"value": new_value,
"time": datetime.datetime.now()
})
if new_value is None:
op = ftrack_api.operation.DeleteEntityOperation(
"CustomAttributeValue",
entity_key
)
else:
op = ftrack_api.operation.UpdateEntityOperation(
"ContextCustomAttributeValue",
entity_key,
"value",
ftrack_api.symbol.NOT_SET,
new_value
)
session.recorded_operations.push(op)
self.log.info((
"Changing Custom Attribute \"{}\" to value"
" \"{}\" on entity: {}"
).format(attr_key, new_value, entity_id))
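# Commit in batches of 20 operations so a single failing value
# rolls back only its batch instead of the whole queue of
# recorded operations.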
if (idx + 1) % 20 == 0:
uncommitted_changes = False
try:
session.commit()
except Exception:
session.rollback()
self.log.warning(
"Changing of values failed.", exc_info=True
)
else:
uncommitted_changes = True
if uncommitted_changes:
try:
session.commit()
except Exception:
session.rollback()
self.log.warning("Changing of values failed.", exc_info=True)
def process_attribute_changes(
self, session, object_types_by_name,
interesting_data, changed_keys_by_object_id,
interest_entity_types, interest_attributes
):
# Prepare task object id
task_object_id = object_types_by_name["task"]["id"]
# Collect object type ids based on settings
interest_object_ids = []
for entity_type in interest_entity_types:
_entity_type = entity_type.lower()
object_type = object_types_by_name.get(_entity_type)
if not object_type:
self.log.warning("Couldn't find object type \"{}\"".format(
entity_type
))
continue
interest_object_ids.append(object_type["id"])
# Query entities by filtered data and object ids
entities = self.get_entities(
session, interesting_data, interest_object_ids
)
if not entities:
return
# Pop not found entities from interesting data
entity_ids = set(
entity["id"]
for entity in entities
)
for entity_id in tuple(interesting_data.keys()):
if entity_id not in entity_ids:
interesting_data.pop(entity_id)
# Add task object type to list
attr_obj_ids = list(interest_object_ids)
attr_obj_ids.append(task_object_id)
attrs_by_obj_id, hier_attrs = self.attrs_configurations(
session, attr_obj_ids, interest_attributes
)
task_attrs = attrs_by_obj_id.get(task_object_id)
changed_keys = set()
# Skip keys that are not in both hierarchical and type specific
for object_id, keys in changed_keys_by_object_id.items():
changed_keys |= set(keys)
object_id_attrs = attrs_by_obj_id.get(object_id)
for key in keys:
if key not in hier_attrs:
attrs_by_obj_id[object_id].pop(key)
continue
if (
(not object_id_attrs or key not in object_id_attrs)
and (not task_attrs or key not in task_attrs)
):
hier_attrs.pop(key)
# Clean up empty values
for key, value in tuple(attrs_by_obj_id.items()):
if not value:
attrs_by_obj_id.pop(key)
if not attrs_by_obj_id:
self.log.warning((
"Custom Attributes {} are not created"
" for entity types: {}"
).format(
self.join_query_keys(interest_attributes),
self.join_query_keys(interest_entity_types)
))
return
# Prepare task entities
task_entities = []
# If task entity does not contain changed attribute then skip
if task_attrs:
task_entities = self.get_task_entities(session, interesting_data)
task_entity_ids = set()
parent_id_by_task_id = {}
for task_entity in task_entities:
task_id = task_entity["id"]
task_entity_ids.add(task_id)
parent_id_by_task_id[task_id] = task_entity["parent_id"]
self.finalize_attribute_changes(
session, interesting_data,
changed_keys, attrs_by_obj_id, hier_attrs,
task_entity_ids, parent_id_by_task_id
)
def finalize_attribute_changes(
self, session, interesting_data,
changed_keys, attrs_by_obj_id, hier_attrs,
task_entity_ids, parent_id_by_task_id
):
attr_id_to_key = {}
for attr_confs in attrs_by_obj_id.values():
for key in changed_keys:
custom_attr_id = attr_confs.get(key)
if custom_attr_id:
attr_id_to_key[custom_attr_id] = key
for key in changed_keys:
custom_attr_id = hier_attrs.get(key)
if custom_attr_id:
attr_id_to_key[custom_attr_id] = key
entity_ids = (
set(interesting_data.keys()) | task_entity_ids
)
attr_ids = set(attr_id_to_key.keys())
current_values_by_id = self.get_current_values(
session, attr_ids, entity_ids, task_entity_ids, hier_attrs
)
changes = []
for entity_id, current_values in current_values_by_id.items():
parent_id = parent_id_by_task_id.get(entity_id)
if not parent_id:
parent_id = entity_id
values = interesting_data[parent_id]
for attr_id, old_value in current_values.items():
attr_key = attr_id_to_key.get(attr_id)
if not attr_key:
continue
# Convert new value from string
new_value = values.get(attr_key)
if new_value is not None and old_value is not None:
try:
new_value = type(old_value)(new_value)
except Exception:
self.log.warning((
"Couldn't convert from {} to {}."
" Skipping value update."
).format(type(new_value), type(old_value)))
continue
if new_value == old_value:
continue
changes.append({
"new_value": new_value,
"attr_id": attr_id,
"entity_id": entity_id,
"attr_key": attr_key
})
self._commit_changes(session, changes)
def filter_changes(
self, session, event, entities_info, interest_attributes
):
session_user_id = self.session_user_id(session)
user_data = event["data"].get("user")
changed_by_session = False
if user_data and user_data.get("userid") == session_user_id:
changed_by_session = True
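# NOTE `_cached_changes` holds values this handler recently pushed
# (see `_commit_changes`). Matching incoming changes against the
# cache suppresses the echo events ftrack emits for our own commits.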
current_time = datetime.datetime.now()
interesting_data = {}
changed_keys_by_object_id = {}
for entity_info in entities_info:
# Care only about changes of specific keys
entity_changes = {}
changes = entity_info["changes"]
for key in interest_attributes:
if key in changes:
entity_changes[key] = changes[key]["new"]
entity_id = entity_info["entityId"]
if changed_by_session:
for key, new_value in tuple(entity_changes.items()):
for cached in tuple(self._cached_changes):
if (
cached["entity_id"] != entity_id
or cached["attr_key"] != key
):
continue
cached_value = cached["value"]
try:
new_value = type(cached_value)(new_value)
except Exception:
pass
if cached_value == new_value:
self._cached_changes.remove(cached)
entity_changes.pop(key)
break
delta = (current_time - cached["time"]).seconds
if delta > self._max_delta:
self._cached_changes.remove(cached)
if not entity_changes:
continue
object_id = entity_info["objectTypeId"]
interesting_data[entity_id] = entity_changes
if object_id not in changed_keys_by_object_id:
changed_keys_by_object_id[object_id] = set()
changed_keys_by_object_id[object_id] |= set(entity_changes.keys())
return interesting_data, changed_keys_by_object_id
def get_current_values(
self, session, attr_ids, entity_ids, task_entity_ids, hier_attrs
):
current_values_by_id = {}
if not attr_ids or not entity_ids:
return current_values_by_id
values = query_custom_attributes(
session, attr_ids, entity_ids, True
)
for item in values:
entity_id = item["entity_id"]
attr_id = item["configuration_id"]
if entity_id in task_entity_ids and attr_id in hier_attrs:
continue
if entity_id not in current_values_by_id:
current_values_by_id[entity_id] = {}
current_values_by_id[entity_id][attr_id] = item["value"]
return current_values_by_id
def get_entities(self, session, interesting_data, interest_object_ids):
return session.query((
"select id from TypedContext"
" where id in ({}) and object_type_id in ({})"
).format(
self.join_query_keys(interesting_data.keys()),
self.join_query_keys(interest_object_ids)
)).all()
def get_task_entities(self, session, interesting_data):
return session.query(
"select id, parent_id from Task where parent_id in ({})".format(
self.join_query_keys(interesting_data.keys())
)
).all()
def attrs_configurations(self, session, object_ids, interest_attributes):
attrs = session.query(self.cust_attrs_query.format(
self.join_query_keys(interest_attributes),
self.join_query_keys(object_ids)
)).all()
output = {}
hierarchical = {}
for attr in attrs:
if attr["is_hierarchical"]:
hierarchical[attr["key"]] = attr["id"]
continue
obj_id = attr["object_type_id"]
if obj_id not in output:
output[obj_id] = {}
output[obj_id][attr["key"]] = attr["id"]
return output, hierarchical
def register(session):
PushFrameValuesToTaskEvent(session).register()

View file

@ -0,0 +1,40 @@
import ftrack_api
from openpype_modules.ftrack.lib import BaseEvent
class RadioButtons(BaseEvent):
ignore_me = True
def launch(self, session, event):
'''Provides radio button behaviour for any boolean attribute in the
radio_button group.'''
# start of event procedure ----------------------------------
for entity in event['data'].get('entities', []):
if entity['entityType'] == 'assetversion':
query = 'CustomAttributeGroup where name is "radio_button"'
group = session.query(query).one()
radio_buttons = []
for g in group['custom_attribute_configurations']:
radio_buttons.append(g['key'])
for key in entity['keys']:
if (key in radio_buttons and entity['changes'] is not None):
if entity['changes'][key]['new'] == '1':
version = session.get('AssetVersion',
entity['entityId'])
asset = session.get('Asset', entity['parentId'])
for v in asset['versions']:
if version is not v:
v['custom_attributes'][key] = 0
session.commit()
def register(session):
'''Register plugin. Called when used as a plugin.'''
RadioButtons(session).register()

View file

@ -0,0 +1,147 @@
from pymongo import UpdateOne
from bson.objectid import ObjectId
from avalon.api import AvalonMongoDB
from openpype_modules.ftrack.lib import (
CUST_ATTR_ID_KEY,
query_custom_attributes,
BaseEvent
)
class SyncLinksToAvalon(BaseEvent):
"""Synchronize inpug linkts to avalon documents."""
# Run after sync to avalon event handler
priority = 110
def __init__(self, session):
self.dbcon = AvalonMongoDB()
super(SyncLinksToAvalon, self).__init__(session)
def launch(self, session, event):
# Collect dependency (link) changes and removed tasks from the event
entities_info = event["data"]["entities"]
dependency_changes = []
removed_entities = set()
for entity_info in entities_info:
action = entity_info.get("action")
entityType = entity_info.get("entityType")
if action not in ("remove", "add"):
continue
if entityType == "task":
removed_entities.add(entity_info["entityId"])
elif entityType == "dependency":
dependency_changes.append(entity_info)
# Care only about dependency changes
if not dependency_changes:
return
project_id = None
for entity_info in dependency_changes:
for parent_info in entity_info["parents"]:
if parent_info["entityType"] == "show":
project_id = parent_info["entityId"]
if project_id is not None:
break
changed_to_ids = set()
for entity_info in dependency_changes:
to_id_change = entity_info["changes"]["to_id"]
if to_id_change["new"] is not None:
changed_to_ids.add(to_id_change["new"])
if to_id_change["old"] is not None:
changed_to_ids.add(to_id_change["old"])
self._update_in_links(session, changed_to_ids, project_id)
def _update_in_links(self, session, ftrack_ids, project_id):
if not ftrack_ids or project_id is None:
return
attr_def = session.query((
"select id from CustomAttributeConfiguration where key is \"{}\""
).format(CUST_ATTR_ID_KEY)).first()
if attr_def is None:
return
project_entity = session.query((
"select full_name from Project where id is \"{}\""
).format(project_id)).first()
if not project_entity:
return
project_name = project_entity["full_name"]
mongo_id_by_ftrack_id = self._get_mongo_ids_by_ftrack_ids(
session, attr_def["id"], ftrack_ids
)
filtered_ftrack_ids = tuple(mongo_id_by_ftrack_id.keys())
context_links = session.query((
"select from_id, to_id from TypedContextLink where to_id in ({})"
).format(self.join_query_keys(filtered_ftrack_ids))).all()
mapping_by_to_id = {
ftrack_id: set()
for ftrack_id in filtered_ftrack_ids
}
all_from_ids = set()
for context_link in context_links:
to_id = context_link["to_id"]
from_id = context_link["from_id"]
if from_id == to_id:
continue
all_from_ids.add(from_id)
mapping_by_to_id[to_id].add(from_id)
mongo_id_by_ftrack_id.update(self._get_mongo_ids_by_ftrack_ids(
session, attr_def["id"], all_from_ids
))
self.log.debug("Mongo ids by ftrack id: {}".format(mongo_id_by_ftrack_id))
bulk_writes = []
for to_id, from_ids in mapping_by_to_id.items():
dst_mongo_id = mongo_id_by_ftrack_id[to_id]
links = []
for ftrack_id in from_ids:
link_mongo_id = mongo_id_by_ftrack_id.get(ftrack_id)
if link_mongo_id is None:
continue
links.append({
"id": ObjectId(link_mongo_id),
"linkedBy": "ftrack",
"type": "breakdown"
})
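# Illustrative resulting update document (hypothetical ids):
# {"$set": {"data.inputLinks": [
#     {"id": ObjectId("..."), "linkedBy": "ftrack", "type": "breakdown"}
# ]}}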
bulk_writes.append(UpdateOne(
{"_id": ObjectId(dst_mongo_id)},
{"$set": {"data.inputLinks": links}}
))
if bulk_writes:
self.dbcon.database[project_name].bulk_write(bulk_writes)
def _get_mongo_ids_by_ftrack_ids(self, session, attr_id, ftrack_ids):
output = query_custom_attributes(
session, [attr_id], ftrack_ids, True
)
mongo_id_by_ftrack_id = {}
for item in output:
mongo_id = item["value"]
if not mongo_id:
continue
ftrack_id = item["entity_id"]
mongo_id_by_ftrack_id[ftrack_id] = mongo_id
return mongo_id_by_ftrack_id
def register(session):
'''Register plugin. Called when used as a plugin.'''
SyncLinksToAvalon(session).register()

File diff suppressed because it is too large

View file

@ -0,0 +1,423 @@
import collections
from openpype_modules.ftrack.lib import BaseEvent
class TaskStatusToParent(BaseEvent):
settings_key = "status_task_to_parent"
def launch(self, session, event):
"""Propagates status from task to parent when changed."""
filtered_entities_info = self.filter_entities_info(event)
if not filtered_entities_info:
return
for project_id, entities_info in filtered_entities_info.items():
self.process_by_project(session, event, project_id, entities_info)
def filter_entities_info(self, event):
# Filter if event contains relevant data
entities_info = event["data"].get("entities")
if not entities_info:
return
filtered_entity_info = collections.defaultdict(list)
status_ids = set()
for entity_info in entities_info:
# Care only about tasks
if entity_info.get("entityType") != "task":
continue
# Care only about changes of status
changes = entity_info.get("changes")
if not changes:
continue
statusid_changes = changes.get("statusid")
if not statusid_changes:
continue
new_status_id = entity_info["changes"]["statusid"]["new"]
if (
statusid_changes.get("old") is None
or new_status_id is None
):
continue
project_id = None
for parent_item in reversed(entity_info["parents"]):
if parent_item["entityType"] == "show":
project_id = parent_item["entityId"]
break
if project_id:
filtered_entity_info[project_id].append(entity_info)
status_ids.add(new_status_id)
return filtered_entity_info
def process_by_project(self, session, event, project_id, entities_info):
# Get project name
project_name = self.get_project_name_from_event(
session, event, project_id
)
# Load settings
project_settings = self.get_project_settings_from_event(
event, project_name
)
# Prepare loaded settings and check if can be processed
result = self.prepare_settings(project_settings, project_name)
if not result:
return
# Unpack the result
parent_object_types, all_match, single_match = result
# Prepare valid object type ids for object types from settings
object_types = session.query("select id, name from ObjectType").all()
object_type_id_by_low_name = {
object_type["name"].lower(): object_type["id"]
for object_type in object_types
}
valid_object_type_ids = set()
for object_type_name in parent_object_types:
if object_type_name in object_type_id_by_low_name:
valid_object_type_ids.add(
object_type_id_by_low_name[object_type_name]
)
else:
self.log.warning(
"Unknown object type \"{}\" set on project \"{}\".".format(
object_type_name, project_name
)
)
if not valid_object_type_ids:
return
# Prepare parent ids
parent_ids = set()
for entity_info in entities_info:
parent_id = entity_info["parentId"]
if parent_id:
parent_ids.add(parent_id)
# Query parent ids by object type ids and parent ids
parent_entities = session.query(
(
"select id, status_id, object_type_id, link from TypedContext"
" where id in ({}) and object_type_id in ({})"
).format(
self.join_query_keys(parent_ids),
self.join_query_keys(valid_object_type_ids)
)
).all()
# Skip if none of parents match the filtering
if not parent_entities:
return
obj_ids = set()
for entity in parent_entities:
obj_ids.add(entity["object_type_id"])
types_mapping = {
_type.lower(): _type
for _type in session.types
}
# Map object type id by lowered and modified object type name
object_type_name_by_id = {}
for object_type in object_types:
mapping_name = object_type["name"].lower().replace(" ", "")
obj_id = object_type["id"]
object_type_name_by_id[obj_id] = types_mapping[mapping_name]
project_entity = session.get("Project", project_id)
project_schema = project_entity["project_schema"]
available_statuses_by_obj_id = {}
for obj_id in obj_ids:
obj_name = object_type_name_by_id[obj_id]
statuses = project_schema.get_statuses(obj_name)
statuses_by_low_name = {
status["name"].lower(): status
for status in statuses
}
valid = False
for name in all_match.keys():
if name in statuses_by_low_name:
valid = True
break
if not valid:
for item in single_match:
if item["new_status"] in statuses_by_low_name:
valid = True
break
if valid:
available_statuses_by_obj_id[obj_id] = statuses_by_low_name
valid_parent_ids = set()
status_ids = set()
valid_parent_entities = []
for entity in parent_entities:
if entity["object_type_id"] not in available_statuses_by_obj_id:
continue
valid_parent_entities.append(entity)
valid_parent_ids.add(entity["id"])
status_ids.add(entity["status_id"])
if not valid_parent_ids:
return
task_entities = session.query(
(
"select id, parent_id, status_id from TypedContext"
" where parent_id in ({}) and object_type_id is \"{}\""
).format(
self.join_query_keys(valid_parent_ids),
object_type_id_by_low_name["task"]
)
).all()
# This should not happen, but it is safer to check
if not task_entities:
return
task_entities_by_parent_id = collections.defaultdict(list)
for task_entity in task_entities:
status_ids.add(task_entity["status_id"])
parent_id = task_entity["parent_id"]
task_entities_by_parent_id[parent_id].append(task_entity)
status_entities = session.query((
"select id, name from Status where id in ({})"
).format(self.join_query_keys(status_ids))).all()
statuses_by_id = {
entity["id"]: entity
for entity in status_entities
}
# New status determination logic
new_statuses_by_parent_id = self.new_status_by_all_task_statuses(
task_entities_by_parent_id, statuses_by_id, all_match
)
task_entities_by_id = {
task_entity["id"]: task_entity
for task_entity in task_entities
}
# Check if any parents remain that do not have a new status
# determined yet
remainder_tasks_by_parent_id = collections.defaultdict(list)
for entity_info in entities_info:
entity_id = entity_info["entityId"]
if entity_id not in task_entities_by_id:
continue
parent_id = entity_info["parentId"]
if (
# Skip if already has determined new status
parent_id in new_statuses_by_parent_id
# Skip if parent is not in parent mapping
# - if was not found or parent type is not interesting
or parent_id not in task_entities_by_parent_id
):
continue
remainder_tasks_by_parent_id[parent_id].append(
task_entities_by_id[entity_id]
)
# Try to find new status for remained parents
new_statuses_by_parent_id.update(
self.new_status_by_remainders(
remainder_tasks_by_parent_id,
statuses_by_id,
single_match
)
)
# If there are no new statuses then just skip
if not new_statuses_by_parent_id:
return
parent_entities_by_id = {
parent_entity["id"]: parent_entity
for parent_entity in valid_parent_entities
}
for parent_id, new_status_name in new_statuses_by_parent_id.items():
if not new_status_name:
continue
parent_entity = parent_entities_by_id[parent_id]
ent_path = "/".join(
[ent["name"] for ent in parent_entity["link"]]
)
obj_id = parent_entity["object_type_id"]
statuses_by_low_name = available_statuses_by_obj_id.get(obj_id)
if not statuses_by_low_name:
continue
new_status = statuses_by_low_name.get(new_status_name)
if not new_status:
self.log.warning((
"\"{}\" Couldn't change status to \"{}\"."
" Status is not available for entity type \"{}\"."
).format(
ent_path, new_status_name, parent_entity.entity_type
))
continue
current_status = parent_entity["status"]
# Do nothing if status is already set
if new_status["id"] == current_status["id"]:
self.log.debug(
"\"{}\" Status \"{}\" already set.".format(
ent_path, current_status["name"]
)
)
continue
try:
parent_entity["status_id"] = new_status["id"]
session.commit()
self.log.info(
"\"{}\" changed status to \"{}\"".format(
ent_path, new_status["name"]
)
)
except Exception:
session.rollback()
self.log.warning(
"\"{}\" status couldnt be set to \"{}\"".format(
ent_path, new_status["name"]
),
exc_info=True
)
def prepare_settings(self, project_settings, project_name):
event_settings = (
project_settings["ftrack"]["events"][self.settings_key]
)
if not event_settings["enabled"]:
self.log.debug("Project \"{}\" has disabled {}.".format(
project_name, self.__class__.__name__
))
return
_parent_object_types = event_settings["parent_object_types"]
if not _parent_object_types:
self.log.debug((
"Project \"{}\" does not have set"
" parent object types filtering."
).format(project_name))
return
_all_match = (
event_settings["parent_status_match_all_task_statuses"]
)
_single_match = (
event_settings["parent_status_by_task_status"]
)
if not _all_match and not _single_match:
self.log.debug((
"Project \"{}\" does not have set"
" parent status mappings."
).format(project_name))
return
parent_object_types = [
item.lower()
for item in _parent_object_types
]
all_match = {}
for new_status_name, task_statuses in _all_match.items():
all_match[new_status_name.lower()] = [
status_name.lower()
for status_name in task_statuses
]
single_match = []
for item in _single_match:
single_match.append({
"new_status": item["new_status"].lower(),
"task_statuses": [
status_name.lower()
for status_name in item["task_statuses"]
]
})
return parent_object_types, all_match, single_match
def new_status_by_all_task_statuses(
self, tasks_by_parent_id, statuses_by_id, all_match
):
"""All statuses of parent entity must match specific status names.
Only if all task statuses match the condition parent's status name is
determined.
"""
output = {}
for parent_id, task_entities in tasks_by_parent_id.items():
task_statuses_lowered = set()
for task_entity in task_entities:
task_status = statuses_by_id[task_entity["status_id"]]
low_status_name = task_status["name"].lower()
task_statuses_lowered.add(low_status_name)
new_status = None
for _new_status, task_statuses in all_match.items():
valid_item = True
for status_name_low in task_statuses_lowered:
if status_name_low not in task_statuses:
valid_item = False
break
if valid_item:
new_status = _new_status
break
if new_status is not None:
output[parent_id] = new_status
return output
def new_status_by_remainders(
self, remainder_tasks_by_parent_id, statuses_by_id, single_match
):
"""By new task status can be determined new status of parent."""
output = {}
if not remainder_tasks_by_parent_id:
return output
for parent_id, task_entities in remainder_tasks_by_parent_id.items():
if not task_entities:
continue
# For cases where there are multiple tasks in changes
# - the task status which matches a new status item earliest in the
#   `single_match` list is preferred
best_order = len(single_match)
best_order_status = None
for task_entity in task_entities:
task_status = statuses_by_id[task_entity["status_id"]]
low_status_name = task_status["name"].lower()
for order, item in enumerate(single_match):
if order >= best_order:
break
if low_status_name in item["task_statuses"]:
best_order = order
best_order_status = item["new_status"]
break
if best_order_status:
output[parent_id] = best_order_status
return output
def register(session):
TaskStatusToParent(session).register()

View file

@ -0,0 +1,377 @@
import collections
from openpype_modules.ftrack.lib import BaseEvent
class TaskToVersionStatus(BaseEvent):
"""Changes status of task's latest AssetVersions on its status change."""
settings_key = "status_task_to_version"
# Attribute for caching session user id
_cached_user_id = None
def is_event_invalid(self, session, event):
"""Skip task status changes for session user changes.
It is expected that there may be another event handler that set
version status to task in that case skip all events caused by same
user as session has to avoid infinite loop of status changes.
"""
# Cache user id of currently running session
if self._cached_user_id is None:
session_user_entity = session.query(
"User where username is \"{}\"".format(session.api_user)
).first()
if not session_user_entity:
self.log.warning(
"Couldn't query Ftrack user with username \"{}\"".format(
session.api_user
)
)
return False
self._cached_user_id = session_user_entity["id"]
# Skip processing if current session user was the user who created
# the event
user_info = event["source"].get("user") or {}
user_id = user_info.get("id")
# Mark as invalid if user is unknown
if user_id is None:
return True
return user_id == self._cached_user_id
def filter_event_entities(self, event):
"""Filter if event contain relevant data.
Event cares only about changes of `statusid` on `entity_type` "Task".
"""
entities_info = event["data"].get("entities")
if not entities_info:
return
filtered_entity_info = collections.defaultdict(list)
for entity_info in entities_info:
# Care only about tasks
if entity_info.get("entity_type") != "Task":
continue
# Care only about changes of status
changes = entity_info.get("changes") or {}
statusid_changes = changes.get("statusid") or {}
if (
statusid_changes.get("new") is None
or statusid_changes.get("old") is None
):
continue
# Get project id from entity info
project_id = None
for parent_item in reversed(entity_info["parents"]):
if parent_item["entityType"] == "show":
project_id = parent_item["entityId"]
break
if project_id:
filtered_entity_info[project_id].append(entity_info)
return filtered_entity_info
def _get_ent_path(self, entity):
return "/".join(
[ent["name"] for ent in entity["link"]]
)
def launch(self, session, event):
'''Propagates status from task to its latest AssetVersions when changed'''
if self.is_event_invalid(session, event):
return
filtered_entity_infos = self.filter_event_entities(event)
if not filtered_entity_infos:
return
for project_id, entities_info in filtered_entity_infos.items():
self.process_by_project(session, event, project_id, entities_info)
def process_by_project(self, session, event, project_id, entities_info):
if not entities_info:
return
project_name = self.get_project_name_from_event(
session, event, project_id
)
# Load settings
project_settings = self.get_project_settings_from_event(
event, project_name
)
event_settings = (
project_settings["ftrack"]["events"][self.settings_key]
)
_status_mapping = event_settings["mapping"]
if not event_settings["enabled"]:
self.log.debug("Project \"{}\" has disabled {}.".format(
project_name, self.__class__.__name__
))
return
if not _status_mapping:
self.log.debug((
"Project \"{}\" does not have set status mapping for {}."
).format(project_name, self.__class__.__name__))
return
status_mapping = {
key.lower(): value
for key, value in _status_mapping.items()
}
asset_types_filter = event_settings["asset_types_filter"]
task_ids = [
entity_info["entityId"]
for entity_info in entities_info
]
last_asset_versions_by_task_id = (
self.find_last_asset_versions_for_task_ids(
session, task_ids, asset_types_filter
)
)
# Query Task entities for last asset versions
joined_filtered_ids = self.join_query_keys(
last_asset_versions_by_task_id.keys()
)
if not joined_filtered_ids:
return
task_entities = session.query(
"select status_id, link from Task where id in ({})".format(
joined_filtered_ids
)
).all()
if not task_entities:
return
status_ids = set()
for task_entity in task_entities:
status_ids.add(task_entity["status_id"])
task_status_entities = session.query(
"select id, name from Status where id in ({})".format(
self.join_query_keys(status_ids)
)
).all()
task_status_name_by_id = {
status_entity["id"]: status_entity["name"]
for status_entity in task_status_entities
}
# Final process of changing statuses
project_entity = session.get("Project", project_id)
av_statuses_by_low_name, av_statuses_by_id = (
self.get_asset_version_statuses(project_entity)
)
asset_ids = set()
for asset_versions in last_asset_versions_by_task_id.values():
for asset_version in asset_versions:
asset_ids.add(asset_version["asset_id"])
asset_entities = session.query(
"select name from Asset where id in ({})".format(
self.join_query_keys(asset_ids)
)
).all()
asset_names_by_id = {
asset_entity["id"]: asset_entity["name"]
for asset_entity in asset_entities
}
for task_entity in task_entities:
task_id = task_entity["id"]
status_id = task_entity["status_id"]
task_path = self._get_ent_path(task_entity)
task_status_name = task_status_name_by_id[status_id]
task_status_name_low = task_status_name.lower()
new_asset_version_status = None
mapped_status_names = status_mapping.get(task_status_name_low)
if mapped_status_names:
for status_name in mapped_status_names:
_status = av_statuses_by_low_name.get(status_name.lower())
if _status:
new_asset_version_status = _status
break
if not new_asset_version_status:
new_asset_version_status = av_statuses_by_low_name.get(
task_status_name_low
)
# Skip if task's status has no matching status available for AssetVersion
if not new_asset_version_status:
self.log.debug((
"AssetVersion does not have matching status to \"{}\""
).format(task_status_name))
continue
last_asset_versions = last_asset_versions_by_task_id[task_id]
for asset_version in last_asset_versions:
version = asset_version["version"]
self.log.debug((
"Trying to change status of last AssetVersion {}"
" for task \"{}\""
).format(version, task_path))
asset_id = asset_version["asset_id"]
asset_type_name = asset_names_by_id[asset_id]
av_ent_path = task_path + " Asset {} AssetVersion {}".format(
asset_type_name,
version
)
# Skip if current AssetVersion's status is same
status_id = asset_version["status_id"]
current_status_name = av_statuses_by_id[status_id]["name"]
if current_status_name.lower() == task_status_name_low:
self.log.debug((
"AssetVersion already has set status \"{}\". \"{}\""
).format(current_status_name, av_ent_path))
continue
new_status_id = new_asset_version_status["id"]
new_status_name = new_asset_version_status["name"]
# Skip if status is already same
if asset_version["status_id"] == new_status_id:
continue
# Change the status
try:
asset_version["status_id"] = new_status_id
session.commit()
self.log.info("[ {} ] Status updated to [ {} ]".format(
av_ent_path, new_status_name
))
except Exception:
session.rollback()
self.log.warning(
"[ {} ]Status couldn't be set to \"{}\"".format(
av_ent_path, new_status_name
),
exc_info=True
)
def get_asset_version_statuses(self, project_entity):
"""Status entities for AssetVersion from project's schema.
Load statuses from project's schema and store them by id and name.
Args:
project_entity (ftrack_api.Entity): Entity of ftrack's project.
Returns:
tuple: 2 items are returned first are statuses by name
second are statuses by id.
"""
project_schema = project_entity["project_schema"]
# Get all available statuses for Task
statuses = project_schema.get_statuses("AssetVersion")
# map lowered status name to its object
av_statuses_by_low_name = {}
av_statuses_by_id = {}
for status in statuses:
av_statuses_by_low_name[status["name"].lower()] = status
av_statuses_by_id[status["id"]] = status
return av_statuses_by_low_name, av_statuses_by_id
def find_last_asset_versions_for_task_ids(
self, session, task_ids, asset_types_filter
):
"""Find latest AssetVersion entities for task.
Find first latest AssetVersion for task and all AssetVersions with
same version for the task.
Args:
asset_versions (list): AssetVersion entities sorted by "version".
task_ids (list): Task ids.
asset_types_filter (list): Asset types short names that will be
used to filter AssetVersions. Filtering is skipped if entered
value is empty list.
"""
# Allow event only on specific asset type names
asset_query_part = ""
if asset_types_filter:
# Query all AssetTypes
asset_types = session.query(
"select id, short from AssetType"
).all()
# Store AssetTypes by id
asset_type_short_by_id = {
asset_type["id"]: asset_type["short"]
for asset_type in asset_types
}
# Lower asset types from settings
# WARNING: not sure if it is a good idea to lower the names, as
# Ftrack may contain asset types named both "Scene" and "scene"!
asset_types_filter_low = set(
asset_types_name.lower()
for asset_types_name in asset_types_filter
)
asset_type_ids = []
for type_id, short in asset_type_short_by_id.items():
# TODO log if asset type name is not found
if short.lower() in asset_types_filter_low:
asset_type_ids.append(type_id)
# TODO log that none of asset type names were found in ftrack
if asset_type_ids:
asset_query_part = " and asset.type_id in ({})".format(
self.join_query_keys(asset_type_ids)
)
# Query tasks' AssetVersions
asset_versions = session.query((
"select status_id, version, task_id, asset_id"
" from AssetVersion where task_id in ({}){}"
" order by version descending"
).format(self.join_query_keys(task_ids), asset_query_part)).all()
last_asset_versions_by_task_id = collections.defaultdict(list)
last_version_by_task_id = {}
not_finished_task_ids = set(task_ids)
for asset_version in asset_versions:
task_id = asset_version["task_id"]
# Check if task id is still in `not_finished_task_ids`
if task_id not in not_finished_task_ids:
continue
version = asset_version["version"]
# Find last version in `last_version_by_task_id`
last_version = last_version_by_task_id.get(task_id)
if last_version is None:
# If task id does not have a version set yet then it's the first
# AssetVersion for this task
last_version_by_task_id[task_id] = version
elif last_version > version:
# Skip processing if version is lower than last version
# and pop task id from `not_finished_task_ids`
not_finished_task_ids.remove(task_id)
continue
# Add AssetVersion entity to output dictionary
last_asset_versions_by_task_id[task_id].append(asset_version)
return last_asset_versions_by_task_id
def register(session):
TaskToVersionStatus(session).register()

View file

@ -0,0 +1,155 @@
import collections
from openpype_modules.ftrack.lib import BaseEvent
class ThumbnailEvents(BaseEvent):
settings_key = "thumbnail_updates"
def launch(self, session, event):
"""Updates thumbnails of entities from new AssetVersion."""
filtered_entities = self.filter_entities(event)
if not filtered_entities:
return
for project_id, entities_info in filtered_entities.items():
self.process_project_entities(
session, event, project_id, entities_info
)
def process_project_entities(
self, session, event, project_id, entities_info
):
project_name = self.get_project_name_from_event(
session, event, project_id
)
# Load settings
project_settings = self.get_project_settings_from_event(
event, project_name
)
event_settings = (
project_settings
["ftrack"]
["events"]
[self.settings_key]
)
if not event_settings["enabled"]:
self.log.debug("Project \"{}\" does not have activated {}.".format(
project_name, self.__class__.__name__
))
return
self.log.debug("Processing {} on project \"{}\".".format(
self.__class__.__name__, project_name
))
parent_levels = event_settings["levels"]
if parent_levels < 1:
self.log.debug(
"Project \"{}\" has parent levels set to {}. Skipping".format(
project_name, parent_levels
)
)
return
asset_version_ids = set()
for entity in entities_info:
asset_version_ids.add(entity["entityId"])
# Do not use `asset_version_entities` directly; versions without a
# thumbnail are filtered out when `asset_versions_by_id` is filled
asset_version_entities = session.query((
"select task_id, thumbnail_id from AssetVersion where id in ({})"
).format(self.join_query_keys(asset_version_ids))).all()
asset_versions_by_id = {}
for asset_version_entity in asset_version_entities:
if not asset_version_entity["thumbnail_id"]:
continue
entity_id = asset_version_entity["id"]
asset_versions_by_id[entity_id] = asset_version_entity
if not asset_versions_by_id:
self.log.debug("None of asset versions has set thumbnail id.")
return
entity_ids_by_asset_version_id = collections.defaultdict(list)
hierarchy_ids = set()
for entity_info in entities_info:
entity_id = entity_info["entityId"]
if entity_id not in asset_versions_by_id:
continue
parent_ids = []
counter = None
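# Walk parents from the AssetVersion upwards: counting starts at the
# first "asset" parent and collects up to `parent_levels` entity ids
# above it (e.g. with levels=2 the Shot and its Sequence, in an
# illustrative hierarchy).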
for parent_info in entity_info["parents"]:
if counter is not None:
if counter >= parent_levels:
break
parent_ids.append(parent_info["entityId"])
counter += 1
elif parent_info["entityType"] == "asset":
counter = 0
for parent_id in parent_ids:
hierarchy_ids.add(parent_id)
entity_ids_by_asset_version_id[entity_id].append(parent_id)
for asset_version_entity in asset_versions_by_id.values():
task_id = asset_version_entity["task_id"]
if task_id:
hierarchy_ids.add(task_id)
asset_version_id = asset_version_entity["id"]
entity_ids_by_asset_version_id[asset_version_id].append(
task_id
)
entities = session.query((
"select thumbnail_id, link from TypedContext where id in ({})"
).format(self.join_query_keys(hierarchy_ids))).all()
entities_by_id = {
entity["id"]: entity
for entity in entities
}
for version_id, version_entity in asset_versions_by_id.items():
for entity_id in entity_ids_by_asset_version_id[version_id]:
entity = entities_by_id.get(entity_id)
if not entity:
continue
entity["thumbnail_id"] = version_entity["thumbnail_id"]
self.log.info("Updating thumbnail for entity [ {} ]".format(
self.get_entity_path(entity)
))
try:
session.commit()
except Exception:
session.rollback()
def filter_entities(self, event):
filtered_entities_info = {}
for entity_info in event["data"].get("entities", []):
action = entity_info.get("action")
if not action:
continue
if (
action == "remove"
or entity_info["entityType"].lower() != "assetversion"
or "thumbid" not in (entity_info.get("keys") or [])
):
continue
# Get project id from entity info
project_id = entity_info["parents"][-1]["entityId"]
if project_id not in filtered_entities_info:
filtered_entities_info[project_id] = []
filtered_entities_info[project_id].append(entity_info)
return filtered_entities_info
def register(session):
ThumbnailEvents(session).register()

View file

@ -0,0 +1,258 @@
import os
import re
import subprocess
from openpype_modules.ftrack.lib import BaseEvent
from openpype_modules.ftrack.lib.avalon_sync import CUST_ATTR_ID_KEY
from avalon.api import AvalonMongoDB
from bson.objectid import ObjectId
from openpype.api import Anatomy, get_project_settings
class UserAssigmentEvent(BaseEvent):
"""
This script will intercept user assignment / de-assignment events and
run shell scripts, providing as much context as possible.
It expects configuration file ``presets/ftrack/user_assigment_event.json``.
In it, you define paths to scripts to be run for the user assignment
event and for user de-assignment::
{
"add": [
"/path/to/script1",
"/path/to/script2"
],
"remove": [
"/path/to/script3",
"/path/to/script4"
]
}
Those scripts are executed in shell. Three arguments will be passed
to them:
1) user name of user (de)assigned
2) path to workfiles of task user was (de)assigned to
3) path to publish files of task user was (de)assigned to
"""
db_con = AvalonMongoDB()
def error(self, *err):
for e in err:
self.log.error(e)
def _run_script(self, script, args):
"""
Run shell script with arguments as subprocess
:param script: script path
:type script: str
:param args: list of arguments passed to script
:type args: list
:returns: return code
:rtype: int
"""
# Pass the script and its arguments as a single flat list; nesting
# the argument list would not forward the arguments correctly.
return subprocess.call([script] + args)
def _get_task_and_user(self, session, action, changes):
"""
Get Task and User entities from Ftrack session
:param session: ftrack session
:type session: ftrack_api.session
:param action: event action
:type action: str
:param changes: what was changed by event
:type changes: dict
:returns: User and Task entities
:rtype: tuple
"""
if not changes:
return None, None
if action == 'add':
task_id = changes.get('context_id', {}).get('new')
user_id = changes.get('resource_id', {}).get('new')
elif action == 'remove':
task_id = changes.get('context_id', {}).get('old')
user_id = changes.get('resource_id', {}).get('old')
else:
return None, None
if not task_id:
return None, None
if not user_id:
return None, None
task = session.query('Task where id is "{}"'.format(task_id)).one()
user = session.query('User where id is "{}"'.format(user_id)).one()
return task, user
def _get_asset(self, task):
"""
Get asset from task entity
:param task: Task entity
:type task: dict
:returns: Asset entity
:rtype: dict
"""
parent = task['parent']
self.db_con.install()
self.db_con.Session['AVALON_PROJECT'] = task['project']['full_name']
avalon_entity = None
parent_id = parent['custom_attributes'].get(CUST_ATTR_ID_KEY)
if parent_id:
parent_id = ObjectId(parent_id)
avalon_entity = self.db_con.find_one({
'_id': parent_id,
'type': 'asset'
})
if not avalon_entity:
avalon_entity = self.db_con.find_one({
'type': 'asset',
'name': parent['name']
})
if not avalon_entity:
self.db_con.uninstall()
msg = 'Entity "{}" not found in avalon database'.format(
parent['name']
)
self.error(msg)
return {
'success': False,
'message': msg
}
self.db_con.uninstall()
return avalon_entity
def _get_hierarchy(self, asset):
"""
Get hierarchy from Asset entity
:param asset: Asset entity
:type asset: dict
:returns: hierarchy string
:rtype: str
"""
return asset['data']['hierarchy']
def _get_template_data(self, task):
"""
Get data to fill template from task
.. seealso:: :mod:`openpype.api.Anatomy`
:param task: Task entity
:type task: dict
:returns: data for anatomy template
:rtype: dict
"""
project_name = task['project']['full_name']
project_code = task['project']['name']
# fill in template data
asset = self._get_asset(task)
t_data = {
'project': {
'name': project_name,
'code': project_code
},
'asset': asset['name'],
'task': task['name'],
'hierarchy': self._get_hierarchy(asset)
}
return t_data
def launch(self, session, event):
if not event.get("data"):
return
entities_info = event["data"].get("entities")
if not entities_info:
return
# load shell scripts presets
tmp_by_project_name = {}
for entity_info in entities_info:
if entity_info.get('entity_type') != 'Appointment':
continue
task_entity, user_entity = self._get_task_and_user(
session,
entity_info.get('action'),
entity_info.get('changes')
)
if not task_entity or not user_entity:
self.log.error("Task or User was not found.")
continue
# format directories to pass to shell script
project_name = task_entity["project"]["full_name"]
project_data = tmp_by_project_name.get(project_name) or {}
if "scripts_by_action" not in project_data:
project_settings = get_project_settings(project_name)
_settings = (
project_settings["ftrack"]["events"]["user_assignment"]
)
project_data["scripts_by_action"] = _settings.get("scripts")
tmp_by_project_name[project_name] = project_data
scripts_by_action = project_data["scripts_by_action"]
if not scripts_by_action:
continue
if "anatomy" not in project_data:
project_data["anatomy"] = Anatomy(project_name)
tmp_by_project_name[project_name] = project_data
anatomy = project_data["anatomy"]
data = self._get_template_data(task_entity)
anatomy_filled = anatomy.format(data)
# formatting the work dir is the easiest part as we can use the whole path
work_dir = anatomy_filled["work"]["folder"]
# we also need the publish path, but not the whole of it
anatomy_filled.strict = False
publish = anatomy_filled["publish"]["folder"]
# now find path to {asset}
m = re.search(
"(^.+?{})".format(data["asset"]),
publish
)
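# Illustrative match (hypothetical path): for publish
# "/mnt/proj/assets/hero/publish" and asset "hero", group(1)
# resolves to "/mnt/proj/assets/hero".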
if not m:
msg = 'Cannot get part of publish path {}'.format(publish)
self.log.error(msg)
return {
'success': False,
'message': msg
}
publish_dir = m.group(1)
username = user_entity["username"]
event_entity_action = entity_info["action"]
for script in scripts_by_action.get(event_entity_action):
self.log.info((
"[{}] : running script for user {}"
).format(event_entity_action, username))
self._run_script(script, [username, work_dir, publish_dir])
return True
def register(session):
"""
Register plugin. Called when used as a plugin.
"""
UserAssigmentEvent(session).register()

View file

@ -0,0 +1,239 @@
from openpype_modules.ftrack.lib import BaseEvent
class VersionToTaskStatus(BaseEvent):
"""Propagates status from version to task when changed."""
def launch(self, session, event):
# Filter event entities
# - output is dictionary where key is project id and event info in
# value
filtered_entities_info = self.filter_entity_info(event)
if not filtered_entities_info:
return
for project_id, entities_info in filtered_entities_info.items():
self.process_by_project(session, event, project_id, entities_info)
def filter_entity_info(self, event):
filtered_entity_info = {}
for entity_info in event["data"].get("entities", []):
# Filter AssetVersions
if entity_info["entityType"] != "assetversion":
continue
# Skip if statusid not in keys (in changes)
keys = entity_info.get("keys")
if not keys or "statusid" not in keys:
continue
# Get new status id of the version
version_status_id = (
entity_info
.get("changes", {})
.get("statusid", {})
.get("new", {})
)
# Just check that `new` is set to any value
if not version_status_id:
continue
# Get project id from entity info
project_id = entity_info["parents"][-1]["entityId"]
if project_id not in filtered_entity_info:
filtered_entity_info[project_id] = []
filtered_entity_info[project_id].append(entity_info)
return filtered_entity_info
def process_by_project(self, session, event, project_id, entities_info):
# Check for project data if event is enabled for event handler
project_name = self.get_project_name_from_event(
session, event, project_id
)
# Load settings
project_settings = self.get_project_settings_from_event(
event, project_name
)
# Load status mapping from presets
event_settings = (
project_settings["ftrack"]["events"]["status_version_to_task"]
)
# Skip if event is not enabled or status mapping is not set
if not event_settings["enabled"]:
self.log.debug("Project \"{}\" has disabled {}".format(
project_name, self.__class__.__name__
))
return
_status_mapping = event_settings["mapping"] or {}
status_mapping = {
key.lower(): value
for key, value in _status_mapping.items()
}
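# Illustrative mapping shape from settings (hypothetical names):
# {"approved": ["approved", "done"]} - keys are lowered AssetVersion
# status names, values are candidate Task status names tried in
# order.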
asset_types_to_skip = [
short_name.lower()
for short_name in event_settings["asset_types_to_skip"]
]
# Collect entity ids
asset_version_ids = set()
for entity_info in entities_info:
asset_version_ids.add(entity_info["entityId"])
# Query tasks for AssetVersions
_asset_version_entities = session.query(
"AssetVersion where task_id != none and id in ({})".format(
self.join_query_keys(asset_version_ids)
)
).all()
if not _asset_version_entities:
return
# Filter asset versions by asset type and store their task_ids
task_ids = set()
asset_version_entities = []
for asset_version in _asset_version_entities:
if asset_types_to_skip:
short_name = asset_version["asset"]["type"]["short"].lower()
if short_name in asset_types_to_skip:
continue
asset_version_entities.append(asset_version)
task_ids.add(asset_version["task_id"])
# Skip if `task_ids` is empty
if not task_ids:
return
task_entities = session.query(
"select link from Task where id in ({})".format(
self.join_query_keys(task_ids)
)
).all()
task_entities_by_id = {
task_entity["id"]: task_entity
for task_entity in task_entities
}
# Prepare asset version by their id
asset_versions_by_id = {
asset_version["id"]: asset_version
for asset_version in asset_version_entities
}
# Query status entities
status_ids = set()
for entity_info in entities_info:
# Skip statuses of asset versions without task
if entity_info["entityId"] not in asset_versions_by_id:
continue
status_ids.add(entity_info["changes"]["statusid"]["new"])
version_status_entities = session.query(
"select id, name from Status where id in ({})".format(
self.join_query_keys(status_ids)
)
).all()
# Query statuses
statuses_by_obj_id = self.statuses_for_tasks(
session, task_entities, project_id
)
# Prepare status names by their ids
status_name_by_id = {
status_entity["id"]: status_entity["name"]
for status_entity in version_status_entities
}
for entity_info in entities_info:
entity_id = entity_info["entityId"]
status_id = entity_info["changes"]["statusid"]["new"]
status_name = status_name_by_id.get(status_id)
if not status_name:
continue
status_name_low = status_name.lower()
# Lower version status name and check if it has a mapping
new_status_names = []
mapped = status_mapping.get(status_name_low)
if mapped:
new_status_names.extend(list(mapped))
new_status_names.append(status_name_low)
self.log.debug(
"Processing AssetVersion status change: [ {} ]".format(
status_name
)
)
asset_version = asset_versions_by_id[entity_id]
task_entity = task_entities_by_id[asset_version["task_id"]]
type_id = task_entity["type_id"]
# Lower all names from presets
new_status_names = [name.lower() for name in new_status_names]
task_statuses_by_low_name = statuses_by_obj_id[type_id]
new_status = None
for status_name in new_status_names:
if status_name not in task_statuses_by_low_name:
self.log.debug((
"Task does not have status name \"{}\" available."
).format(status_name))
continue
# store object of found status
new_status = task_statuses_by_low_name[status_name]
self.log.debug("Status to set: [ {} ]".format(
new_status["name"]
))
break
# Skip if status names were not found for the particular entity
if not new_status:
self.log.warning(
"Any of statuses from presets can be set: {}".format(
str(new_status_names)
)
)
continue
# Get full path to task for logging
ent_path = "/".join([ent["name"] for ent in task_entity["link"]])
# Setting task status
try:
task_entity["status"] = new_status
session.commit()
self.log.debug("[ {} ] Status updated to [ {} ]".format(
ent_path, new_status["name"]
))
except Exception:
session.rollback()
self.log.warning(
"[ {} ]Status couldn't be set".format(ent_path),
exc_info=True
)
def statuses_for_tasks(self, session, task_entities, project_id):
task_type_ids = set()
for task_entity in task_entities:
task_type_ids.add(task_entity["type_id"])
project_entity = session.get("Project", project_id)
project_schema = project_entity["project_schema"]
output = {}
for task_type_id in task_type_ids:
statuses = project_schema.get_statuses("Task", task_type_id)
output[task_type_id] = {
status["name"].lower(): status
for status in statuses
}
return output
def register(session):
'''Register plugin. Called when used as a plugin.'''
VersionToTaskStatus(session).register()

View file

@ -0,0 +1,252 @@
import os
from uuid import uuid4
from openpype_modules.ftrack.lib import BaseAction
from openpype.lib.applications import (
ApplicationManager,
ApplicationLaunchFailed,
ApplictionExecutableNotFound,
CUSTOM_LAUNCH_APP_GROUPS
)
from avalon.api import AvalonMongoDB
class AppplicationsAction(BaseAction):
"""Applications Action class."""
type = "Application"
label = "Application action"
identifier = "openpype_app"
_launch_identifier_with_id = None
icon_url = os.environ.get("OPENPYPE_STATICS_SERVER")
def __init__(self, *args, **kwargs):
super(AppplicationsAction, self).__init__(*args, **kwargs)
self.application_manager = ApplicationManager()
self.dbcon = AvalonMongoDB()
@property
def discover_identifier(self):
if self._discover_identifier is None:
self._discover_identifier = "{}.{}".format(
self.identifier, self.process_identifier()
)
return self._discover_identifier
@property
def launch_identifier(self):
if self._launch_identifier is None:
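# The ".*" suffix acts as a wildcard in the event subscription:
# launch events from any running OpenPype process match, while
# `launch_identifier_with_id` pins handling to this process.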
self._launch_identifier = "{}.*".format(self.identifier)
return self._launch_identifier
@property
def launch_identifier_with_id(self):
if self._launch_identifier_with_id is None:
self._launch_identifier_with_id = "{}.{}".format(
self.identifier, self.process_identifier()
)
return self._launch_identifier_with_id
def construct_requirements_validations(self):
# Override validation as this action does not need them
return
def register(self):
"""Registers the action, subscribing the discover and launch topics."""
discovery_subscription = (
"topic=ftrack.action.discover and source.user.username={0}"
).format(self.session.api_user)
self.session.event_hub.subscribe(
discovery_subscription,
self._discover,
priority=self.priority
)
launch_subscription = (
"topic=ftrack.action.launch"
" and data.actionIdentifier={0}"
" and source.user.username={1}"
).format(
self.launch_identifier,
self.session.api_user
)
self.session.event_hub.subscribe(
launch_subscription,
self._launch
)
def _discover(self, event):
entities = self._translate_event(event)
items = self.discover(self.session, entities, event)
if items:
return {"items": items}
def discover(self, session, entities, event):
"""Return true if we can handle the selected entities.
Args:
session (ftrack_api.Session): Helps to query necessary data.
entities (list): Object of selected entities.
event (ftrack_api.Event): Ftrack event causing discover callback.
"""
if (
len(entities) != 1
or entities[0].entity_type.lower() != "task"
):
return False
entity = entities[0]
if entity["parent"].entity_type.lower() == "project":
return False
avalon_project_apps = event["data"].get("avalon_project_apps", None)
avalon_project_doc = event["data"].get("avalon_project_doc", None)
if avalon_project_apps is None:
if avalon_project_doc is None:
ft_project = self.get_project_from_entity(entity)
project_name = ft_project["full_name"]
if not self.dbcon.is_installed():
self.dbcon.install()
self.dbcon.Session["AVALON_PROJECT"] = project_name
avalon_project_doc = self.dbcon.find_one({
"type": "project"
}) or False
event["data"]["avalon_project_doc"] = avalon_project_doc
if not avalon_project_doc:
return False
project_apps_config = avalon_project_doc["config"].get("apps", [])
avalon_project_apps = [
app["name"] for app in project_apps_config
] or False
event["data"]["avalon_project_apps"] = avalon_project_apps
if not avalon_project_apps:
return False
items = []
for app_name in avalon_project_apps:
app = self.application_manager.applications.get(app_name)
if not app or not app.enabled:
continue
if app.group.name in CUSTOM_LAUNCH_APP_GROUPS:
continue
app_icon = app.icon
if app_icon and self.icon_url:
try:
app_icon = app_icon.format(self.icon_url)
except Exception:
self.log.warning((
"Couldn't fill icon path. Icon template: \"{}\""
" --- Icon url: \"{}\""
).format(app_icon, self.icon_url))
app_icon = None
items.append({
"label": app.group.label,
"variant": app.label,
"description": None,
"actionIdentifier": "{}.{}".format(
self.launch_identifier_with_id, app_name
),
"icon": app_icon
})
return items
def _launch(self, event):
event_identifier = event["data"]["actionIdentifier"]
# Check if identifier is same
# - show message that the action may not be triggered on this machine
if event_identifier.startswith(self.launch_identifier_with_id):
return BaseAction._launch(self, event)
return {
"success": False,
"message": (
"There are running more OpenPype processes"
" where Application can be launched."
)
}
def launch(self, session, entities, event):
"""Callback method for the custom action.
return either a bool (True if successful or False if the action failed)
or a dictionary with the keys `message` and `success`, the message
should be a string and will be displayed as feedback to the user,
success should be a bool, True if successful or False if the action
failed.
*session* is a `ftrack_api.Session` instance
*entities* is a list of tuples each containing the entity type and
the entity id. If the entity is hierarchical you will always get
the entity type TypedContext; once retrieved through a get operation
you will have the "real" entity type, i.e. Shot, Sequence
or Asset Build.
*event* the unmodified original event
"""
identifier = event["data"]["actionIdentifier"]
id_identifier_len = len(self.launch_identifier_with_id) + 1
app_name = identifier[id_identifier_len:]
entity = entities[0]
task_name = entity["name"]
asset_name = entity["parent"]["name"]
project_name = entity["project"]["full_name"]
self.log.info((
"Ftrack launch app: \"{}\" on Project/Asset/Task: {}/{}/{}"
).format(app_name, project_name, asset_name, task_name))
try:
self.application_manager.launch(
app_name,
project_name=project_name,
asset_name=asset_name,
task_name=task_name
)
except ApplictionExecutableNotFound as exc:
self.log.warning(exc.exc_msg)
return {
"success": False,
"message": exc.msg
}
except ApplicationLaunchFailed as exc:
self.log.error(str(exc))
return {
"success": False,
"message": str(exc)
}
except Exception:
msg = "Unexpected failure of application launch {}".format(
self.label
)
self.log.error(msg, exc_info=True)
return {
"success": False,
"message": msg
}
return {
"success": True,
"message": "Launching {0}".format(self.label)
}
def register(session):
"""Register action. Called when used as an event plugin."""
ApplicationsAction(session).register()
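# --- Identifier sketch (illustrative, not part of the action) ---------------
# Why `launch_identifier` uses a wildcard while discovered items embed a
# per-process id: every running OpenPype process subscribes to
# "openpype_app.*", but only the process whose id is embedded in the
# clicked identifier actually launches the application. The id value below
# is a made-up stand-in for `process_identifier()`.
process_id = "1f2e3d"
launch_identifier_with_id = "openpype_app.{}".format(process_id)
action_identifier = "{}.maya/2023".format(launch_identifier_with_id)

if action_identifier.startswith(launch_identifier_with_id):
    app_name = action_identifier[len(launch_identifier_with_id) + 1:]
    print("This process launches:", app_name)  # -> maya/2023
else:
    print("Another process owns this action.")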

View file

@ -0,0 +1,167 @@
"""
Taken from https://github.com/tokejepsen/ftrack-hooks/tree/master/batch_tasks
"""
from openpype_modules.ftrack.lib import BaseAction, statics_icon
class BatchTasksAction(BaseAction):
'''Batch Tasks action
`label` a descriptive string identifying your action.
`variant` To group actions together, give them the same
label and specify a unique variant per action.
`identifier` a unique identifier for your action.
`description` a verbose descriptive text for your action
'''
label = "Batch Task Create"
variant = None
identifier = "batch-tasks"
description = None
icon = statics_icon("ftrack", "action_icons", "BatchTasks.svg")
def discover(self, session, entities, event):
'''Return true if we can handle the selected entities.
*session* is a `ftrack_api.Session` instance
*entities* is a list of tuples each containing the entity type and the
entity id.
If the entity is hierarchical you will always get the entity
type TypedContext; once retrieved through a get operation you
will have the "real" entity type, i.e. Shot, Sequence
or Asset Build.
*event* the unmodified original event
'''
not_allowed = ["assetversion", "project", "ReviewSession"]
if entities[0].entity_type.lower() in not_allowed:
return False
return True
def get_task_form_items(self, session, number_of_tasks):
items = []
task_type_options = [
{'label': task_type["name"], 'value': task_type["id"]}
for task_type in session.query("Type")
]
for index in range(0, number_of_tasks):
items.extend(
[
{
'value': '##Template for Task{0}##'.format(
index
),
'type': 'label'
},
{
'label': 'Type',
'type': 'enumerator',
'name': 'task_{0}_typeid'.format(index),
'data': task_type_options
},
{
'label': 'Name',
'type': 'text',
'name': 'task_{0}_name'.format(index)
}
]
)
return items
def ensure_task(self, session, name, task_type, parent):
# Query for existing task.
query = (
'Task where type.id is "{0}" and name is "{1}" '
'and parent.id is "{2}"'
)
task = session.query(
query.format(
task_type["id"],
name,
parent["id"]
)
).first()
# Create task.
if not task:
session.create(
"Task",
{
"name": name,
"type": task_type,
"parent": parent
}
)
def launch(self, session, entities, event):
'''Callback method for the custom action.
return either a bool ( True if successful or False if the action
failed ) or a dictionary with the keys `message` and `success`, the
message should be a string and will be displayed as feedback to the
user, success should be a bool, True if successful or False if the
action failed.
*session* is a `ftrack_api.Session` instance
*entities* is a list of tuples each containing the entity type and the
entity id.
If the entity is hierarchical you will always get the entity
type TypedContext; once retrieved through a get operation you
will have the "real" entity type, i.e. Shot, Sequence
or Asset Build.
*event* the unmodified original event
'''
if 'values' in event['data']:
values = event['data']['values']
if 'number_of_tasks' in values:
return {
'success': True,
'message': '',
'items': self.get_task_form_items(
session, int(values['number_of_tasks'])
)
}
else:
# Create tasks on each entity
for entity in entities:
for count in range(0, int(len(values.keys()) / 2)):
task_type = session.query(
'Type where id is "{0}"'.format(
values["task_{0}_typeid".format(count)]
)
).one()
# Get name, or assume task type in lower case as name.
name = values["task_{0}_name".format(count)]
if not name:
name = task_type["name"].lower()
self.ensure_task(session, name, task_type, entity)
session.commit()
return {
'success': True,
'message': 'Action completed successfully'
}
return {
'success': True,
'message': "",
'items': [
{
'label': 'Number of tasks',
'type': 'number',
'name': 'number_of_tasks',
'value': 2
}
]
}
def register(session):
'''Register action. Called when used as an event plugin.'''
BatchTasksAction(session).register()
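# --- Form sketch (illustrative, not part of the action) ---------------------
# Shows the values submitted by the form built in `get_task_form_items`
# for two tasks, and why `launch` derives the task count from
# len(values) / 2: each task contributes one "type" and one "name" field.
# All values are made up.
values = {
    "task_0_typeid": "type-id-a", "task_0_name": "modeling",
    "task_1_typeid": "type-id-b", "task_1_name": "",
}
for count in range(int(len(values) / 2)):
    type_id = values["task_{0}_typeid".format(count)]
    # Empty names fall back to the lowered task type name in `launch`.
    name = values["task_{0}_name".format(count)] or "<task type name, lowered>"
    print(count, type_id, name)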

View file

@ -0,0 +1,108 @@
import collections
import ftrack_api
from openpype_modules.ftrack.lib import (
BaseAction,
statics_icon,
get_openpype_attr
)
class CleanHierarchicalAttrsAction(BaseAction):
identifier = "clean.hierarchical.attr"
label = "OpenPype Admin"
variant = "- Clean hierarchical custom attributes"
description = "Unset empty hierarchical attribute values."
icon = statics_icon("ftrack", "action_icons", "OpenPypeAdmin.svg")
all_project_entities_query = (
"select id, name, parent_id, link"
" from TypedContext where project_id is \"{}\""
)
cust_attr_query = (
"select value, entity_id from CustomAttributeValue"
" where entity_id in ({}) and configuration_id is \"{}\""
)
settings_key = "clean_hierarchical_attr"
def discover(self, session, entities, event):
"""Show only on project entity."""
if (
len(entities) != 1
or entities[0].entity_type.lower() != "project"
):
return False
return self.valid_roles(session, entities, event)
def launch(self, session, entities, event):
project = entities[0]
user_message = "This may take some time"
self.show_message(event, user_message, result=True)
self.log.debug("Preparing entities for cleanup.")
all_entities = session.query(
self.all_project_entities_query.format(project["id"])
).all()
all_entities_ids = [
"\"{}\"".format(entity["id"])
for entity in all_entities
if entity.entity_type.lower() != "task"
]
self.log.debug(
"Collected {} entities to process.".format(len(all_entities_ids))
)
entity_ids_joined = ", ".join(all_entities_ids)
attrs, hier_attrs = get_openpype_attr(session)
for attr in hier_attrs:
configuration_key = attr["key"]
self.log.debug(
"Looking for cleanup of custom attribute \"{}\"".format(
configuration_key
)
)
configuration_id = attr["id"]
values = session.query(
self.cust_attr_query.format(
entity_ids_joined, configuration_id
)
).all()
data = {}
for item in values:
value = item["value"]
if value is None:
data[item["entity_id"]] = value
if not data:
self.log.debug(
"Nothing to clean for \"{}\".".format(configuration_key)
)
continue
self.log.debug("Cleaning up {} values for \"{}\".".format(
len(data), configuration_key
))
for entity_id, value in data.items():
entity_key = collections.OrderedDict((
("configuration_id", configuration_id),
("entity_id", entity_id)
))
session.recorded_operations.push(
ftrack_api.operation.DeleteEntityOperation(
"CustomAttributeValue",
entity_key
)
)
session.commit()
return True
def register(session):
'''Register plugin. Called when used as an plugin.'''
CleanHierarchicalAttrsAction(session).register()
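# --- Operation sketch (illustrative, not part of the action) ----------------
# How a single empty hierarchical value is unset: a DeleteEntityOperation
# on "CustomAttributeValue" keyed by the (configuration_id, entity_id)
# pair, followed by a commit. A connected `session` is assumed to exist;
# the helper name is hypothetical.
import collections
import ftrack_api

def unset_attribute_value(session, configuration_id, entity_id):
    entity_key = collections.OrderedDict((
        ("configuration_id", configuration_id),
        ("entity_id", entity_id)
    ))
    session.recorded_operations.push(
        ftrack_api.operation.DeleteEntityOperation(
            "CustomAttributeValue", entity_key
        )
    )
    session.commit()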

View file

@ -0,0 +1,92 @@
from openpype_modules.ftrack.lib import BaseAction, statics_icon
try:
from functools import cmp_to_key
except Exception:
cmp_to_key = None
def existence_comparison(item_a, item_b):
if not item_a and not item_b:
return 0
if not item_a:
return 1
if not item_b:
return -1
return None
def task_name_sorter(item_a, item_b):
asset_version_a = item_a["asset_version"]
asset_version_b = item_b["asset_version"]
asset_version_comp = existence_comparison(asset_version_a, asset_version_b)
if asset_version_comp is not None:
return asset_version_comp
task_a = asset_version_a["task"]
task_b = asset_version_b["task"]
task_comp = existence_comparison(task_a, task_b)
if task_comp is not None:
return task_comp
if task_a["name"] > task_b["name"]:
return 1
if task_a["name"] < task_b["name"]:
return -1
return 0
if cmp_to_key:
task_name_sorter = cmp_to_key(task_name_sorter)
task_name_kwarg_key = "key" if cmp_to_key else "cmp"
task_name_sort_kwargs = {task_name_kwarg_key: task_name_sorter}
class ClientReviewSort(BaseAction):
'''Custom action.'''
#: Action identifier.
identifier = 'client.review.sort'
#: Action label.
label = 'Sort Review'
icon = statics_icon("ftrack", "action_icons", "SortReview.svg")
def discover(self, session, entities, event):
''' Validation '''
if (len(entities) == 0 or entities[0].entity_type != 'ReviewSession'):
return False
return True
def launch(self, session, entities, event):
entity = entities[0]
# Get all objects from Review Session and all 'sort order' possibilities
obj_list = []
sort_order_list = []
for obj in entity['review_session_objects']:
obj_list.append(obj)
sort_order_list.append(obj['sort_order'])
# Sort criteria
obj_list = sorted(obj_list, key=lambda k: k['version'])
obj_list.sort(**task_name_sort_kwargs)
obj_list = sorted(obj_list, key=lambda k: k['name'])
# Set 'sort order' to sorted list, so they are sorted in Ftrack also
for i in range(len(obj_list)):
obj_list[i]['sort_order'] = sort_order_list[i]
session.commit()
return {
'success': True,
'message': 'Client Review sorted!'
}
def register(session):
'''Register action. Called when used as an event plugin.'''
ClientReviewSort(session).register()
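# --- Sort sketch (illustrative, not part of the action) ---------------------
# Python's sort is stable, so sorting three times in ascending key priority
# (version, then task name, then object name) orders objects primarily by
# name, with task name and version as tie-breakers. Made-up data:
items = [
    {"name": "shot010", "task": "comp", "version": 2},
    {"name": "shot010", "task": "anim", "version": 1},
    {"name": "shot005", "task": "comp", "version": 3},
]
items.sort(key=lambda k: k["version"])
items.sort(key=lambda k: k["task"])
items.sort(key=lambda k: k["name"])
print([(i["name"], i["task"], i["version"]) for i in items])
# -> shot005 first, then shot010 "anim" before shot010 "comp"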

View file

@ -0,0 +1,66 @@
import os
import sys
import subprocess
from openpype_modules.ftrack.lib import BaseAction, statics_icon
class ComponentOpen(BaseAction):
'''Custom action.'''
# Action identifier
identifier = 'component.open'
# Action label
label = 'Open File'
# Action icon
icon = statics_icon("ftrack", "action_icons", "ComponentOpen.svg")
def discover(self, session, entities, event):
''' Validation '''
if len(entities) != 1 or entities[0].entity_type != 'FileComponent':
return False
return True
def launch(self, session, entities, event):
entity = entities[0]
# Return error if component is on ftrack server
location_name = entity['component_locations'][0]['location']['name']
if location_name == 'ftrack.server':
return {
'success': False,
'message': "This component is stored on ftrack server!"
}
# Get component filepath
# TODO with locations it will be different???
fpath = entity['component_locations'][0]['resource_identifier']
fpath = os.path.normpath(os.path.dirname(fpath))
if os.path.isdir(fpath):
if sys.platform.startswith('win'):  # windows
subprocess.Popen('explorer "%s"' % fpath)
elif sys.platform == 'darwin': # macOS
subprocess.Popen(['open', fpath])
else: # linux
try:
subprocess.Popen(['xdg-open', fpath])
except OSError:
raise OSError('xdg-open is not available on this platform')
else:
return {
'success': False,
'message': "Didn't found file: " + fpath
}
return {
'success': True,
'message': 'Component folder opened'
}
def register(session):
'''Register action. Called when used as an event plugin.'''
ComponentOpen(session).register()
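# --- Platform check sketch (illustrative, not part of the action) -----------
# Why `startswith` matters in `launch` above: "darwin" contains the
# substring "win", so a plain `'win' in sys.platform` test would route
# macOS to the Windows branch. A compact, self-contained helper:
import subprocess
import sys

def open_folder(path):
    if sys.platform.startswith("win"):
        subprocess.Popen('explorer "{}"'.format(path))
    elif sys.platform == "darwin":
        subprocess.Popen(["open", path])
    else:
        subprocess.Popen(["xdg-open", path])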

View file

@ -0,0 +1,802 @@
import collections
import json
import arrow
import ftrack_api
from openpype_modules.ftrack.lib import (
BaseAction,
statics_icon,
CUST_ATTR_ID_KEY,
CUST_ATTR_GROUP,
CUST_ATTR_TOOLS,
CUST_ATTR_APPLICATIONS,
CUST_ATTR_INTENT,
default_custom_attributes_definition,
app_definitions_from_app_manager,
tool_definitions_from_app_manager
)
from openpype.api import get_system_settings
from openpype.lib import ApplicationManager
"""
This action creates/updates custom attributes.
## First part take care about special attributes
- `avalon_mongo_id` for storing Avalon MongoID
- `applications` based on applications usages
- `tools` based on tools usages
## Second part is based on json file in ftrack module.
File location: `~/OpenPype/pype/modules/ftrack/ftrack_custom_attributes.json`
Data in the json file is a nested dictionary. Keys at the first dictionary
level represent the Ftrack entity type (task, show, assetversion, user,
list, asset) and the dictionary value defines the attribute.
There is a special key for hierarchical attributes: `is_hierarchical`.
The entity type `task` requires a task object type (Folder, Shot,
Sequence, Task, Library, Milestone, Episode, Asset Build, etc.) at the
second dictionary level; a task's attributes are nested one level deeper.
*** Not Changeable *********************************************************
group (string)
- name of group
- based on attribute `openpype_modules.ftrack.lib.CUST_ATTR_GROUP`
- "pype" by default
*** Required ***************************************************************
label (string)
- label that will show in ftrack
key (string)
- must contain only chars [a-z0-9_]
type (string)
- type of custom attribute
- possibilities:
text, boolean, date, enumerator, dynamic enumerator, number
*** Required with conditions ***********************************************
config (dictionary)
- for each attribute type different requirements and possibilities:
- enumerator:
multiSelect = True/False(default: False)
data = {key_1:value_1,key_2:value_2,..,key_n:value_n}
- 'data' is Required value with enumerator
- 'key' must contain only chars [a-z0-9_]
- number:
isdecimal = True/False(default: False)
- text:
markdown = True/False(default: False)
*** Presetable keys **********************************************************
write_security_roles/read_security_roles (array of strings)
- default: ["ALL"]
- strings should be role names (e.g.: ["API", "Administrator"])
- if set to ["ALL"] - all roles will be availabled
- if first is 'except' - roles will be set to all except roles in array
- Warning: Be carefull with except - roles can be different by company
- example:
write_security_roles = ["except", "User"]
read_security_roles = ["ALL"] # (User is can only read)
default
- default: None
- sets default value for custom attribute:
- text -> string
- number -> integer
- enumerator -> array with string of key/s
- boolean -> bool true/false
- date -> string in format: 'YYYY.MM.DD' or 'YYYY.MM.DD HH:mm:ss'
- example: "2018.12.24" / "2018.1.1 6:0:0"
- dynamic enumerator -> CAN'T HAVE A DEFAULT VALUE!
Example:
```
"show": {
"avalon_auto_sync": {
"label": "Avalon auto-sync",
"type": "boolean",
"write_security_roles": ["API", "Administrator"],
"read_security_roles": ["API", "Administrator"]
}
},
"is_hierarchical": {
"fps": {
"label": "FPS",
"type": "number",
"config": {"isdecimal": true}
}
},
"task": {
"library": {
"my_attr_name": {
"label": "My Attr",
"type": "number"
}
}
}
```
"""
class CustAttrException(Exception):
pass
class CustomAttributes(BaseAction):
'''Create/update custom attributes action.'''
#: Action identifier.
identifier = 'create.update.attributes'
#: Action label.
label = "OpenPype Admin"
variant = '- Create/Update Avalon Attributes'
#: Action description.
description = 'Creates Avalon/Mongo ID for double check'
icon = statics_icon("ftrack", "action_icons", "OpenPypeAdmin.svg")
settings_key = "create_update_attributes"
required_keys = ("key", "label", "type")
presetable_keys = (
"default",
"write_security_roles",
"read_security_roles"
)
hierarchical_key = "is_hierarchical"
type_possibilities = (
"text", "boolean", "date", "enumerator",
"dynamic enumerator", "number"
)
def discover(self, session, entities, event):
'''
Validation
- action is only for Administrators
'''
return self.valid_roles(session, entities, event)
def launch(self, session, entities, event):
# JOB SETTINGS
userId = event['source']['user']['id']
user = session.query('User where id is ' + userId).one()
job = session.create('Job', {
'user': user,
'status': 'running',
'data': json.dumps({
'description': 'Custom Attribute creation.'
})
})
session.commit()
self.app_manager = ApplicationManager()
try:
self.prepare_global_data(session)
self.avalon_mongo_id_attributes(session, event)
self.applications_attribute(event)
self.tools_attribute(event)
self.intent_attribute(event)
self.custom_attributes_from_file(event)
job['status'] = 'done'
session.commit()
except Exception:
session.rollback()
job["status"] = "failed"
session.commit()
self.log.error(
"Creating custom attributes failed ({})", exc_info=True
)
return True
def prepare_global_data(self, session):
self.types_per_name = {
attr_type["name"].lower(): attr_type
for attr_type in session.query("CustomAttributeType").all()
}
self.security_roles = {
role["name"].lower(): role
for role in session.query("SecurityRole").all()
}
object_types = session.query("ObjectType").all()
self.object_types_per_id = {
object_type["id"]: object_type for object_type in object_types
}
self.object_types_per_name = {
object_type["name"].lower(): object_type
for object_type in object_types
}
self.groups = {}
self.ftrack_settings = get_system_settings()["modules"]["ftrack"]
self.attrs_settings = self.prepare_attribute_settings()
def prepare_attribute_settings(self):
output = {}
attr_settings = self.ftrack_settings["custom_attributes"]
for entity_type, attr_data in attr_settings.items():
# Lower entity type
entity_type = entity_type.lower()
# Just store if entity type is not "task"
if entity_type != "task":
output[entity_type] = attr_data
continue
# Prepare empty dictionary for entity type if not set yet
if entity_type not in output:
output[entity_type] = {}
# Store presets per lowered object type
for obj_type, _preset in attr_data.items():
output[entity_type][obj_type.lower()] = _preset
return output
def avalon_mongo_id_attributes(self, session, event):
self.create_hierarchical_mongo_attr(session, event)
hierarchical_attr, object_type_attrs = (
self.mongo_id_custom_attributes(session)
)
if object_type_attrs:
self.convert_mongo_id_to_hierarchical(
hierarchical_attr, object_type_attrs, session, event
)
def mongo_id_custom_attributes(self, session):
cust_attrs_query = (
"select id, entity_type, object_type_id, is_hierarchical, default"
" from CustomAttributeConfiguration"
" where key = \"{}\""
).format(CUST_ATTR_ID_KEY)
mongo_id_avalon_attr = session.query(cust_attrs_query).all()
hierarchical_attr = None
object_type_attrs = []
for cust_attr in mongo_id_avalon_attr:
if cust_attr["is_hierarchical"]:
hierarchical_attr = cust_attr
else:
object_type_attrs.append(cust_attr)
return hierarchical_attr, object_type_attrs
def create_hierarchical_mongo_attr(self, session, event):
# Set security roles for attribute
data = {
"key": CUST_ATTR_ID_KEY,
"label": "Avalon/Mongo ID",
"type": "text",
"default": "",
"group": CUST_ATTR_GROUP,
"is_hierarchical": True,
"config": {"markdown": False}
}
self.process_attr_data(data, event)
def convert_mongo_id_to_hierarchical(
self, hierarchical_attr, object_type_attrs, session, event
):
user_msg = "Converting old custom attributes. This may take some time."
self.show_message(event, user_msg, True)
self.log.info(user_msg)
object_types_per_id = {
object_type["id"]: object_type
for object_type in session.query("ObjectType").all()
}
cust_attr_query = (
"select value, entity_id from CustomAttributeValue"
" where configuration_id is {}"
)
for attr_def in object_type_attrs:
attr_ent_type = attr_def["entity_type"]
if attr_ent_type == "show":
entity_type_label = "Project"
elif attr_ent_type == "task":
entity_type_label = (
object_types_per_id[attr_def["object_type_id"]]["name"]
)
else:
self.log.warning(
"Unsupported entity type: \"{}\". Skipping.".format(
attr_ent_type
)
)
continue
self.log.debug((
"Converting Avalon MongoID attr for Entity type \"{}\"."
).format(entity_type_label))
values = session.query(
cust_attr_query.format(attr_def["id"])
).all()
for value in values:
table_values = collections.OrderedDict([
("configuration_id", hierarchical_attr["id"]),
("entity_id", value["entity_id"])
])
session.recorded_operations.push(
ftrack_api.operation.UpdateEntityOperation(
"ContextCustomAttributeValue",
table_values,
"value",
ftrack_api.symbol.NOT_SET,
value["value"]
)
)
try:
session.commit()
except Exception:
session.rollback()
self.log.warning(
(
"Couldn't transfer Avalon Mongo ID"
" attribute for entity type \"{}\"."
).format(entity_type_label),
exc_info=True
)
try:
session.delete(attr_def)
session.commit()
except Exception:
session.rollback()
self.log.warning(
(
"Couldn't delete Avalon Mongo ID"
" attribute for entity type \"{}\"."
).format(entity_type_label),
exc_info=True
)
def applications_attribute(self, event):
apps_data = app_definitions_from_app_manager(self.app_manager)
applications_custom_attr_data = {
"label": "Applications",
"key": CUST_ATTR_APPLICATIONS,
"type": "enumerator",
"entity_type": "show",
"group": CUST_ATTR_GROUP,
"config": {
"multiselect": True,
"data": apps_data
}
}
self.process_attr_data(applications_custom_attr_data, event)
def tools_attribute(self, event):
tools_data = tool_definitions_from_app_manager(self.app_manager)
tools_custom_attr_data = {
"label": "Tools",
"key": CUST_ATTR_TOOLS,
"type": "enumerator",
"is_hierarchical": True,
"group": CUST_ATTR_GROUP,
"config": {
"multiselect": True,
"data": tools_data
}
}
self.process_attr_data(tools_custom_attr_data, event)
def intent_attribute(self, event):
intent_key_values = self.ftrack_settings["intent"]["items"]
intent_values = []
for key, label in intent_key_values.items():
if not key or not label:
self.log.info((
"Skipping intent row: {{\"{}\": \"{}\"}}"
" because of empty key or label."
).format(key, label))
continue
intent_values.append({key: label})
if not intent_values:
return
intent_custom_attr_data = {
"label": "Intent",
"key": CUST_ATTR_INTENT,
"type": "enumerator",
"entity_type": "assetversion",
"group": CUST_ATTR_GROUP,
"config": {
"multiselect": False,
"data": intent_values
}
}
self.process_attr_data(intent_custom_attr_data, event)
def custom_attributes_from_file(self, event):
# Load json with custom attributes configurations
cust_attr_def = default_custom_attributes_definition()
attrs_data = []
# Prepare data of hierarchical attributes
hierarchical_attrs = cust_attr_def.pop(self.hierarchical_key, {})
for key, cust_attr_data in hierarchical_attrs.items():
cust_attr_data["key"] = key
cust_attr_data["is_hierarchical"] = True
attrs_data.append(cust_attr_data)
# Prepare data of entity specific attributes
for entity_type, cust_attr_datas in cust_attr_def.items():
if entity_type.lower() != "task":
for key, cust_attr_data in cust_attr_datas.items():
cust_attr_data["key"] = key
cust_attr_data["entity_type"] = entity_type
attrs_data.append(cust_attr_data)
continue
# Task should have nested level for object type
for object_type, _cust_attr_datas in cust_attr_datas.items():
for key, cust_attr_data in _cust_attr_datas.items():
cust_attr_data["key"] = key
cust_attr_data["entity_type"] = entity_type
cust_attr_data["object_type"] = object_type
attrs_data.append(cust_attr_data)
# Process prepared data
for cust_attr_data in attrs_data:
# Add group
cust_attr_data["group"] = CUST_ATTR_GROUP
self.process_attr_data(cust_attr_data, event)
def presets_for_attr_data(self, attr_data):
output = {}
attr_key = attr_data["key"]
if attr_data.get("is_hierarchical"):
entity_key = self.hierarchical_key
else:
entity_key = attr_data["entity_type"]
entity_settings = self.attrs_settings.get(entity_key) or {}
if entity_key.lower() == "task":
object_type = attr_data["object_type"]
entity_settings = entity_settings.get(object_type.lower()) or {}
key_settings = entity_settings.get(attr_key) or {}
for key, value in key_settings.items():
if key in self.presetable_keys and value:
output[key] = value
return output
def process_attr_data(self, cust_attr_data, event):
attr_settings = self.presets_for_attr_data(cust_attr_data)
cust_attr_data.update(attr_settings)
try:
data = {}
# Get key, label, type
data.update(self.get_required(cust_attr_data))
# Get is_hierarchical / entity_type / object_type_id
data.update(self.get_entity_type(cust_attr_data))
# Get group, default, security roles
data.update(self.get_optional(cust_attr_data))
# Process data
self.process_attribute(data)
except CustAttrException as cae:
cust_attr_name = cust_attr_data.get("label", cust_attr_data["key"])
if cust_attr_name:
msg = 'Custom attribute error "{}" - {}'.format(
cust_attr_name, str(cae)
)
else:
msg = 'Custom attribute error - {}'.format(str(cae))
self.log.warning(msg, exc_info=True)
self.show_message(event, msg)
def process_attribute(self, data):
existing_attrs = self.session.query(
"CustomAttributeConfiguration"
).all()
matching = []
for attr in existing_attrs:
if (
attr["key"] != data["key"] or
attr["type"]["name"] != data["type"]["name"]
):
continue
if data.get("is_hierarchical") is True:
if attr["is_hierarchical"] is True:
matching.append(attr)
elif "object_type_id" in data:
if (
attr["entity_type"] == data["entity_type"] and
attr["object_type_id"] == data["object_type_id"]
):
matching.append(attr)
else:
if attr["entity_type"] == data["entity_type"]:
matching.append(attr)
if len(matching) == 0:
self.session.create("CustomAttributeConfiguration", data)
self.session.commit()
self.log.debug(
"Custom attribute \"{}\" created".format(data["label"])
)
elif len(matching) == 1:
attr_update = matching[0]
for key in data:
if key not in (
"is_hierarchical", "entity_type", "object_type_id"
):
attr_update[key] = data[key]
self.session.commit()
self.log.debug(
"Custom attribute \"{}\" updated".format(data["label"])
)
else:
raise CustAttrException((
"Custom attribute is duplicated. Key: \"{}\" Type: \"{}\""
).format(data["key"], data["type"]["name"]))
def get_required(self, attr):
output = {}
for key in self.required_keys:
if key not in attr:
raise CustAttrException(
"BUG: Key \"{}\" is required".format(key)
)
if attr['type'].lower() not in self.type_possibilities:
raise CustAttrException(
'Type {} is not valid'.format(attr['type'])
)
output['key'] = attr['key']
output['label'] = attr['label']
type_name = attr['type'].lower()
output['type'] = self.types_per_name[type_name]
config = None
if type_name == 'number':
config = self.get_number_config(attr)
elif type_name == 'text':
config = self.get_text_config(attr)
elif type_name == 'enumerator':
config = self.get_enumerator_config(attr)
if config is not None:
output['config'] = config
return output
def get_number_config(self, attr):
if 'config' in attr and 'isdecimal' in attr['config']:
isdecimal = attr['config']['isdecimal']
else:
isdecimal = False
config = json.dumps({'isdecimal': isdecimal})
return config
def get_text_config(self, attr):
if 'config' in attr and 'markdown' in attr['config']:
markdown = attr['config']['markdown']
else:
markdown = False
config = json.dumps({'markdown': markdown})
return config
def get_enumerator_config(self, attr):
if 'config' not in attr:
raise CustAttrException('Missing config with data')
if 'data' not in attr['config']:
raise CustAttrException('Missing data in config')
data = []
for item in attr['config']['data']:
item_data = {}
for key in item:
# TODO key check by regex
item_data['menu'] = item[key]
item_data['value'] = key
data.append(item_data)
multiSelect = False
for k in attr['config']:
if k.lower() == 'multiselect':
if isinstance(attr['config'][k], bool):
multiSelect = attr['config'][k]
else:
raise CustAttrException('Multiselect must be boolean')
break
config = json.dumps({
'multiSelect': multiSelect,
'data': json.dumps(data)
})
return config
def get_group(self, attr):
if isinstance(attr, dict):
group_name = attr['group'].lower()
else:
group_name = attr
if group_name in self.groups:
return self.groups[group_name]
query = 'CustomAttributeGroup where name is "{}"'.format(group_name)
groups = self.session.query(query).all()
if len(groups) == 1:
group = groups[0]
self.groups[group_name] = group
return group
elif len(groups) < 1:
group = self.session.create('CustomAttributeGroup', {
'name': group_name,
})
self.session.commit()
return group
else:
raise CustAttrException(
'Found more than one group "{}"'.format(group_name)
)
def get_security_roles(self, security_roles):
security_roles_lowered = tuple(name.lower() for name in security_roles)
if (
len(security_roles_lowered) == 0
or "all" in security_roles_lowered
):
return list(self.security_roles.values())
output = []
if security_roles_lowered[0] == "except":
excepts = security_roles_lowered[1:]
for role_name, role in self.security_roles.items():
if role_name not in excepts:
output.append(role)
else:
for role_name in security_roles_lowered:
if role_name in self.security_roles:
output.append(self.security_roles[role_name])
else:
raise CustAttrException((
"Securit role \"{}\" was not found in Ftrack."
).format(role_name))
return output
def get_default(self, attr):
type = attr['type']
default = attr['default']
if default is None:
return default
err_msg = 'Default value is not'
if type == 'number':
if isinstance(default, str) and default.isnumeric():
default = float(default)
if not isinstance(default, (float, int)):
raise CustAttrException('{} a number'.format(err_msg))
elif type == 'text':
if not isinstance(default, str):
raise CustAttrException('{} string'.format(err_msg))
elif type == 'boolean':
if not isinstance(default, bool):
raise CustAttrException('{} boolean'.format(err_msg))
elif type == 'enumerator':
if not isinstance(default, list):
raise CustAttrException(
'{} array with strings'.format(err_msg)
)
# TODO check if multiSelect is available
# and if default is one of data menu
if not isinstance(default[0], str):
raise CustAttrException('{} array of strings'.format(err_msg))
elif type == 'date':
date_items = default.split(' ')
try:
if len(date_items) == 1:
default = arrow.get(default, 'YYYY.M.D')
elif len(date_items) == 2:
default = arrow.get(default, 'YYYY.M.D H:m:s')
else:
raise Exception
except Exception:
raise CustAttrException('Date is not in proper format')
elif type == 'dynamic enumerator':
raise CustAttrException('Dynamic enumerator can\'t have default')
return default
def get_optional(self, attr):
output = {}
if "group" in attr:
output["group"] = self.get_group(attr)
if "default" in attr:
output["default"] = self.get_default(attr)
roles_read = []
roles_write = []
if "read_security_roles" in attr:
roles_read = attr["read_security_roles"]
if "write_security_roles" in attr:
roles_write = attr["write_security_roles"]
output["read_security_roles"] = self.get_security_roles(roles_read)
output["write_security_roles"] = self.get_security_roles(roles_write)
return output
def get_entity_type(self, attr):
if attr.get("is_hierarchical", False):
return {
"is_hierarchical": True,
"entity_type": attr.get("entity_type") or "show"
}
if 'entity_type' not in attr:
raise CustAttrException('Missing entity_type')
if attr['entity_type'].lower() != 'task':
return {'entity_type': attr['entity_type']}
if 'object_type' not in attr:
raise CustAttrException('Missing object_type')
object_type_name = attr['object_type']
object_type_name_low = object_type_name.lower()
object_type = self.object_types_per_name.get(object_type_name_low)
if not object_type:
raise CustAttrException((
'Object type with name "{}" don\'t exist'
).format(object_type_name))
return {
'entity_type': attr['entity_type'],
'object_type_id': object_type["id"]
}
def register(session):
'''Register plugin. Called when used as an plugin.'''
CustomAttributes(session).register()
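# --- Config sketch (illustrative, not part of the action) -------------------
# Shows the double JSON encoding produced by `get_enumerator_config`: the
# outer config is serialized to a JSON string and its "data" value is
# itself a JSON string, which is the shape the ftrack server stores.
# The menu items are made up.
import json

data = [{"menu": "Maya", "value": "maya"}, {"menu": "Nuke", "value": "nuke"}]
config = json.dumps({"multiSelect": True, "data": json.dumps(data)})
print(config)
# {"multiSelect": true, "data": "[{\"menu\": \"Maya\", \"value\": \"maya\"}, ..."}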

View file

@ -0,0 +1,225 @@
import os
from openpype_modules.ftrack.lib import BaseAction, statics_icon
from avalon import lib as avalonlib
from openpype.api import (
Anatomy,
get_project_settings
)
from openpype.lib import ApplicationManager
class CreateFolders(BaseAction):
identifier = "create.folders"
label = "Create Folders"
icon = statics_icon("ftrack", "action_icons", "CreateFolders.svg")
def discover(self, session, entities, event):
if len(entities) != 1:
return False
not_allowed = ["assetversion", "project"]
if entities[0].entity_type.lower() in not_allowed:
return False
return True
def interface(self, session, entities, event):
if event["data"].get("values", {}):
return
entity = entities[0]
without_interface = True
for child in entity["children"]:
if child["object_type"]["name"].lower() != "task":
without_interface = False
break
self.without_interface = without_interface
if without_interface:
return
title = "Create folders"
entity_name = entity["name"]
msg = (
"<h2>Do you want create folders also"
" for all children of \"{}\"?</h2>"
)
if entity.entity_type.lower() == "project":
entity_name = entity["full_name"]
msg = msg.replace(" also", "")
msg += "<h3>(Project root won't be created if not checked)</h3>"
items = []
item_msg = {
"type": "label",
"value": msg.format(entity_name)
}
item_label = {
"type": "label",
"value": "With all chilren entities"
}
item = {
"name": "children_included",
"type": "boolean",
"value": False
}
items.append(item_msg)
items.append(item_label)
items.append(item)
return {
"items": items,
"title": title
}
def launch(self, session, entities, event):
'''Callback method for custom action.'''
with_childrens = True
if self.without_interface is False:
if "values" not in event["data"]:
return
with_childrens = event["data"]["values"]["children_included"]
entity = entities[0]
if entity.entity_type.lower() == "project":
proj = entity
else:
proj = entity["project"]
project_name = proj["full_name"]
project_code = proj["name"]
if entity.entity_type.lower() == 'project' and with_childrens is False:
return {
'success': True,
'message': 'Nothing was created'
}
all_entities = []
all_entities.append(entity)
if with_childrens:
all_entities = self.get_notask_children(entity)
anatomy = Anatomy(project_name)
project_settings = get_project_settings(project_name)
work_keys = ["work", "folder"]
work_template = anatomy.templates
for key in work_keys:
work_template = work_template[key]
work_has_apps = "{app" in work_template
publish_keys = ["publish", "folder"]
publish_template = anatomy.templates
for key in publish_keys:
publish_template = publish_template[key]
publish_has_apps = "{app" in publish_template
collected_paths = []
for entity in all_entities:
if entity.entity_type.lower() == "project":
continue
ent_data = {
"project": {
"name": project_name,
"code": project_code
}
}
ent_data["asset"] = entity["name"]
parents = entity["link"][1:-1]
hierarchy_names = [p["name"] for p in parents]
hierarchy = ""
if hierarchy_names:
hierarchy = os.path.sep.join(hierarchy_names)
ent_data["hierarchy"] = hierarchy
tasks_created = False
for child in entity["children"]:
if child["object_type"]["name"].lower() != "task":
continue
tasks_created = True
task_data = ent_data.copy()
task_data["task"] = child["name"]
apps = []
# Template work
if work_has_apps:
app_data = task_data.copy()
for app in apps:
app_data["app"] = app
collected_paths.append(self.compute_template(
anatomy, app_data, work_keys
))
else:
collected_paths.append(self.compute_template(
anatomy, task_data, work_keys
))
# Template publish
if publish_has_apps:
app_data = task_data.copy()
for app in apps:
app_data["app"] = app
collected_paths.append(self.compute_template(
anatomy, app_data, publish_keys
))
else:
collected_paths.append(self.compute_template(
anatomy, task_data, publish_keys
))
if not tasks_created:
# create path for entity
collected_paths.append(self.compute_template(
anatomy, ent_data, work_keys
))
collected_paths.append(self.compute_template(
anatomy, ent_data, publish_keys
))
if len(collected_paths) == 0:
return {
"success": True,
"message": "No project folders to create."
}
self.log.info("Creating folders:")
for path in set(collected_paths):
self.log.info(path)
if not os.path.exists(path):
os.makedirs(path)
return {
"success": True,
"message": "Successfully created project folders."
}
def get_notask_children(self, entity):
output = []
if entity.entity_type.lower() == "task":
return output
output.append(entity)
for child in entity["children"]:
output.extend(self.get_notask_children(child))
return output
def compute_template(self, anatomy, data, anatomy_keys):
filled_template = anatomy.format_all(data)
for key in anatomy_keys:
filled_template = filled_template[key]
if filled_template.solved:
return os.path.normpath(filled_template)
self.log.warning(
"Template \"{}\" was not fully filled \"{}\"".format(
filled_template.template, filled_template
)
)
return os.path.normpath(filled_template.split("{")[0])
def register(session):
"""Register plugin. Called when used as an plugin."""
CreateFolders(session).register()
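# --- Data sketch (illustrative, not part of the action) ---------------------
# Illustrates the template data built per entity in `launch`: "hierarchy"
# is the parent chain from the entity's "link" (excluding the project and
# the entity itself) joined with the OS separator. The link is made up.
import os

link = [
    {"name": "MyProject"}, {"name": "shots"},
    {"name": "sq01"}, {"name": "sh010"}
]
parents = link[1:-1]
ent_data = {
    "project": {"name": "MyProject", "code": "myprj"},
    "asset": link[-1]["name"],
    "hierarchy": os.path.sep.join(p["name"] for p in parents),
}
print(ent_data["hierarchy"])  # e.g. "shots/sq01" (or "shots\\sq01" on Windows)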

View file

@ -0,0 +1,193 @@
import os
import re
import json
from openpype_modules.ftrack.lib import BaseAction, statics_icon
from openpype.api import get_project_basic_paths, create_project_folders
class CreateProjectFolders(BaseAction):
"""Action create folder structure and may create hierarchy in Ftrack.
Creation of folder structure and hierarchy in Ftrack is based on presets.
These presets are located in:
`~/pype-config/presets/tools/project_folder_structure.json`
Example of content:
```json
{
"__project_root__": {
"prod" : {},
"resources" : {
"footage": {
"plates": {},
"offline": {}
},
"audio": {},
"art_dept": {}
},
"editorial" : {},
"assets[ftrack.Library]": {
"characters[ftrack]": {},
"locations[ftrack]": {}
},
"shots[ftrack.Sequence]": {
"scripts": {},
"editorial[ftrack.Folder]": {}
}
}
}
```
Key "__project_root__" indicates root folder (or entity). Each key in
dictionary represents folder name. Value may contain another dictionary
with subfolders.
Identifier `[ftrack]` in the name says that this should also be created in
the Ftrack hierarchy. It is possible to specify the entity type of an item
with ".". If a key is `assets[ftrack.Library]`, an entity with name "assets"
and entity type "Library" is created in Ftrack. The Library entity type is
expected to exist in Ftrack.
"""
identifier = "create.project.structure"
label = "Create Project Structure"
description = "Creates folder structure"
role_list = ["Pypeclub", "Administrator", "Project Manager"]
icon = statics_icon("ftrack", "action_icons", "CreateProjectFolders.svg")
pattern_array = re.compile(r"\[.*\]")
pattern_ftrack = re.compile(r".*\[[.]*ftrack[.]*")
pattern_ent_ftrack = re.compile(r"ftrack\.[^.,\],\s,]*")
project_root_key = "__project_root__"
def discover(self, session, entities, event):
if len(entities) != 1:
return False
if entities[0].entity_type.lower() != "project":
return False
return True
def launch(self, session, entities, event):
# Get project entity
project_entity = self.get_project_from_entity(entities[0])
project_name = project_entity["full_name"]
try:
# Get paths based on presets
basic_paths = get_project_basic_paths(project_name)
if not basic_paths:
return {
"success": False,
"message": "Project structure is not set."
}
# Invoking OpenPype API to create the project folders
create_project_folders(basic_paths, project_name)
self.create_ftrack_entities(basic_paths, project_entity)
except Exception as exc:
self.log.warning("Creating of structure crashed.", exc_info=True)
session.rollback()
return {
"success": False,
"message": str(exc)
}
return True
def get_ftrack_paths(self, paths_items):
all_ftrack_paths = []
for path_items in paths_items:
ftrack_path_items = []
is_ftrack = False
for item in reversed(path_items):
if item == self.project_root_key:
continue
if is_ftrack:
ftrack_path_items.append(item)
elif re.match(self.pattern_ftrack, item):
ftrack_path_items.append(item)
is_ftrack = True
ftrack_path_items = list(reversed(ftrack_path_items))
if ftrack_path_items:
all_ftrack_paths.append(ftrack_path_items)
return all_ftrack_paths
def compute_ftrack_items(self, in_list, keys):
if len(keys) == 0:
return in_list
key = keys[0]
exist = None
for index, subdict in enumerate(in_list):
if key in subdict:
exist = index
break
if exist is not None:
in_list[exist][key] = self.compute_ftrack_items(
in_list[exist][key], keys[1:]
)
else:
in_list.append({key: self.compute_ftrack_items([], keys[1:])})
return in_list
def translate_ftrack_items(self, paths_items):
main = []
for path_items in paths_items:
main = self.compute_ftrack_items(main, path_items)
return main
def create_ftrack_entities(self, basic_paths, project_ent):
only_ftrack_items = self.get_ftrack_paths(basic_paths)
ftrack_paths = self.translate_ftrack_items(only_ftrack_items)
for separation in ftrack_paths:
parent = project_ent
self.trigger_creation(separation, parent)
def trigger_creation(self, separation, parent):
for item, subvalues in separation.items():
matches = re.findall(self.pattern_array, item)
ent_type = "Folder"
if len(matches) == 0:
name = item
else:
match = matches[0]
name = item.replace(match, "")
ent_type_match = re.findall(self.pattern_ent_ftrack, match)
if len(ent_type_match) > 0:
ent_type_split = ent_type_match[0].split(".")
if len(ent_type_split) == 2:
ent_type = ent_type_split[1]
new_parent = self.create_ftrack_entity(name, ent_type, parent)
if subvalues:
for subvalue in subvalues:
self.trigger_creation(subvalue, new_parent)
def create_ftrack_entity(self, name, ent_type, parent):
for children in parent["children"]:
if children["name"] == name:
return children
data = {
"name": name,
"parent_id": parent["id"]
}
if parent.entity_type.lower() == "project":
data["project_id"] = parent["id"]
else:
data["project_id"] = parent["project"]["id"]
existing_entity = self.session.query((
"TypedContext where name is \"{}\" and "
"parent_id is \"{}\" and project_id is \"{}\""
).format(name, data["parent_id"], data["project_id"])).first()
if existing_entity:
return existing_entity
new_ent = self.session.create(ent_type, data)
self.session.commit()
return new_ent
def register(session):
CreateProjectFolders(session).register()
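# --- Pattern sketch (illustrative, not part of the action) ------------------
# Demonstrates how a folder key such as "assets[ftrack.Library]" is split
# into a folder name and an ftrack entity type by the regexes above.
import re

pattern_array = re.compile(r"\[.*\]")
pattern_ent_ftrack = re.compile(r"ftrack\.[^.,\],\s,]*")

item = "assets[ftrack.Library]"
match = pattern_array.findall(item)[0]          # "[ftrack.Library]"
name = item.replace(match, "")                  # "assets"
ent_type = pattern_ent_ftrack.findall(match)[0].split(".")[1]  # "Library"
print(name, ent_type)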

View file

@ -0,0 +1,708 @@
import collections
import uuid
from datetime import datetime
from bson.objectid import ObjectId
from avalon.api import AvalonMongoDB
from openpype_modules.ftrack.lib import BaseAction, statics_icon
from openpype_modules.ftrack.lib.avalon_sync import create_chunks
class DeleteAssetSubset(BaseAction):
'''Delete asset/subset action.'''
# Action identifier.
identifier = "delete.asset.subset"
# Action label.
label = "Delete Asset/Subsets"
# Action description.
description = "Removes from Avalon with all childs and asset from Ftrack"
icon = statics_icon("ftrack", "action_icons", "DeleteAsset.svg")
settings_key = "delete_asset_subset"
# Db connection
dbcon = None
splitter = {"type": "label", "value": "---"}
action_data_by_id = {}
asset_prefix = "asset:"
subset_prefix = "subset:"
def __init__(self, *args, **kwargs):
self.dbcon = AvalonMongoDB()
super(DeleteAssetSubset, self).__init__(*args, **kwargs)
def discover(self, session, entities, event):
""" Validation """
task_ids = []
for ent_info in event["data"]["selection"]:
if ent_info.get("entityType") == "task":
task_ids.append(ent_info["entityId"])
is_valid = False
for entity in entities:
if (
entity["id"] in task_ids
and entity.entity_type.lower() != "task"
):
is_valid = True
break
if is_valid:
is_valid = self.valid_roles(session, entities, event)
return is_valid
def _launch(self, event):
try:
entities = self._translate_event(event)
if "values" not in event["data"]:
self.dbcon.install()
return self._interface(self.session, entities, event)
confirmation = self.confirm_delete(entities, event)
if confirmation:
return confirmation
self.dbcon.install()
response = self.launch(
self.session, entities, event
)
finally:
self.dbcon.uninstall()
return self._handle_result(response)
def interface(self, session, entities, event):
self.show_message(event, "Preparing data...", True)
items = []
title = "Choose items to delete"
# Filter selection and get ftrack ids
selection = event["data"].get("selection") or []
ftrack_ids = []
project_in_selection = False
for entity in selection:
entity_type = (entity.get("entityType") or "").lower()
if entity_type != "task":
if entity_type == "show":
project_in_selection = True
continue
ftrack_id = entity.get("entityId")
if not ftrack_id:
continue
ftrack_ids.append(ftrack_id)
if project_in_selection:
msg = "It is not possible to use this action on project entity."
self.show_message(event, msg, True)
# Filter event even more (skip task entities)
# - task entities are not relevant for avalon
entity_mapping = {}
for entity in entities:
ftrack_id = entity["id"]
if ftrack_id not in ftrack_ids:
continue
if entity.entity_type.lower() == "task":
ftrack_ids.remove(ftrack_id)
entity_mapping[ftrack_id] = entity
if not ftrack_ids:
# It is bug if this happens!
return {
"success": False,
"message": "Invalid selection for this action (Bug)"
}
if entities[0].entity_type.lower() == "project":
project = entities[0]
else:
project = entities[0]["project"]
project_name = project["full_name"]
self.dbcon.Session["AVALON_PROJECT"] = project_name
selected_av_entities = list(self.dbcon.find({
"type": "asset",
"data.ftrackId": {"$in": ftrack_ids}
}))
found_without_ftrack_id = {}
if len(selected_av_entities) != len(ftrack_ids):
found_ftrack_ids = [
ent["data"]["ftrackId"] for ent in selected_av_entities
]
for ftrack_id, entity in entity_mapping.items():
if ftrack_id in found_ftrack_ids:
continue
av_ents_by_name = list(self.dbcon.find({
"type": "asset",
"name": entity["name"]
}))
if not av_ents_by_name:
continue
ent_path_items = [ent["name"] for ent in entity["link"]]
parents = ent_path_items[1:-1]
# TODO we should tell the user that
# a few of them are missing in avalon
for av_ent in av_ents_by_name:
if av_ent["data"]["parents"] != parents:
continue
# TODO we should tell the user that the found entity
# with the same name does not match the same ftrack id?
if "ftrackId" not in av_ent["data"]:
selected_av_entities.append(av_ent)
found_without_ftrack_id[str(av_ent["_id"])] = ftrack_id
break
if not selected_av_entities:
return {
"success": True,
"message": (
"Didn't found entities in avalon."
" You can use Ftrack's Delete button for the selection."
)
}
# Remove cached actions older than 2 minutes
old_action_ids = []
for action_id, data in self.action_data_by_id.items():
created_at = data.get("created_at")
if not created_at:
old_action_ids.append(action_id)
continue
cur_time = datetime.now()
existing_in_sec = (cur_time - created_at).total_seconds()
if existing_in_sec > 60 * 2:
old_action_ids.append(action_id)
for action_id in old_action_ids:
self.action_data_by_id.pop(action_id, None)
# Store data for action id
action_id = str(uuid.uuid1())
self.action_data_by_id[action_id] = {
"attempt": 1,
"created_at": datetime.now(),
"project_name": project_name,
"subset_ids_by_name": {},
"subset_ids_by_parent": {},
"without_ftrack_id": found_without_ftrack_id
}
id_item = {
"type": "hidden",
"name": "action_id",
"value": action_id
}
items.append(id_item)
asset_ids = [ent["_id"] for ent in selected_av_entities]
subsets_for_selection = self.dbcon.find({
"type": "subset",
"parent": {"$in": asset_ids}
})
asset_ending = ""
if len(selected_av_entities) > 1:
asset_ending = "s"
asset_title = {
"type": "label",
"value": "# Delete asset{}:".format(asset_ending)
}
asset_note = {
"type": "label",
"value": (
"<p><i>NOTE: Action will delete checked entities"
" in Ftrack and Avalon with all children entities and"
" published content.</i></p>"
)
}
items.append(asset_title)
items.append(asset_note)
asset_items = collections.defaultdict(list)
for asset in selected_av_entities:
ent_path_items = [project_name]
ent_path_items.extend(asset.get("data", {}).get("parents") or [])
ent_path_to_parent = "/".join(ent_path_items) + "/"
asset_items[ent_path_to_parent].append(asset)
for asset_parent_path, assets in sorted(asset_items.items()):
items.append({
"type": "label",
"value": "## <b>- {}</b>".format(asset_parent_path)
})
for asset in assets:
items.append({
"label": asset["name"],
"name": "{}{}".format(
self.asset_prefix, str(asset["_id"])
),
"type": 'boolean',
"value": False
})
subset_ids_by_name = collections.defaultdict(list)
subset_ids_by_parent = collections.defaultdict(list)
for subset in subsets_for_selection:
subset_id = subset["_id"]
name = subset["name"]
parent_id = subset["parent"]
subset_ids_by_name[name].append(subset_id)
subset_ids_by_parent[parent_id].append(subset_id)
if not subset_ids_by_name:
return {
"items": items,
"title": title
}
subset_ending = ""
if len(subset_ids_by_name.keys()) > 1:
subset_ending = "s"
subset_title = {
"type": "label",
"value": "# Subset{} to delete:".format(subset_ending)
}
subset_note = {
"type": "label",
"value": (
"<p><i>WARNING: Subset{} will be removed"
" for all <b>selected</b> entities.</i></p>"
).format(subset_ending)
}
items.append(self.splitter)
items.append(subset_title)
items.append(subset_note)
for name in subset_ids_by_name:
items.append({
"label": "<b>{}</b>".format(name),
"name": "{}{}".format(self.subset_prefix, name),
"type": "boolean",
"value": False
})
self.action_data_by_id[action_id]["subset_ids_by_parent"] = (
subset_ids_by_parent
)
self.action_data_by_id[action_id]["subset_ids_by_name"] = (
subset_ids_by_name
)
return {
"items": items,
"title": title
}
def confirm_delete(self, entities, event):
values = event["data"]["values"]
action_id = values.get("action_id")
spec_data = self.action_data_by_id.get(action_id)
if not spec_data:
# it is a bug if this happens!
return {
"success": False,
"message": "Something bad has happened. Please try again."
}
# Process Delete confirmation
delete_key = values.get("delete_key")
if delete_key:
delete_key = delete_key.lower().strip()
# Go to launch part if user entered `delete`
if delete_key == "delete":
return
# Skip whole process if user didn't enter any text
elif delete_key == "":
self.action_data_by_id.pop(action_id, None)
return {
"success": True,
"message": "Deleting cancelled (delete entry was empty)"
}
# Get data to show again
to_delete = spec_data["to_delete"]
else:
to_delete = collections.defaultdict(list)
for key, value in values.items():
if not value:
continue
if key.startswith(self.asset_prefix):
_key = key.replace(self.asset_prefix, "")
to_delete["assets"].append(_key)
elif key.startswith(self.subset_prefix):
_key = key.replace(self.subset_prefix, "")
to_delete["subsets"].append(_key)
self.action_data_by_id[action_id]["to_delete"] = to_delete
asset_to_delete = len(to_delete.get("assets") or []) > 0
subset_to_delete = len(to_delete.get("subsets") or []) > 0
if not asset_to_delete and not subset_to_delete:
self.action_data_by_id.pop(action_id, None)
return {
"success": True,
"message": "Nothing was selected to delete"
}
attempt = spec_data["attempt"]
if attempt > 3:
self.action_data_by_id.pop(action_id, None)
return {
"success": False,
"message": "You didn't enter \"DELETE\" properly 3 times!"
}
self.action_data_by_id[action_id]["attempt"] += 1
title = "Confirmation of deleting"
if asset_to_delete:
asset_len = len(to_delete["assets"])
asset_ending = ""
if asset_len > 1:
asset_ending = "s"
title += " {} Asset{}".format(asset_len, asset_ending)
if subset_to_delete:
title += " and"
if subset_to_delete:
sub_len = len(to_delete["subsets"])
type_ending = ""
sub_ending = ""
if sub_len == 1:
subset_ids_by_name = spec_data["subset_ids_by_name"]
if len(subset_ids_by_name[to_delete["subsets"][0]]) > 1:
sub_ending = "s"
elif sub_len > 1:
type_ending = "s"
sub_ending = "s"
title += " {} type{} of subset{}".format(
sub_len, type_ending, sub_ending
)
items = []
id_item = {"type": "hidden", "name": "action_id", "value": action_id}
delete_label = {
'type': 'label',
'value': '# Please enter "DELETE" to confirm #'
}
delete_item = {
"name": "delete_key",
"type": "text",
"value": "",
"empty_text": "Type Delete here..."
}
items.append(id_item)
items.append(delete_label)
items.append(delete_item)
return {
"items": items,
"title": title
}
def launch(self, session, entities, event):
self.show_message(event, "Processing...", True)
values = event["data"]["values"]
action_id = values.get("action_id")
spec_data = self.action_data_by_id.get(action_id)
if not spec_data:
# it is a bug if this happens!
return {
"success": False,
"message": "Something bad has happened. Please try again."
}
report_messages = collections.defaultdict(list)
project_name = spec_data["project_name"]
to_delete = spec_data["to_delete"]
self.dbcon.Session["AVALON_PROJECT"] = project_name
assets_to_delete = to_delete.get("assets") or []
subsets_to_delete = to_delete.get("subsets") or []
# Convert asset ids to ObjectId obj
assets_to_delete = [
ObjectId(asset_id)
for asset_id in assets_to_delete
if asset_id
]
subset_ids_by_parent = spec_data["subset_ids_by_parent"]
subset_ids_by_name = spec_data["subset_ids_by_name"]
subset_ids_to_archive = []
asset_ids_to_archive = []
ftrack_ids_to_delete = []
if len(assets_to_delete) > 0:
map_av_ftrack_id = spec_data["without_ftrack_id"]
# Prepare data when deleting whole avalon asset
avalon_assets = self.dbcon.find(
{"type": "asset"},
{
"_id": 1,
"data.visualParent": 1,
"data.ftrackId": 1
}
)
avalon_assets_by_parent = collections.defaultdict(list)
for asset in avalon_assets:
asset_id = asset["_id"]
parent_id = asset["data"]["visualParent"]
avalon_assets_by_parent[parent_id].append(asset)
if asset_id in assets_to_delete:
ftrack_id = map_av_ftrack_id.get(str(asset_id))
if not ftrack_id:
ftrack_id = asset["data"].get("ftrackId")
if ftrack_id:
ftrack_ids_to_delete.append(ftrack_id)
children_queue = collections.deque()
for mongo_id in assets_to_delete:
children_queue.append(mongo_id)
while children_queue:
mongo_id = children_queue.popleft()
if mongo_id in asset_ids_to_archive:
continue
asset_ids_to_archive.append(mongo_id)
for subset_id in subset_ids_by_parent.get(mongo_id, []):
if subset_id not in subset_ids_to_archive:
subset_ids_to_archive.append(subset_id)
children = avalon_assets_by_parent.get(mongo_id)
if not children:
continue
for child in children:
child_id = child["_id"]
if child_id not in asset_ids_to_archive:
children_queue.append(child_id)
# Prepare names of assets in ftrack and ids of subsets in mongo
asset_names_to_delete = []
if len(subsets_to_delete) > 0:
for name in subsets_to_delete:
asset_names_to_delete.append(name)
for subset_id in subset_ids_by_name[name]:
if subset_id in subset_ids_to_archive:
continue
subset_ids_to_archive.append(subset_id)
# Get ftrack ids of entities where only assets will be deleted
not_deleted_entities_id = []
ftrack_id_name_map = {}
if asset_names_to_delete:
for entity in entities:
ftrack_id = entity["id"]
ftrack_id_name_map[ftrack_id] = entity["name"]
if ftrack_id not in ftrack_ids_to_delete:
not_deleted_entities_id.append(ftrack_id)
mongo_proc_txt = "MongoProcessing: "
ftrack_proc_txt = "Ftrack processing: "
if asset_ids_to_archive:
self.log.debug("{}Archivation of assets <{}>".format(
mongo_proc_txt,
", ".join([str(id) for id in asset_ids_to_archive])
))
self.dbcon.update_many(
{
"_id": {"$in": asset_ids_to_archive},
"type": "asset"
},
{"$set": {"type": "archived_asset"}}
)
if subset_ids_to_archive:
self.log.debug("{}Archivation of subsets <{}>".format(
mongo_proc_txt,
", ".join([str(id) for id in subset_ids_to_archive])
))
self.dbcon.update_many(
{
"_id": {"$in": subset_ids_to_archive},
"type": "subset"
},
{"$set": {"type": "archived_subset"}}
)
if ftrack_ids_to_delete:
self.log.debug("{}Deleting Ftrack Entities <{}>".format(
ftrack_proc_txt, ", ".join(ftrack_ids_to_delete)
))
entities_by_link_len = self._prepare_entities_before_delete(
ftrack_ids_to_delete, session
)
for link_len in sorted(entities_by_link_len.keys(), reverse=True):
for entity in entities_by_link_len[link_len]:
session.delete(entity)
try:
session.commit()
except Exception:
ent_path = "/".join(
[ent["name"] for ent in entity["link"]]
)
msg = "Failed to delete entity"
report_messages[msg].append(ent_path)
session.rollback()
self.log.warning(
"{} <{}>".format(msg, ent_path),
exc_info=True
)
if not_deleted_entities_id and asset_names_to_delete:
joined_not_deleted = ",".join([
"\"{}\"".format(ftrack_id)
for ftrack_id in not_deleted_entities_id
])
joined_asset_names = ",".join([
"\"{}\"".format(name)
for name in asset_names_to_delete
])
# Find assets of selected entities with names of checked subsets
assets = session.query((
"select id from Asset where"
" context_id in ({}) and name in ({})"
).format(joined_not_deleted, joined_asset_names)).all()
self.log.debug("{}Deleting Ftrack Assets <{}>".format(
ftrack_proc_txt,
", ".join([asset["id"] for asset in assets])
))
for asset in assets:
session.delete(asset)
try:
session.commit()
except Exception:
session.rollback()
msg = "Failed to delete asset"
report_messages[msg].append(asset["id"])
self.log.warning(
"Asset: {} <{}>".format(asset["name"], asset["id"]),
exc_info=True
)
return self.report_handle(report_messages, project_name, event)
def _prepare_entities_before_delete(self, ftrack_ids_to_delete, session):
"""Filter children entities to avoid CircularDependencyError."""
joined_ids_to_delete = ", ".join(
["\"{}\"".format(id) for id in ftrack_ids_to_delete]
)
to_delete_entities = session.query(
"select id, link from TypedContext where id in ({})".format(
joined_ids_to_delete
)
).all()
# Find all children entities and add them to list
# - Delete tasks first then their parents and continue
parent_ids_to_delete = [
entity["id"]
for entity in to_delete_entities
]
while parent_ids_to_delete:
joined_parent_ids_to_delete = ",".join([
"\"{}\"".format(ftrack_id)
for ftrack_id in parent_ids_to_delete
])
_to_delete = session.query((
"select id, link from TypedContext where parent_id in ({})"
).format(joined_parent_ids_to_delete)).all()
parent_ids_to_delete = []
for entity in _to_delete:
parent_ids_to_delete.append(entity["id"])
to_delete_entities.append(entity)
# Unset 'task_id' from AssetVersion entities
# - when a task is deleted, its asset versions are not marked for deletion
task_ids = set(
entity["id"]
for entity in to_delete_entities
if entity.entity_type.lower() == "task"
)
for chunk in create_chunks(task_ids):
asset_versions = session.query((
"select id, task_id from AssetVersion where task_id in ({})"
).format(self.join_query_keys(chunk))).all()
for asset_version in asset_versions:
asset_version["task_id"] = None
session.commit()
entities_by_link_len = collections.defaultdict(list)
for entity in to_delete_entities:
entities_by_link_len[len(entity["link"])].append(entity)
return entities_by_link_len
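# Why sorting by link length works (descriptive): an entity's "link" is
# its path from the project root, so a longer link means a deeper entity.
# Deleting the deepest entities first (Task before its Shot, Shot before
# its Sequence) is what prevents the CircularDependencyError mentioned in
# the docstring.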
def report_handle(self, report_messages, project_name, event):
if not report_messages:
return {
"success": True,
"message": "Deletion was successful!"
}
title = "Delete report ({}):".format(project_name)
items = []
items.append({
"type": "label",
"value": "# Deleting was not completely successful"
})
items.append({
"type": "label",
"value": "<p><i>Check logs for more information</i></p>"
})
for msg, _items in report_messages.items():
if not _items or not msg:
continue
items.append({
"type": "label",
"value": "# {}".format(msg)
})
if isinstance(_items, str):
_items = [_items]
items.append({
"type": "label",
"value": '<p>{}</p>'.format("<br>".join(_items))
})
items.append(self.splitter)
self.show_interface(items, title, event)
return {
"success": False,
"message": "Deleting finished. Read report messages."
}
def register(session):
'''Register plugin. Called when used as an plugin.'''
DeleteAssetSubset(session).register()
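# A minimal sketch of how such register() functions are typically used
# (assuming the standard ftrack_api plugin discovery, where the event
# server imports each plugin file and calls register with a session):
#
#   import ftrack_api
#   session = ftrack_api.Session(auto_connect_event_hub=True)
#   register(session)         # subscribes the discover/launch handlers
#   session.event_hub.wait()  # block and keep processing action events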

View file

@ -0,0 +1,587 @@
import os
import collections
import uuid
import clique
from pymongo import UpdateOne
from openpype_modules.ftrack.lib import BaseAction, statics_icon
from avalon.api import AvalonMongoDB
from openpype.api import Anatomy
import avalon.pipeline
class DeleteOldVersions(BaseAction):
identifier = "delete.old.versions"
label = "OpenPype Admin"
variant = "- Delete old versions"
description = (
"Delete files from older publishes so project can be"
" archived with only lates versions."
)
icon = statics_icon("ftrack", "action_icons", "OpenPypeAdmin.svg")
settings_key = "delete_old_versions"
dbcon = AvalonMongoDB()
inteface_title = "Choose your preferences"
splitter_item = {"type": "label", "value": "---"}
sequence_splitter = "__sequence_splitter__"
def discover(self, session, entities, event):
""" Validation. """
is_valid = False
for entity in entities:
if entity.entity_type.lower() == "assetversion":
is_valid = True
break
if is_valid:
is_valid = self.valid_roles(session, entities, event)
return is_valid
def interface(self, session, entities, event):
# TODO Add roots existence validation
items = []
values = event["data"].get("values")
if values:
versions_count = int(values["last_versions_count"])
if versions_count >= 1:
return
items.append({
"type": "label",
"value": (
"# You have to keep at least 1 version!"
)
})
items.append({
"type": "label",
"value": (
"<i><b>WARNING:</b> This will remove published files of older"
" versions from disk so we don't recommend use"
" this action on \"live\" project.</i>"
)
})
items.append(self.splitter_item)
# How many versions to keep
items.append({
"type": "label",
"value": "## Choose how many versions you want to keep:"
})
items.append({
"type": "label",
"value": (
"<i><b>NOTE:</b> We do recommend to keep 2 versions.</i>"
)
})
items.append({
"type": "number",
"name": "last_versions_count",
"label": "Versions",
"value": 2
})
items.append(self.splitter_item)
items.append({
"type": "label",
"value": (
"## Remove publish folder even if there"
" are other than published files:"
)
})
items.append({
"type": "label",
"value": (
"<i><b>WARNING:</b> This may remove more than you want.</i>"
)
})
items.append({
"type": "boolean",
"name": "force_delete_publish_folder",
"label": "Are You sure?",
"value": False
})
items.append(self.splitter_item)
items.append({
"type": "label",
"value": (
"<i>This will <b>NOT</b> delete any files and only return the "
"total size of the files.</i>"
)
})
items.append({
"type": "boolean",
"name": "only_calculate",
"label": "Only calculate size of files.",
"value": False
})
return {
"items": items,
"title": self.inteface_title
}
def sizeof_fmt(self, num, suffix='B'):
for unit in ['', 'Ki', 'Mi', 'Gi', 'Ti', 'Pi', 'Ei', 'Zi']:
if abs(num) < 1024.0:
return "%3.1f%s%s" % (num, unit, suffix)
num /= 1024.0
return "%.1f%s%s" % (num, 'Yi', suffix)
def launch(self, session, entities, event):
values = event["data"].get("values")
if not values:
return
versions_count = int(values["last_versions_count"])
force_to_remove = values["force_delete_publish_folder"]
only_calculate = values["only_calculate"]
_val1 = "OFF"
if force_to_remove:
_val1 = "ON"
_val3 = "s"
if versions_count == 1:
_val3 = ""
self.log.debug((
"Process started. Force to delete publish folder is set to [{0}]"
" and will keep {1} latest version{2}."
).format(_val1, versions_count, _val3))
self.dbcon.install()
project = None
avalon_asset_names = []
asset_versions_by_parent_id = collections.defaultdict(list)
subset_names_by_asset_name = collections.defaultdict(list)
ftrack_assets_by_name = {}
for entity in entities:
ftrack_asset = entity["asset"]
parent_ent = ftrack_asset["parent"]
parent_ftrack_id = parent_ent["id"]
parent_name = parent_ent["name"]
if parent_name not in avalon_asset_names:
avalon_asset_names.append(parent_name)
# Group asset versions by parent entity
asset_versions_by_parent_id[parent_ftrack_id].append(entity)
# Get project
if project is None:
project = parent_ent["project"]
# Collect subset names per asset
subset_name = ftrack_asset["name"]
subset_names_by_asset_name[parent_name].append(subset_name)
if subset_name not in ftrack_assets_by_name:
ftrack_assets_by_name[subset_name] = ftrack_asset
# Set Mongo collection
project_name = project["full_name"]
anatomy = Anatomy(project_name)
self.dbcon.Session["AVALON_PROJECT"] = project_name
self.log.debug("Project is set to {}".format(project_name))
# Get Assets from avalon database
assets = list(self.dbcon.find({
"type": "asset",
"name": {"$in": avalon_asset_names}
}))
asset_id_to_name_map = {
asset["_id"]: asset["name"] for asset in assets
}
asset_ids = list(asset_id_to_name_map.keys())
self.log.debug("Collected assets ({})".format(len(asset_ids)))
# Get Subsets
subsets = list(self.dbcon.find({
"type": "subset",
"parent": {"$in": asset_ids}
}))
subsets_by_id = {}
subset_ids = []
for subset in subsets:
asset_id = subset["parent"]
asset_name = asset_id_to_name_map[asset_id]
available_subsets = subset_names_by_asset_name[asset_name]
if subset["name"] not in available_subsets:
continue
subset_ids.append(subset["_id"])
subsets_by_id[subset["_id"]] = subset
self.log.debug("Collected subsets ({})".format(len(subset_ids)))
# Get Versions
versions = list(self.dbcon.find({
"type": "version",
"parent": {"$in": subset_ids}
}))
versions_by_parent = collections.defaultdict(list)
for ent in versions:
versions_by_parent[ent["parent"]].append(ent)
def sort_func(ent):
return int(ent["name"])
all_last_versions = []
for parent_id, _versions in versions_by_parent.items():
for idx, version in enumerate(
sorted(_versions, key=sort_func, reverse=True)
):
if idx >= versions_count:
break
all_last_versions.append(version)
self.log.debug("Collected versions ({})".format(len(versions)))
# Filter latest versions
for version in all_last_versions:
versions.remove(version)
# Update versions_by_parent without filtered versions
versions_by_parent = collections.defaultdict(list)
for ent in versions:
versions_by_parent[ent["parent"]].append(ent)
# Filter already deleted versions
versions_to_pop = []
for version in versions:
version_tags = version["data"].get("tags")
if version_tags and "deleted" in version_tags:
versions_to_pop.append(version)
for version in versions_to_pop:
subset = subsets_by_id[version["parent"]]
asset_id = subset["parent"]
asset_name = asset_id_to_name_map[asset_id]
msg = "Asset: \"{}\" | Subset: \"{}\" | Version: \"{}\"".format(
asset_name, subset["name"], version["name"]
)
self.log.warning((
"Skipping version. Already tagged as `deleted`. < {} >"
).format(msg))
versions.remove(version)
version_ids = [ent["_id"] for ent in versions]
self.log.debug(
"Filtered versions to delete ({})".format(len(version_ids))
)
if not version_ids:
msg = "Skipping processing. Nothing to delete."
self.log.debug(msg)
return {
"success": True,
"message": msg
}
repres = list(self.dbcon.find({
"type": "representation",
"parent": {"$in": version_ids}
}))
self.log.debug(
"Collected representations to remove ({})".format(len(repres))
)
dir_paths = {}
file_paths_by_dir = collections.defaultdict(list)
for repre in repres:
file_path, seq_path = self.path_from_representation(repre, anatomy)
if file_path is None:
self.log.warning((
"Could not format path for represenation \"{}\""
).format(str(repre)))
continue
dir_path = os.path.dirname(file_path)
dir_id = None
for _dir_id, _dir_path in dir_paths.items():
if _dir_path == dir_path:
dir_id = _dir_id
break
if dir_id is None:
dir_id = uuid.uuid4()
dir_paths[dir_id] = dir_path
file_paths_by_dir[dir_id].append([file_path, seq_path])
dir_ids_to_pop = []
for dir_id, dir_path in dir_paths.items():
if os.path.exists(dir_path):
continue
dir_ids_to_pop.append(dir_id)
# Pop dirs from both dictionaries
for dir_id in dir_ids_to_pop:
dir_paths.pop(dir_id)
paths = file_paths_by_dir.pop(dir_id)
# TODO report of missing directories?
paths_msg = ", ".join([
"'{}'".format(path[0].replace("\\", "/")) for path in paths
])
self.log.warning((
"Folder does not exist. Deleting it's files skipped: {}"
).format(paths_msg))
# Size of files.
size = 0
if only_calculate:
if force_to_remove:
size = self.delete_whole_dir_paths(
dir_paths.values(), delete=False
)
else:
size = self.delete_only_repre_files(
dir_paths, file_paths_by_dir, delete=False
)
msg = "Total size of files: " + self.sizeof_fmt(size)
self.log.warning(msg)
return {"success": True, "message": msg}
if force_to_remove:
size = self.delete_whole_dir_paths(dir_paths.values())
else:
size = self.delete_only_repre_files(dir_paths, file_paths_by_dir)
mongo_changes_bulk = []
for version in versions:
orig_version_tags = version["data"].get("tags") or []
version_tags = [tag for tag in orig_version_tags]
if "deleted" not in version_tags:
version_tags.append("deleted")
if version_tags == orig_version_tags:
continue
update_query = {"_id": version["_id"]}
update_data = {"$set": {"data.tags": version_tags}}
mongo_changes_bulk.append(UpdateOne(update_query, update_data))
if mongo_changes_bulk:
self.dbcon.bulk_write(mongo_changes_bulk)
self.dbcon.uninstall()
# Set attribute `is_published` to `False` on ftrack AssetVersions
for subset_id, _versions in versions_by_parent.items():
subset_name = None
for subset in subsets:
if subset["_id"] == subset_id:
subset_name = subset["name"]
break
if subset_name is None:
self.log.warning(
"Subset with ID `{}` was not found.".format(str(subset_id))
)
continue
ftrack_asset = ftrack_assets_by_name.get(subset_name)
if not ftrack_asset:
self.log.warning((
"Could not find Ftrack asset with name `{}`"
).format(subset_name))
continue
version_numbers = [int(ver["name"]) for ver in _versions]
for version in ftrack_asset["versions"]:
if int(version["version"]) in version_numbers:
version["is_published"] = False
try:
session.commit()
except Exception:
msg = (
"Could not set `is_published` attribute to `False`"
" for selected AssetVersions."
)
self.log.warning(msg, exc_info=True)
return {
"success": False,
"message": msg
}
msg = "Total size of files deleted: " + self.sizeof_fmt(size)
self.log.warning(msg)
return {"success": True, "message": msg}
def delete_whole_dir_paths(self, dir_paths, delete=True):
size = 0
for dir_path in dir_paths:
# Delete all files and folders in dir path
for root, dirs, files in os.walk(dir_path, topdown=False):
for name in files:
file_path = os.path.join(root, name)
size += os.path.getsize(file_path)
if delete:
os.remove(file_path)
self.log.debug("Removed file: {}".format(file_path))
for name in dirs:
if delete:
os.rmdir(os.path.join(root, name))
if not delete:
continue
# Delete the folder itself and its parent folders while they are empty
while True:
if not os.path.exists(dir_path):
dir_path = os.path.dirname(dir_path)
continue
if len(os.listdir(dir_path)) != 0:
break
os.rmdir(dir_path)
return size
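# Note (descriptive): os.walk with topdown=False yields the deepest
# folders first, so each directory is already emptied of files by the
# time its os.rmdir call runs.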
def delete_only_repre_files(self, dir_paths, file_paths, delete=True):
size = 0
for dir_id, dir_path in dir_paths.items():
dir_files = os.listdir(dir_path)
collections, remainders = clique.assemble(dir_files)
for file_path, seq_path in file_paths[dir_id]:
file_path_base = os.path.split(file_path)[1]
# Just remove file if `frame` key was not in context or
# filled path is in remainders (single file sequence)
if not seq_path or file_path_base in remainders:
if not os.path.exists(file_path):
self.log.warning(
"File was not found: {}".format(file_path)
)
continue
size += os.path.getsize(file_path)
if delete:
os.remove(file_path)
self.log.debug("Removed file: {}".format(file_path))
remainders.remove(file_path_base)
continue
seq_path_base = os.path.split(seq_path)[1]
head, tail = seq_path_base.split(self.sequence_splitter)
final_col = None
for collection in collections:
if head != collection.head or tail != collection.tail:
continue
final_col = collection
break
if final_col is not None:
# Fill full path to head
final_col.head = os.path.join(dir_path, final_col.head)
for _file_path in final_col:
if os.path.exists(_file_path):
size += os.path.getsize(_file_path)
if delete:
os.remove(_file_path)
self.log.debug(
"Removed file: {}".format(_file_path)
)
_seq_path = final_col.format("{head}{padding}{tail}")
self.log.debug("Removed files: {}".format(_seq_path))
collections.remove(final_col)
elif os.path.exists(file_path):
size += os.path.getsize(file_path)
if delete:
os.remove(file_path)
self.log.debug("Removed file: {}".format(file_path))
else:
self.log.warning(
"File was not found: {}".format(file_path)
)
# Delete as much as possible parent folders
if not delete:
return size
for dir_path in dir_paths.values():
while True:
if not os.path.exists(dir_path):
dir_path = os.path.dirname(dir_path)
continue
if len(os.listdir(dir_path)) != 0:
break
self.log.debug("Removed folder: {}".format(dir_path))
os.rmdir(dir_path)
return size
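# How the sequence matching above works (an illustrative example,
# assuming clique's default frame pattern):
#   collections, remainders = clique.assemble(
#       ["sh010.1001.exr", "sh010.1002.exr", "thumb.png"]
#   )
#   # -> one collection with head "sh010." and tail ".exr" covering both
#   #    frames, while "thumb.png" stays in remainders
# A representation whose formatted sequence path splits into the same
# head/tail pair is then deleted frame by frame through that collection.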
def path_from_representation(self, representation, anatomy):
try:
template = representation["data"]["template"]
except KeyError:
return (None, None)
sequence_path = None
try:
context = representation["context"]
context["root"] = anatomy.roots
path = avalon.pipeline.format_template_with_optional_keys(
context, template
)
if "frame" in context:
context["frame"] = self.sequence_splitter
sequence_path = os.path.normpath(
avalon.pipeline.format_template_with_optional_keys(
context, template
)
)
except KeyError:
# Template references unavailable data
return (None, None)
return (os.path.normpath(path), sequence_path)
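# The sequence_splitter trick (descriptive): the template is formatted
# twice, once with the real context to get a concrete file path and once
# with "frame" replaced by the "__sequence_splitter__" marker. Splitting
# that second result on the marker yields the head/tail pair which
# delete_only_repre_files matches against clique collections.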
def register(session):
'''Register plugin. Called when used as an plugin.'''
DeleteOldVersions(session).register()

View file

@ -0,0 +1,568 @@
import os
import copy
import json
import collections
from bson.objectid import ObjectId
from openpype.api import Anatomy, config
from openpype_modules.ftrack.lib import BaseAction, statics_icon
from openpype_modules.ftrack.lib.avalon_sync import CUST_ATTR_ID_KEY
from openpype.lib.delivery import (
path_from_representation,
get_format_dict,
check_destination_path,
process_single_file,
process_sequence
)
from avalon.api import AvalonMongoDB
class Delivery(BaseAction):
identifier = "delivery.action"
label = "Delivery"
description = "Deliver data to client"
role_list = ["Pypeclub", "Administrator", "Project manager"]
icon = statics_icon("ftrack", "action_icons", "Delivery.svg")
settings_key = "delivery_action"
def __init__(self, *args, **kwargs):
self.db_con = AvalonMongoDB()
super(Delivery, self).__init__(*args, **kwargs)
def discover(self, session, entities, event):
is_valid = False
for entity in entities:
if entity.entity_type.lower() == "assetversion":
is_valid = True
break
if is_valid:
is_valid = self.valid_roles(session, entities, event)
return is_valid
def interface(self, session, entities, event):
if event["data"].get("values", {}):
return
title = "Delivery data to Client"
items = []
item_splitter = {"type": "label", "value": "---"}
project_entity = self.get_project_from_entity(entities[0])
project_name = project_entity["full_name"]
self.db_con.install()
self.db_con.Session["AVALON_PROJECT"] = project_name
project_doc = self.db_con.find_one({"type": "project"})
if not project_doc:
return {
"success": False,
"message": (
"Didn't found project \"{}\" in avalon."
).format(project_name)
}
repre_names = self._get_repre_names(entities)
self.db_con.uninstall()
items.append({
"type": "hidden",
"name": "__project_name__",
"value": project_name
})
# Prepare anatomy data
anatomy = Anatomy(project_name)
new_anatomies = []
first = None
for key, template in (anatomy.templates.get("delivery") or {}).items():
# Use only keys with `{root}` or `{root[*]}` in value
if isinstance(template, str) and "{root" in template:
new_anatomies.append({
"label": key,
"value": key
})
if first is None:
first = key
skipped = False
# Add messages if there are no common components or no delivery templates
if not repre_names or not new_anatomies:
skipped = True
items.append({
"type": "label",
"value": "<h1>Something went wrong:</h1>"
})
items.append({
"type": "hidden",
"name": "__skipped__",
"value": skipped
})
if not repre_names:
if len(entities) == 1:
items.append({
"type": "label",
"value": (
"- Selected entity doesn't have components to deliver."
)
})
else:
items.append({
"type": "label",
"value": (
"- Selected entities don't have common components."
)
})
# Add message if delivery anatomies are not set
if not new_anatomies:
items.append({
"type": "label",
"value": (
"- `\"delivery\"` anatomy key is not set in config."
)
})
# Skip if there are any data shortcomings
if skipped:
return {
"items": items,
"title": title
}
items.append({
"value": "<h1>Choose Components to deliver</h1>",
"type": "label"
})
for repre_name in repre_names:
items.append({
"type": "boolean",
"value": False,
"label": repre_name,
"name": repre_name
})
items.append(item_splitter)
items.append({
"value": "<h2>Location for delivery</h2>",
"type": "label"
})
items.append({
"type": "label",
"value": (
"<i>NOTE: It is possible to replace `root` key in anatomy.</i>"
)
})
items.append({
"type": "text",
"name": "__location_path__",
"empty_text": "Type location path here...(Optional)"
})
items.append(item_splitter)
items.append({
"value": "<h2>Anatomy of delivery files</h2>",
"type": "label"
})
items.append({
"type": "label",
"value": (
"<p><i>NOTE: These can be set in Anatomy.yaml"
" within `delivery` key.</i></p>"
)
})
items.append({
"type": "enumerator",
"name": "__new_anatomies__",
"data": new_anatomies,
"value": first
})
return {
"items": items,
"title": title
}
def _get_repre_names(self, entities):
version_ids = self._get_interest_version_ids(entities)
repre_docs = self.db_con.find({
"type": "representation",
"parent": {"$in": version_ids}
})
return list(sorted(repre_docs.distinct("name")))
def _get_interest_version_ids(self, entities):
parent_ent_by_id = {}
subset_names = set()
version_nums = set()
for entity in entities:
asset = entity["asset"]
parent = asset["parent"]
parent_ent_by_id[parent["id"]] = parent
subset_name = asset["name"]
subset_names.add(subset_name)
version = entity["version"]
version_nums.add(version)
asset_docs_by_ftrack_id = self._get_asset_docs(parent_ent_by_id)
subset_docs = self._get_subset_docs(
asset_docs_by_ftrack_id, subset_names, entities
)
version_docs = self._get_version_docs(
asset_docs_by_ftrack_id, subset_docs, version_nums, entities
)
return [version_doc["_id"] for version_doc in version_docs]
def _get_version_docs(
self, asset_docs_by_ftrack_id, subset_docs, version_nums, entities
):
subset_docs_by_id = {
subset_doc["_id"]: subset_doc
for subset_doc in subset_docs
}
version_docs = list(self.db_con.find({
"type": "version",
"parent": {"$in": list(subset_docs_by_id.keys())},
"name": {"$in": list(version_nums)}
}))
version_docs_by_parent_id = collections.defaultdict(dict)
for version_doc in version_docs:
subset_doc = subset_docs_by_id[version_doc["parent"]]
asset_id = subset_doc["parent"]
subset_name = subset_doc["name"]
version = version_doc["name"]
if version_docs_by_parent_id[asset_id].get(subset_name) is None:
version_docs_by_parent_id[asset_id][subset_name] = {}
version_docs_by_parent_id[asset_id][subset_name][version] = (
version_doc
)
filtered_versions = []
for entity in entities:
asset = entity["asset"]
parent = asset["parent"]
asset_doc = asset_docs_by_ftrack_id[parent["id"]]
subsets_by_name = version_docs_by_parent_id.get(asset_doc["_id"])
if not subsets_by_name:
continue
subset_name = asset["name"]
version_docs_by_version = subsets_by_name.get(subset_name)
if not version_docs_by_version:
continue
version = entity["version"]
version_doc = version_docs_by_version.get(version)
if version_doc:
filtered_versions.append(version_doc)
return filtered_versions
def _get_subset_docs(
self, asset_docs_by_ftrack_id, subset_names, entities
):
asset_doc_ids = list()
for asset_doc in asset_docs_by_ftrack_id.values():
asset_doc_ids.append(asset_doc["_id"])
subset_docs = list(self.db_con.find({
"type": "subset",
"parent": {"$in": asset_doc_ids},
"name": {"$in": list(subset_names)}
}))
subset_docs_by_parent_id = collections.defaultdict(dict)
for subset_doc in subset_docs:
asset_id = subset_doc["parent"]
subset_name = subset_doc["name"]
subset_docs_by_parent_id[asset_id][subset_name] = subset_doc
filtered_subsets = []
for entity in entities:
asset = entity["asset"]
parent = asset["parent"]
asset_doc = asset_docs_by_ftrack_id[parent["id"]]
subsets_by_name = subset_docs_by_parent_id.get(asset_doc["_id"])
if not subsets_by_name:
continue
subset_name = asset["name"]
subset_doc = subsets_by_name.get(subset_name)
if subset_doc:
filtered_subsets.append(subset_doc)
return filtered_subsets
def _get_asset_docs(self, parent_ent_by_id):
asset_docs = list(self.db_con.find({
"type": "asset",
"data.ftrackId": {"$in": list(parent_ent_by_id.keys())}
}))
asset_docs_by_ftrack_id = {
asset_doc["data"]["ftrackId"]: asset_doc
for asset_doc in asset_docs
}
entities_by_mongo_id = {}
entities_by_names = {}
for ftrack_id, entity in parent_ent_by_id.items():
if ftrack_id not in asset_docs_by_ftrack_id:
parent_mongo_id = entity["custom_attributes"].get(
CUST_ATTR_ID_KEY
)
if parent_mongo_id:
entities_by_mongo_id[ObjectId(parent_mongo_id)] = entity
else:
entities_by_names[entity["name"]] = entity
expressions = []
if entities_by_mongo_id:
expression = {
"type": "asset",
"_id": {"$in": list(entities_by_mongo_id.keys())}
}
expressions.append(expression)
if entities_by_names:
expression = {
"type": "asset",
"name": {"$in": list(entities_by_names.keys())}
}
expressions.append(expression)
if expressions:
if len(expressions) == 1:
filter = expressions[0]
else:
filter = {"$or": expressions}
asset_docs = self.db_con.find(filter)
for asset_doc in asset_docs:
if asset_doc["_id"] in entities_by_mongo_id:
entity = entities_by_mongo_id[asset_doc["_id"]]
asset_docs_by_ftrack_id[entity["id"]] = asset_doc
elif asset_doc["name"] in entities_by_names:
entity = entities_by_names[asset_doc["name"]]
asset_docs_by_ftrack_id[entity["id"]] = asset_doc
return asset_docs_by_ftrack_id
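# Matching order above (descriptive): asset documents are first matched
# by the "data.ftrackId" stored in mongo; entities without a match fall
# back to the mongo id kept in their ftrack custom attribute
# (CUST_ATTR_ID_KEY) and, failing that, to a plain name match.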
def launch(self, session, entities, event):
if "values" not in event["data"]:
return {
"success": True,
"message": "Nothing to do"
}
values = event["data"]["values"]
skipped = values.pop("__skipped__")
if skipped:
return {
"success": False,
"message": "Action skipped"
}
user_id = event["source"]["user"]["id"]
user_entity = session.query(
"User where id is {}".format(user_id)
).one()
job = session.create("Job", {
"user": user_entity,
"status": "running",
"data": json.dumps({
"description": "Delivery processing."
})
})
session.commit()
try:
self.db_con.install()
report = self.real_launch(session, entities, event)
except Exception as exc:
report = {
"success": False,
"title": "Delivery failed",
"items": [{
"type": "label",
"value": (
"Error during delivery action process:<br>{}"
"<br><br>Check logs for more information."
).format(str(exc))
}]
}
self.log.warning(
"Failed during processing delivery action.",
exc_info=True
)
finally:
if report["success"]:
job["status"] = "done"
else:
job["status"] = "failed"
session.commit()
self.db_con.uninstall()
if not report["success"]:
self.show_interface(
items=report["items"],
title=report["title"],
event=event
)
return {
"success": False,
"message": "Errors during delivery process. See report."
}
return report
def real_launch(self, session, entities, event):
self.log.info("Delivery action just started.")
report_items = collections.defaultdict(list)
values = event["data"]["values"]
location_path = values.pop("__location_path__")
anatomy_name = values.pop("__new_anatomies__")
project_name = values.pop("__project_name__")
repre_names = []
for key, value in values.items():
if value is True:
repre_names.append(key)
if not repre_names:
return {
"success": True,
"message": "No selected components to deliver."
}
location_path = location_path.strip()
if location_path:
location_path = os.path.normpath(location_path)
if not os.path.exists(location_path):
os.makedirs(location_path)
self.db_con.Session["AVALON_PROJECT"] = project_name
self.log.debug("Collecting representations to process.")
version_ids = self._get_interest_version_ids(entities)
repres_to_deliver = list(self.db_con.find({
"type": "representation",
"parent": {"$in": version_ids},
"name": {"$in": repre_names}
}))
anatomy = Anatomy(project_name)
format_dict = get_format_dict(anatomy, location_path)
datetime_data = config.get_datetime_data()
for repre in repres_to_deliver:
source_path = repre.get("data", {}).get("path")
debug_msg = "Processing representation {}".format(repre["_id"])
if source_path:
debug_msg += " with published path {}.".format(source_path)
self.log.debug(debug_msg)
anatomy_data = copy.deepcopy(repre["context"])
repre_report_items = check_destination_path(repre["_id"],
anatomy,
anatomy_data,
datetime_data,
anatomy_name)
if repre_report_items:
report_items.update(repre_report_items)
continue
# Get source repre path
frame = repre['context'].get('frame')
if frame:
repre["context"]["frame"] = len(str(frame)) * "#"
repre_path = path_from_representation(repre, anatomy)
# TODO add backup solution where root of path from component
# is replaced with root
args = (
repre_path,
repre,
anatomy,
anatomy_name,
anatomy_data,
format_dict,
report_items,
self.log
)
if not frame:
process_single_file(*args)
else:
process_sequence(*args)
return self.report(report_items)
def report(self, report_items):
"""Returns dict with final status of delivery (succes, fail etc.)."""
items = []
for msg, _items in report_items.items():
if not _items:
continue
if items:
items.append({"type": "label", "value": "---"})
items.append({
"type": "label",
"value": "# {}".format(msg)
})
if not isinstance(_items, (list, tuple)):
_items = [_items]
__items = []
for item in _items:
__items.append(str(item))
items.append({
"type": "label",
"value": '<p>{}</p>'.format("<br>".join(__items))
})
if not items:
return {
"success": True,
"message": "Delivery Finished"
}
return {
"items": items,
"title": "Delivery report",
"success": False
}
def register(session):
'''Register plugin. Called when used as an plugin.'''
Delivery(session).register()

View file

@ -0,0 +1,235 @@
import os
import time
import subprocess
from operator import itemgetter
from openpype.lib import ApplicationManager
from openpype_modules.ftrack.lib import BaseAction, statics_icon
class DJVViewAction(BaseAction):
"""Launch DJVView action."""
identifier = "djvview-launch-action"
label = "DJV View"
description = "DJV View Launcher"
icon = statics_icon("app_icons", "djvView.png")
type = 'Application'
allowed_types = [
"cin", "dpx", "avi", "dv", "gif", "flv", "mkv", "mov", "mpg", "mpeg",
"mp4", "m4v", "mxf", "iff", "z", "ifl", "jpeg", "jpg", "jfif", "lut",
"1dl", "exr", "pic", "png", "ppm", "pnm", "pgm", "pbm", "rla", "rpf",
"sgi", "rgba", "rgb", "bw", "tga", "tiff", "tif", "img"
]
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.application_manager = ApplicationManager()
self._last_check = time.time()
self._check_interval = 10
def _get_djv_apps(self):
app_group = self.application_manager.app_groups["djvview"]
output = []
for app in app_group:
executable = app.find_executable()
if executable is not None:
output.append(app)
return output
def get_djv_apps(self):
cur_time = time.time()
if (cur_time - self._last_check) > self._check_interval:
self.application_manager.refresh()
self._last_check = cur_time
return self._get_djv_apps()
def discover(self, session, entities, event):
"""Return available actions based on *event*. """
selection = event["data"].get("selection", [])
if len(selection) != 1:
return False
entityType = selection[0].get("entityType", None)
if entityType not in ["assetversion", "task"]:
return False
if self.get_djv_apps():
return True
return False
def interface(self, session, entities, event):
if event['data'].get('values', {}):
return
entity = entities[0]
versions = []
entity_type = entity.entity_type.lower()
if entity_type == "assetversion":
if (
entity[
'components'
][0]['file_type'][1:] in self.allowed_types
):
versions.append(entity)
else:
master_entity = entity
if entity_type == "task":
master_entity = entity['parent']
for asset in master_entity['assets']:
for version in asset['versions']:
# Get only AssetVersion of selected task
if (
entity_type == "task" and
version['task']['id'] != entity['id']
):
continue
# Get only components with allowed type
filetype = version['components'][0]['file_type']
if filetype[1:] in self.allowed_types:
versions.append(version)
if len(versions) < 1:
return {
'success': False,
'message': 'There are no Asset Versions to open.'
}
# TODO sort them (somehow?)
enum_items = []
first_value = None
for app in self.get_djv_apps():
if first_value is None:
first_value = app.full_name
enum_items.append({
"value": app.full_name,
"label": app.full_label
})
if not enum_items:
return {
"success": False,
"message": "Couldn't find DJV executable."
}
items = [
{
"type": "enumerator",
"label": "DJV version:",
"name": "djv_app_name",
"data": enum_items,
"value": first_value
},
{
"type": "label",
"value": "---"
}
]
version_items = []
base_label = "v{0} - {1} - {2}"
default_component = None
last_available = None
select_value = None
for version in versions:
for component in version['components']:
label = base_label.format(
str(version['version']).zfill(3),
version['asset']['type']['name'],
component['name']
)
try:
location = component[
'component_locations'
][0]['location']
file_path = location.get_filesystem_path(component)
except Exception:
file_path = component[
'component_locations'
][0]['resource_identifier']
if os.path.isdir(os.path.dirname(file_path)):
last_available = file_path
if component['name'] == default_component:
select_value = file_path
version_items.append(
{'label': label, 'value': file_path}
)
if len(version_items) == 0:
return {
'success': False,
'message': (
'There are no Asset Versions with accessible path.'
)
}
item = {
'label': 'Items to view',
'type': 'enumerator',
'name': 'path',
'data': sorted(
version_items,
key=itemgetter('label'),
reverse=True
)
}
if select_value is not None:
item['value'] = select_value
else:
item['value'] = last_available
items.append(item)
return {'items': items}
def launch(self, session, entities, event):
"""Callback method for DJVView action."""
# Launching application
event_data = event["data"]
if "values" not in event_data:
return
djv_app_name = event_data["djv_app_name"]
app = self.applicaion_manager.applications.get(djv_app_name)
executable = None
if app is not None:
executable = app.find_executable()
if not executable:
return {
"success": False,
"message": "Couldn't find DJV executable."
}
filpath = os.path.normpath(event_data["values"]["path"])
cmd = [
# DJV path
executable,
# PATH TO COMPONENT
filpath
]
try:
# Run DJV with these commands
subprocess.Popen(cmd)
except FileNotFoundError:
return {
"success": False,
"message": "File \"{}\" was not found.".format(
os.path.basename(filepath)
)
}
return True
def register(session):
"""Register hooks."""
DJVViewAction(session).register()

View file

@ -0,0 +1,117 @@
import json
from openpype_modules.ftrack.lib import BaseAction, statics_icon
class JobKiller(BaseAction):
'''Kill selected running jobs action.'''
#: Action identifier.
identifier = 'job.killer'
#: Action label.
label = "OpenPype Admin"
variant = '- Job Killer'
#: Action description.
description = 'Killing selected running jobs'
#: Action icon.
icon = statics_icon("ftrack", "action_icons", "OpenPypeAdmin.svg")
settings_key = "job_killer"
def discover(self, session, entities, event):
''' Validation '''
return self.valid_roles(session, entities, event)
def interface(self, session, entities, event):
if not event['data'].get('values', {}):
title = 'Select jobs to kill'
jobs = session.query(
'select id, status from Job'
' where status in ("queued", "running")'
).all()
items = []
item_splitter = {'type': 'label', 'value': '---'}
for job in jobs:
try:
data = json.loads(job['data'])
description = data['description']
except Exception:
description = '*No description*'
user = job['user']['username']
created = job['created_at'].strftime('%d.%m.%Y %H:%M:%S')
label = '{} - {} - {}'.format(
description, created, user
)
item_label = {
'type': 'label',
'value': label
}
item = {
'name': job['id'],
'type': 'boolean',
'value': False
}
if len(items) > 0:
items.append(item_splitter)
items.append(item_label)
items.append(item)
if len(items) == 0:
return {
'success': False,
'message': 'Didn\'t find any running jobs'
}
else:
return {
'items': items,
'title': title
}
def launch(self, session, entities, event):
""" GET JOB """
if 'values' not in event['data']:
return
values = event['data']['values']
if len(values) <= 0:
return {
'success': True,
'message': 'No jobs to kill!'
}
jobs = []
job_ids = []
for k, v in values.items():
if v is True:
job_ids.append(k)
for job_id in job_ids:
query = 'Job where id is "{}"'.format(job_id)
jobs.append(session.query(query).one())
# Update all the queried jobs, setting the status to failed.
for job in jobs:
try:
origin_status = job["status"]
job['status'] = 'failed'
session.commit()
self.log.debug((
'Changing Job ({}) status: {} -> failed'
).format(job['id'], origin_status))
except Exception:
session.rollback()
self.log.warning((
'Changing Job ({}) has failed'
).format(job['id']))
self.log.info('All selected jobs were killed successfully!')
return {
'success': True,
'message': 'All selected jobs were killed successfully!'
}
def register(session):
'''Register plugin. Called when used as an plugin.'''
JobKiller(session).register()

View file

@ -0,0 +1,118 @@
from openpype_modules.ftrack.lib import BaseAction, statics_icon
class MultipleNotes(BaseAction):
'''Add the same note to multiple entities.'''
#: Action identifier.
identifier = 'multiple.notes'
#: Action label.
label = 'Multiple Notes'
#: Action description.
description = 'Add same note to multiple entities'
icon = statics_icon("ftrack", "action_icons", "MultipleNotes.svg")
def discover(self, session, entities, event):
''' Validation '''
valid = True
# Check for multiple selection.
if len(entities) < 2:
valid = False
# Check for valid entities.
valid_entity_types = ['assetversion', 'task']
for entity in entities:
if entity.entity_type.lower() not in valid_entity_types:
valid = False
break
return valid
def interface(self, session, entities, event):
if not event['data'].get('values', {}):
note_label = {
'type': 'label',
'value': '# Enter note: #'
}
note_value = {
'name': 'note',
'type': 'textarea'
}
category_label = {
'type': 'label',
'value': '## Category: ##'
}
category_data = []
category_data.append({
'label': '- None -',
'value': 'none'
})
all_categories = session.query('NoteCategory').all()
for cat in all_categories:
category_data.append({
'label': cat['name'],
'value': cat['id']
})
category_value = {
'type': 'enumerator',
'name': 'category',
'data': category_data,
'value': 'none'
}
splitter = {
'type': 'label',
'value': '{}'.format(200 * "-")
}
items = []
items.append(note_label)
items.append(note_value)
items.append(splitter)
items.append(category_label)
items.append(category_value)
return items
def launch(self, session, entities, event):
if 'values' not in event['data']:
return
values = event['data']['values']
if len(values) <= 0 or 'note' not in values:
return False
# Get Note text
note_value = values['note']
if note_value.lower().strip() == '':
return False
# Get User
user = session.query(
'User where username is "{}"'.format(session.api_user)
).one()
# Base note data
note_data = {
'content': note_value,
'author': user
}
# Get category
category_value = values['category']
if category_value != 'none':
category = session.query(
'NoteCategory where id is "{}"'.format(category_value)
).one()
note_data['category'] = category
# Create notes for entities
for entity in entities:
new_note = session.create('Note', note_data)
entity['notes'].append(new_note)
session.commit()
return True
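# Note (descriptive): the session is committed once per entity, so notes
# that were already created are kept even if a later entity fails.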
def register(session):
'''Register plugin. Called when used as an plugin.'''
MultipleNotes(session).register()

View file

@ -0,0 +1,447 @@
import json
from avalon.api import AvalonMongoDB
from openpype.api import ProjectSettings
from openpype.lib import create_project
from openpype.settings import SaveWarningExc
from openpype_modules.ftrack.lib import (
BaseAction,
statics_icon,
get_openpype_attr,
CUST_ATTR_AUTO_SYNC
)
class PrepareProjectLocal(BaseAction):
"""Prepare project attributes in Anatomy."""
identifier = "prepare.project.local"
label = "Prepare Project"
description = "Set basic attributes on the project"
icon = statics_icon("ftrack", "action_icons", "PrepareProject.svg")
role_list = ["Pypeclub", "Administrator", "Project Manager"]
settings_key = "prepare_project"
# Key to store info about triggering create folder structure
create_project_structure_key = "create_folder_structure"
create_project_structure_identifier = "create.project.structure"
item_splitter = {"type": "label", "value": "---"}
_keys_order = (
"fps",
"frameStart",
"frameEnd",
"handleStart",
"handleEnd",
"clipIn",
"clipOut",
"resolutionHeight",
"resolutionWidth",
"pixelAspect",
"applications",
"tools_env",
"library_project",
)
def discover(self, session, entities, event):
"""Show only on project."""
if (
len(entities) != 1
or entities[0].entity_type.lower() != "project"
):
return False
return self.valid_roles(session, entities, event)
def interface(self, session, entities, event):
if event['data'].get('values', {}):
return
# Inform user that this may take a while
self.show_message(event, "Preparing data... Please wait", True)
self.log.debug("Preparing data which will be shown")
self.log.debug("Loading custom attributes")
project_entity = entities[0]
project_name = project_entity["full_name"]
project_settings = ProjectSettings(project_name)
project_anatom_settings = project_settings["project_anatomy"]
root_items = self.prepare_root_items(project_anatom_settings)
ca_items, multiselect_enumerators = (
self.prepare_custom_attribute_items(project_anatom_settings)
)
self.log.debug("Heavy items are ready. Preparing last items group.")
title = "Prepare Project"
items = []
# Add root items
items.extend(root_items)
items.append(self.item_splitter)
items.append({
"type": "label",
"value": "<h3>Set basic Attributes:</h3>"
})
items.extend(ca_items)
# Set value of auto synchronization
auto_sync_value = project_entity["custom_attributes"].get(
CUST_ATTR_AUTO_SYNC, False
)
auto_sync_item = {
"name": CUST_ATTR_AUTO_SYNC,
"type": "boolean",
"value": auto_sync_value,
"label": "AutoSync to Avalon"
}
# Add autosync attribute
items.append(auto_sync_item)
# This item will be last before enumerators
# Ask if want to trigger Action Create Folder Structure
create_project_structure_checked = (
project_settings
["project_settings"]
["ftrack"]
["user_handlers"]
["prepare_project"]
["create_project_structure_checked"]
).value
items.append({
"type": "label",
"value": "<h3>Want to create basic Folder Structure?</h3>"
})
items.append({
"name": self.create_project_structure_key,
"type": "boolean",
"value": create_project_structure_checked,
"label": "Check if Yes"
})
# Add enumerator items at the end
for item in multiselect_enumerators:
items.append(item)
return {
"items": items,
"title": title
}
def prepare_root_items(self, project_anatom_settings):
self.log.debug("Root items preparation begins.")
root_items = []
root_items.append({
"type": "label",
"value": "<h3>Check your Project root settings</h3>"
})
root_items.append({
"type": "label",
"value": (
"<p><i>NOTE: Roots are <b>crutial</b> for path filling"
" (and creating folder structure).</i></p>"
)
})
root_items.append({
"type": "label",
"value": (
"<p><i>WARNING: Do not change roots on running project,"
" that <b>will cause workflow issues</b>.</i></p>"
)
})
empty_text = "Enter root path here..."
roots_entity = project_anatom_settings["roots"]
for root_name, root_entity in roots_entity.items():
root_items.append(self.item_splitter)
root_items.append({
"type": "label",
"value": "Root: \"{}\"".format(root_name)
})
for platform_name, value_entity in root_entity.items():
root_items.append({
"label": platform_name,
"name": "__root__{}__{}".format(root_name, platform_name),
"type": "text",
"value": value_entity.value,
"empty_text": empty_text
})
root_items.append({
"type": "hidden",
"name": "__rootnames__",
"value": json.dumps(list(roots_entity.keys()))
})
self.log.debug("Root items preparation ended.")
return root_items
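# Example of the generated field names (descriptive): a root named "work"
# with platform keys produces items such as "__root__work__windows" or
# "__root__work__linux", which launch() parses back by stripping the
# "__root__" prefix and the "{root_name}__" part.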
def _attributes_to_set(self, project_anatom_settings):
attributes_to_set = {}
attribute_values_by_key = {}
for key, entity in project_anatom_settings["attributes"].items():
attribute_values_by_key[key] = entity.value
cust_attrs, hier_cust_attrs = get_openpype_attr(self.session, True)
for attr in hier_cust_attrs:
key = attr["key"]
if key.startswith("avalon_"):
continue
attributes_to_set[key] = {
"label": attr["label"],
"object": attr,
"default": attribute_values_by_key.get(key)
}
for attr in cust_attrs:
if attr["entity_type"].lower() != "show":
continue
key = attr["key"]
if key.startswith("avalon_"):
continue
attributes_to_set[key] = {
"label": attr["label"],
"object": attr,
"default": attribute_values_by_key.get(key)
}
# Sort by label
attributes_to_set = dict(sorted(
attributes_to_set.items(),
key=lambda x: x[1]["label"]
))
return attributes_to_set
def prepare_custom_attribute_items(self, project_anatom_settings):
items = []
multiselect_enumerators = []
attributes_to_set = self._attributes_to_set(project_anatom_settings)
self.log.debug("Preparing interface for keys: \"{}\"".format(
str([key for key in attributes_to_set])
))
attribute_keys = set(attributes_to_set.keys())
keys_order = []
for key in self._keys_order:
if key in attribute_keys:
keys_order.append(key)
attribute_keys = attribute_keys - set(keys_order)
for key in sorted(attribute_keys):
keys_order.append(key)
for key in keys_order:
in_data = attributes_to_set[key]
attr = in_data["object"]
# initial item definition
item = {
"name": key,
"label": in_data["label"]
}
# cust attr type - may have different visualization
type_name = attr["type"]["name"].lower()
easy_types = ["text", "boolean", "date", "number"]
easy_type = False
if type_name in easy_types:
easy_type = True
elif type_name == "enumerator":
attr_config = json.loads(attr["config"])
attr_config_data = json.loads(attr_config["data"])
if attr_config["multiSelect"] is True:
multiselect_enumerators.append(self.item_splitter)
multiselect_enumerators.append({
"type": "label",
"value": "<h3>{}</h3>".format(in_data["label"])
})
default = in_data["default"]
names = []
for option in sorted(
attr_config_data, key=lambda x: x["menu"]
):
name = option["value"]
new_name = "__{}__{}".format(key, name)
names.append(new_name)
item = {
"name": new_name,
"type": "boolean",
"label": "- {}".format(option["menu"])
}
if default:
if isinstance(default, (list, tuple)):
if name in default:
item["value"] = True
else:
if name == default:
item["value"] = True
multiselect_enumerators.append(item)
multiselect_enumerators.append({
"type": "hidden",
"name": "__hidden__{}".format(key),
"value": json.dumps(names)
})
else:
easy_type = True
item["data"] = attr_config_data
else:
self.log.warning((
"Custom attribute \"{}\" has unsupported type \"{}\"."
).format(key, type_name))
items.append({
"type": "label",
"value": (
"!!! Can't handle Custom attritubte type \"{}\""
" (key: \"{}\")"
).format(type_name, key)
})
if easy_type:
item["type"] = type_name
# default value in interface
default = in_data["default"]
if default is not None:
item["value"] = default
items.append(item)
return items, multiselect_enumerators
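# How the multiselect round-trip works (descriptive): each option of a
# multi-select enumerator becomes a boolean item named "__{key}__{value}"
# and a hidden "__hidden__{key}" item stores the JSON list of those
# names, so launch() can pop the booleans and rebuild the selected value
# list under the original attribute key.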
def launch(self, session, entities, event):
in_data = event["data"].get("values")
if not in_data:
return
create_project_structure_checked = in_data.pop(
self.create_project_structure_key
)
root_values = {}
root_key = "__root__"
for key in tuple(in_data.keys()):
if key.startswith(root_key):
_key = key[len(root_key):]
root_values[_key] = in_data.pop(key)
root_names = in_data.pop("__rootnames__", None)
root_data = {}
for root_name in json.loads(root_names):
root_data[root_name] = {}
for key, value in tuple(root_values.items()):
prefix = "{}__".format(root_name)
if not key.startswith(prefix):
continue
_key = key[len(prefix):]
root_data[root_name][_key] = value
# Find hidden items for multiselect enumerators
keys_to_process = []
for key in in_data:
if key.startswith("__hidden__"):
keys_to_process.append(key)
self.log.debug("Preparing data for Multiselect Enumerators")
enumerators = {}
for key in keys_to_process:
new_key = key.replace("__hidden__", "")
enumerator_items = in_data.pop(key)
enumerators[new_key] = json.loads(enumerator_items)
# find values set for multiselect enumerator
for key, enumerator_items in enumerators.items():
in_data[key] = []
name = "__{}__".format(key)
for item in enumerator_items:
value = in_data.pop(item)
if value is True:
new_key = item.replace(name, "")
in_data[key].append(new_key)
self.log.debug("Setting Custom Attribute values")
project_entity = entities[0]
project_name = project_entity["full_name"]
# Try to find project document
dbcon = AvalonMongoDB()
dbcon.install()
dbcon.Session["AVALON_PROJECT"] = project_name
project_doc = dbcon.find_one({
"type": "project"
})
# Create project if is not available
# - creation is required to be able set project anatomy and attributes
if not project_doc:
project_code = project_entity["name"]
self.log.info("Creating project \"{} [{}]\"".format(
project_name, project_code
))
create_project(project_name, project_code, dbcon=dbcon)
dbcon.uninstall()
project_settings = ProjectSettings(project_name)
project_anatomy_settings = project_settings["project_anatomy"]
project_anatomy_settings["roots"] = root_data
custom_attribute_values = {}
attributes_entity = project_anatomy_settings["attributes"]
for key, value in in_data.items():
if key not in attributes_entity:
custom_attribute_values[key] = value
else:
attributes_entity[key] = value
try:
project_settings.save()
except SaveWarningExc as exc:
self.log.info("Few warnings happened during settings save:")
for warning in exc.warnings:
self.log.info(str(warning))
# Change custom attributes on project
if custom_attribute_values:
for key, value in custom_attribute_values.items():
project_entity["custom_attributes"][key] = value
self.log.debug("- Key \"{}\" set to \"{}\"".format(key, value))
session.commit()
# Trigger create project structure action
if create_project_structure_checked:
trigger_identifier = "{}.{}".format(
self.create_project_structure_identifier,
self.process_identifier()
)
self.trigger_action(trigger_identifier, event)
return True
def register(session):
'''Register plugin. Called when used as an plugin.'''
PrepareProjectLocal(session).register()

View file

@ -0,0 +1,318 @@
import os
import subprocess
import traceback
import json
from openpype_modules.ftrack.lib import BaseAction, statics_icon
import ftrack_api
from avalon import io, api
class RVAction(BaseAction):
""" Launch RV action """
identifier = "rv.launch.action"
label = "rv"
description = "rv Launcher"
icon = statics_icon("ftrack", "action_icons", "RV.png")
type = 'Application'
allowed_types = ["img", "mov", "exr", "mp4"]
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
# QUESTION load RV application data from ApplicationManager?
rv_path = None
# RV_HOME should be set if properly installed
if os.environ.get('RV_HOME'):
rv_path = os.path.join(
os.environ.get('RV_HOME'),
'bin',
'rv'
)
if not os.path.exists(rv_path):
rv_path = None
if not rv_path:
self.log.info("RV path was not found.")
self.ignore_me = True
self.rv_path = rv_path
def discover(self, session, entities, event):
"""Return available actions based on *event*. """
return True
def preregister(self):
if self.rv_path is None:
return (
'RV is not installed or paths in presets are not set correctly'
)
return True
def get_components_from_entity(self, session, entity, components):
"""Get components from various entity types.
The components dictionary is modified in place, so nothing is returned.
Args:
entity (Ftrack entity)
components (dict)
"""
if entity.entity_type.lower() == "assetversion":
for component in entity["components"]:
if component["file_type"][1:] not in self.allowed_types:
continue
try:
components[entity["asset"]["parent"]["name"]].append(
component
)
except KeyError:
components[entity["asset"]["parent"]["name"]] = [component]
return
if entity.entity_type.lower() == "task":
query = "AssetVersion where task_id is '{0}'".format(entity["id"])
for assetversion in session.query(query):
self.get_components_from_entity(
session, assetversion, components
)
return
if entity.entity_type.lower() == "shot":
query = "AssetVersion where asset.parent.id is '{0}'".format(
entity["id"]
)
for assetversion in session.query(query):
self.get_components_from_entity(
session, assetversion, components
)
return
raise NotImplementedError(
"\"{}\" entity type is not implemented yet.".format(
entity.entity_type
)
)
def interface(self, session, entities, event):
if event['data'].get('values', {}):
return
user = session.query(
"User where username is '{0}'".format(
os.environ["FTRACK_API_USER"]
)
).one()
job = session.create(
"Job",
{
"user": user,
"status": "running",
"data": json.dumps({
"description": "RV: Collecting components."
})
}
)
# Commit to feedback to user.
session.commit()
items = []
try:
items = self.get_interface_items(session, entities)
except Exception:
self.log.error(traceback.format_exc())
job["status"] = "failed"
else:
job["status"] = "done"
# Commit to end job.
session.commit()
return {"items": items}
def get_interface_items(self, session, entities):
components = {}
for entity in entities:
self.get_components_from_entity(session, entity, components)
# Sort by version
for parent_name, entities in components.items():
version_mapping = {}
for entity in entities:
try:
version_mapping[entity["version"]["version"]].append(
entity
)
except KeyError:
version_mapping[entity["version"]["version"]] = [entity]
# Sort same versions by date.
for version, entities in version_mapping.items():
version_mapping[version] = sorted(
entities, key=lambda x: x["version"]["date"], reverse=True
)
components[parent_name] = []
for version in reversed(sorted(version_mapping.keys())):
components[parent_name].extend(version_mapping[version])
# Items to present to user.
items = []
label = "{} - v{} - {}"
for parent_name, entities in components.items():
data = []
for entity in entities:
data.append(
{
"label": label.format(
entity["version"]["asset"]["name"],
str(entity["version"]["version"]).zfill(3),
entity["file_type"][1:]
),
"value": entity["id"]
}
)
items.append(
{
"label": parent_name,
"type": "enumerator",
"name": parent_name,
"data": data,
"value": data[0]["value"]
}
)
return items
def launch(self, session, entities, event):
"""Callback method for RV action."""
# Launching application
if "values" not in event["data"]:
return
user = session.query(
"User where username is '{0}'".format(
os.environ["FTRACK_API_USER"]
)
).one()
job = session.create(
"Job",
{
"user": user,
"status": "running",
"data": json.dumps({
"description": "RV: Collecting file paths."
})
}
)
# Commit to feedback to user.
session.commit()
paths = []
try:
paths = self.get_file_paths(session, event)
except Exception:
self.log.error(traceback.format_exc())
job["status"] = "failed"
else:
job["status"] = "done"
# Commit to end job.
session.commit()
args = [os.path.normpath(self.rv_path)]
fps = entities[0].get("custom_attributes", {}).get("fps", None)
if fps is not None:
args.extend(["-fps", str(fps)])
args.extend(paths)
self.log.info("Running rv: {}".format(args))
subprocess.Popen(args)
return True
def get_file_paths(self, session, event):
"""Get file paths from selected components."""
link = session.get(
"Component", list(event["data"]["values"].values())[0]
)["version"]["asset"]["parent"]["link"][0]
project = session.get(link["type"], link["id"])
os.environ["AVALON_PROJECT"] = project["name"]
api.Session["AVALON_PROJECT"] = project["name"]
io.install()
location = ftrack_api.Session().pick_location()
paths = []
for parent_name in sorted(event["data"]["values"].keys()):
component = session.get(
"Component", event["data"]["values"][parent_name]
)
# Newer publishes have the source referenced in Ftrack.
online_source = False
for neighbour_component in component["version"]["components"]:
if neighbour_component["name"] != "ftrackreview-mp4_src":
continue
paths.append(
location.get_filesystem_path(neighbour_component)
)
online_source = True
if online_source:
continue
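# Older publishes: fall back to resolving the path via the avalon database.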
asset = io.find_one({"type": "asset", "name": parent_name})
subset = io.find_one(
{
"type": "subset",
"name": component["version"]["asset"]["name"],
"parent": asset["_id"]
}
)
version = io.find_one(
{
"type": "version",
"name": component["version"]["version"],
"parent": subset["_id"]
}
)
representation = io.find_one(
{
"type": "representation",
"parent": version["_id"],
"name": component["file_type"][1:]
}
)
if representation is None:
representation = io.find_one(
{
"type": "representation",
"parent": version["_id"],
"name": "preview"
}
)
paths.append(api.get_representation_path(representation))
return paths
def register(session):
"""Register hooks."""
RVAction(session).register()

View file

@ -0,0 +1,436 @@
import os
from operator import itemgetter
from openpype_modules.ftrack.lib import BaseAction, statics_icon
class SeedDebugProject(BaseAction):
'''Seed debug project action.'''
#: Action identifier.
identifier = "seed.debug.project"
#: Action label.
label = "Seed Debug Project"
#: Action description.
description = "Seed project with debug assets, sequences and shots"
#: priority
priority = 100
#: Action icon.
icon = statics_icon("ftrack", "action_icons", "SeedProject.svg")
# Asset names which will be created in `Assets` entity
assets = [
"Addax", "Alpaca", "Ant", "Antelope", "Aye", "Badger", "Bear", "Bee",
"Beetle", "Bluebird", "Bongo", "Bontebok", "Butterflie", "Caiman",
"Capuchin", "Capybara", "Cat", "Caterpillar", "Coyote", "Crocodile",
"Cuckoo", "Deer", "Dragonfly", "Duck", "Eagle", "Egret", "Elephant",
"Falcon", "Fossa", "Fox", "Gazelle", "Gecko", "Gerbil",
"GiantArmadillo", "Gibbon", "Giraffe", "Goose", "Gorilla",
"Grasshoper", "Hare", "Hawk", "Hedgehog", "Heron", "Hog",
"Hummingbird", "Hyena", "Chameleon", "Cheetah", "Iguana", "Jackal",
"Jaguar", "Kingfisher", "Kinglet", "Kite", "Komodo", "Lemur",
"Leopard", "Lion", "Lizard", "Macaw", "Malachite", "Mandrill",
"Mantis", "Marmoset", "Meadowlark", "Meerkat", "Mockingbird",
"Mongoose", "Monkey", "Nyal", "Ocelot", "Okapi", "Oribi", "Oriole",
"Otter", "Owl", "Panda", "Parrot", "Pelican", "Pig", "Porcupine",
"Reedbuck", "Rhinocero", "Sandpiper", "Servil", "Skink", "Sloth",
"Snake", "Spider", "Squirrel", "Sunbird", "Swallow", "Swift", "Tiger",
"Sylph", "Tanager", "Vulture", "Warthog", "Waterbuck", "Woodpecker",
"Zebra"
]
# Tasks which will be created for Assets
asset_tasks = [
"Modeling", "Lookdev", "Rigging"
]
# Tasks which will be created for Shots
shot_tasks = [
"Animation", "Lighting", "Compositing", "FX"
]
# Define how many sequences will be created
default_seq_count = 5
# Define how many shots will be created for each sequence
default_shots_count = 10
max_entities_created_at_one_commit = 50
existing_projects = None
new_project_item = "< New Project >"
current_project_item = "< Current Project >"
settings_key = "seed_project"
def discover(self, session, entities, event):
''' Validation '''
if not self.valid_roles(session, entities, event):
return False
return True
def interface(self, session, entities, event):
if event["data"].get("values", {}):
return
title = "Select Project where you want to create seed data"
items = []
item_splitter = {"type": "label", "value": "---"}
description_label = {
"type": "label",
"value": (
"WARNING: Action does NOT check if entities already exist !!!"
)
}
items.append(description_label)
all_projects = session.query("select full_name from Project").all()
self.existing_projects = [proj["full_name"] for proj in all_projects]
projects_items = [
{"label": proj, "value": proj} for proj in self.existing_projects
]
data_items = []
data_items.append({
"label": self.new_project_item,
"value": self.new_project_item
})
data_items.append({
"label": self.current_project_item,
"value": self.current_project_item
})
data_items.extend(sorted(
projects_items,
key=itemgetter("label"),
reverse=False
))
projects_item = {
"label": "Choose Project",
"type": "enumerator",
"name": "project_name",
"data": data_items,
"value": self.current_project_item
}
items.append(projects_item)
items.append(item_splitter)
items.append({
"label": "Number of assets",
"type": "number",
"name": "asset_count",
"value": len(self.assets)
})
items.append({
"label": "Number of sequences",
"type": "number",
"name": "seq_count",
"value": self.default_seq_count
})
items.append({
"label": "Number of shots",
"type": "number",
"name": "shots_count",
"value": self.default_shots_count
})
items.append(item_splitter)
note_label = {
"type": "label",
"value": (
"<p><i>NOTE: Enter project name and choose schema if you "
"chose `\"< New Project >\"`(code is optional)</i><p>"
)
}
items.append(note_label)
items.append({
"label": "Project name",
"name": "new_project_name",
"type": "text",
"value": ""
})
project_schemas = [
sch["name"] for sch in self.session.query("ProjectSchema").all()
]
schemas_item = {
"label": "Choose Schema",
"type": "enumerator",
"name": "new_schema_name",
"data": [
{"label": sch, "value": sch} for sch in project_schemas
],
"value": project_schemas[0]
}
items.append(schemas_item)
items.append({
"label": "*Project code",
"name": "new_project_code",
"type": "text",
"value": "",
"empty_text": "Optional..."
})
return {
"items": items,
"title": title
}
def launch(self, session, in_entities, event):
if "values" not in event["data"]:
return
# THIS IS THE PROJECT PART
values = event["data"]["values"]
selected_project = values["project_name"]
if selected_project == self.new_project_item:
project_name = values["new_project_name"]
if project_name in self.existing_projects:
msg = "Project \"{}\" already exist".format(project_name)
self.log.error(msg)
return {"success": False, "message": msg}
project_code = values["new_project_code"]
project_schema_name = values["new_schema_name"]
if not project_code:
project_code = project_name
project_code = project_code.lower().replace(" ", "_").strip()
_project = session.query(
"Project where name is \"{}\"".format(project_code)
).first()
if _project:
msg = "Project with code \"{}\" already exist".format(
project_code
)
self.log.error(msg)
return {"success": False, "message": msg}
project_schema = session.query(
"ProjectSchema where name is \"{}\"".format(
project_schema_name
)
).one()
# Create the project with the chosen schema.
self.log.debug((
"*** Creating Project: name <{}>, code <{}>, schema <{}>"
).format(project_name, project_code, project_schema_name))
project = session.create("Project", {
"name": project_code,
"full_name": project_name,
"project_schema": project_schema
})
session.commit()
elif selected_project == self.current_project_item:
entity = in_entities[0]
if entity.entity_type.lower() == "project":
project = entity
else:
if "project" in entity:
project = entity["project"]
else:
project = entity["parent"]["project"]
project_schema = project["project_schema"]
self.log.debug((
"*** Using Project: name <{}>, code <{}>, schema <{}>"
).format(
project["full_name"], project["name"], project_schema["name"]
))
else:
project = session.query("Project where full_name is \"{}\"".format(
selected_project
)).one()
project_schema = project["project_schema"]
self.log.debug((
"*** Using Project: name <{}>, code <{}>, schema <{}>"
).format(
project["full_name"], project["name"], project_schema["name"]
))
# THIS IS THE MAGIC PART
task_types = {}
for _type in project_schema["_task_type_schema"]["types"]:
if _type["name"] not in task_types:
task_types[_type["name"]] = _type
self.task_types = task_types
asset_count = values.get("asset_count") or len(self.assets)
seq_count = values.get("seq_count") or self.default_seq_count
shots_count = values.get("shots_count") or self.default_shots_count
self.create_assets(project, asset_count)
self.create_shots(project, seq_count, shots_count)
return True
def create_assets(self, project, asset_count):
self.log.debug("*** Creating assets:")
try:
asset_count = int(asset_count)
except ValueError:
asset_count = 0
if asset_count <= 0:
self.log.debug("No assets to create")
return
main_entity = self.session.create("Folder", {
"name": "Assets",
"parent": project
})
self.log.debug("- Assets")
available_assets = len(self.assets)
repetitive_times = (
int(asset_count / available_assets) +
(asset_count % available_assets > 0)
)
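# Ceil division: passes over the asset name list needed to reach asset_count.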
index = 0
created_entities = 0
to_create_length = asset_count + (asset_count * len(self.asset_tasks))
for _asset_name in self.assets:
if created_entities >= to_create_length:
break
for asset_num in range(1, repetitive_times + 1):
if created_entities >= asset_count:
break
asset_name = "%s_%02d" % (_asset_name, asset_num)
asset = self.session.create("AssetBuild", {
"name": asset_name,
"parent": main_entity
})
self.log.debug("- Assets/{}".format(asset_name))
created_entities += 1
index += 1
if self.temp_commit(index, created_entities, to_create_length):
index = 0
for task_name in self.asset_tasks:
self.session.create("Task", {
"name": task_name,
"parent": asset,
"type": self.task_types[task_name]
})
self.log.debug("- Assets/{}/{}".format(
asset_name, task_name
))
created_entities += 1
index += 1
if self.temp_commit(
index, created_entities, to_create_length
):
index = 0
self.log.debug("*** Commiting Assets")
self.log.debug("Commiting entities. {}/{}".format(
created_entities, to_create_length
))
self.session.commit()
def create_shots(self, project, seq_count, shots_count):
self.log.debug("*** Creating shots:")
# Convert counts to integers
try:
seq_count = int(seq_count)
except ValueError:
seq_count = 0
try:
shots_count = int(shots_count)
except ValueError:
shots_count = 0
# Check if both are higher than 0
missing = []
if seq_count <= 0:
missing.append("sequences")
if shots_count <= 0:
missing.append("shots")
if missing:
self.log.debug("No {} to create".format(" and ".join(missing)))
return
# Create Folder "Shots"
main_entity = self.session.create("Folder", {
"name": "Shots",
"parent": project
})
self.log.debug("- Shots")
index = 0
created_entities = 0
to_create_length = (
seq_count
+ (seq_count * shots_count)
+ (seq_count * shots_count * len(self.shot_tasks))
)
for seq_num in range(1, seq_count + 1):
seq_name = "sq%03d" % seq_num
seq = self.session.create("Sequence", {
"name": seq_name,
"parent": main_entity
})
self.log.debug("- Shots/{}".format(seq_name))
created_entities += 1
index += 1
if self.temp_commit(index, created_entities, to_create_length):
index = 0
for shot_num in range(1, shots_count + 1):
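# Shot numbers step by ten, e.g. "sq001sh0010", "sq001sh0020", ...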
shot_name = "%ssh%04d" % (seq_name, (shot_num * 10))
shot = self.session.create("Shot", {
"name": shot_name,
"parent": seq
})
self.log.debug("- Shots/{}/{}".format(seq_name, shot_name))
created_entities += 1
index += 1
if self.temp_commit(index, created_entities, to_create_length):
index = 0
for task_name in self.shot_tasks:
self.session.create("Task", {
"name": task_name,
"parent": shot,
"type": self.task_types[task_name]
})
self.log.debug("- Shots/{}/{}/{}".format(
seq_name, shot_name, task_name
))
created_entities += 1
index += 1
if self.temp_commit(
index, created_entities, to_create_length
):
index = 0
self.log.debug("*** Commiting Shots")
self.log.debug("Commiting entities. {}/{}".format(
created_entities, to_create_length
))
self.session.commit()
def temp_commit(self, index, created_entities, to_create_length):
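"""Commit once `index` reaches the batch limit; caller resets the counter."""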
if index < self.max_entities_created_at_one_commit:
return False
self.log.debug("Commiting {} entities. {}/{}".format(
index, created_entities, to_create_length
))
self.session.commit()
return True
def register(session):
'''Register plugin. Called when used as a plugin.'''
SeedDebugProject(session).register()

View file

@ -0,0 +1,466 @@
import os
import errno
import json
import requests
from bson.objectid import ObjectId
from openpype_modules.ftrack.lib import BaseAction, statics_icon
from openpype.api import Anatomy
from avalon.api import AvalonMongoDB
from openpype_modules.ftrack.lib.avalon_sync import CUST_ATTR_ID_KEY
class StoreThumbnailsToAvalon(BaseAction):
# Action identifier
identifier = "store.thubmnail.to.avalon"
# Action label
label = "OpenPype Admin"
# Action variant
variant = "- Store Thumbnails to avalon"
# Action description
description = 'Store thumbnails of selected versions to avalon'
# Action icon
icon = statics_icon("ftrack", "action_icons", "OpenPypeAdmin.svg")
settings_key = "store_thubmnail_to_avalon"
thumbnail_key = "AVALON_THUMBNAIL_ROOT"
def __init__(self, *args, **kwargs):
self.db_con = AvalonMongoDB()
super(StoreThumbnailsToAvalon, self).__init__(*args, **kwargs)
def discover(self, session, entities, event):
is_valid = False
for entity in entities:
if entity.entity_type.lower() == "assetversion":
is_valid = True
break
if is_valid:
is_valid = self.valid_roles(session, entities, event)
return is_valid
def launch(self, session, entities, event):
user = session.query(
"User where username is '{0}'".format(session.api_user)
).one()
action_job = session.create("Job", {
"user": user,
"status": "running",
"data": json.dumps({
"description": "Storing thumbnails to avalon."
})
})
session.commit()
project = self.get_project_from_entity(entities[0])
project_name = project["full_name"]
anatomy = Anatomy(project_name)
if "publish" not in anatomy.templates:
msg = "Anatomy does not have set publish key!"
action_job["status"] = "failed"
session.commit()
self.log.warning(msg)
return {
"success": False,
"message": msg
}
if "thumbnail" not in anatomy.templates["publish"]:
msg = (
"\"thumbnail\" template is not set"
" in Anatomy for project \"{}\""
).format(project_name)
action_job["status"] = "failed"
session.commit()
self.log.warning(msg)
return {
"success": False,
"message": msg
}
thumbnail_roots = os.environ.get(self.thumbnail_key)
if (
"{thumbnail_root}" in anatomy.templates["publish"]["thumbnail"]
and not thumbnail_roots
):
msg = "`{}` environment is not set".format(self.thumbnail_key)
action_job["status"] = "failed"
session.commit()
self.log.warning(msg)
return {
"success": False,
"message": msg
}
existing_thumbnail_root = None
for path in thumbnail_roots.split(os.pathsep):
if os.path.exists(path):
existing_thumbnail_root = path
break
if existing_thumbnail_root is None:
msg = (
"Can't access paths, set in `{}` ({})"
).format(self.thumbnail_key, thumbnail_roots)
action_job["status"] = "failed"
session.commit()
self.log.warning(msg)
return {
"success": False,
"message": msg
}
example_template_data = {
"_id": "ID",
"thumbnail_root": "THUBMNAIL_ROOT",
"thumbnail_type": "THUMBNAIL_TYPE",
"ext": ".EXT",
"project": {
"name": "PROJECT_NAME",
"code": "PROJECT_CODE"
},
"asset": "ASSET_NAME",
"subset": "SUBSET_NAME",
"version": "VERSION_NAME",
"hierarchy": "HIERARCHY"
}
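# Fill templates with placeholder data first to detect missing keys early.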
tmp_filled = anatomy.format_all(example_template_data)
thumbnail_result = tmp_filled["publish"]["thumbnail"]
if not thumbnail_result.solved:
missing_keys = thumbnail_result.missing_keys
invalid_types = thumbnail_result.invalid_types
submsg = ""
if missing_keys:
submsg += "Missing keys: {}".format(", ".join(
["\"{}\"".format(key) for key in missing_keys]
))
if invalid_types:
items = []
for key, value in invalid_types.items():
items.append("{}{}".format(str(key), str(value)))
submsg += "Invalid types: {}".format(", ".join(items))
msg = (
"Thumbnail Anatomy template expects more keys than action"
" can offer. {}"
).format(submsg)
action_job["status"] = "failed"
session.commit()
self.log.warning(msg)
return {
"success": False,
"message": msg
}
thumbnail_template = anatomy.templates["publish"]["thumbnail"]
self.db_con.install()
for entity in entities:
# Skip if entity is not AssetVersion (should never happen, but...)
if entity.entity_type.lower() != "assetversion":
continue
# Skip if AssetVersion doesn't have thumbnail
thumbnail_ent = entity["thumbnail"]
if thumbnail_ent is None:
self.log.debug((
"Skipping. AssetVersion does not"
" have thumbnail set. {}"
).format(entity["id"]))
continue
avalon_ents_result = self.get_avalon_entities_for_assetversion(
entity, self.db_con
)
version_full_path = (
"Asset: \"{project_name}/{asset_path}\""
" | Subset: \"{subset_name}\""
" | Version: \"{version_name}\""
).format(**avalon_ents_result)
version = avalon_ents_result["version"]
if not version:
self.log.warning((
"AssetVersion does not have version in avalon. {}"
).format(version_full_path))
continue
thumbnail_id = version["data"].get("thumbnail_id")
if thumbnail_id:
self.log.info((
"AssetVersion skipped, already has thubmanil set. {}"
).format(version_full_path))
continue
# Get thumbnail extension
file_ext = thumbnail_ent["file_type"]
if not file_ext.startswith("."):
file_ext = ".{}".format(file_ext)
avalon_project = avalon_ents_result["project"]
avalon_asset = avalon_ents_result["asset"]
hierarchy = ""
parents = avalon_asset["data"].get("parents") or []
if parents:
hierarchy = "/".join(parents)
# Prepare anatomy template fill data
# 1. Create new id for thumbnail entity
thumbnail_id = ObjectId()
template_data = {
"_id": str(thumbnail_id),
"thumbnail_root": existing_thumbnail_root,
"thumbnail_type": "thumbnail",
"ext": file_ext,
"project": {
"name": avalon_project["name"],
"code": avalon_project["data"].get("code")
},
"asset": avalon_ents_result["asset_name"],
"subset": avalon_ents_result["subset_name"],
"version": avalon_ents_result["version_name"],
"hierarchy": hierarchy
}
anatomy_filled = anatomy.format(template_data)
thumbnail_path = anatomy_filled["publish"]["thumbnail"]
thumbnail_path = thumbnail_path.replace("..", ".")
thumbnail_path = os.path.normpath(thumbnail_path)
downloaded = False
for loc in (thumbnail_ent.get("component_locations") or []):
res_id = loc.get("resource_identifier")
if not res_id:
continue
thumbnail_url = self.get_thumbnail_url(res_id)
if self.download_file(thumbnail_url, thumbnail_path):
downloaded = True
break
if not downloaded:
self.log.warning(
"Could not download thumbnail for {}".format(
version_full_path
)
)
continue
# Clean template data from keys that are dynamic
template_data.pop("_id")
template_data.pop("thumbnail_root")
thumbnail_entity = {
"_id": thumbnail_id,
"type": "thumbnail",
"schema": "openpype:thumbnail-1.0",
"data": {
"template": thumbnail_template,
"template_data": template_data
}
}
# Create thumbnail entity
self.db_con.insert_one(thumbnail_entity)
self.log.debug(
"Creating entity in database {}".format(str(thumbnail_entity))
)
# Set thumbnail id for version
self.db_con.update_one(
{"_id": version["_id"]},
{"$set": {"data.thumbnail_id": thumbnail_id}}
)
self.db_con.update_one(
{"_id": avalon_asset["_id"]},
{"$set": {"data.thumbnail_id": thumbnail_id}}
)
action_job["status"] = "done"
session.commit()
return True
def get_thumbnail_url(self, resource_identifier, size=None):
# TODO use ftrack_api method instead (find out how to use it)
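# Resulting URL shape (hypothetical values):
#   https://mystudio.ftrackapp.com/component/thumbnail?id=<id>&username=<user>&apiKey=<key>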
url_string = (
u'{url}/component/thumbnail?id={id}&username={username}'
u'&apiKey={apiKey}'
)
url = url_string.format(
url=self.session.server_url,
id=resource_identifier,
username=self.session.api_user,
apiKey=self.session.api_key
)
if size:
url += u'&size={0}'.format(size)
return url
def download_file(self, source_url, dst_file_path):
dir_path = os.path.dirname(dst_file_path)
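# Create the destination folder; tolerate it already existing (pre "exist_ok" idiom).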
try:
os.makedirs(dir_path)
except OSError as exc:
if exc.errno != errno.EEXIST:
self.log.warning(
"Could not create folder: \"{}\"".format(dir_path)
)
return False
self.log.debug(
"Downloading file \"{}\" -> \"{}\"".format(
source_url, dst_file_path
)
)
file_open = open(dst_file_path, "wb")
try:
file_open.write(requests.get(source_url).content)
except Exception:
self.log.warning(
"Download of image `{}` failed.".format(source_url)
)
return False
finally:
file_open.close()
return True
def get_avalon_entities_for_assetversion(self, asset_version, db_con):
output = {
"success": True,
"message": None,
"project": None,
"project_name": None,
"asset": None,
"asset_name": None,
"asset_path": None,
"subset": None,
"subset_name": None,
"version": None,
"version_name": None,
"representations": None
}
db_con.install()
ft_asset = asset_version["asset"]
subset_name = ft_asset["name"]
version = asset_version["version"]
parent = ft_asset["parent"]
ent_path = "/".join(
[ent["name"] for ent in parent["link"]]
)
project = self.get_project_from_entity(asset_version)
project_name = project["full_name"]
output["project_name"] = project_name
output["asset_name"] = parent["name"]
output["asset_path"] = ent_path
output["subset_name"] = subset_name
output["version_name"] = version
db_con.Session["AVALON_PROJECT"] = project_name
avalon_project = db_con.find_one({"type": "project"})
output["project"] = avalon_project
if not avalon_project:
output["success"] = False
output["message"] = (
"Project not synchronized to avalon `{}`".format(project_name)
)
return output
asset_ent = None
asset_mongo_id = parent["custom_attributes"].get(CUST_ATTR_ID_KEY)
if asset_mongo_id:
try:
asset_mongo_id = ObjectId(asset_mongo_id)
asset_ent = db_con.find_one({
"type": "asset",
"_id": asset_mongo_id
})
except Exception:
pass
if not asset_ent:
asset_ent = db_con.find_one({
"type": "asset",
"data.ftrackId": parent["id"]
})
output["asset"] = asset_ent
if not asset_ent:
output["success"] = False
output["message"] = (
"Not synchronized entity to avalon `{}`".format(ent_path)
)
return output
asset_mongo_id = asset_ent["_id"]
subset_ent = db_con.find_one({
"type": "subset",
"parent": asset_mongo_id,
"name": subset_name
})
output["subset"] = subset_ent
if not subset_ent:
output["success"] = False
output["message"] = (
"Subset `{}` does not exist under Asset `{}`"
).format(subset_name, ent_path)
return output
version_ent = db_con.find_one({
"type": "version",
"name": version,
"parent": subset_ent["_id"]
})
output["version"] = version_ent
if not version_ent:
output["success"] = False
output["message"] = (
"Version `{}` does not exist under Subset `{}` | Asset `{}`"
).format(version, subset_name, ent_path)
return output
repre_ents = list(db_con.find({
"type": "representation",
"parent": version_ent["_id"]
}))
output["representations"] = repre_ents
return output
def register(session):
StoreThumbnailsToAvalon(session).register()

View file

@ -0,0 +1,217 @@
import time
import sys
import json
import traceback
from openpype_modules.ftrack.lib import BaseAction, statics_icon
from openpype_modules.ftrack.lib.avalon_sync import SyncEntitiesFactory
class SyncToAvalonLocal(BaseAction):
"""
Synchronizing data action - from Ftrack to Avalon DB
Stores all information about entity.
- Name(string) - Most important information = identifier of entity
- Parent(ObjectId) - Avalon Project Id, if entity is not project itself
- Data(dictionary):
- VisualParent(ObjectId) - Avalon Id of parent asset
- Parents(array of string) - All parent names except project
- Tasks(array of string) - Tasks on asset
- FtrackId(string)
- entityType(string) - entity's type on Ftrack
* All Custom attributes in group 'Avalon'
- custom attributes that start with 'avalon_' are skipped
* This information is stored for all entities in the whole project.
Avalon ID of asset is stored to Ftrack
- Custom attribute 'avalon_mongo_id'.
- action DOES NOT create this Custom attribute if it doesn't exist
- run 'Create Custom Attributes' action
- or do it manually (Not recommended)
"""
identifier = "sync.to.avalon.local"
label = "OpenPype Admin"
variant = "- Sync To Avalon (Local)"
priority = 200
icon = statics_icon("ftrack", "action_icons", "OpenPypeAdmin.svg")
settings_key = "sync_to_avalon_local"
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.entities_factory = SyncEntitiesFactory(self.log, self.session)
def discover(self, session, entities, event):
""" Validate selection. """
is_valid = False
for ent in event["data"]["selection"]:
# Ignore entities that are not tasks or projects
if ent["entityType"].lower() in ["show", "task"]:
is_valid = True
break
if is_valid:
is_valid = self.valid_roles(session, entities, event)
return is_valid
def launch(self, session, in_entities, event):
self.log.debug("{}: Creating job".format(self.label))
user_entity = session.query(
"User where id is {}".format(event["source"]["user"]["id"])
).one()
job_entity = session.create("Job", {
"user": user_entity,
"status": "running",
"data": json.dumps({
"description": "Sync to avalon is running..."
})
})
session.commit()
project_entity = self.get_project_from_entity(in_entities[0])
project_name = project_entity["full_name"]
try:
result = self.synchronization(event, project_name)
except Exception:
self.log.error(
"Synchronization failed due to code error", exc_info=True
)
description = "Sync to avalon Crashed (Download traceback)"
self.add_traceback_to_job(
job_entity, session, sys.exc_info(), description
)
msg = "An error has happened during synchronization"
title = "Synchronization report ({}):".format(project_name)
items = []
items.append({
"type": "label",
"value": "# {}".format(msg)
})
items.append({
"type": "label",
"value": (
"<p>Download report from job for more information.</p>"
)
})
report = {}
try:
report = self.entities_factory.report()
except Exception:
pass
_items = report.get("items") or []
if _items:
items.append(self.entities_factory.report_splitter)
items.extend(_items)
self.show_interface(items, title, event, submit_btn_label="Ok")
return {"success": True, "message": msg}
job_entity["status"] = "done"
job_entity["data"] = json.dumps({
"description": "Sync to avalon finished."
})
session.commit()
return result
def synchronization(self, event, project_name):
time_start = time.time()
self.show_message(event, "Synchronization - Preparing data", True)
try:
output = self.entities_factory.launch_setup(project_name)
if output is not None:
return output
time_1 = time.time()
self.entities_factory.set_cutom_attributes()
time_2 = time.time()
# This must happen before all filtering!!!
self.entities_factory.prepare_avalon_entities(project_name)
time_3 = time.time()
self.entities_factory.filter_by_ignore_sync()
time_4 = time.time()
self.entities_factory.duplicity_regex_check()
time_5 = time.time()
self.entities_factory.prepare_ftrack_ent_data()
time_6 = time.time()
self.entities_factory.synchronize()
time_7 = time.time()
self.log.debug(
"*** Synchronization finished ***"
)
self.log.debug(
"preparation <{}>".format(time_1 - time_start)
)
self.log.debug(
"set_cutom_attributes <{}>".format(time_2 - time_1)
)
self.log.debug(
"prepare_avalon_entities <{}>".format(time_3 - time_2)
)
self.log.debug(
"filter_by_ignore_sync <{}>".format(time_4 - time_3)
)
self.log.debug(
"duplicity_regex_check <{}>".format(time_5 - time_4)
)
self.log.debug(
"prepare_ftrack_ent_data <{}>".format(time_6 - time_5)
)
self.log.debug(
"synchronize <{}>".format(time_7 - time_6)
)
self.log.debug(
"* Total time: {}".format(time_7 - time_start)
)
report = self.entities_factory.report()
if report and report.get("items"):
default_title = "Synchronization report ({}):".format(
project_name
)
self.show_interface(
items=report["items"],
title=report.get("title", default_title),
event=event
)
return {
"success": True,
"message": "Synchronization Finished"
}
finally:
try:
self.entities_factory.dbcon.uninstall()
except Exception:
pass
try:
self.entities_factory.session.close()
except Exception:
pass
def register(session):
'''Register plugin. Called when used as a plugin.'''
SyncToAvalonLocal(session).register()

View file

@ -0,0 +1,26 @@
from openpype_modules.ftrack.lib import BaseAction, statics_icon
class TestAction(BaseAction):
"""Action for testing purpose or as base for new actions."""
ignore_me = True
identifier = 'test.action'
label = 'Test action'
description = 'Test action'
priority = 10000
role_list = ['Pypeclub']
icon = statics_icon("ftrack", "action_icons", "TestAction.svg")
def discover(self, session, entities, event):
return True
def launch(self, session, entities, event):
self.log.info(event)
return True
def register(session):
TestAction(session).register()

View file

@ -0,0 +1,63 @@
import json
from openpype_modules.ftrack.lib import BaseAction, statics_icon
class ThumbToChildren(BaseAction):
'''Custom action.'''
# Action identifier
identifier = 'thumb.to.children'
# Action label
label = 'Thumbnail'
# Action variant
variant = " to Children"
# Action icon
icon = statics_icon("ftrack", "action_icons", "Thumbnail.svg")
def discover(self, session, entities, event):
"""Show only on project."""
if (len(entities) != 1 or entities[0].entity_type in ["Project"]):
return False
return True
def launch(self, session, entities, event):
'''Callback method for action.'''
userId = event['source']['user']['id']
user = session.query('User where id is ' + userId).one()
job = session.create('Job', {
'user': user,
'status': 'running',
'data': json.dumps({
'description': 'Push thumbnails to children'
})
})
session.commit()
try:
for entity in entities:
thumbid = entity['thumbnail_id']
if thumbid:
for child in entity['children']:
child['thumbnail_id'] = thumbid
# inform the user that the job is done
job['status'] = 'done'
except Exception as exc:
session.rollback()
# fail the job if something goes wrong
job['status'] = 'failed'
raise exc
finally:
session.commit()
return {
'success': True,
'message': 'Created job for updating thumbnails!'
}
def register(session):
'''Register action. Called when used as an event plugin.'''
ThumbToChildren(session).register()

View file

@ -0,0 +1,91 @@
import json
from openpype_modules.ftrack.lib import BaseAction, statics_icon
class ThumbToParent(BaseAction):
'''Custom action.'''
# Action identifier
identifier = 'thumb.to.parent'
# Action label
label = 'Thumbnail'
# Action variant
variant = " to Parent"
# Action icon
icon = statics_icon("ftrack", "action_icons", "Thumbnail.svg")
def discover(self, session, entities, event):
'''Show if any entities are selected and the first is not a project.'''
if len(entities) <= 0 or entities[0].entity_type in ['Project']:
return False
return True
def launch(self, session, entities, event):
'''Callback method for action.'''
userId = event['source']['user']['id']
user = session.query('User where id is ' + userId).one()
job = session.create('Job', {
'user': user,
'status': 'running',
'data': json.dumps({
'description': 'Push thumbnails to parents'
})
})
session.commit()
try:
for entity in entities:
parent = None
thumbid = None
if entity.entity_type.lower() == 'assetversion':
parent = entity['task']
if parent is None:
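# "link" holds the chain from project to entity; [-2] is the direct parent.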
par_ent = entity['link'][-2]
parent = session.get(par_ent['type'], par_ent['id'])
else:
try:
parent = entity['parent']
except Exception as e:
msg = (
"Something went wrong during"
" action 'Thumb to Parent'"
)
self.log.error(msg)
raise e
thumbid = entity['thumbnail_id']
if parent and thumbid:
parent['thumbnail_id'] = thumbid
status = 'done'
else:
raise Exception(
"Parent or thumbnail id not found. Parent: {}. "
"Thumbnail id: {}".format(parent, thumbid)
)
# inform the user that the job is done
job['status'] = status or 'done'
except Exception as exc:
session.rollback()
# fail the job if something goes wrong
job['status'] = 'failed'
raise exc
finally:
session.commit()
return {
'success': True,
'message': 'Created job for updating thumbnails!'
}
def register(session):
'''Register action. Called when used as an event plugin.'''
ThumbToParent(session).register()

View file

@ -0,0 +1,98 @@
import platform
import socket
import getpass
from openpype_modules.ftrack.lib import BaseAction
class ActionWhereIRun(BaseAction):
"""Show where same user has running OpenPype instances."""
identifier = "ask.where.i.run"
show_identifier = "show.where.i.run"
label = "OpenPype Admin"
variant = "- Where I run"
description = "Show PC info where user have running OpenPype"
def _discover(self, _event):
return {
"items": [{
"label": self.label,
"variant": self.variant,
"description": self.description,
"actionIdentifier": self.discover_identifier,
"icon": self.icon,
}]
}
def _launch(self, event):
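# Launch only re-emits the event under show_identifier; _show_info handles it.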
self.trigger_action(self.show_identifier, event)
def register(self):
# Register default action callbacks
super(ActionWhereIRun, self).register()
# Add show identifier
show_subscription = (
"topic=ftrack.action.launch"
" and data.actionIdentifier={}"
" and source.user.username={}"
).format(
self.show_identifier,
self.session.api_user
)
self.session.event_hub.subscribe(
show_subscription,
self._show_info
)
def _show_info(self, event):
title = "Where Do I Run?"
msgs = {}
all_keys = ["Hostname", "IP", "Username", "System name", "PC name"]
try:
host_name = socket.gethostname()
msgs["Hostname"] = host_name
host_ip = socket.gethostbyname(host_name)
msgs["IP"] = host_ip
except Exception:
pass
try:
system_name, pc_name, *_ = platform.uname()
msgs["System name"] = system_name
msgs["PC name"] = pc_name
except Exception:
pass
try:
msgs["Username"] = getpass.getuser()
except Exception:
pass
for key in all_keys:
if not msgs.get(key):
msgs[key] = "-Undefined-"
items = []
first = True
separator = {"type": "label", "value": "---"}
for key, value in msgs.items():
if first:
first = False
else:
items.append(separator)
self.log.debug("{}: {}".format(key, value))
subtitle = {"type": "label", "value": "<h3>{}</h3>".format(key)}
items.append(subtitle)
message = {"type": "label", "value": "<p>{}</p>".format(value)}
items.append(message)
self.show_interface(items, title, event=event)
def register(session):
'''Register plugin. Called when used as a plugin.'''
ActionWhereIRun(session).register()

View file

@ -0,0 +1,494 @@
import os
import json
import collections
import platform
import click
from openpype.modules import OpenPypeModule
from openpype_interfaces import (
ITrayModule,
IPluginPaths,
ILaunchHookPaths,
ISettingsChangeListener
)
from openpype.settings import SaveWarningExc
FTRACK_MODULE_DIR = os.path.dirname(os.path.abspath(__file__))
class FtrackModule(
OpenPypeModule,
ITrayModule,
IPluginPaths,
ILaunchHookPaths,
ISettingsChangeListener
):
name = "ftrack"
def initialize(self, settings):
ftrack_settings = settings[self.name]
self.enabled = ftrack_settings["enabled"]
# Add http schema
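# e.g. "mystudio" is expanded to "https://mystudio.ftrackapp.com"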
ftrack_url = ftrack_settings["ftrack_server"].strip("/ ")
if ftrack_url:
if "http" not in ftrack_url:
ftrack_url = "https://" + ftrack_url
# Check if "ftrack.app" is part os url
if "ftrackapp.com" not in ftrack_url:
ftrack_url = ftrack_url + ".ftrackapp.com"
self.ftrack_url = ftrack_url
current_dir = os.path.dirname(os.path.abspath(__file__))
low_platform = platform.system().lower()
# Server event handler paths
server_event_handlers_paths = [
os.path.join(current_dir, "event_handlers_server")
]
settings_server_paths = ftrack_settings["ftrack_events_path"]
if isinstance(settings_server_paths, dict):
settings_server_paths = settings_server_paths[low_platform]
server_event_handlers_paths.extend(settings_server_paths)
# User event handler paths
user_event_handlers_paths = [
os.path.join(current_dir, "event_handlers_user")
]
settings_action_paths = ftrack_settings["ftrack_actions_path"]
if isinstance(settings_action_paths, dict):
settings_action_paths = settings_action_paths[low_platform]
user_event_handlers_paths.extend(settings_action_paths)
# Prepare attribute
self.server_event_handlers_paths = server_event_handlers_paths
self.user_event_handlers_paths = user_event_handlers_paths
self.tray_module = None
# TimersManager connection
self.timers_manager_connector = None
self._timers_manager_module = None
def get_global_environments(self):
"""Ftrack's global environments."""
return {
"FTRACK_SERVER": self.ftrack_url
}
def get_plugin_paths(self):
"""Ftrack plugin paths."""
return {
"publish": [os.path.join(FTRACK_MODULE_DIR, "plugins", "publish")]
}
def get_launch_hook_paths(self):
"""Implementation of `ILaunchHookPaths`."""
return os.path.join(FTRACK_MODULE_DIR, "launch_hooks")
def connect_with_modules(self, enabled_modules):
for module in enabled_modules:
if not hasattr(module, "get_ftrack_event_handler_paths"):
continue
try:
paths_by_type = module.get_ftrack_event_handler_paths()
except Exception:
continue
if not isinstance(paths_by_type, dict):
continue
for key, value in paths_by_type.items():
if not value:
continue
if key not in ("server", "user"):
self.log.warning(
"Unknown event handlers key \"{}\" skipping.".format(
key
)
)
continue
if not isinstance(value, (list, tuple, set)):
value = [value]
if key == "server":
self.server_event_handlers_paths.extend(value)
elif key == "user":
self.user_event_handlers_paths.extend(value)
def on_system_settings_save(
self, old_value, new_value, changes, new_value_metadata
):
"""Implementation of ISettingsChangeListener interface."""
if not self.ftrack_url:
raise SaveWarningExc((
"Ftrack URL is not set."
" Can't propagate changes to Ftrack server."
))
ftrack_changes = changes.get("modules", {}).get("ftrack", {})
url_change_msg = None
if "ftrack_server" in ftrack_changes:
url_change_msg = (
"Ftrack URL was changed."
" This change may require an OpenPype restart to take effect."
)
try:
session = self.create_ftrack_session()
except Exception:
self.log.warning("Couldn't create ftrack session.", exc_info=True)
if url_change_msg:
raise SaveWarningExc(url_change_msg)
raise SaveWarningExc((
"Saving of attributes to ftrack wasn't successful,"
" try running Create/Update Avalon Attributes in ftrack."
))
from .lib import (
get_openpype_attr,
CUST_ATTR_APPLICATIONS,
CUST_ATTR_TOOLS,
app_definitions_from_app_manager,
tool_definitions_from_app_manager
)
from openpype.api import ApplicationManager
query_keys = [
"id",
"key",
"config"
]
custom_attributes = get_openpype_attr(
session,
split_hierarchical=False,
query_keys=query_keys
)
app_attribute = None
tool_attribute = None
for custom_attribute in custom_attributes:
key = custom_attribute["key"]
if key == CUST_ATTR_APPLICATIONS:
app_attribute = custom_attribute
elif key == CUST_ATTR_TOOLS:
tool_attribute = custom_attribute
app_manager = ApplicationManager(new_value_metadata)
missing_attributes = []
if not app_attribute:
missing_attributes.append(CUST_ATTR_APPLICATIONS)
else:
config = json.loads(app_attribute["config"])
new_data = app_definitions_from_app_manager(app_manager)
prepared_data = []
for item in new_data:
for key, label in item.items():
prepared_data.append({
"menu": label,
"value": key
})
config["data"] = json.dumps(prepared_data)
app_attribute["config"] = json.dumps(config)
if not tool_attribute:
missing_attributes.append(CUST_ATTR_TOOLS)
else:
config = json.loads(tool_attribute["config"])
new_data = tool_definitions_from_app_manager(app_manager)
prepared_data = []
for item in new_data:
for key, label in item.items():
prepared_data.append({
"menu": label,
"value": key
})
config["data"] = json.dumps(prepared_data)
tool_attribute["config"] = json.dumps(config)
session.commit()
if missing_attributes:
raise SaveWarningExc((
"Couldn't find custom attribute/s ({}) to update."
" Try running Create/Update Avalon Attributes in ftrack."
).format(", ".join(missing_attributes)))
if url_change_msg:
raise SaveWarningExc(url_change_msg)
def on_project_settings_save(self, *_args, **_kwargs):
"""Implementation of ISettingsChangeListener interface."""
# Ignore
return
def on_project_anatomy_save(
self, old_value, new_value, changes, project_name, new_value_metadata
):
"""Implementation of ISettingsChangeListener interface."""
if not project_name:
return
new_attr_values = new_value.get("attributes")
if not new_attr_values:
return
import ftrack_api
from openpype_modules.ftrack.lib import (
get_openpype_attr,
default_custom_attributes_definition,
CUST_ATTR_TOOLS,
CUST_ATTR_APPLICATIONS,
CUST_ATTR_INTENT
)
try:
session = self.create_ftrack_session()
except Exception:
self.log.warning("Couldn't create ftrack session.", exc_info=True)
raise SaveWarningExc((
"Saving of attributes to ftrack wasn't successful,"
" try running Create/Update Avalon Attributes in ftrack."
))
project_entity = session.query(
"Project where full_name is \"{}\"".format(project_name)
).first()
if not project_entity:
msg = (
"Ftrack project with name \"{}\" was not found in Ftrack."
" Can't push attribute changes."
).format(project_name)
self.log.warning(msg)
raise SaveWarningExc(msg)
project_id = project_entity["id"]
ca_defs = default_custom_attributes_definition()
hierarchical_attrs = ca_defs.get("is_hierarchical") or {}
project_attrs = ca_defs.get("show") or {}
ca_keys = (
set(hierarchical_attrs.keys())
| set(project_attrs.keys())
| {CUST_ATTR_TOOLS, CUST_ATTR_APPLICATIONS, CUST_ATTR_INTENT}
)
cust_attr, hier_attr = get_openpype_attr(session)
cust_attr_by_key = {attr["key"]: attr for attr in cust_attr}
hier_attrs_by_key = {attr["key"]: attr for attr in hier_attr}
failed = {}
missing = {}
for key, value in new_attr_values.items():
if key not in ca_keys:
continue
configuration = hier_attrs_by_key.get(key)
if not configuration:
configuration = cust_attr_by_key.get(key)
if not configuration:
self.log.warning(
"Custom attribute \"{}\" was not found.".format(key)
)
missing[key] = value
continue
# TODO add permissions check
# TODO add value validations
# - value type and list items
entity_key = collections.OrderedDict([
("configuration_id", configuration["id"]),
("entity_id", project_id)
])
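# Old value is passed as NOT_SET so the new value is pushed unconditionally.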
session.recorded_operations.push(
ftrack_api.operation.UpdateEntityOperation(
"ContextCustomAttributeValue",
entity_key,
"value",
ftrack_api.symbol.NOT_SET,
value
)
)
try:
session.commit()
self.log.debug(
"Changed project custom attribute \"{}\" to \"{}\"".format(
key, value
)
)
except Exception:
self.log.warning(
"Failed to set \"{}\" to \"{}\"".format(key, value),
exc_info=True
)
session.rollback()
failed[key] = value
if not failed and not missing:
return
error_msg = (
"Values were not updated on Ftrack which may cause issues."
" Try running Create/Update Avalon Attributes in ftrack"
" and resave project settings."
)
if missing:
error_msg += "\nMissing Custom attributes on Ftrack: {}.".format(
", ".join([
'"{}"'.format(key)
for key in missing.keys()
])
)
if failed:
joined_failed = ", ".join([
'"{}": "{}"'.format(key, value)
for key, value in failed.items()
])
error_msg += "\nFailed to set: {}".format(joined_failed)
raise SaveWarningExc(error_msg)
def create_ftrack_session(self, **session_kwargs):
import ftrack_api
if "server_url" not in session_kwargs:
session_kwargs["server_url"] = self.ftrack_url
api_key = session_kwargs.get("api_key")
api_user = session_kwargs.get("api_user")
# First look into environments
# - both OpenPype tray and ftrack event server should have set them
# - ftrack event server may crash when credentials are loaded
# from keyring
if not api_key or not api_user:
api_key = os.environ.get("FTRACK_API_KEY")
api_user = os.environ.get("FTRACK_API_USER")
if not api_key or not api_user:
from .lib import credentials
cred = credentials.get_credentials()
api_user = cred.get("username")
api_key = cred.get("api_key")
session_kwargs["api_user"] = api_user
session_kwargs["api_key"] = api_key
return ftrack_api.Session(**session_kwargs)
def tray_init(self):
from .tray import FtrackTrayWrapper
self.tray_module = FtrackTrayWrapper(self)
# Module is its own connector to TimersManager
self.timers_manager_connector = self
def tray_menu(self, parent_menu):
return self.tray_module.tray_menu(parent_menu)
def tray_start(self):
return self.tray_module.validate()
def tray_exit(self):
self.tray_module.tray_exit()
def set_credentials_to_env(self, username, api_key):
os.environ["FTRACK_API_USER"] = username or ""
os.environ["FTRACK_API_KEY"] = api_key or ""
# --- TimersManager connection methods ---
def start_timer(self, data):
if self.tray_module:
self.tray_module.start_timer_manager(data)
def stop_timer(self):
if self.tray_module:
self.tray_module.stop_timer_manager()
def register_timers_manager(self, timer_manager_module):
self._timers_manager_module = timer_manager_module
def timer_started(self, data):
if self._timers_manager_module is not None:
self._timers_manager_module.timer_started(self.id, data)
def timer_stopped(self):
if self._timers_manager_module is not None:
self._timers_manager_module.timer_stopped(self.id)
def get_task_time(self, project_name, asset_name, task_name):
session = self.create_ftrack_session()
query = (
'Task where name is "{}"'
' and parent.name is "{}"'
' and project.full_name is "{}"'
).format(task_name, asset_name, project_name)
task_entity = session.query(query).first()
if not task_entity:
return 0
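# "time_logged" is stored in seconds; convert to hours.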
hours_logged = (task_entity["time_logged"] / 60) / 60
return hours_logged
def get_credentials(self):
# type: () -> tuple
"""Get local Ftrack credentials."""
from .lib import credentials
cred = credentials.get_credentials(self.ftrack_url)
return cred.get("username"), cred.get("api_key")
def cli(self, click_group):
click_group.add_command(cli_main)
@click.group(FtrackModule.name, help="Ftrack module related commands.")
def cli_main():
pass
@cli_main.command()
@click.option("-d", "--debug", is_flag=True, help="Print debug messages")
@click.option("--ftrack-url", envvar="FTRACK_SERVER",
help="Ftrack server url")
@click.option("--ftrack-user", envvar="FTRACK_API_USER",
help="Ftrack api user")
@click.option("--ftrack-api-key", envvar="FTRACK_API_KEY",
help="Ftrack api key")
@click.option("--legacy", is_flag=True,
help="run event server without mongo storing")
@click.option("--clockify-api-key", envvar="CLOCKIFY_API_KEY",
help="Clockify API key.")
@click.option("--clockify-workspace", envvar="CLOCKIFY_WORKSPACE",
help="Clockify workspace")
def eventserver(
debug,
ftrack_url,
ftrack_user,
ftrack_api_key,
legacy,
clockify_api_key,
clockify_workspace
):
"""Launch ftrack event server.
This should ideally be run as a system service (such as systemd or
upstart on Linux, or a Windows service).
"""
if debug:
os.environ["OPENPYPE_DEBUG"] = "3"
from .ftrack_server.event_server_cli import run_event_server
return run_event_server(
ftrack_url,
ftrack_user,
ftrack_api_key,
legacy,
clockify_api_key,
clockify_workspace
)
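# Example invocation (hypothetical CLI prefix and values):
#   openpype_console module ftrack eventserver \
#       --ftrack-url https://mystudio.ftrackapp.com \
#       --ftrack-user admin --ftrack-api-key xxxx-xxxx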

View file

@ -0,0 +1,8 @@
from .ftrack_server import FtrackServer
from .lib import check_ftrack_url
__all__ = (
"FtrackServer",
"check_ftrack_url"
)

View file

@ -0,0 +1,462 @@
import os
import sys
import signal
import datetime
import subprocess
import socket
import json
import platform
import getpass
import atexit
import time
import uuid
import ftrack_api
import pymongo
from openpype.lib import (
get_openpype_execute_args,
OpenPypeMongoConnection,
get_openpype_version,
get_build_version,
validate_mongo_connection
)
from openpype_modules.ftrack import FTRACK_MODULE_DIR
from openpype_modules.ftrack.lib import credentials
from openpype_modules.ftrack.ftrack_server.lib import check_ftrack_url
from openpype_modules.ftrack.ftrack_server import socket_thread
class MongoPermissionsError(Exception):
"""Is used when is created multiple objects of same RestApi class."""
def __init__(self, message=None):
if not message:
message = "Exiting because have issue with acces to MongoDB"
super().__init__(message)
def check_mongo_url(mongo_uri, log_error=False):
"""Checks if mongo server is responding"""
try:
validate_mongo_connection(mongo_uri)
except pymongo.errors.InvalidURI as err:
if log_error:
print("Can't connect to MongoDB at {} because: {}".format(
mongo_uri, err
))
return False
except pymongo.errors.ServerSelectionTimeoutError as err:
if log_error:
print("Can't connect to MongoDB at {} because: {}".format(
mongo_uri, err
))
return False
return True
def validate_credentials(url, user, api):
first_validation = True
if not user:
print('- Ftrack Username is not set')
first_validation = False
if not api:
print('- Ftrack API key is not set')
first_validation = False
if not first_validation:
return False
try:
session = ftrack_api.Session(
server_url=url,
api_user=user,
api_key=api
)
session.close()
except Exception as e:
print("Can't log into Ftrack with used credentials:")
ftrack_cred = {
"Ftrack server": str(url),
"Username": str(user),
"API key": str(api)
}
item_lens = [len(key) + 1 for key in ftrack_cred.keys()]
justify_len = max(*item_lens)
for key, value in ftrack_cred.items():
print("{} {}".format(
(key + ":").ljust(justify_len, " "),
value
))
return False
print('DEBUG: Credentials Username: "{}", API key: "{}" are valid.'.format(
user, api
))
return True
def legacy_server(ftrack_url):
# Current file
scripts_dir = os.path.join(FTRACK_MODULE_DIR, "scripts")
min_fail_seconds = 5
max_fail_count = 3
wait_time_after_max_fail = 10
subproc = None
subproc_path = "{}/sub_legacy_server.py".format(scripts_dir)
subproc_last_failed = datetime.datetime.now()
subproc_failed_count = 0
ftrack_accessible = False
printed_ftrack_error = False
while True:
if not ftrack_accessible:
ftrack_accessible = check_ftrack_url(ftrack_url)
# Run threads only if Ftrack is accessible
if not ftrack_accessible and not printed_ftrack_error:
print("Can't access Ftrack {} <{}>".format(
ftrack_url, str(datetime.datetime.now())
))
if subproc is not None:
if subproc.poll() is None:
subproc.terminate()
subproc = None
printed_ftrack_error = True
time.sleep(1)
continue
printed_ftrack_error = False
if subproc is None:
if subproc_failed_count < max_fail_count:
args = get_openpype_execute_args("run", subproc_path)
subproc = subprocess.Popen(
args,
stdout=subprocess.PIPE
)
elif subproc_failed_count == max_fail_count:
print((
"Storer failed {} times. Will try to run again {}s later"
).format(str(max_fail_count), str(wait_time_after_max_fail)))
subproc_failed_count += 1
elif ((
datetime.datetime.now() - subproc_last_failed
).seconds > wait_time_after_max_fail):
subproc_failed_count = 0
# If thread failed test Ftrack and Mongo connection
elif subproc.poll() is not None:
subproc = None
ftrack_accessible = False
_subproc_last_failed = datetime.datetime.now()
delta_time = (_subproc_last_failed - subproc_last_failed).seconds
if delta_time < min_fail_seconds:
subproc_failed_count += 1
else:
subproc_failed_count = 0
subproc_last_failed = _subproc_last_failed
time.sleep(1)
def main_loop(ftrack_url):
""" This is main loop of event handling.
Loop is handling threads which handles subprocesses of event storer and
processor. When one of threads is stopped it is tested to connect to
ftrack and mongo server. Threads are not started when ftrack or mongo
server is not accessible. When threads are started it is checked for socket
signals as heartbeat. Heartbeat must become at least once per 30sec
otherwise thread will be killed.
"""
os.environ["FTRACK_EVENT_SUB_ID"] = str(uuid.uuid1())
mongo_uri = OpenPypeMongoConnection.get_default_mongo_url()
# Current file
scripts_dir = os.path.join(FTRACK_MODULE_DIR, "scripts")
min_fail_seconds = 5
max_fail_count = 3
wait_time_after_max_fail = 10
# Threads data
storer_name = "StorerThread"
storer_port = 10001
storer_path = "{}/sub_event_storer.py".format(scripts_dir)
storer_thread = None
storer_last_failed = datetime.datetime.now()
storer_failed_count = 0
processor_name = "ProcessorThread"
processor_port = 10011
processor_path = "{}/sub_event_processor.py".format(scripts_dir)
processor_thread = None
processor_last_failed = datetime.datetime.now()
processor_failed_count = 0
statuser_name = "StorerThread"
statuser_port = 10021
statuser_path = "{}/sub_event_status.py".format(scripts_dir)
statuser_thread = None
statuser_last_failed = datetime.datetime.now()
statuser_failed_count = 0
ftrack_accessible = False
mongo_accessible = False
printed_ftrack_error = False
printed_mongo_error = False
# stop threads on exit
# TODO check if this works and args have thread objects!
def on_exit(processor_thread, storer_thread, statuser_thread):
if processor_thread is not None:
processor_thread.stop()
processor_thread.join()
processor_thread = None
if storer_thread is not None:
storer_thread.stop()
storer_thread.join()
storer_thread = None
if statuser_thread is not None:
statuser_thread.stop()
statuser_thread.join()
statuser_thread = None
atexit.register(
on_exit,
processor_thread=processor_thread,
storer_thread=storer_thread,
statuser_thread=statuser_thread
)
host_name = socket.gethostname()
main_info = [
["created_at", datetime.datetime.now().strftime("%Y.%m.%d %H:%M:%S")],
["Username", getpass.getuser()],
["Host Name", host_name],
["Host IP", socket.gethostbyname(host_name)],
["OpenPype executable", get_openpype_execute_args()[-1]],
["OpenPype version", get_openpype_version() or "N/A"],
["OpenPype build version", get_build_version() or "N/A"]
]
main_info_str = json.dumps(main_info)
# Main loop
while True:
# Check if accessible Ftrack and Mongo url
if not ftrack_accessible:
ftrack_accessible = check_ftrack_url(ftrack_url)
if not mongo_accessible:
mongo_accessible = check_mongo_url(mongo_uri)
# Run threads only if Ftrack is accessible
if not ftrack_accessible or not mongo_accessible:
if not mongo_accessible and not printed_mongo_error:
print("Can't access Mongo {}".format(mongo_uri))
if not ftrack_accessible and not printed_ftrack_error:
print("Can't access Ftrack {}".format(ftrack_url))
if storer_thread is not None:
storer_thread.stop()
storer_thread.join()
storer_thread = None
if processor_thread is not None:
processor_thread.stop()
processor_thread.join()
processor_thread = None
printed_ftrack_error = True
printed_mongo_error = True
time.sleep(1)
continue
printed_ftrack_error = False
printed_mongo_error = False
# ====== STATUSER =======
if statuser_thread is None:
if statuser_failed_count < max_fail_count:
statuser_thread = socket_thread.StatusSocketThread(
statuser_name, statuser_port, statuser_path,
[main_info_str]
)
statuser_thread.start()
elif statuser_failed_count == max_fail_count:
print((
"Statuser failed {} times in a row."
" Will try to run again {}s later"
).format(str(max_fail_count), str(wait_time_after_max_fail)))
statuser_failed_count += 1
elif ((
datetime.datetime.now() - statuser_last_failed
).seconds > wait_time_after_max_fail):
statuser_failed_count = 0
# If thread failed test Ftrack and Mongo connection
elif not statuser_thread.is_alive():
statuser_thread.join()
statuser_thread = None
ftrack_accessible = False
mongo_accessible = False
_processor_last_failed = datetime.datetime.now()
delta_time = (
_processor_last_failed - statuser_last_failed
).seconds
if delta_time < min_fail_seconds:
statuser_failed_count += 1
else:
statuser_failed_count = 0
statuser_last_failed = _processor_last_failed
elif statuser_thread.stop_subprocess:
print("Main process was stopped by action")
on_exit(processor_thread, storer_thread, statuser_thread)
os.kill(os.getpid(), signal.SIGTERM)
return 1
# ====== STORER =======
# Run backup thread which does not require mongo to work
if storer_thread is None:
if storer_failed_count < max_fail_count:
storer_thread = socket_thread.SocketThread(
storer_name, storer_port, storer_path
)
storer_thread.start()
elif storer_failed_count == max_fail_count:
print((
"Storer failed {} times. Will try to run again {}s later"
).format(str(max_fail_count), str(wait_time_after_max_fail)))
storer_failed_count += 1
elif ((
datetime.datetime.now() - storer_last_failed
).seconds > wait_time_after_max_fail):
storer_failed_count = 0
# If thread failed test Ftrack and Mongo connection
elif not storer_thread.is_alive():
if storer_thread.mongo_error:
raise MongoPermissionsError()
storer_thread.join()
storer_thread = None
ftrack_accessible = False
mongo_accessible = False
_storer_last_failed = datetime.datetime.now()
delta_time = (_storer_last_failed - storer_last_failed).seconds
if delta_time < min_fail_seconds:
storer_failed_count += 1
else:
storer_failed_count = 0
storer_last_failed = _storer_last_failed
# ====== PROCESSOR =======
if processor_thread is None:
if processor_failed_count < max_fail_count:
processor_thread = socket_thread.SocketThread(
processor_name, processor_port, processor_path
)
processor_thread.start()
elif processor_failed_count == max_fail_count:
print((
"Processor failed {} times in a row."
" Will try to run again {}s later"
).format(str(max_fail_count), str(wait_time_after_max_fail)))
processor_failed_count += 1
elif ((
datetime.datetime.now() - processor_last_failed
).seconds > wait_time_after_max_fail):
processor_failed_count = 0
# If thread failed test Ftrack and Mongo connection
elif not processor_thread.is_alive():
if processor_thread.mongo_error:
raise Exception(
"Exiting because of an issue with access to MongoDB"
)
processor_thread.join()
processor_thread = None
ftrack_accessible = False
mongo_accessible = False
_processor_last_failed = datetime.datetime.now()
delta_time = (
_processor_last_failed - processor_last_failed
).seconds
if delta_time < min_fail_seconds:
processor_failed_count += 1
else:
processor_failed_count = 0
processor_last_failed = _processor_last_failed
if statuser_thread is not None:
statuser_thread.set_process("storer", storer_thread)
statuser_thread.set_process("processor", processor_thread)
time.sleep(1)
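# Editorial note: the restart/backoff policy applied to all three threads
# above follows the same pattern, roughly equivalent to this sketch
# (illustrative only; main_loop uses the surrounding local variables):
#
#     def should_restart(failed_count, last_failed):
#         if failed_count < max_fail_count:
#             # Restart immediately until the failure limit is reached.
#             return True
#         waited = (datetime.datetime.now() - last_failed).seconds
#         # After the limit, back off before counters are reset.
#         return waited > wait_time_after_max_fail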
def run_event_server(
ftrack_url,
ftrack_user,
ftrack_api_key,
legacy,
clockify_api_key,
clockify_workspace
):
if not ftrack_user or not ftrack_api_key:
print((
"Ftrack user/api key were not passed."
" Trying to use credentials from user keyring."
))
cred = credentials.get_credentials(ftrack_url)
ftrack_user = cred.get("username")
ftrack_api_key = cred.get("api_key")
if clockify_workspace and clockify_api_key:
os.environ["CLOCKIFY_WORKSPACE"] = clockify_workspace
os.environ["CLOCKIFY_API_KEY"] = clockify_api_key
# Check url regex and accessibility
ftrack_url = check_ftrack_url(ftrack_url)
if not ftrack_url:
print('Exiting! < Please enter Ftrack server url >')
return 1
# Validate entered credentials
if not validate_credentials(ftrack_url, ftrack_user, ftrack_api_key):
print('Exiting! < Please enter valid credentials >')
return 1
# Set Ftrack environments
os.environ["FTRACK_SERVER"] = ftrack_url
os.environ["FTRACK_API_USER"] = ftrack_user
os.environ["FTRACK_API_KEY"] = ftrack_api_key
if legacy:
return legacy_server(ftrack_url)
return main_loop(ftrack_url)
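# A minimal invocation sketch (editorial example). The URL and
# credentials below are placeholders, not values used by this module.
if __name__ == "__main__":
    import sys
    sys.exit(run_event_server(
        ftrack_url="https://mystudio.ftrackapp.com",
        ftrack_user="event.server",
        ftrack_api_key="xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx",
        legacy=False,
        clockify_api_key=None,
        clockify_workspace=None
    ))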

View file

@ -0,0 +1,159 @@
import os
import time
import types
import logging
import traceback
import ftrack_api
from openpype.lib import (
PypeLogger,
modules_from_path
)
log = PypeLogger.get_logger(__name__)
"""
# Required - Needed for connection to Ftrack
FTRACK_SERVER # Ftrack server e.g. "https://myFtrack.ftrackapp.com"
FTRACK_API_KEY # Ftrack user's API key "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
FTRACK_API_USER # Ftrack username e.g. "user.name"
# Required - Paths to folders with actions and events
FTRACK_ACTIONS_PATH # Paths to folders where actions are located
- EXAMPLE: "M:/FtrackApi/../actions/"
FTRACK_EVENTS_PATH # Paths to folders where events are located
- EXAMPLE: "M:/FtrackApi/../events/"
# Required - Needed to import included modules
PYTHONPATH # Path to ftrack_api and paths to all modules used in actions
- path to ftrack_action_handler, etc.
"""
class FtrackServer:
def __init__(self, handler_paths=None):
"""
- Registers event handlers found on `handler_paths` and listens to
the ftrack event hub.
EXAMPLE:
...
server = FtrackServer(handler_paths)
server.run_server()
...
"""
# set Ftrack logging to Warning only - OPTIONAL
ftrack_log = logging.getLogger("ftrack_api")
ftrack_log.setLevel(logging.WARNING)
self.stopped = True
self.is_running = False
self.handler_paths = handler_paths or []
def stop_session(self):
self.stopped = True
if self.session.event_hub.connected is True:
self.session.event_hub.disconnect()
self.session.close()
self.session = None
def set_files(self, paths):
# Iterate all paths
register_functions = []
for path in paths:
# Try to format path with environments
try:
path = path.format(**os.environ)
except BaseException:
pass
# Get all modules with functions
modules, crashed = modules_from_path(path)
for filepath, exc_info in crashed:
log.warning("Filepath load crashed {}.\n{}".format(
filepath, traceback.format_exception(*exc_info)
))
for filepath, module in modules:
register_function = None
for name, attr in module.__dict__.items():
if (
name == "register"
and isinstance(attr, types.FunctionType)
):
register_function = attr
break
if not register_function:
log.warning(
"\"{}\" - Missing register method".format(filepath)
)
continue
register_functions.append(
(filepath, register_function)
)
if not register_functions:
log.warning((
"There are no events with `register` function"
" in registered paths: \"{}\""
).format("| ".join(paths)))
for filepath, register_func in register_functions:
try:
register_func(self.session)
except Exception:
log.warning(
"\"{}\" - register was not successful".format(filepath),
exc_info=True
)
def set_handler_paths(self, paths):
self.handler_paths = paths
if self.is_running:
self.stop_session()
self.run_server()
elif not self.stopped:
self.run_server()
def run_server(self, session=None, load_files=True):
self.stopped = False
self.is_running = True
if not session:
session = ftrack_api.Session(auto_connect_event_hub=True)
# Wait until session has connected event hub
if session._auto_connect_event_hub_thread:
# Use timeout from session (since ftrack-api 2.1.0)
timeout = getattr(session, "request_timeout", 60)
started = time.time()
while not session.event_hub.connected:
if (time.time() - started) > timeout:
raise RuntimeError((
"Connection to Ftrack was not created in {} seconds"
).format(timeout))
time.sleep(0.1)
self.session = session
if load_files:
if not self.handler_paths:
log.warning((
"Paths to event handlers are not set."
" Ftrack server won't launch."
))
self.is_running = False
return
self.set_files(self.handler_paths)
msg = "Registration of event handlers has finished!"
log.info(len(msg) * "*")
log.info(msg)
# keep event_hub on session running
self.session.event_hub.wait()
self.is_running = False
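# Editorial usage sketch: `set_files` imports every module found on the
# handler paths and calls its module-level `register(session)` function.
# A minimal handler module could look like this (file name illustrative):
#
#     # example_handler.py
#     def register(session):
#         def on_update(event):
#             print("ftrack update:", event["topic"])
#         session.event_hub.subscribe("topic=ftrack.update", on_update)
#
# The server is then started with:
#
#     server = FtrackServer(handler_paths=["/path/to/handlers"])
#     server.run_server()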

View file

@ -0,0 +1,412 @@
import os
import sys
import logging
import getpass
import atexit
import threading
import datetime
import time
import queue
import appdirs
import pymongo
import requests
import ftrack_api
import ftrack_api.session
import ftrack_api.cache
import ftrack_api.operation
import ftrack_api._centralized_storage_scenario
import ftrack_api.event
from ftrack_api.logging import LazyLogMessage as L
try:
from weakref import WeakMethod
except ImportError:
from ftrack_api._weakref import WeakMethod
from openpype_modules.ftrack.lib import get_ftrack_event_mongo_info
from openpype.lib import OpenPypeMongoConnection
from openpype.api import Logger
TOPIC_STATUS_SERVER = "openpype.event.server.status"
TOPIC_STATUS_SERVER_RESULT = "openpype.event.server.status.result"
def check_ftrack_url(url, log_errors=True):
"""Checks if Ftrack server is responding"""
if not url:
print('ERROR: Ftrack URL is not set!')
return None
url = url.strip('/ ')
if 'http' not in url:
if url.endswith('ftrackapp.com'):
url = 'https://' + url
else:
url = 'https://{0}.ftrackapp.com'.format(url)
try:
result = requests.get(url, allow_redirects=False)
except requests.exceptions.RequestException:
if log_errors:
print('ERROR: Entered Ftrack URL is not accessible!')
return False
if (result.status_code != 200 or 'FTRACK_VERSION' not in result.headers):
if log_errors:
print('ERROR: Entered Ftrack URL is not accessible!')
return False
print('DEBUG: Ftrack server {} is accessible.'.format(url))
return url
class SocketBaseEventHub(ftrack_api.event.hub.EventHub):
hearbeat_msg = b"hearbeat"
heartbeat_callbacks = []
def __init__(self, *args, **kwargs):
self.sock = kwargs.pop("sock")
super(SocketBaseEventHub, self).__init__(*args, **kwargs)
def _handle_packet(self, code, packet_identifier, path, data):
"""Override `_handle_packet` which extend heartbeat"""
code_name = self._code_name_mapping[code]
if code_name == "heartbeat":
# Reply with heartbeat.
for callback in self.heartbeat_callbacks:
callback()
self.sock.sendall(self.hearbeat_msg)
return self._send_packet(self._code_name_mapping["heartbeat"])
return super(SocketBaseEventHub, self)._handle_packet(
code, packet_identifier, path, data
)
class StatusEventHub(SocketBaseEventHub):
def _handle_packet(self, code, packet_identifier, path, data):
"""Override `_handle_packet` which extend heartbeat"""
code_name = self._code_name_mapping[code]
if code_name == "connect":
event = ftrack_api.event.base.Event(
topic="openpype.status.started",
data={},
source={
"id": self.id,
"user": {"username": self._api_user}
}
)
self._event_queue.put(event)
return super(StatusEventHub, self)._handle_packet(
code, packet_identifier, path, data
)
class StorerEventHub(SocketBaseEventHub):
hearbeat_msg = b"storer"
def _handle_packet(self, code, packet_identifier, path, data):
"""Override `_handle_packet` which extend heartbeat"""
code_name = self._code_name_mapping[code]
if code_name == "connect":
event = ftrack_api.event.base.Event(
topic="openpype.storer.started",
data={},
source={
"id": self.id,
"user": {"username": self._api_user}
}
)
self._event_queue.put(event)
return super(StorerEventHub, self)._handle_packet(
code, packet_identifier, path, data
)
class ProcessEventHub(SocketBaseEventHub):
hearbeat_msg = b"processor"
is_collection_created = False
pypelog = Logger().get_logger("Session Processor")
def __init__(self, *args, **kwargs):
self.mongo_url = None
self.dbcon = None
super(ProcessEventHub, self).__init__(*args, **kwargs)
def prepare_dbcon(self):
try:
database_name, collection_name = get_ftrack_event_mongo_info()
mongo_client = OpenPypeMongoConnection.get_mongo_client()
self.dbcon = mongo_client[database_name][collection_name]
self.mongo_client = mongo_client
except pymongo.errors.AutoReconnect:
self.pypelog.error((
"Mongo server \"{}\" is not responding, exiting."
).format(OpenPypeMongoConnection.get_default_mongo_url()))
sys.exit(0)
except pymongo.errors.OperationFailure:
self.pypelog.error((
"Error with Mongo access, probably permissions."
"Check if exist database with name \"{}\""
" and collection \"{}\" inside."
).format(self.database, self.collection_name))
self.sock.sendall(b"MongoError")
sys.exit(0)
def wait(self, duration=None):
"""Overridden wait
Events are loaded from MongoDB when the queue is empty. A handled
event is marked as processed in MongoDB.
"""
started = time.time()
self.prepare_dbcon()
while True:
try:
event = self._event_queue.get(timeout=0.1)
except queue.Empty:
if not self.load_events():
time.sleep(0.5)
else:
try:
self._handle(event)
mongo_id = event["data"].get("_event_mongo_id")
if mongo_id is None:
continue
self.dbcon.update_one(
{"_id": mongo_id},
{"$set": {"pype_data.is_processed": True}}
)
except pymongo.errors.AutoReconnect:
self.pypelog.error((
"Mongo server \"{}\" is not responding, exiting."
).format(OpenPypeMongoConnection.get_default_mongo_url()))
sys.exit(0)
# Additional special processing of events.
if event['topic'] == 'ftrack.meta.disconnected':
break
if duration is not None:
if (time.time() - started) > duration:
break
def load_events(self):
"""Load not processed events sorted by stored date"""
ago_date = datetime.datetime.now() - datetime.timedelta(days=3)
self.dbcon.delete_many({
"pype_data.stored": {"$lte": ago_date},
"pype_data.is_processed": True
})
not_processed_events = self.dbcon.find(
{"pype_data.is_processed": False}
).sort(
[("pype_data.stored", pymongo.ASCENDING)]
)
found = False
for event_data in not_processed_events:
new_event_data = {
k: v for k, v in event_data.items()
if k not in ["_id", "pype_data"]
}
try:
event = ftrack_api.event.base.Event(**new_event_data)
event["data"]["_event_mongo_id"] = event_data["_id"]
except Exception:
self.logger.exception(L(
'Failed to convert payload into event: {0}',
event_data
))
continue
found = True
self._event_queue.put(event)
return found
def _handle_packet(self, code, packet_identifier, path, data):
"""Override `_handle_packet` which skip events and extend heartbeat"""
code_name = self._code_name_mapping[code]
if code_name == "event":
return
return super()._handle_packet(code, packet_identifier, path, data)
class CustomEventHubSession(ftrack_api.session.Session):
'''An isolated session for interaction with an ftrack server.'''
def __init__(
self, server_url=None, api_key=None, api_user=None, auto_populate=True,
plugin_paths=None, cache=None, cache_key_maker=None,
auto_connect_event_hub=False, schema_cache_path=None,
plugin_arguments=None, timeout=60, **kwargs
):
self.kwargs = kwargs
super(ftrack_api.session.Session, self).__init__()
self.logger = logging.getLogger(
__name__ + '.' + self.__class__.__name__
)
self._closed = False
if server_url is None:
server_url = os.environ.get('FTRACK_SERVER')
if not server_url:
raise TypeError(
'Required "server_url" not specified. Pass as argument or set '
'in environment variable FTRACK_SERVER.'
)
self._server_url = server_url
if api_key is None:
api_key = os.environ.get(
'FTRACK_API_KEY',
# Backwards compatibility
os.environ.get('FTRACK_APIKEY')
)
if not api_key:
raise TypeError(
'Required "api_key" not specified. Pass as argument or set in '
'environment variable FTRACK_API_KEY.'
)
self._api_key = api_key
if api_user is None:
api_user = os.environ.get('FTRACK_API_USER')
if not api_user:
try:
api_user = getpass.getuser()
except Exception:
pass
if not api_user:
raise TypeError(
'Required "api_user" not specified. Pass as argument, set in '
'environment variable FTRACK_API_USER or one of the standard '
'environment variables used by Python\'s getpass module.'
)
self._api_user = api_user
# Currently pending operations.
self.recorded_operations = ftrack_api.operation.Operations()
self.record_operations = True
self.cache_key_maker = cache_key_maker
if self.cache_key_maker is None:
self.cache_key_maker = ftrack_api.cache.StringKeyMaker()
# Enforce always having a memory cache at top level so that the same
# in-memory instance is returned from session.
self.cache = ftrack_api.cache.LayeredCache([
ftrack_api.cache.MemoryCache()
])
if cache is not None:
if callable(cache):
cache = cache(self)
if cache is not None:
self.cache.caches.append(cache)
self._managed_request = None
self._request = requests.Session()
self._request.auth = ftrack_api.session.SessionAuthentication(
self._api_key, self._api_user
)
self.request_timeout = timeout
self.auto_populate = auto_populate
# Fetch server information and in doing so also check credentials.
self._server_information = self._fetch_server_information()
# Now check compatibility of server based on retrieved information.
self.check_server_compatibility()
# Construct event hub and load plugins.
self._event_hub = self._create_event_hub()
self._auto_connect_event_hub_thread = None
if auto_connect_event_hub:
# Connect to event hub in background thread so as not to block main
# session usage waiting for event hub connection.
self._auto_connect_event_hub_thread = threading.Thread(
target=self._event_hub.connect
)
self._auto_connect_event_hub_thread.daemon = True
self._auto_connect_event_hub_thread.start()
# Register to auto-close session on exit.
atexit.register(WeakMethod(self.close))
self._plugin_paths = plugin_paths
if self._plugin_paths is None:
self._plugin_paths = os.environ.get(
'FTRACK_EVENT_PLUGIN_PATH', ''
).split(os.pathsep)
self._discover_plugins(plugin_arguments=plugin_arguments)
# TODO: Make schemas read-only and non-mutable (or at least without
# rebuilding types)?
if schema_cache_path is not False:
if schema_cache_path is None:
schema_cache_path = appdirs.user_cache_dir()
schema_cache_path = os.environ.get(
'FTRACK_API_SCHEMA_CACHE_PATH', schema_cache_path
)
schema_cache_path = os.path.join(
schema_cache_path, 'ftrack_api_schema_cache.json'
)
self.schemas = self._load_schemas(schema_cache_path)
self.types = self._build_entity_type_classes(self.schemas)
ftrack_api._centralized_storage_scenario.register(self)
self._configure_locations()
self.event_hub.publish(
ftrack_api.event.base.Event(
topic='ftrack.api.session.ready',
data=dict(
session=self
)
),
synchronous=True
)
def _create_event_hub(self):
return ftrack_api.event.hub.EventHub(
self._server_url,
self._api_user,
self._api_key
)
class SocketSession(CustomEventHubSession):
def _create_event_hub(self):
self.sock = self.kwargs["sock"]
return self.kwargs["Eventhub"](
self._server_url,
self._api_user,
self._api_key,
sock=self.sock
)
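# Editorial wiring sketch: a spawned subprocess is expected to build a
# `SocketSession` with one of the custom hubs; the port is normally passed
# to the subprocess by `SocketThread` as a CLI argument.
#
#     import socket
#     sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
#     sock.connect(("localhost", port))
#     session = SocketSession(
#         auto_connect_event_hub=True,
#         sock=sock,
#         Eventhub=ProcessEventHub
#     )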

View file

@ -0,0 +1,199 @@
import os
import sys
import time
import socket
import threading
import traceback
import subprocess
from openpype.api import Logger
from openpype.lib import get_openpype_execute_args
class SocketThread(threading.Thread):
"""Thread that checks suprocess of storer of processor of events"""
MAX_TIMEOUT = int(os.environ.get("OPENPYPE_FTRACK_SOCKET_TIMEOUT", 45))
def __init__(self, name, port, filepath, additional_args=[]):
super(SocketThread, self).__init__()
self.log = Logger().get_logger(self.__class__.__name__)
self.setName(name)
self.name = name
self.port = port
self.filepath = filepath
self.additional_args = additional_args
self.sock = None
self.subproc = None
self.connection = None
self._is_running = False
self.finished = False
self.mongo_error = False
self._temp_data = {}
def stop(self):
self._is_running = False
def run(self):
self._is_running = True
time_socket = time.time()
# Create a TCP/IP socket
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.sock = sock
# Bind the socket to the port - skip already used ports
while True:
try:
server_address = ("localhost", self.port)
sock.bind(server_address)
break
except OSError:
self.port += 1
self.log.debug(
"Running Socked thread on {}:{}".format(*server_address)
)
env = os.environ.copy()
env["OPENPYPE_PROCESS_MONGO_ID"] = str(Logger.mongo_process_id)
# OpenPype executable (with path to start script if not build)
args = get_openpype_execute_args(
# Add `run` command
"run",
self.filepath,
*self.additional_args,
str(self.port)
)
kwargs = {
"env": env,
"stdin": subprocess.PIPE
}
if not sys.stdout:
# Redirect to devnull if stdout is None
kwargs["stdout"] = subprocess.DEVNULL
kwargs["stderr"] = subprocess.DEVNULL
self.subproc = subprocess.Popen(args, **kwargs)
# Listen for incoming connections
sock.listen(1)
sock.settimeout(1.0)
while True:
if not self._is_running:
break
try:
connection, client_address = sock.accept()
time_socket = time.time()
connection.settimeout(1.0)
self.connection = connection
except socket.timeout:
if (time.time() - time_socket) > self.MAX_TIMEOUT:
self.log.error("Connection timeout passed. Terminating.")
self._is_running = False
self.subproc.terminate()
break
continue
try:
time_con = time.time()
# Receive the data in small chunks and retransmit it
while True:
try:
if not self._is_running:
break
data = None
try:
data = self.get_data_from_con(connection)
time_con = time.time()
except socket.timeout:
if (time.time() - time_con) > self.MAX_TIMEOUT:
self.log.error(
"Connection timeout passed. Terminating."
)
self._is_running = False
self.subproc.terminate()
break
continue
except ConnectionResetError:
self._is_running = False
break
self._handle_data(connection, data)
except Exception as exc:
self.log.error(
"Event server process failed", exc_info=True
)
finally:
# Clean up the connection
connection.close()
if self.subproc.poll() is None:
self.subproc.terminate()
self.finished = True
def get_data_from_con(self, connection):
return connection.recv(16)
def _handle_data(self, connection, data):
if not data:
return
if data == b"MongoError":
self.mongo_error = True
connection.sendall(data)
class StatusSocketThread(SocketThread):
process_name_mapping = {
b"RestartS": "storer",
b"RestartP": "processor",
b"RestartM": "main"
}
def __init__(self, *args, **kwargs):
self.process_threads = {}
self.stop_subprocess = False
super(StatusSocketThread, self).__init__(*args, **kwargs)
def set_process(self, process_name, thread):
try:
if not self.subproc:
self.process_threads[process_name] = None
return
if (
process_name in self.process_threads and
self.process_threads[process_name] == thread
):
return
self.process_threads[process_name] = thread
self.subproc.stdin.write(
str.encode("reset:{}\r\n".format(process_name))
)
self.subproc.stdin.flush()
except Exception:
print("Could not set thread in StatusSocketThread")
traceback.print_exception(*sys.exc_info())
def _handle_data(self, connection, data):
if not data:
return
process_name = self.process_name_mapping.get(data)
if process_name:
if process_name == "main":
self.stop_subprocess = True
else:
subp = self.process_threads.get(process_name)
if subp:
subp.stop()
connection.sendall(data)
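# Editorial lifecycle sketch for `SocketThread`; the script path is a
# placeholder. Stopping the thread ends the accept/receive loop and the
# spawned subprocess is terminated when the thread finishes.
#
#     thread = SocketThread("storer", 10001, "/path/to/event_storer.py")
#     thread.start()
#     ...
#     thread.stop()
#     thread.join()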

View file

@ -0,0 +1,162 @@
import os
import ftrack_api
from openpype.api import get_project_settings
from openpype.lib import PostLaunchHook
class PostFtrackHook(PostLaunchHook):
order = None
def execute(self):
project_name = self.data.get("project_name")
asset_name = self.data.get("asset_name")
task_name = self.data.get("task_name")
missing_context_keys = set()
if not project_name:
missing_context_keys.add("project_name")
if not asset_name:
missing_context_keys.add("asset_name")
if not task_name:
missing_context_keys.add("task_name")
if missing_context_keys:
missing_keys_str = ", ".join([
"\"{}\"".format(key) for key in missing_context_keys
])
self.log.debug("Hook {} skipped. Missing data keys: {}".format(
self.__class__.__name__, missing_keys_str
))
return
required_keys = ("FTRACK_SERVER", "FTRACK_API_USER", "FTRACK_API_KEY")
for key in required_keys:
if not os.environ.get(key):
self.log.debug((
"Missing required environment \"{}\""
" for Ftrack after launch procedure."
).format(key))
return
try:
session = ftrack_api.Session(auto_connect_event_hub=True)
self.log.debug("Ftrack session created")
except Exception:
self.log.warning("Couldn't create Ftrack session")
return
try:
entity = self.find_ftrack_task_entity(
session, project_name, asset_name, task_name
)
if entity:
self.ftrack_status_change(session, entity, project_name)
except Exception:
self.log.warning(
"Couldn't finish Ftrack procedure.", exc_info=True
)
return
finally:
session.close()
def find_ftrack_task_entity(
self, session, project_name, asset_name, task_name
):
project_entity = session.query(
"Project where full_name is \"{}\"".format(project_name)
).first()
if not project_entity:
self.log.warning(
"Couldn't find project \"{}\" in Ftrack.".format(project_name)
)
return
potential_task_entities = session.query((
"TypedContext where parent.name is \"{}\" and project_id is \"{}\""
).format(asset_name, project_entity["id"])).all()
filtered_entities = []
for _entity in potential_task_entities:
if (
_entity.entity_type.lower() == "task"
and _entity["name"] == task_name
):
filtered_entities.append(_entity)
if not filtered_entities:
self.log.warning((
"Couldn't find task \"{}\" under parent \"{}\" in Ftrack."
).format(task_name, asset_name))
return
if len(filtered_entities) > 1:
self.log.warning((
"Found more than one task \"{}\""
" under parent \"{}\" in Ftrack."
).format(task_name, asset_name))
return
return filtered_entities[0]
def ftrack_status_change(self, session, entity, project_name):
project_settings = get_project_settings(project_name)
status_update = project_settings["ftrack"]["events"]["status_update"]
if not status_update["enabled"]:
self.log.debug(
"Status changes are disabled for project \"{}\"".format(
project_name
)
)
return
status_mapping = status_update["mapping"]
if not status_mapping:
self.log.warning(
"Project \"{}\" does not have set status changes.".format(
project_name
)
)
return
actual_status = entity["status"]["name"].lower()
already_tested = set()
ent_path = "/".join(
[ent["name"] for ent in entity["link"]]
)
while True:
next_status_name = None
for key, value in status_mapping.items():
if key in already_tested:
continue
if actual_status in value or "__any__" in value:
if key != "__ignore__":
next_status_name = key
already_tested.add(key)
break
already_tested.add(key)
if next_status_name is None:
break
try:
query = "Status where name is \"{}\"".format(
next_status_name
)
status = session.query(query).one()
entity["status"] = status
session.commit()
self.log.debug("Changing status to \"{}\" <{}>".format(
next_status_name, ent_path
))
break
except Exception:
session.rollback()
msg = (
"Status \"{}\" in presets wasn't found"
" on Ftrack entity type \"{}\""
).format(next_status_name, entity.entity_type)
self.log.warning(msg)
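# Editorial example of a `status_update` mapping as consumed by the loop
# above; real values come from project settings. Keys are target status
# names, values are lists of current statuses that trigger the change:
# "__any__" matches every status, and statuses listed under "__ignore__"
# end the loop without a status change.
#
#     "mapping": {
#         "In Progress": ["__any__"],
#         "__ignore__": ["approved", "omitted"]
#     }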

View file

@ -0,0 +1,43 @@
import os
from openpype.lib import PreLaunchHook
from openpype_modules.ftrack import FTRACK_MODULE_DIR
class PrePython2Support(PreLaunchHook):
"""Add python ftrack api module for Python 2 to PYTHONPATH.
Path to vendor modules is added to the beginning of PYTHONPATH.
"""
def execute(self):
if not self.application.use_python_2:
return
self.log.info("Adding Ftrack Python 2 packages to PYTHONPATH.")
# Prepare vendor dir path
python_2_vendor = os.path.join(FTRACK_MODULE_DIR, "python2_vendor")
# Add Python 2 modules
python_paths = [
# `python-ftrack-api`
os.path.join(python_2_vendor, "ftrack-python-api", "source"),
# `arrow`
os.path.join(python_2_vendor, "arrow"),
# `builtins` from `python-future`
# - `python-future` is strictly a Python 2 module that causes crashes
# of Python 3 scripts executed through OpenPype (burnin script etc.)
os.path.join(python_2_vendor, "builtins"),
# `backports.functools_lru_cache`
os.path.join(
python_2_vendor, "backports.functools_lru_cache"
)
]
# Load PYTHONPATH from current launch context
python_path = self.launch_context.env.get("PYTHONPATH")
if python_path:
python_paths.append(python_path)
# Set new PYTHONPATH to launch context environments
self.launch_context.env["PYTHONPATH"] = os.pathsep.join(python_paths)

View file

@ -0,0 +1,53 @@
from .constants import (
CUST_ATTR_ID_KEY,
CUST_ATTR_AUTO_SYNC,
CUST_ATTR_GROUP,
CUST_ATTR_TOOLS,
CUST_ATTR_APPLICATIONS,
CUST_ATTR_INTENT
)
from .settings import (
get_ftrack_event_mongo_info
)
from .custom_attributes import (
default_custom_attributes_definition,
app_definitions_from_app_manager,
tool_definitions_from_app_manager,
get_openpype_attr,
query_custom_attributes
)
from . import avalon_sync
from . import credentials
from .ftrack_base_handler import BaseHandler
from .ftrack_event_handler import BaseEvent
from .ftrack_action_handler import BaseAction, ServerAction, statics_icon
__all__ = (
"CUST_ATTR_ID_KEY",
"CUST_ATTR_AUTO_SYNC",
"CUST_ATTR_GROUP",
"CUST_ATTR_TOOLS",
"CUST_ATTR_APPLICATIONS",
"get_ftrack_event_mongo_info",
"default_custom_attributes_definition",
"app_definitions_from_app_manager",
"tool_definitions_from_app_manager",
"get_openpype_attr",
"query_custom_attributes",
"avalon_sync",
"credentials",
"BaseHandler",
"BaseEvent",
"BaseAction",
"ServerAction",
"statics_icon"
)

File diff suppressed because it is too large

View file

@ -0,0 +1,14 @@
# Group name of custom attributes
CUST_ATTR_GROUP = "openpype"
# name of Custom attribute that stores mongo_id from avalon db
CUST_ATTR_ID_KEY = "avalon_mongo_id"
# Auto sync of project
CUST_ATTR_AUTO_SYNC = "avalon_auto_sync"
# Applications custom attribute name
CUST_ATTR_APPLICATIONS = "applications"
# Environment tools custom attribute
CUST_ATTR_TOOLS = "tools_env"
# Intent custom attribute name
CUST_ATTR_INTENT = "intent"

View file

@ -0,0 +1,105 @@
import os
import ftrack_api
try:
from urllib.parse import urlparse
except ImportError:
from urlparse import urlparse
from openpype.lib import OpenPypeSecureRegistry
USERNAME_KEY = "username"
API_KEY_KEY = "api_key"
def get_ftrack_hostname(ftrack_server=None):
if not ftrack_server:
ftrack_server = os.environ.get("FTRACK_SERVER")
if not ftrack_server:
return None
if "//" not in ftrack_server:
ftrack_server = "//" + ftrack_server
return urlparse(ftrack_server).hostname
def _get_ftrack_secure_key(hostname, key):
"""Secure item key for entered hostname."""
return "/".join(("ftrack", hostname, key))
def get_credentials(ftrack_server=None):
output = {
USERNAME_KEY: None,
API_KEY_KEY: None
}
hostname = get_ftrack_hostname(ftrack_server)
if not hostname:
return output
username_name = _get_ftrack_secure_key(hostname, USERNAME_KEY)
api_key_name = _get_ftrack_secure_key(hostname, API_KEY_KEY)
username_registry = OpenPypeSecureRegistry(username_name)
api_key_registry = OpenPypeSecureRegistry(api_key_name)
output[USERNAME_KEY] = username_registry.get_item(USERNAME_KEY, None)
output[API_KEY_KEY] = api_key_registry.get_item(API_KEY_KEY, None)
return output
def save_credentials(username, api_key, ftrack_server=None):
hostname = get_ftrack_hostname(ftrack_server)
username_name = _get_ftrack_secure_key(hostname, USERNAME_KEY)
api_key_name = _get_ftrack_secure_key(hostname, API_KEY_KEY)
# Clear credentials
clear_credentials(ftrack_server)
username_registry = OpenPypeSecureRegistry(username_name)
api_key_registry = OpenPypeSecureRegistry(api_key_name)
username_registry.set_item(USERNAME_KEY, username)
api_key_registry.set_item(API_KEY_KEY, api_key)
def clear_credentials(ftrack_server=None):
hostname = get_ftrack_hostname(ftrack_server)
username_name = _get_ftrack_secure_key(hostname, USERNAME_KEY)
api_key_name = _get_ftrack_secure_key(hostname, API_KEY_KEY)
username_registry = OpenPypeSecureRegistry(username_name)
api_key_registry = OpenPypeSecureRegistry(api_key_name)
current_username = username_registry.get_item(USERNAME_KEY, None)
current_api_key = api_key_registry.get_item(API_KEY_KEY, None)
if current_username is not None:
username_registry.delete_item(USERNAME_KEY)
if current_api_key is not None:
api_key_registry.delete_item(API_KEY_KEY)
def check_credentials(username, api_key, ftrack_server=None):
if not ftrack_server:
ftrack_server = os.environ.get("FTRACK_SERVER")
if not ftrack_server or not username or not api_key:
return False
try:
session = ftrack_api.Session(
server_url=ftrack_server,
api_key=api_key,
api_user=username
)
session.close()
except Exception:
return False
return True
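# Editorial round-trip sketch; the username and API key are placeholders.
#
#     server = "https://mystudio.ftrackapp.com"
#     save_credentials("john.doe", "xxxxxxxx-xxxx", ftrack_server=server)
#     cred = get_credentials(ftrack_server=server)
#     if check_credentials(
#         cred["username"], cred["api_key"], ftrack_server=server
#     ):
#         print("Stored ftrack credentials are valid")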

View file

@ -0,0 +1,56 @@
{
"show": {
"avalon_auto_sync": {
"label": "Avalon auto-sync",
"type": "boolean"
},
"library_project": {
"label": "Library Project",
"type": "boolean"
}
},
"is_hierarchical": {
"fps": {
"label": "FPS",
"type": "number",
"config": {"isdecimal": true}
},
"clipIn": {
"label": "Clip in",
"type": "number"
},
"clipOut": {
"label": "Clip out",
"type": "number"
},
"frameStart": {
"label": "Frame start",
"type": "number"
},
"frameEnd": {
"label": "Frame end",
"type": "number"
},
"resolutionWidth": {
"label": "Resolution Width",
"type": "number"
},
"resolutionHeight": {
"label": "Resolution Height",
"type": "number"
},
"pixelAspect": {
"label": "Pixel aspect",
"type": "number",
"config": {"isdecimal": true}
},
"handleStart": {
"label": "Frame handles start",
"type": "number"
},
"handleEnd": {
"label": "Frame handles end",
"type": "number"
}
}
}

View file

@ -0,0 +1,147 @@
import os
import json
from .constants import CUST_ATTR_GROUP
def default_custom_attributes_definition():
json_file_path = os.path.join(
os.path.dirname(os.path.abspath(__file__)),
"custom_attributes.json"
)
with open(json_file_path, "r") as json_stream:
data = json.load(json_stream)
return data
def app_definitions_from_app_manager(app_manager):
_app_definitions = []
for app_name, app in app_manager.applications.items():
if app.enabled:
_app_definitions.append(
(app_name, app.full_label)
)
# Sort items by label
app_definitions = []
for key, label in sorted(_app_definitions, key=lambda item: item[1]):
app_definitions.append({key: label})
if not app_definitions:
app_definitions.append({"empty": "< Empty >"})
return app_definitions
def tool_definitions_from_app_manager(app_manager):
_tools_data = []
for tool_name, tool in app_manager.tools.items():
_tools_data.append(
(tool_name, tool.label)
)
# Sort items by label
tools_data = []
for key, label in sorted(_tools_data, key=lambda item: item[1]):
tools_data.append({key: label})
# Make sure there is at least one item
if not tools_data:
tools_data.append({"empty": "< Empty >"})
return tools_data
def get_openpype_attr(session, split_hierarchical=True, query_keys=None):
custom_attributes = []
hier_custom_attributes = []
if not query_keys:
query_keys = [
"id",
"entity_type",
"object_type_id",
"is_hierarchical",
"default"
]
# TODO remove deprecated "pype" group from query
cust_attrs_query = (
"select {}"
" from CustomAttributeConfiguration"
# Kept `pype` for backwards compatibility
" where group.name in (\"pype\", \"{}\")"
).format(", ".join(query_keys), CUST_ATTR_GROUP)
all_avalon_attr = session.query(cust_attrs_query).all()
for cust_attr in all_avalon_attr:
if split_hierarchical and cust_attr["is_hierarchical"]:
hier_custom_attributes.append(cust_attr)
continue
custom_attributes.append(cust_attr)
if split_hierarchical:
# return tuple
return custom_attributes, hier_custom_attributes
return custom_attributes
def join_query_keys(keys):
"""Helper to join keys to query."""
return ",".join(["\"{}\"".format(key) for key in keys])
def query_custom_attributes(
session, conf_ids, entity_ids, only_set_values=False
):
"""Query custom attribute values from ftrack database.
The result of the underlying ftrack call may differ based on the used
table name and the version of the ftrack server.
For hierarchical attributes you should always use `only_set_values=True`,
otherwise the result will be the default value of the custom attribute
and it would not be possible to tell whether the value is set on the
entity or the default is used.
Args:
session(ftrack_api.Session): Connected ftrack session.
conf_ids(list, set, tuple): Configuration (attribute) ids which are
queried.
entity_ids(list, set, tuple): Entity ids for which are values queried.
only_set_values(bool): Entities that don't have an explicitly set
value won't return a value. If set to False, the default custom
attribute value is returned when the value is not set.
"""
output = []
# Just skip
if not conf_ids or not entity_ids:
return output
if only_set_values:
table_name = "CustomAttributeValue"
else:
table_name = "ContextCustomAttributeValue"
# Prepare values to query
attributes_joined = join_query_keys(conf_ids)
attributes_len = len(conf_ids)
# Query values in chunks
chunk_size = int(5000 / attributes_len)
# Make sure entity_ids is `list` for chunk selection
entity_ids = list(entity_ids)
for idx in range(0, len(entity_ids), chunk_size):
entity_ids_joined = join_query_keys(
entity_ids[idx:idx + chunk_size]
)
output.extend(
session.query(
(
"select value, entity_id from {}"
" where entity_id in ({}) and configuration_id in ({})"
).format(
table_name,
entity_ids_joined,
attributes_joined
)
).all()
)
return output
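# Editorial usage sketch; the entity id is a placeholder and the session
# is assumed to be connected. Hierarchical attributes are queried with
# `only_set_values=True`, as recommended in the docstring above.
#
#     _, hier_attrs = get_openpype_attr(session)
#     conf_ids = [attr["id"] for attr in hier_attrs]
#     for item in query_custom_attributes(
#         session, conf_ids, ["<task-id>"], only_set_values=True
#     ):
#         print(item["entity_id"], item["value"])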

View file

@ -0,0 +1,431 @@
import os
from .ftrack_base_handler import BaseHandler
def statics_icon(*icon_statics_file_parts):
statics_server = os.environ.get("OPENPYPE_STATICS_SERVER")
if not statics_server:
return None
return "/".join((statics_server, *icon_statics_file_parts))
class BaseAction(BaseHandler):
'''Custom Action base class
`label` a descriptive string identifying your action.
`variant` To group actions together, give them the same
label and specify a unique variant per action.
`identifier` a unique identifier for your action.
`description` a verbose descriptive text for your action
'''
label = None
variant = None
identifier = None
description = None
icon = None
type = 'Action'
_discover_identifier = None
_launch_identifier = None
settings_frack_subkey = "user_handlers"
settings_enabled_key = "enabled"
def __init__(self, session):
'''Expects a ftrack_api.Session instance'''
if self.label is None:
raise ValueError('Action missing label.')
if self.identifier is None:
raise ValueError('Action missing identifier.')
super().__init__(session)
@property
def discover_identifier(self):
if self._discover_identifier is None:
self._discover_identifier = "{}.{}".format(
self.identifier, self.process_identifier()
)
return self._discover_identifier
@property
def launch_identifier(self):
if self._launch_identifier is None:
self._launch_identifier = "{}.{}".format(
self.identifier, self.process_identifier()
)
return self._launch_identifier
def register(self):
'''
Registers the action, subscribing to the discover and launch topics.
- highest priority event will show last
'''
self.session.event_hub.subscribe(
'topic=ftrack.action.discover and source.user.username={0}'.format(
self.session.api_user
),
self._discover,
priority=self.priority
)
launch_subscription = (
'topic=ftrack.action.launch'
' and data.actionIdentifier={0}'
' and source.user.username={1}'
).format(
self.launch_identifier,
self.session.api_user
)
self.session.event_hub.subscribe(
launch_subscription,
self._launch
)
def _discover(self, event):
entities = self._translate_event(event)
if not entities:
return
accepts = self.discover(self.session, entities, event)
if not accepts:
return
self.log.debug(u'Discovering action with selection: {0}'.format(
event['data'].get('selection', [])
))
return {
'items': [{
'label': self.label,
'variant': self.variant,
'description': self.description,
'actionIdentifier': self.discover_identifier,
'icon': self.icon,
}]
}
def discover(self, session, entities, event):
'''Return true if we can handle the selected entities.
*session* is a `ftrack_api.Session` instance
*entities* is a list of tuples each containing the entity type and the
entity id. If the entity is hierarchical you will always get the
entity type TypedContext; once retrieved through a get operation you
will have the "real" entity type, e.g. Shot, Sequence
or Asset Build.
*event* the unmodified original event
'''
return False
def _interface(self, session, entities, event):
interface = self.interface(session, entities, event)
if not interface:
return
if isinstance(interface, (tuple, list)):
return {"items": interface}
if isinstance(interface, dict):
if (
"items" in interface
or ("success" in interface and "message" in interface)
):
return interface
raise ValueError((
"Invalid interface output expected key: \"items\" or keys:"
" \"success\" and \"message\". Got: \"{}\""
).format(str(interface)))
raise ValueError(
"Invalid interface output type \"{}\"".format(
str(type(interface))
)
)
def interface(self, session, entities, event):
'''Return an interface if applicable or None
*session* is a `ftrack_api.Session` instance
*entities* is a list of tuples each containing the entity type and
the entity id. If the entity is hierarchical you will always get the
entity type TypedContext; once retrieved through a get operation you
will have the "real" entity type, e.g. Shot, Sequence
or Asset Build.
*event* the unmodified original event
'''
return None
def _launch(self, event):
entities = self._translate_event(event)
if not entities:
return
preactions_launched = self._handle_preactions(self.session, event)
if preactions_launched is False:
return
interface = self._interface(self.session, entities, event)
if interface:
return interface
response = self.launch(self.session, entities, event)
return self._handle_result(response)
def _handle_result(self, result):
'''Validate the returned result from the action callback'''
if isinstance(result, bool):
if result is True:
msg = 'Action {0} finished.'
else:
msg = 'Action {0} failed.'
return {
'success': result,
'message': msg.format(self.label)
}
if isinstance(result, dict):
if 'items' in result:
if not isinstance(result['items'], list):
raise ValueError('Invalid items format, must be list!')
else:
for key in ('success', 'message'):
if key not in result:
raise KeyError(
"Missing required key: {0}.".format(key)
)
return result
self.log.warning((
'Invalid result type \"{}\" must be bool or dictionary!'
).format(str(type(result))))
return result
@staticmethod
def roles_check(settings_roles, user_roles, default=True):
"""Compare roles from setting and user's roles.
Args:
settings_roles(list): List of role names from settings.
user_roles(list): User's lowered role names.
default(bool): Value returned when `settings_roles` is an empty list.
Returns:
bool: `True` if user has at least one role from settings or
default if `settings_roles` is empty.
"""
if not settings_roles:
return default
for role_name in settings_roles:
if role_name.lower() in user_roles:
return True
return False
@classmethod
def get_user_entity_from_event(cls, session, event):
"""Query user entity from event."""
not_set = object()
# Check if user is already stored in event data
user_entity = event["data"].get("user_entity", not_set)
if user_entity is not_set:
# Query user entity from event
user_info = event.get("source", {}).get("user", {})
user_id = user_info.get("id")
username = user_info.get("username")
if user_id:
user_entity = session.query(
"User where id is {}".format(user_id)
).first()
if not user_entity and username:
user_entity = session.query(
"User where username is {}".format(username)
).first()
event["data"]["user_entity"] = user_entity
return user_entity
@classmethod
def get_user_roles_from_event(cls, session, event):
"""Query user entity from event."""
not_set = object()
user_roles = event["data"].get("user_roles", not_set)
if user_roles is not_set:
user_roles = []
user_entity = cls.get_user_entity_from_event(session, event)
for role in user_entity["user_security_roles"]:
user_roles.append(role["security_role"]["name"].lower())
event["data"]["user_roles"] = user_roles
return user_roles
def get_project_name_from_event(self, session, event, entities):
"""Load or query and fill project entity from/to event data.
Project data are stored by ftrack id because in most cases it is
easier to access project id than project name.
Args:
session (ftrack_api.Session): Current session.
event (ftrack_api.Event): Processed event by session.
entities (list): Ftrack entities of selection.
"""
# Try to get project entity from event
project_name = event["data"].get("project_name")
if not project_name:
project_entity = self.get_project_from_entity(
entities[0], session
)
project_name = project_entity["full_name"]
event["data"]["project_name"] = project_name
return project_name
def get_ftrack_settings(self, session, event, entities):
project_name = self.get_project_name_from_event(
session, event, entities
)
project_settings = self.get_project_settings_from_event(
event, project_name
)
return project_settings["ftrack"]
def valid_roles(self, session, entities, event):
"""Validate user roles by settings.
Method requires to have set `settings_key` attribute.
"""
ftrack_settings = self.get_ftrack_settings(session, event, entities)
settings = (
ftrack_settings[self.settings_frack_subkey][self.settings_key]
)
if self.settings_enabled_key:
if not settings.get(self.settings_enabled_key, True):
return False
user_role_list = self.get_user_roles_from_event(session, event)
if not self.roles_check(settings.get("role_list"), user_role_list):
return False
return True
class LocalAction(BaseAction):
"""Action that warn user when more Processes with same action are running.
Action is launched all the time but if id does not match id of current
instanace then message is shown to user.
Handy for actions where matters if is executed on specific machine.
"""
_full_launch_identifier = None
@property
def discover_identifier(self):
if self._discover_identifier is None:
self._discover_identifier = "{}.{}".format(
self.identifier, self.process_identifier()
)
return self._discover_identifier
@property
def launch_identifier(self):
"""Catch all topics with same identifier."""
if self._launch_identifier is None:
self._launch_identifier = "{}.*".format(self.identifier)
return self._launch_identifier
@property
def full_launch_identifier(self):
"""Catch all topics with same identifier."""
if self._full_launch_identifier is None:
self._full_launch_identifier = "{}.{}".format(
self.identifier, self.process_identifier()
)
return self._full_launch_identifier
def _discover(self, event):
entities = self._translate_event(event)
if not entities:
return
accepts = self.discover(self.session, entities, event)
if not accepts:
return
self.log.debug("Discovering action with selection: {0}".format(
event["data"].get("selection", [])
))
return {
"items": [{
"label": self.label,
"variant": self.variant,
"description": self.description,
"actionIdentifier": self.discover_identifier,
"icon": self.icon,
}]
}
def _launch(self, event):
event_identifier = event["data"]["actionIdentifier"]
# Check if identifier is same
# - show message that action may not be triggered on this machine
if event_identifier != self.full_launch_identifier:
return {
"success": False,
"message": (
"There are running more OpenPype processes"
" where this action could be launched."
)
}
return super(LocalAction, self)._launch(event)
class ServerAction(BaseAction):
"""Action class meant to be used on event server.
Unlike `BaseAction`, roles are not checked on register but on discover.
For the same reason register is modified to not filter topics by username.
"""
settings_frack_subkey = "events"
@property
def discover_identifier(self):
return self.identifier
@property
def launch_identifier(self):
return self.identifier
def register(self):
"""Register subcription to Ftrack event hub."""
self.session.event_hub.subscribe(
"topic=ftrack.action.discover",
self._discover,
priority=self.priority
)
launch_subscription = (
"topic=ftrack.action.launch and data.actionIdentifier={0}"
).format(self.launch_identifier)
self.session.event_hub.subscribe(launch_subscription, self._launch)
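# Editorial sketch of a minimal `ServerAction` subclass; the identifier
# and `settings_key` values are illustrative and must match the project
# settings schema.
#
#     class ExampleServerAction(ServerAction):
#         label = "Example Server Action"
#         identifier = "example.server.action"
#         settings_key = "example_server_action"
#
#         def discover(self, session, entities, event):
#             return (
#                 len(entities) == 1
#                 and entities[0].entity_type == "Task"
#                 and self.valid_roles(session, entities, event)
#             )
#
#         def launch(self, session, entities, event):
#             return {"success": True, "message": "Example finished."}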

View file

@ -0,0 +1,720 @@
import os
import tempfile
import json
import functools
import uuid
import datetime
import traceback
import time
from openpype.api import Logger
from openpype.settings import get_project_settings
import ftrack_api
from openpype_modules.ftrack import ftrack_server
class MissingPermision(Exception):
def __init__(self, message=None):
if message is None:
message = 'Ftrack'
super().__init__(message)
class PreregisterException(Exception):
def __init__(self, message=None):
if not message:
message = "Pre-registration conditions were not met"
super().__init__(message)
class BaseHandler(object):
'''Custom Action base class
<label> - a descriptive string identifying your action.
<variant> - To group actions together, give them the same
label and specify a unique variant per action.
<identifier> - a unique identifier for the app.
<description> - a verbose descriptive text for your action
<icon> - icon in ftrack
'''
_process_id = None
# Default priority is 100
priority = 100
# Type is just for logging purpose (e.g.: Action, Event, Application,...)
type = 'No-type'
ignore_me = False
preactions = []
@staticmethod
def join_query_keys(keys):
"""Helper to join keys to query."""
return ",".join(["\"{}\"".format(key) for key in keys])
def __init__(self, session):
'''Expects a ftrack_api.Session instance'''
self.log = Logger().get_logger(self.__class__.__name__)
if not(
isinstance(session, ftrack_api.session.Session) or
isinstance(session, ftrack_server.lib.SocketSession)
):
raise Exception((
"Session object entered with args is instance of \"{}\""
" but expected instances are \"{}\" and \"{}\""
).format(
str(type(session)),
str(ftrack_api.session.Session),
str(ftrack_server.lib.SocketSession)
))
self._session = session
# Using decorator
self.register = self.register_decorator(self.register)
self.launch = self.launch_log(self.launch)
@staticmethod
def process_identifier():
"""Helper property to have """
if not BaseHandler._process_id:
BaseHandler._process_id = str(uuid.uuid4())
return BaseHandler._process_id
# Decorator
def register_decorator(self, func):
@functools.wraps(func)
def wrapper_register(*args, **kwargs):
if self.ignore_me:
return
label = getattr(self, "label", self.__class__.__name__)
variant = getattr(self, "variant", None)
if variant:
label = "{} {}".format(label, variant)
try:
self._preregister()
start_time = time.perf_counter()
func(*args, **kwargs)
end_time = time.perf_counter()
run_time = end_time - start_time
self.log.info((
'{} "{}" - Registered successfully ({:.4f}sec)'
).format(self.type, label, run_time))
except MissingPermision as MPE:
self.log.info((
'!{} "{}" - You\'re missing required {} permissions'
).format(self.type, label, str(MPE)))
except AssertionError as ae:
self.log.warning((
'!{} "{}" - {}'
).format(self.type, label, str(ae)))
except NotImplementedError:
self.log.error((
'{} "{}" - Register method is not implemented'
).format(self.type, label))
except PreregisterException as exc:
self.log.warning((
'{} "{}" - {}'
).format(self.type, label, str(exc)))
except Exception as e:
self.log.error('{} "{}" - Registration failed ({})'.format(
self.type, label, str(e))
)
return wrapper_register
# Decorator
def launch_log(self, func):
@functools.wraps(func)
def wrapper_launch(*args, **kwargs):
label = getattr(self, "label", self.__class__.__name__)
variant = getattr(self, "variant", None)
if variant:
label = "{} {}".format(label, variant)
self.log.info(('{} "{}": Launched').format(self.type, label))
try:
return func(*args, **kwargs)
except Exception as exc:
self.session.rollback()
msg = '{} "{}": Failed ({})'.format(self.type, label, str(exc))
self.log.error(msg, exc_info=True)
return {
'success': False,
'message': msg
}
finally:
self.log.info(('{} "{}": Finished').format(self.type, label))
return wrapper_launch
@property
def session(self):
'''Return current session.'''
return self._session
def reset_session(self):
self.session.reset()
def _preregister(self):
# Custom validations
result = self.preregister()
if result is None:
self.log.debug((
"\"{}\" 'preregister' method returned 'None'. Expected it"
" didn't fail and continue as preregister returned True."
).format(self.__class__.__name__))
return
if result is not True:
msg = None
if isinstance(result, str):
msg = result
raise PreregisterException(msg)
def preregister(self):
'''
Preregister conditions.
Registration continues if returns True.
'''
return True
def register(self):
'''
Registers the action, subscribing to the discover and launch topics.
Is decorated by `register_decorator`
'''
raise NotImplementedError()
def _translate_event(self, event, session=None):
'''Return *event* translated structure to be used with the API.'''
if session is None:
session = self.session
_entities = event["data"].get("entities_object", None)
if _entities is not None and not _entities:
return _entities
if (
_entities is None
or _entities[0].get(
"link", None
) == ftrack_api.symbol.NOT_SET
):
_entities = [
item
for item in self._get_entities(event)
if item is not None
]
event["data"]["entities_object"] = _entities
return _entities
def _get_entities(self, event, session=None, ignore=None):
entities = []
selection = event['data'].get('selection')
if not selection:
return entities
if ignore is None:
ignore = []
elif isinstance(ignore, str):
ignore = [ignore]
filtered_selection = []
for entity in selection:
if entity['entityType'] not in ignore:
filtered_selection.append(entity)
if not filtered_selection:
return entities
if session is None:
session = self.session
session._local_cache.clear()
for entity in filtered_selection:
entities.append(session.get(
self._get_entity_type(entity, session),
entity.get('entityId')
))
return entities
def _get_entity_type(self, entity, session=None):
'''Return translated entity type that can be used with API.'''
# Get entity type and make sure it is lower cased. Most places except
# the component tab in the Sidebar will use lower case notation.
entity_type = entity.get('entityType').replace('_', '').lower()
if session is None:
session = self.session
for schema in self.session.schemas:
alias_for = schema.get('alias_for')
if (
alias_for and isinstance(alias_for, str) and
alias_for.lower() == entity_type
):
return schema['id']
for schema in self.session.schemas:
if schema['id'].lower() == entity_type:
return schema['id']
raise ValueError(
'Unable to translate entity type: {0}.'.format(entity_type)
)
def _launch(self, event):
self.session.rollback()
self.session._local_cache.clear()
self.launch(self.session, event)
def launch(self, session, event):
'''Callback method for the custom action.
return either a bool ( True if successful or False if the action failed )
or a dictionary with the keys `message` and `success`, the message should be a
string and will be displayed as feedback to the user, success should be a bool,
True if successful or False if the action failed.
*session* is a `ftrack_api.Session` instance
*entities* is a list of tuples each containing the entity type and the entity id.
If the entity is hierarchical you will always get the entity
type TypedContext; once retrieved through a get operation you
will have the "real" entity type, e.g. Shot, Sequence
or Asset Build.
*event* the unmodified original event
'''
raise NotImplementedError()
def _handle_preactions(self, session, event):
# If preactions are not set
if len(self.preactions) == 0:
return True
# If no selection
selection = event.get('data', {}).get('selection', None)
if (selection is None):
return False
# If preactions were already started
if event['data'].get('preactions_launched', None) is True:
return True
# Launch preactions
for preaction in self.preactions:
self.trigger_action(preaction, event)
# Relaunch this action
additional_data = {"preactions_launched": True}
self.trigger_action(
self.identifier, event, additional_event_data=additional_data
)
return False
def _handle_result(self, result):
'''Validate the returned result from the action callback'''
if isinstance(result, bool):
if result is True:
result = {
'success': result,
'message': (
'{0} launched successfully.'.format(self.label)
)
}
else:
result = {
'success': result,
'message': (
'{0} launch failed.'.format(self.label)
)
}
elif isinstance(result, dict):
items = 'items' in result
if items is False:
for key in ('success', 'message'):
if key in result:
continue
raise KeyError(
'Missing required key: {0}.'.format(key)
)
return result
def show_message(self, event, input_message, result=False):
"""
Shows a message to the user who triggered the event
- event - used only as the source of the user id
- input_message - message that is shown to the user
- result - changes color of the message (based on ftrack settings)
- True = Violet
- False = Red
"""
if not isinstance(result, bool):
result = False
try:
message = str(input_message)
except Exception:
return
user_id = event['source']['user']['id']
target = (
'applicationId=ftrack.client.web and user.id="{0}"'
).format(user_id)
self.session.event_hub.publish(
ftrack_api.event.base.Event(
topic='ftrack.action.trigger-user-interface',
data=dict(
type='message',
success=result,
message=message
),
target=target
),
on_error='ignore'
)
def show_interface(
self, items, title="", event=None, user=None,
username=None, user_id=None, submit_btn_label=None
):
"""
Shows an interface to the user
- to identify the user, one of these args must be passed:
event, user, username, user_id
- 'items' must be a list containing Ftrack interface items
"""
if not any([event, user, username, user_id]):
raise TypeError((
'Missing argument: `show_interface` requires one of:'
' event (ftrack_api Event object),'
' user (ftrack_api User object),'
' username (string) or user_id (string)'
))
if event:
user_id = event['source']['user']['id']
elif user:
user_id = user['id']
else:
if user_id:
key = 'id'
value = user_id
else:
key = 'username'
value = username
user = self.session.query(
'User where {} is "{}"'.format(key, value)
).first()
if not user:
raise TypeError((
'Ftrack user with {} "{}" was not found!'
).format(key, value))
user_id = user['id']
target = (
'applicationId=ftrack.client.web and user.id="{0}"'
).format(user_id)
event_data = {
"type": "widget",
"items": items,
"title": title
}
if submit_btn_label:
event_data["submit_button_label"] = submit_btn_label
self.session.event_hub.publish(
ftrack_api.event.base.Event(
topic='ftrack.action.trigger-user-interface',
data=event_data,
target=target
),
on_error='ignore'
)
def show_interface_from_dict(
self, messages, title="", event=None,
user=None, username=None, user_id=None, submit_btn_label=None
):
if not messages:
self.log.debug("No messages to show! (messages dict is empty)")
return
items = []
splitter = {'type': 'label', 'value': '---'}
first = True
for key, value in messages.items():
if not first:
items.append(splitter)
else:
first = False
subtitle = {'type': 'label', 'value': '<h3>{}</h3>'.format(key)}
items.append(subtitle)
if isinstance(value, list):
for item in value:
message = {
'type': 'label', 'value': '<p>{}</p>'.format(item)
}
items.append(message)
else:
message = {'type': 'label', 'value': '<p>{}</p>'.format(value)}
items.append(message)
self.show_interface(
items, title, event, user, username, user_id, submit_btn_label
)
def trigger_action(
self, action_name, event=None, session=None,
selection=None, user_data=None,
topic="ftrack.action.launch", additional_event_data={},
on_error="ignore"
):
self.log.debug("Triggering action \"{}\" Begins".format(action_name))
if not session:
session = self.session
# Getting selection and user data
_selection = None
_user_data = None
if event:
_selection = event.get("data", {}).get("selection")
_user_data = event.get("source", {}).get("user")
if selection is not None:
_selection = selection
if user_data is not None:
_user_data = user_data
# Without selection and user data skip triggering
msg = "Can't trigger \"{}\" action without {}."
if _selection is None:
self.log.error(msg.format(action_name, "selection"))
return
if _user_data is None:
self.log.error(msg.format(action_name, "user data"))
return
_event_data = {
"actionIdentifier": action_name,
"selection": _selection
}
# Add additional data
if additional_event_data:
_event_data.update(additional_event_data)
# Create and trigger event
session.event_hub.publish(
ftrack_api.event.base.Event(
topic=topic,
data=_event_data,
source=dict(user=_user_data)
),
on_error=on_error
)
self.log.debug(
"Action \"{}\" Triggered successfully".format(action_name)
)
def trigger_event(
self, topic, event_data={}, session=None, source=None,
event=None, on_error="ignore"
):
if session is None:
session = self.session
if not source and event:
source = event.get("source")
# Create and trigger event
event = ftrack_api.event.base.Event(
topic=topic,
data=event_data,
source=source
)
session.event_hub.publish(event, on_error=on_error)
self.log.debug((
"Publishing event: {}"
).format(str(event.__dict__)))
def get_project_from_entity(self, entity, session=None):
low_entity_type = entity.entity_type.lower()
if low_entity_type == "project":
return entity
if "project" in entity:
# reviewsession, task(Task, Shot, Sequence,...)
return entity["project"]
if low_entity_type == "filecomponent":
entity = entity["version"]
low_entity_type = entity.entity_type.lower()
if low_entity_type == "assetversion":
asset = entity["asset"]
parent = None
if asset:
parent = asset["parent"]
if parent:
if parent.entity_type.lower() == "project":
return parent
if "project" in parent:
return parent["project"]
project_data = entity["link"][0]
if session is None:
session = self.session
return session.query(
"Project where id is {}".format(project_data["id"])
).one()
def get_project_settings_from_event(self, event, project_name):
"""Load or fill OpenPype's project settings from event data.
Project data are stored by ftrack id because in most cases it is
easier to access project id than project name.
Args:
event (ftrack_api.Event): Processed event by session.
project_entity (ftrack_api.Entity): Project entity.
"""
        project_settings_by_name = event["data"].get("project_settings")
        if not project_settings_by_name:
            project_settings_by_name = {}
            event["data"]["project_settings"] = project_settings_by_name
        project_settings = project_settings_by_name.get(project_name)
        if not project_settings:
            project_settings = get_project_settings(project_name)
            project_settings_by_name[project_name] = project_settings
return project_settings
@staticmethod
def get_entity_path(entity):
"""Return full hierarchical path to entity."""
return "/".join(
[ent["name"] for ent in entity["link"]]
)
@classmethod
def add_traceback_to_job(
cls, job, session, exc_info,
description=None,
component_name=None,
job_status=None
):
"""Add traceback file to a job.
Args:
            job (JobEntity): Job entity to which the traceback file is
                attached (created or queried with the passed session).
            session (Session): Ftrack session which was used to query/create
                the entered job.
            exc_info (tuple): Exception info (e.g. from `sys.exc_info()`).
            description (str): New job description describing what happened.
                Job description is not changed if not passed.
            component_name (str): Name of the component and default name of
                the downloaded file. Class name and current date time are
                used if not specified.
            job_status (str): Status to set on the job. Defaults to 'failed'.
"""
if description:
job_data = {
"description": description
}
job["data"] = json.dumps(job_data)
if not job_status:
job_status = "failed"
job["status"] = job_status
# Create temp file where traceback will be stored
temp_obj = tempfile.NamedTemporaryFile(
mode="w", prefix="openpype_ftrack_", suffix=".txt", delete=False
)
temp_obj.close()
temp_filepath = temp_obj.name
# Store traceback to file
result = traceback.format_exception(*exc_info)
with open(temp_filepath, "w") as temp_file:
temp_file.write("".join(result))
# Upload file with traceback to ftrack server and add it to job
if not component_name:
component_name = "{}_{}".format(
cls.__name__,
datetime.datetime.now().strftime("%y-%m-%d-%H%M")
)
cls.add_file_component_to_job(
job, session, temp_filepath, component_name
)
# Delete temp file
os.remove(temp_filepath)
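    # Typical usage sketch inside an action's exception handler (names are
    # illustrative):
    #
    #   try:
    #       do_work(session)
    #   except Exception:
    #       self.add_traceback_to_job(
    #           job, session, sys.exc_info(),
    #           description="Action failed. Download traceback."
    #       )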
@staticmethod
def add_file_component_to_job(job, session, filepath, basename=None):
"""Add filepath as downloadable component to job.
Args:
            job (JobEntity): Job entity to which the file is attached
                (created or queried with the passed session).
            session (Session): Ftrack session which was used to query/create
                the entered job.
            filepath (str): Path to the file which should be added to the job.
            basename (str): Name of the file as downloaded on the user's
                side. Must be without extension, otherwise the extension
                would be duplicated in the downloaded name. Basename of the
                entered path is used when not passed.
"""
# Make sure session's locations are configured
# - they can be deconfigured e.g. using `rollback` method
session._configure_locations()
# Query `ftrack.server` location where component will be stored
location = session.query(
"Location where name is \"ftrack.server\""
).one()
# Use filename as basename if not entered (must be without extension)
if basename is None:
basename = os.path.splitext(
os.path.basename(filepath)
)[0]
component = session.create_component(
filepath,
data={"name": basename},
location=location
)
session.create(
"JobComponent",
{
"component_id": component["id"],
"job_id": job["id"]
}
)
session.commit()
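    # Usage sketch (hypothetical path): the file would be downloadable from
    # the job as "report.json"; the basename is passed without extension.
    #
    #   self.add_file_component_to_job(
    #       job, session, "/tmp/report.json", basename="report"
    #   )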

View file

@ -0,0 +1,79 @@
import functools
from .ftrack_base_handler import BaseHandler
class BaseEvent(BaseHandler):
    '''Custom Event base class
    BaseEvent is based on the ftrack.update event
    - get entities from event
    To use a different event base:
    - override `register` and optionally the `_translate_event` method
    '''
type = 'Event'
# Decorator
def launch_log(self, func):
@functools.wraps(func)
def wrapper_launch(*args, **kwargs):
try:
func(*args, **kwargs)
except Exception as exc:
self.session.rollback()
self.log.error(
'Event "{}" Failed: {}'.format(
self.__class__.__name__, str(exc)
),
exc_info=True
)
return wrapper_launch
def register(self):
        '''Register the event, subscribing to the ftrack.update topic.'''
self.session.event_hub.subscribe(
'topic=ftrack.update',
self._launch,
priority=self.priority
)
def _translate_event(self, event, session=None):
'''Return *event* translated structure to be used with the API.'''
return self._get_entities(
event,
session,
ignore=['socialfeed', 'socialnotification']
)
def get_project_name_from_event(self, session, event, project_id):
"""Load or query and fill project entity from/to event data.
Project data are stored by ftrack id because in most cases it is
easier to access project id than project name.
Args:
session (ftrack_api.Session): Current session.
event (ftrack_api.Event): Processed event by session.
project_id (str): Ftrack project id.
"""
if not project_id:
raise ValueError(
"Entered `project_id` is not valid. {} ({})".format(
str(project_id), str(type(project_id))
)
)
# Try to get project entity from event
project_data = event["data"].get("project_data")
if not project_data:
project_data = {}
event["data"]["project_data"] = project_data
project_name = project_data.get(project_id)
if not project_name:
            # Query project entity and store its full name to event
project_entity = session.get("Project", project_id)
project_name = project_entity["full_name"]
event["data"]["project_data"][project_id] = project_name
return project_name

View file

@ -0,0 +1,7 @@
import os
def get_ftrack_event_mongo_info():
database_name = os.environ["OPENPYPE_DATABASE_NAME"]
collection_name = "ftrack_events"
return database_name, collection_name

View file

@ -0,0 +1,32 @@
import sys
import pyblish.api
import six
class IntegrateFtrackComments(pyblish.api.InstancePlugin):
"""Create comments in Ftrack."""
order = pyblish.api.IntegratorOrder
label = "Integrate Comments to Ftrack"
families = ["shot"]
enabled = False
def process(self, instance):
session = instance.context.data["ftrackSession"]
entity = session.query(
"Shot where name is \"{}\"".format(instance.data["item"].name())
).one()
notes = []
for comment in instance.data["comments"]:
notes.append(session.create("Note", {"content": comment}))
entity["notes"].extend(notes)
try:
session.commit()
except Exception:
tp, value, tb = sys.exc_info()
session.rollback()
six.reraise(tp, value, tb)

View file

@ -0,0 +1,210 @@
import os
import logging
import pyblish.api
import avalon.api
class CollectFtrackApi(pyblish.api.ContextPlugin):
""" Collects an ftrack session and the current task id. """
order = pyblish.api.CollectorOrder + 0.4999
label = "Collect Ftrack Api"
def process(self, context):
ftrack_log = logging.getLogger('ftrack_api')
ftrack_log.setLevel(logging.WARNING)
ftrack_log = logging.getLogger('ftrack_api_old')
ftrack_log.setLevel(logging.WARNING)
# Collect session
# NOTE Import python module here to know if import was successful
import ftrack_api
session = ftrack_api.Session(auto_connect_event_hub=False)
self.log.debug("Ftrack user: \"{0}\"".format(session.api_user))
# Collect task
project_name = avalon.api.Session["AVALON_PROJECT"]
asset_name = avalon.api.Session["AVALON_ASSET"]
task_name = avalon.api.Session["AVALON_TASK"]
# Find project entity
project_query = 'Project where full_name is "{0}"'.format(project_name)
self.log.debug("Project query: < {0} >".format(project_query))
project_entities = list(session.query(project_query).all())
if len(project_entities) == 0:
raise AssertionError(
"Project \"{0}\" not found in Ftrack.".format(project_name)
)
        # QUESTION Is it possible for this to happen?
elif len(project_entities) > 1:
raise AssertionError((
"Found more than one project with name \"{0}\" in Ftrack."
).format(project_name))
project_entity = project_entities[0]
self.log.debug("Project found: {0}".format(project_entity))
# Find asset entity
entity_query = (
'TypedContext where project_id is "{0}"'
' and name is "{1}"'
).format(project_entity["id"], asset_name)
self.log.debug("Asset entity query: < {0} >".format(entity_query))
asset_entities = []
for entity in session.query(entity_query).all():
# Skip tasks
if entity.entity_type.lower() != "task":
asset_entities.append(entity)
if len(asset_entities) == 0:
raise AssertionError((
"Entity with name \"{0}\" not found"
" in Ftrack project \"{1}\"."
).format(asset_name, project_name))
elif len(asset_entities) > 1:
raise AssertionError((
"Found more than one entity with name \"{0}\""
" in Ftrack project \"{1}\"."
).format(asset_name, project_name))
asset_entity = asset_entities[0]
self.log.debug("Asset found: {0}".format(asset_entity))
# Find task entity if task is set
if task_name:
task_query = (
'Task where name is "{0}" and parent_id is "{1}"'
).format(task_name, asset_entity["id"])
self.log.debug("Task entity query: < {0} >".format(task_query))
task_entity = session.query(task_query).first()
if not task_entity:
self.log.warning(
"Task entity with name \"{0}\" was not found.".format(
task_name
)
)
else:
self.log.debug("Task entity found: {0}".format(task_entity))
else:
task_entity = None
self.log.warning("Task name is not set.")
context.data["ftrackSession"] = session
context.data["ftrackPythonModule"] = ftrack_api
context.data["ftrackProject"] = project_entity
context.data["ftrackEntity"] = asset_entity
context.data["ftrackTask"] = task_entity
self.per_instance_process(context, asset_name, task_name)
def per_instance_process(
self, context, context_asset_name, context_task_name
):
instance_by_asset_and_task = {}
for instance in context:
self.log.debug(
"Checking entities of instance \"{}\"".format(str(instance))
)
instance_asset_name = instance.data.get("asset")
instance_task_name = instance.data.get("task")
if not instance_asset_name and not instance_task_name:
self.log.debug("Instance does not have set context keys.")
continue
elif instance_asset_name and instance_task_name:
if (
instance_asset_name == context_asset_name
and instance_task_name == context_task_name
):
self.log.debug((
"Instance's context is same as in publish context."
" Asset: {} | Task: {}"
).format(context_asset_name, context_task_name))
continue
asset_name = instance_asset_name
task_name = instance_task_name
elif instance_task_name:
if instance_task_name == context_task_name:
self.log.debug((
"Instance's context task is same as in publish"
" context. Task: {}"
).format(context_task_name))
continue
asset_name = context_asset_name
task_name = instance_task_name
elif instance_asset_name:
if instance_asset_name == context_asset_name:
self.log.debug((
"Instance's context asset is same as in publish"
" context. Asset: {}"
).format(context_asset_name))
continue
# Do not use context's task name
task_name = instance_task_name
asset_name = instance_asset_name
if asset_name not in instance_by_asset_and_task:
instance_by_asset_and_task[asset_name] = {}
if task_name not in instance_by_asset_and_task[asset_name]:
instance_by_asset_and_task[asset_name][task_name] = []
instance_by_asset_and_task[asset_name][task_name].append(instance)
if not instance_by_asset_and_task:
return
session = context.data["ftrackSession"]
project_entity = context.data["ftrackProject"]
asset_names = set()
for asset_name in instance_by_asset_and_task.keys():
asset_names.add(asset_name)
joined_asset_names = ",".join([
"\"{}\"".format(name)
for name in asset_names
])
entities = session.query((
"TypedContext where project_id is \"{}\" and name in ({})"
).format(project_entity["id"], joined_asset_names)).all()
entities_by_name = {
entity["name"]: entity
for entity in entities
}
for asset_name, by_task_data in instance_by_asset_and_task.items():
entity = entities_by_name.get(asset_name)
task_entity_by_name = {}
if not entity:
self.log.warning((
"Didn't find entity with name \"{}\" in Project \"{}\""
).format(asset_name, project_entity["full_name"]))
else:
task_entities = session.query((
"select id, name from Task where parent_id is \"{}\""
).format(entity["id"])).all()
for task_entity in task_entities:
task_name_low = task_entity["name"].lower()
task_entity_by_name[task_name_low] = task_entity
for task_name, instances in by_task_data.items():
task_entity = None
if task_name and entity:
task_entity = task_entity_by_name.get(task_name.lower())
for instance in instances:
instance.data["ftrackEntity"] = entity
instance.data["ftrackTask"] = task_entity
self.log.debug((
"Instance {} has own ftrack entities"
" as has different context. TypedContext: {} Task: {}"
).format(str(instance), str(entity), str(task_entity)))

View file

@ -0,0 +1,105 @@
"""
Requires:
none
Provides:
instance -> families ([])
"""
import pyblish.api
import avalon.api
from openpype.lib.plugin_tools import filter_profiles
class CollectFtrackFamily(pyblish.api.InstancePlugin):
"""
    Explicitly adds 'ftrack' to families to upload the instance to Ftrack.
    Selection is done by a combination of host/family/task names via
    profiles resolution.
    Triggered everywhere, checks the instance against configured profiles.
    Also checks advanced filtering which works on 'families', not on the
    main 'family', as some variants dynamically resolve the addition of
    ftrack based on 'families' (editorial drives it by presence of 'review').
"""
label = "Collect Ftrack Family"
order = pyblish.api.CollectorOrder + 0.4998
profiles = None
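    # Example `profiles` value as it may come from project settings
    # (hypothetical): the profile matching host/family/task decides whether
    # the "ftrack" family is added.
    #
    #   profiles = [
    #       {
    #           "hosts": ["maya"],
    #           "families": ["render"],
    #           "tasks": [],
    #           "add_ftrack_family": True,
    #           "advanced_filtering": []
    #       }
    #   ]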
def process(self, instance):
if not self.profiles:
self.log.warning("No profiles present for adding Ftrack family")
return
task_name = instance.data.get("task",
avalon.api.Session["AVALON_TASK"])
host_name = avalon.api.Session["AVALON_APP"]
family = instance.data["family"]
filtering_criteria = {
"hosts": host_name,
"families": family,
"tasks": task_name
}
profile = filter_profiles(self.profiles, filtering_criteria,
logger=self.log)
if profile:
families = instance.data.get("families")
add_ftrack_family = profile["add_ftrack_family"]
additional_filters = profile.get("advanced_filtering")
if additional_filters:
add_ftrack_family = self._get_add_ftrack_f_from_addit_filters(
additional_filters,
families,
add_ftrack_family
)
if add_ftrack_family:
self.log.debug("Adding ftrack family for '{}'".
format(instance.data.get("family")))
if families:
if "ftrack" not in families:
instance.data["families"].append("ftrack")
else:
instance.data["families"] = ["ftrack"]
def _get_add_ftrack_f_from_addit_filters(self,
additional_filters,
families,
add_ftrack_family):
"""
        Compares additional filters - working on instance's families.
        Triggered for more detailed filtering when the main family matches,
        but the content of 'families' actually matters.
        (For example 'review' in 'families' should result in adding to
        Ftrack.)
        Args:
            additional_filters (list) - list of filter dicts from Settings
            families (list) - subfamilies
            add_ftrack_family (bool) - add ftrack to families if True
"""
override_filter = None
override_filter_value = -1
for additional_filter in additional_filters:
filter_families = set(additional_filter["families"])
valid = filter_families <= set(families) # issubset
if not valid:
continue
value = len(filter_families)
if value > override_filter_value:
override_filter = additional_filter
override_filter_value = value
if override_filter:
add_ftrack_family = override_filter["add_ftrack_family"]
return add_ftrack_family

View file

@ -0,0 +1,23 @@
# -*- coding: utf-8 -*-
"""Collect default Deadline server."""
import pyblish.api
import os
class CollectLocalFtrackCreds(pyblish.api.ContextPlugin):
"""Collect default Royal Render path."""
order = pyblish.api.CollectorOrder + 0.01
label = "Collect local ftrack credentials"
targets = ["rr_control"]
def process(self, context):
if os.getenv("FTRACK_API_USER") and os.getenv("FTRACK_API_KEY") and \
os.getenv("FTRACK_SERVER"):
return
ftrack_module = context.data["openPypeModules"]["ftrack"]
if ftrack_module.enabled:
creds = ftrack_module.get_credentials()
os.environ["FTRACK_API_USER"] = creds[0]
os.environ["FTRACK_API_KEY"] = creds[1]
os.environ["FTRACK_SERVER"] = ftrack_module.ftrack_url

View file

@ -0,0 +1,63 @@
"""Loads publishing context from json and continues in publish process.
Requires:
anatomy -> context["anatomy"] *(pyblish.api.CollectorOrder - 0.11)
Provides:
context, instances -> All data from previous publishing process.
"""
import ftrack_api
import os
import pyblish.api
class CollectUsername(pyblish.api.ContextPlugin):
"""
Translates user email to Ftrack username.
    Emails in Ftrack are the same as in the company's Slack; the username
    is needed to load data to Ftrack.
Expects "pype.club" user created on Ftrack and FTRACK_BOT_API_KEY env
var set up.
"""
order = pyblish.api.CollectorOrder - 0.488
label = "Collect ftrack username"
hosts = ["webpublisher", "photoshop"]
targets = ["remotepublish", "filespublish", "tvpaint_worker"]
_context = None
def process(self, context):
self.log.info("CollectUsername")
os.environ["FTRACK_API_USER"] = os.environ["FTRACK_BOT_API_USER"]
os.environ["FTRACK_API_KEY"] = os.environ["FTRACK_BOT_API_KEY"]
# for publishes with studio processing
user_email = os.environ.get("USER_EMAIL")
self.log.debug("Email from env:: {}".format(user_email))
if not user_email:
# for basic webpublishes
for instance in context:
user_email = instance.data.get("user_email")
self.log.debug("Email from instance:: {}".format(user_email))
break
if not user_email:
self.log.info("No email found")
return
session = ftrack_api.Session(auto_connect_event_hub=False)
user = session.query("User where email like '{}'".format(user_email))
if not user:
raise ValueError(
"Couldn't find user with {} email".format(user_email))
username = user[0].get("username")
self.log.debug("Resolved ftrack username:: {}".format(username))
os.environ["FTRACK_API_USER"] = username

View file

@ -0,0 +1,466 @@
import os
import sys
import six
import pyblish.api
import clique
class IntegrateFtrackApi(pyblish.api.InstancePlugin):
""" Commit components to server. """
order = pyblish.api.IntegratorOrder+0.499
label = "Integrate Ftrack Api"
families = ["ftrack"]
def query(self, entitytype, data):
""" Generate a query expression from data supplied.
If a value is not a string, we'll add the id of the entity to the
query.
Args:
entitytype (str): The type of entity to query.
data (dict): The data to identify the entity.
Returns:
str: String query to use with "session.query"
"""
queries = []
if sys.version_info[0] < 3:
for key, value in data.iteritems():
if not isinstance(value, (basestring, int)):
self.log.info("value: {}".format(value))
if "id" in value.keys():
queries.append(
"{0}.id is \"{1}\"".format(key, value["id"])
)
else:
queries.append("{0} is \"{1}\"".format(key, value))
else:
for key, value in data.items():
if not isinstance(value, (str, int)):
self.log.info("value: {}".format(value))
if "id" in value.keys():
queries.append(
"{0}.id is \"{1}\"".format(key, value["id"])
)
else:
queries.append("{0} is \"{1}\"".format(key, value))
query = (
"select id from " + entitytype + " where " + " and ".join(queries)
)
self.log.debug(query)
return query
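    # Sketch of a generated expression (hypothetical input): dict values with
    # an "id" key are matched by id, plain values are matched directly.
    #
    #   self.query("Asset", {"name": "modelMain", "parent": {"id": "123"}})
    #   # -> 'select id from Asset where name is "modelMain"
    #   #     and parent.id is "123"'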
def _set_task_status(self, instance, task_entity, session):
project_entity = instance.context.data.get("ftrackProject")
if not project_entity:
self.log.info("Task status won't be set, project is not known.")
return
if not task_entity:
self.log.info("Task status won't be set, task is not known.")
return
status_name = instance.context.data.get("ftrackStatus")
if not status_name:
self.log.info("Ftrack status name is not set.")
return
self.log.debug(
"Ftrack status name will be (maybe) set to \"{}\"".format(
status_name
)
)
project_schema = project_entity["project_schema"]
task_statuses = project_schema.get_statuses(
"Task", task_entity["type_id"]
)
task_statuses_by_low_name = {
status["name"].lower(): status for status in task_statuses
}
status = task_statuses_by_low_name.get(status_name.lower())
if not status:
self.log.warning((
"Task status \"{}\" won't be set,"
" status is now allowed on task type \"{}\"."
).format(status_name, task_entity["type"]["name"]))
return
self.log.info("Setting task status to \"{}\"".format(status_name))
task_entity["status"] = status
try:
session.commit()
except Exception:
tp, value, tb = sys.exc_info()
session.rollback()
session._configure_locations()
six.reraise(tp, value, tb)
def process(self, instance):
session = instance.context.data["ftrackSession"]
context = instance.context
name = None
# If instance has set "ftrackEntity" or "ftrackTask" then use them from
# instance. Even if they are set to None. If they are set to None it
# has a reason. (like has different context)
if "ftrackEntity" in instance.data or "ftrackTask" in instance.data:
task = instance.data.get("ftrackTask")
parent = instance.data.get("ftrackEntity")
elif "ftrackEntity" in context.data or "ftrackTask" in context.data:
task = context.data.get("ftrackTask")
parent = context.data.get("ftrackEntity")
if task:
parent = task["parent"]
            name = task["name"]
elif parent:
name = parent["name"]
if not name:
self.log.info((
"Skipping ftrack integration. Instance \"{}\" does not"
" have specified ftrack entities."
).format(str(instance)))
return
info_msg = (
"Created new {entity_type} with data: {data}"
", metadata: {metadata}."
)
used_asset_versions = []
self._set_task_status(instance, task, session)
# Iterate over components and publish
for data in instance.data.get("ftrackComponentsList", []):
# AssetType
# Get existing entity.
assettype_data = {"short": "upload"}
assettype_data.update(data.get("assettype_data", {}))
self.log.debug("data: {}".format(data))
assettype_entity = session.query(
self.query("AssetType", assettype_data)
).first()
            # Create a new entity if none exists.
if not assettype_entity:
assettype_entity = session.create("AssetType", assettype_data)
self.log.debug("Created new AssetType with data: {}".format(
assettype_data
))
# Asset
# Get existing entity.
asset_data = {
"name": name,
"type": assettype_entity,
"parent": parent,
}
asset_data.update(data.get("asset_data", {}))
asset_entity = session.query(
self.query("Asset", asset_data)
).first()
self.log.info("asset entity: {}".format(asset_entity))
# Extracting metadata, and adding after entity creation. This is
# due to a ftrack_api bug where you can't add metadata on creation.
asset_metadata = asset_data.pop("metadata", {})
            # Create a new entity if none exists.
if not asset_entity:
asset_entity = session.create("Asset", asset_data)
self.log.debug(
info_msg.format(
entity_type="Asset",
data=asset_data,
metadata=asset_metadata
)
)
try:
session.commit()
except Exception:
tp, value, tb = sys.exc_info()
session.rollback()
session._configure_locations()
six.reraise(tp, value, tb)
# Adding metadata
existing_asset_metadata = asset_entity["metadata"]
existing_asset_metadata.update(asset_metadata)
asset_entity["metadata"] = existing_asset_metadata
# AssetVersion
# Get existing entity.
assetversion_data = {
"version": 0,
"asset": asset_entity,
}
_assetversion_data = data.get("assetversion_data", {})
assetversion_cust_attrs = _assetversion_data.pop(
"custom_attributes", {}
)
asset_version_comment = _assetversion_data.pop(
"comment", None
)
assetversion_data.update(_assetversion_data)
assetversion_entity = session.query(
self.query("AssetVersion", assetversion_data)
).first()
# Extracting metadata, and adding after entity creation. This is
# due to a ftrack_api bug where you can't add metadata on creation.
assetversion_metadata = assetversion_data.pop("metadata", {})
if task:
assetversion_data['task'] = task
            # Create a new entity if none exists.
if not assetversion_entity:
assetversion_entity = session.create(
"AssetVersion", assetversion_data
)
self.log.debug(
info_msg.format(
entity_type="AssetVersion",
data=assetversion_data,
metadata=assetversion_metadata
)
)
try:
session.commit()
except Exception:
tp, value, tb = sys.exc_info()
session.rollback()
session._configure_locations()
six.reraise(tp, value, tb)
# Adding metadata
existing_assetversion_metadata = assetversion_entity["metadata"]
existing_assetversion_metadata.update(assetversion_metadata)
assetversion_entity["metadata"] = existing_assetversion_metadata
# Add comment
if asset_version_comment:
assetversion_entity["comment"] = asset_version_comment
try:
session.commit()
except Exception:
session.rollback()
session._configure_locations()
self.log.warning((
"Comment was not possible to set for AssetVersion"
"\"{0}\". Can't set it's value to: \"{1}\""
).format(
assetversion_entity["id"], str(asset_version_comment)
))
# Adding Custom Attributes
for attr, val in assetversion_cust_attrs.items():
if attr in assetversion_entity["custom_attributes"]:
try:
assetversion_entity["custom_attributes"][attr] = val
session.commit()
continue
except Exception:
session.rollback()
session._configure_locations()
self.log.warning((
"Custom Attrubute \"{0}\""
" is not available for AssetVersion <{1}>."
" Can't set it's value to: \"{2}\""
).format(attr, assetversion_entity["id"], str(val)))
            # Have to commit the version and asset, because the location
            # can't determine the final resource identifier without them.
try:
session.commit()
except Exception:
tp, value, tb = sys.exc_info()
session.rollback()
session._configure_locations()
six.reraise(tp, value, tb)
# Component
# Get existing entity.
component_data = {
"name": "main",
"version": assetversion_entity
}
component_data.update(data.get("component_data", {}))
component_entity = session.query(
self.query("Component", component_data)
).first()
component_overwrite = data.get("component_overwrite", False)
location = data.get("component_location", session.pick_location())
# Overwrite existing component data if requested.
if component_entity and component_overwrite:
origin_location = session.query(
"Location where name is \"ftrack.origin\""
).one()
# Removing existing members from location
components = list(component_entity.get("members", []))
components += [component_entity]
for component in components:
for loc in component["component_locations"]:
if location["id"] == loc["location_id"]:
location.remove_component(
component, recursive=False
)
# Deleting existing members on component entity
for member in component_entity.get("members", []):
session.delete(member)
                    del member
try:
session.commit()
except Exception:
tp, value, tb = sys.exc_info()
session.rollback()
session._configure_locations()
six.reraise(tp, value, tb)
# Reset members in memory
if "members" in component_entity.keys():
component_entity["members"] = []
# Add components to origin location
try:
collection = clique.parse(data["component_path"])
except ValueError:
                # Assume it's a single file
# Changing file type
name, ext = os.path.splitext(data["component_path"])
component_entity["file_type"] = ext
origin_location.add_component(
component_entity, data["component_path"]
)
else:
# Changing file type
component_entity["file_type"] = collection.format("{tail}")
# Create member components for sequence.
for member_path in collection:
size = 0
try:
size = os.path.getsize(member_path)
except OSError:
pass
name = collection.match(member_path).group("index")
member_data = {
"name": name,
"container": component_entity,
"size": size,
"file_type": os.path.splitext(member_path)[-1]
}
component = session.create(
"FileComponent", member_data
)
origin_location.add_component(
component, member_path, recursive=False
)
component_entity["members"].append(component)
# Add components to location.
location.add_component(
component_entity, origin_location, recursive=True
)
data["component"] = component_entity
msg = "Overwriting Component with path: {0}, data: {1}, "
msg += "location: {2}"
self.log.info(
msg.format(
data["component_path"],
component_data,
location
)
)
# Extracting metadata, and adding after entity creation. This is
# due to a ftrack_api bug where you can't add metadata on creation.
component_metadata = component_data.pop("metadata", {})
# Create new component if none exists.
new_component = False
if not component_entity:
component_entity = assetversion_entity.create_component(
data["component_path"],
data=component_data,
location=location
)
data["component"] = component_entity
msg = "Created new Component with path: {0}, data: {1}"
msg += ", metadata: {2}, location: {3}"
self.log.info(
msg.format(
data["component_path"],
component_data,
component_metadata,
location
)
)
new_component = True
# Adding metadata
existing_component_metadata = component_entity["metadata"]
existing_component_metadata.update(component_metadata)
component_entity["metadata"] = existing_component_metadata
# if component_data['name'] = 'ftrackreview-mp4-mp4':
# assetversion_entity["thumbnail_id"]
# Setting assetversion thumbnail
if data.get("thumbnail", False):
assetversion_entity["thumbnail_id"] = component_entity["id"]
# Inform user about no changes to the database.
if (component_entity and not component_overwrite and
not new_component):
data["component"] = component_entity
self.log.info(
"Found existing component, and no request to overwrite. "
"Nothing has been changed."
)
else:
# Commit changes.
try:
session.commit()
except Exception:
tp, value, tb = sys.exc_info()
session.rollback()
session._configure_locations()
six.reraise(tp, value, tb)
if assetversion_entity not in used_asset_versions:
used_asset_versions.append(assetversion_entity)
asset_versions_key = "ftrackIntegratedAssetVersions"
if asset_versions_key not in instance.data:
instance.data[asset_versions_key] = []
for asset_version in used_asset_versions:
if asset_version not in instance.data[asset_versions_key]:
instance.data[asset_versions_key].append(asset_version)

View file

@ -0,0 +1,21 @@
import pyblish.api
class IntegrateFtrackComponentOverwrite(pyblish.api.InstancePlugin):
"""
    Set `component_overwrite` to True on all components in the instance's
    `ftrackComponentsList`.
"""
order = pyblish.api.IntegratorOrder + 0.49
label = 'Overwrite ftrack created versions'
families = ["clip"]
optional = True
active = False
def process(self, instance):
component_list = instance.data['ftrackComponentsList']
for cl in component_list:
cl['component_overwrite'] = True
            self.log.debug('Component {} will be overwritten'.format(
cl['component_data']['name']))

View file

@ -0,0 +1,270 @@
import os
import json
import copy
import pyblish.api
class IntegrateFtrackInstance(pyblish.api.InstancePlugin):
"""Collect ftrack component data (not integrate yet).
Add ftrack component list to instance.
"""
order = pyblish.api.IntegratorOrder + 0.48
label = "Integrate Ftrack Component"
families = ["ftrack"]
family_mapping = {
"camera": "cam",
"look": "look",
"mayaascii": "scene",
"model": "geo",
"rig": "rig",
"setdress": "setdress",
"pointcache": "cache",
"render": "render",
"prerender": "render",
"render2d": "render",
"nukescript": "comp",
"write": "render",
"review": "mov",
"plate": "img",
"audio": "audio",
"workfile": "scene",
"animation": "cache",
"image": "img",
"reference": "reference"
}
def process(self, instance):
self.log.debug("instance {}".format(instance))
instance_version = instance.data.get("version")
if instance_version is None:
raise ValueError("Instance version not set")
version_number = int(instance_version)
family = instance.data["family"]
family_low = instance.data["family"].lower()
asset_type = instance.data.get("ftrackFamily")
if not asset_type and family_low in self.family_mapping:
asset_type = self.family_mapping[family_low]
self.log.debug(self.family_mapping)
self.log.debug(family_low)
        # Ignore this instance if neither "ftrackFamily" nor a family
        # mapping is found.
if not asset_type:
self.log.info((
"Family \"{}\" does not match any asset type mapping"
).format(family))
return
instance_repres = instance.data.get("representations")
if not instance_repres:
self.log.info((
"Skipping instance. Does not have any representations {}"
).format(str(instance)))
return
# Prepare FPS
instance_fps = instance.data.get("fps")
if instance_fps is None:
instance_fps = instance.context.data["fps"]
# Base of component item data
        # - create a copy of this object when you want to use it
base_component_item = {
"assettype_data": {
"short": asset_type,
},
"asset_data": {
"name": instance.data["subset"],
},
"assetversion_data": {
"version": version_number,
"comment": instance.context.data.get("comment") or ""
},
"component_overwrite": False,
            # This can be changed optionally
"thumbnail": False,
# These must be changed for each component
"component_data": None,
"component_path": None,
"component_location": None
}
ft_session = instance.context.data["ftrackSession"]
# Filter types of representations
review_representations = []
thumbnail_representations = []
other_representations = []
for repre in instance_repres:
self.log.debug("Representation {}".format(repre))
repre_tags = repre.get("tags") or []
if repre.get("thumbnail") or "thumbnail" in repre_tags:
thumbnail_representations.append(repre)
elif "ftrackreview" in repre_tags:
review_representations.append(repre)
else:
other_representations.append(repre)
# Prepare ftrack locations
unmanaged_location = ft_session.query(
"Location where name is \"ftrack.unmanaged\""
).one()
ftrack_server_location = ft_session.query(
"Location where name is \"ftrack.server\""
).one()
# Components data
component_list = []
# Components that will be duplicated to unmanaged location
src_components_to_add = []
# Create thumbnail components
        # TODO what if there are multiple thumbnails?
first_thumbnail_component = None
for repre in thumbnail_representations:
published_path = repre.get("published_path")
if not published_path:
comp_files = repre["files"]
if isinstance(comp_files, (tuple, list, set)):
filename = comp_files[0]
else:
filename = comp_files
published_path = os.path.join(
repre["stagingDir"], filename
)
if not os.path.exists(published_path):
continue
repre["published_path"] = published_path
# Create copy of base comp item and append it
thumbnail_item = copy.deepcopy(base_component_item)
thumbnail_item["component_path"] = repre["published_path"]
thumbnail_item["component_data"] = {
"name": "thumbnail"
}
thumbnail_item["thumbnail"] = True
# Create copy of item before setting location
src_components_to_add.append(copy.deepcopy(thumbnail_item))
# Create copy of first thumbnail
if first_thumbnail_component is None:
first_thumbnail_component = copy.deepcopy(thumbnail_item)
# Set location
thumbnail_item["component_location"] = ftrack_server_location
# Add item to component list
component_list.append(thumbnail_item)
# Create review components
# Change asset name of each new component for review
is_first_review_repre = True
not_first_components = []
for repre in review_representations:
frame_start = repre.get("frameStartFtrack")
frame_end = repre.get("frameEndFtrack")
if frame_start is None or frame_end is None:
frame_start = instance.data["frameStart"]
frame_end = instance.data["frameEnd"]
# Frame end of uploaded video file should be duration in frames
# - frame start is always 0
# - frame end is duration in frames
duration = frame_end - frame_start + 1
fps = repre.get("fps")
if fps is None:
fps = instance_fps
# Create copy of base comp item and append it
review_item = copy.deepcopy(base_component_item)
# Change location
review_item["component_path"] = repre["published_path"]
# Change component data
review_item["component_data"] = {
# Default component name is "main".
"name": "ftrackreview-mp4",
"metadata": {
"ftr_meta": json.dumps({
"frameIn": 0,
"frameOut": int(duration),
"frameRate": float(fps)
})
}
}
# Create copy of item before setting location or changing asset
src_components_to_add.append(copy.deepcopy(review_item))
if is_first_review_repre:
is_first_review_repre = False
else:
# Add representation name to asset name of "not first" review
asset_name = review_item["asset_data"]["name"]
review_item["asset_data"]["name"] = "_".join(
(asset_name, repre["name"])
)
not_first_components.append(review_item)
# Set location
review_item["component_location"] = ftrack_server_location
# Add item to component list
component_list.append(review_item)
# Duplicate thumbnail component for all not first reviews
if first_thumbnail_component is not None:
for component_item in not_first_components:
asset_name = component_item["asset_data"]["name"]
new_thumbnail_component = copy.deepcopy(
first_thumbnail_component
)
new_thumbnail_component["asset_data"]["name"] = asset_name
new_thumbnail_component["component_location"] = (
ftrack_server_location
)
component_list.append(new_thumbnail_component)
        # Add source components for review and thumbnail components
for copy_src_item in src_components_to_add:
# Make sure thumbnail is disabled
copy_src_item["thumbnail"] = False
# Set location
copy_src_item["component_location"] = unmanaged_location
# Modify name of component to have suffix "_src"
component_data = copy_src_item["component_data"]
component_name = component_data["name"]
component_data["name"] = component_name + "_src"
component_list.append(copy_src_item)
# Add others representations as component
for repre in other_representations:
published_path = repre.get("published_path")
if not published_path:
continue
# Create copy of base comp item and append it
other_item = copy.deepcopy(base_component_item)
other_item["component_data"] = {
"name": repre["name"]
}
other_item["component_location"] = unmanaged_location
other_item["component_path"] = published_path
component_list.append(other_item)
def json_obj_parser(obj):
return str(obj)
self.log.debug("Components list: {}".format(
json.dumps(
component_list,
sort_keys=True,
indent=4,
default=json_obj_parser
)
))
instance.data["ftrackComponentsList"] = component_list

View file

@ -0,0 +1,149 @@
import sys
import json
import pyblish.api
import six
class IntegrateFtrackNote(pyblish.api.InstancePlugin):
"""Create comments in Ftrack."""
# Must be after integrate asset new
order = pyblish.api.IntegratorOrder + 0.4999
label = "Integrate Ftrack note"
families = ["ftrack"]
optional = True
# Can be set in presets:
# - Allows only `intent` and `comment` keys
note_with_intent_template = "{intent}: {comment}"
# - note label must exist in Ftrack
note_labels = []
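    # Example preset values (hypothetical): with this template an intent
    # "WIP" and comment "first pass" produce the note "WIP: first pass".
    #
    #   note_with_intent_template = "{intent}: {comment}"
    #   note_labels = ["For Client"]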
def get_intent_label(self, session, intent_value):
if not intent_value:
return
intent_configurations = session.query(
"CustomAttributeConfiguration where key is intent"
).all()
if not intent_configurations:
return
intent_configuration = intent_configurations[0]
        if len(intent_configurations) > 1:
self.log.warning((
"Found more than one `intent` custom attribute."
" Using first found."
))
config = intent_configuration.get("config")
if not config:
return
configuration = json.loads(config)
items = configuration.get("data")
if not items:
return
if sys.version_info[0] < 3:
string_type = basestring
else:
string_type = str
if isinstance(items, string_type):
items = json.loads(items)
intent_label = None
for item in items:
if item["value"] == intent_value:
intent_label = item["menu"]
break
return intent_label
def process(self, instance):
comment = (instance.context.data.get("comment") or "").strip()
if not comment:
self.log.info("Comment is not set.")
return
self.log.debug("Comment is set to `{}`".format(comment))
session = instance.context.data["ftrackSession"]
intent = instance.context.data.get("intent")
if intent and isinstance(intent, dict):
intent_val = intent.get("value")
intent_label = intent.get("label")
else:
intent_val = intent_label = intent
final_label = None
if intent_val:
final_label = self.get_intent_label(session, intent_val)
if final_label is None:
final_label = intent_label
# if intent label is set then format comment
# - it is possible that intent_label is equal to "" (empty string)
if final_label:
msg = "Intent label is set to `{}`.".format(final_label)
comment = self.note_with_intent_template.format(**{
"intent": final_label,
"comment": comment
})
elif intent_val:
msg = (
"Intent is set to `{}` and was not added"
" to comment because label is set to `{}`."
).format(intent_val, final_label)
else:
msg = "Intent is not set."
self.log.debug(msg)
asset_versions_key = "ftrackIntegratedAssetVersions"
asset_versions = instance.data.get(asset_versions_key)
if not asset_versions:
self.log.info("There are any integrated AssetVersions")
return
user = session.query(
"User where username is \"{}\"".format(session.api_user)
).first()
if not user:
self.log.warning(
"Was not able to query current User {}".format(
session.api_user
)
)
labels = []
if self.note_labels:
all_labels = session.query("NoteLabel").all()
labels_by_low_name = {lab["name"].lower(): lab for lab in all_labels}
for _label in self.note_labels:
label = labels_by_low_name.get(_label.lower())
if not label:
self.log.warning(
"Note Label `{}` was not found.".format(_label)
)
continue
labels.append(label)
for asset_version in asset_versions:
asset_version.create_note(comment, author=user, labels=labels)
try:
session.commit()
self.log.debug("Note added to AssetVersion \"{}\"".format(
str(asset_version)
))
except Exception:
tp, value, tb = sys.exc_info()
session.rollback()
session._configure_locations()
six.reraise(tp, value, tb)

View file

@ -0,0 +1,356 @@
import sys
import collections
import six
import pyblish.api
from avalon import io
# Copy of constant `openpype_modules.ftrack.lib.avalon_sync.CUST_ATTR_AUTO_SYNC`
CUST_ATTR_AUTO_SYNC = "avalon_auto_sync"
CUST_ATTR_GROUP = "openpype"
# Copy of `get_pype_attr` from openpype_modules.ftrack.lib
# TODO import from openpype's ftrack module when possible to not break Python 2
def get_pype_attr(session, split_hierarchical=True):
custom_attributes = []
hier_custom_attributes = []
# TODO remove deprecated "avalon" group from query
cust_attrs_query = (
"select id, entity_type, object_type_id, is_hierarchical, default"
" from CustomAttributeConfiguration"
        # Kept `pype` for Backwards Compatibility
" where group.name in (\"pype\", \"{}\")"
).format(CUST_ATTR_GROUP)
all_avalon_attr = session.query(cust_attrs_query).all()
for cust_attr in all_avalon_attr:
if split_hierarchical and cust_attr["is_hierarchical"]:
hier_custom_attributes.append(cust_attr)
continue
custom_attributes.append(cust_attr)
if split_hierarchical:
# return tuple
return custom_attributes, hier_custom_attributes
return custom_attributes
class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):
"""
Create entities in ftrack based on collected data from premiere
Example of entry data:
{
"ProjectXS": {
"entity_type": "Project",
"custom_attributes": {
"fps": 24,...
},
"tasks": [
"Compositing",
"Lighting",... *task must exist as task type in project schema*
],
"childs": {
"sq01": {
"entity_type": "Sequence",
...
}
}
}
}
"""
order = pyblish.api.IntegratorOrder - 0.04
label = 'Integrate Hierarchy To Ftrack'
families = ["shot"]
hosts = ["hiero", "resolve", "standalonepublisher", "flame"]
optional = False
def process(self, context):
self.context = context
if "hierarchyContext" not in self.context.data:
return
hierarchy_context = self.context.data["hierarchyContext"]
self.session = self.context.data["ftrackSession"]
project_name = self.context.data["projectEntity"]["name"]
query = 'Project where full_name is "{}"'.format(project_name)
project = self.session.query(query).one()
auto_sync_state = project[
"custom_attributes"][CUST_ATTR_AUTO_SYNC]
if not io.Session:
io.install()
self.ft_project = None
input_data = hierarchy_context
        # temporarily disable ftrack project's auto sync
if auto_sync_state:
self.auto_sync_off(project)
try:
# import ftrack hierarchy
self.import_to_ftrack(input_data)
finally:
if auto_sync_state:
self.auto_sync_on(project)
def import_to_ftrack(self, input_data, parent=None):
        # Pre-query hierarchical custom attributes
hier_custom_attributes = get_pype_attr(self.session)[1]
hier_attr_by_key = {
attr["key"]: attr
for attr in hier_custom_attributes
}
# Get ftrack api module (as they are different per python version)
ftrack_api = self.context.data["ftrackPythonModule"]
for entity_name in input_data:
entity_data = input_data[entity_name]
entity_type = entity_data['entity_type']
self.log.debug(entity_data)
self.log.debug(entity_type)
if entity_type.lower() == 'project':
query = 'Project where full_name is "{}"'.format(entity_name)
entity = self.session.query(query).one()
self.ft_project = entity
self.task_types = self.get_all_task_types(entity)
elif self.ft_project is None or parent is None:
raise AssertionError(
"Collected items are not in right order!"
)
# try to find if entity already exists
else:
query = (
'TypedContext where name is "{0}" and '
'project_id is "{1}"'
).format(entity_name, self.ft_project["id"])
try:
entity = self.session.query(query).one()
except Exception:
entity = None
# Create entity if not exists
if entity is None:
entity = self.create_entity(
name=entity_name,
type=entity_type,
parent=parent
)
# self.log.info('entity: {}'.format(dict(entity)))
# CUSTOM ATTRIBUTES
custom_attributes = entity_data.get('custom_attributes', [])
instances = [
i for i in self.context if i.data['asset'] in entity['name']
]
for key in custom_attributes:
hier_attr = hier_attr_by_key.get(key)
# Use simple method if key is not hierarchical
if not hier_attr:
assert (key in entity['custom_attributes']), (
'Missing custom attribute key: `{0}` in attrs: '
'`{1}`'.format(key, entity['custom_attributes'].keys())
)
entity['custom_attributes'][key] = custom_attributes[key]
else:
                # Use ftrack operations method to set hierarchical
                # attribute value.
                # - this is because there may be non-hierarchical custom
                #   attributes with different properties
entity_key = collections.OrderedDict()
entity_key["configuration_id"] = hier_attr["id"]
entity_key["entity_id"] = entity["id"]
self.session.recorded_operations.push(
ftrack_api.operation.UpdateEntityOperation(
"ContextCustomAttributeValue",
entity_key,
"value",
ftrack_api.symbol.NOT_SET,
custom_attributes[key]
)
)
for instance in instances:
instance.data['ftrackEntity'] = entity
try:
self.session.commit()
except Exception:
tp, value, tb = sys.exc_info()
self.session.rollback()
self.session._configure_locations()
six.reraise(tp, value, tb)
# TASKS
tasks = entity_data.get('tasks', [])
existing_tasks = []
tasks_to_create = []
for child in entity['children']:
if child.entity_type.lower() == 'task':
existing_tasks.append(child['name'].lower())
# existing_tasks.append(child['type']['name'])
for task_name in tasks:
task_type = tasks[task_name]["type"]
if task_name.lower() in existing_tasks:
print("Task {} already exists".format(task_name))
continue
tasks_to_create.append((task_name, task_type))
for task_name, task_type in tasks_to_create:
self.create_task(
name=task_name,
task_type=task_type,
parent=entity
)
try:
self.session.commit()
except Exception:
tp, value, tb = sys.exc_info()
self.session.rollback()
self.session._configure_locations()
six.reraise(tp, value, tb)
# Incoming links.
self.create_links(entity_data, entity)
try:
self.session.commit()
except Exception:
tp, value, tb = sys.exc_info()
self.session.rollback()
self.session._configure_locations()
six.reraise(tp, value, tb)
# Create notes.
user = self.session.query(
"User where username is \"{}\"".format(self.session.api_user)
).first()
if user:
for comment in entity_data.get("comments", []):
entity.create_note(comment, user)
else:
self.log.warning(
"Was not able to query current User {}".format(
self.session.api_user
)
)
try:
self.session.commit()
except Exception:
tp, value, tb = sys.exc_info()
self.session.rollback()
self.session._configure_locations()
six.reraise(tp, value, tb)
# Import children.
if 'childs' in entity_data:
self.import_to_ftrack(
entity_data['childs'], entity)
def create_links(self, entity_data, entity):
# Clear existing links.
for link in entity.get("incoming_links", []):
self.session.delete(link)
try:
self.session.commit()
except Exception:
tp, value, tb = sys.exc_info()
self.session.rollback()
self.session._configure_locations()
six.reraise(tp, value, tb)
# Create new links.
        for input_ in entity_data.get("inputs", []):
            input_id = io.find_one({"_id": input_})["data"]["ftrackId"]
assetbuild = self.session.get("AssetBuild", input_id)
self.log.debug(
"Creating link from {0} to {1}".format(
assetbuild["name"], entity["name"]
)
)
self.session.create(
"TypedContextLink", {"from": assetbuild, "to": entity}
)
def get_all_task_types(self, project):
tasks = {}
proj_template = project['project_schema']
temp_task_types = proj_template['_task_type_schema']['types']
        for task_type in temp_task_types:
            if task_type['name'] not in tasks:
                tasks[task_type['name']] = task_type
return tasks
def create_task(self, name, task_type, parent):
task = self.session.create('Task', {
'name': name,
'parent': parent
})
# TODO not secured!!! - check if task_type exists
self.log.info(task_type)
self.log.info(self.task_types)
task['type'] = self.task_types[task_type]
try:
self.session.commit()
except Exception:
tp, value, tb = sys.exc_info()
self.session.rollback()
self.session._configure_locations()
six.reraise(tp, value, tb)
return task
def create_entity(self, name, type, parent):
entity = self.session.create(type, {
'name': name,
'parent': parent
})
try:
self.session.commit()
except Exception:
tp, value, tb = sys.exc_info()
self.session.rollback()
self.session._configure_locations()
six.reraise(tp, value, tb)
return entity
def auto_sync_off(self, project):
project["custom_attributes"][CUST_ATTR_AUTO_SYNC] = False
self.log.info("Ftrack autosync swithed off")
try:
self.session.commit()
except Exception:
tp, value, tb = sys.exc_info()
self.session.rollback()
self.session._configure_locations()
six.reraise(tp, value, tb)
def auto_sync_on(self, project):
project["custom_attributes"][CUST_ATTR_AUTO_SYNC] = True
self.log.info("Ftrack autosync swithed on")
try:
self.session.commit()
except Exception:
tp, value, tb = sys.exc_info()
self.session.rollback()
self.session._configure_locations()
six.reraise(tp, value, tb)

View file

@ -0,0 +1,206 @@
import pyblish.api
import openpype.api
class ValidateFtrackAttributes(pyblish.api.InstancePlugin):
"""
This will validate attributes in ftrack against data in scene.
Attributes to be validated are specified in:
`$OPENPYPE_CONFIG/presets/<host>/ftrack_attributes.json`
    This is an array (list) of checks in the format:
[
[<attribute>, <operator>, <expression>]
]
Where <attribute> is name of ftrack attribute, <operator> is one of:
"is", is_not", "greater_than", "less_than", "contains", "not_contains",
"starts_with", "ends_with"
<expression> is python code that is evaluated by validator. This allows
you to fetch whatever value in scene you want, for example in Maya:
[
"fps", "is",
"from maya import mel; out = mel.eval('currentTimeUnitToFPS()')"
]
    will test if the ftrack fps attribute on the current Task parent is the
    same as the fps info we get from Maya. Store the value you need to
    compare in the variable `out` in your expression.
"""
label = "Validate Custom Ftrack Attributes"
order = openpype.api.ValidateContentsOrder
families = ["ftrack"]
optional = True
# Ignore standalone host, because it does not have an Ftrack entity
# associated.
hosts = [
"blender",
"fusion",
"harmony",
"houdini",
"maya",
"nuke",
"hiero",
"photoshop",
"premiere",
"resolve",
"unreal"
]
def process(self, instance):
context = instance.context
task = context.data.get('ftrackTask', False)
if not task:
self._raise(AttributeError,
"Missing FTrack Task entity in context")
host = pyblish.api.current_host()
to_check = self.ftrack_custom_attributes.get(host, {})
if not to_check:
self.log.warning("ftrack_attributes preset not found")
return
self.log.info("getting attributes from ftrack ...")
        # get parent of task (also validates custom attributes are available)
        try:
            parent = task["parent"]
            parent["custom_attributes"]
        except KeyError:
            self._raise(KeyError, "missing `parent` or `attributes`")
# get list of hierarchical attributes from ftrack
session = context.data["ftrackSession"]
custom_hier_attributes = self._get_custom_hier_attrs(session)
_nonhier = {}
custom_hier_attributes = {k: None for k in custom_hier_attributes}
for key, value in dict(parent["custom_attributes"]).items():
if key in custom_hier_attributes:
custom_hier_attributes[key] = value
else:
_nonhier[key] = value
custom_hier_values = self._get_hierarchical_values(
custom_hier_attributes, parent)
custom_hier_values.update(_nonhier)
errors = []
attribs = custom_hier_values
for check in to_check:
ev = {}
# WARNING(Ondrej Samohel): This is really not secure as we are
# basically executing user code. But there's no other way to make
# it flexible enough for users to get stuff from
exec(str(check[2]), {}, ev)
if not ev.get("out"):
errors.append("{} code doesn't return 'out': '{}'".format(
check[0], check[2]))
continue
if check[0] in attribs:
if check[1] == "is":
if attribs[check[0]] != ev["out"]:
errors.append("{}: {} is not {}".format(
check[0], attribs[check[0]], ev["out"]))
elif check[1] == "is_not":
if attribs[check[0]] == ev["out"]:
errors.append("{}: {} is {}".format(
check[0], attribs[check[0]], ev["out"]))
elif check[1] == "less_than":
if attribs[check[0]] < ev["out"]:
errors.append("{}: {} is greater {}".format(
check[0], attribs[check[0]], ev["out"]))
elif check[1] == "greater_than":
if attribs[check[0]] < ev["out"]:
errors.append("{}: {} is less {}".format(
check[0], attribs[check[0]], ev["out"]))
elif check[1] == "contains":
if attribs[check[0]] in ev["out"]:
errors.append("{}: {} does not contain {}".format(
check[0], attribs[check[0]], ev["out"]))
elif check[1] == "not_contains":
if attribs[check[0]] not in ev["out"]:
errors.append("{}: {} contains {}".format(
check[0], attribs[check[0]], ev["out"]))
elif check[1] == "starts_with":
if attribs[check[0]].startswith(ev["out"]):
errors.append("{}: {} does not starts with {}".format(
check[0], attribs[check[0]], ev["out"]))
elif check[1] == "ends_with":
if attribs[check[0]].endswith(ev["out"]):
errors.append("{}: {} does not end with {}".format(
check[0], attribs[check[0]], ev["out"]))
if errors:
self.log.error('There are invalid values for attributes:')
for e in errors:
self.log.error(e)
raise ValueError("ftrack attributes doesn't match")
def _get_custom_hier_attrs(self, session):
hier_custom_attributes = []
cust_attrs_query = (
"select id, entity_type, object_type_id, is_hierarchical"
" from CustomAttributeConfiguration"
)
all_avalon_attr = session.query(cust_attrs_query).all()
for cust_attr in all_avalon_attr:
if cust_attr["is_hierarchical"]:
hier_custom_attributes.append(cust_attr["key"])
return hier_custom_attributes
def _get_hierarchical_values(self, keys_dict, entity):
# check values already set
_set_keys = []
for key, value in keys_dict.items():
if value is not None:
_set_keys.append(key)
# pop set values from keys_dict
set_keys = {}
for key in _set_keys:
set_keys[key] = keys_dict.pop(key)
# find if entity has set values and pop them out
keys_to_pop = []
for key in keys_dict.keys():
_val = entity["custom_attributes"][key]
if _val:
keys_to_pop.append(key)
set_keys[key] = _val
for key in keys_to_pop:
keys_dict.pop(key)
        # if there are no keys left to find, return the found values
if not keys_dict:
return set_keys
# end recursion if entity is project
if entity.entity_type.lower() == "project":
for key, value in keys_dict.items():
set_keys[key] = value
else:
result = self._get_hierarchical_values(keys_dict, entity["parent"])
for key, value in result.items():
set_keys[key] = value
return set_keys
def _raise(self, exc, msg):
self.log.error(msg)
raise exc(msg)

View file

@ -0,0 +1,211 @@
README.rst.new
# Small entry point file for debugging tasks
test.py
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
pip-wheel-metadata/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
.hypothesis/
.pytest_cache/
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
target/
# Jupyter Notebook
.ipynb_checkpoints
# IPython
profile_default/
ipython_config.py
# pyenv
.python-version
# celery beat schedule file
celerybeat-schedule
# SageMath parsed files
*.sage.py
# Environments
.env
.venv
env/
venv/
ENV/
local/
env.bak/
venv.bak/
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
.dmypy.json
dmypy.json
# Pyre type checker
.pyre/
# Swap
[._]*.s[a-v][a-z]
[._]*.sw[a-p]
[._]s[a-rt-v][a-z]
[._]ss[a-gi-z]
[._]sw[a-p]
# Session
Session.vim
Sessionx.vim
# Temporary
.netrwhist
*~
# Auto-generated tag files
tags
# Persistent undo
[._]*.un~
.idea/
.vscode/
# General
.DS_Store
.AppleDouble
.LSOverride
# Icon must end with two \r
Icon
# Thumbnails
._*
# Files that might appear in the root of a volume
.DocumentRevisions-V100
.fseventsd
.Spotlight-V100
.TemporaryItems
.Trashes
.VolumeIcon.icns
.com.apple.timemachine.donotpresent
# Directories potentially created on remote AFP share
.AppleDB
.AppleDesktop
Network Trash Folder
Temporary Items
.apdisk
*~
# temporary files which can be created if a process still has a handle open of a deleted file
.fuse_hidden*
# KDE directory preferences
.directory
# Linux trash folder which might appear on any partition or disk
.Trash-*
# .nfs files are created when an open file is removed but is still being accessed
.nfs*
# Windows thumbnail cache files
Thumbs.db
Thumbs.db:encryptable
ehthumbs.db
ehthumbs_vista.db
# Dump file
*.stackdump
# Folder config file
[Dd]esktop.ini
# Recycle Bin used on file shares
$RECYCLE.BIN/
# Windows Installer files
*.cab
*.msi
*.msix
*.msm
*.msp
# Windows shortcuts
*.lnk

View file

@ -0,0 +1,41 @@
default_language_version:
python: python3
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v3.2.0
hooks:
- id: trailing-whitespace
- id: end-of-file-fixer
- id: fix-encoding-pragma
exclude: ^arrow/_version.py
- id: requirements-txt-fixer
- id: check-ast
- id: check-yaml
- id: check-case-conflict
- id: check-docstring-first
- id: check-merge-conflict
- id: debug-statements
- repo: https://github.com/timothycrosley/isort
rev: 5.4.2
hooks:
- id: isort
- repo: https://github.com/asottile/pyupgrade
rev: v2.7.2
hooks:
- id: pyupgrade
- repo: https://github.com/pre-commit/pygrep-hooks
rev: v1.6.0
hooks:
- id: python-no-eval
- id: python-check-blanket-noqa
- id: rst-backticks
- repo: https://github.com/psf/black
rev: 20.8b1
hooks:
- id: black
args: [--safe, --quiet]
- repo: https://gitlab.com/pycqa/flake8
rev: 3.8.3
hooks:
- id: flake8
additional_dependencies: [flake8-bugbear]

View file

@ -0,0 +1,598 @@
Changelog
=========
0.17.0 (2020-10-02)
-------------------
- [WARN] Arrow will **drop support** for Python 2.7 and 3.5 in the upcoming 1.0.0 release. This is the last major release to support Python 2.7 and Python 3.5.
- [NEW] Arrow now properly handles imaginary datetimes during DST shifts. For example:
.. code-block:: python
>>> just_before = arrow.get(2013, 3, 31, 1, 55, tzinfo="Europe/Paris")
>>> just_before.shift(minutes=+10)
<Arrow [2013-03-31T03:05:00+02:00]>
.. code-block:: python
>>> before = arrow.get("2018-03-10 23:00:00", "YYYY-MM-DD HH:mm:ss", tzinfo="US/Pacific")
>>> after = arrow.get("2018-03-11 04:00:00", "YYYY-MM-DD HH:mm:ss", tzinfo="US/Pacific")
>>> result=[(t, t.to("utc")) for t in arrow.Arrow.range("hour", before, after)]
>>> for r in result:
... print(r)
...
(<Arrow [2018-03-10T23:00:00-08:00]>, <Arrow [2018-03-11T07:00:00+00:00]>)
(<Arrow [2018-03-11T00:00:00-08:00]>, <Arrow [2018-03-11T08:00:00+00:00]>)
(<Arrow [2018-03-11T01:00:00-08:00]>, <Arrow [2018-03-11T09:00:00+00:00]>)
(<Arrow [2018-03-11T03:00:00-07:00]>, <Arrow [2018-03-11T10:00:00+00:00]>)
(<Arrow [2018-03-11T04:00:00-07:00]>, <Arrow [2018-03-11T11:00:00+00:00]>)
- [NEW] Added ``humanize`` week granularity translation for Tagalog.
- [CHANGE] Calls to the ``timestamp`` property now emit a ``DeprecationWarning``. In a future release, ``timestamp`` will be changed to a method to align with Python's datetime module. If you would like to continue using the property, please change your code to use the ``int_timestamp`` or ``float_timestamp`` properties instead.
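For example, existing property reads can migrate like this (a minimal sketch; the timestamp values are illustrative):
.. code-block:: python
>>> arw = arrow.utcnow()
>>> arw.timestamp          # property access; emits DeprecationWarning
1604394313
>>> arw.int_timestamp      # preferred replacement
1604394313
>>> arw.float_timestamp
1604394313.285432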
- [CHANGE] Expanded and improved Catalan locale.
- [FIX] Fixed a bug that caused ``Arrow.range()`` to incorrectly cut off ranges in certain scenarios when using month, quarter, or year endings.
- [FIX] Fixed a bug that caused day of week token parsing to be case sensitive.
- [INTERNAL] A number of functions were reordered in arrow.py for better organization and grouping of related methods. This change will have no impact on usage.
- [INTERNAL] A minimum tox version is now enforced for compatibility reasons. Contributors must use tox >3.18.0 going forward.
0.16.0 (2020-08-23)
-------------------
- [WARN] Arrow will **drop support** for Python 2.7 and 3.5 in the upcoming 1.0.0 release. The 0.16.x and 0.17.x releases are the last to support Python 2.7 and 3.5.
- [NEW] Implemented `PEP 495 <https://www.python.org/dev/peps/pep-0495/>`_ to handle ambiguous datetimes. This is achieved by the addition of the ``fold`` attribute for Arrow objects. For example:
.. code-block:: python
>>> before = Arrow(2017, 10, 29, 2, 0, tzinfo='Europe/Stockholm')
<Arrow [2017-10-29T02:00:00+02:00]>
>>> before.fold
0
>>> before.ambiguous
True
>>> after = Arrow(2017, 10, 29, 2, 0, tzinfo='Europe/Stockholm', fold=1)
<Arrow [2017-10-29T02:00:00+01:00]>
>>> after = before.replace(fold=1)
<Arrow [2017-10-29T02:00:00+01:00]>
- [NEW] Added ``normalize_whitespace`` flag to ``arrow.get``. This is useful for parsing log files and/or any files that may contain inconsistent spacing. For example:
.. code-block:: python
>>> arrow.get("Jun 1 2005 1:33PM", "MMM D YYYY H:mmA", normalize_whitespace=True)
<Arrow [2005-06-01T13:33:00+00:00]>
>>> arrow.get("2013-036 \t 04:05:06Z", normalize_whitespace=True)
<Arrow [2013-02-05T04:05:06+00:00]>
0.15.8 (2020-07-23)
-------------------
- [WARN] Arrow will **drop support** for Python 2.7 and 3.5 in the upcoming 1.0.0 release. The 0.15.x, 0.16.x, and 0.17.x releases are the last to support Python 2.7 and 3.5.
- [NEW] Added ``humanize`` week granularity translation for Czech.
- [FIX] ``arrow.get`` will now pick sane defaults when weekdays are passed with particular token combinations, see `#446 <https://github.com/arrow-py/arrow/issues/446>`_.
- [INTERNAL] Moved arrow to an organization. The repo can now be found `here <https://github.com/arrow-py/arrow>`_.
- [INTERNAL] Started issuing deprecation warnings for Python 2.7 and 3.5.
- [INTERNAL] Added Python 3.9 to CI pipeline.
0.15.7 (2020-06-19)
-------------------
- [NEW] Added a number of built-in format strings. See the `docs <https://arrow.readthedocs.io/#built-in-formats>`_ for a complete list of supported formats. For example:
.. code-block:: python
>>> arw = arrow.utcnow()
>>> arw.format(arrow.FORMAT_COOKIE)
'Wednesday, 27-May-2020 10:30:35 UTC'
- [NEW] Arrow is now fully compatible with Python 3.9 and PyPy3.
- [NEW] Added Makefile, tox.ini, and requirements.txt files to the distribution bundle.
- [NEW] Added French Canadian and Swahili locales.
- [NEW] Added ``humanize`` week granularity translation for Hebrew, Greek, Macedonian, Swedish, Slovak.
- [FIX] ms and μs timestamps are now normalized in ``arrow.get()``, ``arrow.fromtimestamp()``, and ``arrow.utcfromtimestamp()``. For example:
.. code-block:: python
>>> ts = 1591161115194556
>>> arw = arrow.get(ts)
<Arrow [2020-06-03T05:11:55.194556+00:00]>
>>> arw.timestamp
1591161115
- [FIX] Refactored and updated Macedonian, Hebrew, Korean, and Portuguese locales.
0.15.6 (2020-04-29)
-------------------
- [NEW] Added support for parsing and formatting `ISO 8601 week dates <https://en.wikipedia.org/wiki/ISO_week_date>`_ via a new token ``W``, for example:
.. code-block:: python
>>> arrow.get("2013-W29-6", "W")
<Arrow [2013-07-20T00:00:00+00:00]>
>>> utc=arrow.utcnow()
>>> utc
<Arrow [2020-01-23T18:37:55.417624+00:00]>
>>> utc.format("W")
'2020-W04-4'
- [NEW] Formatting with ``x`` token (microseconds) is now possible, for example:
.. code-block:: python
>>> dt = arrow.utcnow()
>>> dt.format("x")
'1585669870688329'
>>> dt.format("X")
'1585669870'
- [NEW] Added ``humanize`` week granularity translation for German, Italian, Polish & Taiwanese locales.
- [FIX] Consolidated and simplified German locales.
- [INTERNAL] Moved testing suite from nosetest/Chai to pytest/pytest-mock.
- [INTERNAL] Converted xunit-style setup and teardown functions in tests to pytest fixtures.
- [INTERNAL] Setup Github Actions for CI alongside Travis.
- [INTERNAL] Help support Arrow's future development by donating to the project on `Open Collective <https://opencollective.com/arrow>`_.
0.15.5 (2020-01-03)
-------------------
- [WARN] Python 2 reached EOL on 2020-01-01. arrow will **drop support** for Python 2 in a future release to be decided (see `#739 <https://github.com/arrow-py/arrow/issues/739>`_).
- [NEW] Added bounds parameter to ``span_range``, ``interval`` and ``span`` methods. This allows you to include or exclude the start and end values.
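For example (a sketch; ``bounds`` takes interval notation such as ``"[)"``, the default, or ``"[]"``):
.. code-block:: python
>>> arw = arrow.get("2020-01-03T12:30:00")
>>> arw.span("hour")
(<Arrow [2020-01-03T12:00:00+00:00]>, <Arrow [2020-01-03T12:59:59.999999+00:00]>)
>>> arw.span("hour", bounds="[]")
(<Arrow [2020-01-03T12:00:00+00:00]>, <Arrow [2020-01-03T13:00:00+00:00]>)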
- [NEW] ``arrow.get()`` can now create arrow objects from a timestamp with a timezone, for example:
.. code-block:: python
>>> arrow.get(1367900664, tzinfo=tz.gettz('US/Pacific'))
<Arrow [2013-05-06T21:24:24-07:00]>
- [NEW] ``humanize`` can now combine multiple levels of granularity, for example:
.. code-block:: python
>>> later140 = arrow.utcnow().shift(seconds=+8400)
>>> later140.humanize(granularity="minute")
'in 139 minutes'
>>> later140.humanize(granularity=["hour", "minute"])
'in 2 hours and 19 minutes'
- [NEW] Added Hong Kong locale (``zh_hk``).
- [NEW] Added ``humanize`` week granularity translation for Dutch.
- [NEW] Numbers are now displayed when using the seconds granularity in ``humanize``.
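For example (illustrative output):
.. code-block:: python
>>> arrow.utcnow().shift(seconds=-30).humanize(granularity="second")
'30 seconds ago'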
- [CHANGE] ``range`` now supports both the singular and plural forms of the ``frames`` argument (e.g. day and days).
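A short sketch of the singular/plural equivalence (dates illustrative):
.. code-block:: python
>>> start, end = arrow.get("2019-11-01"), arrow.get("2019-11-03")
>>> list(arrow.Arrow.range("day", start, end)) == list(arrow.Arrow.range("days", start, end))
True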
- [FIX] Improved parsing of strings that contain punctuation.
- [FIX] Improved behaviour of ``humanize`` when singular seconds are involved.
0.15.4 (2019-11-02)
-------------------
- [FIX] Fixed an issue that caused package installs to fail on Conda Forge.
0.15.3 (2019-11-02)
-------------------
- [NEW] ``factory.get()`` can now create arrow objects from a ISO calendar tuple, for example:
.. code-block:: python
>>> arrow.get((2013, 18, 7))
<Arrow [2013-05-05T00:00:00+00:00]>
- [NEW] Added a new token ``x`` to allow parsing of integer timestamps with milliseconds and microseconds.
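For example (a sketch; the timestamps are illustrative):
.. code-block:: python
>>> arrow.get("1565358758123", "x")
<Arrow [2019-08-09T13:52:38.123000+00:00]>
>>> arrow.get("1565358758123456", "x")
<Arrow [2019-08-09T13:52:38.123456+00:00]>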
- [NEW] Formatting now supports escaping of characters using the same syntax as parsing, for example:
.. code-block:: python
>>> arw = arrow.now()
>>> fmt = "YYYY-MM-DD h [h] m"
>>> arw.format(fmt)
'2019-11-02 3 h 32'
- [NEW] Added ``humanize`` week granularity translations for Chinese, Spanish and Vietnamese.
- [CHANGE] Added ``ParserError`` to module exports.
- [FIX] Added support for midnight at end of day. See `#703 <https://github.com/arrow-py/arrow/issues/703>`_ for details.
- [INTERNAL] Created Travis build for macOS.
- [INTERNAL] Test parsing and formatting against full timezone database.
0.15.2 (2019-09-14)
-------------------
- [NEW] Added ``humanize`` week granularity translations for Portuguese and Brazilian Portuguese.
- [NEW] Embedded changelog within docs and added release dates to versions.
- [FIX] Fixed a bug that caused test failures on Windows only, see `#668 <https://github.com/arrow-py/arrow/issues/668>`_ for details.
0.15.1 (2019-09-10)
-------------------
- [NEW] Added ``humanize`` week granularity translations for Japanese.
- [FIX] Fixed a bug that caused Arrow to fail when passed a negative timestamp string.
- [FIX] Fixed a bug that caused Arrow to fail when passed a datetime object with ``tzinfo`` of type ``StaticTzInfo``.
0.15.0 (2019-09-08)
-------------------
- [NEW] Added support for DDD and DDDD ordinal date tokens. The following functionality is now possible: ``arrow.get("1998-045")``, ``arrow.get("1998-45", "YYYY-DDD")``, ``arrow.get("1998-045", "YYYY-DDDD")``.
- [NEW] ISO 8601 basic format for dates and times is now supported (e.g. ``YYYYMMDDTHHmmssZ``).
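For example (a sketch):
.. code-block:: python
>>> arrow.get("20190908T123045Z")
<Arrow [2019-09-08T12:30:45+00:00]>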
- [NEW] Added ``humanize`` week granularity translations for French, Russian and Swiss German locales.
- [CHANGE] Timestamps of type ``str`` are no longer supported **without a format string** in the ``arrow.get()`` method. This change was made to support the ISO 8601 basic format and to address bugs such as `#447 <https://github.com/arrow-py/arrow/issues/447>`_.
The following will NOT work in v0.15.0:
.. code-block:: python
>>> arrow.get("1565358758")
>>> arrow.get("1565358758.123413")
The following will work in v0.15.0:
.. code-block:: python
>>> arrow.get("1565358758", "X")
>>> arrow.get("1565358758.123413", "X")
>>> arrow.get(1565358758)
>>> arrow.get(1565358758.123413)
- [CHANGE] When a meridian token (a|A) is passed and no meridians are available for the specified locale (e.g. unsupported or untranslated) a ``ParserError`` is raised.
- [CHANGE] The timestamp token (``X``) will now match float timestamps of type ``str``: ``arrow.get("1565358758.123415", "X")``.
- [CHANGE] Strings with leading and/or trailing whitespace will no longer be parsed without a format string. Please see `the docs <https://arrow.readthedocs.io/#regular-expressions>`_ for ways to handle this.
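A sketch of the new behavior (dates illustrative):
.. code-block:: python
>>> arrow.get("  2019-09-08  ")   # now raises ParserError
>>> arrow.get("  2019-09-08  ".strip())
<Arrow [2019-09-08T00:00:00+00:00]>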
- [FIX] The timestamp token (``X``) will now only match on strings that **strictly contain integers and floats**, preventing incorrect matches.
- [FIX] Most instances of ``arrow.get()`` returning an incorrect ``Arrow`` object from a partial parsing match have been eliminated. The following issue have been addressed: `#91 <https://github.com/arrow-py/arrow/issues/91>`_, `#196 <https://github.com/arrow-py/arrow/issues/196>`_, `#396 <https://github.com/arrow-py/arrow/issues/396>`_, `#434 <https://github.com/arrow-py/arrow/issues/434>`_, `#447 <https://github.com/arrow-py/arrow/issues/447>`_, `#456 <https://github.com/arrow-py/arrow/issues/456>`_, `#519 <https://github.com/arrow-py/arrow/issues/519>`_, `#538 <https://github.com/arrow-py/arrow/issues/538>`_, `#560 <https://github.com/arrow-py/arrow/issues/560>`_.
0.14.7 (2019-09-04)
-------------------
- [CHANGE] ``ArrowParseWarning`` will no longer be printed on every call to ``arrow.get()`` with a datetime string. The purpose of the warning was to start a conversation about the upcoming 0.15.0 changes and we appreciate all the feedback that the community has given us!
0.14.6 (2019-08-28)
-------------------
- [NEW] Added support for ``week`` granularity in ``Arrow.humanize()``. For example, ``arrow.utcnow().shift(weeks=-1).humanize(granularity="week")`` outputs "a week ago". This change introduced two new untranslated words, ``week`` and ``weeks``, to all locale dictionaries, so locale contributions are welcome!
- [NEW] Fully translated the Brazilian Portuguese locale.
- [CHANGE] Updated the Macedonian locale to inherit from a Slavic base.
- [FIX] Fixed a bug that caused ``arrow.get()`` to ignore tzinfo arguments of type string (e.g. ``arrow.get(tzinfo="Europe/Paris")``).
- [FIX] Fixed a bug that occurred when ``arrow.Arrow()`` was instantiated with a ``pytz`` tzinfo object.
- [FIX] Fixed a bug that caused Arrow to fail when passed a sub-second token, that when rounded, had a value greater than 999999 (e.g. ``arrow.get("2015-01-12T01:13:15.9999995")``). Arrow should now accurately propagate the rounding for large sub-second tokens.
0.14.5 (2019-08-09)
-------------------
- [NEW] Added Afrikaans locale.
- [CHANGE] Removed deprecated ``replace`` shift functionality. Users looking to pass plural properties to the ``replace`` function to shift values should use ``shift`` instead.
- [FIX] Fixed bug that occurred when ``factory.get()`` was passed a locale kwarg.
0.14.4 (2019-07-30)
-------------------
- [FIX] Fixed a regression in 0.14.3 that prevented a tzinfo argument of type string to be passed to the ``get()`` function. Functionality such as ``arrow.get("2019072807", "YYYYMMDDHH", tzinfo="UTC")`` should work as normal again.
- [CHANGE] Moved ``backports.functools_lru_cache`` dependency from ``extra_requires`` to ``install_requires`` for ``Python 2.7`` installs to fix `#495 <https://github.com/arrow-py/arrow/issues/495>`_.
0.14.3 (2019-07-28)
-------------------
- [NEW] Added full support for Python 3.8.
- [CHANGE] Added warnings for upcoming factory.get() parsing changes in 0.15.0. Please see `#612 <https://github.com/arrow-py/arrow/issues/612>`_ for full details.
- [FIX] Extensive refactor and update of documentation.
- [FIX] factory.get() can now construct from kwargs.
- [FIX] Added meridians to Spanish Locale.
0.14.2 (2019-06-06)
-------------------
- [CHANGE] Travis CI builds now use tox to lint and run tests.
- [FIX] Fixed UnicodeDecodeError on certain locales (#600).
0.14.1 (2019-06-06)
-------------------
- [FIX] Fixed ``ImportError: No module named 'dateutil'`` (#598).
0.14.0 (2019-06-06)
-------------------
- [NEW] Added provisional support for Python 3.8.
- [CHANGE] Removed support for EOL Python 3.4.
- [FIX] Updated setup.py with modern Python standards.
- [FIX] Upgraded dependencies to latest versions.
- [FIX] Enabled flake8 and black on travis builds.
- [FIX] Formatted code using black and isort.
0.13.2 (2019-05-30)
-------------------
- [NEW] Add is_between method.
- [FIX] Improved humanize behaviour for near zero durations (#416).
- [FIX] Correct humanize behaviour with future days (#541).
- [FIX] Documentation updates.
- [FIX] Improvements to German Locale.
0.13.1 (2019-02-17)
-------------------
- [NEW] Add support for Python 3.7.
- [CHANGE] Remove deprecation decorators for Arrow.range(), Arrow.span_range() and Arrow.interval(), all now return generators, wrap with list() to get old behavior.
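For example (a sketch; the generator repr is abbreviated):
.. code-block:: python
>>> start, end = arrow.get("2019-02-17T00:00"), arrow.get("2019-02-17T02:00")
>>> arrow.Arrow.range("hour", start, end)
<generator object Arrow.range at 0x...>
>>> list(arrow.Arrow.range("hour", start, end))
[<Arrow [2019-02-17T00:00:00+00:00]>, <Arrow [2019-02-17T01:00:00+00:00]>, <Arrow [2019-02-17T02:00:00+00:00]>]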
- [FIX] Documentation and docstring updates.
0.13.0 (2019-01-09)
-------------------
- [NEW] Added support for Python 3.6.
- [CHANGE] Drop support for Python 2.6/3.3.
- [CHANGE] Return generator instead of list for Arrow.range(), Arrow.span_range() and Arrow.interval().
- [FIX] Make arrow.get() work with str & tzinfo combo.
- [FIX] Make sure special RegEx characters are escaped in format string.
- [NEW] Added support for ZZZ when formatting.
- [FIX] Stop using datetime.utcnow() in internals, use datetime.now(UTC) instead.
- [FIX] Return NotImplemented instead of TypeError in arrow math internals.
- [NEW] Added Estonian Locale.
- [FIX] Small fixes to Greek locale.
- [FIX] TagalogLocale improvements.
- [FIX] Added test requirements to setup.
- [FIX] Improve docs for get, now and utcnow methods.
- [FIX] Correct typo in deprecation warning.
0.12.1
------
- [FIX] Allow universal wheels to be generated and reliably installed.
- [FIX] Make humanize respect only_distance when granularity argument is also given.
0.12.0
------
- [FIX] Compatibility fix for Python 2.x
0.11.0
------
- [FIX] Fix grammar of ArabicLocale
- [NEW] Add Nepali Locale
- [FIX] Fix month name + rename AustriaLocale -> AustrianLocale
- [FIX] Fix typo in Basque Locale
- [FIX] Fix grammar in PortugueseBrazilian locale
- [FIX] Remove pip --user-mirrors flag
- [NEW] Add Indonesian Locale
0.10.0
------
- [FIX] Fix getattr off by one for quarter
- [FIX] Fix negative offset for UTC
- [FIX] Update arrow.py
0.9.0
-----
- [NEW] Remove duplicate code
- [NEW] Support gnu date iso 8601
- [NEW] Add support for universal wheels
- [NEW] Slovenian locale
- [NEW] Slovak locale
- [NEW] Romanian locale
- [FIX] respect limit even if end is defined range
- [FIX] Separate replace & shift functions
- [NEW] Added tox
- [FIX] Fix supported Python versions in documentation
- [NEW] Azerbaijani locale added, locale issue fixed in Turkish.
- [FIX] Format ParserError's raise message
0.8.0
-----
- []
0.7.1
-----
- [NEW] Esperanto locale (batisteo)
0.7.0
-----
- [FIX] Parse localized strings #228 (swistakm)
- [FIX] Modify tzinfo parameter in ``get`` api #221 (bottleimp)
- [FIX] Fix Czech locale (PrehistoricTeam)
- [FIX] Raise TypeError when adding/subtracting non-dates (itsmeolivia)
- [FIX] Fix pytz conversion error (Kudo)
- [FIX] Fix overzealous time truncation in span_range (kdeldycke)
- [NEW] Humanize for time duration #232 (ybrs)
- [NEW] Add Thai locale (sipp11)
- [NEW] Adding Belarusian (be) locale (oire)
- [NEW] Search date in strings (beenje)
- [NEW] Note that arrow's tokens differ from strptime's. (offby1)
0.6.0
-----
- [FIX] Added support for Python 3
- [FIX] Avoid truncating oversized epoch timestamps. Fixes #216.
- [FIX] Fixed month abbreviations for Ukrainian
- [FIX] Fix typo timezone
- [FIX] A couple of dialect fixes and two new languages
- [FIX] Spanish locale: ``Miercoles`` should have acute accent
- [Fix] Fix Finnish grammar
- [FIX] Fix typo in 'Arrow.floor' docstring
- [FIX] Use read() utility to open README
- [FIX] span_range for week frame
- [NEW] Add minimal support for fractional seconds longer than six digits.
- [NEW] Adding locale support for Marathi (mr)
- [NEW] Add count argument to span method
- [NEW] Improved docs
0.5.1 - 0.5.4
-------------
- [FIX] test the behavior of simplejson instead of calling for_json directly (tonyseek)
- [FIX] Add Hebrew Locale (doodyparizada)
- [FIX] Update documentation location (andrewelkins)
- [FIX] Update setup.py Development Status level (andrewelkins)
- [FIX] Case insensitive month match (cshowe)
0.5.0
-----
- [NEW] struct_time addition. (mhworth)
- [NEW] Version grep (eirnym)
- [NEW] Default to ISO 8601 format (emonty)
- [NEW] Raise TypeError on comparison (sniekamp)
- [NEW] Adding Macedonian(mk) locale (krisfremen)
- [FIX] Fix for ISO seconds and fractional seconds (sdispater) (andrewelkins)
- [FIX] Use correct Dutch wording for "hours" (wbolster)
- [FIX] Complete the list of english locales (indorilftw)
- [FIX] Change README to reStructuredText (nyuszika7h)
- [FIX] Parse lower-cased 'h' (tamentis)
- [FIX] Slight modifications to Dutch locale (nvie)
0.4.4
-----
- [NEW] Include the docs in the released tarball
- [NEW] Czech localization for Arrow
- [NEW] Add fa_ir to locales
- [FIX] Fixes parsing of time strings with a final Z
- [FIX] Fixes ISO parsing and formatting for fractional seconds
- [FIX] test_fromtimestamp sp
- [FIX] some typos fixed
- [FIX] removed an unused import statement
- [FIX] docs table fix
- [FIX] Issue with specify 'X' template and no template at all to arrow.get
- [FIX] Fix "import" typo in docs/index.rst
- [FIX] Fix unit tests for zero passed
- [FIX] Update layout.html
- [FIX] In Norwegian and New Norwegian, months and weekdays should not be capitalized
- [FIX] Fixed discrepancy between specifying 'X' to arrow.get and specifying no template
0.4.3
-----
- [NEW] Turkish locale (Emre)
- [NEW] Arabic locale (Mosab Ahmad)
- [NEW] Danish locale (Holmars)
- [NEW] Icelandic locale (Holmars)
- [NEW] Hindi locale (Atmb4u)
- [NEW] Malayalam locale (Atmb4u)
- [NEW] Finnish locale (Stormpat)
- [NEW] Portuguese locale (Danielcorreia)
- [NEW] ``h`` and ``hh`` strings are now supported (Averyonghub)
- [FIX] An incorrect inflection in the Polish locale has been fixed (Avalanchy)
- [FIX] ``arrow.get`` now properly handles ``Date`` (Jaapz)
- [FIX] Tests are now declared in ``setup.py`` and the manifest (Pypingou)
- [FIX] ``__version__`` has been added to ``__init__.py`` (Sametmax)
- [FIX] ISO 8601 strings can be parsed without a separator (Ivandiguisto / Root)
- [FIX] Documentation is now more clear regarding some inputs on ``arrow.get`` (Eriktaubeneck)
- [FIX] Some documentation links have been fixed (Vrutsky)
- [FIX] Error messages for parse errors are now more descriptive (Maciej Albin)
- [FIX] The parser now correctly checks for separators in strings (Mschwager)
0.4.2
-----
- [NEW] Factory ``get`` method now accepts a single ``Arrow`` argument.
- [NEW] Tokens SSSS, SSSSS and SSSSSS are supported in parsing.
- [NEW] ``Arrow`` objects have a ``float_timestamp`` property.
- [NEW] Vietnamese locale (Iu1nguoi)
- [NEW] Factory ``get`` method now accepts a list of format strings (Dgilland)
- [NEW] A MANIFEST.in file has been added (Pypingou)
- [NEW] Tests can be run directly from ``setup.py`` (Pypingou)
- [FIX] Arrow docs now list 'day of week' format tokens correctly (Rudolphfroger)
- [FIX] Several issues with the Korean locale have been resolved (Yoloseem)
- [FIX] ``humanize`` now correctly returns unicode (Shvechikov)
- [FIX] ``Arrow`` objects now pickle / unpickle correctly (Yoloseem)
0.4.1
-----
- [NEW] Table / explanation of formatting & parsing tokens in docs
- [NEW] Brazilian locale (Augusto2112)
- [NEW] Dutch locale (OrangeTux)
- [NEW] Italian locale (Pertux)
- [NEW] Austrian locale (LeChewbacca)
- [NEW] Tagalog locale (Marksteve)
- [FIX] Corrected spelling and day numbers in German locale (LeChewbacca)
- [FIX] Factory ``get`` method should now handle unicode strings correctly (Bwells)
- [FIX] Midnight and noon should now parse and format correctly (Bwells)
0.4.0
-----
- [NEW] Format-free ISO 8601 parsing in factory ``get`` method
- [NEW] Support for 'week' / 'weeks' in ``span``, ``range``, ``span_range``, ``floor`` and ``ceil``
- [NEW] Support for 'weeks' in ``replace``
- [NEW] Norwegian locale (Martinp)
- [NEW] Japanese locale (CortYuming)
- [FIX] Timezones no longer show the wrong sign when formatted (Bean)
- [FIX] Microseconds are parsed correctly from strings (Bsidhom)
- [FIX] Locale day-of-week is no longer off by one (Cynddl)
- [FIX] Corrected plurals of Ukrainian and Russian nouns (Catchagain)
- [CHANGE] Old 0.1 ``arrow`` module method removed
- [CHANGE] Dropped timestamp support in ``range`` and ``span_range`` (never worked correctly)
- [CHANGE] Dropped parsing of single string as tz string in factory ``get`` method (replaced by ISO 8601)
0.3.5
-----
- [NEW] French locale (Cynddl)
- [NEW] Spanish locale (Slapresta)
- [FIX] Ranges handle multiple timezones correctly (Ftobia)
0.3.4
-----
- [FIX] Humanize no longer sometimes returns the wrong month delta
- [FIX] ``__format__`` works correctly with no format string
0.3.3
-----
- [NEW] Python 2.6 support
- [NEW] Initial support for locale-based parsing and formatting
- [NEW] ArrowFactory class, now proxied as the module API
- [NEW] ``factory`` api method to obtain a factory for a custom type
- [FIX] Python 3 support and tests completely ironed out
0.3.2
-----
- [NEW] Python 3+ support
0.3.1
-----
- [FIX] The old ``arrow`` module function handles timestamps correctly as it used to
0.3.0
-----
- [NEW] ``Arrow.replace`` method
- [NEW] Accept timestamps, datetimes and Arrows for datetime inputs, where reasonable
- [FIX] ``range`` and ``span_range`` respect end and limit parameters correctly
- [CHANGE] Arrow objects are no longer mutable
- [CHANGE] Plural attribute name semantics altered: single -> absolute, plural -> relative
- [CHANGE] Plural names no longer supported as properties (e.g. ``arrow.utcnow().years``)
0.2.1
-----
- [NEW] Support for localized humanization
- [NEW] English, Russian, Greek, Korean, Chinese locales
0.2.0
-----
- **REWRITE**
- [NEW] Date parsing
- [NEW] Date formatting
- [NEW] ``floor``, ``ceil`` and ``span`` methods
- [NEW] ``datetime`` interface implementation
- [NEW] ``clone`` method
- [NEW] ``get``, ``now`` and ``utcnow`` API methods
0.1.6
-----
- [NEW] Humanized time deltas
- [NEW] ``__eq__`` implemented
- [FIX] Issues with conversions related to daylight savings time resolved
- [CHANGE] ``__str__`` uses ISO formatting
0.1.5
-----
- **Started tracking changes**
- [NEW] Parsing of ISO-formatted time zone offsets (e.g. '+02:30', '-05:00')
- [NEW] Resolved some issues with timestamps and delta / Olson time zones

View file

@ -0,0 +1,201 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright 2019 Chris Smith
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

View file

@ -0,0 +1,3 @@
include LICENSE CHANGELOG.rst README.rst Makefile requirements.txt tox.ini
recursive-include tests *.py
recursive-include docs *.py *.rst *.bat Makefile

View file

@ -0,0 +1,44 @@
.PHONY: auto test docs clean
auto: build38
build27: PYTHON_VER = python2.7
build35: PYTHON_VER = python3.5
build36: PYTHON_VER = python3.6
build37: PYTHON_VER = python3.7
build38: PYTHON_VER = python3.8
build39: PYTHON_VER = python3.9
build27 build35 build36 build37 build38 build39: clean
virtualenv venv --python=$(PYTHON_VER)
. venv/bin/activate; \
pip install -r requirements.txt; \
pre-commit install
test:
rm -f .coverage coverage.xml
. venv/bin/activate; pytest
lint:
. venv/bin/activate; pre-commit run --all-files --show-diff-on-failure
docs:
rm -rf docs/_build
. venv/bin/activate; cd docs; make html
clean: clean-dist
rm -rf venv .pytest_cache ./**/__pycache__
rm -f .coverage coverage.xml ./**/*.pyc
clean-dist:
rm -rf dist build .egg .eggs arrow.egg-info
build-dist:
. venv/bin/activate; \
pip install -U setuptools twine wheel; \
python setup.py sdist bdist_wheel
upload-dist:
. venv/bin/activate; twine upload dist/*
publish: test clean-dist build-dist upload-dist clean-dist

View file

@ -0,0 +1,133 @@
Arrow: Better dates & times for Python
======================================
.. start-inclusion-marker-do-not-remove
.. image:: https://github.com/arrow-py/arrow/workflows/tests/badge.svg?branch=master
:alt: Build Status
:target: https://github.com/arrow-py/arrow/actions?query=workflow%3Atests+branch%3Amaster
.. image:: https://codecov.io/gh/arrow-py/arrow/branch/master/graph/badge.svg
:alt: Coverage
:target: https://codecov.io/gh/arrow-py/arrow
.. image:: https://img.shields.io/pypi/v/arrow.svg
:alt: PyPI Version
:target: https://pypi.python.org/pypi/arrow
.. image:: https://img.shields.io/pypi/pyversions/arrow.svg
:alt: Supported Python Versions
:target: https://pypi.python.org/pypi/arrow
.. image:: https://img.shields.io/pypi/l/arrow.svg
:alt: License
:target: https://pypi.python.org/pypi/arrow
.. image:: https://img.shields.io/badge/code%20style-black-000000.svg
:alt: Code Style: Black
:target: https://github.com/psf/black
**Arrow** is a Python library that offers a sensible and human-friendly approach to creating, manipulating, formatting and converting dates, times and timestamps. It implements and updates the datetime type, plugging gaps in functionality and providing an intelligent module API that supports many common creation scenarios. Simply put, it helps you work with dates and times with fewer imports and a lot less code.
Arrow is named after the `arrow of time <https://en.wikipedia.org/wiki/Arrow_of_time>`_ and is heavily inspired by `moment.js <https://github.com/moment/moment>`_ and `requests <https://github.com/psf/requests>`_.
Why use Arrow over built-in modules?
------------------------------------
Python's standard library and some other low-level modules have near-complete date, time and timezone functionality, but don't work very well from a usability perspective:
- Too many modules: datetime, time, calendar, dateutil, pytz and more
- Too many types: date, time, datetime, tzinfo, timedelta, relativedelta, etc.
- Timezones and timestamp conversions are verbose and unpleasant
- Timezone naivety is the norm
- Gaps in functionality: ISO 8601 parsing, timespans, humanization
Features
--------
- Fully-implemented, drop-in replacement for datetime
- Supports Python 2.7, 3.5, 3.6, 3.7, 3.8 and 3.9
- Timezone-aware and UTC by default
- Provides super-simple creation options for many common input scenarios
- :code:`shift` method with support for relative offsets, including weeks
- Formats and parses strings automatically
- Wide support for ISO 8601
- Timezone conversion
- Timestamp available as a property
- Generates time spans, ranges, floors and ceilings for time frames ranging from microsecond to year
- Humanizes and supports a growing list of contributed locales
- Extensible for your own Arrow-derived types
Quick Start
-----------
Installation
~~~~~~~~~~~~
To install Arrow, use `pip <https://pip.pypa.io/en/stable/quickstart/>`_ or `pipenv <https://docs.pipenv.org>`_:
.. code-block:: console
$ pip install -U arrow
Example Usage
~~~~~~~~~~~~~
.. code-block:: python
>>> import arrow
>>> arrow.get('2013-05-11T21:23:58.970460+07:00')
<Arrow [2013-05-11T21:23:58.970460+07:00]>
>>> utc = arrow.utcnow()
>>> utc
<Arrow [2013-05-11T21:23:58.970460+00:00]>
>>> utc = utc.shift(hours=-1)
>>> utc
<Arrow [2013-05-11T20:23:58.970460+00:00]>
>>> local = utc.to('US/Pacific')
>>> local
<Arrow [2013-05-11T13:23:58.970460-07:00]>
>>> local.timestamp
1368303838
>>> local.format()
'2013-05-11 13:23:58 -07:00'
>>> local.format('YYYY-MM-DD HH:mm:ss ZZ')
'2013-05-11 13:23:58 -07:00'
>>> local.humanize()
'an hour ago'
>>> local.humanize(locale='ko_kr')
'1시간 전'
.. end-inclusion-marker-do-not-remove
Documentation
-------------
For full documentation, please visit `arrow.readthedocs.io <https://arrow.readthedocs.io>`_.
Contributing
------------
Contributions are welcome for both code and localizations (adding and updating locales). Begin by gaining familiarity with the Arrow library and its features. Then, jump into contributing:
#. Find an issue or feature to tackle on the `issue tracker <https://github.com/arrow-py/arrow/issues>`_. Issues marked with the `"good first issue" label <https://github.com/arrow-py/arrow/issues?q=is%3Aopen+is%3Aissue+label%3A%22good+first+issue%22>`_ may be a great place to start!
#. Fork `this repository <https://github.com/arrow-py/arrow>`_ on GitHub and begin making changes in a branch.
#. Add a few tests to ensure that the bug was fixed or the feature works as expected.
#. Run the entire test suite and linting checks by running one of the following commands: :code:`tox` (if you have `tox <https://tox.readthedocs.io>`_ installed) **OR** :code:`make build38 && make test && make lint` (if you do not have Python 3.8 installed, replace :code:`build38` with the latest Python version on your system).
#. Submit a pull request and await feedback 😃.
If you have any questions along the way, feel free to ask them `here <https://github.com/arrow-py/arrow/issues/new?labels=question>`_.
Support Arrow
-------------
`Open Collective <https://opencollective.com/>`_ is an online funding platform that provides tools to raise money and share your finances with full transparency. It is the platform of choice for individuals and companies to make one-time or recurring donations directly to the project. If you are interested in making a financial contribution, please visit the `Arrow collective <https://opencollective.com/arrow>`_.

View file

@ -0,0 +1,18 @@
# -*- coding: utf-8 -*-
from ._version import __version__
from .api import get, now, utcnow
from .arrow import Arrow
from .factory import ArrowFactory
from .formatter import (
FORMAT_ATOM,
FORMAT_COOKIE,
FORMAT_RFC822,
FORMAT_RFC850,
FORMAT_RFC1036,
FORMAT_RFC1123,
FORMAT_RFC2822,
FORMAT_RFC3339,
FORMAT_RSS,
FORMAT_W3C,
)
from .parser import ParserError

View file

@ -0,0 +1 @@
__version__ = "0.17.0"

View file

@ -0,0 +1,54 @@
# -*- coding: utf-8 -*-
"""
Provides the default implementation of :class:`ArrowFactory <arrow.factory.ArrowFactory>`
methods for use as a module API.
"""
from __future__ import absolute_import
from arrow.factory import ArrowFactory
# internal default factory.
_factory = ArrowFactory()
def get(*args, **kwargs):
"""Calls the default :class:`ArrowFactory <arrow.factory.ArrowFactory>` ``get`` method."""
return _factory.get(*args, **kwargs)
get.__doc__ = _factory.get.__doc__
def utcnow():
"""Calls the default :class:`ArrowFactory <arrow.factory.ArrowFactory>` ``utcnow`` method."""
return _factory.utcnow()
utcnow.__doc__ = _factory.utcnow.__doc__
def now(tz=None):
"""Calls the default :class:`ArrowFactory <arrow.factory.ArrowFactory>` ``now`` method."""
return _factory.now(tz)
now.__doc__ = _factory.now.__doc__
def factory(type):
"""Returns an :class:`.ArrowFactory` for the specified :class:`Arrow <arrow.arrow.Arrow>`
or derived type.
:param type: the type, :class:`Arrow <arrow.arrow.Arrow>` or derived.
"""
return ArrowFactory(type)
__all__ = ["get", "utcnow", "now", "factory"]

File diff suppressed because it is too large

View file

@ -0,0 +1,9 @@
# -*- coding: utf-8 -*-
# Output of time.mktime(datetime.max.timetuple()) on macOS
# This value must be hardcoded for compatibility with Windows
# Platform-independent max timestamps are hard to form
# https://stackoverflow.com/q/46133223
MAX_TIMESTAMP = 253402318799.0
MAX_TIMESTAMP_MS = MAX_TIMESTAMP * 1000
MAX_TIMESTAMP_US = MAX_TIMESTAMP * 1000000

View file

@ -0,0 +1,301 @@
# -*- coding: utf-8 -*-
"""
Implements the :class:`ArrowFactory <arrow.factory.ArrowFactory>` class,
providing factory methods for common :class:`Arrow <arrow.arrow.Arrow>`
construction scenarios.
"""
from __future__ import absolute_import
import calendar
from datetime import date, datetime
from datetime import tzinfo as dt_tzinfo
from time import struct_time
from dateutil import tz as dateutil_tz
from arrow import parser
from arrow.arrow import Arrow
from arrow.util import is_timestamp, iso_to_gregorian, isstr
class ArrowFactory(object):
"""A factory for generating :class:`Arrow <arrow.arrow.Arrow>` objects.
:param type: (optional) the :class:`Arrow <arrow.arrow.Arrow>`-based class to construct from.
Defaults to :class:`Arrow <arrow.arrow.Arrow>`.
"""
def __init__(self, type=Arrow):
self.type = type
def get(self, *args, **kwargs):
"""Returns an :class:`Arrow <arrow.arrow.Arrow>` object based on flexible inputs.
:param locale: (optional) a ``str`` specifying a locale for the parser. Defaults to 'en_us'.
:param tzinfo: (optional) a :ref:`timezone expression <tz-expr>` or tzinfo object.
Replaces the timezone unless using an input form that is explicitly UTC or specifies
the timezone in a positional argument. Defaults to UTC.
:param normalize_whitespace: (optional) a ``bool`` specifying whether or not to normalize
redundant whitespace (spaces, tabs, and newlines) in a datetime string before parsing.
Defaults to false.
Usage::
>>> import arrow
**No inputs** to get current UTC time::
>>> arrow.get()
<Arrow [2013-05-08T05:51:43.316458+00:00]>
**None** to also get current UTC time::
>>> arrow.get(None)
<Arrow [2013-05-08T05:51:49.016458+00:00]>
**One** :class:`Arrow <arrow.arrow.Arrow>` object, to get a copy.
>>> arw = arrow.utcnow()
>>> arrow.get(arw)
<Arrow [2013-10-23T15:21:54.354846+00:00]>
**One** ``float`` or ``int``, convertible to a floating-point timestamp, to get
that timestamp in UTC::
>>> arrow.get(1367992474.293378)
<Arrow [2013-05-08T05:54:34.293378+00:00]>
>>> arrow.get(1367992474)
<Arrow [2013-05-08T05:54:34+00:00]>
**One** ISO 8601-formatted ``str``, to parse it::
>>> arrow.get('2013-09-29T01:26:43.830580')
<Arrow [2013-09-29T01:26:43.830580+00:00]>
**One** ISO 8601-formatted ``str``, in basic format, to parse it::
>>> arrow.get('20160413T133656.456289')
<Arrow [2016-04-13T13:36:56.456289+00:00]>
**One** ``tzinfo``, to get the current time **converted** to that timezone::
>>> arrow.get(tz.tzlocal())
<Arrow [2013-05-07T22:57:28.484717-07:00]>
**One** naive ``datetime``, to get that datetime in UTC::
>>> arrow.get(datetime(2013, 5, 5))
<Arrow [2013-05-05T00:00:00+00:00]>
**One** aware ``datetime``, to get that datetime::
>>> arrow.get(datetime(2013, 5, 5, tzinfo=tz.tzlocal()))
<Arrow [2013-05-05T00:00:00-07:00]>
**One** naive ``date``, to get that date in UTC::
>>> arrow.get(date(2013, 5, 5))
<Arrow [2013-05-05T00:00:00+00:00]>
**One** ``time.struct_time``, to get that time in UTC::
>>> arrow.get(gmtime(0))
<Arrow [1970-01-01T00:00:00+00:00]>
**One** iso calendar ``tuple``, to get that week date in UTC::
>>> arrow.get((2013, 18, 7))
<Arrow [2013-05-05T00:00:00+00:00]>
**Two** arguments, a naive or aware ``datetime``, and a replacement
:ref:`timezone expression <tz-expr>`::
>>> arrow.get(datetime(2013, 5, 5), 'US/Pacific')
<Arrow [2013-05-05T00:00:00-07:00]>
**Two** arguments, a naive ``date``, and a replacement
:ref:`timezone expression <tz-expr>`::
>>> arrow.get(date(2013, 5, 5), 'US/Pacific')
<Arrow [2013-05-05T00:00:00-07:00]>
**Two** arguments, both ``str``, to parse the first according to the format of the second::
>>> arrow.get('2013-05-05 12:30:45 America/Chicago', 'YYYY-MM-DD HH:mm:ss ZZZ')
<Arrow [2013-05-05T12:30:45-05:00]>
**Two** arguments, first a ``str`` to parse and second a ``list`` of formats to try::
>>> arrow.get('2013-05-05 12:30:45', ['MM/DD/YYYY', 'YYYY-MM-DD HH:mm:ss'])
<Arrow [2013-05-05T12:30:45+00:00]>
**Three or more** arguments, as for the constructor of a ``datetime``::
>>> arrow.get(2013, 5, 5, 12, 30, 45)
<Arrow [2013-05-05T12:30:45+00:00]>
"""
arg_count = len(args)
locale = kwargs.pop("locale", "en_us")
tz = kwargs.get("tzinfo", None)
normalize_whitespace = kwargs.pop("normalize_whitespace", False)
# if multiple kwargs are given, send them to the constructor
if len(kwargs) > 1:
    arg_count = 3
# a single kwarg that is not tzinfo also goes to the constructor
if len(kwargs) == 1 and tz is None:
    arg_count = 3
# () -> now, @ utc.
if arg_count == 0:
if isstr(tz):
tz = parser.TzinfoParser.parse(tz)
return self.type.now(tz)
if isinstance(tz, dt_tzinfo):
return self.type.now(tz)
return self.type.utcnow()
if arg_count == 1:
arg = args[0]
# (None) -> now, @ utc.
if arg is None:
return self.type.utcnow()
# try (int, float) -> from timestamp with tz
elif not isstr(arg) and is_timestamp(arg):
if tz is None:
# set to UTC by default
tz = dateutil_tz.tzutc()
return self.type.fromtimestamp(arg, tzinfo=tz)
# (Arrow) -> from the object's datetime.
elif isinstance(arg, Arrow):
return self.type.fromdatetime(arg.datetime)
# (datetime) -> from datetime.
elif isinstance(arg, datetime):
return self.type.fromdatetime(arg)
# (date) -> from date.
elif isinstance(arg, date):
return self.type.fromdate(arg)
# (tzinfo) -> now, @ tzinfo.
elif isinstance(arg, dt_tzinfo):
return self.type.now(arg)
# (str) -> parse.
elif isstr(arg):
dt = parser.DateTimeParser(locale).parse_iso(arg, normalize_whitespace)
return self.type.fromdatetime(dt, tz)
# (struct_time) -> from struct_time
elif isinstance(arg, struct_time):
return self.type.utcfromtimestamp(calendar.timegm(arg))
# (iso calendar) -> convert then from date
elif isinstance(arg, tuple) and len(arg) == 3:
dt = iso_to_gregorian(*arg)
return self.type.fromdate(dt)
else:
raise TypeError(
"Can't parse single argument of type '{}'".format(type(arg))
)
elif arg_count == 2:
arg_1, arg_2 = args[0], args[1]
if isinstance(arg_1, datetime):
# (datetime, tzinfo/str) -> fromdatetime replace tzinfo.
if isinstance(arg_2, dt_tzinfo) or isstr(arg_2):
return self.type.fromdatetime(arg_1, arg_2)
else:
raise TypeError(
"Can't parse two arguments of types 'datetime', '{}'".format(
type(arg_2)
)
)
elif isinstance(arg_1, date):
# (date, tzinfo/str) -> fromdate replace tzinfo.
if isinstance(arg_2, dt_tzinfo) or isstr(arg_2):
return self.type.fromdate(arg_1, tzinfo=arg_2)
else:
raise TypeError(
"Can't parse two arguments of types 'date', '{}'".format(
type(arg_2)
)
)
# (str, format) -> parse.
elif isstr(arg_1) and (isstr(arg_2) or isinstance(arg_2, list)):
dt = parser.DateTimeParser(locale).parse(
args[0], args[1], normalize_whitespace
)
return self.type.fromdatetime(dt, tzinfo=tz)
else:
raise TypeError(
"Can't parse two arguments of types '{}' and '{}'".format(
type(arg_1), type(arg_2)
)
)
# 3+ args -> datetime-like via constructor.
else:
return self.type(*args, **kwargs)
def utcnow(self):
"""Returns an :class:`Arrow <arrow.arrow.Arrow>` object, representing "now" in UTC time.
Usage::
>>> import arrow
>>> arrow.utcnow()
<Arrow [2013-05-08T05:19:07.018993+00:00]>
"""
return self.type.utcnow()
def now(self, tz=None):
"""Returns an :class:`Arrow <arrow.arrow.Arrow>` object, representing "now" in the given
timezone.
:param tz: (optional) A :ref:`timezone expression <tz-expr>`. Defaults to local time.
Usage::
>>> import arrow
>>> arrow.now()
<Arrow [2013-05-07T22:19:11.363410-07:00]>
>>> arrow.now('US/Pacific')
<Arrow [2013-05-07T22:19:15.251821-07:00]>
>>> arrow.now('+02:00')
<Arrow [2013-05-08T07:19:25.618646+02:00]>
>>> arrow.now('local')
<Arrow [2013-05-07T22:19:39.130059-07:00]>
"""
if tz is None:
tz = dateutil_tz.tzlocal()
elif not isinstance(tz, dt_tzinfo):
tz = parser.TzinfoParser.parse(tz)
return self.type.now(tz)

View file

@ -0,0 +1,139 @@
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division
import calendar
import re
from dateutil import tz as dateutil_tz
from arrow import locales, util
FORMAT_ATOM = "YYYY-MM-DD HH:mm:ssZZ"
FORMAT_COOKIE = "dddd, DD-MMM-YYYY HH:mm:ss ZZZ"
FORMAT_RFC822 = "ddd, DD MMM YY HH:mm:ss Z"
FORMAT_RFC850 = "dddd, DD-MMM-YY HH:mm:ss ZZZ"
FORMAT_RFC1036 = "ddd, DD MMM YY HH:mm:ss Z"
FORMAT_RFC1123 = "ddd, DD MMM YYYY HH:mm:ss Z"
FORMAT_RFC2822 = "ddd, DD MMM YYYY HH:mm:ss Z"
FORMAT_RFC3339 = "YYYY-MM-DD HH:mm:ssZZ"
FORMAT_RSS = "ddd, DD MMM YYYY HH:mm:ss Z"
FORMAT_W3C = "YYYY-MM-DD HH:mm:ssZZ"
class DateTimeFormatter(object):
# Characters enclosed in square brackets are matched as an atomic group.
# For more info on atomic groups and how they are emulated in Python's
# re library, see https://stackoverflow.com/a/13577411/2701578
_FORMAT_RE = re.compile(
r"(\[(?:(?=(?P<literal>[^]]))(?P=literal))*\]|YYY?Y?|MM?M?M?|Do|DD?D?D?|d?dd?d?|HH?|hh?|mm?|ss?|SS?S?S?S?S?|ZZ?Z?|a|A|X|x|W)"
)
def __init__(self, locale="en_us"):
self.locale = locales.get_locale(locale)
def format(self, dt, fmt):
    return self._FORMAT_RE.sub(lambda m: self._format_token(dt, m.group(0)), fmt)
def _format_token(self, dt, token):
if token and token.startswith("[") and token.endswith("]"):
return token[1:-1]
if token == "YYYY":
return self.locale.year_full(dt.year)
if token == "YY":
return self.locale.year_abbreviation(dt.year)
if token == "MMMM":
return self.locale.month_name(dt.month)
if token == "MMM":
return self.locale.month_abbreviation(dt.month)
if token == "MM":
return "{:02d}".format(dt.month)
if token == "M":
return str(dt.month)
if token == "DDDD":
return "{:03d}".format(dt.timetuple().tm_yday)
if token == "DDD":
return str(dt.timetuple().tm_yday)
if token == "DD":
return "{:02d}".format(dt.day)
if token == "D":
return str(dt.day)
if token == "Do":
return self.locale.ordinal_number(dt.day)
if token == "dddd":
return self.locale.day_name(dt.isoweekday())
if token == "ddd":
return self.locale.day_abbreviation(dt.isoweekday())
if token == "d":
return str(dt.isoweekday())
if token == "HH":
return "{:02d}".format(dt.hour)
if token == "H":
return str(dt.hour)
if token == "hh":
return "{:02d}".format(dt.hour if 0 < dt.hour < 13 else abs(dt.hour - 12))
if token == "h":
return str(dt.hour if 0 < dt.hour < 13 else abs(dt.hour - 12))
if token == "mm":
return "{:02d}".format(dt.minute)
if token == "m":
return str(dt.minute)
if token == "ss":
return "{:02d}".format(dt.second)
if token == "s":
return str(dt.second)
if token == "SSSSSS":
return str("{:06d}".format(int(dt.microsecond)))
if token == "SSSSS":
return str("{:05d}".format(int(dt.microsecond / 10)))
if token == "SSSS":
return str("{:04d}".format(int(dt.microsecond / 100)))
if token == "SSS":
return str("{:03d}".format(int(dt.microsecond / 1000)))
if token == "SS":
return str("{:02d}".format(int(dt.microsecond / 10000)))
if token == "S":
return str(int(dt.microsecond / 100000))
if token == "X":
# TODO: replace with a call to dt.timestamp() when we drop Python 2.7
return str(calendar.timegm(dt.utctimetuple()))
if token == "x":
# TODO: replace with a call to dt.timestamp() when we drop Python 2.7
ts = calendar.timegm(dt.utctimetuple()) + (dt.microsecond / 1000000)
return str(int(ts * 1000000))
if token == "ZZZ":
return dt.tzname()
if token in ["ZZ", "Z"]:
separator = ":" if token == "ZZ" else ""
tz = dateutil_tz.tzutc() if dt.tzinfo is None else dt.tzinfo
total_minutes = int(util.total_seconds(tz.utcoffset(dt)) / 60)
sign = "+" if total_minutes >= 0 else "-"
total_minutes = abs(total_minutes)
hour, minute = divmod(total_minutes, 60)
return "{}{:02d}{}{:02d}".format(sign, hour, separator, minute)
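# Illustrative note (not part of the vendored module): for UTC+05:30,
# total_minutes is 330, divmod(330, 60) gives (5, 30), and the result is
# "+05:30" for the ZZ token or "+0530" for Z.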
if token in ("a", "A"):
return self.locale.meridian(dt.hour, token)
if token == "W":
year, week, day = dt.isocalendar()
return "{}-W{:02d}-{}".format(year, week, day)
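# A minimal usage sketch (illustrative, not part of the vendored module):
# formatting is a single regex pass in which each token is replaced by its
# rendered value and bracketed literals are passed through verbatim.
#
#     >>> from datetime import datetime
#     >>> DateTimeFormatter().format(datetime(2013, 5, 8, 5, 19, 7), "YYYY-MM-DD [at] HH:mm:ss")
#     '2013-05-08 at 05:19:07'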

File diff suppressed because it is too large

View file

@ -0,0 +1,596 @@
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
import re
from datetime import datetime, timedelta
from dateutil import tz
from arrow import locales
from arrow.util import iso_to_gregorian, next_weekday, normalize_timestamp
try:
from functools import lru_cache
except ImportError: # pragma: no cover
from backports.functools_lru_cache import lru_cache # pragma: no cover
class ParserError(ValueError):
pass
# Allows for ParserErrors to be propagated from _build_datetime()
# when day_of_year errors occur.
# Before this, the ParserErrors were caught by the try/except in
# _parse_multiformat() and the appropriate error message was not
# transmitted to the user.
class ParserMatchError(ParserError):
pass
class DateTimeParser(object):
_FORMAT_RE = re.compile(
r"(YYY?Y?|MM?M?M?|Do|DD?D?D?|d?d?d?d|HH?|hh?|mm?|ss?|S+|ZZ?Z?|a|A|x|X|W)"
)
_ESCAPE_RE = re.compile(r"\[[^\[\]]*\]")
_ONE_OR_TWO_DIGIT_RE = re.compile(r"\d{1,2}")
_ONE_OR_TWO_OR_THREE_DIGIT_RE = re.compile(r"\d{1,3}")
_ONE_OR_MORE_DIGIT_RE = re.compile(r"\d+")
_TWO_DIGIT_RE = re.compile(r"\d{2}")
_THREE_DIGIT_RE = re.compile(r"\d{3}")
_FOUR_DIGIT_RE = re.compile(r"\d{4}")
_TZ_Z_RE = re.compile(r"([\+\-])(\d{2})(?:(\d{2}))?|Z")
_TZ_ZZ_RE = re.compile(r"([\+\-])(\d{2})(?:\:(\d{2}))?|Z")
_TZ_NAME_RE = re.compile(r"\w[\w+\-/]+")
# NOTE: timestamps cannot be parsed from natural language strings (by removing the ^...$) because it will
# break cases like "15 Jul 2000" and a format list (see issue #447)
_TIMESTAMP_RE = re.compile(r"^\-?\d+\.?\d+$")
_TIMESTAMP_EXPANDED_RE = re.compile(r"^\-?\d+$")
_TIME_RE = re.compile(r"^(\d{2})(?:\:?(\d{2}))?(?:\:?(\d{2}))?(?:([\.\,])(\d+))?$")
_WEEK_DATE_RE = re.compile(r"(?P<year>\d{4})[\-]?W(?P<week>\d{2})[\-]?(?P<day>\d)?")
_BASE_INPUT_RE_MAP = {
"YYYY": _FOUR_DIGIT_RE,
"YY": _TWO_DIGIT_RE,
"MM": _TWO_DIGIT_RE,
"M": _ONE_OR_TWO_DIGIT_RE,
"DDDD": _THREE_DIGIT_RE,
"DDD": _ONE_OR_TWO_OR_THREE_DIGIT_RE,
"DD": _TWO_DIGIT_RE,
"D": _ONE_OR_TWO_DIGIT_RE,
"HH": _TWO_DIGIT_RE,
"H": _ONE_OR_TWO_DIGIT_RE,
"hh": _TWO_DIGIT_RE,
"h": _ONE_OR_TWO_DIGIT_RE,
"mm": _TWO_DIGIT_RE,
"m": _ONE_OR_TWO_DIGIT_RE,
"ss": _TWO_DIGIT_RE,
"s": _ONE_OR_TWO_DIGIT_RE,
"X": _TIMESTAMP_RE,
"x": _TIMESTAMP_EXPANDED_RE,
"ZZZ": _TZ_NAME_RE,
"ZZ": _TZ_ZZ_RE,
"Z": _TZ_Z_RE,
"S": _ONE_OR_MORE_DIGIT_RE,
"W": _WEEK_DATE_RE,
}
SEPARATORS = ["-", "/", "."]
def __init__(self, locale="en_us", cache_size=0):
self.locale = locales.get_locale(locale)
self._input_re_map = self._BASE_INPUT_RE_MAP.copy()
self._input_re_map.update(
{
"MMMM": self._generate_choice_re(
self.locale.month_names[1:], re.IGNORECASE
),
"MMM": self._generate_choice_re(
self.locale.month_abbreviations[1:], re.IGNORECASE
),
"Do": re.compile(self.locale.ordinal_day_re),
"dddd": self._generate_choice_re(
self.locale.day_names[1:], re.IGNORECASE
),
"ddd": self._generate_choice_re(
self.locale.day_abbreviations[1:], re.IGNORECASE
),
"d": re.compile(r"[1-7]"),
"a": self._generate_choice_re(
(self.locale.meridians["am"], self.locale.meridians["pm"])
),
# note: 'A' token accepts both 'am/pm' and 'AM/PM' formats to
# ensure backwards compatibility of this token
"A": self._generate_choice_re(self.locale.meridians.values()),
}
)
if cache_size > 0:
self._generate_pattern_re = lru_cache(maxsize=cache_size)(
self._generate_pattern_re
)
# TODO: since we support more than ISO 8601, we should rename this function
# IDEA: break into multiple functions
def parse_iso(self, datetime_string, normalize_whitespace=False):
if normalize_whitespace:
datetime_string = re.sub(r"\s+", " ", datetime_string.strip())
has_space_divider = " " in datetime_string
has_t_divider = "T" in datetime_string
num_spaces = datetime_string.count(" ")
if has_space_divider and num_spaces != 1 or has_t_divider and num_spaces > 0:
raise ParserError(
"Expected an ISO 8601-like string, but was given '{}'. Try passing in a format string to resolve this.".format(
datetime_string
)
)
has_time = has_space_divider or has_t_divider
has_tz = False
# date formats (ISO 8601 and others) to test against
# NOTE: YYYYMM is omitted to avoid confusion with YYMMDD (no longer part of ISO 8601, but is still often used)
formats = [
"YYYY-MM-DD",
"YYYY-M-DD",
"YYYY-M-D",
"YYYY/MM/DD",
"YYYY/M/DD",
"YYYY/M/D",
"YYYY.MM.DD",
"YYYY.M.DD",
"YYYY.M.D",
"YYYYMMDD",
"YYYY-DDDD",
"YYYYDDDD",
"YYYY-MM",
"YYYY/MM",
"YYYY.MM",
"YYYY",
"W",
]
if has_time:
if has_space_divider:
date_string, time_string = datetime_string.split(" ", 1)
else:
date_string, time_string = datetime_string.split("T", 1)
time_parts = re.split(r"[\+\-Z]", time_string, 1, re.IGNORECASE)
time_components = self._TIME_RE.match(time_parts[0])
if time_components is None:
raise ParserError(
"Invalid time component provided. Please specify a format or provide a valid time component in the basic or extended ISO 8601 time format."
)
(
hours,
minutes,
seconds,
subseconds_sep,
subseconds,
) = time_components.groups()
has_tz = len(time_parts) == 2
has_minutes = minutes is not None
has_seconds = seconds is not None
has_subseconds = subseconds is not None
is_basic_time_format = ":" not in time_parts[0]
tz_format = "Z"
# use 'ZZ' token instead since tz offset is present in non-basic format
if has_tz and ":" in time_parts[1]:
tz_format = "ZZ"
time_sep = "" if is_basic_time_format else ":"
if has_subseconds:
time_string = "HH{time_sep}mm{time_sep}ss{subseconds_sep}S".format(
time_sep=time_sep, subseconds_sep=subseconds_sep
)
elif has_seconds:
time_string = "HH{time_sep}mm{time_sep}ss".format(time_sep=time_sep)
elif has_minutes:
time_string = "HH{time_sep}mm".format(time_sep=time_sep)
else:
time_string = "HH"
if has_space_divider:
formats = ["{} {}".format(f, time_string) for f in formats]
else:
formats = ["{}T{}".format(f, time_string) for f in formats]
if has_time and has_tz:
# Add "Z" or "ZZ" to the format strings to indicate to
# _parse_token() that a timezone needs to be parsed
formats = ["{}{}".format(f, tz_format) for f in formats]
return self._parse_multiformat(datetime_string, formats)
def parse(self, datetime_string, fmt, normalize_whitespace=False):
if normalize_whitespace:
datetime_string = re.sub(r"\s+", " ", datetime_string)
if isinstance(fmt, list):
return self._parse_multiformat(datetime_string, fmt)
fmt_tokens, fmt_pattern_re = self._generate_pattern_re(fmt)
match = fmt_pattern_re.search(datetime_string)
if match is None:
raise ParserMatchError(
"Failed to match '{}' when parsing '{}'".format(fmt, datetime_string)
)
parts = {}
for token in fmt_tokens:
if token == "Do":
value = match.group("value")
elif token == "W":
value = (match.group("year"), match.group("week"), match.group("day"))
else:
value = match.group(token)
self._parse_token(token, value, parts)
return self._build_datetime(parts)
def _generate_pattern_re(self, fmt):
# fmt is a string of tokens like 'YYYY-MM-DD'
# we construct a new string by replacing each
# token by its pattern:
# 'YYYY-MM-DD' -> '(?P<YYYY>\d{4})-(?P<MM>\d{2})-(?P<DD>\d{2})'
tokens = []
offset = 0
# Escape all special RegEx chars
escaped_fmt = re.escape(fmt)
# Extract the bracketed expressions to be reinserted later.
escaped_fmt = re.sub(self._ESCAPE_RE, "#", escaped_fmt)
# Any number of S is the same as one.
# TODO: allow users to specify the number of digits to parse
escaped_fmt = re.sub(r"S+", "S", escaped_fmt)
escaped_data = re.findall(self._ESCAPE_RE, fmt)
fmt_pattern = escaped_fmt
for m in self._FORMAT_RE.finditer(escaped_fmt):
token = m.group(0)
try:
input_re = self._input_re_map[token]
except KeyError:
raise ParserError("Unrecognized token '{}'".format(token))
input_pattern = "(?P<{}>{})".format(token, input_re.pattern)
tokens.append(token)
# a pattern doesn't have the same length as the token
# it replaces! We keep the difference in the offset variable.
# This works because the string is scanned left-to-right and matches
# are returned in the order found by finditer.
fmt_pattern = (
fmt_pattern[: m.start() + offset]
+ input_pattern
+ fmt_pattern[m.end() + offset :]
)
offset += len(input_pattern) - (m.end() - m.start())
final_fmt_pattern = ""
split_fmt = fmt_pattern.split(r"\#")
# Due to the way Python splits, 'split_fmt' will always be one element longer than 'escaped_data'
for i in range(len(split_fmt)):
final_fmt_pattern += split_fmt[i]
if i < len(escaped_data):
final_fmt_pattern += escaped_data[i][1:-1]
# Wrap final_fmt_pattern in a custom word boundary to strictly
# match the formatting pattern and filter out date and time formats
# that include junk such as: blah1998-09-12 blah, blah 1998-09-12blah,
# blah1998-09-12blah. The custom word boundary matches every character
# that is not a whitespace character to allow for searching for a date
# and time string in a natural language sentence. Therefore, searching
# for a string of the form YYYY-MM-DD in "blah 1998-09-12 blah" will
# work properly.
# Certain punctuation before or after the target pattern such as
# "1998-09-12," is permitted. For the full list of valid punctuation,
# see the documentation.
starting_word_boundary = (
r"(?<!\S\S)" # Don't have two consecutive non-whitespace characters. This ensures that we allow cases like .11.25.2019 but not 1.11.25.2019 (for pattern MM.DD.YYYY)
r"(?<![^\,\.\;\:\?\!\"\'\`\[\]\{\}\(\)<>\s])" # This is the list of punctuation that is ok before the pattern (i.e. if anything precedes the pattern, it must be one of these characters or whitespace)
r"(\b|^)" # The \b is to block cases like 1201912 but allow 201912 for pattern YYYYMM. The ^ was necessary to allow a negative number through i.e. before epoch numbers
)
ending_word_boundary = (
r"(?=[\,\.\;\:\?\!\"\'\`\[\]\{\}\(\)\<\>]?" # Positive lookahead stating that these punctuation marks can appear after the pattern at most 1 time
r"(?!\S))" # Don't allow any non-whitespace character after the punctuation
)
bounded_fmt_pattern = r"{}{}{}".format(
starting_word_boundary, final_fmt_pattern, ending_word_boundary
)
return tokens, re.compile(bounded_fmt_pattern, flags=re.IGNORECASE)
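# Illustrative result (a sketch): for fmt "YYYY-MM-DD" this returns the token
# list ["YYYY", "MM", "DD"] plus a compiled pattern that finds "1998-09-12" in
# "blah 1998-09-12 blah" but, thanks to the word boundaries above, not in
# "blah1998-09-12".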
def _parse_token(self, token, value, parts):
if token == "YYYY":
parts["year"] = int(value)
elif token == "YY":
value = int(value)
parts["year"] = 1900 + value if value > 68 else 2000 + value
elif token in ["MMMM", "MMM"]:
parts["month"] = self.locale.month_number(value.lower())
elif token in ["MM", "M"]:
parts["month"] = int(value)
elif token in ["DDDD", "DDD"]:
parts["day_of_year"] = int(value)
elif token in ["DD", "D"]:
parts["day"] = int(value)
elif token == "Do":
parts["day"] = int(value)
elif token == "dddd":
# locale day names are 1-indexed
day_of_week = [x.lower() for x in self.locale.day_names].index(
value.lower()
)
parts["day_of_week"] = day_of_week - 1
elif token == "ddd":
# locale day abbreviations are 1-indexed
day_of_week = [x.lower() for x in self.locale.day_abbreviations].index(
value.lower()
)
parts["day_of_week"] = day_of_week - 1
elif token.upper() in ["HH", "H"]:
parts["hour"] = int(value)
elif token in ["mm", "m"]:
parts["minute"] = int(value)
elif token in ["ss", "s"]:
parts["second"] = int(value)
elif token == "S":
# We have the *most significant* digits of an arbitrary-precision integer.
# We want the six most significant digits as an integer, rounded.
# IDEA: add nanosecond support somehow? Need datetime support for it first.
value = value.ljust(7, str("0"))
# floating-point (IEEE-754) defaults to half-to-even rounding
seventh_digit = int(value[6])
if seventh_digit == 5:
rounding = int(value[5]) % 2
elif seventh_digit > 5:
rounding = 1
else:
rounding = 0
parts["microsecond"] = int(value[:6]) + rounding
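# Illustrative trace: for value "123456789", ljust is a no-op, the seventh
# digit is 7 (> 5), so rounding is 1 and microsecond becomes 123457.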
elif token == "X":
parts["timestamp"] = float(value)
elif token == "x":
parts["expanded_timestamp"] = int(value)
elif token in ["ZZZ", "ZZ", "Z"]:
parts["tzinfo"] = TzinfoParser.parse(value)
elif token in ["a", "A"]:
if value in (self.locale.meridians["am"], self.locale.meridians["AM"]):
parts["am_pm"] = "am"
elif value in (self.locale.meridians["pm"], self.locale.meridians["PM"]):
parts["am_pm"] = "pm"
elif token == "W":
parts["weekdate"] = value
@staticmethod
def _build_datetime(parts):
weekdate = parts.get("weekdate")
if weekdate is not None:
# we can use strptime (%G, %V, %u) in python 3.6 but these tokens aren't available before that
year, week = int(weekdate[0]), int(weekdate[1])
if weekdate[2] is not None:
day = int(weekdate[2])
else:
# day not given, default to 1
day = 1
dt = iso_to_gregorian(year, week, day)
parts["year"] = dt.year
parts["month"] = dt.month
parts["day"] = dt.day
timestamp = parts.get("timestamp")
if timestamp is not None:
return datetime.fromtimestamp(timestamp, tz=tz.tzutc())
expanded_timestamp = parts.get("expanded_timestamp")
if expanded_timestamp is not None:
return datetime.fromtimestamp(
normalize_timestamp(expanded_timestamp),
tz=tz.tzutc(),
)
day_of_year = parts.get("day_of_year")
if day_of_year is not None:
year = parts.get("year")
month = parts.get("month")
if year is None:
raise ParserError(
"Year component is required with the DDD and DDDD tokens."
)
if month is not None:
raise ParserError(
"Month component is not allowed with the DDD and DDDD tokens."
)
date_string = "{}-{}".format(year, day_of_year)
try:
dt = datetime.strptime(date_string, "%Y-%j")
except ValueError:
raise ParserError(
"The provided day of year '{}' is invalid.".format(day_of_year)
)
parts["year"] = dt.year
parts["month"] = dt.month
parts["day"] = dt.day
day_of_week = parts.get("day_of_week")
day = parts.get("day")
# If day is passed, ignore day of week
if day_of_week is not None and day is None:
year = parts.get("year", 1970)
month = parts.get("month", 1)
day = 1
# dddd => first day of week after epoch
# dddd YYYY => first day of week in specified year
# dddd MM YYYY => first day of week in specified year and month
# dddd MM => first day after epoch in specified month
next_weekday_dt = next_weekday(datetime(year, month, day), day_of_week)
parts["year"] = next_weekday_dt.year
parts["month"] = next_weekday_dt.month
parts["day"] = next_weekday_dt.day
am_pm = parts.get("am_pm")
hour = parts.get("hour", 0)
if am_pm == "pm" and hour < 12:
hour += 12
elif am_pm == "am" and hour == 12:
hour = 0
# Support for midnight at the end of day
if hour == 24:
if parts.get("minute", 0) != 0:
raise ParserError("Midnight at the end of day must not contain minutes")
if parts.get("second", 0) != 0:
raise ParserError("Midnight at the end of day must not contain seconds")
if parts.get("microsecond", 0) != 0:
raise ParserError(
"Midnight at the end of day must not contain microseconds"
)
hour = 0
day_increment = 1
else:
day_increment = 0
# account for rounding up to 1000000
microsecond = parts.get("microsecond", 0)
if microsecond == 1000000:
microsecond = 0
second_increment = 1
else:
second_increment = 0
increment = timedelta(days=day_increment, seconds=second_increment)
return (
datetime(
year=parts.get("year", 1),
month=parts.get("month", 1),
day=parts.get("day", 1),
hour=hour,
minute=parts.get("minute", 0),
second=parts.get("second", 0),
microsecond=microsecond,
tzinfo=parts.get("tzinfo"),
)
+ increment
)
def _parse_multiformat(self, string, formats):
_datetime = None
for fmt in formats:
try:
_datetime = self.parse(string, fmt)
break
except ParserMatchError:
pass
if _datetime is None:
raise ParserError(
"Could not match input '{}' to any of the following formats: {}".format(
string, ", ".join(formats)
)
)
return _datetime
# generates a capture group of choices separated by an OR operator
@staticmethod
def _generate_choice_re(choices, flags=0):
return re.compile(r"({})".format("|".join(choices)), flags=flags)
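# Usage sketch (illustrative, not part of the vendored module):
#
#     >>> DateTimeParser().parse("2013-05-05 12:30:45", "YYYY-MM-DD HH:mm:ss")
#     datetime.datetime(2013, 5, 5, 12, 30, 45)
#     >>> DateTimeParser().parse_iso("2013-05-05T12:30:45+02:00")
#     datetime.datetime(2013, 5, 5, 12, 30, 45, tzinfo=tzoffset(None, 7200))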
class TzinfoParser(object):
_TZINFO_RE = re.compile(r"^([\+\-])?(\d{2})(?:\:?(\d{2}))?$")
@classmethod
def parse(cls, tzinfo_string):
tzinfo = None
if tzinfo_string == "local":
tzinfo = tz.tzlocal()
elif tzinfo_string in ["utc", "UTC", "Z"]:
tzinfo = tz.tzutc()
else:
iso_match = cls._TZINFO_RE.match(tzinfo_string)
if iso_match:
sign, hours, minutes = iso_match.groups()
if minutes is None:
minutes = 0
seconds = int(hours) * 3600 + int(minutes) * 60
if sign == "-":
seconds *= -1
tzinfo = tz.tzoffset(None, seconds)
else:
tzinfo = tz.gettz(tzinfo_string)
if tzinfo is None:
raise ParserError(
'Could not parse timezone expression "{}"'.format(tzinfo_string)
)
return tzinfo
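# Illustrative examples: TzinfoParser.parse("+02:00") -> tzoffset(None, 7200),
# TzinfoParser.parse("local") -> tzlocal(), and TzinfoParser.parse("US/Pacific")
# resolves through dateutil's tz.gettz().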

View file

@ -0,0 +1,115 @@
# -*- coding: utf-8 -*-
from __future__ import absolute_import
import datetime
import numbers
from dateutil.rrule import WEEKLY, rrule
from arrow.constants import MAX_TIMESTAMP, MAX_TIMESTAMP_MS, MAX_TIMESTAMP_US
def next_weekday(start_date, weekday):
"""Get next weekday from the specified start date.
:param start_date: Datetime object representing the start date.
:param weekday: Next weekday to obtain. Can be a value between 0 (Monday) and 6 (Sunday).
:return: Datetime object corresponding to the next weekday after start_date.
Usage::
# Get first Monday after epoch
>>> next_weekday(datetime(1970, 1, 1), 0)
1970-01-05 00:00:00
# Get first Thursday after epoch
>>> next_weekday(datetime(1970, 1, 1), 3)
1970-01-01 00:00:00
# Get first Sunday after epoch
>>> next_weekday(datetime(1970, 1, 1), 6)
1970-01-04 00:00:00
"""
if weekday < 0 or weekday > 6:
raise ValueError("Weekday must be between 0 (Monday) and 6 (Sunday).")
return rrule(freq=WEEKLY, dtstart=start_date, byweekday=weekday, count=1)[0]
def total_seconds(td):
"""Get total seconds for timedelta."""
return td.total_seconds()
def is_timestamp(value):
"""Check if value is a valid timestamp."""
if isinstance(value, bool):
return False
if not (
isinstance(value, numbers.Integral)
or isinstance(value, float)
or isinstance(value, str)
):
return False
try:
float(value)
return True
except ValueError:
return False
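# Illustrative behaviour: is_timestamp(1367900664) and is_timestamp("1367900664.15")
# are True; is_timestamp(True) and is_timestamp("2013-05-07") are False.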
def normalize_timestamp(timestamp):
"""Normalize millisecond and microsecond timestamps into normal timestamps."""
if timestamp > MAX_TIMESTAMP:
if timestamp < MAX_TIMESTAMP_MS:
timestamp /= 1e3
elif timestamp < MAX_TIMESTAMP_US:
timestamp /= 1e6
else:
raise ValueError(
"The specified timestamp '{}' is too large.".format(timestamp)
)
return timestamp
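# Illustrative examples, assuming the MAX_TIMESTAMP_* constants from
# arrow.constants: normalize_timestamp(1569980330813) -> 1569980330.813
# (milliseconds) and normalize_timestamp(1569980330813221) -> 1569980330.813221
# (microseconds).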
# Credit to https://stackoverflow.com/a/1700069
def iso_to_gregorian(iso_year, iso_week, iso_day):
"""Converts an ISO week date tuple into a datetime object."""
if not 1 <= iso_week <= 53:
raise ValueError("ISO Calendar week value must be between 1-53.")
if not 1 <= iso_day <= 7:
raise ValueError("ISO Calendar day value must be between 1-7.")
# The first week of the year always contains 4 Jan.
fourth_jan = datetime.date(iso_year, 1, 4)
delta = datetime.timedelta(fourth_jan.isoweekday() - 1)
year_start = fourth_jan - delta
gregorian = year_start + datetime.timedelta(days=iso_day - 1, weeks=iso_week - 1)
return gregorian
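# Illustrative check: iso_to_gregorian(2019, 17, 1) -> datetime.date(2019, 4, 22),
# the Monday of ISO week 17 of 2019.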
def validate_bounds(bounds):
if bounds != "()" and bounds != "(]" and bounds != "[)" and bounds != "[]":
raise ValueError(
'Invalid bounds. Please select between "()", "(]", "[)", or "[]".'
)
# Python 2.7 / 3.0+ definitions for isstr function.
try: # pragma: no cover
basestring
def isstr(s):
return isinstance(s, basestring) # noqa: F821
except NameError: # pragma: no cover
def isstr(s):
return isinstance(s, str)
__all__ = ["next_weekday", "total_seconds", "is_timestamp", "isstr", "iso_to_gregorian"]

View file

@ -0,0 +1,20 @@
# Minimal makefile for Sphinx documentation
#
# You can set these variables from the command line, and also
# from the environment for the first two.
SPHINXOPTS ?=
SPHINXBUILD ?= sphinx-build
SOURCEDIR = .
BUILDDIR = _build
# Put it first so that "make" without argument is like "make help".
help:
@$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
.PHONY: help Makefile
# Catch-all target: route all unknown targets to Sphinx using the new
# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
%: Makefile
@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
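# Usage note (not part of the original Makefile): e.g. "make html" forwards to
# 'sphinx-build -M html . _build', writing the rendered docs to _build/html.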

View file

@ -0,0 +1,62 @@
# -*- coding: utf-8 -*-
# -- Path setup --------------------------------------------------------------
import io
import os
import sys
sys.path.insert(0, os.path.abspath(".."))
about = {}
with io.open("../arrow/_version.py", "r", encoding="utf-8") as f:
exec(f.read(), about)
# -- Project information -----------------------------------------------------
project = u"Arrow 🏹"
copyright = "2020, Chris Smith"
author = "Chris Smith"
release = about["__version__"]
# -- General configuration ---------------------------------------------------
extensions = ["sphinx.ext.autodoc"]
templates_path = []
exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"]
master_doc = "index"
source_suffix = ".rst"
pygments_style = "sphinx"
language = None
# -- Options for HTML output -------------------------------------------------
html_theme = "alabaster"
html_theme_path = []
html_static_path = []
html_show_sourcelink = False
html_show_sphinx = False
html_show_copyright = True
# https://alabaster.readthedocs.io/en/latest/customization.html
html_theme_options = {
"description": "Arrow is a sensible and human-friendly approach to dates, times and timestamps.",
"github_user": "arrow-py",
"github_repo": "arrow",
"github_banner": True,
"show_related": False,
"show_powered_by": False,
"github_button": True,
"github_type": "star",
"github_count": "true", # must be a string
}
html_sidebars = {
"**": ["about.html", "localtoc.html", "relations.html", "searchbox.html"]
}

View file

@ -0,0 +1,566 @@
Arrow: Better dates & times for Python
======================================
Release v\ |release| (`Installation`_) (`Changelog <releases.html>`_)
.. include:: ../README.rst
:start-after: start-inclusion-marker-do-not-remove
:end-before: end-inclusion-marker-do-not-remove
User's Guide
------------
Creation
~~~~~~~~
Get 'now' easily:
.. code-block:: python
>>> arrow.utcnow()
<Arrow [2013-05-07T04:20:39.369271+00:00]>
>>> arrow.now()
<Arrow [2013-05-06T21:20:40.841085-07:00]>
>>> arrow.now('US/Pacific')
<Arrow [2013-05-06T21:20:44.761511-07:00]>
Create from timestamps (:code:`int` or :code:`float`):
.. code-block:: python
>>> arrow.get(1367900664)
<Arrow [2013-05-07T04:24:24+00:00]>
>>> arrow.get(1367900664.152325)
<Arrow [2013-05-07T04:24:24.152325+00:00]>
Use a naive or timezone-aware datetime, or flexibly specify a timezone:
.. code-block:: python
>>> arrow.get(datetime.utcnow())
<Arrow [2013-05-07T04:24:24.152325+00:00]>
>>> arrow.get(datetime(2013, 5, 5), 'US/Pacific')
<Arrow [2013-05-05T00:00:00-07:00]>
>>> from dateutil import tz
>>> arrow.get(datetime(2013, 5, 5), tz.gettz('US/Pacific'))
<Arrow [2013-05-05T00:00:00-07:00]>
>>> arrow.get(datetime.now(tz.gettz('US/Pacific')))
<Arrow [2013-05-06T21:24:49.552236-07:00]>
Parse from a string:
.. code-block:: python
>>> arrow.get('2013-05-05 12:30:45', 'YYYY-MM-DD HH:mm:ss')
<Arrow [2013-05-05T12:30:45+00:00]>
Search a date in a string:
.. code-block:: python
>>> arrow.get('June was born in May 1980', 'MMMM YYYY')
<Arrow [1980-05-01T00:00:00+00:00]>
Some ISO 8601 compliant strings are recognized and parsed without a format string:
.. code-block:: python
>>> arrow.get('2013-09-30T15:34:00.000-07:00')
<Arrow [2013-09-30T15:34:00-07:00]>
Arrow objects can be instantiated directly too, with the same arguments as a datetime:
.. code-block:: python
>>> arrow.get(2013, 5, 5)
<Arrow [2013-05-05T00:00:00+00:00]>
>>> arrow.Arrow(2013, 5, 5)
<Arrow [2013-05-05T00:00:00+00:00]>
Properties
~~~~~~~~~~
Get a datetime or timestamp representation:
.. code-block:: python
>>> a = arrow.utcnow()
>>> a.datetime
datetime.datetime(2013, 5, 7, 4, 38, 15, 447644, tzinfo=tzutc())
>>> a.timestamp
1367901495
Get a naive datetime, and tzinfo:
.. code-block:: python
>>> a.naive
datetime.datetime(2013, 5, 7, 4, 38, 15, 447644)
>>> a.tzinfo
tzutc()
Get any datetime value:
.. code-block:: python
>>> a.year
2013
Call datetime functions that return properties:
.. code-block:: python
>>> a.date()
datetime.date(2013, 5, 7)
>>> a.time()
datetime.time(4, 38, 15, 447644)
Replace & Shift
~~~~~~~~~~~~~~~
Get a new :class:`Arrow <arrow.arrow.Arrow>` object, with altered attributes, just as you would with a datetime:
.. code-block:: python
>>> arw = arrow.utcnow()
>>> arw
<Arrow [2013-05-12T03:29:35.334214+00:00]>
>>> arw.replace(hour=4, minute=40)
<Arrow [2013-05-12T04:40:35.334214+00:00]>
Or, get one with attributes shifted forward or backward:
.. code-block:: python
>>> arw.shift(weeks=+3)
<Arrow [2013-06-02T03:29:35.334214+00:00]>
Even replace the timezone without altering other attributes:
.. code-block:: python
>>> arw.replace(tzinfo='US/Pacific')
<Arrow [2013-05-12T03:29:35.334214-07:00]>
Move between the earlier and later moments of an ambiguous time:
.. code-block:: python
>>> paris_transition = arrow.Arrow(2019, 10, 27, 2, tzinfo="Europe/Paris", fold=0)
>>> paris_transition
<Arrow [2019-10-27T02:00:00+02:00]>
>>> paris_transition.ambiguous
True
>>> paris_transition.replace(fold=1)
<Arrow [2019-10-27T02:00:00+01:00]>
Format
~~~~~~
.. code-block:: python
>>> arrow.utcnow().format('YYYY-MM-DD HH:mm:ss ZZ')
'2013-05-07 05:23:16 -00:00'
Convert
~~~~~~~
Convert from UTC to other timezones by name or tzinfo:
.. code-block:: python
>>> utc = arrow.utcnow()
>>> utc
<Arrow [2013-05-07T05:24:11.823627+00:00]>
>>> utc.to('US/Pacific')
<Arrow [2013-05-06T22:24:11.823627-07:00]>
>>> utc.to(tz.gettz('US/Pacific'))
<Arrow [2013-05-06T22:24:11.823627-07:00]>
Or using shorthand:
.. code-block:: python
>>> utc.to('local')
<Arrow [2013-05-06T22:24:11.823627-07:00]>
>>> utc.to('local').to('utc')
<Arrow [2013-05-07T05:24:11.823627+00:00]>
Humanize
~~~~~~~~
Humanize relative to now:
.. code-block:: python
>>> past = arrow.utcnow().shift(hours=-1)
>>> past.humanize()
'an hour ago'
Or another Arrow, or datetime:
.. code-block:: python
>>> present = arrow.utcnow()
>>> future = present.shift(hours=2)
>>> future.humanize(present)
'in 2 hours'
Indicate time as relative or include only the distance:
.. code-block:: python
>>> present = arrow.utcnow()
>>> future = present.shift(hours=2)
>>> future.humanize(present)
'in 2 hours'
>>> future.humanize(present, only_distance=True)
'2 hours'
Indicate a specific time granularity (or multiple):
.. code-block:: python
>>> present = arrow.utcnow()
>>> future = present.shift(minutes=66)
>>> future.humanize(present, granularity="minute")
'in 66 minutes'
>>> future.humanize(present, granularity=["hour", "minute"])
'in an hour and 6 minutes'
>>> present.humanize(future, granularity=["hour", "minute"])
'an hour and 6 minutes ago'
>>> future.humanize(present, only_distance=True, granularity=["hour", "minute"])
'an hour and 6 minutes'
Support for a growing number of locales (see ``locales.py`` for supported languages):
.. code-block:: python
>>> present = arrow.utcnow()
>>> future = present.shift(hours=2)
>>> future.humanize(present, locale='ru')
'через 2 час(а,ов)'
Ranges & Spans
~~~~~~~~~~~~~~
Get the time span of any unit:
.. code-block:: python
>>> arrow.utcnow().span('hour')
(<Arrow [2013-05-07T05:00:00+00:00]>, <Arrow [2013-05-07T05:59:59.999999+00:00]>)
Or just get the floor and ceiling:
.. code-block:: python
>>> arrow.utcnow().floor('hour')
<Arrow [2013-05-07T05:00:00+00:00]>
>>> arrow.utcnow().ceil('hour')
<Arrow [2013-05-07T05:59:59.999999+00:00]>
You can also get a range of time spans:
.. code-block:: python
>>> start = datetime(2013, 5, 5, 12, 30)
>>> end = datetime(2013, 5, 5, 17, 15)
>>> for r in arrow.Arrow.span_range('hour', start, end):
... print(r)
...
(<Arrow [2013-05-05T12:00:00+00:00]>, <Arrow [2013-05-05T12:59:59.999999+00:00]>)
(<Arrow [2013-05-05T13:00:00+00:00]>, <Arrow [2013-05-05T13:59:59.999999+00:00]>)
(<Arrow [2013-05-05T14:00:00+00:00]>, <Arrow [2013-05-05T14:59:59.999999+00:00]>)
(<Arrow [2013-05-05T15:00:00+00:00]>, <Arrow [2013-05-05T15:59:59.999999+00:00]>)
(<Arrow [2013-05-05T16:00:00+00:00]>, <Arrow [2013-05-05T16:59:59.999999+00:00]>)
Or just iterate over a range of time:
.. code-block:: python
>>> start = datetime(2013, 5, 5, 12, 30)
>>> end = datetime(2013, 5, 5, 17, 15)
>>> for r in arrow.Arrow.range('hour', start, end):
... print(repr(r))
...
<Arrow [2013-05-05T12:30:00+00:00]>
<Arrow [2013-05-05T13:30:00+00:00]>
<Arrow [2013-05-05T14:30:00+00:00]>
<Arrow [2013-05-05T15:30:00+00:00]>
<Arrow [2013-05-05T16:30:00+00:00]>
.. toctree::
:maxdepth: 2
Factories
~~~~~~~~~
Use factories to harness Arrow's module API for a custom Arrow-derived type. First, derive your type:
.. code-block:: python
>>> class CustomArrow(arrow.Arrow):
...
... def days_till_xmas(self):
...
... xmas = arrow.Arrow(self.year, 12, 25)
...
... if self > xmas:
... xmas = xmas.shift(years=1)
...
... return (xmas - self).days
Then get and use a factory for it:
.. code-block:: python
>>> factory = arrow.ArrowFactory(CustomArrow)
>>> custom = factory.utcnow()
>>> custom
<CustomArrow [2013-05-27T23:35:35.533160+00:00]>
>>> custom.days_till_xmas()
211
Supported Tokens
~~~~~~~~~~~~~~~~
Use the following tokens for parsing and formatting. Note that they are **not** the same as the tokens for `strptime <https://linux.die.net/man/3/strptime>`_:
+--------------------------------+--------------+-------------------------------------------+
| |Token |Output |
+================================+==============+===========================================+
|**Year** |YYYY |2000, 2001, 2002 ... 2012, 2013 |
+--------------------------------+--------------+-------------------------------------------+
| |YY |00, 01, 02 ... 12, 13 |
+--------------------------------+--------------+-------------------------------------------+
|**Month** |MMMM |January, February, March ... [#t1]_ |
+--------------------------------+--------------+-------------------------------------------+
| |MMM |Jan, Feb, Mar ... [#t1]_ |
+--------------------------------+--------------+-------------------------------------------+
| |MM |01, 02, 03 ... 11, 12 |
+--------------------------------+--------------+-------------------------------------------+
| |M |1, 2, 3 ... 11, 12 |
+--------------------------------+--------------+-------------------------------------------+
|**Day of Year** |DDDD |001, 002, 003 ... 364, 365 |
+--------------------------------+--------------+-------------------------------------------+
| |DDD |1, 2, 3 ... 364, 365 |
+--------------------------------+--------------+-------------------------------------------+
|**Day of Month** |DD |01, 02, 03 ... 30, 31 |
+--------------------------------+--------------+-------------------------------------------+
| |D |1, 2, 3 ... 30, 31 |
+--------------------------------+--------------+-------------------------------------------+
| |Do |1st, 2nd, 3rd ... 30th, 31st |
+--------------------------------+--------------+-------------------------------------------+
|**Day of Week** |dddd |Monday, Tuesday, Wednesday ... [#t2]_ |
+--------------------------------+--------------+-------------------------------------------+
| |ddd |Mon, Tue, Wed ... [#t2]_ |
+--------------------------------+--------------+-------------------------------------------+
| |d |1, 2, 3 ... 6, 7 |
+--------------------------------+--------------+-------------------------------------------+
|**ISO week date** |W |2011-W05-4, 2019-W17 |
+--------------------------------+--------------+-------------------------------------------+
|**Hour** |HH |00, 01, 02 ... 23, 24 |
+--------------------------------+--------------+-------------------------------------------+
| |H |0, 1, 2 ... 23, 24 |
+--------------------------------+--------------+-------------------------------------------+
| |hh |01, 02, 03 ... 11, 12 |
+--------------------------------+--------------+-------------------------------------------+
| |h |1, 2, 3 ... 11, 12 |
+--------------------------------+--------------+-------------------------------------------+
|**AM / PM** |A |AM, PM, am, pm [#t1]_ |
+--------------------------------+--------------+-------------------------------------------+
| |a |am, pm [#t1]_ |
+--------------------------------+--------------+-------------------------------------------+
|**Minute** |mm |00, 01, 02 ... 58, 59 |
+--------------------------------+--------------+-------------------------------------------+
| |m |0, 1, 2 ... 58, 59 |
+--------------------------------+--------------+-------------------------------------------+
|**Second** |ss |00, 01, 02 ... 58, 59 |
+--------------------------------+--------------+-------------------------------------------+
| |s |0, 1, 2 ... 58, 59 |
+--------------------------------+--------------+-------------------------------------------+
|**Sub-second** |S... |0, 02, 003, 000006, 123123123123... [#t3]_ |
+--------------------------------+--------------+-------------------------------------------+
|**Timezone** |ZZZ |Asia/Baku, Europe/Warsaw, GMT ... [#t4]_ |
+--------------------------------+--------------+-------------------------------------------+
| |ZZ |-07:00, -06:00 ... +06:00, +07:00, +08, Z |
+--------------------------------+--------------+-------------------------------------------+
| |Z |-0700, -0600 ... +0600, +0700, +08, Z |
+--------------------------------+--------------+-------------------------------------------+
|**Seconds Timestamp** |X |1381685817, 1381685817.915482 ... [#t5]_ |
+--------------------------------+--------------+-------------------------------------------+
|**ms or µs Timestamp** |x |1569980330813, 1569980330813221 |
+--------------------------------+--------------+-------------------------------------------+
.. rubric:: Footnotes
.. [#t1] localization support for parsing and formatting
.. [#t2] localization support only for formatting
.. [#t3] the result is truncated to microseconds, with `half-to-even rounding <https://en.wikipedia.org/wiki/IEEE_floating_point#Roundings_to_nearest>`_.
.. [#t4] timezone names from `tz database <https://www.iana.org/time-zones>`_ provided via dateutil package, note that abbreviations such as MST, PDT, BRST are unlikely to parse due to ambiguity. Use the full IANA zone name instead (Asia/Shanghai, Europe/London, America/Chicago etc).
.. [#t5] this token cannot be used for parsing timestamps out of natural language strings due to compatibility reasons
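The same tokens drive both parsing and formatting, so a format string can round-trip a value. A brief sketch (hand-written for illustration rather than captured from a live session):
.. code-block:: python
>>> arw = arrow.get('31st October 2019, 6:35 pm', 'Do MMMM YYYY, h:mm a')
>>> arw
<Arrow [2019-10-31T18:35:00+00:00]>
>>> arw.format('dddd DD-MM-YYYY HH:mm')
'Thursday 31-10-2019 18:35'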
Built-in Formats
++++++++++++++++
There are several formatting standards that are provided as built-in tokens.
.. code-block:: python
>>> arw = arrow.utcnow()
>>> arw.format(arrow.FORMAT_ATOM)
'2020-05-27 10:30:35+00:00'
>>> arw.format(arrow.FORMAT_COOKIE)
'Wednesday, 27-May-2020 10:30:35 UTC'
>>> arw.format(arrow.FORMAT_RSS)
'Wed, 27 May 2020 10:30:35 +0000'
>>> arw.format(arrow.FORMAT_RFC822)
'Wed, 27 May 20 10:30:35 +0000'
>>> arw.format(arrow.FORMAT_RFC850)
'Wednesday, 27-May-20 10:30:35 UTC'
>>> arw.format(arrow.FORMAT_RFC1036)
'Wed, 27 May 20 10:30:35 +0000'
>>> arw.format(arrow.FORMAT_RFC1123)
'Wed, 27 May 2020 10:30:35 +0000'
>>> arw.format(arrow.FORMAT_RFC2822)
'Wed, 27 May 2020 10:30:35 +0000'
>>> arw.format(arrow.FORMAT_RFC3339)
'2020-05-27 10:30:35+00:00'
>>> arw.format(arrow.FORMAT_W3C)
'2020-05-27 10:30:35+00:00'
Escaping Formats
~~~~~~~~~~~~~~~~
Tokens, phrases, and regular expressions in a format string can be escaped when parsing and formatting by enclosing them within square brackets.
Tokens & Phrases
++++++++++++++++
Any `token <Supported Tokens_>`_ or phrase can be escaped as follows:
.. code-block:: python
>>> fmt = "YYYY-MM-DD h [h] m"
>>> arw = arrow.get("2018-03-09 8 h 40", fmt)
<Arrow [2018-03-09T08:40:00+00:00]>
>>> arw.format(fmt)
'2018-03-09 8 h 40'
>>> fmt = "YYYY-MM-DD h [hello] m"
>>> arw = arrow.get("2018-03-09 8 hello 40", fmt)
<Arrow [2018-03-09T08:40:00+00:00]>
>>> arw.format(fmt)
'2018-03-09 8 hello 40'
>>> fmt = "YYYY-MM-DD h [hello world] m"
>>> arw = arrow.get("2018-03-09 8 hello world 40", fmt)
<Arrow [2018-03-09T08:40:00+00:00]>
>>> arw.format(fmt)
'2018-03-09 8 hello world 40'
This can be useful for parsing dates in different locales such as French, in which it is common to format time strings as "8 h 40" rather than "8:40".
Regular Expressions
+++++++++++++++++++
You can also escape regular expressions by enclosing them within square brackets. In the following example, we are using the regular expression :code:`\s+` to match any number of whitespace characters that separate the tokens. This is useful if you do not know the number of spaces between tokens ahead of time (e.g. in log files).
.. code-block:: python
>>> fmt = r"ddd[\s+]MMM[\s+]DD[\s+]HH:mm:ss[\s+]YYYY"
>>> arrow.get("Mon Sep 08 16:41:45 2014", fmt)
<Arrow [2014-09-08T16:41:45+00:00]>
>>> arrow.get("Mon \tSep 08 16:41:45 2014", fmt)
<Arrow [2014-09-08T16:41:45+00:00]>
>>> arrow.get("Mon Sep  08 16:41:45  2014", fmt)
<Arrow [2014-09-08T16:41:45+00:00]>
Punctuation
~~~~~~~~~~~
Date and time formats may be fenced on either side by one punctuation character from the following list: ``, . ; : ? ! " \` ' [ ] { } ( ) < >``
.. code-block:: python
>>> arrow.get("Cool date: 2019-10-31T09:12:45.123456+04:30.", "YYYY-MM-DDTHH:mm:ss.SZZ")
<Arrow [2019-10-31T09:12:45.123456+04:30]>
>>> arrow.get("Tomorrow (2019-10-31) is Halloween!", "YYYY-MM-DD")
<Arrow [2019-10-31T00:00:00+00:00]>
>>> arrow.get("Halloween is on 2019.10.31.", "YYYY.MM.DD")
<Arrow [2019-10-31T00:00:00+00:00]>
>>> arrow.get("It's Halloween tomorrow (2019-10-31)!", "YYYY-MM-DD")
# Raises exception because there are multiple punctuation marks following the date
Redundant Whitespace
~~~~~~~~~~~~~~~~~~~~
Redundant whitespace characters (spaces, tabs, and newlines) can be normalized automatically by passing in the ``normalize_whitespace`` flag to ``arrow.get``:
.. code-block:: python
>>> arrow.get('\t \n 2013-05-05T12:30:45.123456 \t \n', normalize_whitespace=True)
<Arrow [2013-05-05T12:30:45.123456+00:00]>
>>> arrow.get('2013-05-05 T \n 12:30:45\t123456', 'YYYY-MM-DD T HH:mm:ss S', normalize_whitespace=True)
<Arrow [2013-05-05T12:30:45.123456+00:00]>
API Guide
---------
arrow.arrow
~~~~~~~~~~~
.. automodule:: arrow.arrow
:members:
arrow.factory
~~~~~~~~~~~~~
.. automodule:: arrow.factory
:members:
arrow.api
~~~~~~~~~
.. automodule:: arrow.api
:members:
arrow.locale
~~~~~~~~~~~~
.. automodule:: arrow.locales
:members:
:undoc-members:
Release History
---------------
.. toctree::
:maxdepth: 2
releases

View file

@ -0,0 +1,35 @@
@ECHO OFF
pushd %~dp0
REM Command file for Sphinx documentation
if "%SPHINXBUILD%" == "" (
set SPHINXBUILD=sphinx-build
)
set SOURCEDIR=.
set BUILDDIR=_build
if "%1" == "" goto help
%SPHINXBUILD% >NUL 2>NUL
if errorlevel 9009 (
echo.
echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
echo.installed, then set the SPHINXBUILD environment variable to point
echo.to the full path of the 'sphinx-build' executable. Alternatively you
echo.may add the Sphinx directory to PATH.
echo.
echo.If you don't have Sphinx installed, grab it from
echo.http://sphinx-doc.org/
exit /b 1
)
%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
goto end
:help
%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
:end
popd

View file

@ -0,0 +1,3 @@
.. _releases:
.. include:: ../CHANGELOG.rst

View file

@ -0,0 +1,14 @@
backports.functools_lru_cache==1.6.1; python_version == "2.7"
dateparser==0.7.*
pre-commit==1.21.*; python_version <= "3.5"
pre-commit==2.6.*; python_version >= "3.6"
pytest==4.6.*; python_version == "2.7"
pytest==6.0.*; python_version >= "3.5"
pytest-cov==2.10.*
pytest-mock==2.0.*; python_version == "2.7"
pytest-mock==3.2.*; python_version >= "3.5"
python-dateutil==2.8.*
pytz==2019.*
simplejson==3.17.*
sphinx==1.8.*; python_version == "2.7"
sphinx==3.2.*; python_version >= "3.5"

View file

@ -0,0 +1,2 @@
[bdist_wheel]
universal = 1

View file

@ -0,0 +1,50 @@
# -*- coding: utf-8 -*-
import io
from setuptools import setup
with io.open("README.rst", "r", encoding="utf-8") as f:
readme = f.read()
about = {}
with io.open("arrow/_version.py", "r", encoding="utf-8") as f:
exec(f.read(), about)
setup(
name="arrow",
version=about["__version__"],
description="Better dates & times for Python",
long_description=readme,
long_description_content_type="text/x-rst",
url="https://arrow.readthedocs.io",
author="Chris Smith",
author_email="crsmithdev@gmail.com",
license="Apache 2.0",
packages=["arrow"],
zip_safe=False,
python_requires=">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*",
install_requires=[
"python-dateutil>=2.7.0",
"backports.functools_lru_cache>=1.2.1;python_version=='2.7'",
],
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"License :: OSI Approved :: Apache Software License",
"Topic :: Software Development :: Libraries :: Python Modules",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
],
keywords="arrow date time datetime timestamp timezone humanize",
project_urls={
"Repository": "https://github.com/arrow-py/arrow",
"Bug Reports": "https://github.com/arrow-py/arrow/issues",
"Documentation": "https://arrow.readthedocs.io",
},
)

View file

@ -0,0 +1,76 @@
# -*- coding: utf-8 -*-
from datetime import datetime
import pytest
from dateutil import tz as dateutil_tz
from arrow import arrow, factory, formatter, locales, parser
@pytest.fixture(scope="class")
def time_utcnow(request):
request.cls.arrow = arrow.Arrow.utcnow()
@pytest.fixture(scope="class")
def time_2013_01_01(request):
request.cls.now = arrow.Arrow.utcnow()
request.cls.arrow = arrow.Arrow(2013, 1, 1)
request.cls.datetime = datetime(2013, 1, 1)
@pytest.fixture(scope="class")
def time_2013_02_03(request):
request.cls.arrow = arrow.Arrow(2013, 2, 3, 12, 30, 45, 1)
@pytest.fixture(scope="class")
def time_2013_02_15(request):
request.cls.datetime = datetime(2013, 2, 15, 3, 41, 22, 8923)
request.cls.arrow = arrow.Arrow.fromdatetime(request.cls.datetime)
@pytest.fixture(scope="class")
def time_1975_12_25(request):
request.cls.datetime = datetime(
1975, 12, 25, 14, 15, 16, tzinfo=dateutil_tz.gettz("America/New_York")
)
request.cls.arrow = arrow.Arrow.fromdatetime(request.cls.datetime)
@pytest.fixture(scope="class")
def arrow_formatter(request):
request.cls.formatter = formatter.DateTimeFormatter()
@pytest.fixture(scope="class")
def arrow_factory(request):
request.cls.factory = factory.ArrowFactory()
@pytest.fixture(scope="class")
def lang_locales(request):
request.cls.locales = locales._locales
@pytest.fixture(scope="class")
def lang_locale(request):
# As locale test classes are prefixed with Test, we are dynamically getting the locale by the test class name.
# TestEnglishLocale -> EnglishLocale
name = request.cls.__name__[4:]
request.cls.locale = locales.get_locale_by_class_name(name)
@pytest.fixture(scope="class")
def dt_parser(request):
request.cls.parser = parser.DateTimeParser()
@pytest.fixture(scope="class")
def dt_parser_regex(request):
request.cls.format_regex = parser.DateTimeParser._FORMAT_RE
@pytest.fixture(scope="class")
def tzinfo_parser(request):
request.cls.parser = parser.TzinfoParser()

View file

@ -0,0 +1,28 @@
# -*- coding: utf-8 -*-
import arrow
class TestModule:
def test_get(self, mocker):
mocker.patch("arrow.api._factory.get", return_value="result")
assert arrow.api.get() == "result"
def test_utcnow(self, mocker):
mocker.patch("arrow.api._factory.utcnow", return_value="utcnow")
assert arrow.api.utcnow() == "utcnow"
def test_now(self, mocker):
mocker.patch("arrow.api._factory.now", tz="tz", return_value="now")
assert arrow.api.now("tz") == "now"
def test_factory(self):
class MockCustomArrowClass(arrow.Arrow):
pass
result = arrow.api.factory(MockCustomArrowClass)
assert isinstance(result, arrow.factory.ArrowFactory)
assert isinstance(result.utcnow(), MockCustomArrowClass)

File diff suppressed because it is too large

Some files were not shown because too many files have changed in this diff