Merge branch 'develop' into feature/advanced_rest_api

Jakub Trllo 2019-10-31 23:35:28 +01:00
commit 312d3c96ab
3382 changed files with 20916 additions and 302141 deletions

.gitignore
View file

@@ -27,3 +27,9 @@ coverage.xml
*.cover
.hypothesis/
.pytest_cache/
# Node JS packages
##################
node_modules/
package-lock.json

View file

@@ -3,15 +3,17 @@ import sys
from avalon import api as avalon
from pyblish import api as pyblish
from pypeapp import execute, Logger
from app import api as app
from .. import api
from .lib import set_avalon_workdir
t = app.Templates()
log = Logger().get_logger(__name__, "aport")
log = api.Logger.getLogger(__name__, "aport")
AVALON_CONFIG = os.getenv("AVALON_CONFIG", "pype")
ADDITIONAL_PLUGINS = ['ftrack']
PARENT_DIR = os.path.dirname(__file__)
PACKAGE_DIR = os.path.dirname(PARENT_DIR)
PLUGINS_DIR = os.path.join(PACKAGE_DIR, "plugins")
@@ -33,8 +35,31 @@ CREATE_PATH = os.path.join(PLUGINS_DIR, "aport", "create")
INVENTORY_PATH = os.path.join(PLUGINS_DIR, "aport", "inventory")
def register_additional_plugin_paths():
'''Add plugin paths of additional hosts
'''
for host in ADDITIONAL_PLUGINS:
publish_path = os.path.join(
PLUGINS_DIR, host, "publish").replace("\\", "/")
pyblish.register_plugin_path(publish_path)
# append the path to the PUBLISH_PATH environment variable
if os.getenv("PUBLISH_PATH", None):
os.environ["PUBLISH_PATH"] = os.pathsep.join(
os.environ["PUBLISH_PATH"].split(os.pathsep) +
[publish_path]
)
else:
os.environ["PUBLISH_PATH"] = publish_path
log.info(
"Registered additional plugin path: "
"{}".format(publish_path))
def install():
set_avalon_workdir()
# api.set_avalon_workdir()
log.info("Registering Aport plug-ins..")
pyblish.register_plugin_path(PUBLISH_PATH)
@@ -42,6 +67,9 @@ def install():
avalon.register_plugin_path(avalon.Creator, CREATE_PATH)
avalon.register_plugin_path(avalon.InventoryAction, INVENTORY_PATH)
# additional plugins
register_additional_plugin_paths()
# Disable all families except for the ones we explicitly want to see
family_states = [
"imagesequence",
@@ -51,6 +79,9 @@ def install():
avalon.data["familiesStateDefault"] = False
avalon.data["familiesStateToggled"] = family_states
# load data from templates
api.load_data_from_templates()
# launch pico server
pico_server_launch()
@@ -81,7 +112,7 @@ def pico_server_launch():
"api"
]
execute(
app.forward(
args,
cwd=path
)

View file

@@ -1,34 +1,90 @@
# api.py
import os
import sys
import tempfile
import pico
from pico import PicoApp
from pico.decorators import request_args, set_cookie, delete_cookie, stream
from pico.decorators import header, cookie
from app.api import forward, Logger
from werkzeug.exceptions import Unauthorized, ImATeapot, BadRequest
import pipeline as ppl
from avalon import api as avalon
from avalon import io
import pyblish.api as pyblish
from pypeapp import execute
from pype import api as pype
log = pype.Logger().get_logger(__name__, "aport")
SESSION = avalon.session
if not SESSION:
io.install()
log = Logger.getLogger(__name__, "aport")
@pico.expose()
def publish(json_data_path, gui):
def get_session():
ppl.AVALON_PROJECT = os.getenv("AVALON_PROJECT", None)
ppl.AVALON_ASSET = os.getenv("AVALON_ASSET", None)
ppl.AVALON_TASK = os.getenv("AVALON_TASK", None)
ppl.AVALON_SILO = os.getenv("AVALON_SILO", None)
return ppl.get_session()
@pico.expose()
def load_representations(project, representations):
'''Query data from MongoDB for the defined representations.
Args:
project (str): name of the project
representations (list): representations which are required
Returns:
data (dict): representations in last versions
# testing url:
http://localhost:4242/api/load_representations?project=jakub_projectx&representations=[{%22asset%22:%22e09s031_0040%22,%22subset%22:%22referenceDefault%22,%22representation%22:%22mp4%22},%20{%22asset%22:%22e09s031_0030%22,%22subset%22:%22referenceDefault%22,%22representation%22:%22mp4%22}]
# returning:
{"e09s031_0040_referenceDefault":{"_id":"5c6dabaa2af61756b02f7f32","schema":"pype:representation-2.0","type":"representation","parent":"5c6dabaa2af61756b02f7f31","name":"mp4","data":{"path":"C:\\Users\\hubert\\_PYPE_testing\\projects\\jakub_projectx\\thisFolder\\e09\\s031\\e09s031_0040\\publish\\clip\\referenceDefault\\v019\\jkprx_e09s031_0040_referenceDefault_v019.mp4","template":"{publish.root}/{publish.folder}/{version.main}/{publish.file}"},"dependencies":[],"context":{"root":"C:\\Users\\hubert\\_PYPE_testing\\projects","project":{"name":"jakub_projectx","code":"jkprx"},"task":"edit","silo":"thisFolder","asset":"e09s031_0040","family":"clip","subset":"referenceDefault","VERSION":19,"hierarchy":"thisFolder\\e09\\s031","representation":"mp4"}}}
'''
data = {}
# log.info("___project: {}".format(project))
# ppl.io.activate_project(project)
#
# from_mongo = ppl.io.find({"name": repr['representation'],
# "type": "representation"})[:]
for repr in representations:
log.info("asset: {}".format(repr['asset']))
# set context for each asset individually
context(project, repr['asset'], '')
# query data from mongo db for the asset's subset representation
related_repr = [r for r in ppl.io.find({"name": repr['representation'],
"type": "representation",
"context.asset": repr['asset']})[:]]
versions_dict = {r['context']['version']: i
for i, r in enumerate(related_repr)}
versions_list = list(versions_dict.keys())
versions_list.sort()
version_index_last = versions_dict[max(versions_list)]
log.info("version_index_last: {}".format(version_index_last))
# create name which will be used on timeline clip
name = '_'.join([repr['asset'], repr['subset']])
# log.info("___related_repr: {}".format(related_repr))
# assign data for the clip representation
version = ppl.io.find_one(
{'_id': related_repr[version_index_last]['parent']})
log.info("version: {}".format(version))
# fix path workaround
if '.#####.mxf' in related_repr[version_index_last]['data']['path']:
related_repr[version_index_last]['data']['path'] = related_repr[version_index_last]['data']['path'].replace(
'.#####.mxf', '.mxf')
related_repr[version_index_last]['version'] = version
related_repr[version_index_last]['parentClip'] = repr['parentClip']
data[name] = related_repr[version_index_last]
return data
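# Editor's sketch (not part of the commit): exercising load_representations()
# over HTTP, mirroring the testing url in the docstring above. `requests`, the
# server on localhost:4242 and the project/asset names are assumptions.
import json
import requests

query = [{"asset": "e09s031_0040",
          "subset": "referenceDefault",
          "representation": "mp4",
          "parentClip": None}]  # the handler reads `parentClip` from each item
resp = requests.get(
    "http://localhost:4242/api/load_representations",
    params={"project": "jakub_projectx",
            "representations": json.dumps(query)})
print(resp.json())  # {"e09s031_0040_referenceDefault": {...}}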
@pico.expose()
def publish(send_json_path, get_json_path, gui):
"""
Runs standalone pyblish and adds a link to
the data in an external json file
@@ -37,82 +93,101 @@ def publish(json_data_path, gui):
host is needed
Args:
json_data_path (string): path to temp json file with
context data
staging_dir (string, optional): path to temp directory
send_json_path (string): path to temp json file with
sending context data
get_json_path (string): path to temp json file with
returning context data
Returns:
dict: return_json_path
dict: get_json_path
Raises:
Exception: description
"""
cwd = os.getenv('AVALON_WORKDIR').replace("\\", "/")
staging_dir = tempfile.mkdtemp(prefix="pype_aport_").replace("\\", "/")
log.info("staging_dir: {}".format(staging_dir))
return_json_path = os.path.join(staging_dir, "return_data.json").replace("\\", "/")
log.info("avalon.session is: \n{}".format(ppl.SESSION))
log.info("PUBLISH_PATH: \n{}".format(os.environ["PUBLISH_PATH"]))
log.info("avalon.session is: \n{}".format(SESSION))
pype_start = os.path.join(os.getenv('PYPE_ROOT'),
pype_start = os.path.join(os.getenv('PYPE_SETUP_ROOT'),
"app", "pype-start.py")
publish = "--publish-gui" if gui else "--publish"
args = [pype_start, publish,
args = [pype_start,
"--root", os.environ['AVALON_PROJECTS'], "--publish-gui",
"-pp", os.environ["PUBLISH_PATH"],
"-d", "rqst_json_data_path", json_data_path,
"-d", "post_json_data_path", return_json_path
"-d", "rqst_json_data_path", send_json_path,
"-d", "post_json_data_path", get_json_path
]
log.debug(args)
# start standalone pyblish qml
execute([
log.info("_aport.api Variable `AVALON_PROJECTS` had changed to `{0}`.".format(
os.environ['AVALON_PROJECTS']))
forward([
sys.executable, "-u"
] + args,
cwd=cwd
# cwd=cwd
)
return {"return_json_path": return_json_path}
return {"get_json_path": get_json_path}
@pico.expose()
def context(project_name, asset, task, app):
def context(project, asset, task, app='aport'):
os.environ["AVALON_PROJECT"] = ppl.AVALON_PROJECT = project
os.environ["AVALON_ASSET"] = ppl.AVALON_ASSET = asset
os.environ["AVALON_TASK"] = ppl.AVALON_TASK = task
os.environ["AVALON_SILO"] = ppl.AVALON_SILO = ''
ppl.get_session()
# log.info('ppl.SESSION: {}'.format(ppl.SESSION))
# http://localhost:4242/pipeline/context?project=this&asset=shot01&task=comp
os.environ["AVALON_PROJECT"] = project_name
io.Session["AVALON_PROJECT"] = project_name
ppl.update_current_task(task, asset, app)
avalon.update_current_task(task, asset, app)
project_code = ppl.io.find_one({"type": "project"})["data"].get("code", '')
project_code = pype.get_project()["data"].get("code", '')
os.environ["AVALON_PROJECTCODE"] = \
ppl.SESSION["AVALON_PROJECTCODE"] = project_code
os.environ["AVALON_PROJECTCODE"] = project_code
io.Session["AVALON_PROJECTCODE"] = project_code
parents = ppl.io.find_one({"type": 'asset',
"name": ppl.AVALON_ASSET})['data']['parents']
hierarchy = pype.get_hierarchy()
os.environ["AVALON_HIERARCHY"] = hierarchy
io.Session["AVALON_HIERARCHY"] = hierarchy
if parents and len(parents) > 0:
# hierarchy = os.path.sep.join(hierarchy)
hierarchy = os.path.join(*parents).replace("\\", "/")
fix_paths = {k: v.replace("\\", "/") for k, v in SESSION.items()
os.environ["AVALON_HIERARCHY"] = \
ppl.SESSION["AVALON_HIERARCHY"] = hierarchy
fix_paths = {k: v.replace("\\", "/") for k, v in ppl.SESSION.items()
if isinstance(v, str)}
SESSION.update(fix_paths)
SESSION.update({"AVALON_HIERARCHY": hierarchy,
"AVALON_PROJECTCODE": project_code,
"current_dir": os.getcwd().replace("\\", "/")
})
return SESSION
ppl.SESSION.update(fix_paths)
ppl.SESSION.update({"AVALON_HIERARCHY": hierarchy,
"AVALON_PROJECTCODE": project_code,
"current_dir": os.getcwd().replace("\\", "/")
})
return ppl.SESSION
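# Editor's sketch (not part of the commit): switching the session context over
# HTTP, in the spirit of the testing url kept in the code above; the /api
# prefix and the context values are assumptions.
import requests

session = requests.get(
    "http://localhost:4242/api/context",
    params={"project": "jakub_projectx",
            "asset": "e09s031_0040",
            "task": "edit"}).json()
print(session["AVALON_HIERARCHY"])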
@pico.expose()
def anatomy_fill(data):
from pype import api as pype
pype.load_data_from_templates()
anatomy = pype.Anatomy
return anatomy.format(data)
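# Editor's sketch (not part of the commit): the kind of payload anatomy_fill()
# expects; the keys follow common Anatomy template fields, values are invented.
example_data = {
    "project": {"name": "jakub_projectx", "code": "jkprx"},
    "asset": "e09s031_0040",
    "task": "edit",
    "version": 19
}
# anatomy_fill(example_data) -> dict of filled template paths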
@pico.expose()
def deregister_plugin_path():
if os.getenv("PUBLISH_PATH", None):
aport_plugin_path = [p.replace("\\", "/") for p in os.environ["PUBLISH_PATH"].split(
os.pathsep) if "aport" in p][0]
aport_plugin_path = os.pathsep.join(
[p.replace("\\", "/")
for p in os.environ["PUBLISH_PATH"].split(os.pathsep)
if "aport" in p or
"ftrack" in p])
os.environ["PUBLISH_PATH"] = aport_plugin_path
else:
log.warning("deregister_plugin_path(): No PUBLISH_PATH is registred")
@@ -125,8 +200,8 @@ def register_plugin_path(publish_path):
deregister_plugin_path()
if os.getenv("PUBLISH_PATH", None):
os.environ["PUBLISH_PATH"] = os.pathsep.join(
os.environ["PUBLISH_PATH"].split(os.pathsep) +
[publish_path.replace("\\", "/")]
os.environ["PUBLISH_PATH"].split(os.pathsep)
+ [publish_path.replace("\\", "/")]
)
else:
os.environ["PUBLISH_PATH"] = publish_path
@@ -143,8 +218,8 @@ app.register_module(__name__)
# remove all Handlers created by pico
for name, handler in [(handler.get_name(), handler)
for handler in pype.Logger.logging.root.handlers[:]]:
for handler in Logger.logging.root.handlers[:]]:
if "pype" not in str(name).lower():
print(name)
print(handler)
pype.Logger.logging.root.removeHandler(handler)
Logger.logging.root.removeHandler(handler)
# SPLASH.hide_splash()

View file

@@ -0,0 +1,432 @@
"""
Wrapper around interactions with the database
A copy of the io module from avalon-core.
- Unlike the original, it is not a singleton bound to api.Session!
"""
import os
import time
import errno
import shutil
import logging
import tempfile
import functools
import contextlib
from avalon import schema
import requests
# Third-party dependencies
import pymongo
def auto_reconnect(func):
"""Handling auto reconnect in 3 retry times"""
@functools.wraps(func)
def decorated(*args, **kwargs):
object = args[0]
for retry in range(3):
try:
return func(*args, **kwargs)
except pymongo.errors.AutoReconnect:
object.log.error("Reconnecting..")
time.sleep(0.1)
else:
raise
return decorated
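# Editor's sketch (not part of the commit): `auto_reconnect` expects to wrap a
# *method*, since it reads `args[0]` (the instance) to reach its `log`
# attribute. `Dummy` is hypothetical.
class Dummy(object):
    log = logging.getLogger("dummy")

    @auto_reconnect
    def ping(self):
        return True  # retried up to 3 times on pymongo.errors.AutoReconnect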
class DbConnector(object):
log = logging.getLogger(__name__)
def __init__(self):
self.Session = {}
self._mongo_client = None
self._sentry_client = None
self._sentry_logging_handler = None
self._database = None
self._is_installed = False
def install(self):
"""Establish a persistent connection to the database"""
if self._is_installed:
return
logging.basicConfig()
self.Session.update(self._from_environment())
timeout = int(self.Session["AVALON_TIMEOUT"])
self._mongo_client = pymongo.MongoClient(
self.Session["AVALON_MONGO"], serverSelectionTimeoutMS=timeout)
for retry in range(3):
try:
t1 = time.time()
self._mongo_client.server_info()
except Exception:
self.log.error("Retrying..")
time.sleep(1)
timeout *= 1.5
else:
break
else:
raise IOError(
"ERROR: Couldn't connect to %s in "
"less than %.3f ms" % (self.Session["AVALON_MONGO"], timeout))
self.log.info("Connected to %s, delay %.3f s" % (
self.Session["AVALON_MONGO"], time.time() - t1))
self._install_sentry()
self._database = self._mongo_client[self.Session["AVALON_DB"]]
self._is_installed = True
def _install_sentry(self):
if "AVALON_SENTRY" not in self.Session:
return
try:
from raven import Client
from raven.handlers.logging import SentryHandler
from raven.conf import setup_logging
except ImportError:
# Note: There was a Sentry address in this Session
return self.log.warning("Sentry disabled, raven not installed")
client = Client(self.Session["AVALON_SENTRY"])
# Transmit log messages to Sentry
handler = SentryHandler(client)
handler.setLevel(logging.WARNING)
setup_logging(handler)
self._sentry_client = client
self._sentry_logging_handler = handler
self.log.info(
"Connected to Sentry @ %s" % self.Session["AVALON_SENTRY"]
)
def _from_environment(self):
Session = {
item[0]: os.getenv(item[0], item[1])
for item in (
# Root directory of projects on disk
("AVALON_PROJECTS", None),
# Name of current Project
("AVALON_PROJECT", ""),
# Name of current Asset
("AVALON_ASSET", ""),
# Name of current silo
("AVALON_SILO", ""),
# Name of current task
("AVALON_TASK", None),
# Name of current app
("AVALON_APP", None),
# Path to working directory
("AVALON_WORKDIR", None),
# Name of current Config
# TODO(marcus): Establish a suitable default config
("AVALON_CONFIG", "no_config"),
# Name of Avalon in graphical user interfaces
# Use this to customise the visual appearance of Avalon
# to better integrate with your surrounding pipeline
("AVALON_LABEL", "Avalon"),
# Used during any connections to the outside world
("AVALON_TIMEOUT", "1000"),
# Address to Asset Database
("AVALON_MONGO", "mongodb://localhost:27017"),
# Name of database used in MongoDB
("AVALON_DB", "avalon"),
# Address to Sentry
("AVALON_SENTRY", None),
# Address to Deadline Web Service
# E.g. http://192.167.0.1:8082
("AVALON_DEADLINE", None),
# Enable features that are not necessarily stable, at the user's own risk
("AVALON_EARLY_ADOPTER", None),
# Address of central asset repository, contains
# the following interface:
# /upload
# /download
# /manager (optional)
("AVALON_LOCATION", "http://127.0.0.1"),
# Boolean of whether to upload published material
# to central asset repository
("AVALON_UPLOAD", None),
# Generic username and password
("AVALON_USERNAME", "avalon"),
("AVALON_PASSWORD", "secret"),
# Unique identifier for instances in working files
("AVALON_INSTANCE_ID", "avalon.instance"),
("AVALON_CONTAINER_ID", "avalon.container"),
# Enable debugging
("AVALON_DEBUG", None),
) if os.getenv(item[0], item[1]) is not None
}
Session["schema"] = "avalon-core:session-1.0"
try:
schema.validate(Session)
except schema.ValidationError as e:
# TODO(marcus): Make this mandatory
self.log.warning(e)
return Session
def uninstall(self):
"""Close any connection to the database"""
try:
self._mongo_client.close()
except AttributeError:
pass
self._mongo_client = None
self._database = None
self._is_installed = False
def active_project(self):
"""Return the name of the active project"""
return self.Session["AVALON_PROJECT"]
def activate_project(self, project_name):
self.Session["AVALON_PROJECT"] = project_name
def projects(self):
"""List available projects
Returns:
list of project documents
"""
collection_names = self.collections()
for project in collection_names:
if project in ("system.indexes",):
continue
# Each collection will have exactly one project document
document = self.find_project(project)
if document is not None:
yield document
def locate(self, path):
"""Traverse a hierarchy from top-to-bottom
Example:
representation = locate(["hulk", "Bruce", "modelDefault", 1, "ma"])
Returns:
representation (ObjectId)
"""
components = zip(
("project", "asset", "subset", "version", "representation"),
path
)
parent = None
for type_, name in components:
latest = (type_ == "version") and name in (None, -1)
try:
if latest:
parent = self.find_one(
filter={
"type": type_,
"parent": parent
},
projection={"_id": 1},
sort=[("name", -1)]
)["_id"]
else:
parent = self.find_one(
filter={
"type": type_,
"name": name,
"parent": parent
},
projection={"_id": 1},
)["_id"]
except TypeError:
return None
return parent
@auto_reconnect
def collections(self):
return self._database.collection_names()
@auto_reconnect
def find_project(self, project):
return self._database[project].find_one({"type": "project"})
@auto_reconnect
def insert_one(self, item):
assert isinstance(item, dict), "item must be of type <dict>"
schema.validate(item)
return self._database[self.Session["AVALON_PROJECT"]].insert_one(item)
@auto_reconnect
def insert_many(self, items, ordered=True):
# check if all items are valid
assert isinstance(items, list), "`items` must be of type <list>"
for item in items:
assert isinstance(item, dict), "`item` must be of type <dict>"
schema.validate(item)
return self._database[self.Session["AVALON_PROJECT"]].insert_many(
items,
ordered=ordered)
@auto_reconnect
def find(self, filter, projection=None, sort=None):
return self._database[self.Session["AVALON_PROJECT"]].find(
filter=filter,
projection=projection,
sort=sort
)
@auto_reconnect
def find_one(self, filter, projection=None, sort=None):
assert isinstance(filter, dict), "filter must be <dict>"
return self._database[self.Session["AVALON_PROJECT"]].find_one(
filter=filter,
projection=projection,
sort=sort
)
@auto_reconnect
def save(self, *args, **kwargs):
return self._database[self.Session["AVALON_PROJECT"]].save(
*args, **kwargs)
@auto_reconnect
def replace_one(self, filter, replacement):
return self._database[self.Session["AVALON_PROJECT"]].replace_one(
filter, replacement)
@auto_reconnect
def update_many(self, filter, update):
return self._database[self.Session["AVALON_PROJECT"]].update_many(
filter, update)
@auto_reconnect
def distinct(self, *args, **kwargs):
return self._database[self.Session["AVALON_PROJECT"]].distinct(
*args, **kwargs)
@auto_reconnect
def drop(self, *args, **kwargs):
return self._database[self.Session["AVALON_PROJECT"]].drop(
*args, **kwargs)
@auto_reconnect
def delete_many(self, *args, **kwargs):
return self._database[self.Session["AVALON_PROJECT"]].delete_many(
*args, **kwargs)
def parenthood(self, document):
assert document is not None, "This is a bug"
parents = list()
while document.get("parent") is not None:
document = self.find_one({"_id": document["parent"]})
if document is None:
break
parents.append(document)
return parents
@contextlib.contextmanager
def tempdir(self):
tempdir = tempfile.mkdtemp()
try:
yield tempdir
finally:
shutil.rmtree(tempdir)
def download(self, src, dst):
"""Download `src` to `dst`
Arguments:
src (str): URL to source file
dst (str): Absolute path to destination file
Yields tuple (progress, error):
progress (int): Between 0-100
error (Exception): Any exception raised when first making connection
"""
try:
response = requests.get(
src,
stream=True,
auth=requests.auth.HTTPBasicAuth(
self.Session["AVALON_USERNAME"],
self.Session["AVALON_PASSWORD"]
)
)
except requests.ConnectionError as e:
yield None, e
return
with self.tempdir() as dirname:
tmp = os.path.join(dirname, os.path.basename(src))
with open(tmp, "wb") as f:
total_length = response.headers.get("content-length")
if total_length is None: # no content length header
f.write(response.content)
else:
downloaded = 0
total_length = int(total_length)
for data in response.iter_content(chunk_size=4096):
downloaded += len(data)
f.write(data)
yield int(100.0 * downloaded / total_length), None
try:
os.makedirs(os.path.dirname(dst))
except OSError as e:
# An already existing destination directory is fine.
if e.errno != errno.EEXIST:
raise
shutil.copy(tmp, dst)
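# Editor's sketch (not part of the commit): a minimal round trip with the
# connector above; env values and urls are examples only.
os.environ.setdefault("AVALON_MONGO", "mongodb://localhost:27017")
os.environ.setdefault("AVALON_PROJECT", "jakub_projectx")
io = DbConnector()
io.install()  # connects and builds io.Session from the environment
project = io.find_one({"type": "project"})
for progress, error in io.download("http://127.0.0.1/clip.mp4", "/tmp/clip.mp4"):
    if error:
        break  # connection failed before streaming began
io.uninstall()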

View file

@@ -1,135 +1,26 @@
import os
import re
import sys
from avalon import io, api as avalon, lib as avalonlib
from pype import lib
from pype import api as pype
# from pypeapp.api import (Templates, Logger, format)
from pypeapp import Logger, Anatomy
log = Logger().get_logger(__name__, os.getenv("AVALON_APP", "pype-config"))
import pype.api as pype
def get_asset():
"""
Obtain Asset string from session or environment variable
Returns:
string: asset name
Raises:
log: error
"""
lib.set_io_database()
asset = io.Session.get("AVALON_ASSET", None) \
or os.getenv("AVALON_ASSET", None)
log.info("asset: {}".format(asset))
assert asset, log.error("missing `AVALON_ASSET` "
"in avalon session "
"or os.environ!")
return asset
def get_anatomy(**kwarg):
return pype.Anatomy
def format_anatomy(data):
from .templates import (
get_anatomy
)
file = script_name()
anatomy = get_anatomy()
# TODO: perhaps should be in try!
padding = anatomy.render.padding
data.update({
"hierarchy": pype.get_hierarchy(),
"frame": "#" * padding,
"VERSION": pype.get_version_from_workfile(file)
})
# log.info("format_anatomy:anatomy: {}".format(anatomy))
return anatomy.format(data)
def get_context_data(
project_name=None, hierarchy=None, asset=None, task_name=None
):
"""
Collect all main contextual data
Args:
project_name (string, optional): project name
hierarchy (string, optional): hierarchy path
asset (string, optional): asset name
task_name (string, optional): task name
Returns:
dict: contextual data
"""
if not task_name:
lib.set_io_database()
task_name = io.Session.get("AVALON_TASK", None) \
or os.getenv("AVALON_TASK", None)
assert task_name, log.error(
"missing `AVALON_TASK` in avalon session or os.environ!"
)
application = avalonlib.get_application(os.environ["AVALON_APP_NAME"])
os.environ['AVALON_PROJECT'] = project_name
io.Session['AVALON_PROJECT'] = project_name
if not hierarchy:
hierarchy = pype.get_hierarchy()
project_doc = io.find_one({"type": "project"})
data = {
"task": task_name,
"asset": asset or get_asset(),
"project": {
"name": project_doc["name"],
"code": project_doc["data"].get("code", '')
},
"hierarchy": hierarchy,
"app": application["application_dir"]
}
return data
def set_avalon_workdir(
project=None, hierarchy=None, asset=None, task=None
):
"""
Updates os.environ and session with filled workdir
Args:
project (string, optional): project name
hierarchy (string, optional): hierarchy path
asset (string, optional): asset name
task (string, optional): task name
Returns:
os.environ[AVALON_WORKDIR]: workdir path
avalon.session[AVALON_WORKDIR]: workdir path
"""
lib.set_io_database()
awd = io.Session.get("AVALON_WORKDIR", None) or \
os.getenv("AVALON_WORKDIR", None)
data = get_context_data(project, hierarchy, asset, task)
if (not awd) or ("{" not in awd):
anatomy_filled = Anatomy(io.Session["AVALON_PROJECT"]).format(data)
awd = anatomy_filled["work"]["folder"]
awd_filled = os.path.normpath(format(awd, data))
io.Session["AVALON_WORKDIR"] = awd_filled
os.environ["AVALON_WORKDIR"] = awd_filled
log.info("`AVALON_WORKDIR` fixed to: {}".format(awd_filled))
def get_workdir_template(data=None):
"""
Obtain workdir templated path from Anatomy()
Args:
data (dict, optional): basic contextual data
Returns:
string: template path
"""
anatomy = Anatomy()
anatomy_filled = anatomy.format(data or get_context_data())
try:
work = anatomy_filled["work"]
except Exception as e:
log.error(
"{0} Error in get_workdir_template(): {1}".format(__name__, str(e))
)
return work
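# Editor's sketch (not part of the commit): how the helpers above compose; all
# context values are invented.
data = get_context_data(
    project_name="jakub_projectx",
    hierarchy="thisFolder/e09/s031",
    asset="e09s031_0040",
    task_name="edit")
set_avalon_workdir("jakub_projectx", data["hierarchy"], data["asset"], "edit")
work_template = get_workdir_template(data)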

View file

@@ -1,252 +0,0 @@
# api.py
import os
import sys
import tempfile
import pico
from pico import PicoApp
from pico.decorators import request_args, set_cookie, delete_cookie, stream
from pico.decorators import header, cookie
from werkzeug.exceptions import Unauthorized, ImATeapot, BadRequest
from avalon import api as avalon
from avalon import io
import pyblish.api as pyblish
from pypeapp import execute
from pype import api as pype
log = pype.Logger().get_logger(__name__, "aport")
SESSION = avalon.session
if not SESSION:
io.install()
@pico.expose()
def publish(json_data_path, staging_dir=None):
"""
Runs standalone pyblish and adds link to
data in external json file
It is necessary to run `register_plugin_path` if particular
host is needed
Args:
json_data_path (string): path to temp json file with
context data
staging_dir (string, optional): path to temp directory
Returns:
dict: return_json_path
Raises:
Exception: description
"""
cwd = os.getenv('AVALON_WORKDIR').replace("\\", "/")
os.chdir(cwd)
log.info(os.getcwd())
staging_dir = tempfile.mkdtemp(prefix="pype_aport_").replace("\\", "/")
log.info("staging_dir: {}".format(staging_dir))
return_json_path = os.path.join(staging_dir, "return_data.json")
log.info("avalon.session is: \n{}".format(SESSION))
pype_start = os.path.join(os.getenv('PYPE_ROOT'),
"app", "pype-start.py")
args = [pype_start, "--publish",
"-pp", os.environ["PUBLISH_PATH"],
"-d", "rqst_json_data_path", json_data_path,
"-d", "post_json_data_path", return_json_path
]
log.debug(args)
# start standalone pyblish qml
execute([
sys.executable, "-u"
] + args,
cwd=cwd
)
return {"return_json_path": return_json_path}
@pico.expose()
def context(project, asset, task, app):
# http://localhost:4242/pipeline/context?project=this&asset=shot01&task=comp
os.environ["AVALON_PROJECT"] = project
io.Session["AVALON_PROJECT"] = project
avalon.update_current_task(task, asset, app)
project_code = pype.get_project()["data"].get("code", '')
os.environ["AVALON_PROJECTCODE"] = project_code
io.Session["AVALON_PROJECTCODE"] = project_code
hierarchy = pype.get_hierarchy()
os.environ["AVALON_HIERARCHY"] = hierarchy
io.Session["AVALON_HIERARCHY"] = hierarchy
fix_paths = {k: v.replace("\\", "/") for k, v in SESSION.items()
if isinstance(v, str)}
SESSION.update(fix_paths)
SESSION.update({"AVALON_HIERARCHY": hierarchy,
"AVALON_PROJECTCODE": project_code,
"current_dir": os.getcwd().replace("\\", "/")
})
return SESSION
@pico.expose()
def deregister_plugin_path():
if os.getenv("PUBLISH_PATH", None):
aport_plugin_path = [p.replace("\\", "/") for p in os.environ["PUBLISH_PATH"].split(
os.pathsep) if "aport" in p][0]
os.environ["PUBLISH_PATH"] = aport_plugin_path
else:
log.warning("deregister_plugin_path(): No PUBLISH_PATH is registred")
return "Publish path deregistered"
@pico.expose()
def register_plugin_path(publish_path):
deregister_plugin_path()
if os.getenv("PUBLISH_PATH", None):
os.environ["PUBLISH_PATH"] = os.pathsep.join(
os.environ["PUBLISH_PATH"].split(os.pathsep) +
[publish_path.replace("\\", "/")]
)
else:
os.environ["PUBLISH_PATH"] = publish_path
log.info(os.environ["PUBLISH_PATH"].split(os.pathsep))
return "Publish registered paths: {}".format(
os.environ["PUBLISH_PATH"].split(os.pathsep)
)
@pico.expose()
def nuke_test():
import nuke
n = nuke.createNode("Constant")
log.info(n)
@pico.expose()
def hello(who='world'):
return 'Hello %s' % who
@pico.expose()
def multiply(x, y):
return x * y
@pico.expose()
def fail():
raise Exception('fail!')
@pico.expose()
def make_coffee():
raise ImATeapot()
@pico.expose()
def upload(upload, filename):
if not filename.endswith('.txt'):
raise BadRequest('Upload must be a .txt file!')
return upload.read().decode()
@pico.expose()
@request_args(ip='remote_addr')
def my_ip(ip):
return ip
@pico.expose()
@request_args(ip=lambda req: req.remote_addr)
def my_ip3(ip):
return ip
@pico.prehandle()
def set_user(request, kwargs):
if request.authorization:
if request.authorization.password != 'secret':
raise Unauthorized('Incorrect username or password')
request.user = request.authorization.username
else:
request.user = None
@pico.expose()
@request_args(username='user')
def current_user(username):
return username
@pico.expose()
@request_args(session=cookie('session_id'))
def session_id(session):
return session
@pico.expose()
@set_cookie()
def start_session():
return {'session_id': '42'}
@pico.expose()
@delete_cookie('session_id')
def end_session():
return True
@pico.expose()
@request_args(session=header('x-session-id'))
def session_id2(session):
return session
@pico.expose()
@stream()
def countdown(n=10):
for i in reversed(range(n)):
yield '%i' % i
time.sleep(0.5)
@pico.expose()
def user_description(user):
return '{name} is a {occupation} aged {age}'.format(**user)
@pico.expose()
def show_source():
return open(__file__.replace('.pyc', '.py')).read()
app = PicoApp()
app.register_module(__name__)
# remove all Handlers created by pico
for name, handler in [(handler.get_name(), handler)
for handler in Logger().logging.root.handlers[:]]:
if "pype" not in str(name).lower():
print(name)
print(handler)
Logger().logging.root.removeHandler(handler)

View file

@@ -1,196 +0,0 @@
<!DOCTYPE html>
<html>
<head>
<meta name="viewport" content="width=device-width, initial-scale=1">
<title>Pico Example - Everything</title>
<!-- Load the pico Javascript client, always automatically available at /pico.js -->
<script src="/pico.js"></script>
<!-- Or load our module proxy -->
<script src="/api.js"></script>
<link rel="stylesheet" href="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.6/css/bootstrap.min.css" integrity="sha384-1q8mTJOASx8j1Au+a5WDVnPi2lkFfwwEAa8hDDdjZlpLegxhjVME1fgjWPGmkzs7" crossorigin="anonymous">
<link rel="stylesheet" href="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.6/css/bootstrap-theme.min.css" integrity="sha384-fLW2N01lMqjakBkx3l/M9EahuwpSfeNvV63J5ezn3uZzapT0u7EYsXMjQV+0En5r" crossorigin="anonymous">
<link rel="stylesheet" href="//cdnjs.cloudflare.com/ajax/libs/highlight.js/9.6.0/styles/default.min.css">
<script src="//cdnjs.cloudflare.com/ajax/libs/highlight.js/9.6.0/highlight.min.js"></script>
<script></script>
<style type="text/css">
html, body {
height: 100%;
margin: 0px;
padding: 0px;
}
div {
padding: 5px;
}
#container {
height: 100%;
}
#header {
height: 5%;
}
#main {
height: 70%;
}
#output {
background-color: #333;
color: #aaa;
min-height: 15%;
overflow-y: scroll;
padding: 20px;
position: fixed;
bottom: 0px;
width: 100%;
}
.error {
color: #f00 !important;
}
#examples li{
padding: 10px;
margin: 10px;
background-color: silver;
}
code {
border-radius: 0;
margin: 5px;
white-space: pre !important;
}
#source {
height: 100%;
}
#examples {
height: 100%;
}
#spacer {
height: 20%;
}
.highlight {
background-color: yellow;
}
</style>
</head>
<body>
<div id="container">
<div class="row row-eq-height">
<div class="col-md-12">
<h1>Pico Examples</h1>
<p>Here we show some simple examples of using Pico. Click any <code>api.X</code> link to see the corresponding Python source.</p>
</div>
</div>
<div class="row row-eq-height" id="main">
<div class="col-md-6" id="examples">
<ol>
<li id="example1">
<h4>Hello World</h4>
<pre><code class="js"></code></pre>
Name: <input type="text" name="name" value="Bob"/>
<button class="btn btn-default btn-sm" type="button" onclick="example1()">Submit</button>
</li>
<li id="deregister">
<h4>deregister_plugin_path</h4>
<pre><code class="js"></code></pre>
<button class="btn btn-default btn-sm" type="button" onclick="deregister()">Deregister</button>
</li>
<li id="register">
<h4>register_plugin_path</h4>
<pre><code class="js"></code></pre>
Path: <input type="text" name="path" value="C:/Users/hubert/CODE/pype-setup/repos/pype-config/pype/plugins/premiere/publish"/>
<button class="btn btn-default btn-sm" type="button" onclick="register()">Register path</button>
</li>
<li id="example2">
<h4>Numeric Multiplication</h4>
<pre><code class="js"></code></pre>
<input type="number" name="x" value="6"/> x <input type="number" name="y" value="7"/>
<button class="btn btn-default btn-sm" type="button" onclick="example2()">Multiply</button>
</li>
<li id="example3">
<h4>File Upload</h4>
<pre><code class="js"></code></pre>
<input type="file" name="upload"/>
<button class="btn btn-default btn-sm" type="button" onclick="example3()">Upload</button>
</li>
<li id="example4">
<h4>Request parameters (IP address)</h4>
<pre><code class="js"></code></pre>
<button class="btn btn-default btn-sm" type="button" onclick="example4()">What's my IP?</button>
</li>
<li id="example5">
<h4>Authentication</h4>
<pre><code class="js"></code></pre>
<p class="bg-info">Note: see <a href="#set_user" onclick="jumpTo('set_user')">api.set_user</a> for the authentication handler.</p>
Username: <input type="text" name="username" value="bob"/>
Password: <input type="password" name="password" value="secret"/>
<button class="btn btn-default btn-sm" type="button" onclick="example5()">Sign In</button>
</li>
<li id="example6">
<h4>Sessions (cookies)</h4>
<pre><code class="js"></code></pre>
<button class="btn btn-default btn-sm" type="button" onclick="example6()">What's my session id?</button>
</li>
<li id="example7">
<h4>Sessions (header)</h4>
<pre><code class="js"></code></pre>
<button class="btn btn-default btn-sm" type="button" onclick="example7()">What's my session id?</button>
</li>
<li id="example8">
<h4>Streaming Response</h4>
<pre><code class="js"></code></pre>
<button class="btn btn-default btn-sm" type="button" onclick="example8()">Countdown</button>
</li>
<li id="example9">
<h4>Objects</h4>
<pre><code class="js"></code></pre>
<button class="btn btn-default btn-sm" type="button" onclick="example9()">Submit</button>
</li>
<li id="example10">
<h4>Errors</h4>
<pre><code class="js"></code></pre>
<button class="btn btn-default btn-sm" type="button" onclick="example10()">Submit</button>
</li>
<li id="example11">
<h4>Errors</h4>
<pre><code class="js"></code></pre>
<button class="btn btn-default btn-sm" type="button" onclick="example11()">Submit</button>
</li>
<li id="example12">
<h4>Forms</h4>
<p>This example submits a form as a whole instead of individual arguments.
The form input names must match the function argument names.
</p>
<pre><code class="html"></code></pre>
<pre><code class="js"></code></pre>
<div class="example">
<form>
x: <input type="number" name="x" value="6"/><br/>
y: <input type="number" name="y" value="7"/>
</form>
<button class="btn btn-default btn-sm" type="button" onclick="example12()">Multiply</button>
</div>
</li>
<li id="example13">
<h4>JSON</h4>
<p>This example submits data as JSON instead of individual arguments.
The object keys must match the function argument names.
</p>
<pre><code class="js"></code></pre>
<button class="btn btn-default btn-sm" type="button" onclick="example13()">Multiply</button>
</li>
</ol>
<div id="spacer">
</div>
</div>
<div class="col-md-6" id="source">
<pre><code class="python"></code></pre>
</div>
</div>
<div class="row" id="output">
</div>
</div>
<script src="script.js"></script>
</body>
</html>

View file

@@ -1,146 +0,0 @@
import os
import sys
import tempfile
import pico
# from pico.decorators import request_args, prehandle
from pico import PicoApp
from pico import client
from avalon import api as avalon
from avalon import io
import pyblish.api as pyblish
from pypeapp import execute
from pype import api as pype
# remove all Handlers created by pico
for name, handler in [(handler.get_name(), handler)
for handler in pype.Logger.logging.root.handlers[:]]:
if "pype" not in str(name).lower():
pype.Logger.logging.root.removeHandler(handler)
log = pype.Logger().get_logger(__name__, "aport")
SESSION = avalon.session
if not SESSION:
io.install()
@pico.expose()
def publish(json_data_path, staging_dir=None):
"""
Runs standalone pyblish and adds link to
data in external json file
It is necessary to run `register_plugin_path` if particular
host is needed
Args:
json_data_path (string): path to temp json file with
context data
staging_dir (string, optional): path to temp directory
Returns:
dict: return_json_path
Raises:
Exception: description
"""
staging_dir = staging_dir \
or tempfile.mkdtemp(prefix="pype_aport_")
return_json_path = os.path.join(staging_dir, "return_data.json")
log.debug("avalon.session is: \n{}".format(SESSION))
pype_start = os.path.join(os.getenv('PYPE_ROOT'),
"app", "pype-start.py")
args = [pype_start, "--publish",
"-pp", os.environ["PUBLISH_PATH"],
"-d", "rqst_json_data_path", json_data_path,
"-d", "post_json_data_path", return_json_path
]
log.debug(args)
# start standalone pyblish qml
execute([
sys.executable, "-u"
] + args,
cwd=os.getenv('AVALON_WORKDIR').replace("\\", "/")
)
return {"return_json_path": return_json_path}
@pico.expose()
def context(project, asset, task, app):
# http://localhost:4242/pipeline/context?project=this&asset=shot01&task=comp
os.environ["AVALON_PROJECT"] = project
io.Session["AVALON_PROJECT"] = project
avalon.update_current_task(task, asset, app)
project_code = pype.get_project()["data"].get("code", '')
os.environ["AVALON_PROJECTCODE"] = project_code
io.Session["AVALON_PROJECTCODE"] = project_code
hierarchy = pype.get_hierarchy()
os.environ["AVALON_HIERARCHY"] = hierarchy
io.Session["AVALON_HIERARCHY"] = hierarchy
fix_paths = {k: v.replace("\\", "/") for k, v in SESSION.items()
if isinstance(v, str)}
SESSION.update(fix_paths)
SESSION.update({"AVALON_HIERARCHY": hierarchy,
"AVALON_PROJECTCODE": project_code,
"current_dir": os.getcwd().replace("\\", "/")
})
return SESSION
@pico.expose()
def deregister_plugin_path():
if os.getenv("PUBLISH_PATH", None):
aport_plugin_path = [p.replace("\\", "/") for p in os.environ["PUBLISH_PATH"].split(
os.pathsep) if "aport" in p][0]
os.environ["PUBLISH_PATH"] = aport_plugin_path
else:
log.warning("deregister_plugin_path(): No PUBLISH_PATH is registred")
return "Publish path deregistered"
@pico.expose()
def register_plugin_path(publish_path):
deregister_plugin_path()
if os.getenv("PUBLISH_PATH", None):
os.environ["PUBLISH_PATH"] = os.pathsep.join(
os.environ["PUBLISH_PATH"].split(os.pathsep) +
[publish_path.replace("\\", "/")]
)
else:
os.environ["PUBLISH_PATH"] = publish_path
log.info(os.environ["PUBLISH_PATH"].split(os.pathsep))
return "Publish registered paths: {}".format(
os.environ["PUBLISH_PATH"].split(os.pathsep)
)
@pico.expose()
def nuke_test():
import nuke
n = nuke.createNode("Constant")
log.info(n)
app = PicoApp()
app.register_module(__name__)

pype/aport/pipeline.py
View file

@@ -0,0 +1,130 @@
import sys
import os
import getpass
from app.api import Logger
from io_nonsingleton import DbConnector
io = DbConnector()
log = Logger.getLogger(__name__, "aport")
self = sys.modules[__name__]
self.SESSION = None
self._registered_root = {"_": ""}
self.AVALON_PROJECT = os.getenv("AVALON_PROJECT", None)
self.AVALON_ASSET = os.getenv("AVALON_ASSET", None)
self.AVALON_TASK = os.getenv("AVALON_TASK", None)
self.AVALON_SILO = os.getenv("AVALON_SILO", None)
def get_session():
if not self.SESSION:
io.install()
self.SESSION = io.Session
# for k, v in os.environ.items():
# if 'AVALON' in k:
# print(str((k, v)))
return self.SESSION
def update_current_task(task=None, asset=None, app=None):
"""Update active Session to a new task work area.
This updates the live Session to a different `asset`, `task` or `app`.
Args:
task (str): The task to set.
asset (str): The asset to set.
app (str): The app to set.
Returns:
dict: The changed key, values in the current Session.
"""
mapping = {
"AVALON_ASSET": asset,
"AVALON_TASK": task,
"AVALON_APP": app,
}
changed = {key: value for key, value in mapping.items() if value}
if not changed:
return
# Update silo when asset changed
if "AVALON_ASSET" in changed:
asset_document = io.find_one({"name": changed["AVALON_ASSET"],
"type": "asset"})
assert asset_document, "Asset must exist"
silo = asset_document["silo"]
if silo is None:
silo = asset_document["name"]
changed["AVALON_SILO"] = silo
parents = asset_document['data']['parents']
hierarchy = ""
if len(parents) > 0:
hierarchy = os.path.sep.join(parents)
changed['AVALON_HIERARCHY'] = hierarchy
# Compute work directory (with the temporary changed session so far)
project = io.find_one({"type": "project"},
projection={"config.template.work": True})
template = project["config"]["template"]["work"]
_session = self.SESSION.copy()
_session.update(changed)
changed["AVALON_WORKDIR"] = _format_work_template(template, _session)
# Update the full session in one go to avoid half updates
self.SESSION.update(changed)
# Update the environment
os.environ.update(changed)
return changed
def _format_work_template(template, session=None):
"""Return a formatted configuration template with a Session.
Note: This *cannot* format the templates for published files since the
session does not hold the context for a published file. Instead use
`get_representation_path` to parse the full path to a published file.
Args:
template (str): The template to format.
session (dict, Optional): The Session to use. If not provided use the
currently active global Session.
Returns:
str: The fully formatted path.
"""
if session is None:
session = self.SESSION
project = io.find_one({'type': 'project'})
return template.format(**{
"root": registered_root(),
"project": {
"name": project.get("name", session["AVALON_PROJECT"]),
"code": project["data"].get("code", ''),
},
"silo": session["AVALON_SILO"],
"hierarchy": session['AVALON_HIERARCHY'],
"asset": session["AVALON_ASSET"],
"task": session["AVALON_TASK"],
"app": session["AVALON_APP"],
"user": session.get("AVALON_USER", getpass.getuser())
})
def registered_root():
"""Return currently registered root"""
return os.path.normpath(
self._registered_root["_"]
or self.SESSION.get("AVALON_PROJECTS") or ""
)
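# Editor's sketch (not part of the commit): the intended flow of this module
# inside aport; the context values are examples.
session = get_session()  # installs io and grabs its Session
changed = update_current_task(task="edit", asset="e09s031_0040", app="aport")
# `changed` now also carries AVALON_WORKDIR, computed from the project's
# config.template.work via _format_work_template()
print(registered_root())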

View file

@@ -1,12 +1,10 @@
from pype import api as pype
from pypeapp import Anatomy, config
log = pype.Logger().get_logger(__name__, "aport")
log = pype.Logger.getLogger(__name__, "aport")
def get_anatomy(**kwarg):
return Anatomy()
return pype.Anatomy
def get_dataflow(**kwarg):
@@ -17,8 +15,7 @@ def get_dataflow(**kwarg):
assert any([host, cls]), log.error("aport.templates.get_dataflow():"
"Missing mandatory kwargs `host`, `cls`")
presets = config.get_init_presets()
aport_dataflow = getattr(presets["dataflow"], str(host), None)
aport_dataflow = getattr(pype.Dataflow, str(host), None)
aport_dataflow_node = getattr(aport_dataflow.nodes, str(cls), None)
if preset:
aport_dataflow_node = getattr(aport_dataflow_node, str(preset), None)
@@ -35,8 +32,7 @@ def get_colorspace(**kwarg):
assert any([host, cls]), log.error("aport.templates.get_colorspace():"
"Missing mandatory kwargs `host`, `cls`")
presets = config.get_init_presets()
aport_colorspace = getattr(presets["colorspace"], str(host), None)
aport_colorspace = getattr(pype.Colorspace, str(host), None)
aport_colorspace_node = getattr(aport_colorspace, str(cls), None)
if preset:
aport_colorspace_node = getattr(aport_colorspace_node, str(preset), None)

View file

@@ -1,2 +1,2 @@
from .lib import *
from .ftrack_server import *
from .ftrack_server import FtrackServer

View file

@@ -1,129 +0,0 @@
import sys
import argparse
import logging
from pype.vendor import ftrack_api
from pype.ftrack import BaseAction
class AssetDelete(BaseAction):
'''Custom action.'''
#: Action identifier.
identifier = 'asset.delete'
#: Action label.
label = 'Asset Delete'
def discover(self, session, entities, event):
''' Validation '''
if (
len(entities) != 1 or
entities[0].entity_type not in ['Shot', 'Asset Build']
):
return False
return True
def interface(self, session, entities, event):
if not event['data'].get('values', {}):
entity = entities[0]
items = []
for asset in entity['assets']:
# get asset name for label
label = 'None'
if asset['name']:
label = asset['name']
items.append({
'label': label,
'name': label,
'value': False,
'type': 'boolean'
})
if len(items) < 1:
return {
'success': False,
'message': 'There are no assets to delete'
}
return items
def launch(self, session, entities, event):
entity = entities[0]
# if values were set remove those items
if 'values' in event['data']:
values = event['data']['values']
# get list of assets to delete from form
to_delete = []
for key in values:
if values[key]:
to_delete.append(key)
# delete them by name
for asset in entity['assets']:
if asset['name'] in to_delete:
session.delete(asset)
try:
session.commit()
except Exception:
session.rollback()
raise
return {
'success': True,
'message': 'Asset deleted.'
}
def register(session, plugins_presets={}):
'''Register action. Called when used as an event plugin.'''
# Validate that session is an instance of ftrack_api.Session. If not,
# assume that register is being called from an old or incompatible API and
# return without doing anything.
if not isinstance(session, ftrack_api.session.Session):
return
AssetDelete(session, plugins_presets).register()
def main(arguments=None):
'''Set up logging and register action.'''
if arguments is None:
arguments = []
parser = argparse.ArgumentParser()
# Allow setting of logging level from arguments.
loggingLevels = {}
for level in (
logging.NOTSET, logging.DEBUG, logging.INFO, logging.WARNING,
logging.ERROR, logging.CRITICAL
):
loggingLevels[logging.getLevelName(level).lower()] = level
parser.add_argument(
'-v', '--verbosity',
help='Set the logging output verbosity.',
choices=loggingLevels.keys(),
default='info'
)
namespace = parser.parse_args(arguments)
# Set up basic logging
logging.basicConfig(level=loggingLevels[namespace.verbosity])
session = ftrack_api.Session()
register(session)
# Wait for events
logging.info(
'Registered actions and listening for events. Use Ctrl-C to abort.'
)
session.event_hub.wait()
if __name__ == '__main__':
raise SystemExit(main(sys.argv[1:]))

View file

@@ -8,6 +8,7 @@ from pype.ftrack.lib.io_nonsingleton import DbConnector
class AttributesRemapper(BaseAction):
'''Edit meta data action.'''
ignore_me = True
#: Action identifier.
identifier = 'attributes.remapper'
#: Action label.
@@ -280,7 +281,4 @@ class AttributesRemapper(BaseAction):
def register(session, plugins_presets={}):
'''Register plugin. Called when used as a plugin.'''
if not isinstance(session, ftrack_api.session.Session):
return
AttributesRemapper(session, plugins_presets).register()

View file

@@ -55,11 +55,8 @@ class ClientReviewSort(BaseAction):
def register(session, plugins_presets={}):
'''Register action. Called when used as an event plugin.'''
if not isinstance(session, ftrack_api.session.Session):
return
action_handler = ClientReviewSort(session, plugins_presets)
action_handler.register()
ClientReviewSort(session, plugins_presets).register()
def main(arguments=None):

View file

@@ -68,12 +68,6 @@ class ComponentOpen(BaseAction):
def register(session, plugins_presets={}):
'''Register action. Called when used as an event plugin.'''
# Validate that session is an instance of ftrack_api.Session. If not,
# assume that register is being called from an old or incompatible API and
# return without doing anything.
if not isinstance(session, ftrack_api.session.Session):
return
ComponentOpen(session, plugins_presets).register()

View file

@@ -572,12 +572,6 @@ class CustomAttributes(BaseAction):
def register(session, plugins_presets={}):
'''Register plugin. Called when used as a plugin.'''
# Validate that session is an instance of ftrack_api.Session. If not,
# assume that register is being called from an old or incompatible API and
# return without doing anything.
if not isinstance(session, ftrack_api.session.Session):
return
CustomAttributes(session, plugins_presets).register()

View file

@@ -327,9 +327,6 @@ class PartialDict(dict):
def register(session, plugins_presets={}):
'''Register plugin. Called when used as a plugin.'''
if not isinstance(session, ftrack_api.session.Session):
return
CreateFolders(session, plugins_presets).register()

View file

@@ -198,9 +198,6 @@ class CreateProjectFolders(BaseAction):
def register(session, plugins_presets={}):
'''Register plugin. Called when used as a plugin.'''
if not isinstance(session, ftrack_api.session.Session):
return
CreateProjectFolders(session, plugins_presets).register()

View file

@@ -9,6 +9,8 @@ from pype.ftrack import BaseAction
class CustomAttributeDoctor(BaseAction):
ignore_me = True
#: Action identifier.
identifier = 'custom.attributes.doctor'
#: Action label.
@@ -26,7 +28,9 @@ class CustomAttributeDoctor(BaseAction):
hierarchical_ca = ['handleStart', 'handleEnd', 'frameStart', 'frameEnd']
hierarchical_alternatives = {
'handleStart': 'handles',
'handleEnd': 'handles'
'handleEnd': 'handles',
"frameStart": "fstart",
"frameEnd": "fend"
}
# Roles for new custom attributes
@@ -290,9 +294,6 @@ class CustomAttributeDoctor(BaseAction):
def register(session, plugins_presets={}):
'''Register plugin. Called when used as a plugin.'''
if not isinstance(session, ftrack_api.session.Session):
return
CustomAttributeDoctor(session, plugins_presets).register()

View file

@@ -85,7 +85,7 @@ class DeleteAsset(BaseAction):
'type': 'asset',
'name': entity['name']
})
if av_entity is None:
return {
'success': False,
@@ -314,12 +314,6 @@ class DeleteAsset(BaseAction):
def register(session, plugins_presets={}):
'''Register plugin. Called when used as a plugin.'''
# Validate that session is an instance of ftrack_api.Session. If not,
# assume that register is being called from an old or incompatible API and
# return without doing anything.
if not isinstance(session, ftrack_api.session.Session):
return
DeleteAsset(session, plugins_presets).register()

View file

@@ -135,12 +135,6 @@ class AssetsRemover(BaseAction):
def register(session, plugins_presets={}):
'''Register plugin. Called when used as a plugin.'''
# Validate that session is an instance of ftrack_api.Session. If not,
# assume that register is being called from an old or incompatible API and
# return without doing anything.
if not isinstance(session, ftrack_api.session.Session):
return
AssetsRemover(session, plugins_presets).register()

View file

@@ -1,93 +0,0 @@
import sys
import argparse
import logging
from pype.vendor import ftrack_api
from pype.ftrack import BaseAction
class VersionsCleanup(BaseAction):
'''Custom action.'''
# Action identifier
identifier = 'versions.cleanup'
# Action label
label = 'Versions cleanup'
def discover(self, session, entities, event):
''' Validation '''
# Only 1 AssetVersion is allowed
if len(entities) != 1 or entities[0].entity_type != 'AssetVersion':
return False
return True
def launch(self, session, entities, event):
entity = entities[0]
# Go through all versions in asset
for version in entity['asset']['versions']:
if not version['is_published']:
session.delete(version)
try:
session.commit()
except Exception:
session.rollback()
raise
return {
'success': True,
'message': 'Hidden versions were removed'
}
def register(session, plugins_presets={}):
'''Register action. Called when used as an event plugin.'''
# Validate that session is an instance of ftrack_api.Session. If not,
# assume that register is being called from an old or incompatible API and
# return without doing anything.
if not isinstance(session, ftrack_api.session.Session):
return
VersionsCleanup(session, plugins_presets).register()
def main(arguments=None):
'''Set up logging and register action.'''
if arguments is None:
arguments = []
parser = argparse.ArgumentParser()
# Allow setting of logging level from arguments.
loggingLevels = {}
for level in (
logging.NOTSET, logging.DEBUG, logging.INFO, logging.WARNING,
logging.ERROR, logging.CRITICAL
):
loggingLevels[logging.getLevelName(level).lower()] = level
parser.add_argument(
'-v', '--verbosity',
help='Set the logging output verbosity.',
choices=loggingLevels.keys(),
default='info'
)
namespace = parser.parse_args(arguments)
# Set up basic logging
logging.basicConfig(level=loggingLevels[namespace.verbosity])
session = ftrack_api.Session()
register(session)
# Wait for events
logging.info(
'Registered actions and listening for events. Use Ctrl-C to abort.'
)
session.event_hub.wait()
if __name__ == '__main__':
raise SystemExit(main(sys.argv[1:]))

View file

@@ -220,8 +220,6 @@ class DJVViewAction(BaseAction):
def register(session, plugins_presets={}):
"""Register hooks."""
if not isinstance(session, ftrack_api.session.Session):
return
DJVViewAction(session, plugins_presets).register()

View file

@@ -101,13 +101,14 @@ class JobKiller(BaseAction):
# Update all the queried jobs, setting the status to failed.
for job in jobs:
try:
origin_status = job["status"]
job['status'] = 'failed'
session.commit()
self.log.debug((
'Changing Job ({}) status: {} -> failed'
).format(job['id'], job['status']))
).format(job['id'], origin_status))
except Exception:
self.warning.debug((
self.log.warning((
'Changing Job ({}) has failed'
).format(job['id']))
@@ -121,12 +122,6 @@
def register(session, plugins_presets={}):
'''Register plugin. Called when used as a plugin.'''
# Validate that session is an instance of ftrack_api.Session. If not,
# assume that register is being called from an old or incompatible API and
# return without doing anything.
if not isinstance(session, ftrack_api.session.Session):
return
JobKiller(session, plugins_presets).register()

View file

@@ -115,9 +115,6 @@ class MultipleNotes(BaseAction):
def register(session, plugins_presets={}):
'''Register plugin. Called when used as a plugin.'''
if not isinstance(session, ftrack_api.session.Session):
return
MultipleNotes(session, plugins_presets).register()

View file

@@ -372,7 +372,4 @@ class PrepareProject(BaseAction):
def register(session, plugins_presets={}):
'''Register plugin. Called when used as a plugin.'''
if not isinstance(session, ftrack_api.session.Session):
return
PrepareProject(session, plugins_presets).register()

View file

@@ -328,8 +328,6 @@ class RVAction(BaseAction):
def register(session, plugins_presets={}):
"""Register hooks."""
if not isinstance(session, ftrack_api.session.Session):
return
RVAction(session, plugins_presets).register()

View file

@@ -1,122 +0,0 @@
import sys
import argparse
import logging
from pype.vendor import ftrack_api
from pype.ftrack import BaseAction
class SetVersion(BaseAction):
'''Custom action.'''
#: Action identifier.
identifier = 'version.set'
#: Action label.
label = 'Version Set'
def discover(self, session, entities, event):
''' Validation '''
# Only 1 AssetVersion is allowed
if len(entities) != 1 or entities[0].entity_type != 'AssetVersion':
return False
return True
def interface(self, session, entities, event):
if not event['data'].get('values', {}):
entity = entities[0]
# Get actual version of asset
act_ver = entity['version']
# Set form
items = [{
'label': 'Version number',
'type': 'number',
'name': 'version_number',
'value': act_ver
}]
return items
def launch(self, session, entities, event):
entity = entities[0]
# Do something with the values or return a new form.
values = event['data'].get('values', {})
# Default is action True
scs = False
if not values['version_number']:
msg = 'You didn\'t enter any version.'
elif int(values['version_number']) <= 0:
msg = 'Negative or zero version is not valid.'
else:
try:
entity['version'] = values['version_number']
session.commit()
msg = 'Version was changed to v{0}'.format(
values['version_number']
)
scs = True
except Exception as e:
msg = 'Unexpected error occurs during version set ({})'.format(
str(e)
)
return {
'success': scs,
'message': msg
}
def register(session, plugins_presets={}):
'''Register action. Called when used as an event plugin.'''
# Validate that session is an instance of ftrack_api.Session. If not,
# assume that register is being called from an old or incompatible API and
# return without doing anything.
if not isinstance(session, ftrack_api.session.Session):
return
SetVersion(session, plugins_presets).register()
def main(arguments=None):
'''Set up logging and register action.'''
if arguments is None:
arguments = []
parser = argparse.ArgumentParser()
# Allow setting of logging level from arguments.
loggingLevels = {}
for level in (
logging.NOTSET, logging.DEBUG, logging.INFO, logging.WARNING,
logging.ERROR, logging.CRITICAL
):
loggingLevels[logging.getLevelName(level).lower()] = level
parser.add_argument(
'-v', '--verbosity',
help='Set the logging output verbosity.',
choices=loggingLevels.keys(),
default='info'
)
namespace = parser.parse_args(arguments)
# Set up basic logging
logging.basicConfig(level=loggingLevels[namespace.verbosity])
session = ftrack_api.Session()
register(session)
# Wait for events
logging.info(
'Registered actions and listening for events. Use Ctrl-C to abort.'
)
session.event_hub.wait()
if __name__ == '__main__':
raise SystemExit(main(sys.argv[1:]))

View file

@@ -26,7 +26,7 @@ class StartTimer(BaseAction):
user.start_timer(entity, force=True)
self.session.commit()
self.log.info(
"Starting Ftrack timer for task: {}".format(entity['name'])
)
@@ -37,7 +37,4 @@ class StartTimer(BaseAction):
def register(session, plugins_presets={}):
'''Register plugin. Called when used as a plugin.'''
if not isinstance(session, ftrack_api.session.Session):
return
StartTimer(session, plugins_presets).register()

View file

@ -309,9 +309,6 @@ class SyncHierarchicalAttrs(BaseAction):
def register(session, plugins_presets={}):
'''Register plugin. Called when used as a plugin.'''
if not isinstance(session, ftrack_api.session.Session):
return
SyncHierarchicalAttrs(session, plugins_presets).register()

View file

@ -1,11 +1,14 @@
import os
import sys
import time
import argparse
import logging
import json
import collections
from pype.vendor import ftrack_api
from pype.ftrack import BaseAction, lib as ftracklib
from pype.ftrack import BaseAction
from pype.ftrack.lib import avalon_sync as ftracklib
from pype.vendor.ftrack_api import session as fa_session
@ -60,6 +63,20 @@ class SyncToAvalon(BaseAction):
#: Action priority
priority = 200
project_query = (
"select full_name, name, custom_attributes"
", project_schema._task_type_schema.types.name"
" from Project where full_name is \"{}\""
)
entities_query = (
"select id, name, parent_id, link, custom_attributes"
" from TypedContext where project.full_name is \"{}\""
)
# Entity type names (lowercased) that won't be synchronized, including their children
ignore_entity_types = ["task", "milestone"]
def __init__(self, session, plugins_presets):
super(SyncToAvalon, self).__init__(session)
# reload utils on initialize (in case of server restart)
@ -73,6 +90,7 @@ class SyncToAvalon(BaseAction):
return False
def launch(self, session, entities, event):
time_start = time.time()
message = ""
# JOB SETTINGS
@ -88,43 +106,83 @@ class SyncToAvalon(BaseAction):
})
session.commit()
try:
self.importable = []
self.log.debug("Preparing entities for synchronization")
# get from top entity in hierarchy all parent entities
top_entity = entities[0]['link']
if len(top_entity) > 1:
for e in top_entity:
parent_entity = session.get(e['type'], e['id'])
self.importable.append(parent_entity)
if entities[0].entity_type.lower() == "project":
ft_project_name = entities[0]["full_name"]
else:
ft_project_name = entities[0]["project"]["full_name"]
# get all child entities separately/unique
for entity in entities:
self.add_childs_to_importable(entity)
project_entities = session.query(
self.entities_query.format(ft_project_name)
).all()
ft_project = session.query(
self.project_query.format(ft_project_name)
).one()
entities_by_id = {}
entities_by_parent = collections.defaultdict(list)
entities_by_id[ft_project["id"]] = ft_project
for ent in project_entities:
entities_by_id[ent["id"]] = ent
entities_by_parent[ent["parent_id"]].append(ent)
importable = []
for ent_info in event["data"]["selection"]:
ent = entities_by_id[ent_info["entityId"]]
for link_ent_info in ent["link"]:
link_ent = entities_by_id[link_ent_info["id"]]
if (
ent.entity_type.lower() in self.ignore_entity_types or
link_ent in importable
):
continue
importable.append(link_ent)
def add_children(parent_id):
ents = entities_by_parent[parent_id]
for ent in ents:
if ent.entity_type.lower() in self.ignore_entity_types:
continue
if ent not in importable:
importable.append(ent)
add_children(ent["id"])
# add children of selection to importable
for ent_info in event["data"]["selection"]:
add_children(ent_info["entityId"])
# Check names: REGEX in schema/duplicates - raise error if found
all_names = []
duplicates = []
for entity in self.importable:
for entity in importable:
ftracklib.avalon_check_name(entity)
if entity.entity_type.lower() == "project":
continue
if entity['name'] in all_names:
duplicates.append("'{}'".format(entity['name']))
else:
all_names.append(entity['name'])
if len(duplicates) > 0:
# TODO Show information to user and return False
raise ValueError(
"Entity name duplication: {}".format(", ".join(duplicates))
)
# ----- PROJECT ------
# store Ftrack project- self.importable[0] must be project entity!!
ft_project = self.importable[0]
avalon_project = ftracklib.get_avalon_project(ft_project)
custom_attributes = ftracklib.get_avalon_attr(session)
# Import all entities to Avalon DB
for entity in self.importable:
for entity in importable:
result = ftracklib.import_to_avalon(
session=session,
entity=entity,
@ -132,7 +190,8 @@ class SyncToAvalon(BaseAction):
av_project=avalon_project,
custom_attributes=custom_attributes
)
# TODO better error handling
# maybe split into critical, warnings and messages?
if 'errors' in result and len(result['errors']) > 0:
job['status'] = 'failed'
session.commit()
@ -151,6 +210,7 @@ class SyncToAvalon(BaseAction):
job['status'] = 'done'
except ValueError as ve:
# TODO remove this part!!!!
job['status'] = 'failed'
message = str(ve)
self.log.error(
@ -169,6 +229,7 @@ class SyncToAvalon(BaseAction):
'Error during syncToAvalon: {}'.format(log_message),
exc_info=True
)
# TODO add traceback to message and show to user
message = (
'Unexpected Error'
' - Please check Log for more information'
@ -178,7 +239,14 @@ class SyncToAvalon(BaseAction):
job['status'] = 'failed'
session.commit()
self.trigger_action("sync.hierarchical.attrs.local", event)
time_end = time.time()
self.log.debug("Synchronization took \"{}\"".format(
str(time_end - time_start)
))
if job["status"] != "failed":
self.log.debug("Triggering Sync hierarchical attributes")
self.trigger_action("sync.hierarchical.attrs.local", event)
if len(message) > 0:
message = "Unable to sync: {}".format(message)
@ -192,63 +260,7 @@ class SyncToAvalon(BaseAction):
'message': "Synchronization was successfull"
}
def add_childs_to_importable(self, entity):
if not (entity.entity_type in ['Task']):
if entity not in self.importable:
self.importable.append(entity)
if entity['children']:
childrens = entity['children']
for child in childrens:
self.add_childs_to_importable(child)
def register(session, plugins_presets={}):
'''Register plugin. Called when used as a plugin.'''
# Validate that session is an instance of ftrack_api.Session. If not,
# assume that register is being called from an old or incompatible API and
# return without doing anything.
if not isinstance(session, ftrack_api.session.Session):
return
SyncToAvalon(session, plugins_presets).register()
def main(arguments=None):
'''Set up logging and register action.'''
if arguments is None:
arguments = []
parser = argparse.ArgumentParser()
# Allow setting of logging level from arguments.
loggingLevels = {}
for level in (
logging.NOTSET, logging.DEBUG, logging.INFO, logging.WARNING,
logging.ERROR, logging.CRITICAL
):
loggingLevels[logging.getLevelName(level).lower()] = level
parser.add_argument(
'-v', '--verbosity',
help='Set the logging output verbosity.',
choices=loggingLevels.keys(),
default='info'
)
namespace = parser.parse_args(arguments)
# Set up basic logging
logging.basicConfig(level=loggingLevels[namespace.verbosity])
session = ftrack_api.Session()
register(session)
# Wait for events
logging.info(
'Registered actions and listening for events. Use Ctrl-C to abort.'
)
session.event_hub.wait()
if __name__ == '__main__':
raise SystemExit(main(sys.argv[1:]))
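
The rewritten launch() above replaces the old per-entity recursion over live server objects with two bulk queries plus two in-memory lookup maps (entities_by_id and entities_by_parent). A minimal sketch of the child-expansion half of that idea, using hypothetical flat records instead of real ftrack entities:

import collections

# Hypothetical flat records, shaped like the bulk TypedContext query results.
entities = [
    {"id": "e1", "parent_id": "p", "entity_type": "Episode", "name": "ep01"},
    {"id": "s1", "parent_id": "e1", "entity_type": "Shot", "name": "sh010"},
    {"id": "t1", "parent_id": "s1", "entity_type": "Task", "name": "anim"},
]

entities_by_id = {ent["id"]: ent for ent in entities}
entities_by_parent = collections.defaultdict(list)
for ent in entities:
    entities_by_parent[ent["parent_id"]].append(ent)

ignore_entity_types = ["task", "milestone"]
importable = []

def add_children(parent_id):
    # Depth-first walk over the in-memory map instead of lazy server calls.
    for ent in entities_by_parent[parent_id]:
        if ent["entity_type"].lower() in ignore_entity_types:
            continue
        if ent not in importable:
            importable.append(ent)
        add_children(ent["id"])

# Selecting the episode pulls in the episode itself plus its non-task children.
importable.append(entities_by_id["e1"])
add_children("e1")
print([ent["name"] for ent in importable])  # ['ep01', 'sh010']

Because the maps are built once from a single query result, expanding the selection no longer costs a server round-trip per child.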

View file

@ -43,9 +43,6 @@ class TestAction(BaseAction):
def register(session, plugins_presets={}):
'''Register plugin. Called when used as a plugin.'''
if not isinstance(session, ftrack_api.session.Session):
return
TestAction(session, plugins_presets).register()

View file

@ -68,8 +68,6 @@ class ThumbToChildren(BaseAction):
def register(session, plugins_presets={}):
'''Register action. Called when used as an event plugin.'''
if not isinstance(session, ftrack_api.session.Session):
return
ThumbToChildren(session, plugins_presets).register()

View file

@ -90,8 +90,6 @@ class ThumbToParent(BaseAction):
def register(session, plugins_presets={}):
'''Register action. Called when used as an event plugin.'''
if not isinstance(session, ftrack_api.session.Session):
return
ThumbToParent(session, plugins_presets).register()

View file

@ -0,0 +1,200 @@
import os
import sys
import argparse
import logging
import collections
import json
import re
from pype.vendor import ftrack_api
from pype.ftrack import BaseAction
from avalon import io, inventory, schema
from pype.ftrack.lib.io_nonsingleton import DbConnector
class PypeUpdateFromV2_2_0(BaseAction):
"""This action is to remove silo field from database and changes asset
schema to newer version
WARNING: it is NOT for situations when you want to switch from avalon-core
to Pype's avalon-core!!!
"""
#: Action identifier.
identifier = "silos.doctor"
#: Action label.
label = "Pype Update"
variant = "- v2.2.0 to v2.3.0 or higher"
#: Action description.
description = "Use when Pype was updated from v2.2.0 to v2.3.0 or higher"
#: roles that are allowed to register this action
role_list = ["Pypeclub", "Administrator"]
icon = "{}/ftrack/action_icons/PypeUpdate.svg".format(
os.environ.get("PYPE_STATICS_SERVER", "")
)
# connector to MongoDB (Avalon mongo)
db_con = DbConnector()
def discover(self, session, entities, event):
""" Validation """
if len(entities) != 1:
return False
if entities[0].entity_type.lower() != "project":
return False
return True
def interface(self, session, entities, event):
if event['data'].get('values', {}):
return
items = []
item_splitter = {'type': 'label', 'value': '---'}
title = "Updated Pype from v 2.2.0 to v2.3.0 or higher"
items.append({
"type": "label",
"value": (
"NOTE: This doctor action should be used ONLY when Pype"
" was updated from v2.2.0 to v2.3.0 or higher.<br><br><br>"
)
})
items.append({
"type": "label",
"value": (
"Select if want to process <b>all synchronized projects</b>"
" or <b>selection</b>."
)
})
items.append({
"type": "enumerator",
"name": "__process_all__",
"data": [{
"label": "All synchronized projects",
"value": True
}, {
"label": "Selection",
"value": False
}],
"value": False
})
items.append({
"type": "label",
"value": (
"<br/><br/><h2>Synchronized projects:</h2>"
"<i>(ignore if <strong>\"ALL projects\"</strong> selected)</i>"
)
})
self.log.debug("Getting all Ftrack projects")
# Get all Ftrack projects
all_ftrack_projects = [
project["full_name"] for project in session.query("Project").all()
]
self.log.debug("Getting Avalon projects that are also in the Ftrack")
# Get Avalon projects that are in Ftrack
self.db_con.install()
possible_projects = [
project["name"] for project in self.db_con.projects()
if project["name"] in all_ftrack_projects
]
for project in possible_projects:
item_label = {
"type": "label",
"value": project
}
item = {
"label": "- process",
"name": project,
"type": 'boolean',
"value": False
}
items.append(item_splitter)
items.append(item_label)
items.append(item)
if len(possible_projects) == 0:
return {
"success": False,
"message": (
"Nothing to process."
" There are not projects synchronized to avalon."
)
}
else:
return {
"items": items,
"title": title
}
def launch(self, session, entities, event):
if 'values' not in event['data']:
return
projects_selection = {
True: [],
False: []
}
process_all = None
values = event['data']['values']
for key, value in values.items():
if key == "__process_all__":
process_all = value
continue
projects_selection[value].append(key)
# Skip if process_all value is not boolean
# - may happen when user deletes the string line in the combobox
if not isinstance(process_all, bool):
self.log.warning(
"Nothing was processed. User didn't select if want to process"
" selection or all projects!"
)
return {
"success": False,
"message": (
"Nothing was processed. You must select if want to process"
" \"selection\" or \"all projects\"!"
)
}
projects_to_process = projects_selection[True]
if process_all:
projects_to_process.extend(projects_selection[False])
self.db_con.install()
for project in projects_to_process:
self.log.debug("Processing project \"{}\"".format(project))
self.db_con.Session["AVALON_PROJECT"] = project
self.log.debug("- Unsetting silos on assets")
self.db_con.update_many(
{"type": "asset"},
{"$unset": {"silo": ""}}
)
self.log.debug("- setting schema of assets to v.3")
self.db_con.update_many(
{"type": "asset"},
{"$set": {"schema": "avalon-core:asset-3.0"}}
)
return True
def register(session, plugins_presets={}):
"""Register plugin. Called when used as an plugin."""
if not isinstance(session, ftrack_api.session.Session):
return
PypeUpdateFromV2_2_0(session, plugins_presets).register()
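
For reference, the per-project migration that launch() performs reduces to two update_many calls. A standalone sketch with plain pymongo, assuming a hypothetical connection URL and project collection (the action resolves both through its DbConnector):

from pymongo import MongoClient

# Hypothetical connection and collection names; the action resolves these
# through DbConnector and the selected project instead.
client = MongoClient("mongodb://localhost:27017")
assets = client["avalon"]["my_project"]

# Remove the deprecated silo field from every asset ...
assets.update_many({"type": "asset"}, {"$unset": {"silo": ""}})
# ... and bump the asset schema to the silo-less version.
assets.update_many(
    {"type": "asset"},
    {"$set": {"schema": "avalon-core:asset-3.0"}}
)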

View file

@ -40,7 +40,4 @@ class ActionAskWhereIRun(BaseAction):
def register(session, plugins_presets={}):
'''Register plugin. Called when used as a plugin.'''
if not isinstance(session, ftrack_api.session.Session):
return
ActionAskWhereIRun(session, plugins_presets).register()

View file

@ -80,7 +80,4 @@ class ActionShowWhereIRun(BaseAction):
def register(session, plugins_presets={}):
'''Register plugin. Called when used as a plugin.'''
if not isinstance(session, ftrack_api.session.Session):
return
ActionShowWhereIRun(session, plugins_presets).register()

View file

@ -21,7 +21,7 @@ class SyncHierarchicalAttrs(BaseAction):
identifier = 'sync.hierarchical.attrs'
#: Action label.
label = "Pype Admin"
variant = '- Sync Hier Attrs (server)'
variant = '- Sync Hier Attrs (Server)'
#: Action description.
description = 'Synchronize hierarchical attributes'
#: Icon
@ -220,7 +220,7 @@ class SyncHierarchicalAttrs(BaseAction):
if job['status'] in ('queued', 'running'):
job['status'] = 'failed'
session.commit()
if self.interface_messages:
self.show_interface_from_dict(
messages=self.interface_messages,
@ -341,9 +341,6 @@ class SyncHierarchicalAttrs(BaseAction):
def register(session, plugins_presets):
'''Register plugin. Called when used as a plugin.'''
if not isinstance(session, ftrack_api.session.Session):
return
SyncHierarchicalAttrs(session, plugins_presets).register()

View file

@ -3,6 +3,8 @@ import sys
import argparse
import logging
import json
import collections
import time
from pypeapp import config
from pype.vendor import ftrack_api
@ -64,6 +66,20 @@ class SyncToAvalon(BaseAction):
)
)
project_query = (
"select full_name, name, custom_attributes"
", project_schema._task_type_schema.types.name"
" from Project where full_name is \"{}\""
)
entities_query = (
"select id, name, parent_id, link, custom_attributes"
" from TypedContext where project.full_name is \"{}\""
)
# Entity type names (lowercased) that won't be synchronized, including their children
ignore_entity_types = ["task", "milestone"]
def register(self):
self.session.event_hub.subscribe(
'topic=ftrack.action.discover',
@ -98,6 +114,7 @@ class SyncToAvalon(BaseAction):
return discover
def launch(self, session, entities, event):
time_start = time.time()
message = ""
# JOB SETTINGS
@ -113,43 +130,83 @@ class SyncToAvalon(BaseAction):
})
session.commit()
try:
self.importable = []
self.log.debug("Preparing entities for synchronization")
# get from top entity in hierarchy all parent entities
top_entity = entities[0]['link']
if len(top_entity) > 1:
for e in top_entity:
parent_entity = session.get(e['type'], e['id'])
self.importable.append(parent_entity)
if entities[0].entity_type.lower() == "project":
ft_project_name = entities[0]["full_name"]
else:
ft_project_name = entities[0]["project"]["full_name"]
# get all child entities separately/unique
for entity in entities:
self.add_childs_to_importable(entity)
project_entities = session.query(
self.entities_query.format(ft_project_name)
).all()
ft_project = session.query(
self.project_query.format(ft_project_name)
).one()
entities_by_id = {}
entities_by_parent = collections.defaultdict(list)
entities_by_id[ft_project["id"]] = ft_project
for ent in project_entities:
entities_by_id[ent["id"]] = ent
entities_by_parent[ent["parent_id"]].append(ent)
importable = []
for ent_info in event["data"]["selection"]:
ent = entities_by_id[ent_info["entityId"]]
for link_ent_info in ent["link"]:
link_ent = entities_by_id[link_ent_info["id"]]
if (
ent.entity_type.lower() in self.ignore_entity_types or
link_ent in importable
):
continue
importable.append(link_ent)
def add_children(parent_id):
ents = entities_by_parent[parent_id]
for ent in ents:
if ent.entity_type.lower() in self.ignore_entity_types:
continue
if ent not in importable:
importable.append(ent)
add_children(ent["id"])
# add children of selection to importable
for ent_info in event["data"]["selection"]:
add_children(ent_info["entityId"])
# Check names: REGEX in schema/duplicates - raise error if found
all_names = []
duplicates = []
for e in self.importable:
lib.avalon_check_name(e)
if e['name'] in all_names:
duplicates.append("'{}'".format(e['name']))
for entity in importable:
lib.avalon_check_name(entity)
if entity.entity_type.lower() == "project":
continue
if entity['name'] in all_names:
duplicates.append("'{}'".format(entity['name']))
else:
all_names.append(e['name'])
all_names.append(entity['name'])
if len(duplicates) > 0:
# TODO Show information to user and return False
raise ValueError(
"Entity name duplication: {}".format(", ".join(duplicates))
)
# ----- PROJECT ------
# store Ftrack project- self.importable[0] must be project entity!!
ft_project = self.importable[0]
avalon_project = lib.get_avalon_project(ft_project)
custom_attributes = lib.get_avalon_attr(session)
# Import all entities to Avalon DB
for entity in self.importable:
for entity in importable:
result = lib.import_to_avalon(
session=session,
entity=entity,
@ -157,7 +214,8 @@ class SyncToAvalon(BaseAction):
av_project=avalon_project,
custom_attributes=custom_attributes
)
# TODO better error handling
# maybe split into critical, warnings and messages?
if 'errors' in result and len(result['errors']) > 0:
job['status'] = 'failed'
session.commit()
@ -177,6 +235,7 @@ class SyncToAvalon(BaseAction):
session.commit()
except ValueError as ve:
# TODO remove this part!!!!
job['status'] = 'failed'
session.commit()
message = str(ve)
@ -197,6 +256,7 @@ class SyncToAvalon(BaseAction):
'Error during syncToAvalon: {}'.format(log_message),
exc_info=True
)
# TODO add traceback to message and show to user
message = (
'Unexpected Error'
' - Please check Log for more information'
@ -207,8 +267,15 @@ class SyncToAvalon(BaseAction):
job['status'] = 'failed'
session.commit()
self.trigger_action("sync.hierarchical.attrs", event)
time_end = time.time()
self.log.debug("Synchronization took \"{}\"".format(
str(time_end - time_start)
))
if job["status"] != "failed":
self.log.debug("Triggering Sync hierarchical attributes")
self.trigger_action("sync.hierarchical.attrs", event)
if len(message) > 0:
message = "Unable to sync: {}".format(message)
@ -222,16 +289,6 @@ class SyncToAvalon(BaseAction):
'message': "Synchronization was successfull"
}
def add_childs_to_importable(self, entity):
if not (entity.entity_type in ['Task']):
if entity not in self.importable:
self.importable.append(entity)
if entity['children']:
childrens = entity['children']
for child in childrens:
self.add_childs_to_importable(child)
def register(session, plugins_presets):
'''Register plugin. Called when used as a plugin.'''
@ -239,9 +296,6 @@ def register(session, plugins_presets):
# Validate that session is an instance of ftrack_api.Session. If not,
# assume that register is being called from an old or incompatible API and
# return without doing anything.
if not isinstance(session, ftrack_api.session.Session):
return
SyncToAvalon(session, plugins_presets).register()

View file

@ -53,7 +53,5 @@ class DelAvalonIdFromNew(BaseEvent):
def register(session, plugins_presets):
'''Register plugin. Called when used as a plugin.'''
if not isinstance(session, ftrack_api.session.Session):
return
DelAvalonIdFromNew(session, plugins_presets).register()

View file

@ -47,7 +47,7 @@ class NextTaskUpdate(BaseEvent):
statusid_changes = changes.get('statusid', {})
if (
entity['entityType'] != 'task' or
'statusid' not in entity['keys'] or
'statusid' not in (entity.get('keys') or []) or
statusid_changes.get('new', None) is None or
statusid_changes.get('old', None) is None
):
@ -88,7 +88,5 @@ class NextTaskUpdate(BaseEvent):
def register(session, plugins_presets):
'''Register plugin. Called when used as a plugin.'''
if not isinstance(session, ftrack_api.session.Session):
return
NextTaskUpdate(session, plugins_presets).register()
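
The changed condition above guards against events where 'keys' is present but set to None; dict.get('keys', []) would only cover the case where the key is missing entirely. A tiny illustration of the difference:

entity = {"entityType": "task", "keys": None}

# The default of dict.get only applies when the key is absent ...
print(entity.get("keys", []))       # None - the default is NOT used
# ... while `or []` also normalizes an explicit None value.
print(entity.get("keys") or [])     # []

# So the membership test is safe in both cases:
print("statusid" in (entity.get("keys") or []))  # False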

View file

@ -36,7 +36,5 @@ class Radio_buttons(BaseEvent):
def register(session, plugins_presets):
'''Register plugin. Called when used as a plugin.'''
if not isinstance(session, ftrack_api.session.Session):
return
Radio_buttons(session, plugins_presets).register()

View file

@ -19,24 +19,38 @@ class SyncHierarchicalAttrs(BaseEvent):
processable = []
processable_ent = {}
for ent in event['data']['entities']:
keys = ent.get('keys')
if not keys:
# Ignore entities that are not tasks or projects
if ent['entityType'].lower() not in ['task', 'show']:
continue
if not ent['entityType'] in ['task', 'show']:
action = ent.get("action")
# skip if removed (entity no longer exists in Ftrack)
if action == "remove":
continue
# When the entity was added we don't care about keys
if action != "add":
keys = ent.get('keys')
if not keys:
continue
entity = session.get(self._get_entity_type(ent), ent['entityId'])
processable.append(ent)
processable_ent[ent['entityId']] = entity
processable_ent[ent['entityId']] = {
"entity": entity,
"action": action,
"link": entity["link"]
}
if not processable:
return True
# Find project of entities
ft_project = None
for entity in processable_ent.values():
for entity_dict in processable_ent.values():
try:
base_proj = entity['link'][0]
base_proj = entity_dict['link'][0]
except Exception:
continue
ft_project = session.get(base_proj['type'], base_proj['id'])
@ -50,6 +64,7 @@ class SyncHierarchicalAttrs(BaseEvent):
):
return True
# Get hierarchical custom attributes from "avalon" group
custom_attributes = {}
query = 'CustomAttributeGroup where name is "avalon"'
all_avalon_attr = session.query(query).one()
@ -67,19 +82,74 @@ class SyncHierarchicalAttrs(BaseEvent):
self.db_con.Session['AVALON_PROJECT'] = ft_project['full_name']
for ent in processable:
for key in ent['keys']:
if key not in custom_attributes:
continue
entity_dict = processable_ent[ent['entityId']]
entity = processable_ent[ent['entityId']]
attr_value = entity['custom_attributes'][key]
self.update_hierarchical_attribute(entity, key, attr_value)
entity = entity_dict["entity"]
ent_path = "/".join([ent["name"] for ent in entity_dict['link']])
action = entity_dict["action"]
keys_to_process = {}
if action == "add":
# Store all custom attributes when entity was added
for key in custom_attributes:
keys_to_process[key] = entity['custom_attributes'][key]
else:
# Update only updated keys
for key in ent['keys']:
if key in custom_attributes:
keys_to_process[key] = entity['custom_attributes'][key]
processed_keys = self.get_hierarchical_values(
keys_to_process, entity
)
# Do the processing of values
self.update_hierarchical_attribute(entity, processed_keys, ent_path)
self.db_con.uninstall()
return True
def update_hierarchical_attribute(self, entity, key, value):
def get_hierarchical_values(self, keys_dict, entity):
# check already set values
_set_keys = []
for key, value in keys_dict.items():
if value is not None:
_set_keys.append(key)
# pop set values from keys_dict
set_keys = {}
for key in _set_keys:
set_keys[key] = keys_dict.pop(key)
# find if entity has set values and pop them out
keys_to_pop = []
for key in keys_dict.keys():
_val = entity["custom_attributes"][key]
if _val:
keys_to_pop.append(key)
set_keys[key] = _val
for key in keys_to_pop:
keys_dict.pop(key)
# if there are no keys left to resolve, return what was found
if not keys_dict:
return set_keys
# end recursion if entity is project
if entity.entity_type.lower() == "project":
for key, value in keys_dict.items():
set_keys[key] = value
else:
result = self.get_hierarchical_values(keys_dict, entity["parent"])
for key, value in result.items():
set_keys[key] = value
return set_keys
def update_hierarchical_attribute(self, entity, keys_dict, ent_path):
# TODO store all keys at once for entity
custom_attributes = entity.get('custom_attributes')
if not custom_attributes:
return
@ -97,30 +167,47 @@ class SyncHierarchicalAttrs(BaseEvent):
if not mongo_entity:
return
changed_keys = {}
data = mongo_entity.get('data') or {}
cur_value = data.get(key)
if cur_value:
if cur_value == value:
return
for key, value in keys_dict.items():
cur_value = data.get(key)
if cur_value:
if cur_value == value:
continue
changed_keys[key] = value
data[key] = value
if not changed_keys:
return
self.log.debug(
"{} - updated hierarchical attributes: {}".format(
ent_path, str(changed_keys)
)
)
data[key] = value
self.db_con.update_many(
{'_id': mongoid},
{'$set': {'data': data}}
)
for child in entity.get('children', []):
if key not in child.get('custom_attributes', {}):
_keys_dict = {}
for key, value in keys_dict.items():
if key not in child.get('custom_attributes', {}):
continue
child_value = child['custom_attributes'][key]
if child_value is not None:
continue
_keys_dict[key] = value
if not _keys_dict:
continue
child_value = child['custom_attributes'][key]
if child_value is not None:
continue
self.update_hierarchical_attribute(child, key, value)
child_path = "/".join([ent["name"] for ent in child['link']])
self.update_hierarchical_attribute(child, _keys_dict, child_path)
def register(session, plugins_presets):
'''Register plugin. Called when used as a plugin.'''
if not isinstance(session, ftrack_api.session.Session):
return
SyncHierarchicalAttrs(session, plugins_presets).register()
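
The get_hierarchical_values() method above resolves each attribute at the closest level of the hierarchy that defines it, recursing toward the project only for the keys that are still None. A simplified re-implementation of that resolution over plain dicts (not real ftrack entities), to make the recursion easier to follow:

def resolve_hierarchical(keys, entity):
    """Resolve attribute values by walking up the parent chain.

    `keys` holds the attribute names still unresolved; `entity` is a plain
    dict with "custom_attributes" and an optional "parent".
    """
    resolved = {}
    remaining = []
    for key in keys:
        value = entity["custom_attributes"].get(key)
        if value is not None:
            resolved[key] = value
        else:
            remaining.append(key)

    if remaining and entity.get("parent") is not None:
        resolved.update(resolve_hierarchical(remaining, entity["parent"]))
    else:
        # Top of the hierarchy: still-unresolved keys stay None,
        # just like in the original method.
        for key in remaining:
            resolved[key] = None
    return resolved


project = {"custom_attributes": {"fps": 25, "resolution": "1920x1080"}}
shot = {
    "custom_attributes": {"fps": None, "resolution": None, "frame_start": 1001},
    "parent": project,
}
print(resolve_hierarchical(["fps", "resolution", "frame_start"], shot))
# {'frame_start': 1001, 'fps': 25, 'resolution': '1920x1080'}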

View file

@ -113,15 +113,13 @@ class Sync_to_Avalon(BaseEvent):
{'type': 'label', 'value': '<p>{}</p>'.format(ftrack_message)}
]
self.show_interface(items, title, event=event)
self.log.error('Fatal error during sync: {}'.format(message))
self.log.error(
'Fatal error during sync: {}'.format(message), exc_info=True
)
return
def register(session, plugins_presets):
'''Register plugin. Called when used as a plugin.'''
if not isinstance(session, ftrack_api.session.Session):
return
Sync_to_Avalon(session, plugins_presets).register()

View file

@ -8,7 +8,7 @@ from pype.ftrack import BaseEvent
class Test_Event(BaseEvent):
ignore_me = True
priority = 10000
def launch(self, session, event):
@ -22,7 +22,5 @@ class Test_Event(BaseEvent):
def register(session, plugins_presets):
'''Register plugin. Called when used as a plugin.'''
if not isinstance(session, ftrack_api.session.Session):
return
Test_Event(session, plugins_presets).register()

View file

@ -27,7 +27,7 @@ class ThumbnailEvents(BaseEvent):
# entity['action'] == 'encoded'):
if (
entity['entityType'] == 'assetversion'
and 'thumbid' in entity['keys']
and 'thumbid' in (entity.get('keys') or [])
):
version = session.get('AssetVersion', entity['entityId'])
@ -47,7 +47,5 @@ class ThumbnailEvents(BaseEvent):
def register(session, plugins_presets):
'''Register plugin. Called when used as a plugin.'''
if not isinstance(session, ftrack_api.session.Session):
return
ThumbnailEvents(session, plugins_presets).register()

View file

@ -183,7 +183,7 @@ class UserAssigmentEvent(BaseEvent):
def launch(self, session, event):
# load shell scripts presets
presets = config.get_presets()['ftrack']["user_assigment_event"]
presets = config.get_presets()['ftrack'].get("user_assigment_event")
if not presets:
return
for entity in event.get('data', {}).get('entities', []):
@ -233,7 +233,5 @@ def register(session, plugins_presets):
"""
Register plugin. Called when used as a plugin.
"""
if not isinstance(session, ftrack_api.session.Session):
return
UserAssigmentEvent(session, plugins_presets).register()

View file

@ -13,7 +13,7 @@ class VersionToTaskStatus(BaseEvent):
# Filter non-assetversions
if (
entity['entityType'] == 'assetversion' and
'statusid' in entity.get('keys', [])
'statusid' in (entity.get('keys') or [])
):
version = session.get('AssetVersion', entity['entityId'])
@ -71,7 +71,5 @@ class VersionToTaskStatus(BaseEvent):
def register(session, plugins_presets):
'''Register plugin. Called when used as a plugin.'''
if not isinstance(session, ftrack_api.session.Session):
return
VersionToTaskStatus(session, plugins_presets).register()

View file

@ -1,7 +1 @@
from .ftrack_server import FtrackServer
from . import event_server_cli
__all__ = [
'event_server_cli',
'FtrackServer'
]

View file

@ -1,18 +1,34 @@
import os
import sys
import signal
import datetime
import subprocess
import socket
import argparse
import atexit
import time
from urllib.parse import urlparse
import requests
from pype.vendor import ftrack_api
from pype.ftrack import credentials
from pype.ftrack.lib import credentials
from pype.ftrack.ftrack_server import FtrackServer
from pypeapp import Logger
log = Logger().get_logger('Ftrack event server', "ftrack-event-server-cli")
from pype.ftrack.ftrack_server.lib import ftrack_events_mongo_settings
import socket_thread
def check_url(url):
class MongoPermissionsError(Exception):
"""Is used when is created multiple objects of same RestApi class."""
def __init__(self, message=None):
if not message:
message = "Exiting because have issue with acces to MongoDB"
super().__init__(message)
def check_ftrack_url(url, log_errors=True):
"""Checks if Ftrack server is responding"""
if not url:
log.error('Ftrack URL is not set!')
print('ERROR: Ftrack URL is not set!')
return None
url = url.strip('/ ')
@ -25,24 +41,47 @@ def check_url(url):
try:
result = requests.get(url, allow_redirects=False)
except requests.exceptions.RequestException:
log.error('Entered Ftrack URL is not accessible!')
return None
if log_errors:
print('ERROR: Entered Ftrack URL is not accessible!')
return False
if (result.status_code != 200 or 'FTRACK_VERSION' not in result.headers):
log.error('Entered Ftrack URL is not accessible!')
return None
if log_errors:
print('ERROR: Entered Ftrack URL is not accessible!')
return False
log.debug('Ftrack server {} is accessible.'.format(url))
print('DEBUG: Ftrack server {} is accessible.'.format(url))
return url
def check_mongo_url(host, port, log_error=False):
"""Checks if mongo server is responding"""
sock = None
try:
sock = socket.create_connection(
(host, port),
timeout=1
)
return True
except socket.error as err:
if log_error:
print("Can't connect to MongoDB at {}:{} because: {}".format(
host, port, err
))
return False
finally:
if sock is not None:
sock.close()
def validate_credentials(url, user, api):
first_validation = True
if not user:
log.error('Ftrack Username is not set! Exiting.')
print('ERROR: Ftrack Username is not set! Exiting.')
first_validation = False
if not api:
log.error('Ftrack API key is not set! Exiting.')
print('ERROR: Ftrack API key is not set! Exiting.')
first_validation = False
if not first_validation:
return False
@ -55,21 +94,21 @@ def validate_credentials(url, user, api):
)
session.close()
except Exception as e:
log.error(
'Can\'t log into Ftrack with the given credentials:'
print(
'ERROR: Can\'t log into Ftrack with the given credentials:'
' Ftrack server: "{}" // Username: {} // API key: {}'.format(
url, user, api
))
return False
log.debug('Credentials Username: "{}", API key: "{}" are valid.'.format(
print('DEBUG: Credentials Username: "{}", API key: "{}" are valid.'.format(
user, api
))
return True
def process_event_paths(event_paths):
log.debug('Processing event paths: {}.'.format(str(event_paths)))
print('DEBUG: Processing event paths: {}.'.format(str(event_paths)))
return_paths = []
not_found = []
if not event_paths:
@ -87,14 +126,249 @@ def process_event_paths(event_paths):
return os.pathsep.join(return_paths), not_found
def run_event_server(ftrack_url, username, api_key, event_paths):
os.environ['FTRACK_SERVER'] = ftrack_url
os.environ['FTRACK_API_USER'] = username
os.environ['FTRACK_API_KEY'] = api_key
os.environ['FTRACK_EVENTS_PATH'] = event_paths
def legacy_server(ftrack_url):
# Current file
file_path = os.path.dirname(os.path.realpath(__file__))
min_fail_seconds = 5
max_fail_count = 3
wait_time_after_max_fail = 10
subproc = None
subproc_path = "{}/sub_legacy_server.py".format(file_path)
subproc_last_failed = datetime.datetime.now()
subproc_failed_count = 0
ftrack_accessible = False
printed_ftrack_error = False
while True:
if not ftrack_accessible:
ftrack_accessible = check_ftrack_url(ftrack_url)
# Run threads only if Ftrack is accessible
if not ftrack_accessible and not printed_ftrack_error:
print("Can't access Ftrack {} <{}>".format(
ftrack_url, str(datetime.datetime.now())
))
if subproc is not None:
if subproc.poll() is None:
subproc.terminate()
subproc = None
printed_ftrack_error = True
time.sleep(1)
continue
printed_ftrack_error = False
if subproc is None:
if subproc_failed_count < max_fail_count:
subproc = subprocess.Popen(
["python", subproc_path],
stdout=subprocess.PIPE
)
elif subproc_failed_count == max_fail_count:
print((
"Storer failed {}times I'll try to run again {}s later"
).format(str(max_fail_count), str(wait_time_after_max_fail)))
subproc_failed_count += 1
elif ((
datetime.datetime.now() - subproc_last_failed
).seconds > wait_time_after_max_fail):
subproc_failed_count = 0
# If thread failed test Ftrack and Mongo connection
elif subproc.poll() is not None:
subproc = None
ftrack_accessible = False
_subproc_last_failed = datetime.datetime.now()
delta_time = (_subproc_last_failed - subproc_last_failed).seconds
if delta_time < min_fail_seconds:
subproc_failed_count += 1
else:
subproc_failed_count = 0
subproc_last_failed = _subproc_last_failed
time.sleep(1)
def main_loop(ftrack_url):
""" This is main loop of event handling.
Loop is handling threads which handles subprocesses of event storer and
processor. When one of threads is stopped it is tested to connect to
ftrack and mongo server. Threads are not started when ftrack or mongo
server is not accessible. When threads are started it is checked for socket
signals as heartbeat. Heartbeat must become at least once per 30sec
otherwise thread will be killed.
"""
# Get mongo hostname and port for testing mongo connection
mongo_list = ftrack_events_mongo_settings()
mongo_hostname = mongo_list[0]
mongo_port = mongo_list[1]
# Current file
file_path = os.path.dirname(os.path.realpath(__file__))
min_fail_seconds = 5
max_fail_count = 3
wait_time_after_max_fail = 10
# Threads data
storer_name = "StorerThread"
storer_port = 10001
storer_path = "{}/sub_event_storer.py".format(file_path)
storer_thread = None
storer_last_failed = datetime.datetime.now()
storer_failed_count = 0
processor_name = "ProcessorThread"
processor_port = 10011
processor_path = "{}/sub_event_processor.py".format(file_path)
processor_thread = None
processor_last_failed = datetime.datetime.now()
processor_failed_count = 0
ftrack_accessible = False
mongo_accessible = False
printed_ftrack_error = False
printed_mongo_error = False
# stop threads on exit
# TODO check if works and args have thread objects!
def on_exit(processor_thread, storer_thread):
if processor_thread is not None:
processor_thread.stop()
processor_thread.join()
processor_thread = None
if storer_thread is not None:
storer_thread.stop()
storer_thread.join()
storer_thread = None
atexit.register(
on_exit, processor_thread=processor_thread, storer_thread=storer_thread
)
# Main loop
while True:
# Check whether the Ftrack and Mongo URLs are accessible
if not ftrack_accessible:
ftrack_accessible = check_ftrack_url(ftrack_url)
if not mongo_accessible:
mongo_accessible = check_mongo_url(mongo_hostname, mongo_port)
# Run threads only if Ftrack is accessible
if not ftrack_accessible or not mongo_accessible:
if not mongo_accessible and not printed_mongo_error:
print("Can't access Mongo {}".format(mongo_url))
if not ftrack_accessible and not printed_ftrack_error:
print("Can't access Ftrack {}".format(ftrack_url))
if storer_thread is not None:
storer_thread.stop()
storer_thread.join()
storer_thread = None
if processor_thread is not None:
processor_thread.stop()
processor_thread.join()
processor_thread = None
printed_ftrack_error = True
printed_mongo_error = True
time.sleep(1)
continue
printed_ftrack_error = False
printed_mongo_error = False
# Run backup thread which does not require mongo to work
if storer_thread is None:
if storer_failed_count < max_fail_count:
storer_thread = socket_thread.SocketThread(
storer_name, storer_port, storer_path
)
storer_thread.start()
elif storer_failed_count == max_fail_count:
print((
"Storer failed {}times I'll try to run again {}s later"
).format(str(max_fail_count), str(wait_time_after_max_fail)))
storer_failed_count += 1
elif ((
datetime.datetime.now() - storer_last_failed
).seconds > wait_time_after_max_fail):
storer_failed_count = 0
# If thread failed test Ftrack and Mongo connection
elif not storer_thread.isAlive():
if storer_thread.mongo_error:
raise MongoPermissionsError()
storer_thread.join()
storer_thread = None
ftrack_accessible = False
mongo_accessible = False
_storer_last_failed = datetime.datetime.now()
delta_time = (_storer_last_failed - storer_last_failed).seconds
if delta_time < min_fail_seconds:
storer_failed_count += 1
else:
storer_failed_count = 0
storer_last_failed = _storer_last_failed
if processor_thread is None:
if processor_failed_count < max_fail_count:
processor_thread = socket_thread.SocketThread(
processor_name, processor_port, processor_path
)
processor_thread.start()
elif processor_failed_count == max_fail_count:
print((
"Processor failed {}times in row"
" I'll try to run again {}s later"
).format(str(max_fail_count), str(wait_time_after_max_fail)))
processor_failed_count += 1
elif ((
datetime.datetime.now() - processor_last_failed
).seconds > wait_time_after_max_fail):
processor_failed_count = 0
# If thread failed test Ftrack and Mongo connection
elif not processor_thread.isAlive():
if processor_thread.mongo_error:
raise MongoPermissionsError(
"Exiting because of an issue with access to MongoDB"
)
processor_thread.join()
processor_thread = None
ftrack_accessible = False
mongo_accessible = False
_processor_last_failed = datetime.datetime.now()
delta_time = (
_processor_last_failed - processor_last_failed
).seconds
if delta_time < min_fail_seconds:
processor_failed_count += 1
else:
processor_failed_count = 0
processor_last_failed = _processor_last_failed
time.sleep(1)
server = FtrackServer('event')
server.run_server()
def main(argv):
'''
@ -184,7 +458,11 @@ def main(argv):
help="Load creadentials from apps dir",
action="store_true"
)
parser.add_argument(
'-legacy',
help="Load creadentials from apps dir",
action="store_true"
)
ftrack_url = os.environ.get('FTRACK_SERVER')
username = os.environ.get('FTRACK_API_USER')
api_key = os.environ.get('FTRACK_API_KEY')
@ -209,8 +487,9 @@ def main(argv):
if kwargs.ftrackapikey:
api_key = kwargs.ftrackapikey
legacy = kwargs.legacy
# Check url regex and accessibility
ftrack_url = check_url(ftrack_url)
ftrack_url = check_ftrack_url(ftrack_url)
if not ftrack_url:
return 1
@ -221,21 +500,40 @@ def main(argv):
# Process events path
event_paths, not_found = process_event_paths(event_paths)
if not_found:
log.warning(
'These paths were not found: {}'.format(str(not_found))
print(
'WARNING: These paths were not found: {}'.format(str(not_found))
)
if not event_paths:
if not_found:
log.error('None of the entered paths is valid or accessible.')
print('ERROR: None of the entered paths is valid or accessible.')
else:
log.error('Paths to events are not set. Exiting.')
print('ERROR: Paths to events are not set. Exiting.')
return 1
if kwargs.storecred:
credentials._save_credentials(username, api_key, True)
run_event_server(ftrack_url, username, api_key, event_paths)
# Set Ftrack environments
os.environ["FTRACK_SERVER"] = ftrack_url
os.environ["FTRACK_API_USER"] = username
os.environ["FTRACK_API_KEY"] = api_key
os.environ["FTRACK_EVENTS_PATH"] = event_paths
if legacy:
return legacy_server(ftrack_url)
return main_loop(ftrack_url)
if (__name__ == ('__main__')):
if __name__ == "__main__":
# Register interrupt signal
def signal_handler(sig, frame):
print("You pressed Ctrl+C. Process ended.")
sys.exit(0)
signal.signal(signal.SIGINT, signal_handler)
signal.signal(signal.SIGTERM, signal_handler)
if hasattr(signal, "SIGKILL"):
signal.signal(signal.SIGKILL, signal_handler)
sys.exit(main(sys.argv))
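
main_loop() above encodes a small restart policy for each worker thread: failures closer together than min_fail_seconds count toward max_fail_count, and once the limit is hit the loop waits wait_time_after_max_fail seconds before resetting the counter. A compact sketch of that policy in isolation (the class and names are hypothetical; the real loop keeps the counters in local variables):

import datetime

# Constants mirroring the ones used in main_loop.
MIN_FAIL_SECONDS = 5
MAX_FAIL_COUNT = 3
WAIT_AFTER_MAX_FAIL = 10


class RestartPolicy:
    """Counts rapid failures and decides when a restart is allowed."""

    def __init__(self):
        self.failed_count = 0
        self.last_failed = datetime.datetime.now()

    def record_failure(self):
        now = datetime.datetime.now()
        # Failures far enough apart reset the counter.
        if (now - self.last_failed).seconds < MIN_FAIL_SECONDS:
            self.failed_count += 1
        else:
            self.failed_count = 0
        self.last_failed = now

    def may_restart(self):
        if self.failed_count < MAX_FAIL_COUNT:
            return True
        # Over the limit: wait before allowing another round of restarts.
        waited = (datetime.datetime.now() - self.last_failed).seconds
        if waited > WAIT_AFTER_MAX_FAIL:
            self.failed_count = 0
            return True
        return False


policy = RestartPolicy()
policy.record_failure()
print(policy.may_restart())  # True while under the failure limit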

View file

@ -126,23 +126,27 @@ class FtrackServer:
msg = '"{}" - register was not successful ({})'.format(
function_dict['name'], str(exc)
)
log.warning(msg)
log.warning(msg, exc_info=True)
def run_server(self):
self.session = ftrack_api.Session(auto_connect_event_hub=True,)
def run_server(self, session=None, load_files=True):
if not session:
session = ftrack_api.Session(auto_connect_event_hub=True)
paths_str = os.environ.get(self.env_key)
if paths_str is None:
log.error((
"Env var \"{}\" is not set, \"{}\" server won\'t launch"
).format(self.env_key, self.server_type))
return
self.session = session
paths = paths_str.split(os.pathsep)
self.set_files(paths)
if load_files:
paths_str = os.environ.get(self.env_key)
if paths_str is None:
log.error((
"Env var \"{}\" is not set, \"{}\" server won\'t launch"
).format(self.env_key, self.server_type))
return
log.info(60*"*")
log.info('Registration of actions/events has finished!')
paths = paths_str.split(os.pathsep)
self.set_files(paths)
log.info(60*"*")
log.info('Registration of actions/events has finished!')
# keep event_hub on session running
self.session.event_hub.wait()
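
With the change above, run_server() can reuse a caller-provided session and skip handler discovery, which is presumably what the storer and processor subprocesses in this commit need for their customized sessions. A minimal usage sketch, assuming handlers are already registered on the session:

import ftrack_api
from pype.ftrack.ftrack_server import FtrackServer

# Build the session ourselves, e.g. to swap in a customized event hub.
session = ftrack_api.Session(auto_connect_event_hub=True)

server = FtrackServer('event')
# load_files=False skips the FTRACK_EVENTS_PATH discovery step, so the
# session must already have its event handlers subscribed.
server.run_server(session=session, load_files=False)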

View file

@ -0,0 +1,68 @@
import os
try:
from urllib.parse import urlparse, parse_qs
except ImportError:
from urlparse import urlparse, parse_qs
def ftrack_events_mongo_settings():
host = None
port = None
username = None
password = None
collection = None
database = None
auth_db = ""
if os.environ.get('FTRACK_EVENTS_MONGO_URL'):
result = urlparse(os.environ['FTRACK_EVENTS_MONGO_URL'])
host = result.hostname
try:
port = result.port
except ValueError:
raise RuntimeError("invalid port specified")
username = result.username
password = result.password
try:
database = result.path.lstrip("/").split("/")[0]
collection = result.path.lstrip("/").split("/")[1]
except IndexError:
if not database:
raise RuntimeError("missing database name for logging")
try:
auth_db = parse_qs(result.query)['authSource'][0]
except KeyError:
# no auth db provided, mongo will use the one we are connecting to
pass
else:
host = os.environ.get('FTRACK_EVENTS_MONGO_HOST')
port = int(os.environ.get('FTRACK_EVENTS_MONGO_PORT', "0"))
database = os.environ.get('FTRACK_EVENTS_MONGO_DB')
username = os.environ.get('FTRACK_EVENTS_MONGO_USER')
password = os.environ.get('FTRACK_EVENTS_MONGO_PASSWORD')
collection = os.environ.get('FTRACK_EVENTS_MONGO_COL')
auth_db = os.environ.get('FTRACK_EVENTS_MONGO_AUTH_DB', 'avalon')
return host, port, database, username, password, collection, auth_db
def get_ftrack_event_mongo_info():
host, port, database, username, password, collection, auth_db = ftrack_events_mongo_settings()
user_pass = ""
if username and password:
user_pass = "{}:{}@".format(username, password)
socket_path = "{}:{}".format(host, port)
dab = ""
if database:
dab = "/{}".format(database)
auth = ""
if auth_db:
auth = "?authSource={}".format(auth_db)
url = "mongodb://{}{}{}{}".format(user_pass, socket_path, dab, auth)
return url, database, collection
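
A quick usage sketch of the helpers above with hypothetical credentials, assuming the pype package is importable; it shows how the single-URL form is parsed apart and reassembled into a connection string:

import os

# Hypothetical values; a real deployment supplies its own.
os.environ["FTRACK_EVENTS_MONGO_URL"] = (
    "mongodb://pype:secret@mongo.local:27017"
    "/pype_db/ftrack_events?authSource=avalon"
)

from pype.ftrack.ftrack_server.lib import get_ftrack_event_mongo_info

url, database, collection = get_ftrack_event_mongo_info()
print(url)        # mongodb://pype:secret@mongo.local:27017/pype_db?authSource=avalon
print(database)   # pype_db
print(collection) # ftrack_events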

View file

@ -0,0 +1,292 @@
import logging
import os
import sys
import getpass
import atexit
import datetime
import tempfile
import threading
import time
import requests
import queue
import pymongo
import ftrack_api
import ftrack_api.session
import ftrack_api.cache
import ftrack_api.operation
import ftrack_api._centralized_storage_scenario
import ftrack_api.event
from ftrack_api.logging import LazyLogMessage as L
from pype.ftrack.lib.custom_db_connector import DbConnector
from pype.ftrack.ftrack_server.lib import get_ftrack_event_mongo_info
from pypeapp import Logger
log = Logger().get_logger("Session processor")
class ProcessEventHub(ftrack_api.event.hub.EventHub):
url, database, table_name = get_ftrack_event_mongo_info()
is_table_created = False
def __init__(self, *args, **kwargs):
self.dbcon = DbConnector(
mongo_url=self.url,
database_name=self.database,
table_name=self.table_name
)
self.sock = kwargs.pop("sock")
super(ProcessEventHub, self).__init__(*args, **kwargs)
def prepare_dbcon(self):
try:
self.dbcon.install()
self.dbcon._database.collection_names()
except pymongo.errors.AutoReconnect:
log.error("Mongo server \"{}\" is not responding, exiting.".format(
os.environ["AVALON_MONGO"]
))
sys.exit(0)
except pymongo.errors.OperationFailure:
log.error((
"Error with Mongo access, probably permissions."
"Check if exist database with name \"{}\""
" and collection \"{}\" inside."
).format(self.database, self.table_name))
self.sock.sendall(b"MongoError")
sys.exit(0)
def wait(self, duration=None):
"""Overriden wait
Event are loaded from Mongo DB when queue is empty. Handled event is
set as processed in Mongo DB.
"""
started = time.time()
self.prepare_dbcon()
while True:
try:
event = self._event_queue.get(timeout=0.1)
except queue.Empty:
if not self.load_events():
time.sleep(0.5)
else:
try:
self._handle(event)
self.dbcon.update_one(
{"id": event["id"]},
{"$set": {"pype_data.is_processed": True}}
)
except pymongo.errors.AutoReconnect:
log.error((
"Mongo server \"{}\" is not responding, exiting."
).format(os.environ["AVALON_MONGO"]))
sys.exit(0)
# Additional special processing of events.
if event['topic'] == 'ftrack.meta.disconnected':
break
if duration is not None:
if (time.time() - started) > duration:
break
def load_events(self):
"""Load not processed events sorted by stored date"""
ago_date = datetime.datetime.now() - datetime.timedelta(days=3)
result = self.dbcon.delete_many({
"pype_data.stored": {"$lte": ago_date},
"pype_data.is_processed": True
})
not_processed_events = self.dbcon.find(
{"pype_data.is_processed": False}
).sort(
[("pype_data.stored", pymongo.ASCENDING)]
)
found = False
for event_data in not_processed_events:
new_event_data = {
k: v for k, v in event_data.items()
if k not in ["_id", "pype_data"]
}
try:
event = ftrack_api.event.base.Event(**new_event_data)
except Exception:
self.logger.exception(L(
'Failed to convert payload into event: {0}',
event_data
))
continue
found = True
self._event_queue.put(event)
return found
def _handle_packet(self, code, packet_identifier, path, data):
"""Override `_handle_packet` which skip events and extend heartbeat"""
code_name = self._code_name_mapping[code]
if code_name == "event":
return
if code_name == "heartbeat":
self.sock.sendall(b"processor")
return self._send_packet(self._code_name_mapping["heartbeat"])
return super()._handle_packet(code, packet_identifier, path, data)
class ProcessSession(ftrack_api.session.Session):
'''An isolated session for interaction with an ftrack server.'''
def __init__(
self, server_url=None, api_key=None, api_user=None, auto_populate=True,
plugin_paths=None, cache=None, cache_key_maker=None,
auto_connect_event_hub=None, schema_cache_path=None,
plugin_arguments=None, sock=None
):
super(ftrack_api.session.Session, self).__init__()
self.logger = logging.getLogger(
__name__ + '.' + self.__class__.__name__
)
self._closed = False
if server_url is None:
server_url = os.environ.get('FTRACK_SERVER')
if not server_url:
raise TypeError(
'Required "server_url" not specified. Pass as argument or set '
'in environment variable FTRACK_SERVER.'
)
self._server_url = server_url
if api_key is None:
api_key = os.environ.get(
'FTRACK_API_KEY',
# Backwards compatibility
os.environ.get('FTRACK_APIKEY')
)
if not api_key:
raise TypeError(
'Required "api_key" not specified. Pass as argument or set in '
'environment variable FTRACK_API_KEY.'
)
self._api_key = api_key
if api_user is None:
api_user = os.environ.get('FTRACK_API_USER')
if not api_user:
try:
api_user = getpass.getuser()
except Exception:
pass
if not api_user:
raise TypeError(
'Required "api_user" not specified. Pass as argument, set in '
'environment variable FTRACK_API_USER or one of the standard '
'environment variables used by Python\'s getpass module.'
)
self._api_user = api_user
# Currently pending operations.
self.recorded_operations = ftrack_api.operation.Operations()
self.record_operations = True
self.cache_key_maker = cache_key_maker
if self.cache_key_maker is None:
self.cache_key_maker = ftrack_api.cache.StringKeyMaker()
# Enforce always having a memory cache at top level so that the same
# in-memory instance is returned from session.
self.cache = ftrack_api.cache.LayeredCache([
ftrack_api.cache.MemoryCache()
])
if cache is not None:
if callable(cache):
cache = cache(self)
if cache is not None:
self.cache.caches.append(cache)
self._managed_request = None
self._request = requests.Session()
self._request.auth = ftrack_api.session.SessionAuthentication(
self._api_key, self._api_user
)
self.auto_populate = auto_populate
# Fetch server information and in doing so also check credentials.
self._server_information = self._fetch_server_information()
# Now check compatibility of server based on retrieved information.
self.check_server_compatibility()
# Construct event hub and load plugins.
self._event_hub = ProcessEventHub(
self._server_url,
self._api_user,
self._api_key,
sock=sock
)
self._auto_connect_event_hub_thread = None
if auto_connect_event_hub in (None, True):
# Connect to event hub in background thread so as not to block main
# session usage waiting for event hub connection.
self._auto_connect_event_hub_thread = threading.Thread(
target=self._event_hub.connect
)
self._auto_connect_event_hub_thread.daemon = True
self._auto_connect_event_hub_thread.start()
# To help with migration from auto_connect_event_hub default changing
# from True to False.
self._event_hub._deprecation_warning_auto_connect = (
auto_connect_event_hub is None
)
# Register to auto-close session on exit.
atexit.register(self.close)
self._plugin_paths = plugin_paths
if self._plugin_paths is None:
self._plugin_paths = os.environ.get(
'FTRACK_EVENT_PLUGIN_PATH', ''
).split(os.pathsep)
self._discover_plugins(plugin_arguments=plugin_arguments)
# TODO: Make schemas read-only and non-mutable (or at least without
# rebuilding types)?
if schema_cache_path is not False:
if schema_cache_path is None:
schema_cache_path = os.environ.get(
'FTRACK_API_SCHEMA_CACHE_PATH', tempfile.gettempdir()
)
schema_cache_path = os.path.join(
schema_cache_path, 'ftrack_api_schema_cache.json'
)
self.schemas = self._load_schemas(schema_cache_path)
self.types = self._build_entity_type_classes(self.schemas)
ftrack_api._centralized_storage_scenario.register(self)
self._configure_locations()
self.event_hub.publish(
ftrack_api.event.base.Event(
topic='ftrack.api.session.ready',
data=dict(
session=self
)
),
synchronous=True
)
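
ProcessEventHub.wait() above replays events that the storer side wrote to MongoDB. The exact document shape is an assumption here (only the pype_data envelope is visible in this diff), but a plain-pymongo sketch of the documents and the queries load_events() runs against them could look like this:

import datetime
import pymongo

# Hypothetical connection; the hub resolves this via
# get_ftrack_event_mongo_info() instead.
client = pymongo.MongoClient("mongodb://localhost:27017")
events = client["pype_db"]["ftrack_events"]

# Assumed shape: the raw ftrack event payload plus a bookkeeping
# envelope under "pype_data".
events.insert_one({
    "id": "event-id-1",
    "topic": "ftrack.update",
    "data": {"entities": []},
    "pype_data": {
        "stored": datetime.datetime.now(),
        "is_processed": False,
    },
})

# The same queries load_events() issues: drop old processed events,
# then replay unprocessed ones in stored order.
ago_date = datetime.datetime.now() - datetime.timedelta(days=3)
events.delete_many({
    "pype_data.stored": {"$lte": ago_date},
    "pype_data.is_processed": True,
})
for event_data in events.find({"pype_data.is_processed": False}).sort(
    [("pype_data.stored", pymongo.ASCENDING)]
):
    print(event_data["id"], event_data["topic"])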

View file

@ -0,0 +1,257 @@
import logging
import os
import getpass
import atexit
import tempfile
import threading
import requests
import ftrack_api
import ftrack_api.session
import ftrack_api.cache
import ftrack_api.operation
import ftrack_api._centralized_storage_scenario
import ftrack_api.event
from ftrack_api.logging import LazyLogMessage as L
class StorerEventHub(ftrack_api.event.hub.EventHub):
def __init__(self, *args, **kwargs):
self.sock = kwargs.pop("sock")
super(StorerEventHub, self).__init__(*args, **kwargs)
def _handle_packet(self, code, packet_identifier, path, data):
"""Override `_handle_packet` which extend heartbeat"""
if self._code_name_mapping[code] == "heartbeat":
# Reply with heartbeat.
self.sock.sendall(b"storer")
return self._send_packet(self._code_name_mapping['heartbeat'])
return super(StorerEventHub, self)._handle_packet(
code, packet_identifier, path, data
)
class StorerSession(ftrack_api.session.Session):
'''An isolated session for interaction with an ftrack server.'''
def __init__(
self, server_url=None, api_key=None, api_user=None, auto_populate=True,
plugin_paths=None, cache=None, cache_key_maker=None,
auto_connect_event_hub=None, schema_cache_path=None,
plugin_arguments=None, sock=None
):
'''Initialise session.
*server_url* should be the URL of the ftrack server to connect to
including any port number. If not specified attempt to look up from
:envvar:`FTRACK_SERVER`.
*api_key* should be the API key to use for authentication whilst
*api_user* should be the username of the user in ftrack to record
operations against. If not specified, *api_key* should be retrieved
from :envvar:`FTRACK_API_KEY` and *api_user* from
:envvar:`FTRACK_API_USER`.
If *auto_populate* is True (the default), then accessing entity
attributes will cause them to be automatically fetched from the server
if they are not already. This flag can be changed on the session
directly at any time.
*plugin_paths* should be a list of paths to search for plugins. If not
specified, default to looking up :envvar:`FTRACK_EVENT_PLUGIN_PATH`.
*cache* should be an instance of a cache that fulfils the
:class:`ftrack_api.cache.Cache` interface and will be used as the cache
for the session. It can also be a callable that will be called with the
session instance as sole argument. The callable should return ``None``
if a suitable cache could not be configured, but session instantiation
can continue safely.
.. note::
The session will add the specified cache to a pre-configured layered
cache that specifies the top level cache as a
:class:`ftrack_api.cache.MemoryCache`. Therefore, it is unnecessary
to construct a separate memory cache for typical behaviour. Working
around this behaviour or removing the memory cache can lead to
unexpected behaviour.
*cache_key_maker* should be an instance of a key maker that fulfils the
:class:`ftrack_api.cache.KeyMaker` interface and will be used to
generate keys for objects being stored in the *cache*. If not specified,
a :class:`~ftrack_api.cache.StringKeyMaker` will be used.
If *auto_connect_event_hub* is True then embedded event hub will be
automatically connected to the event server and allow for publishing and
subscribing to **non-local** events. If False, then only publishing and
subscribing to **local** events will be possible until the hub is
manually connected using :meth:`EventHub.connect
<ftrack_api.event.hub.EventHub.connect>`.
.. note::
The event hub connection is performed in a background thread to
improve session startup time. If a registered plugin requires a
connected event hub then it should check the event hub connection
status explicitly. Subscribing to events does *not* require a
connected event hub.
Enable schema caching by setting *schema_cache_path* to a folder path.
If not set, :envvar:`FTRACK_API_SCHEMA_CACHE_PATH` will be used to
determine the path to store cache in. If the environment variable is
also not specified then a temporary directory will be used. Set to
`False` to disable schema caching entirely.
*plugin_arguments* should be an optional mapping (dict) of keyword
arguments to pass to plugin register functions upon discovery. If a
discovered plugin has a signature that is incompatible with the passed
arguments, the discovery mechanism will attempt to reduce the passed
arguments to only those that the plugin accepts. Note that a warning
will be logged in this case.
'''
super(ftrack_api.session.Session, self).__init__()
self.logger = logging.getLogger(
__name__ + '.' + self.__class__.__name__
)
self._closed = False
if server_url is None:
server_url = os.environ.get('FTRACK_SERVER')
if not server_url:
raise TypeError(
'Required "server_url" not specified. Pass as argument or set '
'in environment variable FTRACK_SERVER.'
)
self._server_url = server_url
if api_key is None:
api_key = os.environ.get(
'FTRACK_API_KEY',
# Backwards compatibility
os.environ.get('FTRACK_APIKEY')
)
if not api_key:
raise TypeError(
'Required "api_key" not specified. Pass as argument or set in '
'environment variable FTRACK_API_KEY.'
)
self._api_key = api_key
if api_user is None:
api_user = os.environ.get('FTRACK_API_USER')
if not api_user:
try:
api_user = getpass.getuser()
except Exception:
pass
if not api_user:
raise TypeError(
'Required "api_user" not specified. Pass as argument, set in '
'environment variable FTRACK_API_USER or one of the standard '
'environment variables used by Python\'s getpass module.'
)
self._api_user = api_user
# Currently pending operations.
self.recorded_operations = ftrack_api.operation.Operations()
self.record_operations = True
self.cache_key_maker = cache_key_maker
if self.cache_key_maker is None:
self.cache_key_maker = ftrack_api.cache.StringKeyMaker()
# Enforce always having a memory cache at top level so that the same
# in-memory instance is returned from session.
self.cache = ftrack_api.cache.LayeredCache([
ftrack_api.cache.MemoryCache()
])
if cache is not None:
if callable(cache):
cache = cache(self)
if cache is not None:
self.cache.caches.append(cache)
self._managed_request = None
self._request = requests.Session()
self._request.auth = ftrack_api.session.SessionAuthentication(
self._api_key, self._api_user
)
self.auto_populate = auto_populate
# Fetch server information and in doing so also check credentials.
self._server_information = self._fetch_server_information()
# Now check compatibility of server based on retrieved information.
self.check_server_compatibility()
# Construct event hub and load plugins.
self._event_hub = StorerEventHub(
self._server_url,
self._api_user,
self._api_key,
sock=sock
)
self._auto_connect_event_hub_thread = None
if auto_connect_event_hub in (None, True):
# Connect to event hub in background thread so as not to block main
# session usage waiting for event hub connection.
self._auto_connect_event_hub_thread = threading.Thread(
target=self._event_hub.connect
)
self._auto_connect_event_hub_thread.daemon = True
self._auto_connect_event_hub_thread.start()
# To help with migration from auto_connect_event_hub default changing
# from True to False.
self._event_hub._deprecation_warning_auto_connect = (
auto_connect_event_hub is None
)
# Register to auto-close session on exit.
atexit.register(self.close)
self._plugin_paths = plugin_paths
if self._plugin_paths is None:
self._plugin_paths = os.environ.get(
'FTRACK_EVENT_PLUGIN_PATH', ''
).split(os.pathsep)
self._discover_plugins(plugin_arguments=plugin_arguments)
# TODO: Make schemas read-only and non-mutable (or at least without
# rebuilding types)?
if schema_cache_path is not False:
if schema_cache_path is None:
schema_cache_path = os.environ.get(
'FTRACK_API_SCHEMA_CACHE_PATH', tempfile.gettempdir()
)
schema_cache_path = os.path.join(
schema_cache_path, 'ftrack_api_schema_cache.json'
)
self.schemas = self._load_schemas(schema_cache_path)
self.types = self._build_entity_type_classes(self.schemas)
ftrack_api._centralized_storage_scenario.register(self)
self._configure_locations()
self.event_hub.publish(
ftrack_api.event.base.Event(
topic='ftrack.api.session.ready',
data=dict(
session=self
)
),
synchronous=True
)
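# A minimal construction sketch for a session like the one above, assuming
# the stock ftrack_api package (not the StorerEventHub subclass); the URL,
# key and user values below are placeholders, not real credentials.
import os
import ftrack_api

os.environ.setdefault("FTRACK_SERVER", "https://example.ftrackapp.com")
os.environ.setdefault("FTRACK_API_KEY", "0000-aaaa-bbbb-cccc")
os.environ.setdefault("FTRACK_API_USER", "jane.doe")

# Credentials resolve from the environment as in __init__ above; passing
# schema_cache_path=False disables schema caching entirely.
session = ftrack_api.Session(
    schema_cache_path=False,
    auto_connect_event_hub=False
)
print(session.server_information.get("version"))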

View file

@ -0,0 +1,123 @@
import os
import sys
import time
import signal
import socket
import threading
import subprocess
from pypeapp import Logger
class SocketThread(threading.Thread):
"""Thread that runs and watches a subprocess of an event storer or processor."""
MAX_TIMEOUT = 35
def __init__(self, name, port, filepath):
super(SocketThread, self).__init__()
self.log = Logger().get_logger("SocketThread", "Event Thread")
self.setName(name)
self.name = name
self.port = port
self.filepath = filepath
self.sock = None
self.subproc = None
self.connection = None
self._is_running = False
self.finished = False
self.mongo_error = False
def stop(self):
self._is_running = False
def run(self):
self._is_running = True
time_socket = time.time()
# Create a TCP/IP socket
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.sock = sock
# Bind the socket to the port - skip already used ports
while True:
try:
server_address = ("localhost", self.port)
sock.bind(server_address)
break
except OSError:
self.port += 1
self.log.debug(
"Running Socket thread on {}:{}".format(*server_address)
)
self.subproc = subprocess.Popen(
["python", self.filepath, "-port", str(self.port)],
stdout=subprocess.PIPE
)
# Listen for incoming connections
sock.listen(1)
sock.settimeout(1.0)
while True:
if not self._is_running:
break
try:
connection, client_address = sock.accept()
time_socket = time.time()
connection.settimeout(1.0)
self.connection = connection
except socket.timeout:
if (time.time() - time_socket) > self.MAX_TIMEOUT:
self.log.error("Connection timeout passed. Terminating.")
self._is_running = False
self.subproc.terminate()
break
continue
try:
time_con = time.time()
# Receive the data in small chunks and retransmit it
while True:
try:
if not self._is_running:
break
try:
data = connection.recv(16)
time_con = time.time()
except socket.timeout:
if (time.time() - time_con) > self.MAX_TIMEOUT:
self.log.error(
"Connection timeout passed. Terminating."
)
self._is_running = False
self.subproc.terminate()
break
continue
except ConnectionResetError:
self._is_running = False
break
if data:
if data == b"MongoError":
self.mongo_error = True
connection.sendall(data)
except Exception as exc:
self.log.error(
"Event server process failed", exc_info=True
)
finally:
# Clean up the connection
connection.close()
if self.subproc.poll() is None:
self.subproc.terminate()
lines = self.subproc.stdout.readlines()
if lines:
print("*** Socket Thread stdout ***")
for line in lines:
os.write(1, line)
self.finished = True
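# A usage sketch for SocketThread, assuming a hypothetical path to one of
# the event scripts below; the port is only a starting point, since the
# bind loop in run() bumps it when it is already taken.
import time

thread = SocketThread("storer", 10001, "/path/to/event_storer.py")
thread.start()
try:
    while not thread.finished:
        if thread.mongo_error:
            print("Subprocess reported a Mongo error, stopping.")
            break
        time.sleep(1)
finally:
    thread.stop()
    thread.join()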

View file

@ -0,0 +1,53 @@
import os
import sys
import datetime
import signal
import socket
import pymongo
from ftrack_server import FtrackServer
from pype.ftrack.ftrack_server.session_processor import ProcessSession
from pypeapp import Logger
log = Logger().get_logger("Event processor")
def main(args):
port = int(args[-1])
# Create a TCP/IP socket
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
# Connect the socket to the port where the server is listening
server_address = ("localhost", port)
log.debug("Processor connected to {} port {}".format(*server_address))
sock.connect(server_address)
sock.sendall(b"CreatedProcess")
try:
session = ProcessSession(auto_connect_event_hub=True, sock=sock)
server = FtrackServer('event')
log.debug("Launched Ftrack Event processor")
server.run_server(session)
except Exception as exc:
import traceback
traceback.print_tb(exc.__traceback__)
finally:
log.debug("First closing socket")
sock.close()
return 1
if __name__ == "__main__":
# Register interrupt signal
def signal_handler(sig, frame):
print("You pressed Ctrl+C. Process ended.")
sys.exit(0)
signal.signal(signal.SIGINT, signal_handler)
signal.signal(signal.SIGTERM, signal_handler)
# SIGKILL cannot be caught or handled, so it is intentionally not
# registered (doing so raises OSError on POSIX systems).
sys.exit(main(sys.argv))

View file

@ -0,0 +1,118 @@
import os
import sys
import datetime
import signal
import socket
import pymongo
from ftrack_server import FtrackServer
from pype.ftrack.ftrack_server.lib import get_ftrack_event_mongo_info
from pype.ftrack.lib.custom_db_connector import DbConnector
from session_storer import StorerSession
from pypeapp import Logger
log = Logger().get_logger("Event storer")
url, database, table_name = get_ftrack_event_mongo_info()
dbcon = DbConnector(
mongo_url=url,
database_name=database,
table_name=table_name
)
# ignore_topics = ["ftrack.meta.connected"]
ignore_topics = []
def install_db():
try:
dbcon.install()
dbcon._database.collection_names()
except pymongo.errors.AutoReconnect:
log.error("Mongo server \"{}\" is not responding, exiting.".format(
os.environ["AVALON_MONGO"]
))
sys.exit(0)
def launch(event):
if event.get("topic") in ignore_topics:
return
event_data = event._data
event_id = event["id"]
event_data["pype_data"] = {
"stored": datetime.datetime.utcnow(),
"is_processed": False
}
try:
# dbcon.insert_one(event_data)
dbcon.update({"id": event_id}, event_data, upsert=True)
log.debug("Event: {} stored".format(event_id))
except pymongo.errors.AutoReconnect:
log.error("Mongo server \"{}\" is not responding, exiting.".format(
os.environ["AVALON_MONGO"]
))
sys.exit(0)
except Exception as exc:
log.error(
"Event: {} failed to store".format(event_id),
exc_info=True
)
def register(session):
'''Register the plugin, subscribing to all topics so events get stored.'''
install_db()
session.event_hub.subscribe("topic=*", launch)
def main(args):
port = int(args[-1])
# Create a TCP/IP socket
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
# Connect the socket to the port where the server is listening
server_address = ("localhost", port)
log.debug("Storer connected to {} port {}".format(*server_address))
sock.connect(server_address)
sock.sendall(b"CreatedStore")
try:
session = StorerSession(auto_connect_event_hub=True, sock=sock)
register(session)
server = FtrackServer("event")
log.debug("Launched Ftrack Event storer")
server.run_server(session, load_files=False)
except pymongo.errors.OperationFailure:
log.error((
"Error with Mongo access, probably permissions."
" Check that database \"{}\" exists"
" and contains collection \"{}\"."
).format(database, table_name))
sock.sendall(b"MongoError")
finally:
log.debug("First closing socket")
sock.close()
return 1
if __name__ == "__main__":
# Register interrupt signal
def signal_handler(sig, frame):
print("You pressed Ctrl+C. Process ended.")
sys.exit(0)
signal.signal(signal.SIGINT, signal_handler)
signal.signal(signal.SIGTERM, signal_handler)
# SIGKILL cannot be caught or handled, so it is intentionally not
# registered (doing so raises OSError on POSIX systems).
sys.exit(main(sys.argv))
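# A sketch of reading back what launch() above stores, assuming a plain
# pymongo client and the same url/database/table_name this module resolves.
import pymongo

client = pymongo.MongoClient(url)
collection = client[database][table_name]

# Events are upserted by their ftrack id with a "pype_data" envelope, so
# unprocessed ones can be queried like this:
for doc in collection.find({"pype_data.is_processed": False}).limit(5):
    print(doc["id"], doc.get("topic"), doc["pype_data"]["stored"])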

View file

@ -0,0 +1,100 @@
import os
import sys
import time
import datetime
import signal
import threading
from ftrack_server import FtrackServer
from pype.vendor import ftrack_api
from pype.vendor.ftrack_api.event.hub import EventHub
from pypeapp import Logger
log = Logger().get_logger("Event Server Legacy")
class TimerChecker(threading.Thread):
max_time_out = 35
def __init__(self, server, session):
self.server = server
self.session = session
self.is_running = False
self.failed = False
super().__init__()
def stop(self):
self.is_running = False
def run(self):
start = datetime.datetime.now()
self.is_running = True
connected = False
while True:
if not self.is_running:
break
if not self.session.event_hub.connected:
if not connected:
if (datetime.datetime.now() - start).seconds > self.max_time_out:
log.error((
"Exiting event server. Session was not connected"
" to ftrack server in {} seconds."
).format(self.max_time_out))
self.failed = True
break
else:
log.error(
"Exiting event server. Event Hub is not connected."
)
self.server.stop_session()
self.failed = True
break
else:
if not connected:
connected = True
time.sleep(1)
def main(args):
check_thread = None
try:
server = FtrackServer('event')
session = ftrack_api.Session(auto_connect_event_hub=True)
check_thread = TimerChecker(server, session)
check_thread.start()
log.debug("Launching Ftrack Event Legacy Server")
server.run_server(session)
except Exception as exc:
import traceback
traceback.print_tb(exc.__traceback__)
finally:
log_info = True
if check_thread is not None:
check_thread.stop()
check_thread.join()
if check_thread.failed:
log_info = False
if log_info:
log.info("Exiting Event server subprocess")
return 1
if __name__ == "__main__":
# Register interrupt signal
def signal_handler(sig, frame):
print("You pressed Ctrl+C. Process ended.")
sys.exit(0)
signal.signal(signal.SIGINT, signal_handler)
signal.signal(signal.SIGTERM, signal_handler)
# SIGKILL cannot be caught or handled, so it is intentionally not
# registered (doing so raises OSError on POSIX systems).
sys.exit(main(sys.argv))

View file

@ -27,6 +27,11 @@ def import_to_avalon(
output = {}
errors = []
entity_type = entity.entity_type
ent_path = "/".join([ent["name"] for ent in entity['link']])
log.debug("{} [{}] - Processing".format(ent_path, entity_type))
ca_mongoid = get_ca_mongoid()
# Validate if entity has custom attribute avalon_mongo_id
if ca_mongoid not in entity['custom_attributes']:
@ -34,18 +39,13 @@ def import_to_avalon(
'Custom attribute "{}" for "{}" is not created'
' or does not have permissions set for the API'
).format(ca_mongoid, entity['name'])
log.error(msg)
errors.append({'Custom attribute error': msg})
output['errors'] = errors
return output
# Validate if entity name matches REGEX in schema
try:
avalon_check_name(entity)
except ValidationError:
msg = '"{}" includes unsupported symbols like "dash" or "space"'
errors.append({'Unsupported character': msg})
output['errors'] = errors
return output
avalon_check_name(entity)
entity_type = entity.entity_type
# Project ////////////////////////////////////////////////////////////////
@ -61,6 +61,7 @@ def import_to_avalon(
ft_project_code = ft_project['name']
if av_project is None:
log.debug("{} - Creating project".format(project_name))
item = {
'schema': "avalon-core:project-2.0",
'type': type,
@ -96,10 +97,20 @@ def import_to_avalon(
'Project name', av_project['name'], project_name
)}
)
if (
av_project_code is not None and
av_project_code != ft_project_code
):
log.warning((
"{0} - Project code"
" is different in Avalon (\"{1}\")"
" than in Ftrack (\"{2}\")!"
" Trying to change it back in Ftrack to \"{1}\"."
).format(
ent_path, str(av_project_code), str(ft_project_code)
))
entity['name'] = av_project_code
errors.append(
{'Changed name error': msg.format(
@ -107,7 +118,18 @@ def import_to_avalon(
)}
)
session.commit()
try:
session.commit()
log.info((
"{} - Project code was changed back to \"{}\""
).format(ent_path, str(av_project_code)))
except Exception:
log.error(
(
"{} - Couldn't change project code back to \"{}\"."
).format(ent_path, str(av_project_code)),
exc_info=True
)
output['errors'] = errors
return output
@ -138,6 +160,7 @@ def import_to_avalon(
for k, v in data.items():
enter_data[k] = v
log.debug("{} - Updating data".format(ent_path))
database[project_name].update_many(
{'_id': ObjectId(projectId)},
{'$set': {
@ -178,20 +201,18 @@ def import_to_avalon(
entity, session, custom_attributes
)
# 1. hierarchical entities have silo set to None
silo = None
if len(data['parents']) > 0:
silo = data['parents'][0]
name = entity['name']
avalon_asset = None
# existence of this custom attr is already checked
if ca_mongoid not in entity['custom_attributes']:
msg = '"{}" doesn\'t have "{}" custom attribute'
errors.append({'Missing Custom attribute': msg.format(
entity_type, ca_mongoid
)})
msg = (
"Entity type \"{}\" does not have custom attribute \"{}\" created,"
" or user \"{}\" does not have permissions to read or change it."
).format(entity_type, ca_mongoid, session.api_user)
log.error(msg)
errors.append({'Missing Custom attribute': msg})
output['errors'] = errors
return output
@ -213,24 +234,24 @@ def import_to_avalon(
)
if avalon_asset is None:
item = {
'schema': "avalon-core:asset-2.0",
'schema': "avalon-core:asset-3.0",
'name': name,
'silo': silo,
'parent': ObjectId(projectId),
'type': 'asset',
'data': data
}
schema.validate(item)
mongo_id = database[project_name].insert_one(item).inserted_id
log.debug("{} - Created in project \"{}\"".format(
ent_path, project_name
))
# Raise error if it seems to be different ent. with same name
elif (
avalon_asset['data']['parents'] != data['parents'] or
avalon_asset['silo'] != silo
):
elif avalon_asset['data']['parents'] != data['parents']:
msg = (
'In Avalon DB already exists entity with name "{0}"'
).format(name)
"{} - In Avalon DB already exists entity with name \"{}\""
"\n- \"{}\""
).format(ent_path, name, "/".join(db_asset_path_items))
log.error(msg)
errors.append({'Entity name duplication': msg})
output['errors'] = errors
return output
@ -240,21 +261,20 @@ def import_to_avalon(
mongo_id = avalon_asset['_id']
else:
if avalon_asset['name'] != entity['name']:
if silo is None or changeability_check_childs(entity) is False:
if changeability_check_childs(entity) is False:
msg = (
'You can\'t change name {} to {}'
'{} - You can\'t change name "{}" to "{}"'
', avalon wouldn\'t work properly!'
'\n\nName was changed back!'
'\n\nCreate new entity if you want to change name.'
).format(avalon_asset['name'], entity['name'])
).format(ent_path, avalon_asset['name'], entity['name'])
log.warning(msg)
entity['name'] = avalon_asset['name']
session.commit()
errors.append({'Changed name error': msg})
if (
avalon_asset['silo'] != silo or
avalon_asset['data']['parents'] != data['parents']
):
if avalon_asset['data']['parents'] != data['parents']:
old_path = '/'.join(avalon_asset['data']['parents'])
new_path = '/'.join(data['parents'])
@ -266,10 +286,7 @@ def import_to_avalon(
moved_back = False
if 'visualParent' in avalon_asset['data']:
if silo is None:
asset_parent_id = avalon_asset['parent']
else:
asset_parent_id = avalon_asset['data']['visualParent']
asset_parent_id = avalon_asset['data']['visualParent'] or avalon_asset['parent']
asset_parent = database[project_name].find_one(
{'_id': ObjectId(asset_parent_id)}
@ -282,6 +299,7 @@ def import_to_avalon(
avalon_asset['name'], old_path, new_path,
'entity was moved back'
)
log.warning(msg)
moved_back = True
except Exception:
@ -292,6 +310,7 @@ def import_to_avalon(
avalon_asset['name'], old_path, new_path,
'please move it back'
)
log.error(msg)
errors.append({'Hierarchy change error': msg})
@ -315,11 +334,12 @@ def import_to_avalon(
{'_id': ObjectId(mongo_id)},
{'$set': {
'name': name,
'silo': silo,
'data': enter_data,
'parent': ObjectId(projectId)
}})
log.debug("{} - Updated data (in project \"{}\")".format(
ent_path, project_name
))
entity['custom_attributes'][ca_mongoid] = str(mongo_id)
session.commit()
@ -329,9 +349,13 @@ def import_to_avalon(
def get_avalon_attr(session, split_hierarchical=False):
custom_attributes = []
hier_custom_attributes = []
query = 'CustomAttributeGroup where name is "avalon"'
all_avalon_attr = session.query(query).one()
for cust_attr in all_avalon_attr['custom_attribute_configurations']:
cust_attrs_query = (
"select id, entity_type, object_type_id, is_hierarchical"
" from CustomAttributeConfiguration"
" where group.name = \"avalon\""
)
all_avalon_attr = session.query(cust_attrs_query).all()
for cust_attr in all_avalon_attr:
if 'avalon_' in cust_attr['key']:
continue
@ -388,6 +412,12 @@ def get_data(entity, session, custom_attributes):
data['ftrackId'] = entity['id']
data['entityType'] = entity_type
ent_types_query = "select id, name from ObjectType"
ent_types = session.query(ent_types_query).all()
ent_types_by_name = {
ent_type["name"]: ent_type["id"] for ent_type in ent_types
}
for cust_attr in custom_attributes:
# skip hierarchical attributes
if cust_attr.get('is_hierarchical', False):
@ -410,8 +440,14 @@ def get_data(entity, session, custom_attributes):
# Put space between capitals (e.g. 'AssetBuild' -> 'Asset Build')
entity_type_full = re.sub(r"(\w)([A-Z])", r"\1 \2", entity_type)
# Get object id of entity type
query = 'ObjectType where name is "{}"'.format(entity_type_full)
ent_obj_type_id = session.query(query).one()['id']
ent_obj_type_id = ent_types_by_name.get(entity_type_full)
# Backup solution when id is not found in pre-queried objects
if not ent_obj_type_id:
query = 'ObjectType where name is "{}"'.format(
entity_type_full
)
ent_obj_type_id = session.query(query).one()['id']
if cust_attr['object_type_id'] == ent_obj_type_id:
if key in entity['custom_attributes']:
@ -547,36 +583,24 @@ def get_project_apps(entity):
return apps
def avalon_check_name(entity, inSchema=None):
ValidationError = jsonschema.ValidationError
alright = True
name = entity['name']
if " " in name:
alright = False
def avalon_check_name(entity, in_schema=None):
default_pattern = "^[a-zA-Z0-9_.]*$"
data = {}
data['data'] = {}
data['type'] = 'asset'
schema = "avalon-core:asset-2.0"
# TODO does project have any REGEX check?
if entity.entity_type in ['Project']:
# data['type'] = 'project'
name = entity['full_name']
# schema = "avalon-core:project-2.0"
name = entity["name"]
schema_name = "asset-3.0"
data['silo'] = 'Film'
if in_schema:
schema_name = in_schema
elif entity.entity_type.lower() == "project":
name = entity["full_name"]
schema_name = "project-2.0"
if inSchema is not None:
schema = inSchema
data['schema'] = schema
data['name'] = name
try:
avalon.schema.validate(data)
except ValidationError:
alright = False
if alright is False:
msg = '"{}" includes unsupported symbols like "dash" or "space"'
schema_obj = avalon.schema._cache.get(schema_name + ".json")
name_pattern = schema_obj.get("properties", {}).get("name", {}).get(
"pattern", default_pattern
)
if not re.match(name_pattern, name):
msg = "\"{}\" includes unsupported symbols like \"dash\" or \"space\""
raise ValueError(msg.format(name))
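# A behaviour sketch for avalon_check_name above, using a hypothetical
# entity-like mapping; the default pattern rejects dashes and spaces.
# Assumes avalon's schema cache is importable and populated.
class _FakeEntity(dict):
    entity_type = "Shot"

avalon_check_name(_FakeEntity({"name": "sh010_main"}))  # passes silently
try:
    avalon_check_name(_FakeEntity({"name": "sh010 main"}))
except ValueError as exc:
    print(exc)  # "sh010 main" includes unsupported symbols ...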

View file

@ -13,6 +13,7 @@ import logging
import tempfile
import functools
import contextlib
import atexit
import requests
@ -54,6 +55,17 @@ def check_active_table(func):
return decorated
def check_active_table(func):
"""Raise NotActiveTable if no active table is set before the call."""
@functools.wraps(func)
def decorated(obj, *args, **kwargs):
if not obj.active_table:
raise NotActiveTable("Active table is not set. (This is bug)")
return func(obj, *args, **kwargs)
return decorated
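# A behaviour sketch for the guard above, using the DbConnector defined
# below; the connection values are placeholders and the constructor
# arguments are assumed from the event-storer module (mongo_url,
# database_name, table_name).
conn = DbConnector(
    mongo_url="mongodb://localhost:27017",
    database_name="pype_events",
    table_name="events"
)
conn.install()
conn.active_table = None  # nothing selected yet
try:
    conn.insert_one({"msg": "hello"})  # guard raises before touching Mongo
except NotActiveTable as exc:
    print(exc)
conn.active_table = "events"
conn.insert_one({"msg": "hello"})  # now routed to the "events" collection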
class DbConnector:
log = logging.getLogger(__name__)
timeout = 1000
@ -87,7 +99,7 @@ class DbConnector:
"""Establish a persistent connection to the database"""
if self._is_installed:
return
atexit.register(self.uninstall)
logging.basicConfig()
self._mongo_client = pymongo.MongoClient(
@ -129,6 +141,16 @@ class DbConnector:
self._mongo_client = None
self._database = None
self._is_installed = False
atexit.unregister(self.uninstall)
def create_table(self, name, **options):
if self.exist_table(name):
return
return self._database.create_collection(name, **options)
def exist_table(self, table_name):
return table_name in self.tables()
def create_table(self, name, **options):
if self.exist_table(name):
@ -158,10 +180,7 @@ class DbConnector:
@auto_reconnect
def insert_one(self, item, **options):
assert isinstance(item, dict), "item must be of type <dict>"
return self._database[self.active_table].insert_one(
item,
session=session
)
return self._database[self.active_table].insert_one(item, **options)
@check_active_table
@auto_reconnect

View file

@ -3,6 +3,7 @@ import time
from pypeapp import Logger
from pype.vendor import ftrack_api
from pype.vendor.ftrack_api import session as fa_session
from pype.ftrack.ftrack_server import session_processor
class MissingPermision(Exception):
@ -31,8 +32,21 @@ class BaseHandler(object):
def __init__(self, session, plugins_presets={}):
'''Expects a ftrack_api.Session instance'''
self._session = session
self.log = Logger().get_logger(self.__class__.__name__)
if not(
isinstance(session, ftrack_api.session.Session) or
isinstance(session, session_processor.ProcessSession)
):
raise Exception((
"Session object passed in args is an instance of \"{}\""
" but an instance of \"{}\" or \"{}\" was expected"
).format(
str(type(session)),
str(ftrack_api.session.Session),
str(session_processor.ProcessSession)
))
self._session = session
# Using decorator
self.register = self.register_decorator(self.register)

View file

@ -192,7 +192,7 @@ class DbConnector(object):
) if os.getenv(item[0], item[1]) is not None
}
Session["schema"] = "avalon-core:session-1.0"
Session["schema"] = "avalon-core:session-2.0"
try:
schema.validate(Session)
except schema.ValidationError as e:

View file

@ -77,7 +77,7 @@ def on_open(*args):
# Show outdated pop-up
def _on_show_inventory():
import avalon.tools.cbsceneinventory as tool
import avalon.tools.sceneinventory as tool
tool.show(parent=parent)
dialog = popup.Popup(parent=parent)

37
pype/logging/gui/app.py Normal file
View file

@ -0,0 +1,37 @@
from Qt import QtWidgets, QtCore
from .widgets import LogsWidget, LogDetailWidget
from pypeapp import style
class LogsWindow(QtWidgets.QWidget):
def __init__(self, parent=None):
super(LogsWindow, self).__init__(parent)
self.setStyleSheet(style.load_stylesheet())
self.resize(1200, 800)
logs_widget = LogsWidget(parent=self)
log_detail = LogDetailWidget(parent=self)
main_layout = QtWidgets.QHBoxLayout()
log_splitter = QtWidgets.QSplitter()
log_splitter.setOrientation(QtCore.Qt.Horizontal)
log_splitter.addWidget(logs_widget)
log_splitter.addWidget(log_detail)
log_splitter.setStretchFactor(0, 65)
log_splitter.setStretchFactor(1, 35)
main_layout.addWidget(log_splitter)
self.logs_widget = logs_widget
self.log_detail = log_detail
self.setLayout(main_layout)
self.setWindowTitle("Logs")
self.logs_widget.active_changed.connect(self.on_selection_changed)
def on_selection_changed(self):
index = self.logs_widget.selected_log()
node = index.data(self.logs_widget.model.NodeRole)
self.log_detail.set_detail(node)
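# A hedged launch sketch for the window above; assumes a Qt binding plus a
# configured PYPE_LOG_MONGO_COL, since LogsWidget queries the log database
# on construction.
if __name__ == "__main__":
    import sys
    from Qt import QtWidgets

    app = QtWidgets.QApplication(sys.argv)
    window = LogsWindow()
    window.show()
    sys.exit(app.exec_())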

94
pype/logging/gui/lib.py Normal file
View file

@ -0,0 +1,94 @@
import contextlib
from Qt import QtCore
def _iter_model_rows(
model, column, include_root=False
):
"""Iterate over all row indices in a model"""
indices = [QtCore.QModelIndex()] # start iteration at root
for index in indices:
# Add children to the iterations
child_rows = model.rowCount(index)
for child_row in range(child_rows):
child_index = model.index(child_row, column, index)
indices.append(child_index)
if not include_root and not index.isValid():
continue
yield index
@contextlib.contextmanager
def preserve_states(
tree_view, column=0, role=None,
preserve_expanded=True, preserve_selection=True,
expanded_role=QtCore.Qt.DisplayRole, selection_role=QtCore.Qt.DisplayRole
):
"""Preserve expanded and selected rows in a QTreeView across a refresh.
This context manager maintains the expansion and selection state of the
model items: when a refresh is triggered, items that were expanded stay
expanded, and the previous selection is restored.
Args:
tree_view (QtWidgets.QTreeView): the tree view nested in the application
column (int): the column to retrieve the data from
role (int): the role which dictates what will be returned
Returns:
None
"""
# When `role` is set then override both expanded and selection roles
if role:
expanded_role = role
selection_role = role
model = tree_view.model()
selection_model = tree_view.selectionModel()
flags = selection_model.Select | selection_model.Rows
expanded = set()
if preserve_expanded:
for index in _iter_model_rows(
model, column=column, include_root=False
):
if tree_view.isExpanded(index):
value = index.data(expanded_role)
expanded.add(value)
selected = None
if preserve_selection:
selected_rows = selection_model.selectedRows()
if selected_rows:
selected = set(row.data(selection_role) for row in selected_rows)
try:
yield
finally:
if expanded:
for index in _iter_model_rows(
model, column=0, include_root=False
):
value = index.data(expanded_role)
# skip if new index was created meanwhile
if value is None:
continue
tree_view.setExpanded(index, value in expanded)
if selected:
# Go through all indices, select the ones with similar data
for index in _iter_model_rows(
model, column=column, include_root=False
):
value = index.data(selection_role)
state = value in selected
if state:
tree_view.scrollTo(index) # Ensure item is visible
selection_model.select(index, flags)
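# A usage sketch for the context manager above: keep expanded and selected
# rows across a model refresh. "view" stands for a hypothetical QTreeView
# whose model implements refresh(), like the LogModel defined further down.
def refresh_keeping_states(view):
    # expansion and selection are captured before the yield, restored after
    with preserve_states(view, column=0, preserve_expanded=True,
                         preserve_selection=True):
        view.model().refresh()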

169
pype/logging/gui/models.py Normal file
View file

@ -0,0 +1,169 @@
import os
from Qt import QtCore
from pypeapp import Logger
from pypeapp.lib.log import _bootstrap_mongo_log
log = Logger().get_logger("LogModel", "LoggingModule")
class LogModel(QtCore.QAbstractItemModel):
COLUMNS = [
"user",
"host",
"lineNumber",
"method",
"module",
"fileName",
"loggerName",
"message",
"level",
"timestamp",
]
colums_mapping = {
"user": "User",
"host": "Host",
"lineNumber": "Line n.",
"method": "Method",
"module": "Module",
"fileName": "File name",
"loggerName": "Logger name",
"message": "Message",
"level": "Level",
"timestamp": "Timestamp",
}
NodeRole = QtCore.Qt.UserRole + 1
def __init__(self, parent=None):
super(LogModel, self).__init__(parent)
self._root_node = Node()
collection = os.environ.get('PYPE_LOG_MONGO_COL')
database = _bootstrap_mongo_log()
self.dbcon = None
if collection in database.list_collection_names():
self.dbcon = database[collection]
def add_log(self, log):
node = Node(log)
self._root_node.add_child(node)
def refresh(self):
self.clear()
self.beginResetModel()
if self.dbcon:
result = self.dbcon.find({})
for item in result:
self.add_log(item)
self.endResetModel()
def data(self, index, role):
if not index.isValid():
return None
if role == QtCore.Qt.DisplayRole or role == QtCore.Qt.EditRole:
node = index.internalPointer()
column = index.column()
key = self.COLUMNS[column]
if key == "timestamp":
return str(node.get(key, None))
return node.get(key, None)
if role == self.NodeRole:
return index.internalPointer()
def index(self, row, column, parent):
"""Return index for row/column under parent"""
if not parent.isValid():
parent_node = self._root_node
else:
parent_node = parent.internalPointer()
child_item = parent_node.child(row)
if child_item:
return self.createIndex(row, column, child_item)
else:
return QtCore.QModelIndex()
def rowCount(self, parent):
node = self._root_node
if parent.isValid():
node = parent.internalPointer()
return node.childCount()
def columnCount(self, parent):
return len(self.COLUMNS)
def parent(self, index):
return QtCore.QModelIndex()
def headerData(self, section, orientation, role):
if role == QtCore.Qt.DisplayRole:
if section < len(self.COLUMNS):
key = self.COLUMNS[section]
return self.colums_mapping.get(key, key)
return super(LogModel, self).headerData(section, orientation, role)
def flags(self, index):
return (QtCore.Qt.ItemIsEnabled | QtCore.Qt.ItemIsSelectable)
def clear(self):
self.beginResetModel()
self._root_node = Node()
self.endResetModel()
class Node(dict):
"""A node that can be represented in a tree view.
The node can store data just like a dictionary.
>>> data = {"name": "John", "score": 10}
>>> node = Node(data)
>>> assert node["name"] == "John"
"""
def __init__(self, data=None):
super(Node, self).__init__()
self._children = list()
self._parent = None
if data is not None:
assert isinstance(data, dict)
self.update(data)
def childCount(self):
return len(self._children)
def child(self, row):
if row >= len(self._children):
log.warning("Invalid row as child: {0}".format(row))
return
return self._children[row]
def children(self):
return self._children
def parent(self):
return self._parent
def row(self):
"""
Returns:
int: Index of this node under parent"""
if self._parent is not None:
siblings = self.parent().children()
return siblings.index(self)
def add_child(self, child):
"""Add a child to this node"""
child._parent = self
self._children.append(child)

426
pype/logging/gui/widgets.py Normal file
View file

@ -0,0 +1,426 @@
import datetime
import inspect
from Qt import QtCore, QtWidgets, QtGui
from PyQt5.QtCore import QVariant
from .models import LogModel
from .lib import preserve_states
class SearchComboBox(QtWidgets.QComboBox):
"""Searchable ComboBox with empty placeholder value as first value"""
def __init__(self, parent=None, placeholder=""):
super(SearchComboBox, self).__init__(parent)
self.setEditable(True)
self.setInsertPolicy(self.NoInsert)
self.lineEdit().setPlaceholderText(placeholder)
# Apply completer settings
completer = self.completer()
completer.setCompletionMode(completer.PopupCompletion)
completer.setCaseSensitivity(QtCore.Qt.CaseInsensitive)
# Force style sheet on popup menu
# It won't take the parent stylesheet for some reason
# todo: better fix for completer popup stylesheet
if parent:
popup = completer.popup()
popup.setStyleSheet(parent.styleSheet())
self.currentIndexChanged.connect(self.onIndexChange)
def onIndexChange(self, index):
print(index)
def populate(self, items):
self.clear()
self.addItems([""]) # ensure first item is placeholder
self.addItems(items)
def get_valid_value(self):
"""Return the current text if it's a valid value else None
Note: The empty placeholder value is valid and returns as ""
"""
text = self.currentText()
lookup = set(self.itemText(i) for i in range(self.count()))
if text not in lookup:
return None
return text
class CheckableComboBox2(QtWidgets.QComboBox):
def __init__(self, parent=None):
super(CheckableComboBox2, self).__init__(parent)
self.view().pressed.connect(self.handleItemPressed)
self._changed = False
def handleItemPressed(self, index):
item = self.model().itemFromIndex(index)
if item.checkState() == QtCore.Qt.Checked:
item.setCheckState(QtCore.Qt.Unchecked)
else:
item.setCheckState(QtCore.Qt.Checked)
self._changed = True
def hidePopup(self):
if not self._changed:
super(CheckableComboBox2, self).hidePopup()
self._changed = False
def itemChecked(self, index):
item = self.model().item(index, self.modelColumn())
return item.checkState() == QtCore.Qt.Checked
def setItemChecked(self, index, checked=True):
item = self.model().item(index, self.modelColumn())
if checked:
item.setCheckState(QtCore.Qt.Checked)
else:
item.setCheckState(QtCore.Qt.Unchecked)
class SelectableMenu(QtWidgets.QMenu):
selection_changed = QtCore.Signal()
def mouseReleaseEvent(self, event):
action = self.activeAction()
if action and action.isEnabled():
action.trigger()
self.selection_changed.emit()
else:
super(SelectableMenu, self).mouseReleaseEvent(event)
class CustomCombo(QtWidgets.QWidget):
selection_changed = QtCore.Signal()
def __init__(self, title, parent=None):
super(CustomCombo, self).__init__(parent)
toolbutton = QtWidgets.QToolButton(self)
toolbutton.setText(title)
toolmenu = SelectableMenu(self)
toolbutton.setMenu(toolmenu)
toolbutton.setPopupMode(QtWidgets.QToolButton.MenuButtonPopup)
layout = QtWidgets.QHBoxLayout()
layout.setContentsMargins(0, 0, 0, 0)
layout.addWidget(toolbutton)
self.setLayout(layout)
# toolmenu.selection_changed.connect(self.on_selection_changed)
toolmenu.selection_changed.connect(self.selection_changed)
self.toolbutton = toolbutton
self.toolmenu = toolmenu
self.main_layout = layout
def populate(self, items):
self.toolmenu.clear()
self.addItems(items)
def addItems(self, items):
for item in items:
# QMenu.addAction(text) already inserts the action into the menu
action = self.toolmenu.addAction(item)
action.setCheckable(True)
action.setChecked(True)
def items(self):
for action in self.toolmenu.actions():
yield action
class CheckableComboBox(QtWidgets.QComboBox):
def __init__(self, parent=None):
super(CheckableComboBox, self).__init__(parent)
view = QtWidgets.QTreeView()
view.header().hide()
view.setRootIsDecorated(False)
model = QtGui.QStandardItemModel()
view.pressed.connect(self.handleItemPressed)
self._changed = False
self.setView(view)
self.setModel(model)
self.view = view
self.model = model
def handleItemPressed(self, index):
item = self.model.itemFromIndex(index)
if item.checkState() == QtCore.Qt.Checked:
item.setCheckState(QtCore.Qt.Unchecked)
else:
item.setCheckState(QtCore.Qt.Checked)
self._changed = True
def hidePopup(self):
if not self._changed:
super(CheckableComboBox, self).hidePopup()
self._changed = False
def itemChecked(self, index):
item = self.model.item(index, self.modelColumn())
return item.checkState() == QtCore.Qt.Checked
def setItemChecked(self, index, checked=True):
item = self.model.item(index, self.modelColumn())
if checked:
item.setCheckState(QtCore.Qt.Checked)
else:
item.setCheckState(QtCore.Qt.Unchecked)
def addItems(self, items):
for text, checked in items:
text_item = QtGui.QStandardItem(text)
checked_item = QtGui.QStandardItem()
checked_item.setData(QVariant(checked), QtCore.Qt.CheckStateRole)
self.model.appendRow([text_item, checked_item])
class LogsWidget(QtWidgets.QWidget):
"""A widget that lists log records with user, level and date filters."""
active_changed = QtCore.Signal()
def __init__(self, parent=None):
super(LogsWidget, self).__init__(parent=parent)
model = LogModel()
filter_layout = QtWidgets.QHBoxLayout()
# user_filter = SearchComboBox(self, "Users")
user_filter = CustomCombo("Users", self)
users = model.dbcon.distinct("user")
user_filter.populate(users)
user_filter.selection_changed.connect(self.user_changed)
level_filter = CustomCombo("Levels", self)
# levels = [(level, True) for level in model.dbcon.distinct("level")]
levels = model.dbcon.distinct("level")
level_filter.addItems(levels)
date_from_label = QtWidgets.QLabel("From:")
date_filter_from = QtWidgets.QDateTimeEdit()
date_from_layout = QtWidgets.QVBoxLayout()
date_from_layout.addWidget(date_from_label)
date_from_layout.addWidget(date_filter_from)
# now = datetime.datetime.now()
# QtCore.QDateTime(now.year, now.month, now.day, now.hour, now.minute, second = 0, msec = 0, timeSpec = 0)
date_to_label = QtWidgets.QLabel("To:")
date_filter_to = QtWidgets.QDateTimeEdit()
date_to_layout = QtWidgets.QVBoxLayout()
date_to_layout.addWidget(date_to_label)
date_to_layout.addWidget(date_filter_to)
filter_layout.addWidget(user_filter)
filter_layout.addWidget(level_filter)
filter_layout.addLayout(date_from_layout)
filter_layout.addLayout(date_to_layout)
view = QtWidgets.QTreeView(self)
view.setAllColumnsShowFocus(True)
# # Set view delegates
# time_delegate = PrettyTimeDelegate()
# column = model.COLUMNS.index("time")
# view.setItemDelegateForColumn(column, time_delegate)
layout = QtWidgets.QVBoxLayout(self)
layout.setContentsMargins(0, 0, 0, 0)
layout.addLayout(filter_layout)
layout.addWidget(view)
view.setContextMenuPolicy(QtCore.Qt.CustomContextMenu)
view.setSortingEnabled(True)
view.sortByColumn(
model.COLUMNS.index("timestamp"),
QtCore.Qt.AscendingOrder
)
view.setModel(model)
view.customContextMenuRequested.connect(self.on_context_menu)
view.selectionModel().selectionChanged.connect(self.active_changed)
# user_filter.connect()
# TODO remove if nothing will affect...
# header = self.view.header()
# # Enforce the columns to fit the data (purely cosmetic)
# if Qt.__binding__ in ("PySide2", "PyQt5"):
# header.setSectionResizeMode(QtWidgets.QHeaderView.ResizeToContents)
# else:
# header.setResizeMode(QtWidgets.QHeaderView.ResizeToContents)
# Set signals
# prepare
model.refresh()
# Store to memory
self.model = model
self.view = view
self.user_filter = user_filter
def user_changed(self):
for action in self.user_filter.items():
print(action)
def on_context_menu(self, point):
# TODO will be any actions? it's ready
return
point_index = self.view.indexAt(point)
if not point_index.isValid():
return
# Get selected subsets without groups
selection = self.view.selectionModel()
rows = selection.selectedRows(column=0)
def selected_log(self):
selection = self.view.selectionModel()
rows = selection.selectedRows(column=0)
if len(rows) == 1:
return rows[0]
return None
class LogDetailWidget(QtWidgets.QWidget):
"""A widget that displays details of the selected log record."""
data_rows = [
"user",
"message",
"level",
"logname",
"method",
"module",
"fileName",
"lineNumber",
"host",
"timestamp"
]
html_text = u"""
<h3>{user} - {timestamp}</h3>
<b>User</b><br>{user}<br>
<br><b>Level</b><br>{level}<br>
<br><b>Message</b><br>{message}<br>
<br><b>Log Name</b><br>{logname}<br><br><b>Method</b><br>{method}<br>
<br><b>File</b><br>{fileName}<br>
<br><b>Line</b><br>{lineNumber}<br>
<br><b>Host</b><br>{host}<br>
<br><b>Timestamp</b><br>{timestamp}<br>
"""
def __init__(self, parent=None):
super(LogDetailWidget, self).__init__(parent=parent)
layout = QtWidgets.QVBoxLayout(self)
label = QtWidgets.QLabel("Detail")
detail_widget = LogDetailTextEdit()
detail_widget.setReadOnly(True)
layout.addWidget(label)
layout.addWidget(detail_widget)
self.detail_widget = detail_widget
self.setEnabled(True)
self.set_detail(None)
def set_detail(self, detail_data):
if not detail_data:
self.detail_widget.setText("")
return
data = dict()
for row in self.data_rows:
value = detail_data.get(row) or "< Not set >"
data[row] = value
self.detail_widget.setHtml(self.html_text.format(**data))
class LogDetailTextEdit(QtWidgets.QTextEdit):
"""QTextEdit that displays details of a log record.
The commented-out code below sketches a context menu override with
actions such as copying the source path or the raw record to the
clipboard.
"""
def __init__(self, parent=None):
super(LogDetailTextEdit, self).__init__(parent=parent)
# self.data = {
# "source": None,
# "raw": None
# }
#
# def contextMenuEvent(self, event):
# """Context menu with additional actions"""
# menu = self.createStandardContextMenu()
#
# # Add additional actions when any text so we can assume
# # the version is set.
# if self.toPlainText().strip():
#
# menu.addSeparator()
# action = QtWidgets.QAction("Copy source path to clipboard",
# menu)
# action.triggered.connect(self.on_copy_source)
# menu.addAction(action)
#
# action = QtWidgets.QAction("Copy raw data to clipboard",
# menu)
# action.triggered.connect(self.on_copy_raw)
# menu.addAction(action)
#
# menu.exec_(event.globalPos())
# del menu
#
# def on_copy_source(self):
# """Copy formatted source path to clipboard"""
# source = self.data.get("source", None)
# if not source:
# return
#
# # path = source.format(root=api.registered_root())
# # clipboard = QtWidgets.QApplication.clipboard()
# # clipboard.setText(path)
#
# def on_copy_raw(self):
# """Copy raw version data to clipboard
#
# The data is string formatted with `pprint.pformat`.
#
# """
# raw = self.data.get("raw", None)
# if not raw:
# return
#
# raw_text = pprint.pformat(raw)
# clipboard = QtWidgets.QApplication.clipboard()
# clipboard.setText(raw_text)

View file

@ -0,0 +1,5 @@
from .logging_module import LoggingModule
def tray_init(tray_widget, main_widget):
return LoggingModule(main_widget, tray_widget)

View file

@ -0,0 +1,36 @@
import os
from Qt import QtWidgets
from pypeapp import Logger
from ..gui.app import LogsWindow
log = Logger().get_logger("LoggingModule", "logging")
class LoggingModule:
def __init__(self, main_parent=None, parent=None):
self.parent = parent
self.window = LogsWindow()
# Definition of Tray menu
def tray_menu(self, parent_menu):
# Menu for Tray App
menu = QtWidgets.QMenu('Logging', parent_menu)
# menu.setProperty('submenu', 'on')
show_action = QtWidgets.QAction("Show Logs", menu)
show_action.triggered.connect(self.on_show_logs)
menu.addAction(show_action)
parent_menu.addMenu(menu)
def tray_start(self):
pass
def process_modules(self, modules):
return
def on_show_logs(self):
self.window.show()

View file

@ -178,7 +178,7 @@ def on_open(_):
# Show outdated pop-up
def _on_show_inventory():
import avalon.tools.cbsceneinventory as tool
import avalon.tools.sceneinventory as tool
tool.show(parent=parent)
dialog = popup.Popup(parent=parent)

View file

@ -75,8 +75,8 @@ def override_toolbox_ui():
"res")
icons = os.path.join(res, "icons")
import avalon.tools.cbsceneinventory as inventory
import avalon.tools.cbloader as loader
import avalon.tools.sceneinventory as inventory
import avalon.tools.loader as loader
from avalon.maya.pipeline import launch_workfiles_app
import mayalookassigner

View file

@ -9,6 +9,7 @@ import json
import logging
import contextlib
from collections import OrderedDict, defaultdict
from math import ceil
from maya import cmds, mel
import maya.api.OpenMaya as om
@ -90,7 +91,7 @@ _alembic_options = {
}
INT_FPS = {15, 24, 25, 30, 48, 50, 60, 44100, 48000}
FLOAT_FPS = {23.976, 29.97, 47.952, 59.94}
FLOAT_FPS = {23.98, 23.976, 29.97, 47.952, 59.94}
def _get_mel_global(name):
@ -116,6 +117,10 @@ def matrix_equals(a, b, tolerance=1e-10):
return True
def float_round(num, places=0, direction=ceil):
return direction(num * (10**places)) / float(10**places)
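# A quick sketch of float_round above, as used by validate_fps further
# down: Maya 2019+ reports e.g. 23.976023976023978 for the "23.976fps"
# unit, and ceiling to two decimal places matches Ftrack's rounded value.
from math import ceil

maya_fps = 23.976023976023978  # hypothetical value reported by Maya
print(float_round(maya_fps, 2, ceil))  # -> 23.98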
def unique(name):
assert isinstance(name, string_types), "`name` must be string"
@ -296,7 +301,13 @@ def attribute_values(attr_values):
"""
original = [(attr, cmds.getAttr(attr)) for attr in attr_values]
# NOTE(antirotor): this didn't work for some reason for Yeti attributes
# original = [(attr, cmds.getAttr(attr)) for attr in attr_values]
original = []
for attr in attr_values:
type = cmds.getAttr(attr, type=True)
value = cmds.getAttr(attr)
original.append((attr, str(value) if type == "string" else value))
try:
for attr, value in attr_values.items():
if isinstance(value, string_types):
@ -1752,25 +1763,26 @@ def set_scene_fps(fps, update=True):
'30': 'ntsc',
'48': 'show',
'50': 'palf',
'60': 'ntscf'}
'60': 'ntscf',
'23.98': '23.976fps',
'23.976': '23.976fps',
'29.97': '29.97fps',
'47.952': '47.952fps',
'47.95': '47.952fps',
'59.94': '59.94fps',
'44100': '44100fps',
'48000': '48000fps'}
if fps in FLOAT_FPS:
unit = "{}fps".format(fps)
elif fps in INT_FPS:
unit = "{}fps".format(int(fps))
else:
# pull from mapping
# this should convert float string to float and int to int
# so 25.0 is converted to 25, but 23.98 will still be a float.
decimals = int(str(fps-int(fps))[2:])
if decimals == 0:
fps = int(fps)
unit = fps_mapping.get(str(fps), None)
if unit is None:
raise ValueError("Unsupported FPS value: `%s`" % fps)
# get maya version
version = int(cmds.about(version=True))
if version < 2018:
# pull from mapping
unit = fps_mapping.get(str(int(fps)), None)
if unit is None:
raise ValueError("Unsupported FPS value: `%s`" % fps)
# Get time slider current state
start_frame = cmds.playbackOptions(query=True, minTime=True)
end_frame = cmds.playbackOptions(query=True, maxTime=True)
@ -1874,7 +1886,12 @@ def validate_fps():
"""
fps = lib.get_asset()["data"]["fps"]
current_fps = mel.eval('currentTimeUnitToFPS()') # returns float
# TODO(antirotor): This is a hack for framerates with multiple
# decimal places. Ftrack ceils fps values to two decimal places,
# but Maya 2019+ reports those fps with much higher resolution.
# As we currently cannot fix the Ftrack rounding, we have to round
# the numbers coming from Maya.
current_fps = float_round(mel.eval('currentTimeUnitToFPS()'), 2)
if current_fps != fps:

View file

@ -4,6 +4,7 @@ from Qt import QtWidgets
import os
import json
from .widget_login import MusterLogin
from avalon.vendor import requests
class MusterModule:
@ -37,6 +38,10 @@ class MusterModule:
pass
def process_modules(self, modules):
def api_callback():
self.aShowLogin.trigger()
if "RestApiServer" in modules:
def api_show_login():
self.aShowLogin.trigger()
@ -62,7 +67,7 @@ class MusterModule:
self.menu.addAction(self.aShowLogin)
self.aShowLogin.triggered.connect(self.show_login)
return self.menu
parent.addMenu(self.menu)
def load_credentials(self):
"""
@ -78,13 +83,39 @@ class MusterModule:
return credentials
def save_credentials(self, username, password):
def get_auth_token(self, username, password):
"""
Authenticate user with Muster and get authToken from server.
"""
MUSTER_REST_URL = os.environ.get("MUSTER_REST_URL")
if not MUSTER_REST_URL:
raise AttributeError("Muster REST API url not set")
params = {
'username': username,
'password': password
}
api_entry = '/api/login'
response = requests.post(
MUSTER_REST_URL + api_entry, params=params)
if response.status_code != 200:
self.log.error(
'Cannot log into Muster: {}'.format(response.status_code))
raise Exception('Cannot log in to Muster.')
try:
token = response.json()['ResponseData']['authToken']
except ValueError as e:
self.log.error('Invalid response from Muster server {}'.format(e))
raise Exception('Invalid response from Muster while logging in.')
self.save_credentials(token)
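# A standalone sketch of the login call above; the REST URL and the
# credentials are placeholders, and the response shape mirrors what
# get_auth_token() expects from the Muster server.
from avalon.vendor import requests

MUSTER_REST_URL = "http://muster.example.com:9890"
response = requests.post(
    MUSTER_REST_URL + "/api/login",
    params={"username": "artist", "password": "secret"}
)
if response.status_code == 200:
    token = response.json()["ResponseData"]["authToken"]
    print("authToken:", token)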
def save_credentials(self, token):
"""
Save credentials to JSON file
"""
data = {
'username': username,
'password': password
'token': token
}
file = open(self.cred_path, 'w')

View file

@ -88,8 +88,7 @@ class MusterLogin(QtWidgets.QWidget):
self.error_label = QtWidgets.QLabel("")
self.error_label.setFont(self.font)
self.error_label.setTextFormat(QtCore.Qt.RichText)
self.error_label.setObjectName("error_label")
self.error_label.setStyleSheet('color: #FC6000')
self.error_label.setWordWrap(True)
self.error_label.hide()
@ -105,6 +104,9 @@ class MusterLogin(QtWidgets.QWidget):
self.btn_ok.clicked.connect(self.click_ok)
self.btn_cancel = QtWidgets.QPushButton("Cancel")
QtWidgets.QShortcut(
QtGui.QKeySequence(
QtCore.Qt.Key_Escape), self).activated.connect(self.close)
self.btn_cancel.clicked.connect(self.close)
self.btn_group.addWidget(self.btn_ok)
@ -115,7 +117,21 @@ class MusterLogin(QtWidgets.QWidget):
return self.main
def keyPressEvent(self, key_event):
if key_event.key() == QtCore.Qt.Key_Return:
if self.input_username.hasFocus():
self.input_password.setFocus()
elif self.input_password.hasFocus() or self.btn_ok.hasFocus():
self.click_ok()
elif self.btn_cancel.hasFocus():
self.close()
else:
super().keyPressEvent(key_event)
def setError(self, msg):
self.error_label.setText(msg)
self.error_label.show()
@ -130,11 +146,16 @@ class MusterLogin(QtWidgets.QWidget):
if not username:
self.setError("Username cannot be empty")
self.invalid_input(self.input_username)
self.save_credentials(username, password)
self._close_widget()
try:
self.save_credentials(username, password)
except Exception as e:
self.setError(
"<b>Cannot get auth token:</b>\n<code>{}</code>".format(e))
else:
self._close_widget()
def save_credentials(self, username, password):
self.parent_widget.save_credentials(username, password)
self.parent_widget.get_auth_token(username, password)
def closeEvent(self, event):
event.ignore()

View file

@ -9,7 +9,7 @@ log = Logger().get_logger(__name__, "nuke")
def install():
menubar = nuke.menu("Nuke")
menu = menubar.findItem(Session["AVALON_LABEL"])
workfile_settings = lib.WorkfileSettings()
workfile_settings = lib.WorkfileSettings
# replace reset resolution from avalon core to pype's
name = "Reset Resolution"
new_name = "Set Resolution"
@ -20,7 +20,7 @@ def install():
log.debug("Changing Item: {}".format(rm_item))
# rm_item[1].setEnabled(False)
menu.removeItem(rm_item[1].name())
menu.addCommand(new_name, workfile_settings.reset_resolution, index=(rm_item[0]))
menu.addCommand(new_name, lambda: workfile_settings().reset_resolution(), index=(rm_item[0]))
# replace reset frame range from avalon core to pype's
name = "Reset Frame Range"
@ -31,12 +31,12 @@ def install():
log.debug("Changing Item: {}".format(rm_item))
# rm_item[1].setEnabled(False)
menu.removeItem(rm_item[1].name())
menu.addCommand(new_name, workfile_settings.reset_frame_range_handles, index=(rm_item[0]))
menu.addCommand(new_name, lambda: workfile_settings().reset_frame_range_handles(), index=(rm_item[0]))
# add colorspace menu item
name = "Set colorspace"
menu.addCommand(
name, workfile_settings.set_colorspace,
name, lambda: workfile_settings().set_colorspace(),
index=(rm_item[0]+2)
)
log.debug("Adding menu item: {}".format(name))
@ -44,7 +44,7 @@ def install():
# add workfile builder menu item
name = "Build First Workfile.."
menu.addCommand(
name, lib.BuildWorkfile().process,
name, lambda: lib.BuildWorkfile().process(),
index=(rm_item[0]+7)
)
log.debug("Adding menu item: {}".format(name))
@ -52,7 +52,7 @@ def install():
# add item that applies all setting above
name = "Apply all settings"
menu.addCommand(
name, workfile_settings.set_context_settings, index=(rm_item[0]+3)
name, lambda: workfile_settings().set_context_settings(), index=(rm_item[0]+3)
)
log.debug("Adding menu item: {}".format(name))

View file

@ -5,8 +5,8 @@ from avalon import api as avalon
from pyblish import api as pyblish
from .workio import (
open,
save,
open_file,
save_file,
current_file,
has_unsaved_changes,
file_extensions,
@ -21,8 +21,8 @@ from .tags import add_tags_from_presets
__all__ = [
# Workfiles API
"open",
"save",
"open_file",
"save_file",
"current_file",
"has_unsaved_changes",
"file_extensions",

View file

@ -15,7 +15,7 @@ def has_unsaved_changes():
return True
def save(filepath):
def save_file(filepath):
project = hiero.core.projects()[-1]
if project:
project.saveAs(filepath)
@ -24,7 +24,7 @@ def save(filepath):
project.saveAs(filepath)
def open(filepath):
def open_file(filepath):
hiero.core.openProject(filepath)
return True

View file

@ -6,6 +6,7 @@ from avalon import (
)
from pype import api as pype
import json
from pathlib import Path
class CollectContextDataFromAport(pyblish.api.ContextPlugin):
@ -26,27 +27,26 @@ class CollectContextDataFromAport(pyblish.api.ContextPlugin):
def process(self, context):
# get json paths from data
rqst_json_data_path = context.data['rqst_json_data_path']
post_json_data_path = context.data['post_json_data_path']
rqst_json_data_path = Path(context.data['rqst_json_data_path'])
post_json_data_path = Path(context.data['post_json_data_path'])
# get avalon session data and convert \ to /
session = avalon.session
fix_paths = {k: v.replace("\\", "/") for k, v in session.items()
if isinstance(v, str)}
session.update(fix_paths)
self.log.info(os.environ['AVALON_PROJECTS'])
projects = Path(session['AVALON_PROJECTS']).resolve()
wd = Path(session['AVALON_WORKDIR']).resolve()
session['AVALON_PROJECTS'] = str(projects)
session['AVALON_WORKDIR'] = str(wd)
context.data["avalonSession"] = session
self.log.debug("avalonSession: {}".format(session))
# get staging directory from received path to json
context.data["stagingDir"] = \
staging_dir = os.path.dirname(
post_json_data_path).replace("\\", "/")
if not os.path.exists(staging_dir):
os.makedirs(staging_dir)
context.data["stagingDir"] = staging_dir = post_json_data_path.parent
# get data from received json file
with open(rqst_json_data_path) as f:
context.data['json_data'] = json_data = json.load(f)
with rqst_json_data_path.open(mode='r') as f:
context.data['jsonData'] = json_data = json.load(f)
assert json_data, "No `data` in json file"
# get and check host type
@ -63,12 +63,13 @@ class CollectContextDataFromAport(pyblish.api.ContextPlugin):
pyblish.api.register_host(host)
# get path to studio templates
templates_dir = os.getenv("PYPE_CONFIG", None)
assert templates_dir, "Missing `PYPE_CONFIG` in os.environ"
templates_dir = os.getenv("PYPE_STUDIO_TEMPLATES", None)
assert templates_dir, "Missing `PYPE_STUDIO_TEMPLATES` in os.environ"
# get presets for host
presets_dir = os.path.join(templates_dir, "presets", host)
assert os.path.exists(presets_dir), "Required path `{}` doesn't exist".format(presets_dir)
assert os.path.exists(
presets_dir), "Required path `{}` doesn't exist".format(presets_dir)
# load all available preset json files
preset_data = dict()
@ -84,16 +85,16 @@ class CollectContextDataFromAport(pyblish.api.ContextPlugin):
# get current file
current_file = json_data.get("currentFile", None)
assert current_file, "No `currentFile` data in json file"
context.data["currentFile"] = current_file
context.data["currentFile"] = Path(current_file).resolve()
# get project data from avalon
project_data = pype.get_project()["data"]
project_data = pype.get_project_data()
assert project_data, "No `project_data` data in avalon db"
context.data["projectData"] = project_data
self.log.debug("project_data: {}".format(project_data))
# get asset data from avalon and fix all paths
asset_data = pype.get_asset()["data"]
asset_data = pype.get_asset_data()
assert asset_data, "No `asset_data` data in avalon db"
asset_data = {k: v.replace("\\", "/") for k, v in asset_data.items()
if isinstance(v, str)}

View file

@ -25,32 +25,41 @@ class CollectInstancesFromJson(pyblish.api.ContextPlugin):
order = pyblish.api.CollectorOrder - 0.48
def process(self, context):
a_session = context.data.get("avalonSession")
json_data = context.data.get("json_data", None)
json_data = context.data.get("jsonData", None)
assert json_data, "No `json_data` data in json file"
instances_data = json_data.get("instances", None)
assert instances_data, "No `instance` data in json file"
staging_dir = json_data.get("stagingDir", None)
assert staging_dir, "No `stagingDir` path in json file"
presets = context.data["presets"]
rules_tasks = presets["rules_tasks"]
ftrack_types = rules_tasks["ftrackTypes"]
assert ftrack_types, "No `ftrack_types` data in `/templates/presets/[host]/rules_tasks.json` file"
context.data["ftrackTypes"] = ftrack_types
asset_default = presets["asset_default"]
assert instances_data, "No `asset_default` data in json file"
assert asset_default, "No `asset_default` data in `/templates/presets/[host]/asset_default.json` file"
asset_name = a_session["AVALON_ASSET"]
entity = pype.get_asset(asset_name)
entity = io.find_one({"name": asset_name,
"type": "asset"})
# get frame start > first try from asset data
frame_start = context.data["assetData"].get("frameStart", None)
frame_start = context.data["assetData"].get("fstart", None)
if not frame_start:
self.log.debug("frame_start not on assetData")
# get frame start > second try from parent data
frame_start = entity["data"]["frameStart"]
frame_start = pype.get_data_hierarchical_attr(entity, "fstart")
if not frame_start:
self.log.debug("frame_start not on any parent entity")
# get frame start > third try from parent data
frame_start = asset_default["frameStart"]
frame_start = asset_default["fstart"]
assert frame_start, ("No `frame_start` data found, "
"please set `fstart` on asset")
@ -60,7 +69,7 @@ class CollectInstancesFromJson(pyblish.api.ContextPlugin):
handles = context.data["assetData"].get("handles", None)
if not handles:
# get frame start > second try from parent data
handles = entity["data"]["handles"]
handles = pype.get_data_hierarchical_attr(entity, "handles")
if not handles:
# get frame start > third try from parent data
handles = asset_default["handles"]
@ -77,25 +86,38 @@ class CollectInstancesFromJson(pyblish.api.ContextPlugin):
# get current file host
host = a_session["AVALON_APP"]
family = "workfile"
family = "projectfile"
families = "filesave"
subset_name = "{0}_{1}".format(task, family)
subset_name = "{0}{1}".format(task, 'Default')
instance_name = "{0}_{1}_{2}".format(name,
family,
subset_name)
# Set label
label = "{0} - {1} > {2}".format(name, task, families)
# get project file instance Data
pf_instance = [inst for inst in instances_data
if inst.get("family", None) in 'projectfile']
self.log.debug('pf_instance: {}'.format(pf_instance))
# get working file into instance for publishing
instance = context.create_instance(subset_name)
instance = context.create_instance(instance_name)
if pf_instance:
instance.data.update(pf_instance[0])
instance.data.update({
"subset": subset_name,
"stagingDir": staging_dir,
"task": task,
"representation": ext[1:],
"host": host,
"asset": asset_name,
"label": label,
"name": name,
# "hierarchy": hierarchy,
# "parents": parents,
"family": family,
"families": [families],
"families": [families, 'ftrack'],
"publish": True,
# "files": files_list
})
instances.append(instance)
@ -103,11 +125,27 @@ class CollectInstancesFromJson(pyblish.api.ContextPlugin):
# for key, value in inst.items():
# self.log.debug('instance[key]: {}'.format(key))
#
version = inst.get("version", None)
assert version, "No `version` string in json file"
name = asset = inst.get("name", None)
assert name, "No `name` key in json_data.instance: {}".format(inst)
family = inst.get("family", None)
assert family, "No `family` key in json_data.instance: {}".format(inst)
assert family, "No `family` key in json_data.instance: {}".format(
inst)
if family in 'projectfile':
continue
files_list = inst.get("files", None)
assert files_list, "`files` are empty in json file"
hierarchy = inst.get("hierarchy", None)
assert hierarchy, "No `hierarchy` data in json file"
parents = inst.get("parents", None)
assert parents, "No `parents` data in json file"
tags = inst.get("tags", None)
if tags:
@ -117,32 +155,86 @@ class CollectInstancesFromJson(pyblish.api.ContextPlugin):
tasks = rules_tasks["defaultTasks"]
self.log.debug("tasks: `{}`".format(tasks))
subset_lst = []
subset_dict = {}
for task in tasks:
# create list of tasks for creation
if not inst.get('tasks', None):
inst['tasks'] = list()
if not inst.get('tasksTypes', None):
inst['tasksTypes'] = {}
# append task into list for later hierarchy creation
ftrack_task_type = ftrack_types[task]
if task not in inst['tasks']:
inst['tasks'].append(task)
inst['tasksTypes'][task] = ftrack_task_type
host = rules_tasks["taskHost"][task]
subsets = rules_tasks["taskSubsets"][task]
for sub in subsets:
self.log.debug(sub)
# make sure there is a list to collect the tasks into
if not isinstance(subset_dict.get(sub), list):
subset_dict[sub] = list()
for subset in subsets:
subset_name = "{0}_{1}".format(task, subset)
instance = context.create_instance(subset_name)
# instance.add(inst)
instance.data.update({
"subset": subset_name,
"task": task,
"frameStart": frame_start,
"handles": handles,
"host": host,
"asset": asset,
"label": "{0} - {1} > {2}".format(name, task, subset),
"name": subset_name,
"family": inst["family"],
"families": [subset],
"jsonData": inst,
# "parents": ,  # without task
# "hierarchy": ,
"publish": True,
})
self.log.info("collected instance: {}".format(instance.data))
instances.append(instance)
subset_dict[sub].append(task)
subset_lst.extend([s for s in subsets if s not in subset_lst])
for subset in subset_lst:
if inst["representations"].get(subset, None):
repr = inst["representations"][subset]
ext = repr['representation']
else:
continue
family = inst["family"]
# skip if thumbnail is in name of subset
if "thumbnail" in subset:
continue
elif "audio" in subset:
family = subset
subset_name = "{0}{1}".format(subset, "Main")
elif "reference" in subset:
family = "render"
subset_name = "{0}{1}".format(family, "Reference")
else:
subset_name = "{0}{1}".format(subset, 'Default')
# create unique subset name
name = "{0}_{1}_{2}".format(asset,
inst["family"],
subset_name)
instance = context.create_instance(name)
files = [f for f in files_list
if subset in f or "thumbnail" in f
]
instance.data.update({
"subset": subset_name,
"stagingDir": staging_dir,
"tasks": subset_dict[subset],
"taskTypes": inst['tasksTypes'],
"fstart": frame_start,
"handles": handles,
"host": host,
"asset": asset,
"hierarchy": hierarchy,
"parents": parents,
"files": files,
"label": "{0} - {1}".format(
asset, subset_name),
"name": name,
"family": family,
"families": [subset, inst["family"], 'ftrack'],
"jsonData": inst,
"publish": True,
"version": version})
self.log.info(
"collected instance: {}".format(instance.data))
instances.append(instance)
context.data["instances"] = instances
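For orientation, a minimal sketch of one entry in the collected json data, inferred from the asserts above; every name and value here is hypothetical:

# Hypothetical shape of a single json_data instance consumed above.
example_inst = {
    "version": "001",                   # asserted to be present
    "name": "sh010",                    # reused as the asset name
    "family": "clip",                   # 'projectfile' entries are skipped
    "files": ["sh010_edit.mov", "sh010_thumbnail.png"],
    "hierarchy": "ep01/sq01",
    "parents": [{"entityName": "sq01", "entityType": "Sequence"}],
    "tags": ["retime"],                 # optional
    "representations": {"review": {"representation": "mov"}},
}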

View file

@ -0,0 +1,20 @@
"""
Requires:
None
Provides:
context -> anatomy (pypeapp.Anatomy)
"""
from pypeapp import Anatomy
import pyblish.api
class CollectAnatomy(pyblish.api.ContextPlugin):
"""Collect Anatomy into Context"""
order = pyblish.api.CollectorOrder
label = "Collect Anatomy"
def process(self, context):
context.data['anatomy'] = Anatomy()
self.log.info("Anatomy templates collected...")
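A minimal sketch of a hypothetical downstream plugin reading the collected Anatomy; the plugin name is invented, while `anatomy.templates` is the same attribute the integrators later in this diff rely on:

import pyblish.api


class ExampleUseAnatomy(pyblish.api.ContextPlugin):
    """Hypothetical consumer of the Anatomy collected above."""
    order = pyblish.api.CollectorOrder + 0.5
    label = "Example: Use Collected Anatomy"

    def process(self, context):
        anatomy = context.data["anatomy"]
        # the studio path templates loaded from the pype configuration
        self.log.info("Anatomy templates: {}".format(anatomy.templates))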

View file

@ -1,3 +1,10 @@
"""
Requires:
None
Provides:
context -> comment (str)
"""
import pyblish.api

View file

@ -1,3 +1,10 @@
"""
Requires:
context -> currentFile (str)
Provides:
context -> label (str)
"""
import os
import pyblish.api
@ -19,4 +26,6 @@ class CollectContextLabel(pyblish.api.ContextPlugin):
# Set label
label = "{host} - {scene}".format(host=host.title(), scene=base)
if host == "standalonepublisher":
label = host.title()
context.data["label"] = label

View file

@ -0,0 +1,19 @@
import os
import getpass
import pyblish.api
class CollectCurrentUserPype(pyblish.api.ContextPlugin):
"""Inject the currently logged on user into the Context"""
# Order must be after default pyblish-base CollectCurrentUser
order = pyblish.api.CollectorOrder + 0.001
label = "Collect Pype User"
def process(self, context):
user = os.getenv("PYPE_USERNAME", "").strip()
if not user:
return
context.data["user"] = user
self.log.debug("Pype user is \"{}\"".format(user))
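# Usage note (hypothetical value): with PYPE_USERNAME=jane.doe exported in
# the environment, context.data["user"] becomes "jane.doe" instead of the
# OS login collected by the default pyblish-base CollectCurrentUser.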

View file

@ -1,3 +1,11 @@
"""
Requires:
None
Provides:
context -> currentFile (str)
"""
import os
import pyblish.api

View file

@ -1,3 +1,11 @@
"""
Requires:
environment -> DEADLINE_PATH
Provides:
context -> deadlineUser (str)
"""
import os
import subprocess
@ -54,4 +62,3 @@ class CollectDeadlineUser(pyblish.api.ContextPlugin):
self.log.info("Found Deadline user: {}".format(user))
context.data['deadlineUser'] = user

View file

@ -1,3 +1,13 @@
"""
Requires:
environment -> PYPE_PUBLISH_PATHS
context -> workspaceDir
Provides:
context -> user (str)
instance -> new instance
"""
import os
import re
import copy
@ -121,6 +131,12 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin):
else:
root = cwd
if data.get("ftrack"):
f = data.get("ftrack")
os.environ["FTRACK_API_USER"] = f["FTRACK_API_USER"]
os.environ["FTRACK_API_KEY"] = f["FTRACK_API_KEY"]
os.environ["FTRACK_SERVER"] = f["FTRACK_SERVER"]
metadata = data.get("metadata")
if metadata:
session = metadata.get("session")

View file

@ -1,3 +1,11 @@
"""
Requires:
none
Provides:
context -> machine (str)
"""
import pyblish.api

View file

@ -1,5 +1,11 @@
import os
import json
"""
Requires:
config_data -> ftrack.output_representation
Provides:
context -> output_repre_config (str)
"""
import pyblish.api
from pypeapp import config
@ -9,7 +15,7 @@ class CollectOutputRepreConfig(pyblish.api.ContextPlugin):
order = pyblish.api.CollectorOrder
label = "Collect Config for representation"
hosts = ["shell"]
hosts = ["shell", "standalonepublisher"]
def process(self, context):
config_data = config.get_presets()["ftrack"]["output_representation"]

View file

@ -1,3 +1,12 @@
"""
Requires:
config_data -> colorspace.default
config_data -> dataflow.default
Provides:
context -> presets
"""
from pyblish import api
from pypeapp import config
@ -5,7 +14,7 @@ from pypeapp import config
class CollectPresets(api.ContextPlugin):
"""Collect Presets."""
order = api.CollectorOrder
order = api.CollectorOrder - 0.491
label = "Collect Presets"
def process(self, context):

View file

@ -1,8 +1,15 @@
"""
Requires:
None
Provides:
context -> projectData
"""
import pyblish.api
import pype.api as pype
class CollectProjectData(pyblish.api.ContextPlugin):
"""Collecting project data from avalon db"""

View file

@ -13,6 +13,8 @@ class CollectSceneVersion(pyblish.api.ContextPlugin):
label = 'Collect Version'
def process(self, context):
if "standalonepublisher" in context.data.get("host", []):
return
filename = os.path.basename(context.data.get('currentFile'))

View file

@ -1,16 +1,87 @@
"""
Requires:
session -> AVALON_PROJECT
context -> anatomy (pypeapp.Anatomy)
instance -> subset
instance -> asset
instance -> family
Provides:
instance -> template
instance -> assumedTemplateData
instance -> assumedDestination
"""
import pype.api as pype
from pypeapp import Anatomy
import os
from avalon import io, api
import pyblish.api
class CollectTemplates(pyblish.api.ContextPlugin):
"""Inject the current working file into context"""
class CollectTemplates(pyblish.api.InstancePlugin):
"""Fill templates with data needed for publish"""
order = pyblish.api.CollectorOrder
label = "Collect Templates"
order = pyblish.api.CollectorOrder + 0.1
label = "Collect and fill Templates"
hosts = ["maya", "nuke", "standalonepublisher"]
def process(self, context):
context.data['anatomy'] = Anatomy()
self.log.info("Anatomy templates collected...")
def process(self, instance):
# get all the stuff from the database
subset_name = instance.data["subset"]
asset_name = instance.data["asset"]
project_name = api.Session["AVALON_PROJECT"]
project = io.find_one({"type": "project",
"name": project_name},
projection={"config": True, "data": True})
template = project["config"]["template"]["publish"]
anatomy = instance.context.data['anatomy']
asset = io.find_one({"type": "asset",
"name": asset_name,
"parent": project["_id"]})
assert asset, ("No asset found by the name '{}' "
"in project '{}'".format(asset_name, project_name))
silo = asset.get('silo')
subset = io.find_one({"type": "subset",
"name": subset_name,
"parent": asset["_id"]})
# assume there is no version yet, we start at `1`
version = None
version_number = 1
if subset is not None:
version = io.find_one({"type": "version",
"parent": subset["_id"]},
sort=[("name", -1)])
# if there is a subset there ought to be a version
if version is not None:
version_number += int(version["name"])
hierarchy = asset['data']['parents']
if hierarchy:
# hierarchy = os.path.sep.join(hierarchy)
hierarchy = os.path.join(*hierarchy)
template_data = {"root": api.Session["AVALON_PROJECTS"],
"project": {"name": project_name,
"code": project['data']['code']},
"silo": silo,
"family": instance.data['family'],
"asset": asset_name,
"subset": subset_name,
"version": version_number,
"hierarchy": hierarchy,
"representation": "TEMP"}
instance.data["template"] = template
instance.data["assumedTemplateData"] = template_data
# We take the parent folder of representation 'filepath'
instance.data["assumedDestination"] = os.path.dirname(
(anatomy.format(template_data))["publish"]["path"]
)
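To illustrate the assumed destination (the template string and all values below are invented for this sketch):

# Illustrative only -- template and values are invented:
#   publish template:
#     "{root}/{project[name]}/{hierarchy}/{asset}/publish/{family}/{subset}/v{version:0>3}/{representation}"
#   anatomy.format(template_data)["publish"]["path"] ->
#     "/projects/myproj/ep01/sq01/sh010/publish/render/renderDefault/v001/TEMP"
#   assumedDestination (its parent folder) ->
#     "/projects/myproj/ep01/sq01/sh010/publish/render/renderDefault/v001"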

View file

@ -72,13 +72,7 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin):
entity = io.find_one({"type": "asset", "name": name})
# Create entity if doesn"t exist
if entity is None:
if self.project["_id"] == parent["_id"]:
silo = None
elif parent["silo"] is None:
silo = parent["name"]
else:
silo = parent["silo"]
entity = self.create_avalon_asset(name, silo, data)
entity = self.create_avalon_asset(name, data)
# Update entity data with input data
io.update_many({"_id": entity["_id"]}, {"$set": {"data": data}})
@ -86,11 +80,10 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin):
if "childs" in entity_data:
self.import_to_avalon(entity_data["childs"], entity)
def create_avalon_asset(self, name, silo, data):
def create_avalon_asset(self, name, data):
item = {
"schema": "avalon-core:asset-2.0",
"schema": "avalon-core:asset-3.0",
"name": name,
"silo": silo,
"parent": self.project["_id"],
"type": "asset",
"data": data

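# Note: with the avalon-core:asset-3.0 schema the "silo" key is dropped
# from the asset document entirely (hence the removed parameter above),
# so a created asset looks roughly like (illustrative):
#   {"schema": "avalon-core:asset-3.0", "name": name,
#    "parent": self.project["_id"], "type": "asset", "data": data}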
View file

@ -1,7 +1,6 @@
import os
import logging
import shutil
import clique
import errno
import pyblish.api
@ -25,9 +24,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
label = "Integrate Asset"
order = pyblish.api.IntegratorOrder
families = ["assembly",
"yetiRig",
"yeticache"]
families = ["assembly"]
exclude_families = ["clip"]
def process(self, instance):
@ -41,7 +38,6 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
if instance.data.get('transfer', True):
self.integrate(instance)
def register(self, instance):
# Required environment variables
PROJECT = api.Session["AVALON_PROJECT"]
@ -158,7 +154,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
"version": int(version["name"]),
"hierarchy": hierarchy}
template_publish = project["config"]["template"]["publish"]
# template_publish = project["config"]["template"]["publish"]
anatomy = instance.context.data['anatomy']
# Find the representations to transfer amongst the files

View file

@ -30,7 +30,8 @@ class IntegrateAssumedDestination(pyblish.api.InstancePlugin):
"resources")
# Clean the path
mock_destination = os.path.abspath(os.path.normpath(mock_destination)).replace("\\", "/")
mock_destination = os.path.abspath(
os.path.normpath(mock_destination)).replace("\\", "/")
# Define resource destination and transfers
resources = instance.data.get("resources", list())
@ -38,7 +39,8 @@ class IntegrateAssumedDestination(pyblish.api.InstancePlugin):
for resource in resources:
# Add destination to the resource
source_filename = os.path.basename(resource["source"]).replace("\\", "/")
source_filename = os.path.basename(
resource["source"]).replace("\\", "/")
destination = os.path.join(mock_destination, source_filename)
# Force forward slashes to fix issue with software unable
@ -53,7 +55,8 @@ class IntegrateAssumedDestination(pyblish.api.InstancePlugin):
files = resource['files']
for fsrc in files:
fname = os.path.basename(fsrc)
fdest = os.path.join(mock_destination, fname).replace("\\", "/")
fdest = os.path.join(
mock_destination, fname).replace("\\", "/")
transfers.append([fsrc, fdest])
instance.data["resources"] = resources

View file

@ -3,7 +3,6 @@ from os.path import getsize
import logging
import speedcopy
import clique
import traceback
import errno
import pyblish.api
from avalon import api, io
@ -64,7 +63,9 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
"plate",
"look",
"lut",
"audio"
"audio",
"yetiRig",
"yeticache"
]
exclude_families = ["clip"]
@ -110,7 +111,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
# extracted_traceback[1], result["error"]
# )
# )
# assert all(result["success"] for result in context.data["results"]), (
# assert all(result["success"] for result in context.data["results"]),(
# "Atomicity not held, aborting.")
# Assemble
@ -251,7 +252,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
template_data = {"root": root,
"project": {"name": PROJECT,
"code": project['data']['code']},
"silo": asset['silo'],
"silo": asset.get('silo'),
"task": TASK,
"asset": ASSET,
"family": instance.data['family'],
@ -267,7 +268,9 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
template = os.path.normpath(
anatomy.templates[template_name]["path"])
if isinstance(files, list):
sequence_repre = isinstance(files, list)
if sequence_repre:
src_collections, remainder = clique.assemble(files)
self.log.debug(
"src_tail_collections: {}".format(str(src_collections)))
@ -304,6 +307,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
dst_tail = dst_collection.format("{tail}")
index_frame_start = None
if repre.get("frameStart"):
frame_start_padding = len(str(
repre.get("frameEnd")))
@ -328,7 +332,9 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
self.log.debug("source: {}".format(src))
instance.data["transfers"].append([src, dst])
repre['published_path'] = "{0}{1}{2}".format(dst_head, dst_padding_exp, dst_tail)
repre['published_path'] = "{0}{1}{2}".format(dst_head,
dst_padding_exp,
dst_tail)
# for imagesequence version data
hashes = '#' * len(dst_padding)
dst = os.path.normpath("{0}{1}{2}".format(
@ -379,7 +385,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
"project": {"name": PROJECT,
"code": project['data']['code']},
'task': TASK,
"silo": asset['silo'],
"silo": asset.get('silo'),
"asset": ASSET,
"family": instance.data['family'],
"subset": subset["name"],
@ -388,6 +394,10 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
"representation": repre['ext']
}
}
if sequence_repre and repre.get("frameStart"):
representation['context']['frame'] = repre.get("frameStart")
self.log.debug("__ representation: {}".format(representation))
destination_list.append(dst)
self.log.debug("__ destination_list: {}".format(destination_list))
@ -482,12 +492,16 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
if subset is None:
subset_name = instance.data["subset"]
self.log.info("Subset '%s' not found, creating.." % subset_name)
self.log.debug("families. %s" % instance.data.get('families'))
self.log.debug("families. %s" % type(instance.data.get('families')))
_id = io.insert_one({
"schema": "avalon-core:subset-2.0",
"schema": "pype:subset-3.0",
"type": "subset",
"name": subset_name,
"data": {},
"data": {
"families": instance.data.get('families')
},
"parent": asset["_id"]
}).inserted_id
@ -510,7 +524,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
version_locations = [location for location in locations if
location is not None]
return {"schema": "avalon-core:version-2.0",
return {"schema": "pype:version-3.0",
"type": "version",
"parent": subset["_id"],
"name": version_number,

View file

@ -152,7 +152,7 @@ class IntegrateFrames(pyblish.api.InstancePlugin):
template_data = {"root": root,
"project": {"name": PROJECT,
"code": project['data']['code']},
"silo": asset['silo'],
"silo": asset.get('silo'),
"task": api.Session["AVALON_TASK"],
"asset": ASSET,
"family": instance.data['family'],

View file

@ -1,7 +1,6 @@
import os
import json
import re
from pprint import pprint
import logging
from avalon import api, io
@ -147,7 +146,6 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
"PYPE_ROOT"
]
def _submit_deadline_post_job(self, instance, job):
"""
Deadline specific code separated from :meth:`process` for sake of
@ -192,7 +190,6 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
# Transfer the environment from the original job to this dependent
# job so they use the same environment
environment = job["Props"].get("Env", {})
i = 0
for index, key in enumerate(environment):
@ -231,12 +228,12 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
"""
# Get a submission job
data = instance.data.copy()
render_job = data.pop("deadlineSubmissionJob")
render_job = data.pop("deadlineSubmissionJob", None)
submission_type = "deadline"
if not render_job:
# No deadline job. Try Muster: musterSubmissionJob
render_job = data.pop("musterSubmissionJob")
render_job = data.pop("musterSubmissionJob", None)
submission_type = "muster"
if not render_job:
raise RuntimeError("Can't continue without valid Deadline "
@ -295,11 +292,19 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
# Optional metadata (for debugging)
"metadata": {
"instance": data,
"job": job,
"job": render_job,
"session": api.Session.copy()
}
}
if submission_type == "muster":
ftrack = {
"FTRACK_API_USER": os.environ.get("FTRACK_API_USER"),
"FTRACK_API_KEY": os.environ.get("FTRACK_API_KEY"),
"FTRACK_SERVER": os.environ.get("FTRACK_SERVER")
}
metadata.update({"ftrack": ftrack})
# Ensure output dir exists
output_dir = instance.data["outputDir"]
if not os.path.isdir(output_dir):

View file

@ -0,0 +1,190 @@
import pyblish.api
import pype.api
class ValidateFtrackAttributes(pyblish.api.InstancePlugin):
"""
This will validate attributes in ftrack against data in the scene.
Attributes to be validated are specified in:
`$PYPE_CONFIG/presets/<host>/ftrack_attributes.json`
This is an array (list) of checks in the format:
[
[<attribute>, <operator>, <expression>]
]
Where <attribute> is the name of an ftrack attribute and <operator> is
one of:
"is", "is_not", "greater_than", "less_than", "contains", "not_contains",
"starts_with", "ends_with"
<expression> is python code that is evaluated by the validator. This
allows you to fetch whatever value from the scene you want, for example
in Maya:
[
"fps", "is",
"from maya import mel; out = mel.eval('currentTimeUnitToFPS()')"
]
will test if the ftrack fps attribute on the current Task's parent is
the same as the fps reported by Maya. Store the value you need to
compare in the variable `out` in your expression.
"""
label = "Validate Custom Ftrack Attributes"
order = pype.api.ValidateContentsOrder
families = ["ftrack"]
optional = True
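# A hypothetical `ftrack_attributes.json` preset combining the fps check
# from the docstring with an invented second entry:
# [
#     ["fps", "is",
#      "from maya import mel; out = mel.eval('currentTimeUnitToFPS()')"],
#     ["resolutionWidth", "is", "out = 1920"]
# ]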
def process(self, instance):
context = instance.context
task = context.data.get('ftrackTask', False)
if not task:
self._raise(AttributeError,
"Missing FTrack Task entity in context")
host = pyblish.api.current_host()
to_check = context.data["presets"][host].get("ftrack_attributes")
if not to_check:
self.log.warning("ftrack_attributes preset not found")
return
self.log.info("getting attributes from ftrack ...")
# get parent of task
custom_attributes = {}
try:
parent = task["parent"]
custom_attributes = parent["custom_attributes"].items()
except KeyError:
self._raise(KeyError, "missing `parent` or `attributes`")
custom_attributes = dict(custom_attributes)
# get list of hierarchical attributes from ftrack
session = context.data["ftrackSession"]
custom_hier_attributes = self._get_custom_hier_attrs(session)
custom_attributes = {}
_nonhier = {}
custom_hier_attributes = {k: None for k in custom_hier_attributes}
for key, value in dict(parent["custom_attributes"]).items():
if key in custom_hier_attributes:
custom_hier_attributes[key] = value
else:
_nonhier[key] = value
custom_hier_values = self._get_hierarchical_values(
custom_hier_attributes, parent)
custom_hier_values.update(_nonhier)
errors = []
attribs = custom_hier_values
for check in to_check:
ev = {}
# WARNING(Ondrej Samohel): This is really not secure as we are
# basically executing user code. But there's no other way to make
# it flexible enough for users to get values from the scene.
exec(str(check[2]), {}, ev)
if not ev.get("out"):
errors.append("{} code doesn't return 'out': '{}'".format(
check[0], check[2]))
continue
if check[0] in attribs:
if check[1] == "is":
if attribs[check[0]] != ev["out"]:
errors.append("{}: {} is not {}".format(
check[0], attribs[check[0]], ev["out"]))
elif check[1] == "is_not":
if attribs[check[0]] == ev["out"]:
errors.append("{}: {} is {}".format(
check[0], attribs[check[0]], ev["out"]))
elif check[1] == "less_than":
# error when the attribute is not less than the expression value
if not attribs[check[0]] < ev["out"]:
errors.append("{}: {} is not less than {}".format(
check[0], attribs[check[0]], ev["out"]))
elif check[1] == "greater_than":
# error when the attribute is not greater than the expression value
if not attribs[check[0]] > ev["out"]:
errors.append("{}: {} is not greater than {}".format(
check[0], attribs[check[0]], ev["out"]))
elif check[1] == "contains":
if ev["out"] not in attribs[check[0]]:
errors.append("{}: {} does not contain {}".format(
check[0], attribs[check[0]], ev["out"]))
elif check[1] == "not_contains":
if ev["out"] in attribs[check[0]]:
errors.append("{}: {} contains {}".format(
check[0], attribs[check[0]], ev["out"]))
elif check[1] == "starts_with":
if not attribs[check[0]].startswith(ev["out"]):
errors.append("{}: {} does not start with {}".format(
check[0], attribs[check[0]], ev["out"]))
elif check[1] == "ends_with":
if not attribs[check[0]].endswith(ev["out"]):
errors.append("{}: {} does not end with {}".format(
check[0], attribs[check[0]], ev["out"]))
if errors:
self.log.error('There are invalid values for attributes:')
for e in errors:
self.log.error(e)
raise ValueError("ftrack attributes don't match")
def _get_custom_hier_attrs(self, session):
hier_custom_attributes = []
cust_attrs_query = (
"select id, entity_type, object_type_id, is_hierarchical"
" from CustomAttributeConfiguration"
)
all_avalon_attr = session.query(cust_attrs_query).all()
for cust_attr in all_avalon_attr:
if cust_attr["is_hierarchical"]:
hier_custom_attributes.append(cust_attr["key"])
return hier_custom_attributes
def _get_hierarchical_values(self, keys_dict, entity):
# check values already set
_set_keys = []
for key, value in keys_dict.items():
if value is not None:
_set_keys.append(key)
# pop set values from keys_dict
set_keys = {}
for key in _set_keys:
set_keys[key] = keys_dict.pop(key)
# find if entity has set values and pop them out
keys_to_pop = []
for key in keys_dict.keys():
_val = entity["custom_attributes"][key]
if _val:
keys_to_pop.append(key)
set_keys[key] = _val
for key in keys_to_pop:
keys_dict.pop(key)
# if there are no keys left to resolve, return what was found
if not keys_dict:
return set_keys
# end recursion if entity is project
if entity.entity_type.lower() == "project":
for key, value in keys_dict.items():
set_keys[key] = value
else:
result = self._get_hierarchical_values(keys_dict, entity["parent"])
for key, value in result.items():
set_keys[key] = value
return set_keys
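# Hypothetical walk-through: with keys_dict = {"fps": None, "handles": None}
# on a Shot that only sets "fps" itself, "fps" is popped into set_keys and
# the method recurses to the Shot's parent (Sequence, then Project) until
# "handles" is resolved; at project level anything still unresolved is
# returned as-is.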
def _raise(self, exc, msg):
self.log.error(msg)
raise exc(msg)

Some files were not shown because too many files have changed in this diff