feat(aport, plugins): updating to last stable version from CCAS

Jakub Jezek 2019-10-23 15:36:08 +02:00
parent 1643093074
commit de3b959273
21 changed files with 1569 additions and 853 deletions

View file

@ -3,15 +3,17 @@ import sys
from avalon import api as avalon
from pyblish import api as pyblish
from pypeapp import execute, Logger
from app import api as app
from .. import api
from .lib import set_avalon_workdir
t = app.Templates()
log = Logger().get_logger(__name__, "aport")
log = api.Logger.getLogger(__name__, "aport")
AVALON_CONFIG = os.getenv("AVALON_CONFIG", "pype")
ADDITIONAL_PLUGINS = ['ftrack']
PARENT_DIR = os.path.dirname(__file__)
PACKAGE_DIR = os.path.dirname(PARENT_DIR)
PLUGINS_DIR = os.path.join(PACKAGE_DIR, "plugins")
@ -33,8 +35,31 @@ CREATE_PATH = os.path.join(PLUGINS_DIR, "aport", "create")
INVENTORY_PATH = os.path.join(PLUGINS_DIR, "aport", "inventory")
def register_additional_plugin_paths():
'''Add publish plugin paths for additional hosts.'''
for host in ADDITIONAL_PLUGINS:
publish_path = os.path.join(
PLUGINS_DIR, host, "publish").replace("\\", "/")
pyblish.register_plugin_path(publish_path)
# adding path to PUBLISH_PATH environment
if os.getenv("PUBLISH_PATH", None):
os.environ["PUBLISH_PATH"] = os.pathsep.join(
os.environ["PUBLISH_PATH"].split(os.pathsep) +
[publish_path]
)
else:
os.environ["PUBLISH_PATH"] = publish_path
log.info(
"Registered additional plugin path: "
"{}".format(publish_path))
def install():
set_avalon_workdir()
# api.set_avalon_workdir()
log.info("Registering Aport plug-ins..")
pyblish.register_plugin_path(PUBLISH_PATH)
@ -42,6 +67,9 @@ def install():
avalon.register_plugin_path(avalon.Creator, CREATE_PATH)
avalon.register_plugin_path(avalon.InventoryAction, INVENTORY_PATH)
# additional plugins
register_additional_plugin_paths()
# Disable all families except for the ones we explicitly want to see
family_states = [
"imagesequence",
@ -51,6 +79,9 @@ def install():
avalon.data["familiesStateDefault"] = False
avalon.data["familiesStateToggled"] = family_states
# load data from templates
api.load_data_from_templates()
# launch pico server
pico_server_launch()
@ -81,7 +112,7 @@ def pico_server_launch():
"api"
]
execute(
app.forward(
args,
cwd=path
)

View file

@ -1,34 +1,90 @@
# api.py
import os
import sys
import tempfile
import pico
from pico import PicoApp
from pico.decorators import request_args, set_cookie, delete_cookie, stream
from pico.decorators import header, cookie
from app.api import forward, Logger
from werkzeug.exceptions import Unauthorized, ImATeapot, BadRequest
import pipeline as ppl
from avalon import api as avalon
from avalon import io
import pyblish.api as pyblish
from pypeapp import execute
from pype import api as pype
log = pype.Logger().get_logger(__name__, "aport")
SESSION = avalon.session
if not SESSION:
io.install()
log = Logger.getLogger(__name__, "aport")
@pico.expose()
def publish(json_data_path, gui):
def get_session():
ppl.AVALON_PROJECT = os.getenv("AVALON_PROJECT", None)
ppl.AVALON_ASSET = os.getenv("AVALON_ASSET", None)
ppl.AVALON_TASK = os.getenv("AVALON_TASK", None)
ppl.AVALON_SILO = os.getenv("AVALON_SILO", None)
return ppl.get_session()
@pico.expose()
def load_representations(project, representations):
'''Query data from MongoDB for the given representations.
Args:
project (str): name of the project
representations (list): representations which are required
Returns:
data (dict): representations in last versions
# testing url:
http://localhost:4242/api/load_representations?project=jakub_projectx&representations=[{%22asset%22:%22e09s031_0040%22,%22subset%22:%22referenceDefault%22,%22representation%22:%22mp4%22},%20{%22asset%22:%22e09s031_0030%22,%22subset%22:%22referenceDefault%22,%22representation%22:%22mp4%22}]
# returning:
{"e09s031_0040_referenceDefault":{"_id":"5c6dabaa2af61756b02f7f32","schema":"pype:representation-2.0","type":"representation","parent":"5c6dabaa2af61756b02f7f31","name":"mp4","data":{"path":"C:\\Users\\hubert\\_PYPE_testing\\projects\\jakub_projectx\\thisFolder\\e09\\s031\\e09s031_0040\\publish\\clip\\referenceDefault\\v019\\jkprx_e09s031_0040_referenceDefault_v019.mp4","template":"{publish.root}/{publish.folder}/{version.main}/{publish.file}"},"dependencies":[],"context":{"root":"C:\\Users\\hubert\\_PYPE_testing\\projects","project":{"name":"jakub_projectx","code":"jkprx"},"task":"edit","silo":"thisFolder","asset":"e09s031_0040","family":"clip","subset":"referenceDefault","VERSION":19,"hierarchy":"thisFolder\\e09\\s031","representation":"mp4"}}}
'''
data = {}
# log.info("___project: {}".format(project))
# ppl.io.activate_project(project)
#
# from_mongo = ppl.io.find({"name": repr['representation'],
# "type": "representation"})[:]
for repr in representations:
log.info("asset: {}".format(repr['asset']))
# set context for each asset individually
context(project, repr['asset'], '')
# query data from mongo db for the asset's subset representation
related_repr = [r for r in ppl.io.find({"name": repr['representation'],
"type": "representation",
"context.asset": repr['asset']})[:]]
versions_dict = {r['context']['version']: i
for i, r in enumerate(related_repr)}
versions_list = [v for v in versions_dict.keys()]
versions_list.sort()
version_index_last = versions_dict[max(versions_list)]
log.info("version_index_last: {}".format(version_index_last))
# create name which will be used on timeline clip
name = '_'.join([repr['asset'], repr['subset']])
# log.info("___related_repr: {}".format(related_repr))
# assign data for the clip representation
version = ppl.io.find_one(
{'_id': related_repr[version_index_last]['parent']})
log.info("version: {}".format(version))
# fix path workaround
if '.#####.mxf' in related_repr[version_index_last]['data']['path']:
related_repr[version_index_last]['data']['path'] = related_repr[version_index_last]['data']['path'].replace(
'.#####.mxf', '.mxf')
related_repr[version_index_last]['version'] = version
related_repr[version_index_last]['parentClip'] = repr['parentClip']
data[name] = related_repr[version_index_last]
return data
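For reference, a minimal client-side sketch of querying this endpoint, assuming the pico server started by pico_server_launch() is listening on localhost:4242; the asset, subset and parentClip values are placeholders taken from the testing URL above:

import json
import requests

representations = [
    {"asset": "e09s031_0040",
     "subset": "referenceDefault",
     "representation": "mp4",
     "parentClip": "clip01"},
]
response = requests.get(
    "http://localhost:4242/api/load_representations",
    params={
        "project": "jakub_projectx",
        "representations": json.dumps(representations),
    },
)
# keys are "<asset>_<subset>", values are the latest representation documents
for name, repre in response.json().items():
    print(name, repre["data"]["path"])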
@pico.expose()
def publish(send_json_path, get_json_path, gui):
"""
Runs standalone pyblish and adds link to
data in external json file
@ -37,82 +93,101 @@ def publish(json_data_path, gui):
host is needed
Args:
json_data_path (string): path to temp json file with
context data
staging_dir (string, optional): path to temp directory
send_json_path (string): path to temp json file with
sending context data
get_json_path (string): path to temp json file with
returning context data
Returns:
dict: return_json_path
dict: get_json_path
Raises:
Exception: description
"""
cwd = os.getenv('AVALON_WORKDIR').replace("\\", "/")
staging_dir = tempfile.mkdtemp(prefix="pype_aport_").replace("\\", "/")
log.info("staging_dir: {}".format(staging_dir))
return_json_path = os.path.join(staging_dir, "return_data.json").replace("\\", "/")
log.info("avalon.session is: \n{}".format(ppl.SESSION))
log.info("PUBLISH_PATH: \n{}".format(os.environ["PUBLISH_PATH"]))
log.info("avalon.session is: \n{}".format(SESSION))
pype_start = os.path.join(os.getenv('PYPE_ROOT'),
pype_start = os.path.join(os.getenv('PYPE_SETUP_ROOT'),
"app", "pype-start.py")
publish = "--publish-gui" if gui else "--publish"
args = [pype_start, publish,
args = [pype_start,
"--root", os.environ['AVALON_PROJECTS'], "--publish-gui",
"-pp", os.environ["PUBLISH_PATH"],
"-d", "rqst_json_data_path", json_data_path,
"-d", "post_json_data_path", return_json_path
"-d", "rqst_json_data_path", send_json_path,
"-d", "post_json_data_path", get_json_path
]
log.debug(args)
# start standalone pyblish qml
execute([
log.info("_aport.api Variable `AVALON_PROJECTS` had changed to `{0}`.".format(
os.environ['AVALON_PROJECTS']))
forward([
sys.executable, "-u"
] + args,
cwd=cwd
# cwd=cwd
)
return {"return_json_path": return_json_path}
return {"get_json_path": get_json_path}
@pico.expose()
def context(project_name, asset, task, app):
def context(project, asset, task, app='aport'):
os.environ["AVALON_PROJECT"] = ppl.AVALON_PROJECT = project
os.environ["AVALON_ASSET"] = ppl.AVALON_ASSET = asset
os.environ["AVALON_TASK"] = ppl.AVALON_TASK = task
os.environ["AVALON_SILO"] = ppl.AVALON_SILO = ''
ppl.get_session()
# log.info('ppl.SESSION: {}'.format(ppl.SESSION))
# http://localhost:4242/pipeline/context?project=this&asset=shot01&task=comp
os.environ["AVALON_PROJECT"] = project_name
io.Session["AVALON_PROJECT"] = project_name
ppl.update_current_task(task, asset, app)
avalon.update_current_task(task, asset, app)
project_code = ppl.io.find_one({"type": "project"})["data"].get("code", '')
project_code = pype.get_project()["data"].get("code", '')
os.environ["AVALON_PROJECTCODE"] = \
ppl.SESSION["AVALON_PROJECTCODE"] = project_code
os.environ["AVALON_PROJECTCODE"] = project_code
io.Session["AVALON_PROJECTCODE"] = project_code
parents = ppl.io.find_one({"type": 'asset',
"name": ppl.AVALON_ASSET})['data']['parents']
hierarchy = pype.get_hierarchy()
os.environ["AVALON_HIERARCHY"] = hierarchy
io.Session["AVALON_HIERARCHY"] = hierarchy
if parents and len(parents) > 0:
# hierarchy = os.path.sep.join(hierarchy)
hierarchy = os.path.join(*parents).replace("\\", "/")
fix_paths = {k: v.replace("\\", "/") for k, v in SESSION.items()
os.environ["AVALON_HIERARCHY"] = \
ppl.SESSION["AVALON_HIERARCHY"] = hierarchy
fix_paths = {k: v.replace("\\", "/") for k, v in ppl.SESSION.items()
if isinstance(v, str)}
SESSION.update(fix_paths)
SESSION.update({"AVALON_HIERARCHY": hierarchy,
"AVALON_PROJECTCODE": project_code,
"current_dir": os.getcwd().replace("\\", "/")
})
return SESSION
ppl.SESSION.update(fix_paths)
ppl.SESSION.update({"AVALON_HIERARCHY": hierarchy,
"AVALON_PROJECTCODE": project_code,
"current_dir": os.getcwd().replace("\\", "/")
})
return ppl.SESSION
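A minimal client-side sketch of the same call over HTTP, assuming the aport pico server is running on localhost:4242; the project, asset and task values are placeholders:

import requests

session = requests.get(
    "http://localhost:4242/api/context",
    params={"project": "jakub_projectx",
            "asset": "e09s031_0040",
            "task": "edit"},
).json()
print(session["AVALON_PROJECTCODE"], session["AVALON_HIERARCHY"])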
@pico.expose()
def anatomy_fill(data):
from pype import api as pype
pype.load_data_from_templates()
anatomy = pype.Anatomy
return anatomy.format(data)
@pico.expose()
def deregister_plugin_path():
if os.getenv("PUBLISH_PATH", None):
aport_plugin_path = [p.replace("\\", "/") for p in os.environ["PUBLISH_PATH"].split(
os.pathsep) if "aport" in p][0]
aport_plugin_path = os.pathsep.join(
[p.replace("\\", "/")
for p in os.environ["PUBLISH_PATH"].split(os.pathsep)
if "aport" in p or
"ftrack" in p])
os.environ["PUBLISH_PATH"] = aport_plugin_path
else:
log.warning("deregister_plugin_path(): No PUBLISH_PATH is registred")
@ -125,8 +200,8 @@ def register_plugin_path(publish_path):
deregister_plugin_path()
if os.getenv("PUBLISH_PATH", None):
os.environ["PUBLISH_PATH"] = os.pathsep.join(
os.environ["PUBLISH_PATH"].split(os.pathsep) +
[publish_path.replace("\\", "/")]
os.environ["PUBLISH_PATH"].split(os.pathsep)
+ [publish_path.replace("\\", "/")]
)
else:
os.environ["PUBLISH_PATH"] = publish_path
@ -143,8 +218,8 @@ app.register_module(__name__)
# remove all Handlers created by pico
for name, handler in [(handler.get_name(), handler)
for handler in pype.Logger.logging.root.handlers[:]]:
for handler in Logger.logging.root.handlers[:]]:
if "pype" not in str(name).lower():
print(name)
print(handler)
pype.Logger.logging.root.removeHandler(handler)
Logger.logging.root.removeHandler(handler)
# SPLASH.hide_splash()

View file

@ -0,0 +1,432 @@
"""
Wrapper around interactions with the database
Copy of the io module in avalon-core.
- In this case it does not work as a singleton with api.Session!
"""
import os
import time
import errno
import shutil
import logging
import tempfile
import functools
import contextlib
from avalon import schema
import requests
# Third-party dependencies
import pymongo
def auto_reconnect(func):
"""Handling auto reconnect in 3 retry times"""
@functools.wraps(func)
def decorated(*args, **kwargs):
object = args[0]
for retry in range(3):
try:
return func(*args, **kwargs)
except pymongo.errors.AutoReconnect:
object.log.error("Reconnecting..")
time.sleep(0.1)
else:
raise
return decorated
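A minimal usage sketch, assuming the decorated method lives on an object exposing a `log` attribute (the decorator reads it from the first positional argument, i.e. `self`); ExampleConnector is only illustrative:

import logging
import pymongo

class ExampleConnector(object):
    log = logging.getLogger(__name__)

    def __init__(self, uri="mongodb://localhost:27017"):
        self._client = pymongo.MongoClient(uri)

    @auto_reconnect
    def ping(self):
        # retried up to three times when pymongo raises AutoReconnect
        return self._client.server_info()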
class DbConnector(object):
log = logging.getLogger(__name__)
def __init__(self):
self.Session = {}
self._mongo_client = None
self._sentry_client = None
self._sentry_logging_handler = None
self._database = None
self._is_installed = False
def install(self):
"""Establish a persistent connection to the database"""
if self._is_installed:
return
logging.basicConfig()
self.Session.update(self._from_environment())
timeout = int(self.Session["AVALON_TIMEOUT"])
self._mongo_client = pymongo.MongoClient(
self.Session["AVALON_MONGO"], serverSelectionTimeoutMS=timeout)
for retry in range(3):
try:
t1 = time.time()
self._mongo_client.server_info()
except Exception:
self.log.error("Retrying..")
time.sleep(1)
timeout *= 1.5
else:
break
else:
raise IOError(
"ERROR: Couldn't connect to %s in "
"less than %.3f ms" % (self.Session["AVALON_MONGO"], timeout))
self.log.info("Connected to %s, delay %.3f s" % (
self.Session["AVALON_MONGO"], time.time() - t1))
self._install_sentry()
self._database = self._mongo_client[self.Session["AVALON_DB"]]
self._is_installed = True
def _install_sentry(self):
if "AVALON_SENTRY" not in self.Session:
return
try:
from raven import Client
from raven.handlers.logging import SentryHandler
from raven.conf import setup_logging
except ImportError:
# Note: There was a Sentry address in this Session
return self.log.warning("Sentry disabled, raven not installed")
client = Client(self.Session["AVALON_SENTRY"])
# Transmit log messages to Sentry
handler = SentryHandler(client)
handler.setLevel(logging.WARNING)
setup_logging(handler)
self._sentry_client = client
self._sentry_logging_handler = handler
self.log.info(
"Connected to Sentry @ %s" % self.Session["AVALON_SENTRY"]
)
def _from_environment(self):
Session = {
item[0]: os.getenv(item[0], item[1])
for item in (
# Root directory of projects on disk
("AVALON_PROJECTS", None),
# Name of current Project
("AVALON_PROJECT", ""),
# Name of current Asset
("AVALON_ASSET", ""),
# Name of current silo
("AVALON_SILO", ""),
# Name of current task
("AVALON_TASK", None),
# Name of current app
("AVALON_APP", None),
# Path to working directory
("AVALON_WORKDIR", None),
# Name of current Config
# TODO(marcus): Establish a suitable default config
("AVALON_CONFIG", "no_config"),
# Name of Avalon in graphical user interfaces
# Use this to customise the visual appearance of Avalon
# to better integrate with your surrounding pipeline
("AVALON_LABEL", "Avalon"),
# Used during any connections to the outside world
("AVALON_TIMEOUT", "1000"),
# Address to Asset Database
("AVALON_MONGO", "mongodb://localhost:27017"),
# Name of database used in MongoDB
("AVALON_DB", "avalon"),
# Address to Sentry
("AVALON_SENTRY", None),
# Address to Deadline Web Service
# E.g. http://192.167.0.1:8082
("AVALON_DEADLINE", None),
# Enable features not necessarily stable. The user's own risk
("AVALON_EARLY_ADOPTER", None),
# Address of central asset repository, contains
# the following interface:
# /upload
# /download
# /manager (optional)
("AVALON_LOCATION", "http://127.0.0.1"),
# Boolean of whether to upload published material
# to central asset repository
("AVALON_UPLOAD", None),
# Generic username and password
("AVALON_USERNAME", "avalon"),
("AVALON_PASSWORD", "secret"),
# Unique identifier for instances in working files
("AVALON_INSTANCE_ID", "avalon.instance"),
("AVALON_CONTAINER_ID", "avalon.container"),
# Enable debugging
("AVALON_DEBUG", None),
) if os.getenv(item[0], item[1]) is not None
}
Session["schema"] = "avalon-core:session-1.0"
try:
schema.validate(Session)
except schema.ValidationError as e:
# TODO(marcus): Make this mandatory
self.log.warning(e)
return Session
def uninstall(self):
"""Close any connection to the database"""
try:
self._mongo_client.close()
except AttributeError:
pass
self._mongo_client = None
self._database = None
self._is_installed = False
def active_project(self):
"""Return the name of the active project"""
return self.Session["AVALON_PROJECT"]
def activate_project(self, project_name):
self.Session["AVALON_PROJECT"] = project_name
def projects(self):
"""List available projects
Returns:
list of project documents
"""
collection_names = self.collections()
for project in collection_names:
if project in ("system.indexes",):
continue
# Each collection will have exactly one project document
document = self.find_project(project)
if document is not None:
yield document
def locate(self, path):
"""Traverse a hierarchy from top-to-bottom
Example:
representation = locate(["hulk", "Bruce", "modelDefault", 1, "ma"])
Returns:
representation (ObjectId)
"""
components = zip(
("project", "asset", "subset", "version", "representation"),
path
)
parent = None
for type_, name in components:
latest = (type_ == "version") and name in (None, -1)
try:
if latest:
parent = self.find_one(
filter={
"type": type_,
"parent": parent
},
projection={"_id": 1},
sort=[("name", -1)]
)["_id"]
else:
parent = self.find_one(
filter={
"type": type_,
"name": name,
"parent": parent
},
projection={"_id": 1},
)["_id"]
except TypeError:
return None
return parent
@auto_reconnect
def collections(self):
return self._database.collection_names()
@auto_reconnect
def find_project(self, project):
return self._database[project].find_one({"type": "project"})
@auto_reconnect
def insert_one(self, item):
assert isinstance(item, dict), "item must be of type <dict>"
schema.validate(item)
return self._database[self.Session["AVALON_PROJECT"]].insert_one(item)
@auto_reconnect
def insert_many(self, items, ordered=True):
# check if all items are valid
assert isinstance(items, list), "`items` must be of type <list>"
for item in items:
assert isinstance(item, dict), "`item` must be of type <dict>"
schema.validate(item)
return self._database[self.Session["AVALON_PROJECT"]].insert_many(
items,
ordered=ordered)
@auto_reconnect
def find(self, filter, projection=None, sort=None):
return self._database[self.Session["AVALON_PROJECT"]].find(
filter=filter,
projection=projection,
sort=sort
)
@auto_reconnect
def find_one(self, filter, projection=None, sort=None):
assert isinstance(filter, dict), "filter must be <dict>"
return self._database[self.Session["AVALON_PROJECT"]].find_one(
filter=filter,
projection=projection,
sort=sort
)
@auto_reconnect
def save(self, *args, **kwargs):
return self._database[self.Session["AVALON_PROJECT"]].save(
*args, **kwargs)
@auto_reconnect
def replace_one(self, filter, replacement):
return self._database[self.Session["AVALON_PROJECT"]].replace_one(
filter, replacement)
@auto_reconnect
def update_many(self, filter, update):
return self._database[self.Session["AVALON_PROJECT"]].update_many(
filter, update)
@auto_reconnect
def distinct(self, *args, **kwargs):
return self._database[self.Session["AVALON_PROJECT"]].distinct(
*args, **kwargs)
@auto_reconnect
def drop(self, *args, **kwargs):
return self._database[self.Session["AVALON_PROJECT"]].drop(
*args, **kwargs)
@auto_reconnect
def delete_many(self, *args, **kwargs):
return self._database[self.Session["AVALON_PROJECT"]].delete_many(
*args, **kwargs)
def parenthood(self, document):
assert document is not None, "This is a bug"
parents = list()
while document.get("parent") is not None:
document = self.find_one({"_id": document["parent"]})
if document is None:
break
parents.append(document)
return parents
@contextlib.contextmanager
def tempdir(self):
tempdir = tempfile.mkdtemp()
try:
yield tempdir
finally:
shutil.rmtree(tempdir)
def download(self, src, dst):
"""Download `src` to `dst`
Arguments:
src (str): URL to source file
dst (str): Absolute path to destination file
Yields tuple (progress, error):
progress (int): Between 0-100
error (Exception): Any exception raised when first making connection
"""
try:
response = requests.get(
src,
stream=True,
auth=requests.auth.HTTPBasicAuth(
self.Session["AVALON_USERNAME"],
self.Session["AVALON_PASSWORD"]
)
)
except requests.ConnectionError as e:
yield None, e
return
with self.tempdir() as dirname:
tmp = os.path.join(dirname, os.path.basename(src))
with open(tmp, "wb") as f:
total_length = response.headers.get("content-length")
if total_length is None: # no content length header
f.write(response.content)
else:
downloaded = 0
total_length = int(total_length)
for data in response.iter_content(chunk_size=4096):
downloaded += len(data)
f.write(data)
yield int(100.0 * downloaded / total_length), None
try:
os.makedirs(os.path.dirname(dst))
except OSError as e:
# An already existing destination directory is fine.
if e.errno != errno.EEXIST:
raise
shutil.copy(tmp, dst)
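A minimal usage sketch of the download generator, assuming an installed connector and a reachable source URL; both paths below are placeholders:

io = DbConnector()
io.install()  # needs AVALON_MONGO etc. in the environment
for progress, error in io.download(
        "http://127.0.0.1/download/example.ma", "/tmp/example.ma"):
    if error is not None:
        raise error
    print("downloaded: {}%".format(progress))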

View file

@ -1,135 +1,26 @@
import os
import re
import sys
from avalon import io, api as avalon, lib as avalonlib
from pype import lib
from pype import api as pype
# from pypeapp.api import (Templates, Logger, format)
from pypeapp import Logger, Anatomy
log = Logger().get_logger(__name__, os.getenv("AVALON_APP", "pype-config"))
import pype.api as pype
def get_asset():
"""
Obtain Asset string from session or environment variable
Returns:
string: asset name
Raises:
log: error
"""
lib.set_io_database()
asset = io.Session.get("AVALON_ASSET", None) \
or os.getenv("AVALON_ASSET", None)
log.info("asset: {}".format(asset))
assert asset, log.error("missing `AVALON_ASSET`"
"in avalon session "
"or os.environ!")
return asset
def get_anatomy(**kwarg):
return pype.Anatomy
def get_context_data(
project_name=None, hierarchy=None, asset=None, task_name=None
):
"""
Collect all main contextual data
def format_anatomy(data):
from .templates import (
get_anatomy
)
file = script_name()
Args:
project (string, optional): project name
hierarchy (string, optional): hierarchy path
asset (string, optional): asset name
task (string, optional): task name
anatomy = get_anatomy()
Returns:
dict: contextual data
# TODO: perhaps should be in try!
padding = anatomy.render.padding
"""
if not task_name:
lib.set_io_database()
task_name = io.Session.get("AVALON_TASK", None) \
or os.getenv("AVALON_TASK", None)
assert task_name, log.error(
"missing `AVALON_TASK` in avalon session or os.environ!"
)
data.update({
"hierarchy": pype.get_hierarchy(),
"frame": "#" * padding,
"VERSION": pype.get_version_from_workfile(file)
})
application = avalonlib.get_application(os.environ["AVALON_APP_NAME"])
os.environ['AVALON_PROJECT'] = project_name
io.Session['AVALON_PROJECT'] = project_name
if not hierarchy:
hierarchy = pype.get_hierarchy()
project_doc = io.find_one({"type": "project"})
data = {
"task": task_name,
"asset": asset or get_asset(),
"project": {
"name": project_doc["name"],
"code": project_doc["data"].get("code", '')
},
"hierarchy": hierarchy,
"app": application["application_dir"]
}
return data
def set_avalon_workdir(
project=None, hierarchy=None, asset=None, task=None
):
"""
Updates os.environ and session with filled workdir
Args:
project (string, optional): project name
hierarchy (string, optional): hierarchy path
asset (string, optional): asset name
task (string, optional): task name
Returns:
os.environ[AVALON_WORKDIR]: workdir path
avalon.session[AVALON_WORKDIR]: workdir path
"""
lib.set_io_database()
awd = io.Session.get("AVALON_WORKDIR", None) or \
os.getenv("AVALON_WORKDIR", None)
data = get_context_data(project, hierarchy, asset, task)
if (not awd) or ("{" not in awd):
anatomy_filled = Anatomy(io.Session["AVALON_PROJECT"]).format(data)
awd = anatomy_filled["work"]["folder"]
awd_filled = os.path.normpath(format(awd, data))
io.Session["AVALON_WORKDIR"] = awd_filled
os.environ["AVALON_WORKDIR"] = awd_filled
log.info("`AVALON_WORKDIR` fixed to: {}".format(awd_filled))
def get_workdir_template(data=None):
"""
Obtain workdir templated path from Anatomy()
Args:
data (dict, optional): basic contextual data
Returns:
string: template path
"""
anatomy = Anatomy()
anatomy_filled = anatomy.format(data or get_context_data())
try:
work = anatomy_filled["work"]
except Exception as e:
log.error(
"{0} Error in get_workdir_template(): {1}".format(__name__, str(e))
)
return work
# log.info("format_anatomy:anatomy: {}".format(anatomy))
return anatomy.format(data)

View file

@ -1,252 +0,0 @@
# api.py
import os
import sys
import tempfile
import pico
from pico import PicoApp
from pico.decorators import request_args, set_cookie, delete_cookie, stream
from pico.decorators import header, cookie
from werkzeug.exceptions import Unauthorized, ImATeapot, BadRequest
from avalon import api as avalon
from avalon import io
import pyblish.api as pyblish
from pypeapp import execute
from pype import api as pype
log = pype.Logger().get_logger(__name__, "aport")
SESSION = avalon.session
if not SESSION:
io.install()
@pico.expose()
def publish(json_data_path, staging_dir=None):
"""
Runs standalone pyblish and adds link to
data in external json file
It is necessary to run `register_plugin_path` if particular
host is needed
Args:
json_data_path (string): path to temp json file with
context data
staging_dir (string, optional): path to temp directory
Returns:
dict: return_json_path
Raises:
Exception: description
"""
cwd = os.getenv('AVALON_WORKDIR').replace("\\", "/")
os.chdir(cwd)
log.info(os.getcwd())
staging_dir = tempfile.mkdtemp(prefix="pype_aport_").replace("\\", "/")
log.info("staging_dir: {}".format(staging_dir))
return_json_path = os.path.join(staging_dir, "return_data.json")
log.info("avalon.session is: \n{}".format(SESSION))
pype_start = os.path.join(os.getenv('PYPE_ROOT'),
"app", "pype-start.py")
args = [pype_start, "--publish",
"-pp", os.environ["PUBLISH_PATH"],
"-d", "rqst_json_data_path", json_data_path,
"-d", "post_json_data_path", return_json_path
]
log.debug(args)
# start standalone pyblish qml
execute([
sys.executable, "-u"
] + args,
cwd=cwd
)
return {"return_json_path": return_json_path}
@pico.expose()
def context(project, asset, task, app):
# http://localhost:4242/pipeline/context?project=this&asset=shot01&task=comp
os.environ["AVALON_PROJECT"] = project
io.Session["AVALON_PROJECT"] = project
avalon.update_current_task(task, asset, app)
project_code = pype.get_project()["data"].get("code", '')
os.environ["AVALON_PROJECTCODE"] = project_code
io.Session["AVALON_PROJECTCODE"] = project_code
hierarchy = pype.get_hierarchy()
os.environ["AVALON_HIERARCHY"] = hierarchy
io.Session["AVALON_HIERARCHY"] = hierarchy
fix_paths = {k: v.replace("\\", "/") for k, v in SESSION.items()
if isinstance(v, str)}
SESSION.update(fix_paths)
SESSION.update({"AVALON_HIERARCHY": hierarchy,
"AVALON_PROJECTCODE": project_code,
"current_dir": os.getcwd().replace("\\", "/")
})
return SESSION
@pico.expose()
def deregister_plugin_path():
if os.getenv("PUBLISH_PATH", None):
aport_plugin_path = [p.replace("\\", "/") for p in os.environ["PUBLISH_PATH"].split(
os.pathsep) if "aport" in p][0]
os.environ["PUBLISH_PATH"] = aport_plugin_path
else:
log.warning("deregister_plugin_path(): No PUBLISH_PATH is registred")
return "Publish path deregistered"
@pico.expose()
def register_plugin_path(publish_path):
deregister_plugin_path()
if os.getenv("PUBLISH_PATH", None):
os.environ["PUBLISH_PATH"] = os.pathsep.join(
os.environ["PUBLISH_PATH"].split(os.pathsep) +
[publish_path.replace("\\", "/")]
)
else:
os.environ["PUBLISH_PATH"] = publish_path
log.info(os.environ["PUBLISH_PATH"].split(os.pathsep))
return "Publish registered paths: {}".format(
os.environ["PUBLISH_PATH"].split(os.pathsep)
)
@pico.expose()
def nuke_test():
import nuke
n = nuke.createNode("Constant")
log.info(n)
@pico.expose()
def hello(who='world'):
return 'Hello %s' % who
@pico.expose()
def multiply(x, y):
return x * y
@pico.expose()
def fail():
raise Exception('fail!')
@pico.expose()
def make_coffee():
raise ImATeapot()
@pico.expose()
def upload(upload, filename):
if not filename.endswith('.txt'):
raise BadRequest('Upload must be a .txt file!')
return upload.read().decode()
@pico.expose()
@request_args(ip='remote_addr')
def my_ip(ip):
return ip
@pico.expose()
@request_args(ip=lambda req: req.remote_addr)
def my_ip3(ip):
return ip
@pico.prehandle()
def set_user(request, kwargs):
if request.authorization:
if request.authorization.password != 'secret':
raise Unauthorized('Incorrect username or password')
request.user = request.authorization.username
else:
request.user = None
@pico.expose()
@request_args(username='user')
def current_user(username):
return username
@pico.expose()
@request_args(session=cookie('session_id'))
def session_id(session):
return session
@pico.expose()
@set_cookie()
def start_session():
return {'session_id': '42'}
@pico.expose()
@delete_cookie('session_id')
def end_session():
return True
@pico.expose()
@request_args(session=header('x-session-id'))
def session_id2(session):
return session
@pico.expose()
@stream()
def countdown(n=10):
for i in reversed(range(n)):
yield '%i' % i
time.sleep(0.5)
@pico.expose()
def user_description(user):
return '{name} is a {occupation} aged {age}'.format(**user)
@pico.expose()
def show_source():
return open(__file__.replace('.pyc', '.py')).read()
app = PicoApp()
app.register_module(__name__)
# remove all Handlers created by pico
for name, handler in [(handler.get_name(), handler)
for handler in Logger().logging.root.handlers[:]]:
if "pype" not in str(name).lower():
print(name)
print(handler)
Logger().logging.root.removeHandler(handler)

View file

@ -1,196 +0,0 @@
<!DOCTYPE html>
<html>
<head>
<meta name="viewport" content="width=device-width, initial-scale=1">
<title>Pico Example - Everything</title>
<!-- Load the pico Javascript client, always automatically available at /pico.js -->
<script src="/pico.js"></script>
<!-- Or load our module proxy -->
<script src="/api.js"></script>
<link rel="stylesheet" href="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.6/css/bootstrap.min.css" integrity="sha384-1q8mTJOASx8j1Au+a5WDVnPi2lkFfwwEAa8hDDdjZlpLegxhjVME1fgjWPGmkzs7" crossorigin="anonymous">
<link rel="stylesheet" href="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.6/css/bootstrap-theme.min.css" integrity="sha384-fLW2N01lMqjakBkx3l/M9EahuwpSfeNvV63J5ezn3uZzapT0u7EYsXMjQV+0En5r" crossorigin="anonymous">
<link rel="stylesheet" href="//cdnjs.cloudflare.com/ajax/libs/highlight.js/9.6.0/styles/default.min.css">
<script src="//cdnjs.cloudflare.com/ajax/libs/highlight.js/9.6.0/highlight.min.js"></script>
<script></script>
<style type="text/css">
html, body {
height: 100%;
margin: 0px;
padding: 0px;
}
div {
padding: 5px;
}
#container {
height: 100%;
}
#header {
height: 5%;
}
#main {
height: 70%;
}
#output {
background-color: #333;
color: #aaa;
min-height: 15%;
overflow-y: scroll;
padding: 20px;
position: fixed;
bottom: 0px;
width: 100%;
}
.error {
color: #f00 !important;
}
#examples li{
padding: 10px;
margin: 10px;
background-color: silver;
}
code {
border-radius: 0;
margin: 5px;
white-space: pre !important;
}
#source {
height: 100%;
}
#examples {
height: 100%;
}
#spacer {
height: 20%;
}
.highlight {
background-color: yellow;
}
</style>
</head>
<body>
<div id="container">
<div class="row row-eq-height">
<div class="col-md-12">
<h1>Pico Examples</h1>
<p>Here we show some simple examples of using Pico. Click any <code>api.X</code> link to see the corresponding Python source.</p>
</div>
</div>
<div class="row row-eq-height" id="main">
<div class="col-md-6" id="examples">
<ol>
<li id="example1">
<h4>Hello World</h4>
<pre><code class="js"></code></pre>
Name: <input type="text" name="name" value="Bob"/>
<button class="btn btn-default btn-sm" type="button" onclick="example1()">Submit</button>
</li>
<li id="deregister">
<h4>deregister_plugin_path</h4>
<pre><code class="js"></code></pre>
<button class="btn btn-default btn-sm" type="button" onclick="deregister()">Deregister</button>
</li>
<li id="register">
<h4>register_plugin_path</h4>
<pre><code class="js"></code></pre>
Path: <input type="text" name="path" value="C:/Users/hubert/CODE/pype-setup/repos/pype-config/pype/plugins/premiere/publish"/>
<button class="btn btn-default btn-sm" type="button" onclick="register()">Register path</button>
</li>
<li id="example2">
<h4>Numeric Multiplication</h4>
<pre><code class="js"></code></pre>
<input type="number" name="x" value="6"/> x <input type="number" name="y" value="7"/>
<button class="btn btn-default btn-sm" type="button" onclick="example2()">Multiply</button>
</li>
<li id="example3">
<h4>File Upload</h4>
<pre><code class="js"></code></pre>
<input type="file" name="upload"/>
<button class="btn btn-default btn-sm" type="button" onclick="example3()">Upload</button>
</li>
<li id="example4">
<h4>Request parameters (IP address)</h4>
<pre><code class="js"></code></pre>
<button class="btn btn-default btn-sm" type="button" onclick="example4()">What's my IP?</button>
</li>
<li id="example5">
<h4>Authentication</h4>
<pre><code class="js"></code></pre>
<p class="bg-info">Note: see <a href="#set_user" onclick="jumpTo('set_user')">api.set_user</a> for the authentication handler.</p>
Username: <input type="text" name="username" value="bob"/>
Password: <input type="password" name="password" value="secret"/>
<button class="btn btn-default btn-sm" type="button" onclick="example5()">Sign In</button>
</li>
<li id="example6">
<h4>Sessions (cookies)</h4>
<pre><code class="js"></code></pre>
<button class="btn btn-default btn-sm" type="button" onclick="example6()">What's my session id?</button>
</li>
<li id="example7">
<h4>Sessions (header)</h4>
<pre><code class="js"></code></pre>
<button class="btn btn-default btn-sm" type="button" onclick="example7()">What's my session id?</button>
</li>
<li id="example8">
<h4>Streaming Response</h4>
<pre><code class="js"></code></pre>
<button class="btn btn-default btn-sm" type="button" onclick="example8()">Countdown</button>
</li>
<li id="example9">
<h4>Objects</h4>
<pre><code class="js"></code></pre>
<button class="btn btn-default btn-sm" type="button" onclick="example9()">Submit</button>
</li>
<li id="example10">
<h4>Errors</h4>
<pre><code class="js"></code></pre>
<button class="btn btn-default btn-sm" type="button" onclick="example10()">Submit</button>
</li>
<li id="example11">
<h4>Errors</h4>
<pre><code class="js"></code></pre>
<button class="btn btn-default btn-sm" type="button" onclick="example11()">Submit</button>
</li>
<li id="example12">
<h4>Forms</h4>
<p>This example submits a form as a whole instead of individual arguments.
The form input names must match the function argument names.
</p>
<pre><code class="html"></code></pre>
<pre><code class="js"></code></pre>
<div class="example">
<form>
x: <input type="number" name="x" value="6"/><br/>
y: <input type="number" name="y" value="7"/>
</form>
<button class="btn btn-default btn-sm" type="button" onclick="example12()">Multiply</button>
</div>
</li>
<li id="example13">
<h4>JSON</h4>
<p>This example submits data as JSON instead of individual arguments.
The object keys must match the function argument names.
</p>
<pre><code class="js"></code></pre>
<button class="btn btn-default btn-sm" type="button" onclick="example13()">Multiply</button>
</li>
</ol>
<div id="spacer">
</div>
</div>
<div class="col-md-6" id="source">
<pre><code class="python"></code></pre>
</div>
</div>
<div class="row" id="output">
</div>
</div>
<script src="script.js"></script>
</body>
</html>

View file

@ -1,146 +0,0 @@
import os
import sys
import tempfile
import pico
# from pico.decorators import request_args, prehandle
from pico import PicoApp
from pico import client
from avalon import api as avalon
from avalon import io
import pyblish.api as pyblish
from pypeapp import execute
from pype import api as pype
# remove all Handlers created by pico
for name, handler in [(handler.get_name(), handler)
for handler in pype.Logger.logging.root.handlers[:]]:
if "pype" not in str(name).lower():
pype.Logger.logging.root.removeHandler(handler)
log = pype.Logger().get_logger(__name__, "aport")
SESSION = avalon.session
if not SESSION:
io.install()
@pico.expose()
def publish(json_data_path, staging_dir=None):
"""
Runs standalone pyblish and adds link to
data in external json file
It is necessary to run `register_plugin_path` if particular
host is needed
Args:
json_data_path (string): path to temp json file with
context data
staging_dir (string, optional): path to temp directory
Returns:
dict: return_json_path
Raises:
Exception: description
"""
staging_dir = staging_dir \
or tempfile.mkdtemp(prefix="pype_aport_")
return_json_path = os.path.join(staging_dir, "return_data.json")
log.debug("avalon.session is: \n{}".format(SESSION))
pype_start = os.path.join(os.getenv('PYPE_ROOT'),
"app", "pype-start.py")
args = [pype_start, "--publish",
"-pp", os.environ["PUBLISH_PATH"],
"-d", "rqst_json_data_path", json_data_path,
"-d", "post_json_data_path", return_json_path
]
log.debug(args)
# start standalone pyblish qml
execute([
sys.executable, "-u"
] + args,
cwd=os.getenv('AVALON_WORKDIR').replace("\\", "/")
)
return {"return_json_path": return_json_path}
@pico.expose()
def context(project, asset, task, app):
# http://localhost:4242/pipeline/context?project=this&asset=shot01&task=comp
os.environ["AVALON_PROJECT"] = project
io.Session["AVALON_PROJECT"] = project
avalon.update_current_task(task, asset, app)
project_code = pype.get_project()["data"].get("code", '')
os.environ["AVALON_PROJECTCODE"] = project_code
io.Session["AVALON_PROJECTCODE"] = project_code
hierarchy = pype.get_hierarchy()
os.environ["AVALON_HIERARCHY"] = hierarchy
io.Session["AVALON_HIERARCHY"] = hierarchy
fix_paths = {k: v.replace("\\", "/") for k, v in SESSION.items()
if isinstance(v, str)}
SESSION.update(fix_paths)
SESSION.update({"AVALON_HIERARCHY": hierarchy,
"AVALON_PROJECTCODE": project_code,
"current_dir": os.getcwd().replace("\\", "/")
})
return SESSION
@pico.expose()
def deregister_plugin_path():
if os.getenv("PUBLISH_PATH", None):
aport_plugin_path = [p.replace("\\", "/") for p in os.environ["PUBLISH_PATH"].split(
os.pathsep) if "aport" in p][0]
os.environ["PUBLISH_PATH"] = aport_plugin_path
else:
log.warning("deregister_plugin_path(): No PUBLISH_PATH is registred")
return "Publish path deregistered"
@pico.expose()
def register_plugin_path(publish_path):
deregister_plugin_path()
if os.getenv("PUBLISH_PATH", None):
os.environ["PUBLISH_PATH"] = os.pathsep.join(
os.environ["PUBLISH_PATH"].split(os.pathsep) +
[publish_path.replace("\\", "/")]
)
else:
os.environ["PUBLISH_PATH"] = publish_path
log.info(os.environ["PUBLISH_PATH"].split(os.pathsep))
return "Publish registered paths: {}".format(
os.environ["PUBLISH_PATH"].split(os.pathsep)
)
@pico.expose()
def nuke_test():
import nuke
n = nuke.createNode("Constant")
log.info(n)
app = PicoApp()
app.register_module(__name__)

pype/aport/pipeline.py Normal file
View file

@ -0,0 +1,130 @@
import sys
import os
import getpass
from app.api import Logger
from io_nonsingleton import DbConnector
io = DbConnector()
log = Logger.getLogger(__name__, "aport")
self = sys.modules[__name__]
self.SESSION = None
self._registered_root = {"_": ""}
self.AVALON_PROJECT = os.getenv("AVALON_PROJECT", None)
self.AVALON_ASSET = os.getenv("AVALON_ASSET", None)
self.AVALON_TASK = os.getenv("AVALON_TASK", None)
self.AVALON_SILO = os.getenv("AVALON_SILO", None)
def get_session():
if not self.SESSION:
io.install()
self.SESSION = io.Session
# for k, v in os.environ.items():
# if 'AVALON' in k:
# print(str((k, v)))
return self.SESSION
def update_current_task(task=None, asset=None, app=None):
"""Update active Session to a new task work area.
This updates the live Session to a different `asset`, `task` or `app`.
Args:
task (str): The task to set.
asset (str): The asset to set.
app (str): The app to set.
Returns:
dict: The changed key, values in the current Session.
"""
mapping = {
"AVALON_ASSET": asset,
"AVALON_TASK": task,
"AVALON_APP": app,
}
changed = {key: value for key, value in mapping.items() if value}
if not changed:
return
# Update silo when asset changed
if "AVALON_ASSET" in changed:
asset_document = io.find_one({"name": changed["AVALON_ASSET"],
"type": "asset"})
assert asset_document, "Asset must exist"
silo = asset_document["silo"]
if silo is None:
silo = asset_document["name"]
changed["AVALON_SILO"] = silo
parents = asset_document['data']['parents']
hierarchy = ""
if len(parents) > 0:
hierarchy = os.path.sep.join(parents)
changed['AVALON_HIERARCHY'] = hierarchy
# Compute work directory (with the temporary changed session so far)
project = io.find_one({"type": "project"},
projection={"config.template.work": True})
template = project["config"]["template"]["work"]
_session = self.SESSION.copy()
_session.update(changed)
changed["AVALON_WORKDIR"] = _format_work_template(template, _session)
# Update the full session in one go to avoid half updates
self.SESSION.update(changed)
# Update the environment
os.environ.update(changed)
return changed
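A minimal usage sketch, assuming get_session() has been called so the module-level SESSION and io are initialised, and that the asset and task below exist in the active project:

get_session()
changed = update_current_task(task="comp", asset="shot01", app="aport")
# only the keys that actually changed are returned, e.g. AVALON_ASSET,
# AVALON_TASK, AVALON_APP, AVALON_SILO, AVALON_HIERARCHY, AVALON_WORKDIR
print(changed["AVALON_WORKDIR"])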
def _format_work_template(template, session=None):
"""Return a formatted configuration template with a Session.
Note: This *cannot* format the templates for published files since the
session does not hold the context for a published file. Instead use
`get_representation_path` to parse the full path to a published file.
Args:
template (str): The template to format.
session (dict, Optional): The Session to use. If not provided use the
currently active global Session.
Returns:
str: The fully formatted path.
"""
if session is None:
session = self.SESSION
project = io.find_one({'type': 'project'})
return template.format(**{
"root": registered_root(),
"project": {
"name": project.get("name", session["AVALON_PROJECT"]),
"code": project["data"].get("code", ''),
},
"silo": session["AVALON_SILO"],
"hierarchy": session['AVALON_HIERARCHY'],
"asset": session["AVALON_ASSET"],
"task": session["AVALON_TASK"],
"app": session["AVALON_APP"],
"user": session.get("AVALON_USER", getpass.getuser())
})
def registered_root():
"""Return currently registered root"""
return os.path.normpath(
self._registered_root["_"]
or self.SESSION.get("AVALON_PROJECTS") or ""
)

View file

@ -1,12 +1,10 @@
from pype import api as pype
from pypeapp import Anatomy, config
log = pype.Logger().get_logger(__name__, "aport")
log = pype.Logger.getLogger(__name__, "aport")
def get_anatomy(**kwarg):
return Anatomy()
return pype.Anatomy
def get_dataflow(**kwarg):
@ -17,8 +15,7 @@ def get_dataflow(**kwarg):
assert any([host, cls]), log.error("aport.templates.get_dataflow():"
"Missing mandatory kwargs `host`, `cls`")
presets = config.get_init_presets()
aport_dataflow = getattr(presets["dataflow"], str(host), None)
aport_dataflow = getattr(pype.Dataflow, str(host), None)
aport_dataflow_node = getattr(aport_dataflow.nodes, str(cls), None)
if preset:
aport_dataflow_node = getattr(aport_dataflow_node, str(preset), None)
@ -35,8 +32,7 @@ def get_colorspace(**kwarg):
assert any([host, cls]), log.error("aport.templates.get_colorspace():"
"Missing mandatory kwargs `host`, `cls`")
presets = config.get_init_presets()
aport_colorspace = getattr(presets["colorspace"], str(host), None)
aport_colorspace = getattr(pype.Colorspace, str(host), None)
aport_colorspace_node = getattr(aport_colorspace, str(cls), None)
if preset:
aport_colorspace_node = getattr(aport_colorspace_node, str(preset), None)

View file

@ -6,6 +6,7 @@ from avalon import (
)
from pype import api as pype
import json
from pathlib import Path
class CollectContextDataFromAport(pyblish.api.ContextPlugin):
@ -26,27 +27,26 @@ class CollectContextDataFromAport(pyblish.api.ContextPlugin):
def process(self, context):
# get json paths from data
rqst_json_data_path = context.data['rqst_json_data_path']
post_json_data_path = context.data['post_json_data_path']
rqst_json_data_path = Path(context.data['rqst_json_data_path'])
post_json_data_path = Path(context.data['post_json_data_path'])
# get avalon session data and convert \ to /
session = avalon.session
fix_paths = {k: v.replace("\\", "/") for k, v in session.items()
if isinstance(v, str)}
session.update(fix_paths)
self.log.info(os.environ['AVALON_PROJECTS'])
projects = Path(session['AVALON_PROJECTS']).resolve()
wd = Path(session['AVALON_WORKDIR']).resolve()
session['AVALON_PROJECTS'] = str(projects)
session['AVALON_WORKDIR'] = str(wd)
context.data["avalonSession"] = session
self.log.debug("avalonSession: {}".format(session))
# get staging directory from received path to json
context.data["stagingDir"] = \
staging_dir = os.path.dirname(
post_json_data_path).replace("\\", "/")
if not os.path.exists(staging_dir):
os.makedirs(staging_dir)
context.data["stagingDir"] = staging_dir = post_json_data_path.parent
# get data from received json file
with open(rqst_json_data_path) as f:
context.data['json_data'] = json_data = json.load(f)
with rqst_json_data_path.open(mode='r') as f:
context.data['jsonData'] = json_data = json.load(f)
assert json_data, "No `data` in json file"
# get and check host type
@ -63,12 +63,13 @@ class CollectContextDataFromAport(pyblish.api.ContextPlugin):
pyblish.api.register_host(host)
# get path to studio templates
templates_dir = os.getenv("PYPE_CONFIG", None)
assert templates_dir, "Missing `PYPE_CONFIG` in os.environ"
templates_dir = os.getenv("PYPE_STUDIO_TEMPLATES", None)
assert templates_dir, "Missing `PYPE_STUDIO_TEMPLATES` in os.environ"
# get presets for host
presets_dir = os.path.join(templates_dir, "presets", host)
assert os.path.exists(presets_dir), "Required path `{}` doesn't exist".format(presets_dir)
assert os.path.exists(
presets_dir), "Required path `{}` doesn't exist".format(presets_dir)
# load all available preset json files
preset_data = dict()
@ -84,16 +85,16 @@ class CollectContextDataFromAport(pyblish.api.ContextPlugin):
# get current file
current_file = json_data.get("currentFile", None)
assert current_file, "No `currentFile` data in json file"
context.data["currentFile"] = current_file
context.data["currentFile"] = Path(current_file).resolve()
# get project data from avalon
project_data = pype.get_project()["data"]
project_data = pype.get_project_data()
assert project_data, "No `project_data` data in avalon db"
context.data["projectData"] = project_data
self.log.debug("project_data: {}".format(project_data))
# get asset data from avalon and fix all paths
asset_data = pype.get_asset()["data"]
asset_data = pype.get_asset_data()
assert asset_data, "No `asset_data` data in avalon db"
asset_data = {k: v.replace("\\", "/") for k, v in asset_data.items()
if isinstance(v, str)}

View file

@ -25,32 +25,41 @@ class CollectInstancesFromJson(pyblish.api.ContextPlugin):
order = pyblish.api.CollectorOrder - 0.48
def process(self, context):
a_session = context.data.get("avalonSession")
json_data = context.data.get("json_data", None)
json_data = context.data.get("jsonData", None)
assert json_data, "No `json_data` data in json file"
instances_data = json_data.get("instances", None)
assert instances_data, "No `instance` data in json file"
staging_dir = json_data.get("stagingDir", None)
assert staging_dir, "No `stagingDir` path in json file"
presets = context.data["presets"]
rules_tasks = presets["rules_tasks"]
ftrack_types = rules_tasks["ftrackTypes"]
assert ftrack_types, "No `ftrack_types` data in `/templates/presets/[host]/rules_tasks.json` file"
context.data["ftrackTypes"] = ftrack_types
asset_default = presets["asset_default"]
assert instances_data, "No `asset_default` data in json file"
assert asset_default, "No `asset_default` data in `/templates/presets/[host]/asset_default.json` file"
asset_name = a_session["AVALON_ASSET"]
entity = pype.get_asset(asset_name)
entity = io.find_one({"name": asset_name,
"type": "asset"})
# get frame start > first try from asset data
frame_start = context.data["assetData"].get("frameStart", None)
frame_start = context.data["assetData"].get("fstart", None)
if not frame_start:
self.log.debug("frame_start not on assetData")
# get frame start > second try from parent data
frame_start = entity["data"]["frameStart"]
frame_start = pype.get_data_hierarchical_attr(entity, "fstart")
if not frame_start:
self.log.debug("frame_start not on any parent entity")
# get frame start > third try from parent data
frame_start = asset_default["frameStart"]
frame_start = asset_default["fstart"]
assert frame_start, "No `frame_start` data found, "
"please set `fstart` on asset"
@ -60,7 +69,7 @@ class CollectInstancesFromJson(pyblish.api.ContextPlugin):
handles = context.data["assetData"].get("handles", None)
if not handles:
# get frame start > second try from parent data
handles = entity["data"]["handles"]
handles = pype.get_data_hierarchical_attr(entity, "handles")
if not handles:
# get frame start > third try from parent data
handles = asset_default["handles"]
@ -77,25 +86,38 @@ class CollectInstancesFromJson(pyblish.api.ContextPlugin):
# get current file host
host = a_session["AVALON_APP"]
family = "workfile"
family = "projectfile"
families = "filesave"
subset_name = "{0}_{1}".format(task, family)
subset_name = "{0}{1}".format(task, 'Default')
instance_name = "{0}_{1}_{2}".format(name,
family,
subset_name)
# Set label
label = "{0} - {1} > {2}".format(name, task, families)
# get project file instance Data
pf_instance = [inst for inst in instances_data
if inst.get("family", None) in 'projectfile']
self.log.debug('pf_instance: {}'.format(pf_instance))
# get working file into instance for publishing
instance = context.create_instance(subset_name)
instance = context.create_instance(instance_name)
if pf_instance:
instance.data.update(pf_instance[0])
instance.data.update({
"subset": subset_name,
"stagingDir": staging_dir,
"task": task,
"representation": ext[1:],
"host": host,
"asset": asset_name,
"label": label,
"name": name,
# "hierarchy": hierarchy,
# "parents": parents,
"family": family,
"families": [families],
"families": [families, 'ftrack'],
"publish": True,
# "files": files_list
})
instances.append(instance)
@ -103,11 +125,27 @@ class CollectInstancesFromJson(pyblish.api.ContextPlugin):
# for key, value in inst.items():
# self.log.debug('instance[key]: {}'.format(key))
#
version = inst.get("version", None)
assert version, "No `version` string in json file"
name = asset = inst.get("name", None)
assert name, "No `name` key in json_data.instance: {}".format(inst)
family = inst.get("family", None)
assert family, "No `family` key in json_data.instance: {}".format(inst)
assert family, "No `family` key in json_data.instance: {}".format(
inst)
if family in 'projectfile':
continue
files_list = inst.get("files", None)
assert files_list, "`files` are empty in json file"
hierarchy = inst.get("hierarchy", None)
assert hierarchy, "No `hierarchy` data in json file"
parents = inst.get("parents", None)
assert parents, "No `parents` data in json file"
tags = inst.get("tags", None)
if tags:
@ -117,32 +155,86 @@ class CollectInstancesFromJson(pyblish.api.ContextPlugin):
tasks = rules_tasks["defaultTasks"]
self.log.debug("tasks: `{}`".format(tasks))
subset_lst = []
subset_dict = {}
for task in tasks:
# create list of tasks for creation
if not inst.get('tasks', None):
inst['tasks'] = list()
if not inst.get('tasksTypes', None):
inst['tasksTypes'] = {}
# append task into list for later hierarchy creation
ftrack_task_type = ftrack_types[task]
if task not in inst['tasks']:
inst['tasks'].append(task)
inst['tasksTypes'][task] = ftrack_task_type
host = rules_tasks["taskHost"][task]
subsets = rules_tasks["taskSubsets"][task]
for sub in subsets:
self.log.debug(sub)
try:
isinstance(subset_dict[sub], list)
except Exception:
subset_dict[sub] = list()
for subset in subsets:
subset_name = "{0}_{1}".format(task, subset)
instance = context.create_instance(subset_name)
# instance.add(inst)
instance.data.update({
"subset": subset_name,
"task": task,
"frameStart": frame_start,
"handles": handles,
"host": host,
"asset": asset,
"label": "{0} - {1} > {2}".format(name, task, subset),
"name": subset_name,
"family": inst["family"],
"families": [subset],
"jsonData": inst,
# "parents": , # bez tasku
# "hierarchy": ,
"publish": True,
})
self.log.info("collected instance: {}".format(instance.data))
instances.append(instance)
subset_dict[sub].append(task)
subset_lst.extend([s for s in subsets if s not in subset_lst])
for subset in subset_lst:
if inst["representations"].get(subset, None):
repr = inst["representations"][subset]
ext = repr['representation']
else:
continue
family = inst["family"]
# skip if thumbnail is in subset name
if "thumbnail" in subset:
continue
elif "audio" in subset:
family = subset
subset_name = "{0}{1}".format(subset, "Main")
elif "reference" in subset:
family ="render"
subset_name = "{0}{1}".format(family, "Reference")
else:
subset_name = "{0}{1}".format(subset, 'Default')
# create unique subset's name
name = "{0}_{1}_{2}".format(asset,
inst["family"],
subset_name)
instance = context.create_instance(name)
files = [f for f in files_list
if subset in f or "thumbnail" in f
]
instance.data.update({
"subset": subset_name,
"stagingDir": staging_dir,
"tasks": subset_dict[subset],
"taskTypes": inst['tasksTypes'],
"fstart": frame_start,
"handles": handles,
"host": host,
"asset": asset,
"hierarchy": hierarchy,
"parents": parents,
"files": files,
"label": "{0} - {1}".format(
asset, subset_name),
"name": name,
"family": family,
"families": [subset, inst["family"], 'ftrack'],
"jsonData": inst,
"publish": True,
"version": version})
self.log.info(
"collected instance: {}".format(instance.data))
instances.append(instance)
context.data["instances"] = instances

View file

@ -0,0 +1,17 @@
import pyblish.api
class CollectAudioVersion(pyblish.api.InstancePlugin):
"""
"""
label = "Collect Audio Version"
order = pyblish.api.CollectorOrder
families = ['audio']
def process(self, instance):
self.log.info('Audio: {}'.format(instance.data['name']))
instance.data['version'] = '001'
self.log.info('Audio version to: {}'.format(instance.data['version']))

View file

@ -0,0 +1,34 @@
import pyblish.api
class CollectFrameranges(pyblish.api.InstancePlugin):
"""
Collecting frameranges needed for ftrack integration
Args:
instance (obj): pyblish instance
"""
label = "Collect Clip Frameranges"
order = pyblish.api.CollectorOrder
families = ['clip']
def process(self, instance):
# getting metadata from jsonData key
metadata = instance.data.get('jsonData').get('metadata')
# getting important metadata for time calculations
fps = metadata['ppro.timeline.fps']
sec_start = metadata['ppro.clip.start']
sec_end = metadata['ppro.clip.end']
fstart = instance.data.get('fstart')
fend = fstart + (sec_end * fps) - (sec_start * fps) - 1
self.log.debug("instance: {}, fps: {}\nsec_start: {}\nsec_end: {}\nfstart: {}\nfend: {}\n".format(
instance.data['name'],
fps, sec_start, sec_end, fstart, fend))
instance.data['startFrame'] = fstart
instance.data['endFrame'] = fend
instance.data['fps'] = metadata['ppro.timeline.fps']
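A small worked example of the frame-range arithmetic above, using illustrative metadata values rather than data from a real project:

fps = 25.0
sec_start = 10.0   # ppro.clip.start
sec_end = 14.0     # ppro.clip.end
fstart = 1001
fend = fstart + (sec_end * fps) - (sec_start * fps) - 1
assert fend == 1100  # a 4 s clip at 25 fps spans 100 frames: 1001-1100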

View file

@ -0,0 +1,72 @@
import pyblish.api
from avalon import api
class CollectHierarchyContext(pyblish.api.ContextPlugin):
"""Collecting hierarchy context from `parents` and `hierarchy` data
present in `clip` family instances coming from the request json data file
It will add `hierarchical_context` into each instance for integrate
plugins to be able to create needed parents for the context if they
don't exist yet
"""
label = "Collect Hierarchy Context"
order = pyblish.api.CollectorOrder + 0.1
def update_dict(self, ex_dict, new_dict):
for key in ex_dict:
if key in new_dict and isinstance(ex_dict[key], dict):
new_dict[key] = self.update_dict(ex_dict[key], new_dict[key])
else:
new_dict[key] = ex_dict[key]
return new_dict
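A minimal sketch of the recursive merge performed by update_dict, with placeholder shot names; keys from ex_dict are folded into new_dict, recursing wherever both sides hold nested dicts:

existing = {"seq01": {"childs": {"shot01": {"entity_type": "Shot"}}}}
incoming = {"seq01": {"childs": {"shot02": {"entity_type": "Shot"}}}}
merged = CollectHierarchyContext().update_dict(existing, incoming)
# merged["seq01"]["childs"] now holds both shot01 and shot02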
def process(self, context):
json_data = context.data.get("jsonData", None)
temp_context = {}
for instance in json_data['instances']:
if instance['family'] in 'projectfile':
continue
in_info = {}
name = instance['name']
# suppose that all instances are Shots
in_info['entity_type'] = 'Shot'
instance_pyblish = [
i for i in context.data["instances"] if i.data['asset'] in name][0]
in_info['custom_attributes'] = {
'fend': instance_pyblish.data['endFrame'],
'fstart': instance_pyblish.data['startFrame'],
'fps': instance_pyblish.data['fps']
}
in_info['tasks'] = instance['tasks']
parents = instance.get('parents', [])
actual = {name: in_info}
for parent in reversed(parents):
next_dict = {}
parent_name = parent["entityName"]
next_dict[parent_name] = {}
next_dict[parent_name]["entity_type"] = parent["entityType"]
next_dict[parent_name]["childs"] = actual
actual = next_dict
temp_context = self.update_dict(temp_context, actual)
self.log.debug(temp_context)
# TODO: 100% sure way of get project! Will be Name or Code?
project_name = api.Session["AVALON_PROJECT"]
final_context = {}
final_context[project_name] = {}
final_context[project_name]['entity_type'] = 'Project'
final_context[project_name]['childs'] = temp_context
# adding hierarchy context to instance
context.data["hierarchyContext"] = final_context
self.log.debug("context.data[hierarchyContext] is: {}".format(
context.data["hierarchyContext"]))

View file

@ -0,0 +1,132 @@
import pyblish.api
import os
from avalon import io, api
class IntegrateAssumedDestination(pyblish.api.InstancePlugin):
"""Generate the assumed destination path where the file will be stored"""
label = "Integrate Assumed Destination"
order = pyblish.api.IntegratorOrder - 0.05
families = ["clip", "projectfile"]
def process(self, instance):
self.create_destination_template(instance)
template_data = instance.data["assumedTemplateData"]
# template = instance.data["template"]
anatomy = instance.context.data['anatomy']
# template = anatomy.publish.path
anatomy_filled = anatomy.format(template_data)
mock_template = anatomy_filled.publish.path
# For now assume resources end up in a "resources" folder in the
# published folder
mock_destination = os.path.join(os.path.dirname(mock_template),
"resources")
# Clean the path
mock_destination = os.path.abspath(os.path.normpath(mock_destination))
# Define resource destination and transfers
resources = instance.data.get("resources", list())
transfers = instance.data.get("transfers", list())
for resource in resources:
# Add destination to the resource
source_filename = os.path.basename(resource["source"])
destination = os.path.join(mock_destination, source_filename)
# Force forward slashes to fix issue with software unable
# to work correctly with backslashes in specific scenarios
# (e.g. escape characters in PLN-151 V-Ray UDIM)
destination = destination.replace("\\", "/")
resource['destination'] = destination
# Collect transfers for the individual files of the resource
# e.g. all individual files of a cache or UDIM textures.
files = resource['files']
for fsrc in files:
fname = os.path.basename(fsrc)
fdest = os.path.join(mock_destination, fname)
transfers.append([fsrc, fdest])
instance.data["resources"] = resources
instance.data["transfers"] = transfers
def create_destination_template(self, instance):
"""Create a filepath based on the current data available
Example template:
{root}/{project}/{silo}/{asset}/publish/{subset}/v{version:0>3}/
{subset}.{representation}
Args:
instance: the instance to publish
Returns:
file path (str)
"""
# get all the stuff from the database
subset_name = instance.data["subset"]
self.log.info(subset_name)
asset_name = instance.data["asset"]
project_name = api.Session["AVALON_PROJECT"]
project = io.find_one({"type": "project",
"name": project_name},
projection={"config": True, "data": True})
template = project["config"]["template"]["publish"]
# anatomy = instance.context.data['anatomy']
asset = io.find_one({"type": "asset",
"name": asset_name,
"parent": project["_id"]})
assert asset, ("No asset found by the name '{}' "
"in project '{}'".format(asset_name, project_name))
silo = asset['silo']
subset = io.find_one({"type": "subset",
"name": subset_name,
"parent": asset["_id"]})
# assume there is no version yet, we start at `1`
version = None
version_number = 1
if subset is not None:
version = io.find_one({"type": "version",
"parent": subset["_id"]},
sort=[("name", -1)])
# if there is a subset there ought to be a version
if version is not None:
version_number += version["name"]
if instance.data.get('version'):
version_number = int(instance.data.get('version'))
hierarchy = asset['data']['parents']
if hierarchy:
# hierarchy = os.path.sep.join(hierarchy)
hierarchy = os.path.join(*hierarchy)
template_data = {"root": api.Session["AVALON_PROJECTS"],
"project": {"name": project_name,
"code": project['data']['code']},
"silo": silo,
"family": instance.data['family'],
"asset": asset_name,
"subset": subset_name,
"version": version_number,
"hierarchy": hierarchy,
"representation": "TEMP"}
instance.data["assumedTemplateData"] = template_data
self.log.info(template_data)
instance.data["template"] = template

View file

@@ -0,0 +1,21 @@
import pyblish.api
class IntegrateFtrackComponentOverwrite(pyblish.api.InstancePlugin):
"""
Set `component_overwrite` to True on all instances `ftrackComponentsList`
"""
order = pyblish.api.IntegratorOrder + 0.49
label = 'Overwrite ftrack created versions'
families = ["clip"]
optional = True
active = False
def process(self, instance):
component_list = instance.data['ftrackComponentsList']
for cl in component_list:
cl['component_overwrite'] = True
self.log.debug('Overwriting component {}'.format(
cl['component_data']['name']))

View file

@@ -0,0 +1,140 @@
import pyblish.api
from avalon import io
class IntegrateHierarchyToAvalon(pyblish.api.ContextPlugin):
"""
Create entities in Avalon based on the collected hierarchy data from Premiere
"""
order = pyblish.api.IntegratorOrder - 0.1
label = 'Integrate Hierarchy To Avalon'
families = ['clip']
def process(self, context):
if "hierarchyContext" not in context.data:
return
self.db = io
if not self.db.Session:
self.db.install()
self.av_project = None
input_data = context.data["hierarchyContext"]
self.import_to_avalon(input_data)
def import_to_avalon(self, input_data, parent=None):
for name in input_data:
self.log.info('input_data[name]: {}'.format(input_data[name]))
entity_data = input_data[name]
entity_type = entity_data['entity_type']
data = {}
# Process project
if entity_type.lower() == 'project':
entity = self.db.find_one({'type': 'project'})
# TODO: should be in validator?
assert (entity is not None), "Didn't find project in DB"
# get data from already existing project
for key, value in entity.get('data', {}).items():
data[key] = value
self.av_project = entity
# Raise error if project or parent are not set
elif self.av_project is None or parent is None:
raise AssertionError(
"Collected items are not in right order!"
)
# Else process asset
else:
entity = self.db.find_one({'type': 'asset', 'name': name})
# Create entity if it doesn't exist
if entity is None:
if self.av_project['_id'] == parent['_id']:
silo = None
elif parent['silo'] is None:
silo = parent['name']
else:
silo = parent['silo']
entity = self.create_avalon_asset(name, silo)
self.log.info('entity: {}'.format(entity))
self.log.info('data: {}'.format(entity.get('data', {})))
self.log.info('____1____')
data['entityType'] = entity_type
# TASKS
tasks = entity_data.get('tasks', [])
if tasks:
data['tasks'] = tasks
parents = []
visualParent = None
data = input_data[name]
if self.av_project['_id'] != parent['_id']:
visualParent = parent['_id']
parents.extend(parent.get('data', {}).get('parents', []))
parents.append(parent['name'])
data['visualParent'] = visualParent
data['parents'] = parents
self.db.update_many(
{'_id': entity['_id']},
{'$set': {
'data': data,
}})
entity = self.db.find_one({'type': 'asset', 'name': name})
self.log.info('entity: {}'.format(entity))
self.log.info('data: {}'.format(entity.get('data', {})))
self.log.info('____2____')
# Else get data from already existing
else:
self.log.info('entity: {}'.format(entity))
self.log.info('data: {}'.format(entity.get('data', {})))
self.log.info('________')
for key, value in entity.get('data', {}).items():
data[key] = value
data['entityType'] = entity_type
# TASKS
tasks = entity_data.get('tasks', [])
if tasks:
data['tasks'] = tasks
parents = []
visualParent = None
# do not store project's id as visualParent (silo asset)
if self.av_project['_id'] != parent['_id']:
visualParent = parent['_id']
parents.extend(parent.get('data', {}).get('parents', []))
parents.append(parent['name'])
data['visualParent'] = visualParent
data['parents'] = parents
# CUSTOM ATTRIBUTES
for k, val in entity_data.get('custom_attributes', {}).items():
data[k] = val
# Update entity data with input data
self.db.update_many(
{'_id': entity['_id']},
{'$set': {
'data': data,
}})
if 'childs' in entity_data:
self.import_to_avalon(entity_data['childs'], entity)
def create_avalon_asset(self, name, silo):
item = {
'schema': 'avalon-core:asset-2.0',
'name': name,
'silo': silo,
'parent': self.av_project['_id'],
'type': 'asset',
'data': {}
}
entity_id = self.db.insert_one(item).inserted_id
return self.db.find_one({'_id': entity_id})
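For orientation, a hedged sketch of the asset document this plugin ends up storing (field values are made up and depend on the incoming hierarchy):

# hypothetical shape of an updated asset document, not a real database record
asset_doc = {
    "schema": "avalon-core:asset-2.0",
    "type": "asset",
    "name": "e09s031_0040",
    "silo": "film",
    "data": {
        "entityType": "Shot",
        "visualParent": None,            # or the parent asset's _id
        "parents": ["e09", "e09s031"],
        "tasks": ["Compositing"],
        "fstart": 1001,
        "fend": 1100,
        "fps": 25.0
    }
}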

View file

@@ -0,0 +1,155 @@
import pyblish.api
class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):
"""
Create entities in Ftrack based on the collected hierarchy data from Premiere.
Example of the input data:
{
"ProjectXS": {
"entity_type": "Project",
"custom_attributes": {
"fps": 24,...
},
"tasks": [
"Compositing",
"Lighting",... *task must exist as task type in project schema*
],
"childs": {
"sq01": {
"entity_type": "Sequence",
...
}
}
}
}
"""
order = pyblish.api.IntegratorOrder
label = 'Integrate Hierarchy To Ftrack'
families = ["clip"]
optional = False
def process(self, context):
self.context = context
if "hierarchyContext" not in context.data:
return
self.ft_project = None
self.session = context.data["ftrackSession"]
input_data = context.data["hierarchyContext"]
# adding ftrack types from presets
ftrack_types = context.data['ftrackTypes']
self.import_to_ftrack(input_data, ftrack_types)
def import_to_ftrack(self, input_data, ftrack_types, parent=None):
for entity_name in input_data:
entity_data = input_data[entity_name]
entity_type = entity_data['entity_type'].capitalize()
if entity_type.lower() == 'project':
query = 'Project where full_name is "{}"'.format(entity_name)
entity = self.session.query(query).one()
self.ft_project = entity
self.task_types = self.get_all_task_types(entity)
elif self.ft_project is None or parent is None:
raise AssertionError(
"Collected items are not in right order!"
)
# try to find if entity already exists
else:
query = '{} where name is "{}" and parent_id is "{}"'.format(
entity_type, entity_name, parent['id']
)
try:
entity = self.session.query(query).one()
except Exception:
entity = None
# Create the entity if it doesn't exist
if entity is None:
entity = self.create_entity(
name=entity_name,
type=entity_type,
parent=parent
)
# self.log.info('entity: {}'.format(dict(entity)))
# CUSTOM ATTRIBUTES
custom_attributes = entity_data.get('custom_attributes', {})
instances = [
i for i in self.context.data["instances"] if i.data['asset'] in entity['name']]
for key in custom_attributes:
assert (key in entity['custom_attributes']), (
'Missing custom attribute')
entity['custom_attributes'][key] = custom_attributes[key]
for instance in instances:
instance.data['ftrackShotId'] = entity['id']
self.session.commit()
# TASKS
tasks = entity_data.get('tasks', [])
existing_tasks = []
tasks_to_create = []
for child in entity['children']:
if child.entity_type.lower() == 'task':
existing_tasks.append(child['name'])
# existing_tasks.append(child['type']['name'])
for task in tasks:
if task in existing_tasks:
print("Task {} already exists".format(task))
continue
tasks_to_create.append(task)
for task in tasks_to_create:
self.create_task(
name=task,
task_type=ftrack_types[task],
parent=entity
)
self.session.commit()
if 'childs' in entity_data:
self.import_to_ftrack(
entity_data['childs'], ftrack_types, entity)
def get_all_task_types(self, project):
tasks = {}
proj_template = project['project_schema']
temp_task_types = proj_template['_task_type_schema']['types']
for task_type in temp_task_types:
if task_type['name'] not in tasks:
tasks[task_type['name']] = task_type
return tasks
def create_task(self, name, task_type, parent):
task = self.session.create('Task', {
'name': name,
'parent': parent
})
# TODO: not safe - check that task_type exists in self.task_types
self.log.info(task_type)
self.log.info(self.task_types)
task['type'] = self.task_types[task_type]
self.session.commit()
return task
def create_entity(self, name, type, parent):
entity = self.session.create(type, {
'name': name,
'parent': parent
})
self.session.commit()
return entity
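The `ftrackTypes` data coming from presets is assumed here to map a task name to a task type name defined in the project schema, along these lines (hypothetical values):

# assumed shape of context.data['ftrackTypes'] (task name -> task type name)
ftrack_types = {
    "Compositing": "Compositing",
    "Lighting": "Lighting",
    "Animation": "Animation"
}
# create_task() then resolves the type entity via self.task_types[ftrack_types[task]]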

View file

@@ -0,0 +1,21 @@
import pyblish.api
import os
class IntegrateCleanThumbs(pyblish.api.InstancePlugin):
"""
Cleaning up thumbnail files after they have been integrated
"""
order = pyblish.api.IntegratorOrder + 9
label = 'Clean thumbnail files'
families = ["clip"]
optional = True
active = True
def process(self, instance):
remove_file = [tt for t in instance.data['transfers']
for tt in t if 'jpg' in tt if 'temp' not in tt.lower()]
if len(remove_file) == 1:
os.remove(remove_file[0])
self.log.info('Thumbnail image was erased')
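A small sketch of the filter above with hypothetical transfer pairs; only a jpg path that does not contain 'temp' (case-insensitive) gets picked up and removed:

# hypothetical transfers, as [source, destination] pairs
transfers = [
    ["C:/Users/artist/AppData/Local/Temp/e09s031_0040_thumb.jpg",
     "P:/projects/jakub_projectx/publish/e09s031_0040_thumb.jpg"],
    ["C:/Users/artist/AppData/Local/Temp/e09s031_0040.mp4",
     "P:/projects/jakub_projectx/publish/e09s031_0040.mp4"]
]
remove_file = [tt for t in transfers for tt in t
               if 'jpg' in tt if 'temp' not in tt.lower()]
# -> ['P:/projects/jakub_projectx/publish/e09s031_0040_thumb.jpg']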

View file

@@ -0,0 +1,19 @@
import pyblish.api
class IntegrateWorkfileVersion(pyblish.api.InstancePlugin):
"""
Removes the version from the instance so the published version is not synchronized with the workfile version
"""
order = pyblish.api.IntegratorOrder - 0.15
label = 'Do not synchronize workfile version'
families = ["clip"]
optional = True
active = False
def process(self, instance):
if instance.data.get('version'):
del instance.data['version']
self.log.info('Instance version was removed')

View file

@@ -0,0 +1,51 @@
import pyblish.api
import pype.api
import avalon.api
class ValidateAutoSyncOff(pyblish.api.ContextPlugin):
"""Ensure that autosync value in ftrack project is set to False.
If it is set to True while an event server running the sync-to-avalon
event is active, the integration to Avalon would be overridden.
"""
order = pyblish.api.ValidatorOrder
families = ['clip']
label = 'Ftrack project\'s auto sync off'
actions = [pype.api.RepairAction]
def process(self, context):
session = context.data["ftrackSession"]
project_name = avalon.api.Session["AVALON_PROJECT"]
query = 'Project where full_name is "{}"'.format(project_name)
project = session.query(query).one()
invalid = self.get_invalid(context)
assert not invalid, (
"Ftrack Project has 'Auto sync' set to On."
" That may cause issues during integration."
)
@staticmethod
def get_invalid(context):
session = context.data["ftrackSession"]
project_name = avalon.api.Session["AVALON_PROJECT"]
query = 'Project where full_name is "{}"'.format(project_name)
project = session.query(query).one()
invalid = None
if project.get('custom_attributes', {}).get(
'avalon_auto_sync', False):
invalid = project
return invalid
@classmethod
def repair(cls, context):
session = context.data["ftrackSession"]
invalid = cls.get_invalid(context)
invalid['custom_attributes']['avalon_auto_sync'] = False
session.commit()
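For completeness, the same repair could be done by hand with a bare ftrack_api session; a minimal sketch, assuming the credentials come from the usual FTRACK_* environment variables and the project name is known:

import ftrack_api

session = ftrack_api.Session()  # FTRACK_SERVER / FTRACK_API_USER / FTRACK_API_KEY
project = session.query(
    'Project where full_name is "jakub_projectx"').one()
if project['custom_attributes'].get('avalon_auto_sync'):
    project['custom_attributes']['avalon_auto_sync'] = False
    session.commit()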