Merge branch 'develop' into feature/PYPE-349_parallel_event_server

This commit is contained in:
Milan Kolar 2019-10-31 18:42:26 +01:00
commit 8df857bb89
3300 changed files with 17234 additions and 301395 deletions

.gitignore vendored

@@ -27,3 +27,9 @@ coverage.xml
*.cover
.hypothesis/
.pytest_cache/
# Node JS packages
##################
node_modules/
package-lock.json


@@ -3,15 +3,17 @@ import sys
from avalon import api as avalon
from pyblish import api as pyblish
from pypeapp import execute, Logger
from app import api as app
from .. import api
from .lib import set_avalon_workdir
t = app.Templates()
log = Logger().get_logger(__name__, "aport")
log = api.Logger.getLogger(__name__, "aport")
AVALON_CONFIG = os.getenv("AVALON_CONFIG", "pype")
ADDITIONAL_PLUGINS = ['ftrack']
PARENT_DIR = os.path.dirname(__file__)
PACKAGE_DIR = os.path.dirname(PARENT_DIR)
PLUGINS_DIR = os.path.join(PACKAGE_DIR, "plugins")
@@ -33,8 +35,31 @@ CREATE_PATH = os.path.join(PLUGINS_DIR, "aport", "create")
INVENTORY_PATH = os.path.join(PLUGINS_DIR, "aport", "inventory")
def register_additional_plugin_paths():
'''Add publish plugin paths for additional hosts
'''
for host in ADDITIONAL_PLUGINS:
publish_path = os.path.join(
PLUGINS_DIR, host, "publish").replace("\\", "/")
pyblish.register_plugin_path(publish_path)
# adding path to PUBLISH_PATH environment
if os.getenv("PUBLISH_PATH", None):
os.environ["PUBLISH_PATH"] = os.pathsep.join(
os.environ["PUBLISH_PATH"].split(os.pathsep) +
[publish_path]
)
else:
os.environ["PUBLISH_PATH"] = publish_path
log.info(
"Registered additional plugin path: "
"{}".format(publish_path))
def install():
set_avalon_workdir()
# api.set_avalon_workdir()
log.info("Registering Aport plug-ins..")
pyblish.register_plugin_path(PUBLISH_PATH)
@@ -42,6 +67,9 @@ def install():
avalon.register_plugin_path(avalon.Creator, CREATE_PATH)
avalon.register_plugin_path(avalon.InventoryAction, INVENTORY_PATH)
# additional plugins
register_additional_plugin_paths()
# Disable all families except for the ones we explicitly want to see
family_states = [
"imagesequence",
@@ -51,6 +79,9 @@ def install():
avalon.data["familiesStateDefault"] = False
avalon.data["familiesStateToggled"] = family_states
# load data from templates
api.load_data_from_templates()
# launch pico server
pico_server_launch()
@@ -81,7 +112,7 @@ def pico_server_launch():
"api"
]
execute(
app.forward(
args,
cwd=path
)


@@ -1,34 +1,90 @@
# api.py
import os
import sys
import tempfile
import pico
from pico import PicoApp
from pico.decorators import request_args, set_cookie, delete_cookie, stream
from pico.decorators import header, cookie
from app.api import forward, Logger
from werkzeug.exceptions import Unauthorized, ImATeapot, BadRequest
import pipeline as ppl
from avalon import api as avalon
from avalon import io
import pyblish.api as pyblish
from pypeapp import execute
from pype import api as pype
log = pype.Logger().get_logger(__name__, "aport")
SESSION = avalon.session
if not SESSION:
io.install()
log = Logger.getLogger(__name__, "aport")
@pico.expose()
def publish(json_data_path, gui):
def get_session():
ppl.AVALON_PROJECT = os.getenv("AVALON_PROJECT", None)
ppl.AVALON_ASSET = os.getenv("AVALON_ASSET", None)
ppl.AVALON_TASK = os.getenv("AVALON_TASK", None)
ppl.AVALON_SILO = os.getenv("AVALON_SILO", None)
return ppl.get_session()
@pico.expose()
def load_representations(project, representations):
'''Query data from the mongo db for the defined representations.
Args:
project (str): name of the project
representations (list): representations which are required
Returns:
data (dict): representations in last versions
# testing url:
http://localhost:4242/api/load_representations?project=jakub_projectx&representations=[{%22asset%22:%22e09s031_0040%22,%22subset%22:%22referenceDefault%22,%22representation%22:%22mp4%22},%20{%22asset%22:%22e09s031_0030%22,%22subset%22:%22referenceDefault%22,%22representation%22:%22mp4%22}]
# returning:
{"e09s031_0040_referenceDefault":{"_id":"5c6dabaa2af61756b02f7f32","schema":"pype:representation-2.0","type":"representation","parent":"5c6dabaa2af61756b02f7f31","name":"mp4","data":{"path":"C:\\Users\\hubert\\_PYPE_testing\\projects\\jakub_projectx\\thisFolder\\e09\\s031\\e09s031_0040\\publish\\clip\\referenceDefault\\v019\\jkprx_e09s031_0040_referenceDefault_v019.mp4","template":"{publish.root}/{publish.folder}/{version.main}/{publish.file}"},"dependencies":[],"context":{"root":"C:\\Users\\hubert\\_PYPE_testing\\projects","project":{"name":"jakub_projectx","code":"jkprx"},"task":"edit","silo":"thisFolder","asset":"e09s031_0040","family":"clip","subset":"referenceDefault","VERSION":19,"hierarchy":"thisFolder\\e09\\s031","representation":"mp4"}}}
'''
data = {}
# log.info("___project: {}".format(project))
# ppl.io.activate_project(project)
#
# from_mongo = ppl.io.find({"name": repr['representation'],
# "type": "representation"})[:]
for repr in representations:
log.info("asset: {}".format(repr['asset']))
# set context for each asset individually
context(project, repr['asset'], '')
# query data from mongo db for the asset's subset representation
related_repr = [r for r in ppl.io.find({"name": repr['representation'],
"type": "representation",
"context.asset": repr['asset']})[:]]
versions_dict = {r['context']['version']: i
for i, r in enumerate(related_repr)}
versions_list = list(versions_dict.keys())
# no explicit sort needed; max() below already picks the latest version
version_index_last = versions_dict[max(versions_list)]
log.info("version_index_last: {}".format(version_index_last))
# create name which will be used on timeline clip
name = '_'.join([repr['asset'], repr['subset']])
# log.info("___related_repr: {}".format(related_repr))
# assign data for the clip representation
version = ppl.io.find_one(
{'_id': related_repr[version_index_last]['parent']})
log.info("version: {}".format(version))
# fixing path workaround
if '.#####.mxf' in related_repr[version_index_last]['data']['path']:
related_repr[version_index_last]['data']['path'] = related_repr[version_index_last]['data']['path'].replace(
'.#####.mxf', '.mxf')
related_repr[version_index_last]['version'] = version
related_repr[version_index_last]['parentClip'] = repr['parentClip']
data[name] = related_repr[version_index_last]
return data
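For illustration, a minimal sketch of calling the exposed endpoint over HTTP with `requests`, based on the testing url in the docstring above (host, port and names are only as illustrative as that url):

import json
import requests

representations = [
    {"asset": "e09s031_0040",
     "subset": "referenceDefault",
     "representation": "mp4"}
]
# pico serves exposed functions under /api/<function_name>
resp = requests.get(
    "http://localhost:4242/api/load_representations",
    params={
        "project": "jakub_projectx",
        "representations": json.dumps(representations),
    },
)
print(resp.json())  # dict keyed by "<asset>_<subset>"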
@pico.expose()
def publish(send_json_path, get_json_path, gui):
"""
Run standalone pyblish and add a link to the
data in an external json file
@@ -37,82 +93,101 @@ def publish(json_data_path, gui):
host is needed
Args:
json_data_path (string): path to temp json file with
context data
staging_dir (string, optional): path to temp directory
send_json_path (string): path to temp json file with
sending context data
get_json_path (string): path to temp json file with
returning context data
Returns:
dict: return_json_path
dict: get_json_path
Raises:
Exception: description
"""
cwd = os.getenv('AVALON_WORKDIR').replace("\\", "/")
staging_dir = tempfile.mkdtemp(prefix="pype_aport_").replace("\\", "/")
log.info("staging_dir: {}".format(staging_dir))
return_json_path = os.path.join(staging_dir, "return_data.json").replace("\\", "/")
log.info("avalon.session is: \n{}".format(ppl.SESSION))
log.info("PUBLISH_PATH: \n{}".format(os.environ["PUBLISH_PATH"]))
log.info("avalon.session is: \n{}".format(SESSION))
pype_start = os.path.join(os.getenv('PYPE_ROOT'),
pype_start = os.path.join(os.getenv('PYPE_SETUP_ROOT'),
"app", "pype-start.py")
publish = "--publish-gui" if gui else "--publish"
args = [pype_start, publish,
args = [pype_start,
"--root", os.environ['AVALON_PROJECTS'], "--publish-gui",
"-pp", os.environ["PUBLISH_PATH"],
"-d", "rqst_json_data_path", json_data_path,
"-d", "post_json_data_path", return_json_path
"-d", "rqst_json_data_path", send_json_path,
"-d", "post_json_data_path", get_json_path
]
log.debug(args)
# start standalone pyblish qml
execute([
log.info("_aport.api Variable `AVALON_PROJECTS` had changed to `{0}`.".format(
os.environ['AVALON_PROJECTS']))
forward([
sys.executable, "-u"
] + args,
cwd=cwd
# cwd=cwd
)
return {"return_json_path": return_json_path}
return {"get_json_path": get_json_path}
@pico.expose()
def context(project_name, asset, task, app):
def context(project, asset, task, app='aport'):
os.environ["AVALON_PROJECT"] = ppl.AVALON_PROJECT = project
os.environ["AVALON_ASSET"] = ppl.AVALON_ASSET = asset
os.environ["AVALON_TASK"] = ppl.AVALON_TASK = task
os.environ["AVALON_SILO"] = ppl.AVALON_SILO = ''
ppl.get_session()
# log.info('ppl.SESSION: {}'.format(ppl.SESSION))
# http://localhost:4242/pipeline/context?project=this&asset=shot01&task=comp
os.environ["AVALON_PROJECT"] = project_name
io.Session["AVALON_PROJECT"] = project_name
ppl.update_current_task(task, asset, app)
avalon.update_current_task(task, asset, app)
project_code = ppl.io.find_one({"type": "project"})["data"].get("code", '')
project_code = pype.get_project()["data"].get("code", '')
os.environ["AVALON_PROJECTCODE"] = \
ppl.SESSION["AVALON_PROJECTCODE"] = project_code
os.environ["AVALON_PROJECTCODE"] = project_code
io.Session["AVALON_PROJECTCODE"] = project_code
parents = ppl.io.find_one({"type": 'asset',
"name": ppl.AVALON_ASSET})['data']['parents']
hierarchy = pype.get_hierarchy()
os.environ["AVALON_HIERARCHY"] = hierarchy
io.Session["AVALON_HIERARCHY"] = hierarchy
if parents and len(parents) > 0:
# hierarchy = os.path.sep.join(hierarchy)
hierarchy = os.path.join(*parents).replace("\\", "/")
fix_paths = {k: v.replace("\\", "/") for k, v in SESSION.items()
os.environ["AVALON_HIERARCHY"] = \
ppl.SESSION["AVALON_HIERARCHY"] = hierarchy
fix_paths = {k: v.replace("\\", "/") for k, v in ppl.SESSION.items()
if isinstance(v, str)}
SESSION.update(fix_paths)
SESSION.update({"AVALON_HIERARCHY": hierarchy,
"AVALON_PROJECTCODE": project_code,
"current_dir": os.getcwd().replace("\\", "/")
})
return SESSION
ppl.SESSION.update(fix_paths)
ppl.SESSION.update({"AVALON_HIERARCHY": hierarchy,
"AVALON_PROJECTCODE": project_code,
"current_dir": os.getcwd().replace("\\", "/")
})
return ppl.SESSION
@pico.expose()
def anatomy_fill(data):
from pype import api as pype
pype.load_data_from_templates()
anatomy = pype.Anatomy
return anatomy.format(data)
@pico.expose()
def deregister_plugin_path():
if os.getenv("PUBLISH_PATH", None):
aport_plugin_path = [p.replace("\\", "/") for p in os.environ["PUBLISH_PATH"].split(
os.pathsep) if "aport" in p][0]
aport_plugin_path = os.pathsep.join(
[p.replace("\\", "/")
for p in os.environ["PUBLISH_PATH"].split(os.pathsep)
if "aport" in p or
"ftrack" in p])
os.environ["PUBLISH_PATH"] = aport_plugin_path
else:
log.warning("deregister_plugin_path(): No PUBLISH_PATH is registred")
@@ -125,8 +200,8 @@ def register_plugin_path(publish_path):
deregister_plugin_path()
if os.getenv("PUBLISH_PATH", None):
os.environ["PUBLISH_PATH"] = os.pathsep.join(
os.environ["PUBLISH_PATH"].split(os.pathsep) +
[publish_path.replace("\\", "/")]
os.environ["PUBLISH_PATH"].split(os.pathsep)
+ [publish_path.replace("\\", "/")]
)
else:
os.environ["PUBLISH_PATH"] = publish_path
@@ -143,8 +218,8 @@ app.register_module(__name__)
# remove all Handlers created by pico
for name, handler in [(handler.get_name(), handler)
for handler in pype.Logger.logging.root.handlers[:]]:
for handler in Logger.logging.root.handlers[:]]:
if "pype" not in str(name).lower():
print(name)
print(handler)
pype.Logger.logging.root.removeHandler(handler)
Logger.logging.root.removeHandler(handler)
# SPLASH.hide_splash()


@@ -0,0 +1,432 @@
"""
Wrapper around interactions with the database
Copy of io module in avalon-core.
- In this case it does not work as a singleton with api.Session!
"""
import os
import time
import errno
import shutil
import logging
import tempfile
import functools
import contextlib
from avalon import schema
import requests
# Third-party dependencies
import pymongo
def auto_reconnect(func):
"""Handling auto reconnect in 3 retry times"""
@functools.wraps(func)
def decorated(*args, **kwargs):
object = args[0]
for retry in range(3):
try:
return func(*args, **kwargs)
except pymongo.errors.AutoReconnect:
object.log.error("Reconnecting..")
time.sleep(0.1)
else:
raise
return decorated
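A short sketch of applying the decorator (the class is hypothetical; it only needs a `log` attribute, since the wrapper reads `args[0].log`):

import logging

class FragileReader(object):
    log = logging.getLogger("FragileReader")

    @auto_reconnect
    def ping(self, client):
        # retried up to 3 times if pymongo raises AutoReconnect
        return client.server_info()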
class DbConnector(object):
log = logging.getLogger(__name__)
def __init__(self):
self.Session = {}
self._mongo_client = None
self._sentry_client = None
self._sentry_logging_handler = None
self._database = None
self._is_installed = False
def install(self):
"""Establish a persistent connection to the database"""
if self._is_installed:
return
logging.basicConfig()
self.Session.update(self._from_environment())
timeout = int(self.Session["AVALON_TIMEOUT"])
self._mongo_client = pymongo.MongoClient(
self.Session["AVALON_MONGO"], serverSelectionTimeoutMS=timeout)
for retry in range(3):
try:
t1 = time.time()
self._mongo_client.server_info()
except Exception:
self.log.error("Retrying..")
time.sleep(1)
timeout *= 1.5
else:
break
else:
raise IOError(
"ERROR: Couldn't connect to %s in "
"less than %.3f ms" % (self.Session["AVALON_MONGO"], timeout))
self.log.info("Connected to %s, delay %.3f s" % (
self.Session["AVALON_MONGO"], time.time() - t1))
self._install_sentry()
self._database = self._mongo_client[self.Session["AVALON_DB"]]
self._is_installed = True
def _install_sentry(self):
if "AVALON_SENTRY" not in self.Session:
return
try:
from raven import Client
from raven.handlers.logging import SentryHandler
from raven.conf import setup_logging
except ImportError:
# Note: There was a Sentry address in this Session
return self.log.warning("Sentry disabled, raven not installed")
client = Client(self.Session["AVALON_SENTRY"])
# Transmit log messages to Sentry
handler = SentryHandler(client)
handler.setLevel(logging.WARNING)
setup_logging(handler)
self._sentry_client = client
self._sentry_logging_handler = handler
self.log.info(
"Connected to Sentry @ %s" % self.Session["AVALON_SENTRY"]
)
def _from_environment(self):
Session = {
item[0]: os.getenv(item[0], item[1])
for item in (
# Root directory of projects on disk
("AVALON_PROJECTS", None),
# Name of current Project
("AVALON_PROJECT", ""),
# Name of current Asset
("AVALON_ASSET", ""),
# Name of current silo
("AVALON_SILO", ""),
# Name of current task
("AVALON_TASK", None),
# Name of current app
("AVALON_APP", None),
# Path to working directory
("AVALON_WORKDIR", None),
# Name of current Config
# TODO(marcus): Establish a suitable default config
("AVALON_CONFIG", "no_config"),
# Name of Avalon in graphical user interfaces
# Use this to customise the visual appearance of Avalon
# to better integrate with your surrounding pipeline
("AVALON_LABEL", "Avalon"),
# Used during any connections to the outside world
("AVALON_TIMEOUT", "1000"),
# Address to Asset Database
("AVALON_MONGO", "mongodb://localhost:27017"),
# Name of database used in MongoDB
("AVALON_DB", "avalon"),
# Address to Sentry
("AVALON_SENTRY", None),
# Address to Deadline Web Service
# E.g. http://192.167.0.1:8082
("AVALON_DEADLINE", None),
# Enable features that are not necessarily stable, at the user's own risk
("AVALON_EARLY_ADOPTER", None),
# Address of central asset repository, contains
# the following interface:
# /upload
# /download
# /manager (optional)
("AVALON_LOCATION", "http://127.0.0.1"),
# Boolean of whether to upload published material
# to central asset repository
("AVALON_UPLOAD", None),
# Generic username and password
("AVALON_USERNAME", "avalon"),
("AVALON_PASSWORD", "secret"),
# Unique identifier for instances in working files
("AVALON_INSTANCE_ID", "avalon.instance"),
("AVALON_CONTAINER_ID", "avalon.container"),
# Enable debugging
("AVALON_DEBUG", None),
) if os.getenv(item[0], item[1]) is not None
}
Session["schema"] = "avalon-core:session-1.0"
try:
schema.validate(Session)
except schema.ValidationError as e:
# TODO(marcus): Make this mandatory
self.log.warning(e)
return Session
def uninstall(self):
"""Close any connection to the database"""
try:
self._mongo_client.close()
except AttributeError:
pass
self._mongo_client = None
self._database = None
self._is_installed = False
def active_project(self):
"""Return the name of the active project"""
return self.Session["AVALON_PROJECT"]
def activate_project(self, project_name):
self.Session["AVALON_PROJECT"] = project_name
def projects(self):
"""List available projects
Returns:
list of project documents
"""
collection_names = self.collections()
for project in collection_names:
if project in ("system.indexes",):
continue
# Each collection will have exactly one project document
document = self.find_project(project)
if document is not None:
yield document
def locate(self, path):
"""Traverse a hierarchy from top-to-bottom
Example:
representation = locate(["hulk", "Bruce", "modelDefault", 1, "ma"])
Returns:
representation (ObjectId)
"""
components = zip(
("project", "asset", "subset", "version", "representation"),
path
)
parent = None
for type_, name in components:
latest = (type_ == "version") and name in (None, -1)
try:
if latest:
parent = self.find_one(
filter={
"type": type_,
"parent": parent
},
projection={"_id": 1},
sort=[("name", -1)]
)["_id"]
else:
parent = self.find_one(
filter={
"type": type_,
"name": name,
"parent": parent
},
projection={"_id": 1},
)["_id"]
except TypeError:
return None
return parent
@auto_reconnect
def collections(self):
return self._database.collection_names()
@auto_reconnect
def find_project(self, project):
return self._database[project].find_one({"type": "project"})
@auto_reconnect
def insert_one(self, item):
assert isinstance(item, dict), "item must be of type <dict>"
schema.validate(item)
return self._database[self.Session["AVALON_PROJECT"]].insert_one(item)
@auto_reconnect
def insert_many(self, items, ordered=True):
# check if all items are valid
assert isinstance(items, list), "`items` must be of type <list>"
for item in items:
assert isinstance(item, dict), "`item` must be of type <dict>"
schema.validate(item)
return self._database[self.Session["AVALON_PROJECT"]].insert_many(
items,
ordered=ordered)
@auto_reconnect
def find(self, filter, projection=None, sort=None):
return self._database[self.Session["AVALON_PROJECT"]].find(
filter=filter,
projection=projection,
sort=sort
)
@auto_reconnect
def find_one(self, filter, projection=None, sort=None):
assert isinstance(filter, dict), "filter must be <dict>"
return self._database[self.Session["AVALON_PROJECT"]].find_one(
filter=filter,
projection=projection,
sort=sort
)
@auto_reconnect
def save(self, *args, **kwargs):
return self._database[self.Session["AVALON_PROJECT"]].save(
*args, **kwargs)
@auto_reconnect
def replace_one(self, filter, replacement):
return self._database[self.Session["AVALON_PROJECT"]].replace_one(
filter, replacement)
@auto_reconnect
def update_many(self, filter, update):
return self._database[self.Session["AVALON_PROJECT"]].update_many(
filter, update)
@auto_reconnect
def distinct(self, *args, **kwargs):
return self._database[self.Session["AVALON_PROJECT"]].distinct(
*args, **kwargs)
@auto_reconnect
def drop(self, *args, **kwargs):
return self._database[self.Session["AVALON_PROJECT"]].drop(
*args, **kwargs)
@auto_reconnect
def delete_many(self, *args, **kwargs):
return self._database[self.Session["AVALON_PROJECT"]].delete_many(
*args, **kwargs)
def parenthood(self, document):
assert document is not None, "This is a bug"
parents = list()
while document.get("parent") is not None:
document = self.find_one({"_id": document["parent"]})
if document is None:
break
parents.append(document)
return parents
@contextlib.contextmanager
def tempdir(self):
tempdir = tempfile.mkdtemp()
try:
yield tempdir
finally:
shutil.rmtree(tempdir)
def download(self, src, dst):
"""Download `src` to `dst`
Arguments:
src (str): URL to source file
dst (str): Absolute path to destination file
Yields tuple (progress, error):
progress (int): Between 0-100
error (Exception): Any exception raised when first making connection
"""
try:
response = requests.get(
src,
stream=True,
auth=requests.auth.HTTPBasicAuth(
self.Session["AVALON_USERNAME"],
self.Session["AVALON_PASSWORD"]
)
)
except requests.ConnectionError as e:
yield None, e
return
with self.tempdir() as dirname:
tmp = os.path.join(dirname, os.path.basename(src))
with open(tmp, "wb") as f:
total_length = response.headers.get("content-length")
if total_length is None: # no content length header
f.write(response.content)
else:
downloaded = 0
total_length = int(total_length)
for data in response.iter_content(chunk_size=4096):
downloaded += len(data)
f.write(data)
yield int(100.0 * downloaded / total_length), None
try:
os.makedirs(os.path.dirname(dst))
except OSError as e:
# An already existing destination directory is fine.
if e.errno != errno.EEXIST:
raise
shutil.copy(tmp, dst)
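A hedged usage sketch of the connector (the environment values and project name are assumptions; the defaults are listed in _from_environment above):

import os

os.environ.setdefault("AVALON_MONGO", "mongodb://localhost:27017")
os.environ.setdefault("AVALON_DB", "avalon")

io = DbConnector()
io.install()
io.activate_project("jakub_projectx")  # hypothetical project name

asset = io.find_one({"type": "asset", "name": "e09s031_0040"})
print(asset)

io.uninstall()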


@@ -1,135 +1,26 @@
import os
import re
import sys
from avalon import io, api as avalon, lib as avalonlib
from pype import lib
from pype import api as pype
# from pypeapp.api import (Templates, Logger, format)
from pypeapp import Logger, Anatomy
log = Logger().get_logger(__name__, os.getenv("AVALON_APP", "pype-config"))
import pype.api as pype
def get_asset():
"""
Obtain Asset string from session or environment variable
Returns:
string: asset name
Raises:
log: error
"""
lib.set_io_database()
asset = io.Session.get("AVALON_ASSET", None) \
or os.getenv("AVALON_ASSET", None)
log.info("asset: {}".format(asset))
assert asset, log.error("missing `AVALON_ASSET`"
"in avalon session "
"or os.environ!")
return asset
def get_anatomy(**kwarg):
return pype.Anatomy
def get_context_data(
project_name=None, hierarchy=None, asset=None, task_name=None
):
"""
Collect all main contextual data
def format_anatomy(data):
from .templates import (
get_anatomy
)
file = script_name()
Args:
project (string, optional): project name
hierarchy (string, optional): hierarchy path
asset (string, optional): asset name
task (string, optional): task name
anatomy = get_anatomy()
Returns:
dict: contextual data
# TODO: perhaps should be in try!
padding = anatomy.render.padding
"""
if not task_name:
lib.set_io_database()
task_name = io.Session.get("AVALON_TASK", None) \
or os.getenv("AVALON_TASK", None)
assert task_name, log.error(
"missing `AVALON_TASK` in avalon session or os.environ!"
)
data.update({
"hierarchy": pype.get_hierarchy(),
"frame": "#" * padding,
"VERSION": pype.get_version_from_workfile(file)
})
application = avalonlib.get_application(os.environ["AVALON_APP_NAME"])
os.environ['AVALON_PROJECT'] = project_name
io.Session['AVALON_PROJECT'] = project_name
if not hierarchy:
hierarchy = pype.get_hierarchy()
project_doc = io.find_one({"type": "project"})
data = {
"task": task_name,
"asset": asset or get_asset(),
"project": {
"name": project_doc["name"],
"code": project_doc["data"].get("code", '')
},
"hierarchy": hierarchy,
"app": application["application_dir"]
}
return data
def set_avalon_workdir(
project=None, hierarchy=None, asset=None, task=None
):
"""
Updates os.environ and session with filled workdir
Args:
project (string, optional): project name
hierarchy (string, optional): hierarchy path
asset (string, optional): asset name
task (string, optional): task name
Returns:
os.environ[AVALON_WORKDIR]: workdir path
avalon.session[AVALON_WORKDIR]: workdir path
"""
lib.set_io_database()
awd = io.Session.get("AVALON_WORKDIR", None) or \
os.getenv("AVALON_WORKDIR", None)
data = get_context_data(project, hierarchy, asset, task)
if (not awd) or ("{" not in awd):
anatomy_filled = Anatomy(io.Session["AVALON_PROJECT"]).format(data)
awd = anatomy_filled["work"]["folder"]
awd_filled = os.path.normpath(format(awd, data))
io.Session["AVALON_WORKDIR"] = awd_filled
os.environ["AVALON_WORKDIR"] = awd_filled
log.info("`AVALON_WORKDIR` fixed to: {}".format(awd_filled))
def get_workdir_template(data=None):
"""
Obtain workdir templated path from Anatomy()
Args:
data (dict, optional): basic contextual data
Returns:
string: template path
"""
anatomy = Anatomy()
anatomy_filled = anatomy.format(data or get_context_data())
try:
work = anatomy_filled["work"]
except Exception as e:
log.error(
"{0} Error in get_workdir_template(): {1}".format(__name__, str(e))
)
return work
# log.info("format_anatomy:anatomy: {}".format(anatomy))
return anatomy.format(data)


@@ -1,252 +0,0 @@
# api.py
import os
import sys
import tempfile
import pico
from pico import PicoApp
from pico.decorators import request_args, set_cookie, delete_cookie, stream
from pico.decorators import header, cookie
from werkzeug.exceptions import Unauthorized, ImATeapot, BadRequest
from avalon import api as avalon
from avalon import io
import pyblish.api as pyblish
from pypeapp import execute
from pype import api as pype
log = pype.Logger().get_logger(__name__, "aport")
SESSION = avalon.session
if not SESSION:
io.install()
@pico.expose()
def publish(json_data_path, staging_dir=None):
"""
Runs standalone pyblish and adds link to
data in external json file
It is necessary to run `register_plugin_path` if particular
host is needed
Args:
json_data_path (string): path to temp json file with
context data
staging_dir (string, optional): path to temp directory
Returns:
dict: return_json_path
Raises:
Exception: description
"""
cwd = os.getenv('AVALON_WORKDIR').replace("\\", "/")
os.chdir(cwd)
log.info(os.getcwd())
staging_dir = tempfile.mkdtemp(prefix="pype_aport_").replace("\\", "/")
log.info("staging_dir: {}".format(staging_dir))
return_json_path = os.path.join(staging_dir, "return_data.json")
log.info("avalon.session is: \n{}".format(SESSION))
pype_start = os.path.join(os.getenv('PYPE_ROOT'),
"app", "pype-start.py")
args = [pype_start, "--publish",
"-pp", os.environ["PUBLISH_PATH"],
"-d", "rqst_json_data_path", json_data_path,
"-d", "post_json_data_path", return_json_path
]
log.debug(args)
# start standalone pyblish qml
execute([
sys.executable, "-u"
] + args,
cwd=cwd
)
return {"return_json_path": return_json_path}
@pico.expose()
def context(project, asset, task, app):
# http://localhost:4242/pipeline/context?project=this&asset=shot01&task=comp
os.environ["AVALON_PROJECT"] = project
io.Session["AVALON_PROJECT"] = project
avalon.update_current_task(task, asset, app)
project_code = pype.get_project()["data"].get("code", '')
os.environ["AVALON_PROJECTCODE"] = project_code
io.Session["AVALON_PROJECTCODE"] = project_code
hierarchy = pype.get_hierarchy()
os.environ["AVALON_HIERARCHY"] = hierarchy
io.Session["AVALON_HIERARCHY"] = hierarchy
fix_paths = {k: v.replace("\\", "/") for k, v in SESSION.items()
if isinstance(v, str)}
SESSION.update(fix_paths)
SESSION.update({"AVALON_HIERARCHY": hierarchy,
"AVALON_PROJECTCODE": project_code,
"current_dir": os.getcwd().replace("\\", "/")
})
return SESSION
@pico.expose()
def deregister_plugin_path():
if os.getenv("PUBLISH_PATH", None):
aport_plugin_path = [p.replace("\\", "/") for p in os.environ["PUBLISH_PATH"].split(
os.pathsep) if "aport" in p][0]
os.environ["PUBLISH_PATH"] = aport_plugin_path
else:
log.warning("deregister_plugin_path(): No PUBLISH_PATH is registred")
return "Publish path deregistered"
@pico.expose()
def register_plugin_path(publish_path):
deregister_plugin_path()
if os.getenv("PUBLISH_PATH", None):
os.environ["PUBLISH_PATH"] = os.pathsep.join(
os.environ["PUBLISH_PATH"].split(os.pathsep) +
[publish_path.replace("\\", "/")]
)
else:
os.environ["PUBLISH_PATH"] = publish_path
log.info(os.environ["PUBLISH_PATH"].split(os.pathsep))
return "Publish registered paths: {}".format(
os.environ["PUBLISH_PATH"].split(os.pathsep)
)
@pico.expose()
def nuke_test():
import nuke
n = nuke.createNode("Constant")
log.info(n)
@pico.expose()
def hello(who='world'):
return 'Hello %s' % who
@pico.expose()
def multiply(x, y):
return x * y
@pico.expose()
def fail():
raise Exception('fail!')
@pico.expose()
def make_coffee():
raise ImATeapot()
@pico.expose()
def upload(upload, filename):
if not filename.endswith('.txt'):
raise BadRequest('Upload must be a .txt file!')
return upload.read().decode()
@pico.expose()
@request_args(ip='remote_addr')
def my_ip(ip):
return ip
@pico.expose()
@request_args(ip=lambda req: req.remote_addr)
def my_ip3(ip):
return ip
@pico.prehandle()
def set_user(request, kwargs):
if request.authorization:
if request.authorization.password != 'secret':
raise Unauthorized('Incorrect username or password')
request.user = request.authorization.username
else:
request.user = None
@pico.expose()
@request_args(username='user')
def current_user(username):
return username
@pico.expose()
@request_args(session=cookie('session_id'))
def session_id(session):
return session
@pico.expose()
@set_cookie()
def start_session():
return {'session_id': '42'}
@pico.expose()
@delete_cookie('session_id')
def end_session():
return True
@pico.expose()
@request_args(session=header('x-session-id'))
def session_id2(session):
return session
@pico.expose()
@stream()
def countdown(n=10):
for i in reversed(range(n)):
yield '%i' % i
time.sleep(0.5)
@pico.expose()
def user_description(user):
return '{name} is a {occupation} aged {age}'.format(**user)
@pico.expose()
def show_source():
return open(__file__.replace('.pyc', '.py')).read()
app = PicoApp()
app.register_module(__name__)
# remove all Handlers created by pico
for name, handler in [(handler.get_name(), handler)
for handler in Logger().logging.root.handlers[:]]:
if "pype" not in str(name).lower():
print(name)
print(handler)
Logger().logging.root.removeHandler(handler)


@@ -1,196 +0,0 @@
<!DOCTYPE html>
<html>
<head>
<meta name="viewport" content="width=device-width, initial-scale=1">
<title>Pico Example - Everything</title>
<!-- Load the pico Javascript client, always automatically available at /pico.js -->
<script src="/pico.js"></script>
<!-- Or load our module proxy -->
<script src="/api.js"></script>
<link rel="stylesheet" href="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.6/css/bootstrap.min.css" integrity="sha384-1q8mTJOASx8j1Au+a5WDVnPi2lkFfwwEAa8hDDdjZlpLegxhjVME1fgjWPGmkzs7" crossorigin="anonymous">
<link rel="stylesheet" href="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.6/css/bootstrap-theme.min.css" integrity="sha384-fLW2N01lMqjakBkx3l/M9EahuwpSfeNvV63J5ezn3uZzapT0u7EYsXMjQV+0En5r" crossorigin="anonymous">
<link rel="stylesheet" href="//cdnjs.cloudflare.com/ajax/libs/highlight.js/9.6.0/styles/default.min.css">
<script src="//cdnjs.cloudflare.com/ajax/libs/highlight.js/9.6.0/highlight.min.js"></script>
<script></script>
<style type="text/css">
html, body {
height: 100%;
margin: 0px;
padding: 0px;
}
div {
padding: 5px;
}
#container {
height: 100%;
}
#header {
height: 5%;
}
#main {
height: 70%;
}
#output {
background-color: #333;
color: #aaa;
min-height: 15%;
overflow-y: scroll;
padding: 20px;
position: fixed;
bottom: 0px;
width: 100%;
}
.error {
color: #f00 !important;
}
#examples li{
padding: 10px;
margin: 10px;
background-color: silver;
}
code {
border-radius: 0;
margin: 5px;
white-space: pre !important;
}
#source {
height: 100%;
}
#examples {
height: 100%;
}
#spacer {
height: 20%;
}
.highlight {
background-color: yellow;
}
</style>
</head>
<body>
<div id="container">
<div class="row row-eq-height">
<div class="col-md-12">
<h1>Pico Examples</h1>
<p>Here we show some simple examples of using Pico. Click any <code>api.X</code> link to see the corresponding Python source.</p>
</div>
</div>
<div class="row row-eq-height" id="main">
<div class="col-md-6" id="examples">
<ol>
<li id="example1">
<h4>Hello World</h4>
<pre><code class="js"></code></pre>
Name: <input type="text" name="name" value="Bob"/>
<button class="btn btn-default btn-sm" type="button" onclick="example1()">Submit</button>
</li>
<li id="deregister">
<h4>deregister_plugin_path</h4>
<pre><code class="js"></code></pre>
<button class="btn btn-default btn-sm" type="button" onclick="deregister()">Deregister</button>
</li>
<li id="register">
<h4>register_plugin_path</h4>
<pre><code class="js"></code></pre>
Path: <input type="text" name="path" value="C:/Users/hubert/CODE/pype-setup/repos/pype-config/pype/plugins/premiere/publish"/>
<button class="btn btn-default btn-sm" type="button" onclick="register()">Register path</button>
</li>
<li id="example2">
<h4>Numeric Multiplication</h4>
<pre><code class="js"></code></pre>
<input type="number" name="x" value="6"/> x <input type="number" name="y" value="7"/>
<button class="btn btn-default btn-sm" type="button" onclick="example2()">Multiply</button>
</li>
<li id="example3">
<h4>File Upload</h4>
<pre><code class="js"></code></pre>
<input type="file" name="upload"/>
<button class="btn btn-default btn-sm" type="button" onclick="example3()">Upload</button>
</li>
<li id="example4">
<h4>Request parameters (IP address)</h4>
<pre><code class="js"></code></pre>
<button class="btn btn-default btn-sm" type="button" onclick="example4()">What's my IP?</button>
</li>
<li id="example5">
<h4>Authentication</h4>
<pre><code class="js"></code></pre>
<p class="bg-info">Note: see <a href="#set_user" onclick="jumpTo('set_user')">api.set_user</a> for the authentication handler.</p>
Username: <input type="text" name="username" value="bob"/>
Password: <input type="password" name="password" value="secret"/>
<button class="btn btn-default btn-sm" type="button" onclick="example5()">Sign In</button>
</li>
<li id="example6">
<h4>Sessions (cookies)</h4>
<pre><code class="js"></code></pre>
<button class="btn btn-default btn-sm" type="button" onclick="example6()">What's my session id?</button>
</li>
<li id="example7">
<h4>Sessions (header)</h4>
<pre><code class="js"></code></pre>
<button class="btn btn-default btn-sm" type="button" onclick="example7()">What's my session id?</button>
</li>
<li id="example8">
<h4>Streaming Response</h4>
<pre><code class="js"></code></pre>
<button class="btn btn-default btn-sm" type="button" onclick="example8()">Countdown</button>
</li>
<li id="example9">
<h4>Objects</h4>
<pre><code class="js"></code></pre>
<button class="btn btn-default btn-sm" type="button" onclick="example9()">Submit</button>
</li>
<li id="example10">
<h4>Errors</h4>
<pre><code class="js"></code></pre>
<button class="btn btn-default btn-sm" type="button" onclick="example10()">Submit</button>
</li>
<li id="example11">
<h4>Errors</h4>
<pre><code class="js"></code></pre>
<button class="btn btn-default btn-sm" type="button" onclick="example11()">Submit</button>
</li>
<li id="example12">
<h4>Forms</h4>
<p>This example submits a form as a whole instead of individual arguments.
The form input names must match the function argument names.
</p>
<pre><code class="html"></code></pre>
<pre><code class="js"></code></pre>
<div class="example">
<form>
x: <input type="number" name="x" value="6"/><br/>
y: <input type="number" name="y" value="7"/>
</form>
<button class="btn btn-default btn-sm" type="button" onclick="example12()">Multiply</button>
</div>
</li>
<li id="example13">
<h4>JSON</h4>
<p>This example submits data as JSON instead of individual arguments.
The object keys must match the function argument names.
</p>
<pre><code class="js"></code></pre>
<button class="btn btn-default btn-sm" type="button" onclick="example13()">Multiply</button>
</li>
</ol>
<div id="spacer">
</div>
</div>
<div class="col-md-6" id="source">
<pre><code class="python"></code></pre>
</div>
</div>
<div class="row" id="output">
</div>
</div>
<script src="script.js"></script>
</body>
</html>


@@ -1,146 +0,0 @@
import os
import sys
import tempfile
import pico
# from pico.decorators import request_args, prehandle
from pico import PicoApp
from pico import client
from avalon import api as avalon
from avalon import io
import pyblish.api as pyblish
from pypeapp import execute
from pype import api as pype
# remove all Handlers created by pico
for name, handler in [(handler.get_name(), handler)
for handler in pype.Logger.logging.root.handlers[:]]:
if "pype" not in str(name).lower():
pype.Logger.logging.root.removeHandler(handler)
log = pype.Logger().get_logger(__name__, "aport")
SESSION = avalon.session
if not SESSION:
io.install()
@pico.expose()
def publish(json_data_path, staging_dir=None):
"""
Runs standalone pyblish and adds link to
data in external json file
It is necessary to run `register_plugin_path` if particular
host is needed
Args:
json_data_path (string): path to temp json file with
context data
staging_dir (string, optional): path to temp directory
Returns:
dict: return_json_path
Raises:
Exception: description
"""
staging_dir = staging_dir \
or tempfile.mkdtemp(prefix="pype_aport_")
return_json_path = os.path.join(staging_dir, "return_data.json")
log.debug("avalon.session is: \n{}".format(SESSION))
pype_start = os.path.join(os.getenv('PYPE_ROOT'),
"app", "pype-start.py")
args = [pype_start, "--publish",
"-pp", os.environ["PUBLISH_PATH"],
"-d", "rqst_json_data_path", json_data_path,
"-d", "post_json_data_path", return_json_path
]
log.debug(args)
# start standalone pyblish qml
execute([
sys.executable, "-u"
] + args,
cwd=os.getenv('AVALON_WORKDIR').replace("\\", "/")
)
return {"return_json_path": return_json_path}
@pico.expose()
def context(project, asset, task, app):
# http://localhost:4242/pipeline/context?project=this&asset=shot01&task=comp
os.environ["AVALON_PROJECT"] = project
io.Session["AVALON_PROJECT"] = project
avalon.update_current_task(task, asset, app)
project_code = pype.get_project()["data"].get("code", '')
os.environ["AVALON_PROJECTCODE"] = project_code
io.Session["AVALON_PROJECTCODE"] = project_code
hierarchy = pype.get_hierarchy()
os.environ["AVALON_HIERARCHY"] = hierarchy
io.Session["AVALON_HIERARCHY"] = hierarchy
fix_paths = {k: v.replace("\\", "/") for k, v in SESSION.items()
if isinstance(v, str)}
SESSION.update(fix_paths)
SESSION.update({"AVALON_HIERARCHY": hierarchy,
"AVALON_PROJECTCODE": project_code,
"current_dir": os.getcwd().replace("\\", "/")
})
return SESSION
@pico.expose()
def deregister_plugin_path():
if os.getenv("PUBLISH_PATH", None):
aport_plugin_path = [p.replace("\\", "/") for p in os.environ["PUBLISH_PATH"].split(
os.pathsep) if "aport" in p][0]
os.environ["PUBLISH_PATH"] = aport_plugin_path
else:
log.warning("deregister_plugin_path(): No PUBLISH_PATH is registred")
return "Publish path deregistered"
@pico.expose()
def register_plugin_path(publish_path):
deregister_plugin_path()
if os.getenv("PUBLISH_PATH", None):
os.environ["PUBLISH_PATH"] = os.pathsep.join(
os.environ["PUBLISH_PATH"].split(os.pathsep) +
[publish_path.replace("\\", "/")]
)
else:
os.environ["PUBLISH_PATH"] = publish_path
log.info(os.environ["PUBLISH_PATH"].split(os.pathsep))
return "Publish registered paths: {}".format(
os.environ["PUBLISH_PATH"].split(os.pathsep)
)
@pico.expose()
def nuke_test():
import nuke
n = nuke.createNode("Constant")
log.info(n)
app = PicoApp()
app.register_module(__name__)

pype/aport/pipeline.py Normal file

@@ -0,0 +1,130 @@
import sys
import os
import getpass
from app.api import Logger
from io_nonsingleton import DbConnector
io = DbConnector()
log = Logger.getLogger(__name__, "aport")
self = sys.modules[__name__]
self.SESSION = None
self._registered_root = {"_": ""}
self.AVALON_PROJECT = os.getenv("AVALON_PROJECT", None)
self.AVALON_ASSET = os.getenv("AVALON_ASSET", None)
self.AVALON_TASK = os.getenv("AVALON_TASK", None)
self.AVALON_SILO = os.getenv("AVALON_SILO", None)
def get_session():
if not self.SESSION:
io.install()
self.SESSION = io.Session
# for k, v in os.environ.items():
# if 'AVALON' in k:
# print(str((k, v)))
return self.SESSION
def update_current_task(task=None, asset=None, app=None):
"""Update active Session to a new task work area.
This updates the live Session to a different `asset`, `task` or `app`.
Args:
task (str): The task to set.
asset (str): The asset to set.
app (str): The app to set.
Returns:
dict: The changed key, values in the current Session.
"""
mapping = {
"AVALON_ASSET": asset,
"AVALON_TASK": task,
"AVALON_APP": app,
}
changed = {key: value for key, value in mapping.items() if value}
if not changed:
return
# Update silo when asset changed
if "AVALON_ASSET" in changed:
asset_document = io.find_one({"name": changed["AVALON_ASSET"],
"type": "asset"})
assert asset_document, "Asset must exist"
silo = asset_document["silo"]
if silo is None:
silo = asset_document["name"]
changed["AVALON_SILO"] = silo
parents = asset_document['data']['parents']
hierarchy = ""
if len(parents) > 0:
hierarchy = os.path.sep.join(parents)
changed['AVALON_HIERARCHY'] = hierarchy
# Compute work directory (with the temporary changed session so far)
project = io.find_one({"type": "project"},
projection={"config.template.work": True})
template = project["config"]["template"]["work"]
_session = self.SESSION.copy()
_session.update(changed)
changed["AVALON_WORKDIR"] = _format_work_template(template, _session)
# Update the full session in one go to avoid half updates
self.SESSION.update(changed)
# Update the environment
os.environ.update(changed)
return changed
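A hedged call sketch (asset and task names are illustrative and assume the documents exist in the database):

get_session()  # make sure self.SESSION is populated first
changed = update_current_task(task="edit", asset="e09s031_0040", app="aport")
# `changed` contains only the updated keys, e.g. AVALON_ASSET, AVALON_TASK,
# AVALON_APP, AVALON_SILO, AVALON_HIERARCHY and AVALON_WORKDIR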
def _format_work_template(template, session=None):
"""Return a formatted configuration template with a Session.
Note: This *cannot* format the templates for published files since the
session does not hold the context for a published file. Instead use
`get_representation_path` to parse the full path to a published file.
Args:
template (str): The template to format.
session (dict, Optional): The Session to use. If not provided use the
currently active global Session.
Returns:
str: The fully formatted path.
"""
if session is None:
session = self.SESSION
project = io.find_one({'type': 'project'})
return template.format(**{
"root": registered_root(),
"project": {
"name": project.get("name", session["AVALON_PROJECT"]),
"code": project["data"].get("code", ''),
},
"silo": session["AVALON_SILO"],
"hierarchy": session['AVALON_HIERARCHY'],
"asset": session["AVALON_ASSET"],
"task": session["AVALON_TASK"],
"app": session["AVALON_APP"],
"user": session.get("AVALON_USER", getpass.getuser())
})
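For intuition, a sketch of the formatting with an illustrative template and session; the real template comes from the project's config.template.work document:

template = "{root}/{project[code]}/{hierarchy}/{asset}/work/{task}/{app}"
session = {
    "AVALON_PROJECTS": "/projects",
    "AVALON_PROJECT": "jakub_projectx",
    "AVALON_SILO": "thisFolder",
    "AVALON_HIERARCHY": "thisFolder/e09/s031",
    "AVALON_ASSET": "e09s031_0040",
    "AVALON_TASK": "edit",
    "AVALON_APP": "aport",
}
# _format_work_template(template, session) would then resolve to roughly:
# /projects/jkprx/thisFolder/e09/s031/e09s031_0040/work/edit/aport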
def registered_root():
"""Return currently registered root"""
return os.path.normpath(
self._registered_root["_"]
or self.SESSION.get("AVALON_PROJECTS") or ""
)


@@ -1,12 +1,10 @@
from pype import api as pype
from pypeapp import Anatomy, config
log = pype.Logger().get_logger(__name__, "aport")
log = pype.Logger.getLogger(__name__, "aport")
def get_anatomy(**kwarg):
return Anatomy()
return pype.Anatomy
def get_dataflow(**kwarg):
@@ -17,8 +15,7 @@ def get_dataflow(**kwarg):
assert any([host, cls]), log.error("aport.templates.get_dataflow():"
"Missing mandatory kwargs `host`, `cls`")
presets = config.get_init_presets()
aport_dataflow = getattr(presets["dataflow"], str(host), None)
aport_dataflow = getattr(pype.Dataflow, str(host), None)
aport_dataflow_node = getattr(aport_dataflow.nodes, str(cls), None)
if preset:
aport_dataflow_node = getattr(aport_dataflow_node, str(preset), None)
@@ -35,8 +32,7 @@ def get_colorspace(**kwarg):
assert any([host, cls]), log.error("aport.templates.get_colorspace():"
"Missing mandatory kwargs `host`, `cls`")
presets = config.get_init_presets()
aport_colorspace = getattr(presets["colorspace"], str(host), None)
aport_colorspace = getattr(pype.Colorspace, str(host), None)
aport_colorspace_node = getattr(aport_colorspace, str(cls), None)
if preset:
aport_colorspace_node = getattr(aport_colorspace_node, str(preset), None)


@@ -0,0 +1,200 @@
import os
import sys
import argparse
import logging
import collections
import json
import re
from pype.vendor import ftrack_api
from pype.ftrack import BaseAction
from avalon import io, inventory, schema
from pype.ftrack.lib.io_nonsingleton import DbConnector
class PypeUpdateFromV2_2_0(BaseAction):
"""This action is to remove silo field from database and changes asset
schema to newer version
WARNING: it is NOT for situations when you want to switch from avalon-core
to Pype's avalon-core!!!
"""
#: Action identifier.
identifier = "silos.doctor"
#: Action label.
label = "Pype Update"
variant = "- v2.2.0 to v2.3.0 or higher"
#: Action description.
description = "Use when Pype was updated from v2.2.0 to v2.3.0 or higher"
#: roles that are allowed to register this action
role_list = ["Pypeclub", "Administrator"]
icon = "{}/ftrack/action_icons/PypeUpdate.svg".format(
os.environ.get("PYPE_STATICS_SERVER", "")
)
# connector to MongoDB (Avalon mongo)
db_con = DbConnector()
def discover(self, session, entities, event):
""" Validation """
if len(entities) != 1:
return False
if entities[0].entity_type.lower() != "project":
return False
return True
def interface(self, session, entities, event):
if event['data'].get('values', {}):
return
items = []
item_splitter = {'type': 'label', 'value': '---'}
title = "Updated Pype from v 2.2.0 to v2.3.0 or higher"
items.append({
"type": "label",
"value": (
"NOTE: This doctor action should be used ONLY when Pype"
" was updated from v2.2.0 to v2.3.0 or higher.<br><br><br>"
)
})
items.append({
"type": "label",
"value": (
"Select if want to process <b>all synchronized projects</b>"
" or <b>selection</b>."
)
})
items.append({
"type": "enumerator",
"name": "__process_all__",
"data": [{
"label": "All synchronized projects",
"value": True
}, {
"label": "Selection",
"value": False
}],
"value": False
})
items.append({
"type": "label",
"value": (
"<br/><br/><h2>Synchronized projects:</h2>"
"<i>(ignore if <strong>\"ALL projects\"</strong> selected)</i>"
)
})
self.log.debug("Getting all Ftrack projects")
# Get all Ftrack projects
all_ftrack_projects = [
project["full_name"] for project in session.query("Project").all()
]
self.log.debug("Getting Avalon projects that are also in the Ftrack")
# Get Avalon projects that are in Ftrack
self.db_con.install()
possible_projects = [
project["name"] for project in self.db_con.projects()
if project["name"] in all_ftrack_projects
]
for project in possible_projects:
item_label = {
"type": "label",
"value": project
}
item = {
"label": "- process",
"name": project,
"type": 'boolean',
"value": False
}
items.append(item_splitter)
items.append(item_label)
items.append(item)
if len(possible_projects) == 0:
return {
"success": False,
"message": (
"Nothing to process."
" There are not projects synchronized to avalon."
)
}
else:
return {
"items": items,
"title": title
}
def launch(self, session, entities, event):
if 'values' not in event['data']:
return
projects_selection = {
True: [],
False: []
}
process_all = None
values = event['data']['values']
for key, value in values.items():
if key == "__process_all__":
process_all = value
continue
projects_selection[value].append(key)
# Skip if process_all value is not boolean
# - may happen when user delete string line in combobox
if not isinstance(process_all, bool):
self.log.warning(
"Nothing was processed. User didn't select if want to process"
" selection or all projects!"
)
return {
"success": False,
"message": (
"Nothing was processed. You must select if want to process"
" \"selection\" or \"all projects\"!"
)
}
projects_to_process = projects_selection[True]
if process_all:
projects_to_process.extend(projects_selection[False])
self.db_con.install()
for project in projects_to_process:
self.log.debug("Processing project \"{}\"".format(project))
self.db_con.Session["AVALON_PROJECT"] = project
self.log.debug("- Unsetting silos on assets")
self.db_con.update_many(
{"type": "asset"},
{"$unset": {"silo": ""}}
)
self.log.debug("- setting schema of assets to v.3")
self.db_con.update_many(
{"type": "asset"},
{"$set": {"schema": "avalon-core:asset-3.0"}}
)
return True
def register(session, plugins_presets={}):
"""Register plugin. Called when used as an plugin."""
if not isinstance(session, ftrack_api.session.Session):
return
PypeUpdateFromV2_2_0(session, plugins_presets).register()


@@ -45,18 +45,7 @@ def import_to_avalon(
return output
# Validate if entity name match REGEX in schema
try:
avalon_check_name(entity)
except ValidationError:
msg = (
"{} - name \"{}\" includes unsupported symbols"
" like \"dash\" or \"space\""
).format(ent_path, name)
log.error(msg)
errors.append({'Unsupported character': msg})
output['errors'] = errors
return output
avalon_check_name(entity)
entity_type = entity.entity_type
# Project ////////////////////////////////////////////////////////////////
@@ -212,11 +201,6 @@ def import_to_avalon(
entity, session, custom_attributes
)
# 1. hierarchical entity have silo set to None
silo = None
if len(data['parents']) > 0:
silo = data['parents'][0]
name = entity['name']
avalon_asset = None
@@ -250,9 +234,8 @@
)
if avalon_asset is None:
item = {
'schema': "avalon-core:asset-2.0",
'schema': "avalon-core:asset-3.0",
'name': name,
'silo': silo,
'parent': ObjectId(projectId),
'type': 'asset',
'data': data
@@ -263,14 +246,7 @@
ent_path, project_name
))
# Raise error if it seems to be different ent. with same name
elif (
avalon_asset['data']['parents'] != data['parents'] or
avalon_asset['silo'] != silo
):
db_asset_path_items = [project_name,]
db_asset_path_items.extend(avalon_asset['data']['parents'])
db_asset_path_items.append(name)
elif avalon_asset['data']['parents'] != data['parents']:
msg = (
"{} - In Avalon DB already exists entity with name \"{}\""
"\n- \"{}\""
@@ -285,7 +261,7 @@
mongo_id = avalon_asset['_id']
else:
if avalon_asset['name'] != entity['name']:
if silo is None or changeability_check_childs(entity) is False:
if changeability_check_childs(entity) is False:
msg = (
'{} - You can\'t change name "{}" to "{}"'
', avalon wouldn\'t work properly!'
@@ -298,10 +274,7 @@
session.commit()
errors.append({'Changed name error': msg})
if (
avalon_asset['silo'] != silo or
avalon_asset['data']['parents'] != data['parents']
):
if avalon_asset['data']['parents'] != data['parents']:
old_path = '/'.join(avalon_asset['data']['parents'])
new_path = '/'.join(data['parents'])
@@ -313,10 +286,7 @@
moved_back = False
if 'visualParent' in avalon_asset['data']:
if silo is None:
asset_parent_id = avalon_asset['parent']
else:
asset_parent_id = avalon_asset['data']['visualParent']
asset_parent_id = avalon_asset['data']['visualParent'] or avalon_asset['parent']
asset_parent = database[project_name].find_one(
{'_id': ObjectId(asset_parent_id)}
@@ -364,7 +334,6 @@
{'_id': ObjectId(mongo_id)},
{'$set': {
'name': name,
'silo': silo,
'data': enter_data,
'parent': ObjectId(projectId)
}})
@@ -614,36 +583,24 @@ def get_project_apps(entity):
return apps
def avalon_check_name(entity, inSchema=None):
ValidationError = jsonschema.ValidationError
alright = True
name = entity['name']
if " " in name:
alright = False
def avalon_check_name(entity, in_schema=None):
default_pattern = "^[a-zA-Z0-9_.]*$"
data = {}
data['data'] = {}
data['type'] = 'asset'
schema = "avalon-core:asset-2.0"
# TODO: does the project schema define any REGEX check?
if entity.entity_type in ['Project']:
# data['type'] = 'project'
name = entity['full_name']
# schema = "avalon-core:project-2.0"
name = entity["name"]
schema_name = "asset-3.0"
data['silo'] = 'Film'
if in_schema:
schema_name = in_schema
elif entity.entity_type.lower() == "project":
name = entity["full_name"]
schema_name = "project-2.0"
if inSchema is not None:
schema = inSchema
data['schema'] = schema
data['name'] = name
try:
avalon.schema.validate(data)
except ValidationError:
alright = False
if alright is False:
msg = '"{}" includes unsupported symbols like "dash" or "space"'
schema_obj = avalon.schema._cache.get(schema_name + ".json")
name_pattern = schema_obj.get("properties", {}).get("name", {}).get(
"pattern", default_pattern
)
if not re.match(name_pattern, name):
msg = "\"{}\" includes unsupported symbols like \"dash\" or \"space\""
raise ValueError(msg.format(name))
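A quick sketch of the pattern check (the names are illustrative):

import re

default_pattern = "^[a-zA-Z0-9_.]*$"
assert re.match(default_pattern, "e09s031_0040")       # letters, digits, _ pass
assert not re.match(default_pattern, "shot 01-final")  # space and dash fail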


@@ -192,7 +192,7 @@ class DbConnector(object):
) if os.getenv(item[0], item[1]) is not None
}
Session["schema"] = "avalon-core:session-1.0"
Session["schema"] = "avalon-core:session-2.0"
try:
schema.validate(Session)
except schema.ValidationError as e:


@@ -77,7 +77,7 @@ def on_open(*args):
# Show outdated pop-up
def _on_show_inventory():
import avalon.tools.cbsceneinventory as tool
import avalon.tools.sceneinventory as tool
tool.show(parent=parent)
dialog = popup.Popup(parent=parent)


@@ -178,7 +178,7 @@ def on_open(_):
# Show outdated pop-up
def _on_show_inventory():
import avalon.tools.cbsceneinventory as tool
import avalon.tools.sceneinventory as tool
tool.show(parent=parent)
dialog = popup.Popup(parent=parent)


@@ -75,8 +75,8 @@ def override_toolbox_ui():
"res")
icons = os.path.join(res, "icons")
import avalon.tools.cbsceneinventory as inventory
import avalon.tools.cbloader as loader
import avalon.tools.sceneinventory as inventory
import avalon.tools.loader as loader
from avalon.maya.pipeline import launch_workfiles_app
import mayalookassigner


@@ -301,7 +301,13 @@ def attribute_values(attr_values):
"""
original = [(attr, cmds.getAttr(attr)) for attr in attr_values]
# NOTE(antirotor): this didn't work for some reason for Yeti attributes
# original = [(attr, cmds.getAttr(attr)) for attr in attr_values]
original = []
for attr in attr_values:
type = cmds.getAttr(attr, type=True)
value = cmds.getAttr(attr)
original.append((attr, str(value) if type == "string" else value))
try:
for attr, value in attr_values.items():
if isinstance(value, string_types):
@@ -1768,6 +1774,11 @@ def set_scene_fps(fps, update=True):
'48000': '48000fps'}
# pull from mapping
# this should convert float string to float and int to int
# so 25.0 is converted to 25, but 23.98 will be still float.
decimals = int(str(fps-int(fps))[2:])
if decimals == 0:
fps = int(fps)
unit = fps_mapping.get(str(fps), None)
if unit is None:
raise ValueError("Unsupported FPS value: `%s`" % fps)


@@ -5,8 +5,8 @@ from avalon import api as avalon
from pyblish import api as pyblish
from .workio import (
open,
save,
open_file,
save_file,
current_file,
has_unsaved_changes,
file_extensions,
@@ -21,8 +21,8 @@ from .tags import add_tags_from_presets
__all__ = [
# Workfiles API
"open",
"save",
"open_file",
"save_file",
"current_file",
"has_unsaved_changes",
"file_extensions",


@@ -15,7 +15,7 @@ def has_unsaved_changes():
return True
def save(filepath):
def save_file(filepath):
project = hiero.core.projects()[-1]
if project:
project.saveAs(filepath)
@@ -24,7 +24,7 @@ def save(filepath):
project.saveAs(filepath)
def open(filepath):
def open_file(filepath):
hiero.core.openProject(filepath)
return True


@@ -6,6 +6,7 @@ from avalon import (
)
from pype import api as pype
import json
from pathlib import Path
class CollectContextDataFromAport(pyblish.api.ContextPlugin):
@@ -26,27 +27,26 @@ class CollectContextDataFromAport(pyblish.api.ContextPlugin):
def process(self, context):
# get json paths from data
rqst_json_data_path = context.data['rqst_json_data_path']
post_json_data_path = context.data['post_json_data_path']
rqst_json_data_path = Path(context.data['rqst_json_data_path'])
post_json_data_path = Path(context.data['post_json_data_path'])
# get avalon session data and convert \ to /
session = avalon.session
fix_paths = {k: v.replace("\\", "/") for k, v in session.items()
if isinstance(v, str)}
session.update(fix_paths)
self.log.info(os.environ['AVALON_PROJECTS'])
projects = Path(session['AVALON_PROJECTS']).resolve()
wd = Path(session['AVALON_WORKDIR']).resolve()
session['AVALON_PROJECTS'] = str(projects)
session['AVALON_WORKDIR'] = str(wd)
context.data["avalonSession"] = session
self.log.debug("avalonSession: {}".format(session))
# get staging directory from received path to json
context.data["stagingDir"] = \
staging_dir = os.path.dirname(
post_json_data_path).replace("\\", "/")
if not os.path.exists(staging_dir):
os.makedirs(staging_dir)
context.data["stagingDir"] = staging_dir = post_json_data_path.parent
# get data from received json file
with open(rqst_json_data_path) as f:
context.data['json_data'] = json_data = json.load(f)
with rqst_json_data_path.open(mode='r') as f:
context.data['jsonData'] = json_data = json.load(f)
assert json_data, "No `data` in json file"
# get and check host type
@@ -63,12 +63,13 @@ class CollectContextDataFromAport(pyblish.api.ContextPlugin):
pyblish.api.register_host(host)
# get path to studio templates
templates_dir = os.getenv("PYPE_CONFIG", None)
assert templates_dir, "Missing `PYPE_CONFIG` in os.environ"
templates_dir = os.getenv("PYPE_STUDIO_TEMPLATES", None)
assert templates_dir, "Missing `PYPE_STUDIO_TEMPLATES` in os.environ"
# get presets for host
presets_dir = os.path.join(templates_dir, "presets", host)
assert os.path.exists(presets_dir), "Required path `{}` doesn't exist".format(presets_dir)
assert os.path.exists(
presets_dir), "Required path `{}` doesn't exist".format(presets_dir)
# load all available preset json files
preset_data = dict()
@@ -84,16 +85,16 @@
# get current file
current_file = json_data.get("currentFile", None)
assert current_file, "No `currentFile` data in json file"
context.data["currentFile"] = current_file
context.data["currentFile"] = Path(current_file).resolve()
# get project data from avalon
project_data = pype.get_project()["data"]
project_data = pype.get_project_data()
assert project_data, "No `project_data` data in avalon db"
context.data["projectData"] = project_data
self.log.debug("project_data: {}".format(project_data))
# get asset data from avalon and fix all paths
asset_data = pype.get_asset()["data"]
asset_data = pype.get_asset_data()
assert asset_data, "No `asset_data` data in avalon db"
asset_data = {k: v.replace("\\", "/") for k, v in asset_data.items()
if isinstance(v, str)}
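
A minimal sketch of the session path clean-up this hunk introduces, assuming a plain dict of strings rather than the real avalon session object:

    from pathlib import Path

    def normalize_session_paths(session):
        # unify separators first, then resolve the key directories
        fixed = {k: v.replace("\\", "/") for k, v in session.items()
                 if isinstance(v, str)}
        session.update(fixed)
        for key in ("AVALON_PROJECTS", "AVALON_WORKDIR"):
            if session.get(key):
                session[key] = str(Path(session[key]).resolve())
        return session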

View file

@ -25,32 +25,41 @@ class CollectInstancesFromJson(pyblish.api.ContextPlugin):
order = pyblish.api.CollectorOrder - 0.48
def process(self, context):
a_session = context.data.get("avalonSession")
json_data = context.data.get("json_data", None)
json_data = context.data.get("jsonData", None)
assert json_data, "No `json_data` data in json file"
instances_data = json_data.get("instances", None)
assert instances_data, "No `instance` data in json file"
staging_dir = json_data.get("stagingDir", None)
assert staging_dir, "No `stagingDir` path in json file"
presets = context.data["presets"]
rules_tasks = presets["rules_tasks"]
ftrack_types = rules_tasks["ftrackTypes"]
assert ftrack_types, "No `ftrack_types` data in `/templates/presets/[host]/rules_tasks.json` file"
context.data["ftrackTypes"] = ftrack_types
asset_default = presets["asset_default"]
assert instances_data, "No `asset_default` data in json file"
assert asset_default, "No `asset_default` data in `/templates/presets/[host]/asset_default.json` file"
asset_name = a_session["AVALON_ASSET"]
entity = pype.get_asset(asset_name)
entity = io.find_one({"name": asset_name,
"type": "asset"})
# get frame start > first try from asset data
frame_start = context.data["assetData"].get("frameStart", None)
frame_start = context.data["assetData"].get("fstart", None)
if not frame_start:
self.log.debug("frame_start not on assetData")
# get frame start > second try from parent data
frame_start = entity["data"]["frameStart"]
frame_start = pype.get_data_hierarchical_attr(entity, "fstart")
if not frame_start:
self.log.debug("frame_start not on any parent entity")
# get frame start > third try from parent data
frame_start = asset_default["frameStart"]
frame_start = asset_default["fstart"]
assert frame_start, ("No `frame_start` data found, "
"please set `fstart` on asset")
@ -60,7 +69,7 @@ class CollectInstancesFromJson(pyblish.api.ContextPlugin):
handles = context.data["assetData"].get("handles", None)
if not handles:
# get frame start > second try from parent data
handles = entity["data"]["handles"]
handles = pype.get_data_hierarchical_attr(entity, "handles")
if not handles:
# get frame start > third try from parent data
handles = asset_default["handles"]
@ -77,25 +86,38 @@ class CollectInstancesFromJson(pyblish.api.ContextPlugin):
# get current file host
host = a_session["AVALON_APP"]
family = "workfile"
family = "projectfile"
families = "filesave"
subset_name = "{0}_{1}".format(task, family)
subset_name = "{0}{1}".format(task, 'Default')
instance_name = "{0}_{1}_{2}".format(name,
family,
subset_name)
# Set label
label = "{0} - {1} > {2}".format(name, task, families)
# get project file instance Data
pf_instance = [inst for inst in instances_data
if inst.get("family", None) in 'projectfile']
self.log.debug('pf_instance: {}'.format(pf_instance))
# get working file into instance for publishing
instance = context.create_instance(subset_name)
instance = context.create_instance(instance_name)
if pf_instance:
instance.data.update(pf_instance[0])
instance.data.update({
"subset": subset_name,
"stagingDir": staging_dir,
"task": task,
"representation": ext[1:],
"host": host,
"asset": asset_name,
"label": label,
"name": name,
# "hierarchy": hierarchy,
# "parents": parents,
"family": family,
"families": [families],
"families": [families, 'ftrack'],
"publish": True,
# "files": files_list
})
instances.append(instance)
@ -103,11 +125,27 @@ class CollectInstancesFromJson(pyblish.api.ContextPlugin):
# for key, value in inst.items():
# self.log.debug('instance[key]: {}'.format(key))
#
version = inst.get("version", None)
assert version, "No `version` string in json file"
name = asset = inst.get("name", None)
assert name, "No `name` key in json_data.instance: {}".format(inst)
family = inst.get("family", None)
assert family, "No `family` key in json_data.instance: {}".format(inst)
assert family, "No `family` key in json_data.instance: {}".format(
inst)
if family in 'projectfile':
continue
files_list = inst.get("files", None)
assert files_list, "`files` are empty in json file"
hierarchy = inst.get("hierarchy", None)
assert hierarchy, "No `hierarchy` data in json file"
parents = inst.get("parents", None)
assert parents, "No `parents` data in json file"
tags = inst.get("tags", None)
if tags:
@ -117,32 +155,86 @@ class CollectInstancesFromJson(pyblish.api.ContextPlugin):
tasks = rules_tasks["defaultTasks"]
self.log.debug("tasks: `{}`".format(tasks))
subset_lst = []
subset_dict = {}
for task in tasks:
# create list of tasks for creation
if not inst.get('tasks', None):
inst['tasks'] = list()
if not inst.get('tasksTypes', None):
inst['tasksTypes'] = {}
# append task into list for later hierarchy creation
ftrack_task_type = ftrack_types[task]
if task not in inst['tasks']:
inst['tasks'].append(task)
inst['tasksTypes'][task] = ftrack_task_type
host = rules_tasks["taskHost"][task]
subsets = rules_tasks["taskSubsets"][task]
for sub in subsets:
self.log.debug(sub)
try:
isinstance(subset_dict[sub], list)
except Exception:
subset_dict[sub] = list()
for subset in subsets:
subset_name = "{0}_{1}".format(task, subset)
instance = context.create_instance(subset_name)
# instance.add(inst)
instance.data.update({
"subset": subset_name,
"task": task,
"frameStart": frame_start,
"handles": handles,
"host": host,
"asset": asset,
"label": "{0} - {1} > {2}".format(name, task, subset),
"name": subset_name,
"family": inst["family"],
"families": [subset],
"jsonData": inst,
# "parents": , # bez tasku
# "hierarchy": ,
"publish": True,
})
self.log.info("collected instance: {}".format(instance.data))
instances.append(instance)
subset_dict[sub].append(task)
subset_lst.extend([s for s in subsets if s not in subset_lst])
for subset in subset_lst:
if inst["representations"].get(subset, None):
repr = inst["representations"][subset]
ext = repr['representation']
else:
continue
family = inst["family"]
# skip if thumbnail in name of subset
if "thumbnail" in subset:
continue
elif "audio" in subset:
family = subset
subset_name = "{0}{1}".format(subset, "Main")
elif "reference" in subset:
family = "render"
subset_name = "{0}{1}".format(family, "Reference")
else:
subset_name = "{0}{1}".format(subset, 'Default')
# create unique subset's name
name = "{0}_{1}_{2}".format(asset,
inst["family"],
subset_name)
instance = context.create_instance(name)
files = [f for f in files_list
if subset in f or "thumbnail" in f
]
instance.data.update({
"subset": subset_name,
"stagingDir": staging_dir,
"tasks": subset_dict[subset],
"taskTypes": inst['tasksTypes'],
"fstart": frame_start,
"handles": handles,
"host": host,
"asset": asset,
"hierarchy": hierarchy,
"parents": parents,
"files": files,
"label": "{0} - {1}".format(
asset, subset_name),
"name": name,
"family": family,
"families": [subset, inst["family"], 'ftrack'],
"jsonData": inst,
"publish": True,
"version": version})
self.log.info(
"collected instance: {}".format(instance.data))
instances.append(instance)
context.data["instances"] = instances
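
The subset naming rules used above can be summarized in a small helper; this is a sketch distilled from the collector, not a pipeline API:

    def build_subset_name(subset, family):
        # thumbnails never get their own instance
        if "thumbnail" in subset:
            return None, None
        if "audio" in subset:
            return subset, "{0}{1}".format(subset, "Main")
        if "reference" in subset:
            return "render", "{0}{1}".format("render", "Reference")
        return family, "{0}{1}".format(subset, "Default")

    # build_subset_name("reference", "clip") -> ("render", "renderReference")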

View file

@ -26,4 +26,6 @@ class CollectContextLabel(pyblish.api.ContextPlugin):
# Set label
label = "{host} - {scene}".format(host=host.title(), scene=base)
if host == "standalonepublisher":
label = host.title()
context.data["label"] = label

View file

@ -131,6 +131,12 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin):
else:
root = cwd
if data.get("ftrack"):
f = data.get("ftrack")
os.environ["FTRACK_API_USER"] = f["FTRACK_API_USER"]
os.environ["FTRACK_API_KEY"] = f["FTRACK_API_KEY"]
os.environ["FTRACK_SERVER"] = f["FTRACK_SERVER"]
metadata = data.get("metadata")
if metadata:
session = metadata.get("session")

View file

@ -44,7 +44,7 @@ class CollectTemplates(pyblish.api.InstancePlugin):
assert asset, ("No asset found by the name '{}' "
"in project '{}'".format(asset_name, project_name))
silo = asset['silo']
silo = asset.get('silo')
subset = io.find_one({"type": "subset",
"name": subset_name,

View file

@ -72,13 +72,7 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin):
entity = io.find_one({"type": "asset", "name": name})
# Create entity if doesn"t exist
if entity is None:
if self.project["_id"] == parent["_id"]:
silo = None
elif parent["silo"] is None:
silo = parent["name"]
else:
silo = parent["silo"]
entity = self.create_avalon_asset(name, silo, data)
entity = self.create_avalon_asset(name, data)
# Update entity data with input data
io.update_many({"_id": entity["_id"]}, {"$set": {"data": data}})
@ -86,11 +80,10 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin):
if "childs" in entity_data:
self.import_to_avalon(entity_data["childs"], entity)
def create_avalon_asset(self, name, silo, data):
def create_avalon_asset(self, name, data):
item = {
"schema": "avalon-core:asset-2.0",
"schema": "avalon-core:asset-3.0",
"name": name,
"silo": silo,
"parent": self.project["_id"],
"type": "asset",
"data": data

View file

@ -1,7 +1,6 @@
import os
import logging
import shutil
import clique
import errno
import pyblish.api
@ -25,9 +24,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
label = "Integrate Asset"
order = pyblish.api.IntegratorOrder
families = ["assembly",
"yetiRig",
"yeticache"]
families = ["assembly"]
exclude_families = ["clip"]
def process(self, instance):
@ -41,7 +38,6 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
if instance.data.get('transfer', True):
self.integrate(instance)
def register(self, instance):
# Required environment variables
PROJECT = api.Session["AVALON_PROJECT"]
@ -158,7 +154,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
"version": int(version["name"]),
"hierarchy": hierarchy}
template_publish = project["config"]["template"]["publish"]
# template_publish = project["config"]["template"]["publish"]
anatomy = instance.context.data['anatomy']
# Find the representations to transfer amongst the files

View file

@ -3,7 +3,6 @@ from os.path import getsize
import logging
import speedcopy
import clique
import traceback
import errno
import pyblish.api
from avalon import api, io
@ -64,7 +63,9 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
"plate",
"look",
"lut",
"audio"
"audio",
"yetiRig",
"yeticache"
]
exclude_families = ["clip"]
@ -110,7 +111,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
# extracted_traceback[1], result["error"]
# )
# )
# assert all(result["success"] for result in context.data["results"]), (
# assert all(result["success"] for result in context.data["results"]),(
# "Atomicity not held, aborting.")
# Assemble
@ -251,7 +252,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
template_data = {"root": root,
"project": {"name": PROJECT,
"code": project['data']['code']},
"silo": asset['silo'],
"silo": asset.get('silo'),
"task": TASK,
"asset": ASSET,
"family": instance.data['family'],
@ -267,7 +268,9 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
template = os.path.normpath(
anatomy.templates[template_name]["path"])
if isinstance(files, list):
sequence_repre = isinstance(files, list)
if sequence_repre:
src_collections, remainder = clique.assemble(files)
self.log.debug(
"src_tail_collections: {}".format(str(src_collections)))
@ -304,6 +307,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
dst_tail = dst_collection.format("{tail}")
index_frame_start = None
if repre.get("frameStart"):
frame_start_padding = len(str(
repre.get("frameEnd")))
@ -328,7 +332,9 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
self.log.debug("source: {}".format(src))
instance.data["transfers"].append([src, dst])
repre['published_path'] = "{0}{1}{2}".format(dst_head, dst_padding_exp, dst_tail)
repre['published_path'] = "{0}{1}{2}".format(dst_head,
dst_padding_exp,
dst_tail)
# for imagesequence version data
hashes = '#' * len(dst_padding)
dst = os.path.normpath("{0}{1}{2}".format(
@ -379,7 +385,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
"project": {"name": PROJECT,
"code": project['data']['code']},
'task': TASK,
"silo": asset['silo'],
"silo": asset.get('silo'),
"asset": ASSET,
"family": instance.data['family'],
"subset": subset["name"],
@ -388,6 +394,10 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
"representation": repre['ext']
}
}
if sequence_repre and repre.get("frameStart"):
representation['context']['frame'] = repre.get("frameStart")
self.log.debug("__ representation: {}".format(representation))
destination_list.append(dst)
self.log.debug("__ destination_list: {}".format(destination_list))
@ -482,12 +492,16 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
if subset is None:
subset_name = instance.data["subset"]
self.log.info("Subset '%s' not found, creating.." % subset_name)
self.log.debug("families. %s" % instance.data.get('families'))
self.log.debug("families. %s" % type(instance.data.get('families')))
_id = io.insert_one({
"schema": "avalon-core:subset-2.0",
"schema": "pype:subset-3.0",
"type": "subset",
"name": subset_name,
"data": {},
"data": {
"families": instance.data.get('families')
},
"parent": asset["_id"]
}).inserted_id
@ -510,7 +524,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
version_locations = [location for location in locations if
location is not None]
return {"schema": "avalon-core:version-2.0",
return {"schema": "pype:version-3.0",
"type": "version",
"parent": subset["_id"],
"name": version_number,

View file

@ -152,7 +152,7 @@ class IntegrateFrames(pyblish.api.InstancePlugin):
template_data = {"root": root,
"project": {"name": PROJECT,
"code": project['data']['code']},
"silo": asset['silo'],
"silo": asset.get('silo'),
"task": api.Session["AVALON_TASK"],
"asset": ASSET,
"family": instance.data['family'],

View file

@ -297,6 +297,14 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
}
}
if submission_type == "muster":
ftrack = {
"FTRACK_API_USER": os.environ.get("FTRACK_API_USER"),
"FTRACK_API_KEY": os.environ.get("FTRACK_API_KEY"),
"FTRACK_SERVER": os.environ.get("FTRACK_SERVER")
}
metadata.update({"ftrack": ftrack})
# Ensure output dir exists
output_dir = instance.data["outputDir"]
if not os.path.isdir(output_dir):

View file

@ -0,0 +1,190 @@
import pyblish.api
import pype.api
class ValidateFtrackAttributes(pyblish.api.InstancePlugin):
"""
This will validate attributes in ftrack against data in scene.
Attributes to be validated are specified in:
`$PYPE_CONFIG/presets/<host>/ftrack_attributes.json`
This is an array (list) of checks in the format:
[
[<attribute>, <operator>, <expression>]
]
Where <attribute> is the name of an ftrack attribute and <operator> is one of:
"is", "is_not", "greater_than", "less_than", "contains", "not_contains",
"starts_with", "ends_with"
<expression> is python code that is evaluated by the validator. This allows
you to fetch whatever value from the scene you want, for example in Maya:
[
"fps", "is",
"from maya import mel; out = mel.eval('currentTimeUnitToFPS()')"
]
will test if the ftrack fps attribute on the current Task parent is the same
as the fps info we get from maya. Store the value you need to compare in the
variable `out` in your expression.
"""
label = "Validate Custom Ftrack Attributes"
order = pype.api.ValidateContentsOrder
families = ["ftrack"]
optional = True
def process(self, instance):
context = instance.context
task = context.data.get('ftrackTask', False)
if not task:
self._raise(AttributeError,
"Missing FTrack Task entity in context")
host = pyblish.api.current_host()
to_check = context.data["presets"][host].get("ftrack_attributes")
if not to_check:
self.log.warning("ftrack_attributes preset not found")
return
self.log.info("getting attributes from ftrack ...")
# get parent of task
custom_attributes = {}
try:
parent = task["parent"]
custom_attributes = parent["custom_attributes"].items()
except KeyError:
self._raise(KeyError, "missing `parent` or `attributes`")
custom_attributes = dict(custom_attributes)
# get list of hierarchical attributes from ftrack
session = context.data["ftrackSession"]
custom_hier_attributes = self._get_custom_hier_attrs(session)
custom_attributes = {}
_nonhier = {}
custom_hier_attributes = {k: None for k in custom_hier_attributes}
for key, value in dict(parent["custom_attributes"]).items():
if key in custom_hier_attributes:
custom_hier_attributes[key] = value
else:
_nonhier[key] = value
custom_hier_values = self._get_hierarchical_values(
custom_hier_attributes, parent)
custom_hier_values.update(_nonhier)
errors = []
attribs = custom_hier_values
for check in to_check:
ev = {}
# WARNING(Ondrej Samohel): This is really not secure as we are
# basically executing user code. But there's no other way to make
# it flexible enough for users to get arbitrary values from the scene.
exec(str(check[2]), {}, ev)
if not ev.get("out"):
errors.append("{} code doesn't return 'out': '{}'".format(
check[0], check[2]))
continue
if check[0] in attribs:
if check[1] == "is":
if attribs[check[0]] != ev["out"]:
errors.append("{}: {} is not {}".format(
check[0], attribs[check[0]], ev["out"]))
elif check[1] == "is_not":
if attribs[check[0]] == ev["out"]:
errors.append("{}: {} is {}".format(
check[0], attribs[check[0]], ev["out"]))
elif check[1] == "less_than":
if not attribs[check[0]] < ev["out"]:
errors.append("{}: {} is not less than {}".format(
check[0], attribs[check[0]], ev["out"]))
elif check[1] == "greater_than":
if not attribs[check[0]] > ev["out"]:
errors.append("{}: {} is not greater than {}".format(
check[0], attribs[check[0]], ev["out"]))
elif check[1] == "contains":
if ev["out"] not in attribs[check[0]]:
errors.append("{}: {} does not contain {}".format(
check[0], attribs[check[0]], ev["out"]))
elif check[1] == "not_contains":
if ev["out"] in attribs[check[0]]:
errors.append("{}: {} contains {}".format(
check[0], attribs[check[0]], ev["out"]))
elif check[1] == "starts_with":
if not attribs[check[0]].startswith(ev["out"]):
errors.append("{}: {} does not start with {}".format(
check[0], attribs[check[0]], ev["out"]))
elif check[1] == "ends_with":
if not attribs[check[0]].endswith(ev["out"]):
errors.append("{}: {} does not end with {}".format(
check[0], attribs[check[0]], ev["out"]))
if errors:
self.log.error('There are invalid values for attributes:')
for e in errors:
self.log.error(e)
raise ValueError("ftrack attributes don't match")
def _get_custom_hier_attrs(self, session):
hier_custom_attributes = []
cust_attrs_query = (
"select id, entity_type, object_type_id, is_hierarchical"
" from CustomAttributeConfiguration"
)
all_avalon_attr = session.query(cust_attrs_query).all()
for cust_attr in all_avalon_attr:
if cust_attr["is_hierarchical"]:
hier_custom_attributes.append(cust_attr["key"])
return hier_custom_attributes
def _get_hierarchical_values(self, keys_dict, entity):
# check values already set
_set_keys = []
for key, value in keys_dict.items():
if value is not None:
_set_keys.append(key)
# pop set values from keys_dict
set_keys = {}
for key in _set_keys:
set_keys[key] = keys_dict.pop(key)
# find if entity has set values and pop them out
keys_to_pop = []
for key in keys_dict.keys():
_val = entity["custom_attributes"][key]
if _val:
keys_to_pop.append(key)
set_keys[key] = _val
for key in keys_to_pop:
keys_dict.pop(key)
# if there are not keys to find value return found
if not keys_dict:
return set_keys
# end recursion if entity is project
if entity.entity_type.lower() == "project":
for key, value in keys_dict.items():
set_keys[key] = value
else:
result = self._get_hierarchical_values(keys_dict, entity["parent"])
for key, value in result.items():
set_keys[key] = value
return set_keys
def _raise(self, exc, msg):
self.log.error(msg)
raise exc(msg)
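
To make the recursion in _get_hierarchical_values easier to follow, here is a compact sketch of the same idea, assuming ftrack-like entities with a custom_attributes mapping and a parent link (illustrative only, not the plugin code):

    def lookup_hierarchical(keys, entity):
        """Resolve each key to the first value found walking up the parents."""
        found = {}
        for key in list(keys):
            value = entity["custom_attributes"][key]
            if value:
                found[key] = value
                keys.remove(key)
        if keys and entity.entity_type.lower() != "project":
            found.update(lookup_hierarchical(keys, entity["parent"]))
        return found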

View file

@ -116,6 +116,7 @@ class ImportMayaLoader(api.Loader):
with maya.maintained_selection():
cmds.file(self.fname,
i=True,
preserveReferences=True,
namespace=namespace,
returnNewNodes=True,
groupReference=True,

View file

@ -4,7 +4,8 @@ import os
from pypeapp import config
import pymel.core as pm
reload(config)
import pype.maya.plugin
reload(pype.maya.plugin)
class ReferenceLoader(pype.maya.plugin.ReferenceLoader):
"""Load the model"""
@ -42,11 +43,17 @@ class ReferenceLoader(pype.maya.plugin.ReferenceLoader):
namespace = cmds.referenceQuery(nodes[0], namespace=True)
shapes = cmds.ls(nodes, shapes=True, long=True)
print(shapes)
newNodes = (list(set(nodes) - set(shapes)))
print(newNodes)
groupNode = pm.PyNode(groupName)
roots = set()
print(nodes)
for node in nodes:
for node in newNodes:
try:
roots.add(pm.PyNode(node).getAllParents()[-2])
except:
@ -59,7 +66,6 @@ class ReferenceLoader(pype.maya.plugin.ReferenceLoader):
root.setParent(groupNode)
cmds.setAttr(groupName + ".displayHandle", 1)
groupNode
presets = config.get_presets(project=os.environ['AVALON_PROJECT'])
colors = presets['plugins']['maya']['load']['colors']
@ -68,7 +74,7 @@ class ReferenceLoader(pype.maya.plugin.ReferenceLoader):
groupNode.useOutlinerColor.set(1)
groupNode.outlinerColor.set(c[0], c[1], c[2])
self[:] = nodes
self[:] = newNodes
cmds.setAttr(groupName + ".displayHandle", 1)
# get bounding box
@ -88,7 +94,7 @@ class ReferenceLoader(pype.maya.plugin.ReferenceLoader):
cmds.setAttr(groupName + ".selectHandleY", cy)
cmds.setAttr(groupName + ".selectHandleZ", cz)
return nodes
return newNodes
def switch(self, container, representation):
self.update(container, representation)

View file

@ -47,12 +47,18 @@ class RigLoader(pype.maya.plugin.ReferenceLoader):
cmds.setAttr(groupName + ".outlinerColor",
c[0], c[1], c[2])
shapes = cmds.ls(nodes, shapes=True, long=True)
print(shapes)
newNodes = (list(set(nodes) - set(shapes)))
print(newNodes)
# Store for post-process
self[:] = nodes
self[:] = newNodes
if data.get("post_process", True):
self._post_process(name, namespace, context, data)
return nodes
return newNodes
def _post_process(self, name, namespace, context, data):

View file

@ -39,8 +39,10 @@ class YetiCacheLoader(api.Loader):
cmds.loadPlugin("pgYetiMaya", quiet=True)
# Get JSON
fname, ext = os.path.splitext(self.fname)
settings_fname = "{}.fursettings".format(fname)
fbase = re.search(r'^(.+)\.(\d+|#+)\.fur', self.fname)
if not fbase:
raise RuntimeError('Cannot determine fursettings file path')
settings_fname = "{}.fursettings".format(fbase.group(1))
with open(settings_fname, "r") as fp:
fursettings = json.load(fp)
@ -102,7 +104,6 @@ class YetiCacheLoader(api.Loader):
namespace = container["namespace"]
container_node = container["objectName"]
path = api.get_representation_path(representation)
# Get all node data
fname, ext = os.path.splitext(path)
settings_fname = "{}.fursettings".format(fname)
@ -151,7 +152,8 @@ class YetiCacheLoader(api.Loader):
# Update cache file name
file_name = data["name"].replace(":", "_")
cache_file_path = "{}.%04d.fur".format(file_name)
data["attrs"]["cacheFileName"] = os.path.join(path, cache_file_path)
data["attrs"]["cacheFileName"] = os.path.join(
path, cache_file_path)
if cb_id not in scene_lookup:
@ -284,10 +286,15 @@ class YetiCacheLoader(api.Loader):
attributes = node_settings["attrs"]
# Check if cache file name is stored
# get number of # in path and convert it to C printf format
# like %04d expected by Yeti
fbase = re.search(r'^(.+)\.(\d+|#+)\.fur', self.fname)
if not fbase:
raise RuntimeError('Cannot determine file path')
padding = len(fbase.group(2))
if "cacheFileName" not in attributes:
file_name = original_node.replace(":", "_")
cache_name = "{}.%04d.fur".format(file_name)
cache = os.path.join(self.fname, cache_name)
cache = "{}.%0{}d.fur".format(fbase.group(1), padding)
self.validate_cache(cache)
attributes["cacheFileName"] = cache

View file

@ -93,6 +93,9 @@ class CollectInstances(pyblish.api.ContextPlugin):
parents = self.get_all_parents(members)
members_hierarchy = list(set(members + children + parents))
if 'families' not in data:
data['families'] = [data.get('family')]
# Create the instance
instance = context.create_instance(objset)
instance[:] = members_hierarchy
@ -100,6 +103,7 @@ class CollectInstances(pyblish.api.ContextPlugin):
# Store the exact members of the object set
instance.data["setMembers"] = members
# Define nice label
name = cmds.ls(objset, long=False)[0] # use short name
label = "{0} ({1})".format(name,
@ -117,6 +121,8 @@ class CollectInstances(pyblish.api.ContextPlugin):
# Produce diagnostic message for any graphical
# user interface interested in visualising it.
self.log.info("Found: \"%s\" " % instance.data["name"])
self.log.debug("DATA: \"%s\" " % instance.data)
def sort_by_family(instance):
"""Sort by family"""

View file

@ -37,6 +37,7 @@ class CollectMayaScene(pyblish.api.ContextPlugin):
"label": subset,
"publish": False,
"family": 'workfile',
"families": ['workfile'],
"setMembers": [current_file]
})

View file

@ -45,8 +45,10 @@ class CollectYetiRig(pyblish.api.InstancePlugin):
instance.data["resources"] = yeti_resources
# Force frame range for export
instance.data["frameStart"] = 1
instance.data["frameEnd"] = 1
instance.data["frameStart"] = cmds.playbackOptions(
query=True, animationStartTime=True)
instance.data["frameEnd"] = cmds.playbackOptions(
query=True, animationEndTime=True)
def collect_input_connections(self, instance):
"""Collect the inputs for all nodes in the input_SET"""
@ -114,15 +116,17 @@ class CollectYetiRig(pyblish.api.InstancePlugin):
resources = []
image_search_paths = cmds.getAttr("{}.imageSearchPath".format(node))
texture_filenames = []
if image_search_paths:
# TODO: Somehow this uses OS environment path separator, `:` vs `;`
# Later on check whether this is pipeline OS cross-compatible.
image_search_paths = [p for p in
image_search_paths.split(os.path.pathsep) if p]
# TODO: Somehow this uses OS environment path separator, `:` vs `;`
# Later on check whether this is pipeline OS cross-compatible.
image_search_paths = [p for p in
image_search_paths.split(os.path.pathsep) if p]
# List all related textures
texture_filenames = cmds.pgYetiCommand(node, listTextures=True)
self.log.info("Found %i texture(s)" % len(texture_filenames))
# List all related textures
texture_filenames = cmds.pgYetiCommand(node, listTextures=True)
self.log.info("Found %i texture(s)" % len(texture_filenames))
# Get all reference nodes
reference_nodes = cmds.pgYetiGraph(node,

View file

@ -72,7 +72,6 @@ def maketx(source, destination, *args):
"--checknan",
# use oiio-optimized settings for tile-size, planarconfig, metadata
"--oiio",
"--colorconvert sRGB linear",
"--filter lanczos3"
]
@ -87,7 +86,7 @@ def maketx(source, destination, *args):
args=cmd,
stderr=subprocess.STDOUT
)
if sys.platform == "win32":
kwargs["creationflags"] = CREATE_NO_WIDOW
try:
@ -375,6 +374,7 @@ class ExtractLook(pype.api.Extractor):
)
if linearise:
self.log.info("tx: converting sRGB -> linear")
colorconvert = "--colorconvert sRGB linear"
else:
colorconvert = ""
@ -425,7 +425,7 @@ class ExtractLook(pype.api.Extractor):
assert asset, ("No asset found by the name '{}' "
"in project '{}'".format(asset_name, project_name))
silo = asset['silo']
silo = asset.get('silo')
subset = io.find_one({"type": "subset",
"name": subset_name,

View file

@ -7,11 +7,9 @@ import pype.api
class ExtractYetiCache(pype.api.Extractor):
"""Produce an alembic of just point positions and normals.
Positions and normals are preserved, but nothing more,
for plain and predictable point caches.
"""Producing Yeti cache files using scene time range.
This will extract Yeti cache file sequence and fur settings.
"""
label = "Extract Yeti Cache"
@ -44,7 +42,8 @@ class ExtractYetiCache(pype.api.Extractor):
else:
kwargs.update({"samples": samples})
self.log.info("Writing out cache")
self.log.info(
"Writing out cache {} - {}".format(start_frame, end_frame))
# Start writing the files for snap shot
# <NAME> will be replace by the Yeti node name
path = os.path.join(dirname, "<NAME>.%04d.fur")
@ -63,10 +62,31 @@ class ExtractYetiCache(pype.api.Extractor):
with open(data_file, "w") as fp:
json.dump(settings, fp, ensure_ascii=False)
# Ensure files can be stored
if "files" not in instance.data:
instance.data["files"] = list()
# build representations
if "representations" not in instance.data:
instance.data["representations"] = []
instance.data["files"].extend([cache_files, "yeti.fursettings"])
self.log.info("cache files: {}".format(cache_files[0]))
instance.data["representations"].append(
{
'name': 'fur',
'ext': 'fur',
'files': cache_files[0] if len(cache_files) == 1 else cache_files,
'stagingDir': dirname,
'anatomy_template': 'publish',
'frameStart': int(start_frame),
'frameEnd': int(end_frame)
}
)
instance.data["representations"].append(
{
'name': 'fursettings',
'ext': 'fursettings',
'files': os.path.basename(data_file),
'stagingDir': dirname,
'anatomy_template': 'publish'
}
)
self.log.info("Extracted {} to {}".format(instance, dirname))
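
For illustration, a resulting fur representation entry could look like this (file names and staging dir are hypothetical):

    fur_representation = {
        "name": "fur",
        "ext": "fur",
        "files": ["yetiNode.0001.fur", "yetiNode.0002.fur"],
        "stagingDir": "C:/temp/pyblish_tmp/yeti",
        "anatomy_template": "publish",
        "frameStart": 1,
        "frameEnd": 2
    }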

View file

@ -155,10 +155,30 @@ class ExtractYetiRig(pype.api.Extractor):
shader=False)
# Ensure files can be stored
if "files" not in instance.data:
instance.data["files"] = list()
# build representations
if "representations" not in instance.data:
instance.data["representations"] = []
instance.data["files"].extend(["yeti_rig.ma", "yeti.rigsettings"])
self.log.info("rig file: {}".format("yeti_rig.ma"))
instance.data["representations"].append(
{
'name': "ma",
'ext': 'ma',
'files': "yeti_rig.ma",
'stagingDir': dirname,
'anatomy_template': 'publish'
}
)
self.log.info("settings file: {}".format("yeti.rigsettings"))
instance.data["representations"].append(
{
'name': 'rigsettings',
'ext': 'rigsettings',
'files': 'yeti.rigsettings',
'stagingDir': dirname,
'anatomy_template': 'publish'
}
)
self.log.info("Extracted {} to {}".format(instance, dirname))

View file

@ -114,7 +114,7 @@ def preview_fname(folder, scene, layer, padding, ext):
"""
# Following hardcoded "maya/<Scene>/<Layer>/<Layer>.<number>.<ext>"
output = "{scene}/{layer}/{layer}.{number}.{ext}".format(
output = "maya/{scene}/{layer}/{layer}.{number}.{ext}".format(
scene=scene,
layer=layer,
number="#" * padding,
@ -295,6 +295,7 @@ class MayaSubmitMuster(pyblish.api.InstancePlugin):
ext=render_variables["ext"])
instance.data["outputDir"] = os.path.dirname(output_filename_0)
self.log.debug("output: {}".format(filepath))
# build path for metadata file
metadata_filename = "{}_metadata.json".format(instance.data["subset"])
output_dir = instance.data["outputDir"]
@ -313,9 +314,11 @@ class MayaSubmitMuster(pyblish.api.InstancePlugin):
# python named MPython.exe, residing directly in muster bin
# directory.
if platform.system().lower() == "windows":
muster_python = "MPython.exe"
# for muster, those backslashes must be escaped twice
muster_python = ("\"C:\\\\Program Files\\\\Virtual Vertex\\\\"
"Muster 9\\\\MPython.exe\"")
else:
muster_python = "mpython"
muster_python = "/usr/local/muster9/mpython"
# build the path and argument. We are providing separate --pype
# argument with network path to pype as post job actions are run
@ -323,8 +326,13 @@ class MayaSubmitMuster(pyblish.api.InstancePlugin):
# inherit environment from publisher including PATH, so there's
# no problem finding PYPE, but there is no way (as far as I know)
# to set environment dynamically for dispatcher. Therefore this hack.
args = [muster_python, _get_script(), "--paths", metadata_path,
"--pype", pype_root]
args = [muster_python,
_get_script().replace('\\', '\\\\'),
"--paths",
metadata_path.replace('\\', '\\\\'),
"--pype",
pype_root.replace('\\', '\\\\')]
postjob_command = " ".join(args)
try:
@ -358,7 +366,9 @@ class MayaSubmitMuster(pyblish.api.InstancePlugin):
"jobId": -1,
"startOn": 0,
"parentId": -1,
"project": scene,
"project": os.environ.get('AVALON_PROJECT') or scene,
"shot": os.environ.get('AVALON_ASSET') or scene,
"camera": instance.data.get("cameras")[0],
"dependMode": 0,
"packetSize": 4,
"packetType": 1,
@ -426,7 +436,7 @@ class MayaSubmitMuster(pyblish.api.InstancePlugin):
"subst": False
},
"ADD_FLAGS": {
"value": "",
"value": "-rl {}".format(renderlayer),
"state": True,
"subst": True
}
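
For context, the post-job command built earlier in this file ends up as one string with doubly escaped backslashes; a sketch with made-up paths (the real script and metadata paths come from _get_script() and the plugin data):

    muster_python = ("\"C:\\\\Program Files\\\\Virtual Vertex\\\\"
                     "Muster 9\\\\MPython.exe\"")
    script = "Z:\\pype\\scripts\\publish.py".replace('\\', '\\\\')
    metadata_path = "Z:\\out\\subset_metadata.json".replace('\\', '\\\\')
    pype_root = "Z:\\pype".replace('\\', '\\\\')
    postjob_command = " ".join(
        [muster_python, script, "--paths", metadata_path,
         "--pype", pype_root])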

View file

@ -21,7 +21,7 @@ class ValidateFrameRange(pyblish.api.InstancePlugin):
"pointcache",
"camera",
"renderlayer",
"oolorbleed.vrayproxy"]
"colorbleed.vrayproxy"]
def process(self, instance):

View file

@ -12,8 +12,8 @@ class ValidateRenderSettings(pyblish.api.InstancePlugin):
"""Validates the global render settings
* File Name Prefix must be as followed:
* vray: maya/<Layer>/<Layer>
* default: maya/<RenderLayer>/<RenderLayer>_<RenderPass>
* vray: maya/<Scene>/<Layer>/<Layer>
* default: maya/<Scene>/<RenderLayer>/<RenderLayer>_<RenderPass>
* Frame Padding must be:
* default: 4
@ -36,8 +36,8 @@ class ValidateRenderSettings(pyblish.api.InstancePlugin):
actions = [pype.api.RepairAction]
DEFAULT_PADDING = 4
RENDERER_PREFIX = {"vray": "maya/<Layer>/<Layer>"}
DEFAULT_PREFIX = "maya/<RenderLayer>/<RenderLayer>_<RenderPass>"
RENDERER_PREFIX = {"vray": "maya/<scene>/<Layer>/<Layer>"}
DEFAULT_PREFIX = "maya/<Scene>/<RenderLayer>/<RenderLayer>_<RenderPass>"
def process(self, instance):

View file

@ -0,0 +1,40 @@
import os
from maya import cmds
import pyblish.api
import pype.api
import pype.maya.action
class ValidateUnicodeStrings(pyblish.api.Validator):
"""Validate all environment variables are string type.
"""
order = pype.api.ValidateContentsOrder
hosts = ['maya']
families = ['review']
label = 'Unicode Strings'
actions = [pype.api.RepairAction]
def process(self, instance):
invalid = self.get_invalid(instance)
if invalid:
raise RuntimeError("Found unicode strings in environment variables.")
@classmethod
def get_invalid(cls, instance):
invalid = []
for key, value in os.environ.items():
if type(value) is type(u't'):
invalid.append((key, value))
return invalid
@classmethod
def repair(cls, instance):
"""Retype all unicodes to strings."""
for key, value in os.environ.items():
if type(value) is type(u't'):
os.environ[key] = str(value)

View file

@ -1,9 +1,6 @@
import pyblish.api
import pype.action
import maya.cmds as cmds
import pype.maya.action
@ -60,4 +57,3 @@ class ValidateYetiRigCacheState(pyblish.api.InstancePlugin):
for node in invalid:
cmds.setAttr("%s.fileMode" % node, 0)
cmds.setAttr("%s.cacheFileName" % node, "", type="string")

View file

@ -182,7 +182,7 @@ class ExtractVideoTracksLuts(pyblish.api.InstancePlugin):
assert asset, ("No asset found by the name '{}' "
"in project '{}'".format(asset_name, project_name))
silo = asset['silo']
silo = asset.get('silo')
subset = io.find_one({"type": "subset",
"name": subset_name,

View file

@ -0,0 +1,17 @@
import pyblish.api
class CollectAudioVersion(pyblish.api.InstancePlugin):
"""Set a default version for audio instances."""
label = "Collect Audio Version"
order = pyblish.api.CollectorOrder
families = ['audio']
def process(self, instance):
self.log.info('Audio: {}'.format(instance.data['name']))
instance.data['version'] = '001'
self.log.info('Audio version to: {}'.format(instance.data['version']))

View file

@ -0,0 +1,34 @@
import pyblish.api
class CollectFrameranges(pyblish.api.InstancePlugin):
"""
Collecting frameranges needed for ftrack integration
Args:
context (obj): pyblish context session
"""
label = "Collect Clip Frameranges"
order = pyblish.api.CollectorOrder
families = ['clip']
def process(self, instance):
# getting metadata from jsonData key
metadata = instance.data.get('jsonData').get('metadata')
# getting important metadata for time calculation
fps = metadata['ppro.timeline.fps']
sec_start = metadata['ppro.clip.start']
sec_end = metadata['ppro.clip.end']
fstart = instance.data.get('fstart')
fend = fstart + (sec_end * fps) - (sec_start * fps) - 1
self.log.debug("instance: {}, fps: {}\nsec_start: {}\nsec_end: {}\nfstart: {}\nfend: {}\n".format(
instance.data['name'],
fps, sec_start, sec_end, fstart, fend))
instance.data['startFrame'] = fstart
instance.data['endFrame'] = fend
instance.data['fps'] = metadata['ppro.timeline.fps']
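
A quick worked example of the range math above, with made-up values: fps 24, clip start 10.0 s, clip end 12.5 s, fstart 1001:

    fps = 24.0
    sec_start, sec_end = 10.0, 12.5
    fstart = 1001
    fend = fstart + (sec_end * fps) - (sec_start * fps) - 1
    # fend == 1060.0, i.e. the clip spans 60 frames (1001..1060 inclusive)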

View file

@ -0,0 +1,72 @@
import pyblish.api
from avalon import api
class CollectHierarchyContext(pyblish.api.ContextPlugin):
"""Collecting hierarchy context from `parents` and `hierarchy` data
present in `clip` family instances coming from the request json data file
It will add `hierarchyContext` into the context data so integrate
plugins are able to create the needed parents for the context if they
don't exist yet
"""
label = "Collect Hierarchy Context"
order = pyblish.api.CollectorOrder + 0.1
def update_dict(self, ex_dict, new_dict):
for key in ex_dict:
if key in new_dict and isinstance(ex_dict[key], dict):
new_dict[key] = self.update_dict(ex_dict[key], new_dict[key])
else:
new_dict[key] = ex_dict[key]
return new_dict
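# Example (illustrative): update_dict({"a": {"x": 1}}, {"a": {"y": 2}})
# returns {"a": {"y": 2, "x": 1}} - nested dicts are merged key by key
# instead of being replaced wholesale.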
def process(self, context):
json_data = context.data.get("jsonData", None)
temp_context = {}
for instance in json_data['instances']:
if instance['family'] in 'projectfile':
continue
in_info = {}
name = instance['name']
# suppose that all instances are Shots
in_info['entity_type'] = 'Shot'
instance_pyblish = [
i for i in context.data["instances"] if i.data['asset'] in name][0]
in_info['custom_attributes'] = {
'fend': instance_pyblish.data['endFrame'],
'fstart': instance_pyblish.data['startFrame'],
'fps': instance_pyblish.data['fps']
}
in_info['tasks'] = instance['tasks']
parents = instance.get('parents', [])
actual = {name: in_info}
for parent in reversed(parents):
next_dict = {}
parent_name = parent["entityName"]
next_dict[parent_name] = {}
next_dict[parent_name]["entity_type"] = parent["entityType"]
next_dict[parent_name]["childs"] = actual
actual = next_dict
temp_context = self.update_dict(temp_context, actual)
self.log.debug(temp_context)
# TODO: need a 100% sure way to get the project! Will it be Name or Code?
project_name = api.Session["AVALON_PROJECT"]
final_context = {}
final_context[project_name] = {}
final_context[project_name]['entity_type'] = 'Project'
final_context[project_name]['childs'] = temp_context
# adding hierarchy context to instance
context.data["hierarchyContext"] = final_context
self.log.debug("context.data[hierarchyContext] is: {}".format(
context.data["hierarchyContext"]))
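
To illustrate the nesting with hypothetical names: a clip sh010 with parents episode ep01 and sequence sq01 ends up as this fragment of context.data["hierarchyContext"]:

    hierarchy_fragment = {
        "ep01": {"entity_type": "Episode", "childs": {
            "sq01": {"entity_type": "Sequence", "childs": {
                "sh010": {"entity_type": "Shot",
                          "custom_attributes": {"fstart": 1001,
                                                "fend": 1060,
                                                "fps": 24.0},
                          "tasks": ["Compositing"]}}}}}
    }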

View file

@ -0,0 +1,132 @@
import pyblish.api
import os
from avalon import io, api
class IntegrateAssumedDestination(pyblish.api.InstancePlugin):
"""Generate the assumed destination path where the file will be stored"""
label = "Integrate Assumed Destination"
order = pyblish.api.IntegratorOrder - 0.05
families = ["clip", "projectfile"]
def process(self, instance):
self.create_destination_template(instance)
template_data = instance.data["assumedTemplateData"]
# template = instance.data["template"]
anatomy = instance.context.data['anatomy']
# template = anatomy.publish.path
anatomy_filled = anatomy.format(template_data)
mock_template = anatomy_filled.publish.path
# For now assume resources end up in a "resources" folder in the
# published folder
mock_destination = os.path.join(os.path.dirname(mock_template),
"resources")
# Clean the path
mock_destination = os.path.abspath(os.path.normpath(mock_destination))
# Define resource destination and transfers
resources = instance.data.get("resources", list())
transfers = instance.data.get("transfers", list())
for resource in resources:
# Add destination to the resource
source_filename = os.path.basename(resource["source"])
destination = os.path.join(mock_destination, source_filename)
# Force forward slashes to fix issue with software unable
# to work correctly with backslashes in specific scenarios
# (e.g. escape characters in PLN-151 V-Ray UDIM)
destination = destination.replace("\\", "/")
resource['destination'] = destination
# Collect transfers for the individual files of the resource
# e.g. all individual files of a cache or UDIM textures.
files = resource['files']
for fsrc in files:
fname = os.path.basename(fsrc)
fdest = os.path.join(mock_destination, fname)
transfers.append([fsrc, fdest])
instance.data["resources"] = resources
instance.data["transfers"] = transfers
def create_destination_template(self, instance):
"""Create a filepath based on the current data available
Example template:
{root}/{project}/{silo}/{asset}/publish/{subset}/v{version:0>3}/
{subset}.{representation}
Args:
instance: the instance to publish
Returns:
file path (str)
"""
# get all the stuff from the database
subset_name = instance.data["subset"]
self.log.info(subset_name)
asset_name = instance.data["asset"]
project_name = api.Session["AVALON_PROJECT"]
project = io.find_one({"type": "project",
"name": project_name},
projection={"config": True, "data": True})
template = project["config"]["template"]["publish"]
# anatomy = instance.context.data['anatomy']
asset = io.find_one({"type": "asset",
"name": asset_name,
"parent": project["_id"]})
assert asset, ("No asset found by the name '{}' "
"in project '{}'".format(asset_name, project_name))
silo = asset.get('silo')
subset = io.find_one({"type": "subset",
"name": subset_name,
"parent": asset["_id"]})
# assume there is no version yet, we start at `1`
version = None
version_number = 1
if subset is not None:
version = io.find_one({"type": "version",
"parent": subset["_id"]},
sort=[("name", -1)])
# if there is a subset there ought to be version
if version is not None:
version_number += version["name"]
if instance.data.get('version'):
version_number = int(instance.data.get('version'))
hierarchy = asset['data']['parents']
if hierarchy:
# hierarchy = os.path.sep.join(hierarchy)
hierarchy = os.path.join(*hierarchy)
template_data = {"root": api.Session["AVALON_PROJECTS"],
"project": {"name": project_name,
"code": project['data']['code']},
"silo": silo,
"family": instance.data['family'],
"asset": asset_name,
"subset": subset_name,
"version": version_number,
"hierarchy": hierarchy,
"representation": "TEMP"}
instance.data["assumedTemplateData"] = template_data
self.log.info(template_data)
instance.data["template"] = template
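
With the data above, the assumed destination resolves roughly like this; the template string here is hypothetical, the real one comes from project["config"]["template"]["publish"]:

    template = ("{root}/{project[name]}/{silo}/{hierarchy}/{asset}/publish/"
                "{family}/{subset}/v{version:0>3}/{subset}.{representation}")
    template_data = {"root": "P:/projects",
                     "project": {"name": "demo", "code": "dm"},
                     "silo": "film", "hierarchy": "ep01/sq01",
                     "asset": "sh010", "family": "clip",
                     "subset": "plateDefault", "version": 1,
                     "representation": "TEMP"}
    print(template.format(**template_data))
    # P:/projects/demo/film/ep01/sq01/sh010/publish/clip/plateDefault/v001/plateDefault.TEMP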

View file

@ -0,0 +1,21 @@
import pyblish.api
class IntegrateFtrackComponentOverwrite(pyblish.api.InstancePlugin):
"""
Set `component_overwrite` to True on all instances `ftrackComponentsList`
"""
order = pyblish.api.IntegratorOrder + 0.49
label = 'Overwrite ftrack created versions'
families = ["clip"]
optional = True
active = False
def process(self, instance):
component_list = instance.data['ftrackComponentsList']
for cl in component_list:
cl['component_overwrite'] = True
self.log.debug('Component {} overwriting'.format(
cl['component_data']['name']))

View file

@ -0,0 +1,140 @@
import pyblish.api
from avalon import io
class IntegrateHierarchyToAvalon(pyblish.api.ContextPlugin):
"""
Create entities in ftrack based on collected data from premiere
"""
order = pyblish.api.IntegratorOrder - 0.1
label = 'Integrate Hierarchy To Avalon'
families = ['clip']
def process(self, context):
if "hierarchyContext" not in context.data:
return
self.db = io
if not self.db.Session:
self.db.install()
input_data = context.data["hierarchyContext"]
self.import_to_avalon(input_data)
def import_to_avalon(self, input_data, parent=None):
for name in input_data:
self.log.info('input_data[name]: {}'.format(input_data[name]))
entity_data = input_data[name]
entity_type = entity_data['entity_type']
data = {}
# Process project
if entity_type.lower() == 'project':
entity = self.db.find_one({'type': 'project'})
# TODO: should be in validator?
assert (entity is not None), "Didn't find project in DB"
# get data from already existing project
for key, value in entity.get('data', {}).items():
data[key] = value
self.av_project = entity
# Raise error if project or parent are not set
elif self.av_project is None or parent is None:
raise AssertionError(
"Collected items are not in right order!"
)
# Else process asset
else:
entity = self.db.find_one({'type': 'asset', 'name': name})
# Create entity if it doesn't exist
if entity is None:
if self.av_project['_id'] == parent['_id']:
silo = None
elif parent['silo'] is None:
silo = parent['name']
else:
silo = parent['silo']
entity = self.create_avalon_asset(name, silo)
self.log.info('entity: {}'.format(entity))
self.log.info('data: {}'.format(entity.get('data', {})))
self.log.info('____1____')
data['entityType'] = entity_type
# TASKS
tasks = entity_data.get('tasks', [])
if tasks:
data['tasks'] = tasks
parents = []
visualParent = None
data = input_data[name]
if self.av_project['_id'] != parent['_id']:
visualParent = parent['_id']
parents.extend(parent.get('data', {}).get('parents', []))
parents.append(parent['name'])
data['visualParent'] = visualParent
data['parents'] = parents
self.db.update_many(
{'_id': entity['_id']},
{'$set': {
'data': data,
}})
entity = self.db.find_one({'type': 'asset', 'name': name})
self.log.info('entity: {}'.format(entity))
self.log.info('data: {}'.format(entity.get('data', {})))
self.log.info('____2____')
# Else get data from already existing
else:
self.log.info('entity: {}'.format(entity))
self.log.info('data: {}'.format(entity.get('data', {})))
self.log.info('________')
for key, value in entity.get('data', {}).items():
data[key] = value
data['entityType'] = entity_type
# TASKS
tasks = entity_data.get('tasks', [])
if tasks:
data['tasks'] = tasks
parents = []
visualParent = None
# do not store project's id as visualParent (silo asset)
if self.av_project['_id'] != parent['_id']:
visualParent = parent['_id']
parents.extend(parent.get('data', {}).get('parents', []))
parents.append(parent['name'])
data['visualParent'] = visualParent
data['parents'] = parents
# CUSTOM ATTRIBUTES
for k, val in entity_data.get('custom_attributes', {}).items():
data[k] = val
# Update entity data with input data
self.db.update_many(
{'_id': entity['_id']},
{'$set': {
'data': data,
}})
if 'childs' in entity_data:
self.import_to_avalon(entity_data['childs'], entity)
def create_avalon_asset(self, name, silo):
item = {
'schema': 'avalon-core:asset-2.0',
'name': name,
'silo': silo,
'parent': self.av_project['_id'],
'type': 'asset',
'data': {}
}
entity_id = self.db.insert_one(item).inserted_id
return self.db.find_one({'_id': entity_id})
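
The silo resolution used above, pulled out as a standalone sketch (entity dicts are illustrative): direct children of the project get no silo, children of silo-less assets use the parent's name, everything else inherits:

    def resolve_silo(project, parent):
        if project['_id'] == parent['_id']:
            return None
        if parent.get('silo') is None:
            return parent['name']
        return parent['silo']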

View file

@ -0,0 +1,155 @@
import pyblish.api
class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):
"""
Create entities in ftrack based on collected data from premiere
Example of entry data:
{
"ProjectXS": {
"entity_type": "Project",
"custom_attributes": {
"fps": 24,...
},
"tasks": [
"Compositing",
"Lighting",... *task must exist as task type in project schema*
],
"childs": {
"sq01": {
"entity_type": "Sequence",
...
}
}
}
}
"""
order = pyblish.api.IntegratorOrder
label = 'Integrate Hierarchy To Ftrack'
families = ["clip"]
optional = False
def process(self, context):
self.context = context
if "hierarchyContext" not in context.data:
return
self.ft_project = None
self.session = context.data["ftrackSession"]
input_data = context.data["hierarchyContext"]
# adding ftrack types from presets
ftrack_types = context.data['ftrackTypes']
self.import_to_ftrack(input_data, ftrack_types)
def import_to_ftrack(self, input_data, ftrack_types, parent=None):
for entity_name in input_data:
entity_data = input_data[entity_name]
entity_type = entity_data['entity_type'].capitalize()
if entity_type.lower() == 'project':
query = 'Project where full_name is "{}"'.format(entity_name)
entity = self.session.query(query).one()
self.ft_project = entity
self.task_types = self.get_all_task_types(entity)
elif self.ft_project is None or parent is None:
raise AssertionError(
"Collected items are not in right order!"
)
# try to find if entity already exists
else:
query = '{} where name is "{}" and parent_id is "{}"'.format(
entity_type, entity_name, parent['id']
)
try:
entity = self.session.query(query).one()
except Exception:
entity = None
# Create entity if it does not exist
if entity is None:
entity = self.create_entity(
name=entity_name,
type=entity_type,
parent=parent
)
# self.log.info('entity: {}'.format(dict(entity)))
# CUSTOM ATTRIBUTES
custom_attributes = entity_data.get('custom_attributes', [])
instances = [
i for i in self.context.data["instances"] if i.data['asset'] in entity['name']]
for key in custom_attributes:
assert (key in entity['custom_attributes']), (
'Missing custom attribute')
entity['custom_attributes'][key] = custom_attributes[key]
for instance in instances:
instance.data['ftrackShotId'] = entity['id']
self.session.commit()
# TASKS
tasks = entity_data.get('tasks', [])
existing_tasks = []
tasks_to_create = []
for child in entity['children']:
if child.entity_type.lower() == 'task':
existing_tasks.append(child['name'])
# existing_tasks.append(child['type']['name'])
for task in tasks:
if task in existing_tasks:
print("Task {} already exists".format(task))
continue
tasks_to_create.append(task)
for task in tasks_to_create:
self.create_task(
name=task,
task_type=ftrack_types[task],
parent=entity
)
self.session.commit()
if 'childs' in entity_data:
self.import_to_ftrack(
entity_data['childs'], ftrack_types, entity)
def get_all_task_types(self, project):
tasks = {}
proj_template = project['project_schema']
temp_task_types = proj_template['_task_type_schema']['types']
for task_type in temp_task_types:
if task_type['name'] not in tasks:
tasks[task_type['name']] = task_type
return tasks
def create_task(self, name, task_type, parent):
task = self.session.create('Task', {
'name': name,
'parent': parent
})
# TODO not safe!!! - check if task_type exists
self.log.info(task_type)
self.log.info(self.task_types)
task['type'] = self.task_types[task_type]
self.session.commit()
return task
def create_entity(self, name, type, parent):
entity = self.session.create(type, {
'name': name,
'parent': parent
})
self.session.commit()
return entity

View file

@ -0,0 +1,21 @@
import pyblish.api
import os
class IntegrateCleanThumbs(pyblish.api.InstancePlugin):
"""
Cleaning up thumbnail files after they have been integrated
"""
order = pyblish.api.IntegratorOrder + 9
label = 'Clean thumbnail files'
families = ["clip"]
optional = True
active = True
def process(self, instance):
remove_file = [tt for t in instance.data['transfers']
for tt in t if 'jpg' in tt if 'temp' not in tt.lower()]
if len(remove_file) == 1:
os.remove(remove_file[0])
self.log.info('Thumbnail image was erased')

View file

@ -0,0 +1,19 @@
import pyblish.api
class IntegrateWorkfileVersion(pyblish.api.InstancePlugin):
"""
Desynchronizes versioning from the actual version of the work file
"""
order = pyblish.api.IntegratorOrder - 0.15
label = 'Do not synchronize workfile version'
families = ["clip"]
optional = True
active = False
def process(self, instance):
if instance.data.get('version'):
del instance.data['version']
self.log.info('Instance version was removed')

View file

@ -0,0 +1,51 @@
import pyblish.api
import pype.api
import avalon.api
class ValidateAutoSyncOff(pyblish.api.ContextPlugin):
"""Ensure that autosync value in ftrack project is set to False.
In case it was set to True while an event server with the sync-to-avalon
event is running, the integration to avalon would be overridden.
"""
order = pyblish.api.ValidatorOrder
families = ['clip']
label = 'Ftrack project\'s auto sync off'
actions = [pype.api.RepairAction]
def process(self, context):
session = context.data["ftrackSession"]
project_name = avalon.api.Session["AVALON_PROJECT"]
query = 'Project where full_name is "{}"'.format(project_name)
project = session.query(query).one()
invalid = self.get_invalid(context)
assert not invalid, (
"Ftrack Project has 'Auto sync' set to On."
" That may cause issues during integration."
)
@staticmethod
def get_invalid(context):
session = context.data["ftrackSession"]
project_name = avalon.api.Session["AVALON_PROJECT"]
query = 'Project where full_name is "{}"'.format(project_name)
project = session.query(query).one()
invalid = None
if project.get('custom_attributes', {}).get(
'avalon_auto_sync', False):
invalid = project
return invalid
@classmethod
def repair(cls, context):
session = context.data["ftrackSession"]
invalid = cls.get_invalid(context)
invalid['custom_attributes']['avalon_auto_sync'] = False
session.commit()

View file

@ -109,7 +109,7 @@ class CollectContextDataSAPublish(pyblish.api.ContextPlugin):
if component["preview"]:
instance.data["families"].append("review")
instance.data["repreProfiles"] = ["h264"]
component["tags"] = ["review", "delete"]
component["tags"] = ["review"]
self.log.debug("Adding review family")
instance.data["representations"].append(component)

View file

@ -1,11 +1,12 @@
import os
import tempfile
import pyblish.api
from pype.vendor import clique
import pype.api
class ExtractReviewSaP(pyblish.api.InstancePlugin):
class ExtractReviewSP(pyblish.api.InstancePlugin):
"""Extracting Review mov file for Ftrack
Compulsory attribute of representation is tags list with "review",
@ -16,7 +17,7 @@ class ExtractReviewSaP(pyblish.api.InstancePlugin):
filter values use preset's attributes `ext_filter`
"""
label = "Extract Review SaP"
label = "Extract Review SP"
order = pyblish.api.ExtractorOrder + 0.02
families = ["review"]
hosts = ["standalonepublisher"]
@ -24,8 +25,8 @@ class ExtractReviewSaP(pyblish.api.InstancePlugin):
def process(self, instance):
# adding plugin attributes from presets
presets = instance.context.data["presets"]
publish_presets = presets["plugins"]["global"]["publish"]
try:
publish_presets = presets["plugins"]["standalonepublisher"]["publish"]
plugin_attrs = publish_presets[self.__class__.__name__]
except KeyError:
raise KeyError("Preset for plugin \"{}\" are not set".format(
@ -82,9 +83,10 @@ class ExtractReviewSaP(pyblish.api.InstancePlugin):
full_input_path = os.path.join(staging_dir, repre["files"])
filename = repre["files"].split(".")[0]
# prepare output file
repr_file = filename + "_{0}.{1}".format(name, ext)
full_output_path = os.path.join(staging_dir, repr_file)
out_staging_dir = tempfile.mkdtemp(prefix="extract_review_")
full_output_path = os.path.join(out_staging_dir, repr_file)
self.log.info("input {}".format(full_input_path))
self.log.info("output {}".format(full_output_path))
@ -122,9 +124,15 @@ class ExtractReviewSaP(pyblish.api.InstancePlugin):
output_args = []
# preset's output data
output_args.extend(profile.get("output", []))
# set length of video by len of inserted files
output_args.append("-frames {}".format(len(repre["files"])))
if isinstance(repre["files"], list):
# set length of video by len of inserted files
video_len = len(repre["files"])
else:
video_len = repre["frameEnd"] - repre["frameStart"] + 1
output_args.append(
"-frames {}".format(video_len)
)
# letter_box
lb_string = (
@ -163,16 +171,25 @@ class ExtractReviewSaP(pyblish.api.InstancePlugin):
"name": name,
"ext": ext,
"files": repr_file,
"stagingDir": out_stagigng_dir,
"tags": new_tags,
"outputName": name
"outputName": name,
"startFrameReview": 1,
"endFrameReview": video_len
})
if repre_new.get("preview"):
repre_new.pop("preview")
# cleanup thumbnail from new repre
if repre_new.get("thumbnail"):
repre_new.pop("thumbnail")
if "thumbnail" in repre_new["tags"]:
repre_new["tags"].remove("thumbnail")
# adding representation
self.log.debug("Adding: {}".format(repre_new))
# cleanup repre from preview
if "preview" in repre:
repre.pop("preview")
if "preview" in repre["tags"]:
repre["tags"].remove("preview")
new_repres.append(repre_new)
for repre in instance.data["representations"]:
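
The clip-length rule above can be summarized in a small helper, assuming the same representation keys:

    def review_length(files, frame_start=None, frame_end=None):
        # a list means an image sequence: one frame per file
        if isinstance(files, list):
            return len(files)
        # a single movie file: use the representation frame range
        return frame_end - frame_start + 1

    # review_length(["sh.0001.jpg", "sh.0002.jpg"]) -> 2
    # review_length("sh.mov", frame_start=1001, frame_end=1060) -> 60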

View file

@ -5,7 +5,7 @@ import pyblish.api
import pype.api
class ExtractThumbnail(pyblish.api.InstancePlugin):
class ExtractThumbnailSP(pyblish.api.InstancePlugin):
"""Extract jpeg thumbnail from component input from standalone publisher
Uses jpeg file from component if possible (when single or multiple jpegs
@ -13,7 +13,7 @@ class ExtractThumbnail(pyblish.api.InstancePlugin):
input file/s single jpeg to temp.
"""
label = "Extract Thumbnail"
label = "Extract Thumbnail SP"
hosts = ["standalonepublisher"]
order = pyblish.api.ExtractorOrder

View file

@ -1,38 +1,67 @@
import os
import sys
import shutil
from pysync import walktree
from avalon import api as avalon
from avalon.lib import launch
from pyblish import api as pyblish
from pypeapp import Logger
from app import api as app
from pprint import pprint
from .. import api
from pype.aport.lib import set_avalon_workdir
from ..widgets.message_window import message
import requests
log = Logger().get_logger(__name__, "premiere")
log = api.Logger.getLogger(__name__, "premiere")
AVALON_CONFIG = os.getenv("AVALON_CONFIG", "pype")
EXTENSIONS_PATH_LOCAL = os.getenv("EXTENSIONS_PATH", None)
EXTENSIONS_CACHE_PATH = os.getenv("EXTENSIONS_CACHE_PATH", None)
EXTENSIONS_PATH_REMOTE = os.path.join(os.path.dirname(__file__), "extensions")
PARENT_DIR = os.path.dirname(__file__)
PACKAGE_DIR = os.path.dirname(PARENT_DIR)
PLUGINS_DIR = os.path.join(PACKAGE_DIR, "plugins")
_clearing_cache = ["com.pype.rename", "com.pype.avalon"]
PUBLISH_PATH = os.path.join(
PLUGINS_DIR, "premiere", "publish"
).replace("\\", "/")
if os.getenv("PUBLISH_PATH", None):
os.environ["PUBLISH_PATH"] = os.pathsep.join(
os.environ["PUBLISH_PATH"].split(os.pathsep) +
[PUBLISH_PATH]
)
else:
os.environ["PUBLISH_PATH"] = PUBLISH_PATH
LOAD_PATH = os.path.join(PLUGINS_DIR, "premiere", "load")
CREATE_PATH = os.path.join(PLUGINS_DIR, "premiere", "create")
INVENTORY_PATH = os.path.join(PLUGINS_DIR, "premiere", "inventory")
def clearing_caches_ui():
'''Before every start of Premiere, make sure there is no
outdated stuff in the cep_cache dir'''
for d in os.listdir(EXTENSIONS_CACHE_PATH):
match = [p for p in _clearing_cache
if str(p) in d]
if match:
try:
path = os.path.normpath(os.path.join(EXTENSIONS_CACHE_PATH, d))
log.info("Removing dir: {}".format(path))
shutil.rmtree(path, ignore_errors=True)
except Exception as e:
log.debug("problem: {}".format(e))
def request_aport(url_path, data={}):
try:
api.add_tool_to_environment(["aport"])
api.add_tool_to_environment(["aport_0.1"])
ip = os.getenv("PICO_IP", None)
if ip and ip.startswith('http'):
@ -45,14 +74,14 @@ def request_aport(url_path, data={}):
return req
except Exception as e:
message(title="Premiere Aport Server",
api.message(title="Premiere Aport Server",
message="Before you can run Premiere, start Aport Server. \n Error: {}".format(
e),
level="critical")
def extensions_sync():
import time
# import time
process_pairs = list()
# get extensions dir in pype.premiere.extensions
# build dir path to premiere cep extensions
@ -70,36 +99,55 @@ def extensions_sync():
log.info("Extension {0} from `{1}` coppied to `{2}`".format(
name, src, dst
))
time.sleep(10)
# time.sleep(10)
return
def install():
set_avalon_workdir()
api.set_avalon_workdir()
log.info("Registering Premiera plug-ins..")
reg_paths = request_aport("/api/register_plugin_path",
{"publish_path": PUBLISH_PATH})
log.info(str(reg_paths))
avalon.register_plugin_path(avalon.Loader, LOAD_PATH)
avalon.register_plugin_path(avalon.Creator, CREATE_PATH)
avalon.register_plugin_path(avalon.InventoryAction, INVENTORY_PATH)
# avalon.register_plugin_path(avalon.Loader, LOAD_PATH)
# avalon.register_plugin_path(avalon.Creator, CREATE_PATH)
# avalon.register_plugin_path(avalon.InventoryAction, INVENTORY_PATH)
# Disable all families except for the ones we explicitly want to see
family_states = [
"imagesequence",
"mov"
# family_states = [
# "imagesequence",
# "mov"
#
# ]
# avalon.data["familiesStateDefault"] = False
# avalon.data["familiesStateToggled"] = family_states
]
avalon.data["familiesStateDefault"] = False
avalon.data["familiesStateToggled"] = family_states
# load data from templates
api.load_data_from_templates()
# remove cep_cache from user temp dir
clearing_caches_ui()
# synchronize extensions
extensions_sync()
message(title="pyblish_paths", message=str(reg_paths), level="info")
message = "The Pype extension has been installed. " \
"\nThe following publishing paths has been registered: " \
"\n\n{}".format(
reg_paths)
api.message(title="pyblish_paths", message=message, level="info")
# launching premiere
exe = r"C:\Program Files\Adobe\Adobe Premiere Pro CC 2019\Adobe Premiere Pro.exe".replace(
"\\", "/")
log.info("____path exists: {}".format(os.path.exists(exe)))
app.forward(args=[exe],
silent=False,
cwd=os.getcwd(),
env=dict(os.environ),
shell=None)
def uninstall():
@ -107,3 +155,6 @@ def uninstall():
pyblish.deregister_plugin_path(PUBLISH_PATH)
avalon.deregister_plugin_path(avalon.Loader, LOAD_PATH)
avalon.deregister_plugin_path(avalon.Creator, CREATE_PATH)
# reset data from templates
api.reset_data_from_templates()

View file

@ -1,106 +0,0 @@
{
"time": "20190112T181028Z",
"user": "jakub.jezek",
"currentFile": "C:/Users/hubert/_PYPE_testing/projects/jakub_projectx/editorial/e01/work/conform/jkprx_e01_conform_v001.prproj",
"cwd": "C:/Users/hubert/_PYPE_testing/projects/jakub_projectx/editorial/e01/work/conform",
"date": "2019-01-12T17:10:28.377000Z",
"framerate": "24.0",
"host": "premiere",
"hostVersion": "2019",
"isRenderedReference": true,
"referenceFile": "C:/Users/hubert/_PYPE_testing/projects/jakub_projectx/resources/reference/e01/sequence01/conform/jkprx_e01_conform_v001.mov",
"instances": [
{
"publish": true,
"family": "clip",
"name": "e01_s010_0010",
"filePath": "C:/Users/hubert/_PYPE_testing/projects/jakub_projectx/resources/footage/raw/day01/bbt_test_001_raw.mov",
"tags": [
{"task": "compositing"},
{"task": "roto"},
{"task": "3d"}
],
"layer": "V1",
"sequence": "sequence01",
"representation": "mov",
"metadata": [
{"colorspace": "BT.709"},
{"fps": 24},
{"hasAudio": true},
{"format.width": 720},
{"format.height": 404},
{"format.pixelaspect": 1},
{"source.start": "00:00:00:00"},
{"source.end": "00:01:52:12"},
{"source.duration": "00:01:52:13"},
{"clip.start": "01:00:00:00"},
{"clip.end": "01:00:42:07"},
{"clip.duration": "00:00:42:08"},
{"clip.audio": {
"audioChannels": 2,
"audioRate": 48000
}},
{"timeline.audio": [
{"metadata": [
{"audioChannels": 2},
{"audioRate": 48000},
{"source.start": "00:00:00:00"},
{"source.end": "00:01:52:12"},
{"source.duration": "00:01:52:13"},
{"clip.start": "01:00:00:00"},
{"clip.end": "01:00:42:07"},
{"clip.duration": "00:00:42:08"}
],
"layer": "A2",
"path": "file/path/to/audio.wav"}
]}
]
},
{
"publish": true,
"family": "clip",
"name": "e01_s010_0020",
"filePath": "C:/Users/hubert/_PYPE_testing/projects/jakub_projectx/resources/footage/raw/day01/bbt_test_001_raw.mov",
"tags": [
{"task": "compositing"},
{"task": "roto"},
{"task": "3d"}
],
"layer": "V1",
"sequence": "sequence01",
"representation": "mov",
"metadata": [
{"colorspace": "BT.709"},
{"fps": 24},
{"hasAudio": true},
{"format.width": 720},
{"format.height": 404},
{"format.pixelaspect": 1},
{"source.start": "00:00:00:00"},
{"source.end": "00:01:52:12"},
{"source.duration": "00:01:52:13"},
{"clip.start": "01:00:00:00"},
{"clip.end": "01:00:42:07"},
{"clip.duration": "00:00:42:08"},
{"clip.audio": {
"audioChannels": 2,
"audioRate": 48000
}},
{"timeline.audio": [
{"metadata": [
{"audioChannels": 2},
{"audioRate": 48000},
{"source.start": "00:00:00:00"},
{"source.end": "00:01:52:12"},
{"source.duration": "00:01:52:13"},
{"clip.start": "01:00:00:00"},
{"clip.end": "01:00:42:07"},
{"clip.duration": "00:00:42:08"}
],
"layer": "A2",
"path": "file/path/to/audio.wav"}
]}
]
}
]
}
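The SMPTE-style timecode fields above follow the file's declared framerate of 24.0, and the duration values appear to use an inclusive convention (end - start + 1 frame). A small sketch verifying that against the sample values (the helper names are ours, purely illustrative):

def tc_to_frames(tc, fps=24):
    """Convert an HH:MM:SS:FF timecode string to an absolute frame count."""
    hh, mm, ss, ff = (int(p) for p in tc.split(":"))
    return ((hh * 60 + mm) * 60 + ss) * fps + ff

def frames_to_tc(frames, fps=24):
    """Convert an absolute frame count back to HH:MM:SS:FF."""
    ff = frames % fps
    ss = frames // fps
    return "{:02d}:{:02d}:{:02d}:{:02d}".format(
        ss // 3600, (ss % 3600) // 60, ss % 60, ff)

start = tc_to_frames("01:00:00:00")
end = tc_to_frames("01:00:42:07")
print(frames_to_tc(end - start + 1))  # 00:00:42:08, matching "clip.duration"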

View file

@ -1,12 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<ExtensionList>
<Extension Id="com.pond5.ppro">
<HostList>
<!-- Premiere -->
<Host Name="PPRO" Port="8089" />
</HostList>
</Extension>
</ExtensionList>

View file

@ -1,65 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<ExtensionManifest Version="6.0" ExtensionBundleId="com.pond5.ppro" ExtensionBundleVersion="1.0.9"
ExtensionBundleName="ppro" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<Author>
<![CDATA[Pond5]]>
</Author>
<ExtensionList>
<Extension Id="com.pond5.ppro" Version="1.0.9" />
</ExtensionList>
<ExecutionEnvironment>
<HostList>
<Host Name="PPRO" Version="9.0" />
</HostList>
<LocaleList>
<Locale Code="All" />
</LocaleList>
<RequiredRuntimeList>
<RequiredRuntime Name="CSXS" Version="6.0" />
</RequiredRuntimeList>
</ExecutionEnvironment>
<DispatchInfoList>
<Extension Id="com.pond5.ppro">
<DispatchInfo >
<Resources>
<MainPath>./index_remote.html</MainPath>
<ScriptPath>./jsx/pond5.jsx</ScriptPath>
<CEFCommandLine>
<Parameter>--enable-nodejs</Parameter>
<Parameter>--mixed-context</Parameter>
<Parameter>--disable-application-cache</Parameter>
</CEFCommandLine>
</Resources>
<Lifecycle>
<AutoVisible>true</AutoVisible>
</Lifecycle>
<UI>
<Type>Panel</Type>
<Menu>Pond5</Menu>
<Geometry>
<Size>
<Height>470</Height>
<Width>875</Width>
</Size>
<MaxSize>
<Height></Height>
<Width></Width>
</MaxSize>
<MinSize>
<Height></Height>
<Width></Width>
</MinSize>
</Geometry>
<Icons>
<Icon Type="Normal">./icons/iconNormal.png</Icon>
<Icon Type="RollOver">./icons/iconRollover.png</Icon>
<Icon Type="Disabled">./icons/iconDisabled.png</Icon>
<Icon Type="DarkNormal">./icons/iconDarkNormal.png</Icon>
<Icon Type="DarkRollOver">./icons/iconDarkRollover.png</Icon>
</Icons>
</UI>
</DispatchInfo>
</Extension>
</DispatchInfoList>
</ExtensionManifest>

File diff suppressed because one or more lines are too long

View file

@ -1,7 +0,0 @@
html, body, iframe {
width: 100%;
height: 100%;
border: 0px;
margin: 0px;
overflow: hidden;
}

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

View file

@ -1,14 +0,0 @@
<!doctype html>
<html lang="en">
<head>
<meta charset="utf-8">
<title>Pond5</title>
<link rel="stylesheet" type="text/css" href="css/app.css">
</head>
<body onLoad="onLoaded()">
</body>
<script src="js/app.js"></script>
</html>

View file

@ -1,5 +0,0 @@
// switch between live and local code
function onLoaded()
{
window.location.href = "https://plugin.pond5.com/PPRO/index.html";
}

View file

@ -1,489 +0,0 @@
/*
json2.js
2014-02-04
Public Domain.
NO WARRANTY EXPRESSED OR IMPLIED. USE AT YOUR OWN RISK.
See http://www.JSON.org/js.html
This code should be minified before deployment.
See http://javascript.crockford.com/jsmin.html
USE YOUR OWN COPY. IT IS EXTREMELY UNWISE TO LOAD CODE FROM SERVERS YOU DO
NOT CONTROL.
This file creates a global JSON object containing two methods: stringify
and parse.
JSON.stringify(value, replacer, space)
value any JavaScript value, usually an object or array.
replacer an optional parameter that determines how object
values are stringified for objects. It can be a
function or an array of strings.
space an optional parameter that specifies the indentation
of nested structures. If it is omitted, the text will
be packed without extra whitespace. If it is a number,
it will specify the number of spaces to indent at each
level. If it is a string (such as '\t' or '&nbsp;'),
it contains the characters used to indent at each level.
This method produces a JSON text from a JavaScript value.
When an object value is found, if the object contains a toJSON
method, its toJSON method will be called and the result will be
stringified. A toJSON method does not serialize: it returns the
value represented by the name/value pair that should be serialized,
or undefined if nothing should be serialized. The toJSON method
will be passed the key associated with the value, and this will be
bound to the value
For example, this would serialize Dates as ISO strings.
Date.prototype.toJSON = function (key) {
function f(n) {
// Format integers to have at least two digits.
return n < 10 ? '0' + n : n;
}
return this.getUTCFullYear() + '-' +
f(this.getUTCMonth() + 1) + '-' +
f(this.getUTCDate()) + 'T' +
f(this.getUTCHours()) + ':' +
f(this.getUTCMinutes()) + ':' +
f(this.getUTCSeconds()) + 'Z';
};
You can provide an optional replacer method. It will be passed the
key and value of each member, with this bound to the containing
object. The value that is returned from your method will be
serialized. If your method returns undefined, then the member will
be excluded from the serialization.
If the replacer parameter is an array of strings, then it will be
used to select the members to be serialized. It filters the results
such that only members with keys listed in the replacer array are
stringified.
Values that do not have JSON representations, such as undefined or
functions, will not be serialized. Such values in objects will be
dropped; in arrays they will be replaced with null. You can use
a replacer function to replace those with JSON values.
JSON.stringify(undefined) returns undefined.
The optional space parameter produces a stringification of the
value that is filled with line breaks and indentation to make it
easier to read.
If the space parameter is a non-empty string, then that string will
be used for indentation. If the space parameter is a number, then
the indentation will be that many spaces.
Example:
text = JSON.stringify(['e', {pluribus: 'unum'}]);
// text is '["e",{"pluribus":"unum"}]'
text = JSON.stringify(['e', {pluribus: 'unum'}], null, '\t');
// text is '[\n\t"e",\n\t{\n\t\t"pluribus": "unum"\n\t}\n]'
text = JSON.stringify([new Date()], function (key, value) {
return this[key] instanceof Date ?
'Date(' + this[key] + ')' : value;
});
// text is '["Date(---current time---)"]'
JSON.parse(text, reviver)
This method parses a JSON text to produce an object or array.
It can throw a SyntaxError exception.
The optional reviver parameter is a function that can filter and
transform the results. It receives each of the keys and values,
and its return value is used instead of the original value.
If it returns what it received, then the structure is not modified.
If it returns undefined then the member is deleted.
Example:
// Parse the text. Values that look like ISO date strings will
// be converted to Date objects.
myData = JSON.parse(text, function (key, value) {
var a;
if (typeof value === 'string') {
a =
/^(\d{4})-(\d{2})-(\d{2})T(\d{2}):(\d{2}):(\d{2}(?:\.\d*)?)Z$/.exec(value);
if (a) {
return new Date(Date.UTC(+a[1], +a[2] - 1, +a[3], +a[4],
+a[5], +a[6]));
}
}
return value;
});
myData = JSON.parse('["Date(09/09/2001)"]', function (key, value) {
var d;
if (typeof value === 'string' &&
value.slice(0, 5) === 'Date(' &&
value.slice(-1) === ')') {
d = new Date(value.slice(5, -1));
if (d) {
return d;
}
}
return value;
});
This is a reference implementation. You are free to copy, modify, or
redistribute.
*/
/*jslint evil: true, regexp: true */
/*members "", "\b", "\t", "\n", "\f", "\r", "\"", JSON, "\\", apply,
call, charCodeAt, getUTCDate, getUTCFullYear, getUTCHours,
getUTCMinutes, getUTCMonth, getUTCSeconds, hasOwnProperty, join,
lastIndex, length, parse, prototype, push, replace, slice, stringify,
test, toJSON, toString, valueOf
*/
// Create a JSON object only if one does not already exist. We create the
// methods in a closure to avoid creating global variables.
if (typeof JSON !== 'object') {
JSON = {};
}
(function () {
'use strict';
function f(n) {
// Format integers to have at least two digits.
return n < 10 ? '0' + n : n;
}
if (typeof Date.prototype.toJSON !== 'function') {
Date.prototype.toJSON = function () {
return isFinite(this.valueOf())
? this.getUTCFullYear() + '-' +
f(this.getUTCMonth() + 1) + '-' +
f(this.getUTCDate()) + 'T' +
f(this.getUTCHours()) + ':' +
f(this.getUTCMinutes()) + ':' +
f(this.getUTCSeconds()) + 'Z'
: null;
};
String.prototype.toJSON =
Number.prototype.toJSON =
Boolean.prototype.toJSON = function () {
return this.valueOf();
};
}
var cx,
escapable,
gap,
indent,
meta,
rep;
function quote(string) {
// If the string contains no control characters, no quote characters, and no
// backslash characters, then we can safely slap some quotes around it.
// Otherwise we must also replace the offending characters with safe escape
// sequences.
escapable.lastIndex = 0;
return escapable.test(string) ? '"' + string.replace(escapable, function (a) {
var c = meta[a];
return typeof c === 'string'
? c
: '\\u' + ('0000' + a.charCodeAt(0).toString(16)).slice(-4);
}) + '"' : '"' + string + '"';
}
function str(key, holder) {
// Produce a string from holder[key].
var i, // The loop counter.
k, // The member key.
v, // The member value.
length,
mind = gap,
partial,
value = holder[key];
// If the value has a toJSON method, call it to obtain a replacement value.
if (value && typeof value === 'object' &&
typeof value.toJSON === 'function') {
value = value.toJSON(key);
}
// If we were called with a replacer function, then call the replacer to
// obtain a replacement value.
if (typeof rep === 'function') {
value = rep.call(holder, key, value);
}
// What happens next depends on the value's type.
switch (typeof value) {
case 'string':
return quote(value);
case 'number':
// JSON numbers must be finite. Encode non-finite numbers as null.
return isFinite(value) ? String(value) : 'null';
case 'boolean':
case 'null':
// If the value is a boolean or null, convert it to a string. Note:
// typeof null does not produce 'null'. The case is included here in
// the remote chance that this gets fixed someday.
return String(value);
// If the type is 'object', we might be dealing with an object or an array or
// null.
case 'object':
// Due to a specification blunder in ECMAScript, typeof null is 'object',
// so watch out for that case.
if (!value) {
return 'null';
}
// Make an array to hold the partial results of stringifying this object value.
gap += indent;
partial = [];
// Is the value an array?
if (Object.prototype.toString.apply(value) === '[object Array]') {
// The value is an array. Stringify every element. Use null as a placeholder
// for non-JSON values.
length = value.length;
for (i = 0; i < length; i += 1) {
partial[i] = str(i, value) || 'null';
}
// Join all of the elements together, separated with commas, and wrap them in
// brackets.
v = partial.length === 0
? '[]'
: gap
? '[\n' + gap + partial.join(',\n' + gap) + '\n' + mind + ']'
: '[' + partial.join(',') + ']';
gap = mind;
return v;
}
// If the replacer is an array, use it to select the members to be stringified.
if (rep && typeof rep === 'object') {
length = rep.length;
for (i = 0; i < length; i += 1) {
if (typeof rep[i] === 'string') {
k = rep[i];
v = str(k, value);
if (v) {
partial.push(quote(k) + (gap ? ': ' : ':') + v);
}
}
}
} else {
// Otherwise, iterate through all of the keys in the object.
for (k in value) {
if (Object.prototype.hasOwnProperty.call(value, k)) {
v = str(k, value);
if (v) {
partial.push(quote(k) + (gap ? ': ' : ':') + v);
}
}
}
}
// Join all of the member texts together, separated with commas,
// and wrap them in braces.
v = partial.length === 0
? '{}'
: gap
? '{\n' + gap + partial.join(',\n' + gap) + '\n' + mind + '}'
: '{' + partial.join(',') + '}';
gap = mind;
return v;
}
}
// If the JSON object does not yet have a stringify method, give it one.
if (typeof JSON.stringify !== 'function') {
escapable = /[\\\"\x00-\x1f\x7f-\x9f\u00ad\u0600-\u0604\u070f\u17b4\u17b5\u200c-\u200f\u2028-\u202f\u2060-\u206f\ufeff\ufff0-\uffff]/g;
meta = { // table of character substitutions
'\b': '\\b',
'\t': '\\t',
'\n': '\\n',
'\f': '\\f',
'\r': '\\r',
'"' : '\\"',
'\\': '\\\\'
};
JSON.stringify = function (value, replacer, space) {
// The stringify method takes a value and an optional replacer, and an optional
// space parameter, and returns a JSON text. The replacer can be a function
// that can replace values, or an array of strings that will select the keys.
// A default replacer method can be provided. Use of the space parameter can
// produce text that is more easily readable.
var i;
gap = '';
indent = '';
// If the space parameter is a number, make an indent string containing that
// many spaces.
if (typeof space === 'number') {
for (i = 0; i < space; i += 1) {
indent += ' ';
}
// If the space parameter is a string, it will be used as the indent string.
} else if (typeof space === 'string') {
indent = space;
}
// If there is a replacer, it must be a function or an array.
// Otherwise, throw an error.
rep = replacer;
if (replacer && typeof replacer !== 'function' &&
(typeof replacer !== 'object' ||
typeof replacer.length !== 'number')) {
throw new Error('JSON.stringify');
}
// Make a fake root object containing our value under the key of ''.
// Return the result of stringifying the value.
return str('', {'': value});
};
}
// If the JSON object does not yet have a parse method, give it one.
if (typeof JSON.parse !== 'function') {
cx = /[\u0000\u00ad\u0600-\u0604\u070f\u17b4\u17b5\u200c-\u200f\u2028-\u202f\u2060-\u206f\ufeff\ufff0-\uffff]/g;
JSON.parse = function (text, reviver) {
// The parse method takes a text and an optional reviver function, and returns
// a JavaScript value if the text is a valid JSON text.
var j;
function walk(holder, key) {
// The walk method is used to recursively walk the resulting structure so
// that modifications can be made.
var k, v, value = holder[key];
if (value && typeof value === 'object') {
for (k in value) {
if (Object.prototype.hasOwnProperty.call(value, k)) {
v = walk(value, k);
if (v !== undefined) {
value[k] = v;
} else {
delete value[k];
}
}
}
}
return reviver.call(holder, key, value);
}
// Parsing happens in four stages. In the first stage, we replace certain
// Unicode characters with escape sequences. JavaScript handles many characters
// incorrectly, either silently deleting them, or treating them as line endings.
text = String(text);
cx.lastIndex = 0;
if (cx.test(text)) {
text = text.replace(cx, function (a) {
return '\\u' +
('0000' + a.charCodeAt(0).toString(16)).slice(-4);
});
}
// In the second stage, we run the text against regular expressions that look
// for non-JSON patterns. We are especially concerned with '()' and 'new'
// because they can cause invocation, and '=' because it can cause mutation.
// But just to be safe, we want to reject all unexpected forms.
// We split the second stage into 4 regexp operations in order to work around
// crippling inefficiencies in IE's and Safari's regexp engines. First we
// replace the JSON backslash pairs with '@' (a non-JSON character). Second, we
// replace all simple value tokens with ']' characters. Third, we delete all
// open brackets that follow a colon or comma or that begin the text. Finally,
// we look to see that the remaining characters are only whitespace or ']' or
// ',' or ':' or '{' or '}'. If that is so, then the text is safe for eval.
if (/^[\],:{}\s]*$/
.test(text.replace(/\\(?:["\\\/bfnrt]|u[0-9a-fA-F]{4})/g, '@')
.replace(/"[^"\\\n\r]*"|true|false|null|-?\d+(?:\.\d*)?(?:[eE][+\-]?\d+)?/g, ']')
.replace(/(?:^|:|,)(?:\s*\[)+/g, ''))) {
// In the third stage we use the eval function to compile the text into a
// JavaScript structure. The '{' operator is subject to a syntactic ambiguity
// in JavaScript: it can begin a block or an object literal. We wrap the text
// in parens to eliminate the ambiguity.
j = eval('(' + text + ')');
// In the optional fourth stage, we recursively walk the new structure, passing
// each name/value pair to a reviver function for possible transformation.
return typeof reviver === 'function'
? walk({'': j}, '')
: j;
}
// If the text is not JSON parseable, then a SyntaxError is thrown.
throw new SyntaxError('JSON.parse');
};
}
}());

View file

@ -1,277 +0,0 @@
var projectItems = [];
var sequences = [];
function importClips(obj) {
app.project.importFiles(obj.paths);
return JSON.stringify(obj);
}
function getEnv() {
app.enableQE();
var obj = {
os: qe.platform,
name: app.project.name,
path: app.project.path
}
return JSON.stringify(obj);
}
function getSequences() {
var project = app.project;
// var sequences = [];
for (var i = 0; i < project.sequences.numSequences; i++) {
var seq = project.sequences[i];
seq.clipNames = [];
sequences[i] = seq;
log('sequences[i] id: ' + project.sequences[i].sequenceID);
}
var obj = {
sequences: sequences
}
return JSON.stringify(obj);
}
function getSequenceItems(seqs) {
app.enableQE();
qe.project.init();
sequences = seqs;
// log('getSequenceItems sequences obj from app: ' + sequences);
var rootFolder = app.project.rootItem;
var binCounter = -1;
var rootSeqCounter = -1; //count sequences in root folder
//walk through root folder of project to differentiate between bins, sequences and clips
for (var i = 0; i < rootFolder.children.numItems; i++) {
// log('\nroot item at ' + i + " is " + rootFolder.children[i].name + " of type " + rootFolder.children[i].type);
var item = rootFolder.children[i];
// log('item has video tracks? ' + item.videoTracks);
if (item.type == 2) { //bin
binCounter++;
walkBins(item, 'root', binCounter);
} else if (item.type == 1 && !item.getMediaPath()) //sequence OR other type of object
{
// log('\nObject of type 1 in root: ' + typeof item + ' ' + item.name);
if (objectIsSequence(item)) { //objects of type can also be other objects such as titles, so check if it really is a sequence
// log('\nSequence in root: ' + item.name );
rootSeqCounter++;
var seq = qe.project.getSequenceAt(rootSeqCounter);
// log('\nSequence in root, guid: ' + seq );
for (var property in seq) {
if (seq.hasOwnProperty(property)) {
// log('\nSequence in root: ' + seq );
//log('qe sequence prop: ' + property );
}
}
getClipNames(seq, sequences);
}
}
}
function objectIsSequence() {
var isSequence = false;
for (var s = 0; s < app.project.sequences.numSequences; s++)
if (item.name == app.project.sequences[s].name)
isSequence = true;
return isSequence
}
// walk through bins recursively
function walkBins(item, source, rootBinCounter) {
app.enableQE();
// log('\nget clips for bin ' + item.name );
var bin;
if (source == 'root') //bin in root folder
bin = qe.project.getBinAt(rootBinCounter);
else // bin in other bin
bin = item;
for (var i = 0; i < bin.numBins; i++) //if bin contains bin(s) walk through them
walkBins(bin.getBinAt(i));
// log('Bin ' + bin.name + ' has ' + bin.numSequences + ' sequences ' );
var seqCounter = -1;
for (var j = 0; j < bin.numSequences; j++) {
//if(objectIsSequence(item)) {//objects of type can also be other objects such as titles, so check if it really is a sequence?
//not needed because getSequenceAt apparently only looks at sequences already?
var seq = bin.getSequenceAt(j);
// log('\nSequence in bin, guid: ' + seq.guid );
getClipNames(seq, sequences);
//}
}
}
//walk through sequences and video & audiotracks to find clip names in sequences
function getClipNames(seq, sequences) {
for (var k = 0; k < sequences.length; k++) {
// log('getClipNames seq.guid ' + seq.guid );
//log(' getClipNames sequences[k].id ' + sequences[k].sequenceID );
if (seq.guid == sequences[k].sequenceID) {
// log('Sequence ' + seq.name + ' has ' + app.project.sequences[k].videoTracks.numTracks +' video tracks' );
// log('Sequence ' + seq.name + ' has ' + app.project.sequences[k].audioTracks.numTracks +' audio tracks' );
//VIDEO CLIPS IN SEQUENCES
for (var l = 0; l < sequences[k].videoTracks.numTracks; l++) {
var videoTrack = seq.getVideoTrackAt(l);
// log(seq.name + ' has video track '+ videoTrack.name + ' at index ' + l);
var clipCounter = 0;
var numOfClips = app.project.sequences[k].videoTracks[l].clips.numTracks;
// log('\n' + bin.name + ' ' + seq.name + ' ' + videoTrack.name + ' has ' + numOfClips + ' clips');
for (var m = 0; m < numOfClips; m++) {
var clip = app.project.sequences[k].videoTracks[l].clips[m];
// log('clips in video tracks: ' + m + ' - ' + clip); //TrackItem, doesn't have name property
//if a clip was deleted and another one added, the index of the new one is one or more higher
while (clipCounter < numOfClips) //undefined because of old clips
{
if (videoTrack.getItemAt(m).name) {
clipCounter++;
// log('getClipNames ' + seq.name + ' ' + videoTrack.name + ' has ' + videoTrack.getItemAt(m).name); //Object
for (var s = 0; s < sequences.length; s++)
if (seq.guid == sequences[s].sequenceID)
sequences[s].clipNames.push(videoTrack.getItemAt(m).name);
}
m++;
}
}
}
// log('jsx after video loop clipsInSequences:' + clipsInSequences);
//AUDIO CLIPS IN SEQUENCES
for (var l = 0; l < sequences[k].audioTracks.numTracks; l++) {
var audioTrack = seq.getAudioTrackAt(l);
//log(bin.name + ' ' + seq.name + ' has audio track '+ audioTrack.name + ' at index ' + l);
//log('\n' + bin.name + ' ' + seq.name + ' ' + audioTrack.name + ' has ' + app.project.sequences[k].audioTracks[l].clips.numTracks + ' clips');
var clipCounter = 0;
var numOfClips = app.project.sequences[k].audioTracks[l].clips.numTracks;
for (var m = 0; m < numOfClips; m++) {
var clip = app.project.sequences[k].audioTracks[l].clips[m];
// log('clips in audio tracks: ' + m + ' - ' + clip);
//if a clip was deleted and another one added, the index of the new one is one or more higher
while (clipCounter < numOfClips) //undefined because of old clips
{
if (audioTrack.getItemAt(m).name) {
clipCounter++;
// log(seq.name + ' ' + audioTrack.name + ' has ' + audioTrack.getItemAt(m).name);
for (var s = 0; s < sequences.length; s++)
if (seq.guid == sequences[s].sequenceID)
sequences[s].clipNames.push(audioTrack.getItemAt(m).name);
}
m++;
}
}
}
} //end if
} //end for
} //end getClipNames
log('sequences returned:' + sequences);
//return result to ReplaceService.js
var obj = {
data: sequences
};
// log('jsx getClipNames obj:' + obj);
return JSON.stringify(obj);
}
//getSequenceItems();
function getProjectItems() {
projectItems = [];
app.enableQE();
qe.project.init();
var rootFolder = app.project.rootItem;
//walk through root folder of project to differentiate between bins, sequences and clips
for (var i = 0; i < rootFolder.children.numItems; i++) {
// log('\nroot item at ' + i + " is of type " + rootFolder.children[i].type);
var item = rootFolder.children[i];
if (item.type == 2) { //bin
// log('\n' );
walkBins(item);
} else if (item.type == 1 && item.getMediaPath()) //clip in root
{
// log('Root folder has ' + item + ' ' + item.name);
projectItems.push(item);
}
}
// walk through bins recursively
function walkBins(bin) {
app.enableQE();
// $.writeln(bin.name + ' has ' + bin.children.numItems);
for (var i = 0; i < bin.children.numItems; i++) {
var object = bin.children[i];
// log(bin.name + ' has ' + object + ' ' + object.name + ' of type ' + object.type + ' and has mediapath ' + object.getMediaPath() );
if (object.type == 2) { //bin
// log(object.name + ' has ' + object.children.numItems );
for (var j = 0; j < object.children.numItems; j++) {
var obj = object.children[j];
if (obj.type == 1 && obj.getMediaPath()) { //clip in sub bin
//log(object.name + ' has ' + obj + ' ' + obj.name );
projectItems.push(obj);
} else if (obj.type == 2) { //bin
walkBins(obj);
}
}
} else if (object.type == 1 && object.getMediaPath()) //clip in bin in root
{
// log(bin.name + ' has ' + object + ' ' + object.name );
projectItems.push(object);
}
}
}
log('\nprojectItems:' + projectItems.length + ' ' + projectItems);
return projectItems;
}
function replaceClips(obj) {
log('num of projectItems:' + projectItems.length);
var hiresVOs = obj.hiresOnFS;
for (var i = 0; i < hiresVOs.length; i++) {
log('hires vo name: ' + hiresVOs[i].name);
log('hires vo id: ' + hiresVOs[i].id);
log('hires vo path: ' + hiresVOs[i].path);
log('hires vo replace: ' + hiresVOs[i].replace);
for (var j = 0; j < projectItems.length; j++) {
// log('projectItem id: ' + projectItems[j].name.split(' ')[0] + ' ' + hiresVOs[i].id + ' can change path ' + projectItems[j].canChangeMediaPath() );
if (projectItems[j].name.split(' ')[0] == hiresVOs[i].id && hiresVOs[i].replace && projectItems[j].canChangeMediaPath()) {
log('replace: ' + projectItems[j].name + ' with ' + hiresVOs[i].name);
projectItems[j].name = hiresVOs[i].name;
projectItems[j].changeMediaPath(hiresVOs[i].path);
}
}
}
}
function log(info) {
try {
var xLib = new ExternalObject("lib:\PlugPlugExternalObject");
} catch (e) {
alert(e);
}
if (xLib) {
var eventObj = new CSXSEvent();
eventObj.type = "LogEvent";
eventObj.data = info;
eventObj.dispatch();
}
}
function message(msg) {
$.writeln(msg); // Using '$' object will invoke ExtendScript Toolkit, if installed.
}

View file

@ -1 +0,0 @@
application/vnd.adobe.air-ucf-package+zip

View file

@ -1,83 +0,0 @@
#!/usr/bin/env node
'use strict';
var nopt = require('nopt');
var path = require('path');
var version = require('../package.json').version;
var knownOptions = {
'list': Boolean,
'extract': Boolean,
'path': path
};
var shortcuts = {
'x': ['--extract'],
'l': ['--list'],
'p': ['--path'],
'v': ['--version']
};
var parsedOptions = nopt(knownOptions, shortcuts);
var pad = function (string, length) {
string = String(string);
if (length <= string.length) {
return string;
}
return string + (new Array(length - string.length).join(' '));
};
var octal = function (number, digits) {
var result = '';
for (var i = 0; i < digits; i++) {
result = (number & 0x07) + result;
number >>= 3;
}
return result;
};
var DecompressZip = require('../lib/decompress-zip');
var zip = new DecompressZip(parsedOptions.argv.remain[0]);
zip.on('file', function (file) {
console.log([octal(file.mode, 4), pad(file.type, 13), pad(file.compressedSize, 10), pad(file.uncompressedSize, 10), file.path].join(' '));
});
zip.on('list', function (fileList) {
// console.log(fileList);
});
zip.on('extract', function (result) {
console.log(result);
});
zip.on('error', function (error) {
console.error(error.message, error.stack);
});
if (parsedOptions.version) {
console.log('version ' + version);
} else if (parsedOptions.list) {
console.log('Mode Type Zip size Full size Path');
console.log('---- ---- -------- --------- ----');
zip.list();
} else if (parsedOptions.extract) {
var options = {};
if (parsedOptions.path) {
options.path = parsedOptions.path;
}
zip.extract(options);
} else {
console.log('Usage: decompress-zip <options> <file>');
console.log(' -x, --extract extract the given file');
console.log(' -l, --list list the contents of the given file');
console.log(' -v, --version show the version');
console.log(' -p, --path <path> extract the file into <path>');
console.log(' -h, --help show this message');
}

View file

@ -1,33 +0,0 @@
#!/usr/bin/env node
var mkdirp = require('../');
var minimist = require('minimist');
var fs = require('fs');
var argv = minimist(process.argv.slice(2), {
alias: { m: 'mode', h: 'help' },
string: [ 'mode' ]
});
if (argv.help) {
fs.createReadStream(__dirname + '/usage.txt').pipe(process.stdout);
return;
}
var paths = argv._.slice();
var mode = argv.mode ? parseInt(argv.mode, 8) : undefined;
(function next () {
if (paths.length === 0) return;
var p = paths.shift();
if (mode === undefined) mkdirp(p, cb)
else mkdirp(p, mode, cb)
function cb (err) {
if (err) {
console.error(err.message);
process.exit(1);
}
else next();
}
})();

View file

@ -1,54 +0,0 @@
#!/usr/bin/env node
var nopt = require("../lib/nopt")
, path = require("path")
, types = { num: Number
, bool: Boolean
, help: Boolean
, list: Array
, "num-list": [Number, Array]
, "str-list": [String, Array]
, "bool-list": [Boolean, Array]
, str: String
, clear: Boolean
, config: Boolean
, length: Number
, file: path
}
, shorthands = { s: [ "--str", "astring" ]
, b: [ "--bool" ]
, nb: [ "--no-bool" ]
, tft: [ "--bool-list", "--no-bool-list", "--bool-list", "true" ]
, "?": ["--help"]
, h: ["--help"]
, H: ["--help"]
, n: [ "--num", "125" ]
, c: ["--config"]
, l: ["--length"]
, f: ["--file"]
}
, parsed = nopt( types
, shorthands
, process.argv
, 2 )
console.log("parsed", parsed)
if (parsed.help) {
console.log("")
console.log("nopt cli tester")
console.log("")
console.log("types")
console.log(Object.keys(types).map(function M (t) {
var type = types[t]
if (Array.isArray(type)) {
return [t, type.map(function (type) { return type.name })]
}
return [t, type && type.name]
}).reduce(function (s, i) {
s[i[0]] = i[1]
return s
}, {}))
console.log("")
console.log("shorthands")
console.log(shorthands)
}

View file

@ -1,50 +0,0 @@
#!/usr/bin/env node
var rimraf = require('./')
var help = false
var dashdash = false
var noglob = false
var args = process.argv.slice(2).filter(function(arg) {
if (dashdash)
return !!arg
else if (arg === '--')
dashdash = true
else if (arg === '--no-glob' || arg === '-G')
noglob = true
else if (arg === '--glob' || arg === '-g')
noglob = false
else if (arg.match(/^(-+|\/)(h(elp)?|\?)$/))
help = true
else
return !!arg
})
if (help || args.length === 0) {
// If they didn't ask for help, then this is not a "success"
var log = help ? console.log : console.error
log('Usage: rimraf <path> [<path> ...]')
log('')
log(' Deletes all files and folders at "path" recursively.')
log('')
log('Options:')
log('')
log(' -h, --help Display this usage info')
log(' -G, --no-glob Do not expand glob patterns in arguments')
log(' -g, --glob Expand glob patterns in arguments (default)')
process.exit(help ? 0 : 1)
} else
go(0)
function go (n) {
if (n >= args.length)
return
var options = {}
if (noglob)
options = { glob: false }
rimraf(args[n], options, function (er) {
if (er)
throw er
go(n+1)
})
}

View file

@ -1,201 +0,0 @@
#!/usr/bin/env node
// -*- mode: js -*-
// vim: set filetype=javascript :
// Copyright 2015 Joyent, Inc. All rights reserved.
var dashdash = require('dashdash');
var sshpk = require('../lib/index');
var fs = require('fs');
var path = require('path');
var tty = require('tty');
var readline = require('readline');
var getPassword = require('getpass').getPass;
var options = [
{
names: ['outformat', 't'],
type: 'string',
help: 'Output format'
},
{
names: ['informat', 'T'],
type: 'string',
help: 'Input format'
},
{
names: ['file', 'f'],
type: 'string',
help: 'Input file name (default stdin)'
},
{
names: ['out', 'o'],
type: 'string',
help: 'Output file name (default stdout)'
},
{
names: ['private', 'p'],
type: 'bool',
help: 'Produce a private key as output'
},
{
names: ['derive', 'd'],
type: 'string',
help: 'Output a new key derived from this one, with given algo'
},
{
names: ['identify', 'i'],
type: 'bool',
help: 'Print key metadata instead of converting'
},
{
names: ['comment', 'c'],
type: 'string',
help: 'Set key comment, if output format supports'
},
{
names: ['help', 'h'],
type: 'bool',
help: 'Shows this help text'
}
];
if (require.main === module) {
var parser = dashdash.createParser({
options: options
});
try {
var opts = parser.parse(process.argv);
} catch (e) {
console.error('sshpk-conv: error: %s', e.message);
process.exit(1);
}
if (opts.help || opts._args.length > 1) {
var help = parser.help({}).trimRight();
console.error('sshpk-conv: converts between SSH key formats\n');
console.error(help);
console.error('\navailable formats:');
console.error(' - pem, pkcs1 eg id_rsa');
console.error(' - ssh eg id_rsa.pub');
console.error(' - pkcs8 format you want for openssl');
console.error(' - openssh like output of ssh-keygen -o');
console.error(' - rfc4253 raw OpenSSH wire format');
process.exit(1);
}
/*
* Key derivation can only be done on private keys, so use of the -d
* option necessarily implies -p.
*/
if (opts.derive)
opts.private = true;
var inFile = process.stdin;
var inFileName = 'stdin';
var inFilePath;
if (opts.file) {
inFilePath = opts.file;
} else if (opts._args.length === 1) {
inFilePath = opts._args[0];
}
if (inFilePath)
inFileName = path.basename(inFilePath);
try {
if (inFilePath) {
fs.accessSync(inFilePath, fs.R_OK);
inFile = fs.createReadStream(inFilePath);
}
} catch (e) {
console.error('sshpk-conv: error opening input file' +
': ' + e.name + ': ' + e.message);
process.exit(1);
}
var outFile = process.stdout;
try {
if (opts.out && !opts.identify) {
fs.accessSync(path.dirname(opts.out), fs.W_OK);
outFile = fs.createWriteStream(opts.out);
}
} catch (e) {
console.error('sshpk-conv: error opening output file' +
': ' + e.name + ': ' + e.message);
process.exit(1);
}
var bufs = [];
inFile.on('readable', function () {
var data;
while ((data = inFile.read()))
bufs.push(data);
});
var parseOpts = {};
parseOpts.filename = inFileName;
inFile.on('end', function processKey() {
var buf = Buffer.concat(bufs);
var fmt = 'auto';
if (opts.informat)
fmt = opts.informat;
var f = sshpk.parseKey;
if (opts.private)
f = sshpk.parsePrivateKey;
try {
var key = f(buf, fmt, parseOpts);
} catch (e) {
if (e.name === 'KeyEncryptedError') {
getPassword(function (err, pw) {
if (err) {
console.log('sshpk-conv: ' +
err.name + ': ' +
err.message);
process.exit(1);
}
parseOpts.passphrase = pw;
processKey();
});
return;
}
console.error('sshpk-conv: ' +
e.name + ': ' + e.message);
process.exit(1);
}
if (opts.derive)
key = key.derive(opts.derive);
if (opts.comment)
key.comment = opts.comment;
if (!opts.identify) {
fmt = undefined;
if (opts.outformat)
fmt = opts.outformat;
outFile.write(key.toBuffer(fmt));
if (fmt === 'ssh' ||
(!opts.private && fmt === undefined))
outFile.write('\n');
outFile.once('drain', function () {
process.exit(0);
});
} else {
var kind = 'public';
if (sshpk.PrivateKey.isPrivateKey(key))
kind = 'private';
console.log('%s: a %d bit %s %s key', inFileName,
key.size, key.type.toUpperCase(), kind);
if (key.type === 'ecdsa')
console.log('ECDSA curve: %s', key.curve);
if (key.comment)
console.log('Comment: %s', key.comment);
console.log('Fingerprint:');
console.log(' ' + key.fingerprint().toString());
console.log(' ' + key.fingerprint('md5').toString());
process.exit(0);
}
});
}

View file

@ -1,191 +0,0 @@
#!/usr/bin/env node
// -*- mode: js -*-
// vim: set filetype=javascript :
// Copyright 2015 Joyent, Inc. All rights reserved.
var dashdash = require('dashdash');
var sshpk = require('../lib/index');
var fs = require('fs');
var path = require('path');
var getPassword = require('getpass').getPass;
var options = [
{
names: ['hash', 'H'],
type: 'string',
help: 'Hash algorithm (sha1, sha256, sha384, sha512)'
},
{
names: ['verbose', 'v'],
type: 'bool',
help: 'Display verbose info about key and hash used'
},
{
names: ['identity', 'i'],
type: 'string',
help: 'Path to key to use'
},
{
names: ['file', 'f'],
type: 'string',
help: 'Input filename'
},
{
names: ['out', 'o'],
type: 'string',
help: 'Output filename'
},
{
names: ['format', 't'],
type: 'string',
help: 'Signature format (asn1, ssh, raw)'
},
{
names: ['binary', 'b'],
type: 'bool',
help: 'Output raw binary instead of base64'
},
{
names: ['help', 'h'],
type: 'bool',
help: 'Shows this help text'
}
];
var parseOpts = {};
if (require.main === module) {
var parser = dashdash.createParser({
options: options
});
try {
var opts = parser.parse(process.argv);
} catch (e) {
console.error('sshpk-sign: error: %s', e.message);
process.exit(1);
}
if (opts.help || opts._args.length > 1) {
var help = parser.help({}).trimRight();
console.error('sshpk-sign: sign data using an SSH key\n');
console.error(help);
process.exit(1);
}
if (!opts.identity) {
var help = parser.help({}).trimRight();
console.error('sshpk-sign: the -i or --identity option ' +
'is required\n');
console.error(help);
process.exit(1);
}
var keyData = fs.readFileSync(opts.identity);
parseOpts.filename = opts.identity;
run();
}
function run() {
var key;
try {
key = sshpk.parsePrivateKey(keyData, 'auto', parseOpts);
} catch (e) {
if (e.name === 'KeyEncryptedError') {
getPassword(function (err, pw) {
parseOpts.passphrase = pw;
run();
});
return;
}
console.error('sshpk-sign: error loading private key "' +
opts.identity + '": ' + e.name + ': ' + e.message);
process.exit(1);
}
var hash = opts.hash || key.defaultHashAlgorithm();
var signer;
try {
signer = key.createSign(hash);
} catch (e) {
console.error('sshpk-sign: error creating signer: ' +
e.name + ': ' + e.message);
process.exit(1);
}
if (opts.verbose) {
console.error('sshpk-sign: using %s-%s with a %d bit key',
key.type, hash, key.size);
}
var inFile = process.stdin;
var inFileName = 'stdin';
var inFilePath;
if (opts.file) {
inFilePath = opts.file;
} else if (opts._args.length === 1) {
inFilePath = opts._args[0];
}
if (inFilePath)
inFileName = path.basename(inFilePath);
try {
if (inFilePath) {
fs.accessSync(inFilePath, fs.R_OK);
inFile = fs.createReadStream(inFilePath);
}
} catch (e) {
console.error('sshpk-sign: error opening input file' +
': ' + e.name + ': ' + e.message);
process.exit(1);
}
var outFile = process.stdout;
try {
if (opts.out && !opts.identify) {
fs.accessSync(path.dirname(opts.out), fs.W_OK);
outFile = fs.createWriteStream(opts.out);
}
} catch (e) {
console.error('sshpk-sign: error opening output file' +
': ' + e.name + ': ' + e.message);
process.exit(1);
}
inFile.pipe(signer);
inFile.on('end', function () {
var sig;
try {
sig = signer.sign();
} catch (e) {
console.error('sshpk-sign: error signing data: ' +
e.name + ': ' + e.message);
process.exit(1);
}
var fmt = opts.format || 'asn1';
var output;
try {
output = sig.toBuffer(fmt);
if (!opts.binary)
output = output.toString('base64');
} catch (e) {
console.error('sshpk-sign: error converting signature' +
' to ' + fmt + ' format: ' + e.name + ': ' +
e.message);
process.exit(1);
}
outFile.write(output);
if (!opts.binary)
outFile.write('\n');
outFile.once('drain', function () {
process.exit(0);
});
});
}

View file

@ -1,166 +0,0 @@
#!/usr/bin/env node
// -*- mode: js -*-
// vim: set filetype=javascript :
// Copyright 2015 Joyent, Inc. All rights reserved.
var dashdash = require('dashdash');
var sshpk = require('../lib/index');
var fs = require('fs');
var path = require('path');
var options = [
{
names: ['hash', 'H'],
type: 'string',
help: 'Hash algorithm (sha1, sha256, sha384, sha512)'
},
{
names: ['verbose', 'v'],
type: 'bool',
help: 'Display verbose info about key and hash used'
},
{
names: ['identity', 'i'],
type: 'string',
help: 'Path to (public) key to use'
},
{
names: ['file', 'f'],
type: 'string',
help: 'Input filename'
},
{
names: ['format', 't'],
type: 'string',
help: 'Signature format (asn1, ssh, raw)'
},
{
names: ['signature', 's'],
type: 'string',
help: 'base64-encoded signature data'
},
{
names: ['help', 'h'],
type: 'bool',
help: 'Shows this help text'
}
];
if (require.main === module) {
var parser = dashdash.createParser({
options: options
});
try {
var opts = parser.parse(process.argv);
} catch (e) {
console.error('sshpk-verify: error: %s', e.message);
process.exit(3);
}
if (opts.help || opts._args.length > 1) {
var help = parser.help({}).trimRight();
console.error('sshpk-verify: verify data signed with an SSH key\n');
console.error(help);
process.exit(3);
}
if (!opts.identity) {
var help = parser.help({}).trimRight();
console.error('sshpk-verify: the -i or --identity option ' +
'is required\n');
console.error(help);
process.exit(3);
}
if (!opts.signature) {
var help = parser.help({}).trimRight();
console.error('sshpk-verify: the -s or --signature option ' +
'is required\n');
console.error(help);
process.exit(3);
}
var keyData = fs.readFileSync(opts.identity);
var key;
try {
key = sshpk.parseKey(keyData);
} catch (e) {
console.error('sshpk-verify: error loading key "' +
opts.identity + '": ' + e.name + ': ' + e.message);
process.exit(2);
}
var fmt = opts.format || 'asn1';
var sigData = new Buffer(opts.signature, 'base64');
var sig;
try {
sig = sshpk.parseSignature(sigData, key.type, fmt);
} catch (e) {
console.error('sshpk-verify: error parsing signature: ' +
e.name + ': ' + e.message);
process.exit(2);
}
var hash = opts.hash || key.defaultHashAlgorithm();
var verifier;
try {
verifier = key.createVerify(hash);
} catch (e) {
console.error('sshpk-verify: error creating verifier: ' +
e.name + ': ' + e.message);
process.exit(2);
}
if (opts.verbose) {
console.error('sshpk-verify: using %s-%s with a %d bit key',
key.type, hash, key.size);
}
var inFile = process.stdin;
var inFileName = 'stdin';
var inFilePath;
if (opts.file) {
inFilePath = opts.file;
} else if (opts._args.length === 1) {
inFilePath = opts._args[0];
}
if (inFilePath)
inFileName = path.basename(inFilePath);
try {
if (inFilePath) {
fs.accessSync(inFilePath, fs.R_OK);
inFile = fs.createReadStream(inFilePath);
}
} catch (e) {
console.error('sshpk-verify: error opening input file' +
': ' + e.name + ': ' + e.message);
process.exit(2);
}
inFile.pipe(verifier);
inFile.on('end', function () {
var ret;
try {
ret = verifier.verify(sig);
} catch (e) {
console.error('sshpk-verify: error verifying data: ' +
e.name + ': ' + e.message);
process.exit(1);
}
if (ret) {
console.error('OK');
process.exit(0);
}
console.error('NOT OK');
process.exit(1);
});
}

View file

@ -1,50 +0,0 @@
#!/usr/bin/env node
var assert = require('assert');
function usage() {
console.log('Usage:');
console.log(' uuid');
console.log(' uuid v1');
console.log(' uuid v4');
console.log(' uuid v5 <name> <namespace uuid>');
console.log(' uuid --help');
console.log('\nNote: <namespace uuid> may be "URL" or "DNS" to use the corresponding UUIDs defined by RFC4122');
}
var args = process.argv.slice(2);
if (args.indexOf('--help') >= 0) {
usage();
process.exit(0);
}
var version = args.shift() || 'v4';
switch (version) {
case 'v1':
var uuidV1 = require('../v1');
console.log(uuidV1());
break;
case 'v4':
var uuidV4 = require('../v4');
console.log(uuidV4());
break;
case 'v5':
var uuidV5 = require('../v5');
var name = args.shift();
var namespace = args.shift();
assert(name != null, 'v5 name not specified');
assert(namespace != null, 'v5 namespace not specified');
if (namespace == 'URL') namespace = uuidV5.URL;
if (namespace == 'DNS') namespace = uuidV5.DNS;
console.log(uuidV5(name, namespace));
break;
default:
usage();
process.exit(1);
}

View file

@ -1 +0,0 @@
*.DS_Store

View file

@ -1,15 +0,0 @@
The ISC License
Copyright (c) Isaac Z. Schlueter and Contributors
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

View file

@ -1,23 +0,0 @@
# abbrev-js
Just like [ruby's Abbrev](http://apidock.com/ruby/Abbrev).
Usage:
var abbrev = require("abbrev");
abbrev("foo", "fool", "folding", "flop");
// returns:
{ fl: 'flop'
, flo: 'flop'
, flop: 'flop'
, fol: 'folding'
, fold: 'folding'
, foldi: 'folding'
, foldin: 'folding'
, folding: 'folding'
, foo: 'foo'
, fool: 'fool'
}
This is handy for command-line scripts, or other cases where you want to be able to accept shorthands.

View file

@ -1,61 +0,0 @@
module.exports = exports = abbrev.abbrev = abbrev
abbrev.monkeyPatch = monkeyPatch
function monkeyPatch () {
Object.defineProperty(Array.prototype, 'abbrev', {
value: function () { return abbrev(this) },
enumerable: false, configurable: true, writable: true
})
Object.defineProperty(Object.prototype, 'abbrev', {
value: function () { return abbrev(Object.keys(this)) },
enumerable: false, configurable: true, writable: true
})
}
function abbrev (list) {
if (arguments.length !== 1 || !Array.isArray(list)) {
list = Array.prototype.slice.call(arguments, 0)
}
for (var i = 0, l = list.length, args = [] ; i < l ; i ++) {
args[i] = typeof list[i] === "string" ? list[i] : String(list[i])
}
// sort them lexicographically, so that they're next to their nearest kin
args = args.sort(lexSort)
// walk through each, seeing how much it has in common with the next and previous
var abbrevs = {}
, prev = ""
for (var i = 0, l = args.length ; i < l ; i ++) {
var current = args[i]
, next = args[i + 1] || ""
, nextMatches = true
, prevMatches = true
if (current === next) continue
for (var j = 0, cl = current.length ; j < cl ; j ++) {
var curChar = current.charAt(j)
nextMatches = nextMatches && curChar === next.charAt(j)
prevMatches = prevMatches && curChar === prev.charAt(j)
if (!nextMatches && !prevMatches) {
j ++
break
}
}
prev = current
if (j === cl) {
abbrevs[current] = current
continue
}
for (var a = current.substr(0, j) ; j <= cl ; j ++) {
abbrevs[a] = current
a += current.charAt(j)
}
}
return abbrevs
}
function lexSort (a, b) {
return a === b ? 0 : a > b ? 1 : -1
}

View file

@ -1,93 +0,0 @@
{
"_args": [
[
{
"raw": "abbrev@1",
"scope": null,
"escapedName": "abbrev",
"name": "abbrev",
"rawSpec": "1",
"spec": ">=1.0.0 <2.0.0",
"type": "range"
},
"/Library/Application Support/Adobe/CEP/extensions/aeft/node_modules/nopt"
]
],
"_from": "abbrev@>=1.0.0 <2.0.0",
"_id": "abbrev@1.1.0",
"_inCache": true,
"_location": "/abbrev",
"_nodeVersion": "8.0.0-pre",
"_npmOperationalInternal": {
"host": "packages-12-west.internal.npmjs.com",
"tmp": "tmp/abbrev-1.1.0.tgz_1487054000015_0.9229173036292195"
},
"_npmUser": {
"name": "isaacs",
"email": "i@izs.me"
},
"_npmVersion": "4.3.0",
"_phantomChildren": {},
"_requested": {
"raw": "abbrev@1",
"scope": null,
"escapedName": "abbrev",
"name": "abbrev",
"rawSpec": "1",
"spec": ">=1.0.0 <2.0.0",
"type": "range"
},
"_requiredBy": [
"/nopt",
"/touch/nopt"
],
"_resolved": "https://registry.npmjs.org/abbrev/-/abbrev-1.1.0.tgz",
"_shasum": "d0554c2256636e2f56e7c2e5ad183f859428d81f",
"_shrinkwrap": null,
"_spec": "abbrev@1",
"_where": "/Library/Application Support/Adobe/CEP/extensions/aeft/node_modules/nopt",
"author": {
"name": "Isaac Z. Schlueter",
"email": "i@izs.me"
},
"bugs": {
"url": "https://github.com/isaacs/abbrev-js/issues"
},
"dependencies": {},
"description": "Like ruby's abbrev module, but in js",
"devDependencies": {
"tap": "^10.1"
},
"directories": {},
"dist": {
"shasum": "d0554c2256636e2f56e7c2e5ad183f859428d81f",
"tarball": "https://registry.npmjs.org/abbrev/-/abbrev-1.1.0.tgz"
},
"files": [
"abbrev.js"
],
"gitHead": "7136d4d95449dc44115d4f78b80ec907724f64e0",
"homepage": "https://github.com/isaacs/abbrev-js#readme",
"license": "ISC",
"main": "abbrev.js",
"maintainers": [
{
"name": "isaacs",
"email": "i@izs.me"
}
],
"name": "abbrev",
"optionalDependencies": {},
"readme": "ERROR: No README data found!",
"repository": {
"type": "git",
"url": "git+ssh://git@github.com/isaacs/abbrev-js.git"
},
"scripts": {
"postpublish": "git push origin --all; git push origin --tags",
"postversion": "npm publish",
"preversion": "npm test",
"test": "tap test.js --100"
},
"version": "1.1.0"
}

View file

@ -1,20 +0,0 @@
var Ajv = require('ajv');
var ajv = Ajv({allErrors: true});
var schema = {
"properties": {
"foo": { "type": "string" },
"bar": { "type": "number", "maximum": 3 }
}
};
var validate = ajv.compile(schema);
test({"foo": "abc", "bar": 2});
test({"foo": 2, "bar": 4});
function test(data) {
var valid = validate(data);
if (valid) console.log('Valid!');
else console.log('Invalid: ' + ajv.errorsText(validate.errors));
}

View file

@ -1,22 +0,0 @@
The MIT License (MIT)
Copyright (c) 2015 Evgeny Poberezkin
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View file

@ -1,284 +0,0 @@
declare var ajv: {
(options?: ajv.Options): ajv.Ajv;
new (options?: ajv.Options): ajv.Ajv;
}
declare namespace ajv {
interface Ajv {
/**
* Validate data using schema
* Schema will be compiled and cached (using serialized JSON as the key; [json-stable-stringify](https://github.com/substack/json-stable-stringify) is used to serialize).
* @param {String|Object} schemaKeyRef key, ref or schema object
* @param {Any} data to be validated
* @return {Boolean} validation result. Errors from the last validation will be available in `ajv.errors` (and also in compiled schema: `schema.errors`).
*/
validate(schemaKeyRef: Object | string, data: any): boolean;
/**
* Create validating function for passed schema.
* @param {Object} schema schema object
* @return {Function} validating function
*/
compile(schema: Object): ValidateFunction;
/**
* Creates validating function for passed schema with asynchronous loading of missing schemas.
* `loadSchema` option should be a function that accepts schema uri and node-style callback.
* @this Ajv
* @param {Object} schema schema object
* @param {Function} callback node-style callback, it is always called with 2 parameters: error (or null) and validating function.
*/
compileAsync(schema: Object, callback: (err: Error, validate: ValidateFunction) => any): void;
/**
* Adds schema to the instance.
* @param {Object|Array} schema schema or array of schemas. If array is passed, `key` and other parameters will be ignored.
* @param {String} key Optional schema key. Can be passed to `validate` method instead of schema object or id/ref. One schema per instance can have empty `id` and `key`.
*/
addSchema(schema: Array<Object> | Object, key?: string): void;
/**
* Add schema that will be used to validate other schemas
* options in META_IGNORE_OPTIONS are always set to false
* @param {Object} schema schema object
* @param {String} key optional schema key
*/
addMetaSchema(schema: Object, key?: string): void;
/**
* Validate schema
* @param {Object} schema schema to validate
* @return {Boolean} true if schema is valid
*/
validateSchema(schema: Object): boolean;
/**
* Get compiled schema from the instance by `key` or `ref`.
* @param {String} keyRef `key` that was passed to `addSchema` or full schema reference (`schema.id` or resolved id).
* @return {Function} schema validating function (with property `schema`).
*/
getSchema(keyRef: string): ValidateFunction;
/**
* Remove cached schema(s).
* If no parameter is passed, all schemas except meta-schemas are removed.
* If a RegExp is passed, all schemas whose key/id matches the pattern, except meta-schemas, are removed.
* A schema can be removed even while other schemas reference it, because referencing schemas hold their own local references.
* @param {String|Object|RegExp} schemaKeyRef key, ref, pattern to match key/ref or schema object
*/
removeSchema(schemaKeyRef?: Object | string | RegExp): void;
/**
* Add custom format
* @param {String} name format name
* @param {String|RegExp|Function} format string is converted to RegExp; function should return boolean (true when valid)
*/
addFormat(name: string, format: FormatValidator | FormatDefinition): void;
/**
* Define custom keyword
* @this Ajv
* @param {String} keyword custom keyword, should be a valid identifier, should be different from all standard, custom and macro keywords.
* @param {Object} definition keyword definition object with properties `type` (type(s) which the keyword applies to), `validate` or `compile`.
*/
addKeyword(keyword: string, definition: KeywordDefinition): void;
/**
* Get keyword definition
* @this Ajv
* @param {String} keyword pre-defined or custom keyword.
* @return {Object|Boolean} custom keyword definition, `true` if it is a predefined keyword, `false` otherwise.
*/
getKeyword(keyword: string): Object | boolean;
/**
* Remove keyword
* @this Ajv
* @param {String} keyword pre-defined or custom keyword.
*/
removeKeyword(keyword: string): void;
/**
* Convert array of error message objects to string
* @param {Array<Object>} errors optional array of validation errors, if not passed errors from the instance are used.
* @param {Object} options optional options with properties `separator` and `dataVar`.
* @return {String} human readable string with all errors descriptions
*/
errorsText(errors?: Array<ErrorObject>, options?: ErrorsTextOptions): string;
errors?: Array<ErrorObject>;
}
interface Thenable <R> {
then <U> (onFulfilled?: (value: R) => U | Thenable<U>, onRejected?: (error: any) => U | Thenable<U>): Thenable<U>;
}
interface ValidateFunction {
(
data: any,
dataPath?: string,
parentData?: Object | Array<any>,
parentDataProperty?: string | number,
rootData?: Object | Array<any>
): boolean | Thenable<boolean>;
errors?: Array<ErrorObject>;
schema?: Object;
}
interface Options {
v5?: boolean;
allErrors?: boolean;
verbose?: boolean;
jsonPointers?: boolean;
uniqueItems?: boolean;
unicode?: boolean;
format?: string;
formats?: Object;
unknownFormats?: boolean | string | Array<string>;
schemas?: Array<Object> | Object;
ownProperties?: boolean;
missingRefs?: boolean | string;
extendRefs?: boolean | string;
loadSchema?: (uri: string, cb: (err: Error, schema: Object) => any) => any;
removeAdditional?: boolean | string;
useDefaults?: boolean | string;
coerceTypes?: boolean | string;
async?: boolean | string;
transpile?: string | ((code: string) => string);
meta?: boolean | Object;
validateSchema?: boolean | string;
addUsedSchema?: boolean;
inlineRefs?: boolean | number;
passContext?: boolean;
loopRequired?: number;
multipleOfPrecision?: number;
errorDataPath?: string;
messages?: boolean;
sourceCode?: boolean;
beautify?: boolean | Object;
cache?: Object;
}
type FormatValidator = string | RegExp | ((data: string) => boolean);
interface FormatDefinition {
validate: FormatValidator;
compare: (data1: string, data2: string) => number;
async?: boolean;
}
interface KeywordDefinition {
type?: string | Array<string>;
async?: boolean;
errors?: boolean | string;
// schema: false makes validate not expect the keyword schema (ValidateFunction)
schema?: boolean;
modifying?: boolean;
valid?: boolean;
// one and only one of the following properties should be present
validate?: ValidateFunction | SchemaValidateFunction;
compile?: (schema: Object, parentSchema: Object) => ValidateFunction;
macro?: (schema: Object, parentSchema: Object) => Object;
inline?: (it: Object, keyword: string, schema: Object, parentSchema: Object) => string;
}
interface SchemaValidateFunction {
(
schema: Object,
data: any,
parentSchema?: Object,
dataPath?: string,
parentData?: Object | Array<any>,
parentDataProperty?: string | number
): boolean | Thenable<boolean>;
errors?: Array<ErrorObject>;
}
interface ErrorsTextOptions {
separator?: string;
dataVar?: string;
}
interface ErrorObject {
keyword: string;
dataPath: string;
schemaPath: string;
params: ErrorParameters;
// Excluded if messages set to false.
message?: string;
// These are added with the `verbose` option.
schema?: Object;
parentSchema?: Object;
data?: any;
}
type ErrorParameters = RefParams | LimitParams | AdditionalPropertiesParams |
DependenciesParams | FormatParams | ComparisonParams |
MultipleOfParams | PatternParams | RequiredParams |
TypeParams | UniqueItemsParams | CustomParams |
PatternGroupsParams | PatternRequiredParams |
SwitchParams | NoParams | EnumParams;
interface RefParams {
ref: string;
}
interface LimitParams {
limit: number;
}
interface AdditionalPropertiesParams {
additionalProperty: string;
}
interface DependenciesParams {
property: string;
missingProperty: string;
depsCount: number;
deps: string;
}
interface FormatParams {
format: string
}
interface ComparisonParams {
comparison: string;
limit: number | string;
exclusive: boolean;
}
interface MultipleOfParams {
multipleOf: number;
}
interface PatternParams {
pattern: string;
}
interface RequiredParams {
missingProperty: string;
}
interface TypeParams {
type: string;
}
interface UniqueItemsParams {
i: number;
j: number;
}
interface CustomParams {
keyword: string;
}
interface PatternGroupsParams {
reason: string;
limit: number;
pattern: string;
}
interface PatternRequiredParams {
missingPattern: string;
}
interface SwitchParams {
caseIndex: number;
}
interface NoParams {}
interface EnumParams {
allowedValues: Array<any>;
}
}
export = ajv;
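Taken together, the declarations above describe ajv's core workflow: create an instance, compile a schema into a validating function, call it on data, and read `errors` / `errorsText()` on failure. A minimal sketch of that flow in plain JavaScript — the schema and data are invented for illustration, not taken from this commit:

var Ajv = require('ajv');

// allErrors collects every validation error instead of stopping at the first one
var ajv = Ajv({ allErrors: true });

// hypothetical schema, only to exercise the API
var schema = {
  type: 'object',
  properties: {
    name: { type: 'string' },
    port: { type: 'number' }
  },
  required: ['name']
};

// compile once, reuse the validating function
var validate = ajv.compile(schema);

var valid = validate({ port: '8080' });  // missing "name", wrong type for "port"
if (!valid) {
  // errors from the last call are kept on the validating function
  console.log(ajv.errorsText(validate.errors, { separator: '\n' }));
}

Compiling once and reusing the returned function is the intended pattern; `ajv.validate(schemaOrKey, data)` does the same work but looks the schema up (and caches the compilation) on every call.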


@@ -1,420 +0,0 @@
'use strict';
var compileSchema = require('./compile')
, resolve = require('./compile/resolve')
, Cache = require('./cache')
, SchemaObject = require('./compile/schema_obj')
, stableStringify = require('json-stable-stringify')
, formats = require('./compile/formats')
, rules = require('./compile/rules')
, v5 = require('./v5')
, util = require('./compile/util')
, async = require('./async')
, co = require('co');
module.exports = Ajv;
Ajv.prototype.compileAsync = async.compile;
var customKeyword = require('./keyword');
Ajv.prototype.addKeyword = customKeyword.add;
Ajv.prototype.getKeyword = customKeyword.get;
Ajv.prototype.removeKeyword = customKeyword.remove;
Ajv.ValidationError = require('./compile/validation_error');
var META_SCHEMA_ID = 'http://json-schema.org/draft-04/schema';
var SCHEMA_URI_FORMAT = /^(?:(?:[a-z][a-z0-9+-.]*:)?\/\/)?[^\s]*$/i;
function SCHEMA_URI_FORMAT_FUNC(str) {
return SCHEMA_URI_FORMAT.test(str);
}
var META_IGNORE_OPTIONS = [ 'removeAdditional', 'useDefaults', 'coerceTypes' ];
/**
* Creates validator instance.
* Usage: `Ajv(opts)`
* @param {Object} opts optional options
* @return {Object} ajv instance
*/
function Ajv(opts) {
if (!(this instanceof Ajv)) return new Ajv(opts);
var self = this;
opts = this._opts = util.copy(opts) || {};
this._schemas = {};
this._refs = {};
this._fragments = {};
this._formats = formats(opts.format);
this._cache = opts.cache || new Cache;
this._loadingSchemas = {};
this._compilations = [];
this.RULES = rules();
// this is done on purpose, so that methods are bound to the instance
// (without using bind) so that they can be used without the instance
this.validate = validate;
this.compile = compile;
this.addSchema = addSchema;
this.addMetaSchema = addMetaSchema;
this.validateSchema = validateSchema;
this.getSchema = getSchema;
this.removeSchema = removeSchema;
this.addFormat = addFormat;
this.errorsText = errorsText;
this._addSchema = _addSchema;
this._compile = _compile;
opts.loopRequired = opts.loopRequired || Infinity;
if (opts.async || opts.transpile) async.setup(opts);
if (opts.beautify === true) opts.beautify = { indent_size: 2 };
if (opts.errorDataPath == 'property') opts._errorDataPathProperty = true;
this._metaOpts = getMetaSchemaOptions();
if (opts.formats) addInitialFormats();
addDraft4MetaSchema();
if (opts.v5) v5.enable(this);
if (typeof opts.meta == 'object') addMetaSchema(opts.meta);
addInitialSchemas();
/**
* Validate data using schema
* Schema will be compiled and cached (using serialized JSON as the key; [json-stable-stringify](https://github.com/substack/json-stable-stringify) is used to serialize).
* @param {String|Object} schemaKeyRef key, ref or schema object
* @param {Any} data to be validated
* @return {Boolean} validation result. Errors from the last validation will be available in `ajv.errors` (and also in compiled schema: `schema.errors`).
*/
function validate(schemaKeyRef, data) {
var v;
if (typeof schemaKeyRef == 'string') {
v = getSchema(schemaKeyRef);
if (!v) throw new Error('no schema with key or ref "' + schemaKeyRef + '"');
} else {
var schemaObj = _addSchema(schemaKeyRef);
v = schemaObj.validate || _compile(schemaObj);
}
var valid = v(data);
if (v.$async === true)
return self._opts.async == '*' ? co(valid) : valid;
self.errors = v.errors;
return valid;
}
/**
* Create validating function for passed schema.
* @param {Object} schema schema object
* @param {Boolean} _meta true if schema is a meta-schema. Used internally to compile meta schemas of custom keywords.
* @return {Function} validating function
*/
function compile(schema, _meta) {
var schemaObj = _addSchema(schema, undefined, _meta);
return schemaObj.validate || _compile(schemaObj);
}
/**
* Adds schema to the instance.
* @param {Object|Array} schema schema or array of schemas. If array is passed, `key` and other parameters will be ignored.
* @param {String} key Optional schema key. Can be passed to `validate` method instead of schema object or id/ref. One schema per instance can have empty `id` and `key`.
* @param {Boolean} _skipValidation true to skip schema validation. Used internally, option validateSchema should be used instead.
* @param {Boolean} _meta true if schema is a meta-schema. Used internally, addMetaSchema should be used instead.
*/
function addSchema(schema, key, _skipValidation, _meta) {
if (Array.isArray(schema)){
for (var i=0; i<schema.length; i++) addSchema(schema[i], undefined, _skipValidation, _meta);
return;
}
// can key/id have # inside?
key = resolve.normalizeId(key || schema.id);
checkUnique(key);
self._schemas[key] = _addSchema(schema, _skipValidation, _meta, true);
}
/**
* Add schema that will be used to validate other schemas
* options in META_IGNORE_OPTIONS are always set to false
* @param {Object} schema schema object
* @param {String} key optional schema key
* @param {Boolean} skipValidation true to skip schema validation, can be used to override validateSchema option for meta-schema
*/
function addMetaSchema(schema, key, skipValidation) {
addSchema(schema, key, skipValidation, true);
}
/**
* Validate schema
* @param {Object} schema schema to validate
* @param {Boolean} throwOrLogError pass true to throw (or log) an error if invalid
* @return {Boolean} true if schema is valid
*/
function validateSchema(schema, throwOrLogError) {
var $schema = schema.$schema || self._opts.defaultMeta || defaultMeta();
var currentUriFormat = self._formats.uri;
self._formats.uri = typeof currentUriFormat == 'function'
? SCHEMA_URI_FORMAT_FUNC
: SCHEMA_URI_FORMAT;
var valid;
try { valid = validate($schema, schema); }
finally { self._formats.uri = currentUriFormat; }
if (!valid && throwOrLogError) {
var message = 'schema is invalid: ' + errorsText();
if (self._opts.validateSchema == 'log') console.error(message);
else throw new Error(message);
}
return valid;
}
function defaultMeta() {
var meta = self._opts.meta;
self._opts.defaultMeta = typeof meta == 'object'
? meta.id || meta
: self._opts.v5
? v5.META_SCHEMA_ID
: META_SCHEMA_ID;
return self._opts.defaultMeta;
}
/**
* Get compiled schema from the instance by `key` or `ref`.
* @param {String} keyRef `key` that was passed to `addSchema` or full schema reference (`schema.id` or resolved id).
* @return {Function} schema validating function (with property `schema`).
*/
function getSchema(keyRef) {
var schemaObj = _getSchemaObj(keyRef);
switch (typeof schemaObj) {
case 'object': return schemaObj.validate || _compile(schemaObj);
case 'string': return getSchema(schemaObj);
case 'undefined': return _getSchemaFragment(keyRef);
}
}
function _getSchemaFragment(ref) {
var res = resolve.schema.call(self, { schema: {} }, ref);
if (res) {
var schema = res.schema
, root = res.root
, baseId = res.baseId;
var v = compileSchema.call(self, schema, root, undefined, baseId);
self._fragments[ref] = new SchemaObject({
ref: ref,
fragment: true,
schema: schema,
root: root,
baseId: baseId,
validate: v
});
return v;
}
}
function _getSchemaObj(keyRef) {
keyRef = resolve.normalizeId(keyRef);
return self._schemas[keyRef] || self._refs[keyRef] || self._fragments[keyRef];
}
/**
* Remove cached schema(s).
* If no parameter is passed, all schemas except meta-schemas are removed.
* If a RegExp is passed, all schemas whose key/id matches the pattern, except meta-schemas, are removed.
* A schema can be removed even while other schemas reference it, because referencing schemas hold their own local references.
* @param {String|Object|RegExp} schemaKeyRef key, ref, pattern to match key/ref or schema object
*/
function removeSchema(schemaKeyRef) {
if (schemaKeyRef instanceof RegExp) {
_removeAllSchemas(self._schemas, schemaKeyRef);
_removeAllSchemas(self._refs, schemaKeyRef);
return;
}
switch (typeof schemaKeyRef) {
case 'undefined':
_removeAllSchemas(self._schemas);
_removeAllSchemas(self._refs);
self._cache.clear();
return;
case 'string':
var schemaObj = _getSchemaObj(schemaKeyRef);
if (schemaObj) self._cache.del(schemaObj.jsonStr);
delete self._schemas[schemaKeyRef];
delete self._refs[schemaKeyRef];
return;
case 'object':
var jsonStr = stableStringify(schemaKeyRef);
self._cache.del(jsonStr);
var id = schemaKeyRef.id;
if (id) {
id = resolve.normalizeId(id);
delete self._schemas[id];
delete self._refs[id];
}
}
}
function _removeAllSchemas(schemas, regex) {
for (var keyRef in schemas) {
var schemaObj = schemas[keyRef];
if (!schemaObj.meta && (!regex || regex.test(keyRef))) {
self._cache.del(schemaObj.jsonStr);
delete schemas[keyRef];
}
}
}
function _addSchema(schema, skipValidation, meta, shouldAddSchema) {
if (typeof schema != 'object') throw new Error('schema should be object');
var jsonStr = stableStringify(schema);
var cached = self._cache.get(jsonStr);
if (cached) return cached;
shouldAddSchema = shouldAddSchema || self._opts.addUsedSchema !== false;
var id = resolve.normalizeId(schema.id);
if (id && shouldAddSchema) checkUnique(id);
var willValidate = self._opts.validateSchema !== false && !skipValidation;
var recursiveMeta;
if (willValidate && !(recursiveMeta = schema.id && schema.id == schema.$schema))
validateSchema(schema, true);
var localRefs = resolve.ids.call(self, schema);
var schemaObj = new SchemaObject({
id: id,
schema: schema,
localRefs: localRefs,
jsonStr: jsonStr,
meta: meta
});
if (id[0] != '#' && shouldAddSchema) self._refs[id] = schemaObj;
self._cache.put(jsonStr, schemaObj);
if (willValidate && recursiveMeta) validateSchema(schema, true);
return schemaObj;
}
function _compile(schemaObj, root) {
if (schemaObj.compiling) {
schemaObj.validate = callValidate;
callValidate.schema = schemaObj.schema;
callValidate.errors = null;
callValidate.root = root ? root : callValidate;
if (schemaObj.schema.$async === true)
callValidate.$async = true;
return callValidate;
}
schemaObj.compiling = true;
var currentOpts;
if (schemaObj.meta) {
currentOpts = self._opts;
self._opts = self._metaOpts;
}
var v;
try { v = compileSchema.call(self, schemaObj.schema, root, schemaObj.localRefs); }
finally {
schemaObj.compiling = false;
if (schemaObj.meta) self._opts = currentOpts;
}
schemaObj.validate = v;
schemaObj.refs = v.refs;
schemaObj.refVal = v.refVal;
schemaObj.root = v.root;
return v;
function callValidate() {
var _validate = schemaObj.validate;
var result = _validate.apply(null, arguments);
callValidate.errors = _validate.errors;
return result;
}
}
/**
* Convert array of error message objects to string
* @param {Array<Object>} errors optional array of validation errors, if not passed errors from the instance are used.
* @param {Object} options optional options with properties `separator` and `dataVar`.
* @return {String} human readable string with all errors descriptions
*/
function errorsText(errors, options) {
errors = errors || self.errors;
if (!errors) return 'No errors';
options = options || {};
var separator = options.separator === undefined ? ', ' : options.separator;
var dataVar = options.dataVar === undefined ? 'data' : options.dataVar;
var text = '';
for (var i=0; i<errors.length; i++) {
var e = errors[i];
if (e) text += dataVar + e.dataPath + ' ' + e.message + separator;
}
return text.slice(0, -separator.length);
}
/**
* Add custom format
* @param {String} name format name
* @param {String|RegExp|Function} format string is converted to RegExp; function should return boolean (true when valid)
*/
function addFormat(name, format) {
if (typeof format == 'string') format = new RegExp(format);
self._formats[name] = format;
}
function addDraft4MetaSchema() {
if (self._opts.meta !== false) {
var metaSchema = require('./refs/json-schema-draft-04.json');
addMetaSchema(metaSchema, META_SCHEMA_ID, true);
self._refs['http://json-schema.org/schema'] = META_SCHEMA_ID;
}
}
function addInitialSchemas() {
var optsSchemas = self._opts.schemas;
if (!optsSchemas) return;
if (Array.isArray(optsSchemas)) addSchema(optsSchemas);
else for (var key in optsSchemas) addSchema(optsSchemas[key], key);
}
function addInitialFormats() {
for (var name in self._opts.formats) {
var format = self._opts.formats[name];
addFormat(name, format);
}
}
function checkUnique(id) {
if (self._schemas[id] || self._refs[id])
throw new Error('schema with key or id "' + id + '" already exists');
}
function getMetaSchemaOptions() {
var metaOpts = util.copy(self._opts);
for (var i=0; i<META_IGNORE_OPTIONS.length; i++)
delete metaOpts[META_IGNORE_OPTIONS[i]];
return metaOpts;
}
}
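Beyond one-off compilation, the methods bound above let you manage schemas by key and register custom formats. A short usage sketch (the key and format name are illustrative, not from this commit):

var Ajv = require('ajv');
var ajv = Ajv();

// register a named format; a string argument would be converted to a RegExp by addFormat
ajv.addFormat('identifier', /^[a-z_$][a-zA-Z0-9_$]*$/);

// add a schema under a key, then validate against it by that key
ajv.addSchema({ type: 'string', format: 'identifier' }, 'varName');
console.log(ajv.validate('varName', 'my_var')); // true
console.log(ajv.validate('varName', '2bad'));   // false

// removeSchema also evicts the compiled function from the cache
ajv.removeSchema('varName');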


@@ -1,218 +0,0 @@
'use strict';
module.exports = {
setup: setupAsync,
compile: compileAsync
};
var util = require('./compile/util');
var ASYNC = {
'*': checkGenerators,
'co*': checkGenerators,
'es7': checkAsyncFunction
};
var TRANSPILE = {
'nodent': getNodent,
'regenerator': getRegenerator
};
var MODES = [
{ async: 'co*' },
{ async: 'es7', transpile: 'nodent' },
{ async: 'co*', transpile: 'regenerator' }
];
var regenerator, nodent;
function setupAsync(opts, required) {
if (required !== false) required = true;
var async = opts.async
, transpile = opts.transpile
, check;
switch (typeof transpile) {
case 'string':
var get = TRANSPILE[transpile];
if (!get) throw new Error('bad transpiler: ' + transpile);
return (opts._transpileFunc = get(opts, required));
case 'undefined':
case 'boolean':
if (typeof async == 'string') {
check = ASYNC[async];
if (!check) throw new Error('bad async mode: ' + async);
return (opts.transpile = check(opts, required));
}
for (var i=0; i<MODES.length; i++) {
var _opts = MODES[i];
if (setupAsync(_opts, false)) {
util.copy(_opts, opts);
return opts.transpile;
}
}
/* istanbul ignore next */
throw new Error('generators, nodent and regenerator are not available');
case 'function':
return (opts._transpileFunc = opts.transpile);
default:
throw new Error('bad transpiler: ' + transpile);
}
}
function checkGenerators(opts, required) {
/* jshint evil: true */
try {
(new Function('(function*(){})()'))();
return true;
} catch(e) {
/* istanbul ignore next */
if (required) throw new Error('generators not supported');
}
}
function checkAsyncFunction(opts, required) {
/* jshint evil: true */
try {
(new Function('(async function(){})()'))();
/* istanbul ignore next */
return true;
} catch(e) {
if (required) throw new Error('es7 async functions not supported');
}
}
function getRegenerator(opts, required) {
try {
if (!regenerator) {
var name = 'regenerator';
regenerator = require(name);
regenerator.runtime();
}
if (!opts.async || opts.async === true)
opts.async = 'es7';
return regeneratorTranspile;
} catch(e) {
/* istanbul ignore next */
if (required) throw new Error('regenerator not available');
}
}
function regeneratorTranspile(code) {
return regenerator.compile(code).code;
}
function getNodent(opts, required) {
/* jshint evil: true */
try {
if (!nodent) {
var name = 'nodent';
nodent = require(name)({ log: false, dontInstallRequireHook: true });
}
if (opts.async != 'es7') {
if (opts.async && opts.async !== true) console.warn('nodent transpiles only es7 async functions');
opts.async = 'es7';
}
return nodentTranspile;
} catch(e) {
/* istanbul ignore next */
if (required) throw new Error('nodent not available');
}
}
function nodentTranspile(code) {
return nodent.compile(code, '', { promises: true, sourcemap: false }).code;
}
/**
* Creates validating function for passed schema with asynchronous loading of missing schemas.
* `loadSchema` option should be a function that accepts schema uri and node-style callback.
* @this Ajv
* @param {Object} schema schema object
* @param {Function} callback node-style callback, it is always called with 2 parameters: error (or null) and validating function.
*/
function compileAsync(schema, callback) {
/* eslint no-shadow: 0 */
/* jshint validthis: true */
var schemaObj;
var self = this;
try {
schemaObj = this._addSchema(schema);
} catch(e) {
setTimeout(function() { callback(e); });
return;
}
if (schemaObj.validate) {
setTimeout(function() { callback(null, schemaObj.validate); });
} else {
if (typeof this._opts.loadSchema != 'function')
throw new Error('options.loadSchema should be a function');
_compileAsync(schema, callback, true);
}
function _compileAsync(schema, callback, firstCall) {
var validate;
try { validate = self.compile(schema); }
catch(e) {
if (e.missingSchema) loadMissingSchema(e);
else deferCallback(e);
return;
}
deferCallback(null, validate);
function loadMissingSchema(e) {
var ref = e.missingSchema;
if (self._refs[ref] || self._schemas[ref])
return callback(new Error('Schema ' + ref + ' is loaded but ' + e.missingRef + ' cannot be resolved'));
var _callbacks = self._loadingSchemas[ref];
if (_callbacks) {
if (typeof _callbacks == 'function')
self._loadingSchemas[ref] = [_callbacks, schemaLoaded];
else
_callbacks[_callbacks.length] = schemaLoaded;
} else {
self._loadingSchemas[ref] = schemaLoaded;
self._opts.loadSchema(ref, function (err, sch) {
var _callbacks = self._loadingSchemas[ref];
delete self._loadingSchemas[ref];
if (typeof _callbacks == 'function') {
_callbacks(err, sch);
} else {
for (var i=0; i<_callbacks.length; i++)
_callbacks[i](err, sch);
}
});
}
function schemaLoaded(err, sch) {
if (err) return callback(err);
if (!(self._refs[ref] || self._schemas[ref])) {
try {
self.addSchema(sch, ref);
} catch(e) {
callback(e);
return;
}
}
_compileAsync(schema, callback);
}
}
function deferCallback(err, validate) {
if (firstCall) setTimeout(function() { callback(err, validate); });
else return callback(err, validate);
}
}
}
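`compileAsync` only makes sense on an instance constructed with a `loadSchema` function, which it invokes for every missing `$ref` before retrying compilation. A sketch of that contract — the URI and the loader body are made up for the example:

var Ajv = require('ajv');

// loadSchema receives a schema uri and a node-style callback
var ajv = Ajv({
  loadSchema: function (uri, callback) {
    // a real loader would fetch the uri; here the schema is hardcoded
    callback(null, { type: 'integer', minimum: 0 });
  }
});

var schema = {
  type: 'object',
  properties: {
    count: { $ref: 'https://example.com/nonNegativeInteger.json' }
  }
};

ajv.compileAsync(schema, function (err, validate) {
  if (err) throw err;
  console.log(validate({ count: 3 }));  // true
  console.log(validate({ count: -1 })); // false
});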


@@ -1,26 +0,0 @@
'use strict';

var Cache = module.exports = function Cache() {
  this._cache = {};
};

Cache.prototype.put = function Cache_put(key, value) {
  this._cache[key] = value;
};

Cache.prototype.get = function Cache_get(key) {
  return this._cache[key];
};

Cache.prototype.del = function Cache_del(key) {
  delete this._cache[key];
};

Cache.prototype.clear = function Cache_clear() {
  this._cache = {};
};
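This default cache is swappable: the constructor's `cache` option accepts any object exposing the same four methods (`put`, `get`, `del`, `clear`), which the instance uses to memoize compiled schemas by their serialized JSON. A Map-backed drop-in, purely as an illustration:

var Ajv = require('ajv');

// any object with put/get/del/clear can replace the default cache
var mapCache = {
  _store: new Map(),
  put: function (key, value) { this._store.set(key, value); },
  get: function (key) { return this._store.get(key); },
  del: function (key) { this._store.delete(key); },
  clear: function () { this._store.clear(); }
};

var ajv = Ajv({ cache: mapCache });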

Some files were not shown because too many files have changed in this diff