mirror of
https://github.com/ynput/ayon-core.git
synced 2025-12-24 21:04:40 +01:00
Merge remote-tracking branch 'origin/develop' into feature/PYPE-523-validator-for-comparing-arbitrary-attributes
This commit is contained in: commit 68fb2c2e27
3246 changed files with 16031 additions and 300912 deletions
6 .gitignore vendored
@@ -27,3 +27,9 @@ coverage.xml
*.cover
.hypothesis/
.pytest_cache/

# Node JS packages
##################
node_modules/
package-lock.json

@@ -3,15 +3,17 @@ import sys
from avalon import api as avalon
from pyblish import api as pyblish
from pypeapp import execute, Logger
from app import api as app

from .. import api
from .lib import set_avalon_workdir
t = app.Templates()

log = Logger().get_logger(__name__, "aport")
log = api.Logger.getLogger(__name__, "aport")

AVALON_CONFIG = os.getenv("AVALON_CONFIG", "pype")

ADDITIONAL_PLUGINS = ['ftrack']

PARENT_DIR = os.path.dirname(__file__)
PACKAGE_DIR = os.path.dirname(PARENT_DIR)
PLUGINS_DIR = os.path.join(PACKAGE_DIR, "plugins")

@@ -33,8 +35,31 @@ CREATE_PATH = os.path.join(PLUGINS_DIR, "aport", "create")
INVENTORY_PATH = os.path.join(PLUGINS_DIR, "aport", "inventory")


def register_additional_plugin_paths():
    '''Register publish plugin paths for the additional hosts.
    '''
    for host in ADDITIONAL_PLUGINS:
        publish_path = os.path.join(
            PLUGINS_DIR, host, "publish").replace("\\", "/")
        pyblish.register_plugin_path(publish_path)

        # add the path to the PUBLISH_PATH environment variable
        if os.getenv("PUBLISH_PATH", None):
            os.environ["PUBLISH_PATH"] = os.pathsep.join(
                os.environ["PUBLISH_PATH"].split(os.pathsep) +
                [publish_path]
            )
        else:
            os.environ["PUBLISH_PATH"] = publish_path

        log.info(
            "Registered additional plugin path: "
            "{}".format(publish_path))
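A minimal sketch of how the loop above grows PUBLISH_PATH; the starting value and the ftrack path are illustrative, not taken from this repository:

import os

os.environ["PUBLISH_PATH"] = "/pype/plugins/aport/publish"  # assumed starting state
publish_path = "/pype/plugins/ftrack/publish"  # for the ADDITIONAL_PLUGINS = ['ftrack'] entry
os.environ["PUBLISH_PATH"] = os.pathsep.join(
    os.environ["PUBLISH_PATH"].split(os.pathsep) + [publish_path]
)
# on POSIX: "/pype/plugins/aport/publish:/pype/plugins/ftrack/publish"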


def install():
    set_avalon_workdir()
    # api.set_avalon_workdir()

    log.info("Registering Aport plug-ins..")
    pyblish.register_plugin_path(PUBLISH_PATH)

@@ -42,6 +67,9 @@ def install():
    avalon.register_plugin_path(avalon.Creator, CREATE_PATH)
    avalon.register_plugin_path(avalon.InventoryAction, INVENTORY_PATH)

    # additional plugins
    register_additional_plugin_paths()

    # Disable all families except for the ones we explicitly want to see
    family_states = [
        "imagesequence",

@@ -51,6 +79,9 @@ def install():
    avalon.data["familiesStateDefault"] = False
    avalon.data["familiesStateToggled"] = family_states

    # load data from templates
    api.load_data_from_templates()

    # launch pico server
    pico_server_launch()

@@ -81,7 +112,7 @@ def pico_server_launch():
        "api"
    ]

    execute(
    app.forward(
        args,
        cwd=path
    )

@@ -1,34 +1,90 @@
# api.py
import os
import sys
import tempfile

import pico
from pico import PicoApp
from pico.decorators import request_args, set_cookie, delete_cookie, stream
from pico.decorators import header, cookie
from app.api import forward, Logger

from werkzeug.exceptions import Unauthorized, ImATeapot, BadRequest
import pipeline as ppl

from avalon import api as avalon
from avalon import io

import pyblish.api as pyblish

from pypeapp import execute
from pype import api as pype


log = pype.Logger().get_logger(__name__, "aport")


SESSION = avalon.session
if not SESSION:
    io.install()
log = Logger.getLogger(__name__, "aport")


@pico.expose()
def publish(json_data_path, gui):
def get_session():
    ppl.AVALON_PROJECT = os.getenv("AVALON_PROJECT", None)
    ppl.AVALON_ASSET = os.getenv("AVALON_ASSET", None)
    ppl.AVALON_TASK = os.getenv("AVALON_TASK", None)
    ppl.AVALON_SILO = os.getenv("AVALON_SILO", None)
    return ppl.get_session()

@pico.expose()
def load_representations(project, representations):
    '''Query data from the MongoDB for the defined representations.

    Args:
        project (str): name of the project
        representations (list): representations which are required

    Returns:
        data (dict): representations in their last versions

    # testing url:
    http://localhost:4242/api/load_representations?project=jakub_projectx&representations=[{%22asset%22:%22e09s031_0040%22,%22subset%22:%22referenceDefault%22,%22representation%22:%22mp4%22},%20{%22asset%22:%22e09s031_0030%22,%22subset%22:%22referenceDefault%22,%22representation%22:%22mp4%22}]

    # returning:
    {"e09s031_0040_referenceDefault":{"_id":"5c6dabaa2af61756b02f7f32","schema":"pype:representation-2.0","type":"representation","parent":"5c6dabaa2af61756b02f7f31","name":"mp4","data":{"path":"C:\\Users\\hubert\\_PYPE_testing\\projects\\jakub_projectx\\thisFolder\\e09\\s031\\e09s031_0040\\publish\\clip\\referenceDefault\\v019\\jkprx_e09s031_0040_referenceDefault_v019.mp4","template":"{publish.root}/{publish.folder}/{version.main}/{publish.file}"},"dependencies":[],"context":{"root":"C:\\Users\\hubert\\_PYPE_testing\\projects","project":{"name":"jakub_projectx","code":"jkprx"},"task":"edit","silo":"thisFolder","asset":"e09s031_0040","family":"clip","subset":"referenceDefault","VERSION":19,"hierarchy":"thisFolder\\e09\\s031","representation":"mp4"}}}
    '''
    data = {}
    # log.info("___project: {}".format(project))
    # ppl.io.activate_project(project)
    #
    # from_mongo = ppl.io.find({"name": repr['representation'],
    #                           "type": "representation"})[:]

    for repr in representations:
        log.info("asset: {}".format(repr['asset']))
        # set context for each asset individually
        context(project, repr['asset'], '')

        # query data from mongo db for the asset's subset representation
        related_repr = [r for r in ppl.io.find({"name": repr['representation'],
                                                "type": "representation",
                                                "context.asset": repr['asset']})[:]]

        versions_dict = {r['context']['version']: i
                         for i, r in enumerate(related_repr)}
        versions_list = [v for v in versions_dict.keys()]
        versions_list.sort()

        version_index_last = versions_dict[max(versions_list)]

        log.info("version_index_last: {}".format(version_index_last))
        # create the name which will be used on the timeline clip
        name = '_'.join([repr['asset'], repr['subset']])

        # log.info("___related_repr: {}".format(related_repr))
        # assign data for the clip representation
        version = ppl.io.find_one(
            {'_id': related_repr[version_index_last]['parent']})
        log.info("version: {}".format(version))

        # fixing path workaround
        if '.#####.mxf' in related_repr[version_index_last]['data']['path']:
            related_repr[version_index_last]['data']['path'] = related_repr[version_index_last]['data']['path'].replace(
                '.#####.mxf', '.mxf')

        related_repr[version_index_last]['version'] = version
        related_repr[version_index_last]['parentClip'] = repr['parentClip']
        data[name] = related_repr[version_index_last]

    return data
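A client-side sketch of calling this endpoint over plain HTTP, mirroring the testing URL in the docstring; project and asset names are the docstring's own examples, and the pico server is assumed to listen on localhost:4242:

import json
import requests

payload = [{"asset": "e09s031_0040",
            "subset": "referenceDefault",
            "representation": "mp4"}]
resp = requests.get(
    "http://localhost:4242/api/load_representations",
    params={"project": "jakub_projectx",
            "representations": json.dumps(payload)},
)
print(resp.json())  # {"e09s031_0040_referenceDefault": {...latest version document...}}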


@pico.expose()
def publish(send_json_path, get_json_path, gui):
    """
    Runs standalone pyblish and adds a link to
    data in an external json file

@@ -37,82 +93,101 @@ def publish(json_data_path, gui):
    host is needed

    Args:
        json_data_path (string): path to temp json file with
                                 context data
        staging_dir (string, optional): path to temp directory
        send_json_path (string): path to temp json file with
                                 sending context data
        get_json_path (string): path to temp json file with
                                returning context data

    Returns:
        dict: return_json_path
        dict: get_json_path

    Raises:
        Exception: description

    """
    cwd = os.getenv('AVALON_WORKDIR').replace("\\", "/")

    staging_dir = tempfile.mkdtemp(prefix="pype_aport_").replace("\\", "/")
    log.info("staging_dir: {}".format(staging_dir))
    return_json_path = os.path.join(staging_dir, "return_data.json").replace("\\", "/")
    log.info("avalon.session is: \n{}".format(ppl.SESSION))
    log.info("PUBLISH_PATH: \n{}".format(os.environ["PUBLISH_PATH"]))

    log.info("avalon.session is: \n{}".format(SESSION))

    pype_start = os.path.join(os.getenv('PYPE_ROOT'),
    pype_start = os.path.join(os.getenv('PYPE_SETUP_ROOT'),
                              "app", "pype-start.py")

    publish = "--publish-gui" if gui else "--publish"

    args = [pype_start, publish,
    args = [pype_start,
            "--root", os.environ['AVALON_PROJECTS'], "--publish-gui",
            "-pp", os.environ["PUBLISH_PATH"],
            "-d", "rqst_json_data_path", json_data_path,
            "-d", "post_json_data_path", return_json_path
            "-d", "rqst_json_data_path", send_json_path,
            "-d", "post_json_data_path", get_json_path
            ]

    log.debug(args)

    # start standalone pyblish qml
    execute([
    log.info("_aport.api Variable `AVALON_PROJECTS` had changed to `{0}`.".format(
        os.environ['AVALON_PROJECTS']))
    forward([
        sys.executable, "-u"
    ] + args,
        cwd=cwd
        # cwd=cwd
    )

    return {"return_json_path": return_json_path}
    return {"get_json_path": get_json_path}
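A sketch of the JSON hand-off this endpoint expects, under the assumption of a minimal payload; the keys mirror what the collector plugins further down read (currentFile, instances, stagingDir), the host value and paths are illustrative:

import json
import os
import tempfile

staging = tempfile.mkdtemp(prefix="pype_aport_")
send_json = os.path.join(staging, "send_data.json")
get_json = os.path.join(staging, "return_data.json")

with open(send_json, "w") as f:
    # assumed minimal context payload; see CollectContextDataFromAport below
    json.dump({"currentFile": "/projects/untitled.psd", "instances": [],
               "stagingDir": staging, "host": "premiere"}, f)

publish(send_json, get_json, gui=False)

with open(get_json) as f:
    result = json.load(f)  # context data written back by the pyblish run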


@pico.expose()
def context(project_name, asset, task, app):
def context(project, asset, task, app='aport'):
    os.environ["AVALON_PROJECT"] = ppl.AVALON_PROJECT = project
    os.environ["AVALON_ASSET"] = ppl.AVALON_ASSET = asset
    os.environ["AVALON_TASK"] = ppl.AVALON_TASK = task
    os.environ["AVALON_SILO"] = ppl.AVALON_SILO = ''

    ppl.get_session()
    # log.info('ppl.SESSION: {}'.format(ppl.SESSION))

    # http://localhost:4242/pipeline/context?project=this&asset=shot01&task=comp

    os.environ["AVALON_PROJECT"] = project_name
    io.Session["AVALON_PROJECT"] = project_name
    ppl.update_current_task(task, asset, app)

    avalon.update_current_task(task, asset, app)
    project_code = ppl.io.find_one({"type": "project"})["data"].get("code", '')

    project_code = pype.get_project()["data"].get("code", '')
    os.environ["AVALON_PROJECTCODE"] = \
        ppl.SESSION["AVALON_PROJECTCODE"] = project_code

    os.environ["AVALON_PROJECTCODE"] = project_code
    io.Session["AVALON_PROJECTCODE"] = project_code
    parents = ppl.io.find_one({"type": 'asset',
                               "name": ppl.AVALON_ASSET})['data']['parents']

    hierarchy = pype.get_hierarchy()
    os.environ["AVALON_HIERARCHY"] = hierarchy
    io.Session["AVALON_HIERARCHY"] = hierarchy
    if parents and len(parents) > 0:
        # hierarchy = os.path.sep.join(hierarchy)
        hierarchy = os.path.join(*parents).replace("\\", "/")

    fix_paths = {k: v.replace("\\", "/") for k, v in SESSION.items()
    os.environ["AVALON_HIERARCHY"] = \
        ppl.SESSION["AVALON_HIERARCHY"] = hierarchy

    fix_paths = {k: v.replace("\\", "/") for k, v in ppl.SESSION.items()
                 if isinstance(v, str)}
    SESSION.update(fix_paths)
    SESSION.update({"AVALON_HIERARCHY": hierarchy,
                    "AVALON_PROJECTCODE": project_code,
                    "current_dir": os.getcwd().replace("\\", "/")
                    })

    return SESSION
    ppl.SESSION.update(fix_paths)
    ppl.SESSION.update({"AVALON_HIERARCHY": hierarchy,
                        "AVALON_PROJECTCODE": project_code,
                        "current_dir": os.getcwd().replace("\\", "/")
                        })

    return ppl.SESSION
@pico.expose()
def anatomy_fill(data):
    from pype import api as pype
    pype.load_data_from_templates()
    anatomy = pype.Anatomy
    return anatomy.format(data)


@pico.expose()
def deregister_plugin_path():
    if os.getenv("PUBLISH_PATH", None):
        aport_plugin_path = [p.replace("\\", "/") for p in os.environ["PUBLISH_PATH"].split(
            os.pathsep) if "aport" in p][0]
        aport_plugin_path = os.pathsep.join(
            [p.replace("\\", "/")
             for p in os.environ["PUBLISH_PATH"].split(os.pathsep)
             if "aport" in p or
             "ftrack" in p])
        os.environ["PUBLISH_PATH"] = aport_plugin_path
    else:
        log.warning("deregister_plugin_path(): No PUBLISH_PATH is registered")

@@ -125,8 +200,8 @@ def register_plugin_path(publish_path):
    deregister_plugin_path()
    if os.getenv("PUBLISH_PATH", None):
        os.environ["PUBLISH_PATH"] = os.pathsep.join(
            os.environ["PUBLISH_PATH"].split(os.pathsep) +
            [publish_path.replace("\\", "/")]
            os.environ["PUBLISH_PATH"].split(os.pathsep)
            + [publish_path.replace("\\", "/")]
        )
    else:
        os.environ["PUBLISH_PATH"] = publish_path

@@ -143,8 +218,8 @@ app.register_module(__name__)
# remove all Handlers created by pico
for name, handler in [(handler.get_name(), handler)
                      for handler in pype.Logger.logging.root.handlers[:]]:
                      for handler in Logger.logging.root.handlers[:]]:
    if "pype" not in str(name).lower():
        print(name)
        print(handler)
        pype.Logger.logging.root.removeHandler(handler)
        Logger.logging.root.removeHandler(handler)

# SPLASH.hide_splash()
432 pype/aport/io_nonsingleton.py Normal file

@@ -0,0 +1,432 @@
"""
|
||||
Wrapper around interactions with the database
|
||||
|
||||
Copy of io module in avalon-core.
|
||||
- In this case not working as singleton with api.Session!
|
||||
"""
|
||||
|
||||
import os
|
||||
import time
|
||||
import errno
|
||||
import shutil
|
||||
import logging
|
||||
import tempfile
|
||||
import functools
|
||||
import contextlib
|
||||
|
||||
from avalon import schema
|
||||
import requests
|
||||
|
||||
# Third-party dependencies
|
||||
import pymongo
|
||||
|
||||
|
||||
def auto_reconnect(func):
|
||||
"""Handling auto reconnect in 3 retry times"""
|
||||
@functools.wraps(func)
|
||||
def decorated(*args, **kwargs):
|
||||
object = args[0]
|
||||
for retry in range(3):
|
||||
try:
|
||||
return func(*args, **kwargs)
|
||||
except pymongo.errors.AutoReconnect:
|
||||
object.log.error("Reconnecting..")
|
||||
time.sleep(0.1)
|
||||
else:
|
||||
raise
|
||||
|
||||
return decorated
|
||||
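A minimal sketch of applying auto_reconnect outside this module; the only contract is that the wrapped method's owner exposes a `log` attribute (the class and method names here are hypothetical):

class Ping(object):
    log = logging.getLogger("ping")

    @auto_reconnect
    def ping(self, client):
        # re-runs up to three times if pymongo raises AutoReconnect
        return client.server_info()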


class DbConnector(object):

    log = logging.getLogger(__name__)

    def __init__(self):
        self.Session = {}
        self._mongo_client = None
        self._sentry_client = None
        self._sentry_logging_handler = None
        self._database = None
        self._is_installed = False

    def install(self):
        """Establish a persistent connection to the database"""
        if self._is_installed:
            return

        logging.basicConfig()
        self.Session.update(self._from_environment())

        timeout = int(self.Session["AVALON_TIMEOUT"])
        self._mongo_client = pymongo.MongoClient(
            self.Session["AVALON_MONGO"], serverSelectionTimeoutMS=timeout)

        for retry in range(3):
            try:
                t1 = time.time()
                self._mongo_client.server_info()

            except Exception:
                self.log.error("Retrying..")
                time.sleep(1)
                timeout *= 1.5

            else:
                break

        else:
            raise IOError(
                "ERROR: Couldn't connect to %s in "
                "less than %.3f ms" % (self.Session["AVALON_MONGO"], timeout))

        self.log.info("Connected to %s, delay %.3f s" % (
            self.Session["AVALON_MONGO"], time.time() - t1))

        self._install_sentry()

        self._database = self._mongo_client[self.Session["AVALON_DB"]]
        self._is_installed = True

    def _install_sentry(self):
        if "AVALON_SENTRY" not in self.Session:
            return

        try:
            from raven import Client
            from raven.handlers.logging import SentryHandler
            from raven.conf import setup_logging
        except ImportError:
            # Note: There was a Sentry address in this Session
            return self.log.warning("Sentry disabled, raven not installed")

        client = Client(self.Session["AVALON_SENTRY"])

        # Transmit log messages to Sentry
        handler = SentryHandler(client)
        handler.setLevel(logging.WARNING)

        setup_logging(handler)

        self._sentry_client = client
        self._sentry_logging_handler = handler
        self.log.info(
            "Connected to Sentry @ %s" % self.Session["AVALON_SENTRY"]
        )

    def _from_environment(self):
        Session = {
            item[0]: os.getenv(item[0], item[1])
            for item in (
                # Root directory of projects on disk
                ("AVALON_PROJECTS", None),

                # Name of current Project
                ("AVALON_PROJECT", ""),

                # Name of current Asset
                ("AVALON_ASSET", ""),

                # Name of current silo
                ("AVALON_SILO", ""),

                # Name of current task
                ("AVALON_TASK", None),

                # Name of current app
                ("AVALON_APP", None),

                # Path to working directory
                ("AVALON_WORKDIR", None),

                # Name of current Config
                # TODO(marcus): Establish a suitable default config
                ("AVALON_CONFIG", "no_config"),

                # Name of Avalon in graphical user interfaces
                # Use this to customise the visual appearance of Avalon
                # to better integrate with your surrounding pipeline
                ("AVALON_LABEL", "Avalon"),

                # Used during any connections to the outside world
                ("AVALON_TIMEOUT", "1000"),

                # Address to Asset Database
                ("AVALON_MONGO", "mongodb://localhost:27017"),

                # Name of database used in MongoDB
                ("AVALON_DB", "avalon"),

                # Address to Sentry
                ("AVALON_SENTRY", None),

                # Address to Deadline Web Service
                # E.g. http://192.167.0.1:8082
                ("AVALON_DEADLINE", None),

                # Enable features not necessarily stable. The user's own risk
                ("AVALON_EARLY_ADOPTER", None),

                # Address of central asset repository, contains
                # the following interface:
                #   /upload
                #   /download
                #   /manager (optional)
                ("AVALON_LOCATION", "http://127.0.0.1"),

                # Boolean of whether to upload published material
                # to central asset repository
                ("AVALON_UPLOAD", None),

                # Generic username and password
                ("AVALON_USERNAME", "avalon"),
                ("AVALON_PASSWORD", "secret"),

                # Unique identifier for instances in working files
                ("AVALON_INSTANCE_ID", "avalon.instance"),
                ("AVALON_CONTAINER_ID", "avalon.container"),

                # Enable debugging
                ("AVALON_DEBUG", None),

            ) if os.getenv(item[0], item[1]) is not None
        }

        Session["schema"] = "avalon-core:session-1.0"
        try:
            schema.validate(Session)
        except schema.ValidationError as e:
            # TODO(marcus): Make this mandatory
            self.log.warning(e)

        return Session

    def uninstall(self):
        """Close any connection to the database"""
        try:
            self._mongo_client.close()
        except AttributeError:
            pass

        self._mongo_client = None
        self._database = None
        self._is_installed = False

    def active_project(self):
        """Return the name of the active project"""
        return self.Session["AVALON_PROJECT"]

    def activate_project(self, project_name):
        self.Session["AVALON_PROJECT"] = project_name

    def projects(self):
        """List available projects

        Returns:
            list of project documents

        """
        collection_names = self.collections()
        for project in collection_names:
            if project in ("system.indexes",):
                continue

            # Each collection will have exactly one project document
            document = self.find_project(project)

            if document is not None:
                yield document

    def locate(self, path):
        """Traverse a hierarchy from top-to-bottom

        Example:
            representation = locate(["hulk", "Bruce", "modelDefault", 1, "ma"])

        Returns:
            representation (ObjectId)

        """
        components = zip(
            ("project", "asset", "subset", "version", "representation"),
            path
        )

        parent = None
        for type_, name in components:
            latest = (type_ == "version") and name in (None, -1)

            try:
                if latest:
                    parent = self.find_one(
                        filter={
                            "type": type_,
                            "parent": parent
                        },
                        projection={"_id": 1},
                        sort=[("name", -1)]
                    )["_id"]
                else:
                    parent = self.find_one(
                        filter={
                            "type": type_,
                            "name": name,
                            "parent": parent
                        },
                        projection={"_id": 1},
                    )["_id"]

            except TypeError:
                return None

        return parent
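The docstring's example spelled out as a usage sketch with this module's connector (the project and document names come from the docstring itself):

io = DbConnector()
io.install()
io.activate_project("hulk")
representation_id = io.locate(["hulk", "Bruce", "modelDefault", 1, "ma"])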

    @auto_reconnect
    def collections(self):
        return self._database.collection_names()

    @auto_reconnect
    def find_project(self, project):
        return self._database[project].find_one({"type": "project"})

    @auto_reconnect
    def insert_one(self, item):
        assert isinstance(item, dict), "item must be of type <dict>"
        schema.validate(item)
        return self._database[self.Session["AVALON_PROJECT"]].insert_one(item)

    @auto_reconnect
    def insert_many(self, items, ordered=True):
        # check if all items are valid
        assert isinstance(items, list), "`items` must be of type <list>"
        for item in items:
            assert isinstance(item, dict), "`item` must be of type <dict>"
            schema.validate(item)

        return self._database[self.Session["AVALON_PROJECT"]].insert_many(
            items,
            ordered=ordered)

    @auto_reconnect
    def find(self, filter, projection=None, sort=None):
        return self._database[self.Session["AVALON_PROJECT"]].find(
            filter=filter,
            projection=projection,
            sort=sort
        )

    @auto_reconnect
    def find_one(self, filter, projection=None, sort=None):
        assert isinstance(filter, dict), "filter must be <dict>"
        return self._database[self.Session["AVALON_PROJECT"]].find_one(
            filter=filter,
            projection=projection,
            sort=sort
        )

    @auto_reconnect
    def save(self, *args, **kwargs):
        return self._database[self.Session["AVALON_PROJECT"]].save(
            *args, **kwargs)

    @auto_reconnect
    def replace_one(self, filter, replacement):
        return self._database[self.Session["AVALON_PROJECT"]].replace_one(
            filter, replacement)

    @auto_reconnect
    def update_many(self, filter, update):
        return self._database[self.Session["AVALON_PROJECT"]].update_many(
            filter, update)

    @auto_reconnect
    def distinct(self, *args, **kwargs):
        return self._database[self.Session["AVALON_PROJECT"]].distinct(
            *args, **kwargs)

    @auto_reconnect
    def drop(self, *args, **kwargs):
        return self._database[self.Session["AVALON_PROJECT"]].drop(
            *args, **kwargs)

    @auto_reconnect
    def delete_many(self, *args, **kwargs):
        return self._database[self.Session["AVALON_PROJECT"]].delete_many(
            *args, **kwargs)

    def parenthood(self, document):
        assert document is not None, "This is a bug"

        parents = list()

        while document.get("parent") is not None:
            document = self.find_one({"_id": document["parent"]})

            if document is None:
                break

            parents.append(document)

        return parents

    @contextlib.contextmanager
    def tempdir(self):
        tempdir = tempfile.mkdtemp()
        try:
            yield tempdir
        finally:
            shutil.rmtree(tempdir)

    def download(self, src, dst):
        """Download `src` to `dst`

        Arguments:
            src (str): URL to source file
            dst (str): Absolute path to destination file

        Yields tuple (progress, error):
            progress (int): Between 0-100
            error (Exception): Any exception raised when first making connection

        """
        try:
            response = requests.get(
                src,
                stream=True,
                auth=requests.auth.HTTPBasicAuth(
                    self.Session["AVALON_USERNAME"],
                    self.Session["AVALON_PASSWORD"]
                )
            )
        except requests.ConnectionError as e:
            yield None, e
            return

        with self.tempdir() as dirname:
            tmp = os.path.join(dirname, os.path.basename(src))

            with open(tmp, "wb") as f:
                total_length = response.headers.get("content-length")

                if total_length is None:  # no content length header
                    f.write(response.content)
                else:
                    downloaded = 0
                    total_length = int(total_length)
                    for data in response.iter_content(chunk_size=4096):
                        downloaded += len(data)
                        f.write(data)

                        yield int(100.0 * downloaded / total_length), None

            try:
                os.makedirs(os.path.dirname(dst))
            except OSError as e:
                # An already existing destination directory is fine.
                if e.errno != errno.EEXIST:
                    raise

            shutil.copy(tmp, dst)
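Consuming the progress generator of download(), as promised by its docstring; the URL and destination path are illustrative:

io = DbConnector()
io.install()
for progress, error in io.download("http://127.0.0.1/download/model.ma",
                                   "/tmp/model.ma"):
    if error:
        raise error  # connection failed before any data arrived
    print("downloaded: {}%".format(progress))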

@@ -1,135 +1,26 @@
import os
import re
import sys
from avalon import io, api as avalon, lib as avalonlib
from pype import lib
from pype import api as pype
# from pypeapp.api import (Templates, Logger, format)
from pypeapp import Logger, Anatomy
log = Logger().get_logger(__name__, os.getenv("AVALON_APP", "pype-config"))
import pype.api as pype


def get_asset():
    """
    Obtain Asset string from session or environment variable

    Returns:
        string: asset name

    Raises:
        log: error
    """
    lib.set_io_database()
    asset = io.Session.get("AVALON_ASSET", None) \
        or os.getenv("AVALON_ASSET", None)
    log.info("asset: {}".format(asset))
    assert asset, log.error("missing `AVALON_ASSET`"
                            "in avalon session "
                            "or os.environ!")
    return asset
def get_anatomy(**kwarg):
    return pype.Anatomy


def get_context_data(
    project_name=None, hierarchy=None, asset=None, task_name=None
):
    """
    Collect all main contextual data
def format_anatomy(data):
    from .templates import (
        get_anatomy
    )
    file = script_name()

    Args:
        project (string, optional): project name
        hierarchy (string, optional): hierarchy path
        asset (string, optional): asset name
        task (string, optional): task name
    anatomy = get_anatomy()

    Returns:
        dict: contextual data
    # TODO: perhaps should be in try!
    padding = anatomy.render.padding

    """
    if not task_name:
        lib.set_io_database()
        task_name = io.Session.get("AVALON_TASK", None) \
            or os.getenv("AVALON_TASK", None)
        assert task_name, log.error(
            "missing `AVALON_TASK` in avalon session or os.environ!"
        )
    data.update({
        "hierarchy": pype.get_hierarchy(),
        "frame": "#" * padding,
        "VERSION": pype.get_version_from_workfile(file)
    })

    application = avalonlib.get_application(os.environ["AVALON_APP_NAME"])

    os.environ['AVALON_PROJECT'] = project_name
    io.Session['AVALON_PROJECT'] = project_name

    if not hierarchy:
        hierarchy = pype.get_hierarchy()

    project_doc = io.find_one({"type": "project"})

    data = {
        "task": task_name,
        "asset": asset or get_asset(),
        "project": {
            "name": project_doc["name"],
            "code": project_doc["data"].get("code", '')
        },
        "hierarchy": hierarchy,
        "app": application["application_dir"]
    }
    return data
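For orientation, a sketch of the dict get_context_data() returns; the values are illustrative, borrowed from examples elsewhere in this changeset:

{
    "task": "comp",
    "asset": "shot01",
    "project": {"name": "jakub_projectx", "code": "jkprx"},
    "hierarchy": "thisFolder/e09/s031",
    "app": "premiere"
}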


def set_avalon_workdir(
    project=None, hierarchy=None, asset=None, task=None
):
    """
    Updates os.environ and session with the filled workdir

    Args:
        project (string, optional): project name
        hierarchy (string, optional): hierarchy path
        asset (string, optional): asset name
        task (string, optional): task name

    Returns:
        os.environ[AVALON_WORKDIR]: workdir path
        avalon.session[AVALON_WORKDIR]: workdir path

    """
    lib.set_io_database()
    awd = io.Session.get("AVALON_WORKDIR", None) or \
        os.getenv("AVALON_WORKDIR", None)

    data = get_context_data(project, hierarchy, asset, task)

    if (not awd) or ("{" not in awd):
        anatomy_filled = Anatomy(io.Session["AVALON_PROJECT"]).format(data)
        awd = anatomy_filled["work"]["folder"]

    awd_filled = os.path.normpath(format(awd, data))

    io.Session["AVALON_WORKDIR"] = awd_filled
    os.environ["AVALON_WORKDIR"] = awd_filled
    log.info("`AVALON_WORKDIR` fixed to: {}".format(awd_filled))


def get_workdir_template(data=None):
    """
    Obtain workdir templated path from Anatomy()

    Args:
        data (dict, optional): basic contextual data

    Returns:
        string: template path
    """
    anatomy = Anatomy()
    anatomy_filled = anatomy.format(data or get_context_data())

    try:
        work = anatomy_filled["work"]
    except Exception as e:
        log.error(
            "{0} Error in get_workdir_template(): {1}".format(__name__, str(e))
        )

    return work
    # log.info("format_anatomy:anatomy: {}".format(anatomy))
    return anatomy.format(data)

@@ -1,252 +0,0 @@
# api.py
import os
import sys
import tempfile

import pico
from pico import PicoApp
from pico.decorators import request_args, set_cookie, delete_cookie, stream
from pico.decorators import header, cookie

from werkzeug.exceptions import Unauthorized, ImATeapot, BadRequest

from avalon import api as avalon
from avalon import io

import pyblish.api as pyblish

from pypeapp import execute
from pype import api as pype


log = pype.Logger().get_logger(__name__, "aport")


SESSION = avalon.session
if not SESSION:
    io.install()


@pico.expose()
def publish(json_data_path, staging_dir=None):
    """
    Runs standalone pyblish and adds a link to
    data in an external json file

    It is necessary to run `register_plugin_path` if a particular
    host is needed

    Args:
        json_data_path (string): path to temp json file with
                                 context data
        staging_dir (string, optional): path to temp directory

    Returns:
        dict: return_json_path

    Raises:
        Exception: description

    """
    cwd = os.getenv('AVALON_WORKDIR').replace("\\", "/")
    os.chdir(cwd)
    log.info(os.getcwd())
    staging_dir = tempfile.mkdtemp(prefix="pype_aport_").replace("\\", "/")
    log.info("staging_dir: {}".format(staging_dir))
    return_json_path = os.path.join(staging_dir, "return_data.json")

    log.info("avalon.session is: \n{}".format(SESSION))
    pype_start = os.path.join(os.getenv('PYPE_ROOT'),
                              "app", "pype-start.py")

    args = [pype_start, "--publish",
            "-pp", os.environ["PUBLISH_PATH"],
            "-d", "rqst_json_data_path", json_data_path,
            "-d", "post_json_data_path", return_json_path
            ]

    log.debug(args)

    # start standalone pyblish qml
    execute([
        sys.executable, "-u"
    ] + args,
        cwd=cwd
    )

    return {"return_json_path": return_json_path}


@pico.expose()
def context(project, asset, task, app):
    # http://localhost:4242/pipeline/context?project=this&asset=shot01&task=comp

    os.environ["AVALON_PROJECT"] = project
    io.Session["AVALON_PROJECT"] = project

    avalon.update_current_task(task, asset, app)

    project_code = pype.get_project()["data"].get("code", '')

    os.environ["AVALON_PROJECTCODE"] = project_code
    io.Session["AVALON_PROJECTCODE"] = project_code

    hierarchy = pype.get_hierarchy()
    os.environ["AVALON_HIERARCHY"] = hierarchy
    io.Session["AVALON_HIERARCHY"] = hierarchy

    fix_paths = {k: v.replace("\\", "/") for k, v in SESSION.items()
                 if isinstance(v, str)}
    SESSION.update(fix_paths)
    SESSION.update({"AVALON_HIERARCHY": hierarchy,
                    "AVALON_PROJECTCODE": project_code,
                    "current_dir": os.getcwd().replace("\\", "/")
                    })

    return SESSION


@pico.expose()
def deregister_plugin_path():
    if os.getenv("PUBLISH_PATH", None):
        aport_plugin_path = [p.replace("\\", "/") for p in os.environ["PUBLISH_PATH"].split(
            os.pathsep) if "aport" in p][0]
        os.environ["PUBLISH_PATH"] = aport_plugin_path
    else:
        log.warning("deregister_plugin_path(): No PUBLISH_PATH is registered")

    return "Publish path deregistered"


@pico.expose()
def register_plugin_path(publish_path):
    deregister_plugin_path()
    if os.getenv("PUBLISH_PATH", None):
        os.environ["PUBLISH_PATH"] = os.pathsep.join(
            os.environ["PUBLISH_PATH"].split(os.pathsep) +
            [publish_path.replace("\\", "/")]
        )
    else:
        os.environ["PUBLISH_PATH"] = publish_path

    log.info(os.environ["PUBLISH_PATH"].split(os.pathsep))

    return "Publish registered paths: {}".format(
        os.environ["PUBLISH_PATH"].split(os.pathsep)
    )


@pico.expose()
def nuke_test():
    import nuke
    n = nuke.createNode("Constant")
    log.info(n)


@pico.expose()
def hello(who='world'):
    return 'Hello %s' % who


@pico.expose()
def multiply(x, y):
    return x * y


@pico.expose()
def fail():
    raise Exception('fail!')


@pico.expose()
def make_coffee():
    raise ImATeapot()


@pico.expose()
def upload(upload, filename):
    if not filename.endswith('.txt'):
        raise BadRequest('Upload must be a .txt file!')
    return upload.read().decode()


@pico.expose()
@request_args(ip='remote_addr')
def my_ip(ip):
    return ip


@pico.expose()
@request_args(ip=lambda req: req.remote_addr)
def my_ip3(ip):
    return ip


@pico.prehandle()
def set_user(request, kwargs):
    if request.authorization:
        if request.authorization.password != 'secret':
            raise Unauthorized('Incorrect username or password')
        request.user = request.authorization.username
    else:
        request.user = None


@pico.expose()
@request_args(username='user')
def current_user(username):
    return username


@pico.expose()
@request_args(session=cookie('session_id'))
def session_id(session):
    return session


@pico.expose()
@set_cookie()
def start_session():
    return {'session_id': '42'}


@pico.expose()
@delete_cookie('session_id')
def end_session():
    return True


@pico.expose()
@request_args(session=header('x-session-id'))
def session_id2(session):
    return session


@pico.expose()
@stream()
def countdown(n=10):
    for i in reversed(range(n)):
        yield '%i' % i
        time.sleep(0.5)


@pico.expose()
def user_description(user):
    return '{name} is a {occupation} aged {age}'.format(**user)


@pico.expose()
def show_source():
    return open(__file__.replace('.pyc', '.py')).read()


app = PicoApp()
app.register_module(__name__)

# remove all Handlers created by pico
for name, handler in [(handler.get_name(), handler)
                      for handler in Logger().logging.root.handlers[:]]:
    if "pype" not in str(name).lower():
        print(name)
        print(handler)
        Logger().logging.root.removeHandler(handler)

@@ -1,196 +0,0 @@
<!DOCTYPE html>
<html>
<head>
  <meta name="viewport" content="width=device-width, initial-scale=1">
  <title>Pico Example - Everything</title>
  <!-- Load the pico Javascript client, always automatically available at /pico.js -->
  <script src="/pico.js"></script>
  <!-- Or load our module proxy -->
  <script src="/api.js"></script>
  <link rel="stylesheet" href="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.6/css/bootstrap.min.css" integrity="sha384-1q8mTJOASx8j1Au+a5WDVnPi2lkFfwwEAa8hDDdjZlpLegxhjVME1fgjWPGmkzs7" crossorigin="anonymous">
  <link rel="stylesheet" href="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.6/css/bootstrap-theme.min.css" integrity="sha384-fLW2N01lMqjakBkx3l/M9EahuwpSfeNvV63J5ezn3uZzapT0u7EYsXMjQV+0En5r" crossorigin="anonymous">
  <link rel="stylesheet" href="//cdnjs.cloudflare.com/ajax/libs/highlight.js/9.6.0/styles/default.min.css">
  <script src="//cdnjs.cloudflare.com/ajax/libs/highlight.js/9.6.0/highlight.min.js"></script>
  <script></script>

  <style type="text/css">
    html, body {
      height: 100%;
      margin: 0px;
      padding: 0px;
    }
    div {
      padding: 5px;
    }
    #container {
      height: 100%;
    }
    #header {
      height: 5%;
    }
    #main {
      height: 70%;
    }
    #output {
      background-color: #333;
      color: #aaa;
      min-height: 15%;
      overflow-y: scroll;
      padding: 20px;
      position: fixed;
      bottom: 0px;
      width: 100%;
    }
    .error {
      color: #f00 !important;
    }
    #examples li{
      padding: 10px;
      margin: 10px;
      background-color: silver;
    }
    code {
      border-radius: 0;
      margin: 5px;
      white-space: pre !important;
    }
    #source {
      height: 100%;
    }
    #examples {
      height: 100%;
    }
    #spacer {
      height: 20%;
    }

    .highlight {
      background-color: yellow;
    }
  </style>
</head>
<body>
  <div id="container">
    <div class="row row-eq-height">
      <div class="col-md-12">
        <h1>Pico Examples</h1>
        <p>Here we show some simple examples of using Pico. Click any <code>api.X</code> link to see the corresponding Python source.</p>
      </div>
    </div>
    <div class="row row-eq-height" id="main">
      <div class="col-md-6" id="examples">
        <ol>
          <li id="example1">
            <h4>Hello World</h4>
            <pre><code class="js"></code></pre>
            Name: <input type="text" name="name" value="Bob"/>
            <button class="btn btn-default btn-sm" type="button" onclick="example1()">Submit</button>
          </li>
          <li id="deregister">
            <h4>deregister_plugin_path</h4>
            <pre><code class="js"></code></pre>
            <button class="btn btn-default btn-sm" type="button" onclick="deregister()">Deregister</button>
          </li>
          <li id="register">
            <h4>register_plugin_path</h4>
            <pre><code class="js"></code></pre>
            Path: <input type="text" name="path" value="C:/Users/hubert/CODE/pype-setup/repos/pype-config/pype/plugins/premiere/publish"/>
            <button class="btn btn-default btn-sm" type="button" onclick="register()">Register path</button>
          </li>
          <li id="example2">
            <h4>Numeric Multiplication</h4>
            <pre><code class="js"></code></pre>
            <input type="number" name="x" value="6"/> x <input type="number" name="y" value="7"/>
            <button class="btn btn-default btn-sm" type="button" onclick="example2()">Multiply</button>
          </li>
          <li id="example3">
            <h4>File Upload</h4>
            <pre><code class="js"></code></pre>
            <input type="file" name="upload"/>
            <button class="btn btn-default btn-sm" type="button" onclick="example3()">Upload</button>
          </li>
          <li id="example4">
            <h4>Request parameters (IP address)</h4>
            <pre><code class="js"></code></pre>
            <button class="btn btn-default btn-sm" type="button" onclick="example4()">What's my IP?</button>
          </li>
          <li id="example5">
            <h4>Authentication</h4>
            <pre><code class="js"></code></pre>
            <p class="bg-info">Note: see <a href="#set_user" onclick="jumpTo('set_user')">api.set_user</a> for the authentication handler.</p>
            Username: <input type="text" name="username" value="bob"/>
            Password: <input type="password" name="password" value="secret"/>
            <button class="btn btn-default btn-sm" type="button" onclick="example5()">Sign In</button>
          </li>
          <li id="example6">
            <h4>Sessions (cookies)</h4>
            <pre><code class="js"></code></pre>
            <button class="btn btn-default btn-sm" type="button" onclick="example6()">What's my session id?</button>
          </li>
          <li id="example7">
            <h4>Sessions (header)</h4>
            <pre><code class="js"></code></pre>
            <button class="btn btn-default btn-sm" type="button" onclick="example7()">What's my session id?</button>
          </li>
          <li id="example8">
            <h4>Streaming Response</h4>
            <pre><code class="js"></code></pre>
            <button class="btn btn-default btn-sm" type="button" onclick="example8()">Countdown</button>
          </li>
          <li id="example9">
            <h4>Objects</h4>
            <pre><code class="js"></code></pre>
            <button class="btn btn-default btn-sm" type="button" onclick="example9()">Submit</button>
          </li>
          <li id="example10">
            <h4>Errors</h4>
            <pre><code class="js"></code></pre>
            <button class="btn btn-default btn-sm" type="button" onclick="example10()">Submit</button>
          </li>
          <li id="example11">
            <h4>Errors</h4>
            <pre><code class="js"></code></pre>
            <button class="btn btn-default btn-sm" type="button" onclick="example11()">Submit</button>
          </li>
          <li id="example12">
            <h4>Forms</h4>
            <p>This example submits a form as a whole instead of individual arguments.
            The form input names must match the function argument names.
            </p>
            <pre><code class="html"></code></pre>
            <pre><code class="js"></code></pre>
            <div class="example">
              <form>
                x: <input type="number" name="x" value="6"/><br/>
                y: <input type="number" name="y" value="7"/>
              </form>
              <button class="btn btn-default btn-sm" type="button" onclick="example12()">Multiply</button>
            </div>
          </li>
          <li id="example13">
            <h4>JSON</h4>
            <p>This example submits data as JSON instead of individual arguments.
            The object keys must match the function argument names.
            </p>
            <pre><code class="js"></code></pre>
            <button class="btn btn-default btn-sm" type="button" onclick="example13()">Multiply</button>
          </li>
        </ol>
        <div id="spacer">
        </div>
      </div>
      <div class="col-md-6" id="source">
        <pre><code class="python"></code></pre>
      </div>
    </div>
    <div class="row" id="output">
    </div>
  </div>

  <script src="script.js"></script>
</body>
</html>

@@ -1,146 +0,0 @@
import os
import sys
import tempfile
import pico
# from pico.decorators import request_args, prehandle
from pico import PicoApp
from pico import client

from avalon import api as avalon
from avalon import io

import pyblish.api as pyblish

from pypeapp import execute
from pype import api as pype

# remove all Handlers created by pico
for name, handler in [(handler.get_name(), handler)
                      for handler in pype.Logger.logging.root.handlers[:]]:
    if "pype" not in str(name).lower():
        pype.Logger.logging.root.removeHandler(handler)

log = pype.Logger().get_logger(__name__, "aport")


SESSION = avalon.session
if not SESSION:
    io.install()


@pico.expose()
def publish(json_data_path, staging_dir=None):
    """
    Runs standalone pyblish and adds a link to
    data in an external json file

    It is necessary to run `register_plugin_path` if a particular
    host is needed

    Args:
        json_data_path (string): path to temp json file with
                                 context data
        staging_dir (string, optional): path to temp directory

    Returns:
        dict: return_json_path

    Raises:
        Exception: description

    """
    staging_dir = staging_dir \
        or tempfile.mkdtemp(prefix="pype_aport_")

    return_json_path = os.path.join(staging_dir, "return_data.json")

    log.debug("avalon.session is: \n{}".format(SESSION))
    pype_start = os.path.join(os.getenv('PYPE_ROOT'),
                              "app", "pype-start.py")

    args = [pype_start, "--publish",
            "-pp", os.environ["PUBLISH_PATH"],
            "-d", "rqst_json_data_path", json_data_path,
            "-d", "post_json_data_path", return_json_path
            ]

    log.debug(args)

    # start standalone pyblish qml
    execute([
        sys.executable, "-u"
    ] + args,
        cwd=os.getenv('AVALON_WORKDIR').replace("\\", "/")
    )

    return {"return_json_path": return_json_path}


@pico.expose()
def context(project, asset, task, app):
    # http://localhost:4242/pipeline/context?project=this&asset=shot01&task=comp

    os.environ["AVALON_PROJECT"] = project
    io.Session["AVALON_PROJECT"] = project

    avalon.update_current_task(task, asset, app)

    project_code = pype.get_project()["data"].get("code", '')

    os.environ["AVALON_PROJECTCODE"] = project_code
    io.Session["AVALON_PROJECTCODE"] = project_code

    hierarchy = pype.get_hierarchy()
    os.environ["AVALON_HIERARCHY"] = hierarchy
    io.Session["AVALON_HIERARCHY"] = hierarchy

    fix_paths = {k: v.replace("\\", "/") for k, v in SESSION.items()
                 if isinstance(v, str)}
    SESSION.update(fix_paths)
    SESSION.update({"AVALON_HIERARCHY": hierarchy,
                    "AVALON_PROJECTCODE": project_code,
                    "current_dir": os.getcwd().replace("\\", "/")
                    })

    return SESSION


@pico.expose()
def deregister_plugin_path():
    if os.getenv("PUBLISH_PATH", None):
        aport_plugin_path = [p.replace("\\", "/") for p in os.environ["PUBLISH_PATH"].split(
            os.pathsep) if "aport" in p][0]
        os.environ["PUBLISH_PATH"] = aport_plugin_path
    else:
        log.warning("deregister_plugin_path(): No PUBLISH_PATH is registered")

    return "Publish path deregistered"


@pico.expose()
def register_plugin_path(publish_path):
    deregister_plugin_path()
    if os.getenv("PUBLISH_PATH", None):
        os.environ["PUBLISH_PATH"] = os.pathsep.join(
            os.environ["PUBLISH_PATH"].split(os.pathsep) +
            [publish_path.replace("\\", "/")]
        )
    else:
        os.environ["PUBLISH_PATH"] = publish_path

    log.info(os.environ["PUBLISH_PATH"].split(os.pathsep))

    return "Publish registered paths: {}".format(
        os.environ["PUBLISH_PATH"].split(os.pathsep)
    )


@pico.expose()
def nuke_test():
    import nuke
    n = nuke.createNode("Constant")
    log.info(n)


app = PicoApp()
app.register_module(__name__)
130 pype/aport/pipeline.py Normal file

@@ -0,0 +1,130 @@
import sys
import os
import getpass

from app.api import Logger
from io_nonsingleton import DbConnector


io = DbConnector()
log = Logger.getLogger(__name__, "aport")

self = sys.modules[__name__]
self.SESSION = None
self._registered_root = {"_": ""}
self.AVALON_PROJECT = os.getenv("AVALON_PROJECT", None)
self.AVALON_ASSET = os.getenv("AVALON_ASSET", None)
self.AVALON_TASK = os.getenv("AVALON_TASK", None)
self.AVALON_SILO = os.getenv("AVALON_SILO", None)


def get_session():
    if not self.SESSION:
        io.install()
        self.SESSION = io.Session

    # for k, v in os.environ.items():
    #     if 'AVALON' in k:
    #         print(str((k, v)))

    return self.SESSION


def update_current_task(task=None, asset=None, app=None):
    """Update active Session to a new task work area.

    This updates the live Session to a different `asset`, `task` or `app`.

    Args:
        task (str): The task to set.
        asset (str): The asset to set.
        app (str): The app to set.

    Returns:
        dict: The changed key, values in the current Session.

    """
    mapping = {
        "AVALON_ASSET": asset,
        "AVALON_TASK": task,
        "AVALON_APP": app,
    }
    changed = {key: value for key, value in mapping.items() if value}
    if not changed:
        return

    # Update silo when asset changed
    if "AVALON_ASSET" in changed:
        asset_document = io.find_one({"name": changed["AVALON_ASSET"],
                                      "type": "asset"})
        assert asset_document, "Asset must exist"
        silo = asset_document["silo"]
        if silo is None:
            silo = asset_document["name"]
        changed["AVALON_SILO"] = silo
        parents = asset_document['data']['parents']
        hierarchy = ""
        if len(parents) > 0:
            hierarchy = os.path.sep.join(parents)
        changed['AVALON_HIERARCHY'] = hierarchy

    # Compute work directory (with the temporary changed session so far)
    project = io.find_one({"type": "project"},
                          projection={"config.template.work": True})
    template = project["config"]["template"]["work"]
    _session = self.SESSION.copy()
    _session.update(changed)
    changed["AVALON_WORKDIR"] = _format_work_template(template, _session)

    # Update the full session in one go to avoid half updates
    self.SESSION.update(changed)

    # Update the environment
    os.environ.update(changed)

    return changed
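A usage sketch of switching the live session; the task and asset names are borrowed from the context example URL earlier in this changeset:

get_session()
changed = update_current_task(task="comp", asset="shot01", app="aport")
# e.g. {'AVALON_TASK': 'comp', 'AVALON_ASSET': 'shot01', 'AVALON_APP': 'aport',
#       'AVALON_SILO': ..., 'AVALON_HIERARCHY': ..., 'AVALON_WORKDIR': ...}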


def _format_work_template(template, session=None):
    """Return a formatted configuration template with a Session.

    Note: This *cannot* format the templates for published files since the
          session does not hold the context for a published file. Instead use
          `get_representation_path` to parse the full path to a published file.

    Args:
        template (str): The template to format.
        session (dict, Optional): The Session to use. If not provided use the
            currently active global Session.

    Returns:
        str: The fully formatted path.

    """
    if session is None:
        session = self.SESSION

    project = io.find_one({'type': 'project'})

    return template.format(**{
        "root": registered_root(),
        "project": {
            "name": project.get("name", session["AVALON_PROJECT"]),
            "code": project["data"].get("code", ''),
        },
        "silo": session["AVALON_SILO"],
        "hierarchy": session['AVALON_HIERARCHY'],
        "asset": session["AVALON_ASSET"],
        "task": session["AVALON_TASK"],
        "app": session["AVALON_APP"],
        "user": session.get("AVALON_USER", getpass.getuser())
    })
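A sketch of the kind of work template this formatter expects; the real template lives in the project's config.template.work document, so this shape is only an assumption:

template = "{root}/{project[name]}/{silo}/{hierarchy}/{asset}/work/{task}/{app}/{user}"
path = _format_work_template(template, session=get_session())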


def registered_root():
    """Return currently registered root"""
    return os.path.normpath(
        self._registered_root["_"]
        or self.SESSION.get("AVALON_PROJECTS") or ""
    )

@@ -1,12 +1,10 @@
from pype import api as pype
from pypeapp import Anatomy, config


log = pype.Logger().get_logger(__name__, "aport")
log = pype.Logger.getLogger(__name__, "aport")


def get_anatomy(**kwarg):
    return Anatomy()
    return pype.Anatomy


def get_dataflow(**kwarg):

@@ -17,8 +15,7 @@ def get_dataflow(**kwarg):
    assert any([host, cls]), log.error("aport.templates.get_dataflow():"
                                       "Missing mandatory kwargs `host`, `cls`")

    presets = config.get_init_presets()
    aport_dataflow = getattr(presets["dataflow"], str(host), None)
    aport_dataflow = getattr(pype.Dataflow, str(host), None)
    aport_dataflow_node = getattr(aport_dataflow.nodes, str(cls), None)
    if preset:
        aport_dataflow_node = getattr(aport_dataflow_node, str(preset), None)

@@ -35,8 +32,7 @@ def get_colorspace(**kwarg):
    assert any([host, cls]), log.error("aport.templates.get_colorspace():"
                                       "Missing mandatory kwargs `host`, `cls`")

    presets = config.get_init_presets()
    aport_colorspace = getattr(presets["colorspace"], str(host), None)
    aport_colorspace = getattr(pype.Colorspace, str(host), None)
    aport_colorspace_node = getattr(aport_colorspace, str(cls), None)
    if preset:
        aport_colorspace_node = getattr(aport_colorspace_node, str(preset), None)

@@ -6,6 +6,7 @@ from avalon import (
)
from pype import api as pype
import json
from pathlib import Path


class CollectContextDataFromAport(pyblish.api.ContextPlugin):

@@ -26,27 +27,26 @@ class CollectContextDataFromAport(pyblish.api.ContextPlugin):
    def process(self, context):

        # get json paths from data
        rqst_json_data_path = context.data['rqst_json_data_path']
        post_json_data_path = context.data['post_json_data_path']
        rqst_json_data_path = Path(context.data['rqst_json_data_path'])
        post_json_data_path = Path(context.data['post_json_data_path'])

        # get avalon session data and convert \ to /
        session = avalon.session
        fix_paths = {k: v.replace("\\", "/") for k, v in session.items()
                     if isinstance(v, str)}
        session.update(fix_paths)
        self.log.info(os.environ['AVALON_PROJECTS'])
        projects = Path(session['AVALON_PROJECTS']).resolve()
        wd = Path(session['AVALON_WORKDIR']).resolve()
        session['AVALON_PROJECTS'] = str(projects)
        session['AVALON_WORKDIR'] = str(wd)

        context.data["avalonSession"] = session
        self.log.debug("avalonSession: {}".format(session))

        # get the staging directory from the received path to json
        context.data["stagingDir"] = \
            staging_dir = os.path.dirname(
                post_json_data_path).replace("\\", "/")

        if not os.path.exists(staging_dir):
            os.makedirs(staging_dir)
        context.data["stagingDir"] = staging_dir = post_json_data_path.parent

        # get data from the received json file
        with open(rqst_json_data_path) as f:
            context.data['json_data'] = json_data = json.load(f)
        with rqst_json_data_path.open(mode='r') as f:
            context.data['jsonData'] = json_data = json.load(f)
        assert json_data, "No `data` in json file"

        # get and check host type

@@ -63,12 +63,13 @@ class CollectContextDataFromAport(pyblish.api.ContextPlugin):
|
|||
pyblish.api.register_host(host)
|
||||
|
||||
# get path to studio templates
|
||||
templates_dir = os.getenv("PYPE_CONFIG", None)
|
||||
assert templates_dir, "Missing `PYPE_CONFIG` in os.environ"
|
||||
templates_dir = os.getenv("PYPE_STUDIO_TEMPLATES", None)
|
||||
assert templates_dir, "Missing `PYPE_STUDIO_TEMPLATES` in os.environ"
|
||||
|
||||
# get presets for host
|
||||
presets_dir = os.path.join(templates_dir, "presets", host)
|
||||
assert os.path.exists(presets_dir), "Required path `{}` doesn't exist".format(presets_dir)
|
||||
assert os.path.exists(
|
||||
presets_dir), "Required path `{}` doesn't exist".format(presets_dir)
|
||||
|
||||
# load all available preset json files
|
||||
preset_data = dict()
|
||||
|
|
@ -84,16 +85,16 @@ class CollectContextDataFromAport(pyblish.api.ContextPlugin):
|
|||
# get current file
|
||||
current_file = json_data.get("currentFile", None)
|
||||
assert current_file, "No `currentFile` data in json file"
|
||||
context.data["currentFile"] = current_file
|
||||
context.data["currentFile"] = Path(current_file).resolve()
|
||||
|
||||
# get project data from avalon
|
||||
project_data = pype.get_project()["data"]
|
||||
project_data = pype.get_project_data()
|
||||
assert project_data, "No `project_data` data in avalon db"
|
||||
context.data["projectData"] = project_data
|
||||
self.log.debug("project_data: {}".format(project_data))
|
||||
|
||||
# get asset data from avalon and fix all paths
|
||||
asset_data = pype.get_asset()["data"]
|
||||
asset_data = pype.get_asset_data()
|
||||
assert asset_data, "No `asset_data` data in avalon db"
|
||||
asset_data = {k: v.replace("\\", "/") for k, v in asset_data.items()
|
||||
if isinstance(v, str)}
|
||||
|
|
|
|||
|
|
@@ -25,32 +25,41 @@ class CollectInstancesFromJson(pyblish.api.ContextPlugin):
     order = pyblish.api.CollectorOrder - 0.48

     def process(self, context):

         a_session = context.data.get("avalonSession")
-        json_data = context.data.get("json_data", None)
+        json_data = context.data.get("jsonData", None)
         assert json_data, "No `json_data` data in json file"

         instances_data = json_data.get("instances", None)
         assert instances_data, "No `instance` data in json file"

         staging_dir = json_data.get("stagingDir", None)
         assert staging_dir, "No `stagingDir` path in json file"

+        presets = context.data["presets"]
+        rules_tasks = presets["rules_tasks"]
+        ftrack_types = rules_tasks["ftrackTypes"]
+        assert ftrack_types, "No `ftrack_types` data in `/templates/presets/[host]/rules_tasks.json` file"
+
+        context.data["ftrackTypes"] = ftrack_types
+
         asset_default = presets["asset_default"]
-        assert instances_data, "No `asset_default` data in json file"
+        assert asset_default, "No `asset_default` data in `/templates/presets/[host]/asset_default.json` file"

         asset_name = a_session["AVALON_ASSET"]
-        entity = pype.get_asset(asset_name)
+        entity = io.find_one({"name": asset_name,
+                              "type": "asset"})

         # get frame start > first try from asset data
-        frame_start = context.data["assetData"].get("frameStart", None)
+        frame_start = context.data["assetData"].get("fstart", None)
         if not frame_start:
             self.log.debug("frame_start not on assetData")
             # get frame start > second try from parent data
-            frame_start = entity["data"]["frameStart"]
+            frame_start = pype.get_data_hierarchical_attr(entity, "fstart")
             if not frame_start:
                 self.log.debug("frame_start not on any parent entity")
                 # get frame start > third try from parent data
-                frame_start = asset_default["frameStart"]
+                frame_start = asset_default["fstart"]

         assert frame_start, "No `frame_start` data found, "
         "please set `fstart` on asset"
@@ -60,7 +69,7 @@ class CollectInstancesFromJson(pyblish.api.ContextPlugin):
         handles = context.data["assetData"].get("handles", None)
         if not handles:
             # get frame start > second try from parent data
-            handles = entity["data"]["handles"]
+            handles = pype.get_data_hierarchical_attr(entity, "handles")
             if not handles:
                 # get frame start > third try from parent data
                 handles = asset_default["handles"]
@@ -77,25 +86,38 @@ class CollectInstancesFromJson(pyblish.api.ContextPlugin):

         # get current file host
         host = a_session["AVALON_APP"]
-        family = "workfile"
-        subset_name = "{0}_{1}".format(task, family)
+        family = "projectfile"
+        families = "filesave"
+        subset_name = "{0}{1}".format(task, 'Default')
+        instance_name = "{0}_{1}_{2}".format(name,
+                                             family,
+                                             subset_name)
         # Set label
         label = "{0} - {1} > {2}".format(name, task, families)

+        # get project file instance Data
+        pf_instance = [inst for inst in instances_data
+                       if inst.get("family", None) in 'projectfile']
+        self.log.debug('pf_instance: {}'.format(pf_instance))
         # get working file into instance for publishing
-        instance = context.create_instance(subset_name)
+        instance = context.create_instance(instance_name)
+        if pf_instance:
+            instance.data.update(pf_instance[0])
         instance.data.update({
             "subset": subset_name,
+            "stagingDir": staging_dir,
+            "task": task,
+            "representation": ext[1:],
             "host": host,
             "asset": asset_name,
             "label": label,
             "name": name,
             # "hierarchy": hierarchy,
             # "parents": parents,
             "family": family,
-            "families": [families],
+            "families": [families, 'ftrack'],
             "publish": True,
             # "files": files_list
         })
         instances.append(instance)
@@ -103,11 +125,27 @@ class CollectInstancesFromJson(pyblish.api.ContextPlugin):
             # for key, value in inst.items():
             #     self.log.debug('instance[key]: {}'.format(key))
             #
+            version = inst.get("version", None)
+            assert version, "No `version` string in json file"
+
             name = asset = inst.get("name", None)
             assert name, "No `name` key in json_data.instance: {}".format(inst)

             family = inst.get("family", None)
-            assert family, "No `family` key in json_data.instance: {}".format(inst)
+            assert family, "No `family` key in json_data.instance: {}".format(
+                inst)
+
+            if family in 'projectfile':
+                continue

             files_list = inst.get("files", None)
             assert files_list, "`files` are empty in json file"

+            hierarchy = inst.get("hierarchy", None)
+            assert hierarchy, "No `hierarchy` data in json file"
+
+            parents = inst.get("parents", None)
+            assert parents, "No `parents` data in json file"
+
             tags = inst.get("tags", None)
             if tags:
@@ -117,32 +155,86 @@ class CollectInstancesFromJson(pyblish.api.ContextPlugin):
             tasks = rules_tasks["defaultTasks"]
             self.log.debug("tasks: `{}`".format(tasks))

+            subset_lst = []
+            subset_dict = {}
             for task in tasks:
+                # create list of tasks for creation
+                if not inst.get('tasks', None):
+                    inst['tasks'] = list()
+                if not inst.get('tasksTypes', None):
+                    inst['tasksTypes'] = {}
+
+                # append tasks into list for later hierarchy creation
+                ftrack_task_type = ftrack_types[task]
+                if task not in inst['tasks']:
+                    inst['tasks'].append(task)
+                    inst['tasksTypes'][task] = ftrack_task_type
+
                 host = rules_tasks["taskHost"][task]
                 subsets = rules_tasks["taskSubsets"][task]
-                for subset in subsets:
-                    subset_name = "{0}_{1}".format(task, subset)
-                    instance = context.create_instance(subset_name)
-                    # instance.add(inst)
-                    instance.data.update({
-                        "subset": subset_name,
-                        "task": task,
-                        "frameStart": frame_start,
-                        "handles": handles,
-                        "host": host,
-                        "asset": asset,
-                        "label": "{0} - {1} > {2}".format(name, task, subset),
-                        "name": subset_name,
-                        "family": inst["family"],
-                        "families": [subset],
-                        "jsonData": inst,
-                        # "parents": , # bez tasku
-                        # "hierarchy": ,
-                        "publish": True,
-                    })
-                    self.log.info("collected instance: {}".format(instance.data))
-                    instances.append(instance)
+                for sub in subsets:
+                    self.log.debug(sub)
+                    try:
+                        isinstance(subset_dict[sub], list)
+                    except Exception:
+                        subset_dict[sub] = list()
+
+                    subset_dict[sub].append(task)
+
+                subset_lst.extend([s for s in subsets if s not in subset_lst])
+
+            for subset in subset_lst:
+                if inst["representations"].get(subset, None):
+                    repr = inst["representations"][subset]
+                    ext = repr['representation']
+                else:
+                    continue
+                family = inst["family"]
+                # skip if thumbnail in name of subset
+                if "thumbnail" in subset:
+                    continue
+                elif "audio" in subset:
+                    family = subset
+                    subset_name = "{0}{1}".format(subset, "Main")
+                elif "reference" in subset:
+                    family = "render"
+                    subset_name = "{0}{1}".format(family, "Reference")
+                else:
+                    subset_name = "{0}{1}".format(subset, 'Default')

+                # create unique subset's name
+                name = "{0}_{1}_{2}".format(asset,
+                                            inst["family"],
+                                            subset_name)
+
+                instance = context.create_instance(name)
+                files = [f for f in files_list
+                         if subset in f or "thumbnail" in f
+                         ]
+
+                instance.data.update({
+                    "subset": subset_name,
+                    "stagingDir": staging_dir,
+                    "tasks": subset_dict[subset],
+                    "taskTypes": inst['tasksTypes'],
+                    "fstart": frame_start,
+                    "handles": handles,
+                    "host": host,
+                    "asset": asset,
+                    "hierarchy": hierarchy,
+                    "parents": parents,
+                    "files": files,
+                    "label": "{0} - {1}".format(
+                        asset, subset_name),
+                    "name": name,
+                    "family": family,
+                    "families": [subset, inst["family"], 'ftrack'],
+                    "jsonData": inst,
+                    "publish": True,
+                    "version": version})
+                self.log.info(
+                    "collected instance: {}".format(instance.data))
+                instances.append(instance)

         context.data["instances"] = instances
pype/plugins/premiere/publish/collect_audio_version.py (new file, 17 lines)
@@ -0,0 +1,17 @@
+import pyblish.api
+
+
+class CollectAudioVersion(pyblish.api.InstancePlugin):
+    """
+
+
+    """
+
+    label = "Collect Audio Version"
+    order = pyblish.api.CollectorOrder
+    families = ['audio']
+
+    def process(self, instance):
+        self.log.info('Audio: {}'.format(instance.data['name']))
+        instance.data['version'] = '001'
+        self.log.info('Audio version to: {}'.format(instance.data['version']))
pype/plugins/premiere/publish/collect_frameranges.py (new file, 34 lines)
@@ -0,0 +1,34 @@
+import pyblish.api
+
+
+class CollectFrameranges(pyblish.api.InstancePlugin):
+    """
+    Collecting frameranges needed for ftrack integration
+
+    Args:
+        context (obj): pyblish context session
+
+    """
+
+    label = "Collect Clip Frameranges"
+    order = pyblish.api.CollectorOrder
+    families = ['clip']
+
+    def process(self, instance):
+        # getting metadata from jsonData key
+        metadata = instance.data.get('jsonData').get('metadata')
+
+        # getting important metadata time calculation
+        fps = metadata['ppro.timeline.fps']
+        sec_start = metadata['ppro.clip.start']
+        sec_end = metadata['ppro.clip.end']
+        fstart = instance.data.get('fstart')
+        fend = fstart + (sec_end * fps) - (sec_start * fps) - 1
+
+        self.log.debug("instance: {}, fps: {}\nsec_start: {}\nsec_end: {}\nfstart: {}\nfend: {}\n".format(
+            instance.data['name'],
+            fps, sec_start, sec_end, fstart, fend))
+
+        instance.data['startFrame'] = fstart
+        instance.data['endFrame'] = fend
+        instance.data['fps'] = metadata['ppro.timeline.fps']
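A quick illustration of the frame-range arithmetic CollectFrameranges performs (editor's sketch, not part of the commit; all numbers are invented):

# Editor's sketch with made-up values -- mirrors the `fend` formula above.
fps = 24.0
sec_start = 3600.0   # 'ppro.clip.start', clip start on the timeline, seconds
sec_end = 3642.5     # 'ppro.clip.end', clip end on the timeline, seconds
fstart = 1001        # first frame assigned to the shot

# Duration in frames is (sec_end - sec_start) * fps; the -1 makes fend
# an inclusive last frame.
fend = fstart + (sec_end * fps) - (sec_start * fps) - 1
print(fend)  # 2020.0 -- stays a float unless explicitly cast to int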
pype/plugins/premiere/publish/collect_hierarchy_context.py (new file, 72 lines)
@@ -0,0 +1,72 @@
+import pyblish.api
+from avalon import api
+
+
+class CollectHierarchyContext(pyblish.api.ContextPlugin):
+    """Collecting hierarchy context from `parents` and `hierarchy` data
+    present in `clip` family instances coming from the request json data file
+
+    It will add `hierarchical_context` into each instance for integrate
+    plugins to be able to create needed parents for the context if they
+    don't exist yet
+    """
+
+    label = "Collect Hierarchy Context"
+    order = pyblish.api.CollectorOrder + 0.1
+
+    def update_dict(self, ex_dict, new_dict):
+        for key in ex_dict:
+            if key in new_dict and isinstance(ex_dict[key], dict):
+                new_dict[key] = self.update_dict(ex_dict[key], new_dict[key])
+            else:
+                new_dict[key] = ex_dict[key]
+        return new_dict
+
+    def process(self, context):
+        json_data = context.data.get("jsonData", None)
+        temp_context = {}
+        for instance in json_data['instances']:
+            if instance['family'] in 'projectfile':
+                continue
+
+            in_info = {}
+            name = instance['name']
+            # suppose that all instances are Shots
+            in_info['entity_type'] = 'Shot'
+
+            instance_pyblish = [
+                i for i in context.data["instances"] if i.data['asset'] in name][0]
+            in_info['custom_attributes'] = {
+                'fend': instance_pyblish.data['endFrame'],
+                'fstart': instance_pyblish.data['startFrame'],
+                'fps': instance_pyblish.data['fps']
+            }
+
+            in_info['tasks'] = instance['tasks']
+
+            parents = instance.get('parents', [])
+
+            actual = {name: in_info}
+
+            for parent in reversed(parents):
+                next_dict = {}
+                parent_name = parent["entityName"]
+                next_dict[parent_name] = {}
+                next_dict[parent_name]["entity_type"] = parent["entityType"]
+                next_dict[parent_name]["childs"] = actual
+                actual = next_dict
+
+            temp_context = self.update_dict(temp_context, actual)
+            self.log.debug(temp_context)
+
+        # TODO: 100% sure way of get project! Will be Name or Code?
+        project_name = api.Session["AVALON_PROJECT"]
+        final_context = {}
+        final_context[project_name] = {}
+        final_context[project_name]['entity_type'] = 'Project'
+        final_context[project_name]['childs'] = temp_context
+
+        # adding hierarchy context to instance
+        context.data["hierarchyContext"] = final_context
+        self.log.debug("context.data[hierarchyContext] is: {}".format(
+            context.data["hierarchyContext"]))
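How the recursive update_dict merge above combines per-clip parent chains into one tree (editor's sketch, not from the commit; the data is invented):

# Editor's sketch: deep-merge two single-shot hierarchies, as update_dict does.
def update_dict(ex_dict, new_dict):
    for key in ex_dict:
        if key in new_dict and isinstance(ex_dict[key], dict):
            new_dict[key] = update_dict(ex_dict[key], new_dict[key])
        else:
            new_dict[key] = ex_dict[key]
    return new_dict

a = {"sq01": {"entity_type": "Sequence", "childs": {"sh010": {"entity_type": "Shot"}}}}
b = {"sq01": {"entity_type": "Sequence", "childs": {"sh020": {"entity_type": "Shot"}}}}
merged = update_dict(a, b)
# -> {"sq01": {"entity_type": "Sequence",
#              "childs": {"sh010": {...}, "sh020": {...}}}}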
pype/plugins/premiere/publish/integrate_assumed_destination.py (new file, 132 lines)
@@ -0,0 +1,132 @@
+import pyblish.api
+import os
+
+from avalon import io, api
+
+
+class IntegrateAssumedDestination(pyblish.api.InstancePlugin):
+    """Generate the assumed destination path where the file will be stored"""
+
+    label = "Integrate Assumed Destination"
+    order = pyblish.api.IntegratorOrder - 0.05
+    families = ["clip", "projectfile"]
+
+    def process(self, instance):
+
+        self.create_destination_template(instance)
+
+        template_data = instance.data["assumedTemplateData"]
+        # template = instance.data["template"]
+
+        anatomy = instance.context.data['anatomy']
+        # template = anatomy.publish.path
+        anatomy_filled = anatomy.format(template_data)
+        mock_template = anatomy_filled.publish.path
+
+        # For now assume resources end up in a "resources" folder in the
+        # published folder
+        mock_destination = os.path.join(os.path.dirname(mock_template),
+                                        "resources")
+
+        # Clean the path
+        mock_destination = os.path.abspath(os.path.normpath(mock_destination))
+
+        # Define resource destination and transfers
+        resources = instance.data.get("resources", list())
+        transfers = instance.data.get("transfers", list())
+        for resource in resources:
+
+            # Add destination to the resource
+            source_filename = os.path.basename(resource["source"])
+            destination = os.path.join(mock_destination, source_filename)
+
+            # Force forward slashes to fix issue with software unable
+            # to work correctly with backslashes in specific scenarios
+            # (e.g. escape characters in PLN-151 V-Ray UDIM)
+            destination = destination.replace("\\", "/")
+
+            resource['destination'] = destination
+
+            # Collect transfers for the individual files of the resource
+            # e.g. all individual files of a cache or UDIM textures.
+            files = resource['files']
+            for fsrc in files:
+                fname = os.path.basename(fsrc)
+                fdest = os.path.join(mock_destination, fname)
+                transfers.append([fsrc, fdest])
+
+        instance.data["resources"] = resources
+        instance.data["transfers"] = transfers
+
+    def create_destination_template(self, instance):
+        """Create a filepath based on the current data available
+
+        Example template:
+            {root}/{project}/{silo}/{asset}/publish/{subset}/v{version:0>3}/
+            {subset}.{representation}
+        Args:
+            instance: the instance to publish
+
+        Returns:
+            file path (str)
+        """
+
+        # get all the stuff from the database
+        subset_name = instance.data["subset"]
+        self.log.info(subset_name)
+        asset_name = instance.data["asset"]
+        project_name = api.Session["AVALON_PROJECT"]
+
+        project = io.find_one({"type": "project",
+                               "name": project_name},
+                              projection={"config": True, "data": True})
+
+        template = project["config"]["template"]["publish"]
+        # anatomy = instance.context.data['anatomy']
+
+        asset = io.find_one({"type": "asset",
+                             "name": asset_name,
+                             "parent": project["_id"]})
+
+        assert asset, ("No asset found by the name '{}' "
+                       "in project '{}'".format(asset_name, project_name))
+        silo = asset['silo']
+
+        subset = io.find_one({"type": "subset",
+                              "name": subset_name,
+                              "parent": asset["_id"]})
+
+        # assume there is no version yet, we start at `1`
+        version = None
+        version_number = 1
+        if subset is not None:
+            version = io.find_one({"type": "version",
+                                   "parent": subset["_id"]},
+                                  sort=[("name", -1)])
+
+        # if there is a subset there ought to be version
+        if version is not None:
+            version_number += version["name"]
+
+        if instance.data.get('version'):
+            version_number = int(instance.data.get('version'))
+
+        hierarchy = asset['data']['parents']
+        if hierarchy:
+            # hierarchy = os.path.sep.join(hierarchy)
+            hierarchy = os.path.join(*hierarchy)
+
+        template_data = {"root": api.Session["AVALON_PROJECTS"],
+                         "project": {"name": project_name,
+                                     "code": project['data']['code']},
+                         "silo": silo,
+                         "family": instance.data['family'],
+                         "asset": asset_name,
+                         "subset": subset_name,
+                         "version": version_number,
+                         "hierarchy": hierarchy,
+                         "representation": "TEMP"}
+
+        instance.data["assumedTemplateData"] = template_data
+        self.log.info(template_data)
+        instance.data["template"] = template
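The version rule inside create_destination_template, reduced to a standalone function (editor's sketch; the helper name is mine, not the plugin's API):

# Editor's sketch of the version choice above: start at 1, bump past the
# latest existing version, and let an explicit version from the json win.
def pick_version(latest, requested=None):
    version_number = 1
    if latest is not None:
        version_number += latest      # latest existing 3 -> publish as 4
    if requested:
        version_number = int(requested)
    return version_number

assert pick_version(None) == 1
assert pick_version(3) == 4
assert pick_version(3, requested="7") == 7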
@@ -0,0 +1,21 @@
+import pyblish.api
+
+
+class IntegrateFtrackComponentOverwrite(pyblish.api.InstancePlugin):
+    """
+    Set `component_overwrite` to True on all instances `ftrackComponentsList`
+    """
+
+    order = pyblish.api.IntegratorOrder + 0.49
+    label = 'Overwrite ftrack created versions'
+    families = ["clip"]
+    optional = True
+    active = False
+
+    def process(self, instance):
+        component_list = instance.data['ftrackComponentsList']
+
+        for cl in component_list:
+            cl['component_overwrite'] = True
+            self.log.debug('Component {} overwriting'.format(
+                cl['component_data']['name']))
pype/plugins/premiere/publish/integrate_hierarchy_avalon.py (new file, 140 lines)
@@ -0,0 +1,140 @@
+import pyblish.api
+from avalon import io
+
+
+class IntegrateHierarchyToAvalon(pyblish.api.ContextPlugin):
+    """
+    Create entities in ftrack based on collected data from premiere
+
+    """
+
+    order = pyblish.api.IntegratorOrder - 0.1
+    label = 'Integrate Hierarchy To Avalon'
+    families = ['clip']
+
+    def process(self, context):
+        if "hierarchyContext" not in context.data:
+            return
+
+        self.db = io
+        if not self.db.Session:
+            self.db.install()
+
+        input_data = context.data["hierarchyContext"]
+        self.import_to_avalon(input_data)
+
+    def import_to_avalon(self, input_data, parent=None):
+
+        for name in input_data:
+            self.log.info('input_data[name]: {}'.format(input_data[name]))
+            entity_data = input_data[name]
+            entity_type = entity_data['entity_type']
+
+            data = {}
+            # Process project
+            if entity_type.lower() == 'project':
+                entity = self.db.find_one({'type': 'project'})
+                # TODO: should be in validator?
+                assert (entity is not None), "Didn't find project in DB"
+
+                # get data from already existing project
+                for key, value in entity.get('data', {}).items():
+                    data[key] = value
+
+                self.av_project = entity
+            # Raise error if project or parent are not set
+            elif self.av_project is None or parent is None:
+                raise AssertionError(
+                    "Collected items are not in right order!"
+                )
+            # Else process asset
+            else:
+                entity = self.db.find_one({'type': 'asset', 'name': name})
+                # Create entity if doesn't exist
+                if entity is None:
+                    if self.av_project['_id'] == parent['_id']:
+                        silo = None
+                    elif parent['silo'] is None:
+                        silo = parent['name']
+                    else:
+                        silo = parent['silo']
+                    entity = self.create_avalon_asset(name, silo)
+                    self.log.info('entity: {}'.format(entity))
+                    self.log.info('data: {}'.format(entity.get('data', {})))
+                    self.log.info('____1____')
+                    data['entityType'] = entity_type
+                    # TASKS
+                    tasks = entity_data.get('tasks', [])
+                    if tasks is not None or len(tasks) > 0:
+                        data['tasks'] = tasks
+                    parents = []
+                    visualParent = None
+                    data = input_data[name]
+                    if self.av_project['_id'] != parent['_id']:
+                        visualParent = parent['_id']
+                        parents.extend(parent.get('data', {}).get('parents', []))
+                        parents.append(parent['name'])
+                    data['visualParent'] = visualParent
+                    data['parents'] = parents
+
+                    self.db.update_many(
+                        {'_id': entity['_id']},
+                        {'$set': {
+                            'data': data,
+                        }})
+
+                    entity = self.db.find_one({'type': 'asset', 'name': name})
+                    self.log.info('entity: {}'.format(entity))
+                    self.log.info('data: {}'.format(entity.get('data', {})))
+                    self.log.info('____2____')
+
+                # Else get data from already existing
+                else:
+                    self.log.info('entity: {}'.format(entity))
+                    self.log.info('data: {}'.format(entity.get('data', {})))
+                    self.log.info('________')
+                    for key, value in entity.get('data', {}).items():
+                        data[key] = value
+
+            data['entityType'] = entity_type
+            # TASKS
+            tasks = entity_data.get('tasks', [])
+            if tasks is not None or len(tasks) > 0:
+                data['tasks'] = tasks
+            parents = []
+            visualParent = None
+            # do not store project's id as visualParent (silo asset)
+
+            if self.av_project['_id'] != parent['_id']:
+                visualParent = parent['_id']
+                parents.extend(parent.get('data', {}).get('parents', []))
+                parents.append(parent['name'])
+            data['visualParent'] = visualParent
+            data['parents'] = parents
+
+            # CUSTOM ATTRIBUTES
+            for k, val in entity_data.get('custom_attributes', {}).items():
+                data[k] = val
+
+            # Update entity data with input data
+            self.db.update_many(
+                {'_id': entity['_id']},
+                {'$set': {
+                    'data': data,
+                }})
+
+            if 'childs' in entity_data:
+                self.import_to_avalon(entity_data['childs'], entity)
+
+    def create_avalon_asset(self, name, silo):
+        item = {
+            'schema': 'avalon-core:asset-2.0',
+            'name': name,
+            'silo': silo,
+            'parent': self.av_project['_id'],
+            'type': 'asset',
+            'data': {}
+        }
+        entity_id = self.db.insert_one(item).inserted_id
+
+        return self.db.find_one({'_id': entity_id})
pype/plugins/premiere/publish/integrate_hierarchy_ftrack.py (new file, 155 lines)
@@ -0,0 +1,155 @@
+import pyblish.api
+
+
+class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):
+    """
+    Create entities in ftrack based on collected data from premiere
+    Example of entry data:
+    {
+        "ProjectXS": {
+            "entity_type": "Project",
+            "custom_attributes": {
+                "fps": 24,...
+            },
+            "tasks": [
+                "Compositing",
+                "Lighting",... *task must exist as task type in project schema*
+            ],
+            "childs": {
+                "sq01": {
+                    "entity_type": "Sequence",
+                    ...
+                }
+            }
+        }
+    }
+    """
+
+    order = pyblish.api.IntegratorOrder
+    label = 'Integrate Hierarchy To Ftrack'
+    families = ["clip"]
+    optional = False
+
+    def process(self, context):
+        self.context = context
+        if "hierarchyContext" not in context.data:
+            return
+
+        self.ft_project = None
+        self.session = context.data["ftrackSession"]
+
+        input_data = context.data["hierarchyContext"]
+
+        # adding ftrack types from presets
+        ftrack_types = context.data['ftrackTypes']
+
+        self.import_to_ftrack(input_data, ftrack_types)
+
+    def import_to_ftrack(self, input_data, ftrack_types, parent=None):
+        for entity_name in input_data:
+            entity_data = input_data[entity_name]
+            entity_type = entity_data['entity_type'].capitalize()
+
+            if entity_type.lower() == 'project':
+                query = 'Project where full_name is "{}"'.format(entity_name)
+                entity = self.session.query(query).one()
+                self.ft_project = entity
+                self.task_types = self.get_all_task_types(entity)
+
+            elif self.ft_project is None or parent is None:
+                raise AssertionError(
+                    "Collected items are not in right order!"
+                )
+
+            # try to find if entity already exists
+            else:
+                query = '{} where name is "{}" and parent_id is "{}"'.format(
+                    entity_type, entity_name, parent['id']
+                )
+                try:
+                    entity = self.session.query(query).one()
+                except Exception:
+                    entity = None
+
+            # Create entity if not exists
+            if entity is None:
+                entity = self.create_entity(
+                    name=entity_name,
+                    type=entity_type,
+                    parent=parent
+                )
+            # self.log.info('entity: {}'.format(dict(entity)))
+            # CUSTOM ATTRIBUTES
+            custom_attributes = entity_data.get('custom_attributes', [])
+            instances = [
+                i for i in self.context.data["instances"] if i.data['asset'] in entity['name']]
+            for key in custom_attributes:
+                assert (key in entity['custom_attributes']), (
+                    'Missing custom attribute')
+
+                entity['custom_attributes'][key] = custom_attributes[key]
+            for instance in instances:
+                instance.data['ftrackShotId'] = entity['id']
+
+            self.session.commit()
+
+            # TASKS
+            tasks = entity_data.get('tasks', [])
+            existing_tasks = []
+            tasks_to_create = []
+            for child in entity['children']:
+                if child.entity_type.lower() == 'task':
+                    existing_tasks.append(child['name'])
+                    # existing_tasks.append(child['type']['name'])
+
+            for task in tasks:
+                if task in existing_tasks:
+                    print("Task {} already exists".format(task))
+                    continue
+                tasks_to_create.append(task)
+
+            for task in tasks_to_create:
+                self.create_task(
+                    name=task,
+                    task_type=ftrack_types[task],
+                    parent=entity
+                )
+            self.session.commit()
+
+            if 'childs' in entity_data:
+                self.import_to_ftrack(
+                    entity_data['childs'], ftrack_types, entity)
+
+    def get_all_task_types(self, project):
+        tasks = {}
+        proj_template = project['project_schema']
+        temp_task_types = proj_template['_task_type_schema']['types']
+
+        for type in temp_task_types:
+            if type['name'] not in tasks:
+                tasks[type['name']] = type
+
+        return tasks
+
+    def create_task(self, name, task_type, parent):
+        task = self.session.create('Task', {
+            'name': name,
+            'parent': parent
+        })
+        # TODO not secured!!! - check if task_type exists
+        self.log.info(task_type)
+        self.log.info(self.task_types)
+        task['type'] = self.task_types[task_type]
+
+        self.session.commit()
+
+        return task
+
+    def create_entity(self, name, type, parent):
+        entity = self.session.create(type, {
+            'name': name,
+            'parent': parent
+        })
+        self.session.commit()
+
+        return entity
pype/plugins/premiere/publish/integrate_no_thumbnails.py (new file, 21 lines)
@@ -0,0 +1,21 @@
+import pyblish.api
+import os
+
+
+class IntegrateCleanThumbs(pyblish.api.InstancePlugin):
+    """
+    Cleaning up thumbnail files after they have been integrated
+    """
+
+    order = pyblish.api.IntegratorOrder + 9
+    label = 'Clean thumbnail files'
+    families = ["clip"]
+    optional = True
+    active = True
+
+    def process(self, instance):
+        remove_file = [tt for t in instance.data['transfers']
+                       for tt in t if 'jpg' in tt if 'temp' not in tt.lower()]
+        if len(remove_file) is 1:
+            os.remove(remove_file[0])
+            self.log.info('Thumbnail image was erased')
@@ -0,0 +1,19 @@
+import pyblish.api
+
+
+class IntegrateWorkfileVersion(pyblish.api.InstancePlugin):
+    """
+    Will desynchronize versioning from actual version of work file
+
+    """
+
+    order = pyblish.api.IntegratorOrder - 0.15
+    label = 'Do not synchronize workfile version'
+    families = ["clip"]
+    optional = True
+    active = False
+
+    def process(self, instance):
+        if instance.data['version']:
+            del(instance.data['version'])
+            self.log.info('Instance version was removed')
pype/plugins/premiere/publish/validate_auto_sync_off.py (new file, 51 lines)
@@ -0,0 +1,51 @@
+import pyblish.api
+import pype.api
+import avalon.api
+
+
+class ValidateAutoSyncOff(pyblish.api.ContextPlugin):
+    """Ensure that the autosync value on the ftrack project is set to False.
+
+    If it was set to True while an event server with the sync-to-avalon
+    event is running, the avalon integration would be overridden.
+
+    """
+
+    order = pyblish.api.ValidatorOrder
+    families = ['clip']
+    label = 'Ftrack project\'s auto sync off'
+    actions = [pype.api.RepairAction]
+
+    def process(self, context):
+        session = context.data["ftrackSession"]
+        project_name = avalon.api.Session["AVALON_PROJECT"]
+        query = 'Project where full_name is "{}"'.format(project_name)
+        project = session.query(query).one()
+        invalid = self.get_invalid(context)
+
+        assert not invalid, (
+            "Ftrack Project has 'Auto sync' set to On."
+            " That may cause issues during integration."
+        )
+
+    @staticmethod
+    def get_invalid(context):
+        session = context.data["ftrackSession"]
+        project_name = avalon.api.Session["AVALON_PROJECT"]
+        query = 'Project where full_name is "{}"'.format(project_name)
+        project = session.query(query).one()
+
+        invalid = None
+
+        if project.get('custom_attributes', {}).get(
+                'avalon_auto_sync', False):
+            invalid = project
+
+        return invalid
+
+    @classmethod
+    def repair(cls, context):
+        session = context.data["ftrackSession"]
+        invalid = cls.get_invalid(context)
+        invalid['custom_attributes']['avalon_auto_sync'] = False
+        session.commit()
@@ -1,38 +1,67 @@
 import os
 import sys
+import shutil

 from pysync import walktree

 from avalon import api as avalon
 from avalon.lib import launch
 from pyblish import api as pyblish
-from pypeapp import Logger
+from app import api as app
 from pprint import pprint
 from .. import api
-from pype.aport.lib import set_avalon_workdir
-
-from ..widgets.message_window import message

 import requests

-log = Logger().get_logger(__name__, "premiere")
+log = api.Logger.getLogger(__name__, "premiere")

 AVALON_CONFIG = os.getenv("AVALON_CONFIG", "pype")
 EXTENSIONS_PATH_LOCAL = os.getenv("EXTENSIONS_PATH", None)
 EXTENSIONS_CACHE_PATH = os.getenv("EXTENSIONS_CACHE_PATH", None)
 EXTENSIONS_PATH_REMOTE = os.path.join(os.path.dirname(__file__), "extensions")
 PARENT_DIR = os.path.dirname(__file__)
 PACKAGE_DIR = os.path.dirname(PARENT_DIR)
 PLUGINS_DIR = os.path.join(PACKAGE_DIR, "plugins")

+_clearing_cache = ["com.pype.rename", "com.pype.avalon"]
+
 PUBLISH_PATH = os.path.join(
     PLUGINS_DIR, "premiere", "publish"
 ).replace("\\", "/")

 if os.getenv("PUBLISH_PATH", None):
     os.environ["PUBLISH_PATH"] = os.pathsep.join(
         os.environ["PUBLISH_PATH"].split(os.pathsep) +
         [PUBLISH_PATH]
     )
 else:
     os.environ["PUBLISH_PATH"] = PUBLISH_PATH

 LOAD_PATH = os.path.join(PLUGINS_DIR, "premiere", "load")
 CREATE_PATH = os.path.join(PLUGINS_DIR, "premiere", "create")
 INVENTORY_PATH = os.path.join(PLUGINS_DIR, "premiere", "inventory")

+
+def clearing_caches_ui():
+    '''Before every start of premiere it will make sure there is not
+    outdated stuff in cep_cache dir'''
+
+    for d in os.listdir(EXTENSIONS_CACHE_PATH):
+        match = [p for p in _clearing_cache
+                 if str(p) in d]
+
+        if match:
+            try:
+                path = os.path.normpath(os.path.join(EXTENSIONS_CACHE_PATH, d))
+                log.info("Removing dir: {}".format(path))
+                shutil.rmtree(path, ignore_errors=True)
+            except Exception as e:
+                log.debug("problem: {}".format(e))
+
+
 def request_aport(url_path, data={}):
     try:
-        api.add_tool_to_environment(["aport"])
+        api.add_tool_to_environment(["aport_0.1"])

         ip = os.getenv("PICO_IP", None)
         if ip and ip.startswith('http'):
@@ -45,14 +74,14 @@ def request_aport(url_path, data={}):
         return req

     except Exception as e:
-        message(title="Premiere Aport Server",
+        api.message(title="Premiere Aport Server",
                 message="Before you can run Premiere, start Aport Server. \n Error: {}".format(
                     e),
                 level="critical")


 def extensions_sync():
-    import time
+    # import time
     process_pairs = list()
     # get extensions dir in pype.premiere.extensions
    # build dir path to premiere cep extensions
@@ -70,36 +99,55 @@ def extensions_sync():
        log.info("Extension {0} from `{1}` copied to `{2}`".format(
            name, src, dst
        ))
-    time.sleep(10)
+    # time.sleep(10)
     return


 def install():

-    set_avalon_workdir()
+    api.set_avalon_workdir()
     log.info("Registering Premiere plug-ins..")

     reg_paths = request_aport("/api/register_plugin_path",
                               {"publish_path": PUBLISH_PATH})

     log.info(str(reg_paths))

-    avalon.register_plugin_path(avalon.Loader, LOAD_PATH)
-    avalon.register_plugin_path(avalon.Creator, CREATE_PATH)
-    avalon.register_plugin_path(avalon.InventoryAction, INVENTORY_PATH)
+    # avalon.register_plugin_path(avalon.Loader, LOAD_PATH)
+    # avalon.register_plugin_path(avalon.Creator, CREATE_PATH)
+    # avalon.register_plugin_path(avalon.InventoryAction, INVENTORY_PATH)

     # Disable all families except for the ones we explicitly want to see
-    family_states = [
-        "imagesequence",
-        "mov"
-    ]
-    avalon.data["familiesStateDefault"] = False
-    avalon.data["familiesStateToggled"] = family_states
+    # family_states = [
+    #     "imagesequence",
+    #     "mov"
+    #
+    # ]
+    # avalon.data["familiesStateDefault"] = False
+    # avalon.data["familiesStateToggled"] = family_states

+    # load data from templates
+    api.load_data_from_templates()
+
+    # remove cep_cache from user temp dir
+    clearing_caches_ui()

     # synchronize extensions
     extensions_sync()
-    message(title="pyblish_paths", message=str(reg_paths), level="info")
+    message = "The Pype extension has been installed. " \
+        "\nThe following publishing paths has been registered: " \
+        "\n\n{}".format(
+            reg_paths)
+
+    api.message(title="pyblish_paths", message=message, level="info")
+
+    # launching premiere
+    exe = r"C:\Program Files\Adobe\Adobe Premiere Pro CC 2019\Adobe Premiere Pro.exe".replace(
+        "\\", "/")
+
+    log.info("____path exists: {}".format(os.path.exists(exe)))
+
+    app.forward(args=[exe],
+                silent=False,
+                cwd=os.getcwd(),
+                env=dict(os.environ),
+                shell=None)


 def uninstall():
@@ -107,3 +155,6 @@ def uninstall():
     pyblish.deregister_plugin_path(PUBLISH_PATH)
     avalon.deregister_plugin_path(avalon.Loader, LOAD_PATH)
     avalon.deregister_plugin_path(avalon.Creator, CREATE_PATH)
+
+    # reset data from templates
+    api.reset_data_from_templates()
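Both the aport and premiere installers grow PUBLISH_PATH the same way; as a standalone helper it would look like this (editor's sketch, not part of the commit; the function name is invented):

# Editor's sketch: append a plugin path to the os.pathsep-separated
# PUBLISH_PATH environment variable, creating it if absent.
import os

def append_publish_path(publish_path):
    existing = os.environ.get("PUBLISH_PATH")
    if existing:
        os.environ["PUBLISH_PATH"] = os.pathsep.join(
            existing.split(os.pathsep) + [publish_path])
    else:
        os.environ["PUBLISH_PATH"] = publish_path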
@@ -1,106 +0,0 @@
-{
-    "time": "20190112T181028Z",
-    "user": "jakub.jezek",
-    "currentFile": "C:/Users/hubert/_PYPE_testing/projects/jakub_projectx/editorial/e01/work/conform/jkprx_e01_conform_v001.prproj",
-    "cwd": "C:/Users/hubert/_PYPE_testing/projects/jakub_projectx/editorial/e01/work/conform",
-    "date": "2019-01-12T17:10:28.377000Z",
-    "framerate": "24.0",
-    "host": "premiere",
-    "hostVersion": "2019",
-    "isRenderedReference": true,
-    "referenceFile": "C:/Users/hubert/_PYPE_testing/projects/jakub_projectx/resources/reference/e01/sequence01/conform/jkprx_e01_conform_v001.mov",
-    "instances": [
-        {
-            "publish": true,
-            "family": "clip",
-            "name": "e01_s010_0010",
-            "filePath": "C:/Users/hubert/_PYPE_testing/projects/jakub_projectx/resources/footage/raw/day01/bbt_test_001_raw.mov",
-            "tags": [
-                {"task": "compositing"},
-                {"task": "roto"},
-                {"task": "3d"}
-            ],
-            "layer": "V1",
-            "sequence": "sequence01",
-            "representation": "mov",
-            "metadata": [
-                {"colorspace": "BT.709"},
-                {"fps": 24},
-                {"hasAudio": true},
-                {"format.width": 720},
-                {"format.height": 404},
-                {"format.pixelaspect": 1},
-                {"source.start": "00:00:00:00"},
-                {"source.end": "00:01:52:12"},
-                {"source.duration": "00:01:52:13"},
-                {"clip.start": "01:00:00:00"},
-                {"clip.end": "01:00:42:07"},
-                {"clip.duration": "00:00:42:08"},
-                {"clip.audio": {
-                    "audioChannels": 2,
-                    "audioRate": 48000
-                }},
-                {"timeline.audio": [
-                    {"metadata": [
-                        {"audioChannels": 2},
-                        {"audioRate": 48000},
-                        {"source.start": "00:00:00:00"},
-                        {"source.end": "00:01:52:12"},
-                        {"source.duration": "00:01:52:13"},
-                        {"clip.start": "01:00:00:00"},
-                        {"clip.end": "01:00:42:07"},
-                        {"clip.duration": "00:00:42:08"}
-                    ],
-                    "layer": "A2",
-                    "path": "file/path/to/audio.wav"}
-                ]}
-            ]
-        },
-        {
-            "publish": true,
-            "family": "clip",
-            "name": "e01_s010_0020",
-            "filePath": "C:/Users/hubert/_PYPE_testing/projects/jakub_projectx/resources/footage/raw/day01/bbt_test_001_raw.mov",
-            "tags": [
-                {"task": "compositing"},
-                {"task": "roto"},
-                {"task": "3d"}
-            ],
-            "layer": "V1",
-            "sequence": "sequence01",
-            "representation": "mov",
-            "metadata": [
-                {"colorspace": "BT.709"},
-                {"fps": 24},
-                {"hasAudio": true},
-                {"format.width": 720},
-                {"format.height": 404},
-                {"format.pixelaspect": 1},
-                {"source.start": "00:00:00:00"},
-                {"source.end": "00:01:52:12"},
-                {"source.duration": "00:01:52:13"},
-                {"clip.start": "01:00:00:00"},
-                {"clip.end": "01:00:42:07"},
-                {"clip.duration": "00:00:42:08"},
-                {"clip.audio": {
-                    "audioChannels": 2,
-                    "audioRate": 48000
-                }},
-                {"timeline.audio": [
-                    {"metadata": [
-                        {"audioChannels": 2},
-                        {"audioRate": 48000},
-                        {"source.start": "00:00:00:00"},
-                        {"source.end": "00:01:52:12"},
-                        {"source.duration": "00:01:52:13"},
-                        {"clip.start": "01:00:00:00"},
-                        {"clip.end": "01:00:42:07"},
-                        {"clip.duration": "00:00:42:08"}
-                    ],
-                    "layer": "A2",
-                    "path": "file/path/to/audio.wav"}
-                ]}
-            ]
-        }
-    ]
-}
@@ -1,12 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<ExtensionList>
-    <Extension Id="com.pond5.ppro">
-        <HostList>
-
-            <!-- Premiere -->
-            <Host Name="PPRO" Port="8089" />
-
-        </HostList>
-    </Extension>
-</ExtensionList>
@@ -1,65 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-
-<ExtensionManifest Version="6.0" ExtensionBundleId="com.pond5.ppro" ExtensionBundleVersion="1.0.9"
-    ExtensionBundleName="ppro" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
-    <Author>
-        <![CDATA[Pond5]]>
-    </Author>
-    <ExtensionList>
-        <Extension Id="com.pond5.ppro" Version="1.0.9" />
-    </ExtensionList>
-    <ExecutionEnvironment>
-        <HostList>
-            <Host Name="PPRO" Version="9.0" />
-        </HostList>
-        <LocaleList>
-            <Locale Code="All" />
-        </LocaleList>
-        <RequiredRuntimeList>
-            <RequiredRuntime Name="CSXS" Version="6.0" />
-        </RequiredRuntimeList>
-    </ExecutionEnvironment>
-    <DispatchInfoList>
-        <Extension Id="com.pond5.ppro">
-            <DispatchInfo >
-                <Resources>
-                    <MainPath>./index_remote.html</MainPath>
-                    <ScriptPath>./jsx/pond5.jsx</ScriptPath>
-                    <CEFCommandLine>
-                        <Parameter>--enable-nodejs</Parameter>
-                        <Parameter>--mixed-context</Parameter>
-                        <Parameter>--disable-application-cache</Parameter>
-                    </CEFCommandLine>
-                </Resources>
-                <Lifecycle>
-                    <AutoVisible>true</AutoVisible>
-                </Lifecycle>
-                <UI>
-                    <Type>Panel</Type>
-                    <Menu>Pond5</Menu>
-                    <Geometry>
-                        <Size>
-                            <Height>470</Height>
-                            <Width>875</Width>
-                        </Size>
-                        <MaxSize>
-                            <Height></Height>
-                            <Width></Width>
-                        </MaxSize>
-                        <MinSize>
-                            <Height></Height>
-                            <Width></Width>
-                        </MinSize>
-                    </Geometry>
-                    <Icons>
-                        <Icon Type="Normal">./icons/iconNormal.png</Icon>
-                        <Icon Type="RollOver">./icons/iconRollover.png</Icon>
-                        <Icon Type="Disabled">./icons/iconDisabled.png</Icon>
-                        <Icon Type="DarkNormal">./icons/iconDarkNormal.png</Icon>
-                        <Icon Type="DarkRollOver">./icons/iconDarkRollover.png</Icon>
-                    </Icons>
-                </UI>
-            </DispatchInfo>
-        </Extension>
-    </DispatchInfoList>
-</ExtensionManifest>
File diff suppressed because one or more lines are too long
@@ -1,7 +0,0 @@
-html, body, iframe {
-    width: 100%;
-    height: 100%;
-    border: 0px;
-    margin: 0px;
-    overflow: hidden;
-}
Binary files not shown (5 image files deleted; before: 18 KiB each).
@@ -1,14 +0,0 @@
-
-<!doctype html>
-<html lang="en">
-<head>
-    <meta charset="utf-8">
-    <title>Pond5</title>
-    <link rel="stylesheet" type="text/css" href="css/app.css">
-</head>
-<body onLoad="onLoaded()">
-
-
-</body>
-<script src="js/app.js"></script>
-</html>
@@ -1,5 +0,0 @@
-// switch between live and local code
-function onLoaded()
-{
-    window.location.href = "https://plugin.pond5.com/PPRO/index.html";
-}
@@ -1,489 +0,0 @@
-/*
-    json2.js
-    2014-02-04
-
-    Public Domain.
-
-    NO WARRANTY EXPRESSED OR IMPLIED. USE AT YOUR OWN RISK.
-
-    See http://www.JSON.org/js.html
-
-
-    This code should be minified before deployment.
-    See http://javascript.crockford.com/jsmin.html
-
-    USE YOUR OWN COPY. IT IS EXTREMELY UNWISE TO LOAD CODE FROM SERVERS YOU DO
-    NOT CONTROL.
-
-
-    This file creates a global JSON object containing two methods: stringify
-    and parse.
-
-        JSON.stringify(value, replacer, space)
-            value       any JavaScript value, usually an object or array.
-
-            replacer    an optional parameter that determines how object
-                        values are stringified for objects. It can be a
-                        function or an array of strings.
-
-            space       an optional parameter that specifies the indentation
-                        of nested structures. If it is omitted, the text will
-                        be packed without extra whitespace. If it is a number,
-                        it will specify the number of spaces to indent at each
-                        level. If it is a string (such as '\t' or '&nbsp;'),
-                        it contains the characters used to indent at each level.
-
-            This method produces a JSON text from a JavaScript value.
-
-            When an object value is found, if the object contains a toJSON
-            method, its toJSON method will be called and the result will be
-            stringified. A toJSON method does not serialize: it returns the
-            value represented by the name/value pair that should be serialized,
-            or undefined if nothing should be serialized. The toJSON method
-            will be passed the key associated with the value, and this will be
-            bound to the value
-
-            For example, this would serialize Dates as ISO strings.
-
-                Date.prototype.toJSON = function (key) {
-                    function f(n) {
-                        // Format integers to have at least two digits.
-                        return n < 10 ? '0' + n : n;
-                    }
-
-                    return this.getUTCFullYear()   + '-' +
-                         f(this.getUTCMonth() + 1) + '-' +
-                         f(this.getUTCDate())      + 'T' +
-                         f(this.getUTCHours())     + ':' +
-                         f(this.getUTCMinutes())   + ':' +
-                         f(this.getUTCSeconds())   + 'Z';
-                };
-
-            You can provide an optional replacer method. It will be passed the
-            key and value of each member, with this bound to the containing
-            object. The value that is returned from your method will be
-            serialized. If your method returns undefined, then the member will
-            be excluded from the serialization.
-
-            If the replacer parameter is an array of strings, then it will be
-            used to select the members to be serialized. It filters the results
-            such that only members with keys listed in the replacer array are
-            stringified.
-
-            Values that do not have JSON representations, such as undefined or
-            functions, will not be serialized. Such values in objects will be
-            dropped; in arrays they will be replaced with null. You can use
-            a replacer function to replace those with JSON values.
-            JSON.stringify(undefined) returns undefined.
-
-            The optional space parameter produces a stringification of the
-            value that is filled with line breaks and indentation to make it
-            easier to read.
-
-            If the space parameter is a non-empty string, then that string will
-            be used for indentation. If the space parameter is a number, then
-            the indentation will be that many spaces.
-
-            Example:
-
-            text = JSON.stringify(['e', {pluribus: 'unum'}]);
-            // text is '["e",{"pluribus":"unum"}]'
-
-
-            text = JSON.stringify(['e', {pluribus: 'unum'}], null, '\t');
-            // text is '[\n\t"e",\n\t{\n\t\t"pluribus": "unum"\n\t}\n]'
-
-            text = JSON.stringify([new Date()], function (key, value) {
-                return this[key] instanceof Date ?
-                    'Date(' + this[key] + ')' : value;
-            });
-            // text is '["Date(---current time---)"]'
-
-
-        JSON.parse(text, reviver)
-            This method parses a JSON text to produce an object or array.
-            It can throw a SyntaxError exception.
-
-            The optional reviver parameter is a function that can filter and
-            transform the results. It receives each of the keys and values,
-            and its return value is used instead of the original value.
-            If it returns what it received, then the structure is not modified.
-            If it returns undefined then the member is deleted.
-
-            Example:
-
-            // Parse the text. Values that look like ISO date strings will
-            // be converted to Date objects.
-
-            myData = JSON.parse(text, function (key, value) {
-                var a;
-                if (typeof value === 'string') {
-                    a =
-/^(\d{4})-(\d{2})-(\d{2})T(\d{2}):(\d{2}):(\d{2}(?:\.\d*)?)Z$/.exec(value);
-                    if (a) {
-                        return new Date(Date.UTC(+a[1], +a[2] - 1, +a[3], +a[4],
-                            +a[5], +a[6]));
-                    }
-                }
-                return value;
-            });
-
-            myData = JSON.parse('["Date(09/09/2001)"]', function (key, value) {
-                var d;
-                if (typeof value === 'string' &&
-                        value.slice(0, 5) === 'Date(' &&
-                        value.slice(-1) === ')') {
-                    d = new Date(value.slice(5, -1));
-                    if (d) {
-                        return d;
-                    }
-                }
-                return value;
-            });
-
-
-    This is a reference implementation. You are free to copy, modify, or
-    redistribute.
-*/
-
-/*jslint evil: true, regexp: true */
-
-/*members "", "\b", "\t", "\n", "\f", "\r", "\"", JSON, "\\", apply,
-    call, charCodeAt, getUTCDate, getUTCFullYear, getUTCHours,
-    getUTCMinutes, getUTCMonth, getUTCSeconds, hasOwnProperty, join,
-    lastIndex, length, parse, prototype, push, replace, slice, stringify,
-    test, toJSON, toString, valueOf
-*/
-
-
-// Create a JSON object only if one does not already exist. We create the
-// methods in a closure to avoid creating global variables.
-
-if (typeof JSON !== 'object') {
-    JSON = {};
-}
-
-(function () {
-    'use strict';
-
-    function f(n) {
-        // Format integers to have at least two digits.
-        return n < 10 ? '0' + n : n;
-    }
-
-    if (typeof Date.prototype.toJSON !== 'function') {
-
-        Date.prototype.toJSON = function () {
-
-            return isFinite(this.valueOf())
-                ? this.getUTCFullYear()     + '-' +
-                    f(this.getUTCMonth() + 1) + '-' +
-                    f(this.getUTCDate())      + 'T' +
-                    f(this.getUTCHours())     + ':' +
-                    f(this.getUTCMinutes())   + ':' +
-                    f(this.getUTCSeconds())   + 'Z'
-                : null;
-        };
-
-        String.prototype.toJSON      =
-            Number.prototype.toJSON  =
-            Boolean.prototype.toJSON = function () {
-                return this.valueOf();
-            };
-    }
-
-    var cx,
-        escapable,
-        gap,
-        indent,
-        meta,
-        rep;
-
-
-    function quote(string) {
-
-// If the string contains no control characters, no quote characters, and no
-// backslash characters, then we can safely slap some quotes around it.
-// Otherwise we must also replace the offending characters with safe escape
-// sequences.
-
-        escapable.lastIndex = 0;
-        return escapable.test(string) ? '"' + string.replace(escapable, function (a) {
-            var c = meta[a];
-            return typeof c === 'string'
-                ? c
-                : '\\u' + ('0000' + a.charCodeAt(0).toString(16)).slice(-4);
-        }) + '"' : '"' + string + '"';
-    }
-
-
-    function str(key, holder) {
-
-// Produce a string from holder[key].
-
-        var i,          // The loop counter.
-            k,          // The member key.
-            v,          // The member value.
-            length,
-            mind = gap,
-            partial,
-            value = holder[key];
-
-// If the value has a toJSON method, call it to obtain a replacement value.
-
-        if (value && typeof value === 'object' &&
-                typeof value.toJSON === 'function') {
-            value = value.toJSON(key);
-        }
-
-// If we were called with a replacer function, then call the replacer to
-// obtain a replacement value.
-
-        if (typeof rep === 'function') {
-            value = rep.call(holder, key, value);
-        }
-
-// What happens next depends on the value's type.
-
-        switch (typeof value) {
-        case 'string':
-            return quote(value);
-
-        case 'number':
-
-// JSON numbers must be finite. Encode non-finite numbers as null.
-
-            return isFinite(value) ? String(value) : 'null';
-
-        case 'boolean':
-        case 'null':
-
-// If the value is a boolean or null, convert it to a string. Note:
-// typeof null does not produce 'null'. The case is included here in
-// the remote chance that this gets fixed someday.
-
-            return String(value);
-
-// If the type is 'object', we might be dealing with an object or an array or
-// null.
-
-        case 'object':
-
-// Due to a specification blunder in ECMAScript, typeof null is 'object',
-// so watch out for that case.
-
-            if (!value) {
-                return 'null';
-            }
-
-// Make an array to hold the partial results of stringifying this object value.
-
-            gap += indent;
-            partial = [];
-
-// Is the value an array?
-
-            if (Object.prototype.toString.apply(value) === '[object Array]') {
-
-// The value is an array. Stringify every element. Use null as a placeholder
-// for non-JSON values.
-
-                length = value.length;
-                for (i = 0; i < length; i += 1) {
-                    partial[i] = str(i, value) || 'null';
-                }
-
-// Join all of the elements together, separated with commas, and wrap them in
-// brackets.
-
-                v = partial.length === 0
-                    ? '[]'
-                    : gap
-                    ? '[\n' + gap + partial.join(',\n' + gap) + '\n' + mind + ']'
-                    : '[' + partial.join(',') + ']';
-                gap = mind;
-                return v;
-            }
-
-// If the replacer is an array, use it to select the members to be stringified.
-
-            if (rep && typeof rep === 'object') {
-                length = rep.length;
-                for (i = 0; i < length; i += 1) {
-                    if (typeof rep[i] === 'string') {
-                        k = rep[i];
-                        v = str(k, value);
-                        if (v) {
-                            partial.push(quote(k) + (gap ? ': ' : ':') + v);
-                        }
-                    }
-                }
-            } else {
-
-// Otherwise, iterate through all of the keys in the object.
-
-                for (k in value) {
-                    if (Object.prototype.hasOwnProperty.call(value, k)) {
-                        v = str(k, value);
-                        if (v) {
-                            partial.push(quote(k) + (gap ? ': ' : ':') + v);
-                        }
-                    }
-                }
-            }
-
-// Join all of the member texts together, separated with commas,
-// and wrap them in braces.
-
-            v = partial.length === 0
-                ? '{}'
-                : gap
-                ? '{\n' + gap + partial.join(',\n' + gap) + '\n' + mind + '}'
-                : '{' + partial.join(',') + '}';
-            gap = mind;
-            return v;
-        }
-    }
-
-// If the JSON object does not yet have a stringify method, give it one.
-
-    if (typeof JSON.stringify !== 'function') {
-        escapable = /[\\\"\x00-\x1f\x7f-\x9f\u00ad\u0600-\u0604\u070f\u17b4\u17b5\u200c-\u200f\u2028-\u202f\u2060-\u206f\ufeff\ufff0-\uffff]/g;
-        meta = {    // table of character substitutions
-            '\b': '\\b',
-            '\t': '\\t',
-            '\n': '\\n',
-            '\f': '\\f',
-            '\r': '\\r',
-            '"' : '\\"',
-            '\\': '\\\\'
-        };
-        JSON.stringify = function (value, replacer, space) {
-
-// The stringify method takes a value and an optional replacer, and an optional
-// space parameter, and returns a JSON text. The replacer can be a function
-// that can replace values, or an array of strings that will select the keys.
-// A default replacer method can be provided. Use of the space parameter can
-// produce text that is more easily readable.
-
-            var i;
-            gap = '';
-            indent = '';
-
-// If the space parameter is a number, make an indent string containing that
-// many spaces.
-
-            if (typeof space === 'number') {
-                for (i = 0; i < space; i += 1) {
-                    indent += ' ';
-                }
-
-// If the space parameter is a string, it will be used as the indent string.
-
-            } else if (typeof space === 'string') {
-                indent = space;
-            }
-
-// If there is a replacer, it must be a function or an array.
-// Otherwise, throw an error.
-
-            rep = replacer;
-            if (replacer && typeof replacer !== 'function' &&
-                    (typeof replacer !== 'object' ||
-                    typeof replacer.length !== 'number')) {
-                throw new Error('JSON.stringify');
-            }
-
-// Make a fake root object containing our value under the key of ''.
-// Return the result of stringifying the value.
-
-            return str('', {'': value});
-        };
-    }
-
-
-// If the JSON object does not yet have a parse method, give it one.
-
-    if (typeof JSON.parse !== 'function') {
-        cx = /[\u0000\u00ad\u0600-\u0604\u070f\u17b4\u17b5\u200c-\u200f\u2028-\u202f\u2060-\u206f\ufeff\ufff0-\uffff]/g;
-        JSON.parse = function (text, reviver) {
-
-// The parse method takes a text and an optional reviver function, and returns
-// a JavaScript value if the text is a valid JSON text.
-
-            var j;
-
-            function walk(holder, key) {
-
-// The walk method is used to recursively walk the resulting structure so
-// that modifications can be made.
-
-                var k, v, value = holder[key];
-                if (value && typeof value === 'object') {
|
||||
for (k in value) {
|
||||
if (Object.prototype.hasOwnProperty.call(value, k)) {
|
||||
v = walk(value, k);
|
||||
if (v !== undefined) {
|
||||
value[k] = v;
|
||||
} else {
|
||||
delete value[k];
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return reviver.call(holder, key, value);
|
||||
}
|
||||
|
||||
|
||||
// Parsing happens in four stages. In the first stage, we replace certain
|
||||
// Unicode characters with escape sequences. JavaScript handles many characters
|
||||
// incorrectly, either silently deleting them, or treating them as line endings.
|
||||
|
||||
text = String(text);
|
||||
cx.lastIndex = 0;
|
||||
if (cx.test(text)) {
|
||||
text = text.replace(cx, function (a) {
|
||||
return '\\u' +
|
||||
('0000' + a.charCodeAt(0).toString(16)).slice(-4);
|
||||
});
|
||||
}
|
||||
|
||||
// In the second stage, we run the text against regular expressions that look
|
||||
// for non-JSON patterns. We are especially concerned with '()' and 'new'
|
||||
// because they can cause invocation, and '=' because it can cause mutation.
|
||||
// But just to be safe, we want to reject all unexpected forms.
|
||||
|
||||
// We split the second stage into 4 regexp operations in order to work around
|
||||
// crippling inefficiencies in IE's and Safari's regexp engines. First we
|
||||
// replace the JSON backslash pairs with '@' (a non-JSON character). Second, we
|
||||
// replace all simple value tokens with ']' characters. Third, we delete all
|
||||
// open brackets that follow a colon or comma or that begin the text. Finally,
|
||||
// we look to see that the remaining characters are only whitespace or ']' or
|
||||
// ',' or ':' or '{' or '}'. If that is so, then the text is safe for eval.
|
||||
|
||||
if (/^[\],:{}\s]*$/
|
||||
.test(text.replace(/\\(?:["\\\/bfnrt]|u[0-9a-fA-F]{4})/g, '@')
|
||||
.replace(/"[^"\\\n\r]*"|true|false|null|-?\d+(?:\.\d*)?(?:[eE][+\-]?\d+)?/g, ']')
|
||||
.replace(/(?:^|:|,)(?:\s*\[)+/g, ''))) {
|
||||
|
||||
// In the third stage we use the eval function to compile the text into a
|
||||
// JavaScript structure. The '{' operator is subject to a syntactic ambiguity
|
||||
// in JavaScript: it can begin a block or an object literal. We wrap the text
|
||||
// in parens to eliminate the ambiguity.
|
||||
|
||||
j = eval('(' + text + ')');
|
||||
|
||||
// In the optional fourth stage, we recursively walk the new structure, passing
|
||||
// each name/value pair to a reviver function for possible transformation.
|
||||
|
||||
return typeof reviver === 'function'
|
||||
? walk({'': j}, '')
|
||||
: j;
|
||||
}
|
||||
|
||||
// If the text is not JSON parseable, then a SyntaxError is thrown.
|
||||
|
||||
throw new SyntaxError('JSON.parse');
|
||||
};
|
||||
}
|
||||
}());
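
// Usage sketch (illustrative, not part of json2.js): once the shim above has
// run, JSON.stringify and JSON.parse behave like the ES5 natives; the values
// below are arbitrary examples.
var record = { id: 42, tags: ['a', 'b'] };
var text = JSON.stringify(record, ['id'], 2);           // array replacer keeps only "id"
var round = JSON.parse('{"n": 1}', function (key, value) {
    return typeof value === 'number' ? value * 10 : value;  // reviver transforms leaf values
});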

@@ -1,277 +0,0 @@
var projectItems = [];
var sequences = [];

function importClips(obj) {
    app.project.importFiles(obj.paths);
    return JSON.stringify(obj);
}

function getEnv() {
    app.enableQE();
    var obj = {
        os: qe.platform,
        name: app.project.name,
        path: app.project.path
    }
    return JSON.stringify(obj);
}

function getSequences() {
    var project = app.project;
    // var sequences = [];
    for (var i = 0; i < project.sequences.numSequences; i++) {
        var seq = project.sequences[i];
        seq.clipNames = [];
        sequences[i] = seq;
        log('sequences[i] id: ' + project.sequences[i].sequenceID);
    }

    var obj = {
        sequences: sequences
    }
    return JSON.stringify(obj);
}

function getSequenceItems(seqs) {
    app.enableQE();
    qe.project.init();
    sequences = seqs;
    // log('getSequenceItems sequences obj from app: ' + sequences);

    var rootFolder = app.project.rootItem;
    var binCounter = -1;
    var rootSeqCounter = -1; // count sequences in root folder

    // walk through root folder of project to differentiate between bins, sequences and clips
    for (var i = 0; i < rootFolder.children.numItems; i++) {
        // log('\nroot item at ' + i + " is " + rootFolder.children[i].name + " of type " + rootFolder.children[i].type);
        var item = rootFolder.children[i];
        // log('item has video tracks? ' + item.videoTracks);
        if (item.type == 2) { // bin
            binCounter++;
            walkBins(item, 'root', binCounter);
        } else if (item.type == 1 && !item.getMediaPath()) // sequence OR other type of object
        {
            // log('\nObject of type 1 in root: ' + typeof item + ' ' + item.name);

            if (objectIsSequence(item)) { // objects of type 1 can also be other objects such as titles, so check if it really is a sequence
                // log('\nSequence in root: ' + item.name );
                rootSeqCounter++;
                var seq = qe.project.getSequenceAt(rootSeqCounter);
                // log('\nSequence in root, guid: ' + seq );
                for (var property in seq) {
                    if (seq.hasOwnProperty(property)) {
                        // log('\nSequence in root: ' + seq );
                        // log('qe sequence prop: ' + property );
                    }
                }
                getClipNames(seq, sequences);
            }
        }
    }

    function objectIsSequence(item) {
        var isSequence = false;

        for (var s = 0; s < app.project.sequences.numSequences; s++)
            if (item.name == app.project.sequences[s].name)
                isSequence = true;

        return isSequence;
    }

    // walk through bins recursively
    function walkBins(item, source, rootBinCounter) {
        app.enableQE();
        // log('\nget clips for bin ' + item.name );

        var bin;
        if (source == 'root') // bin in root folder
            bin = qe.project.getBinAt(rootBinCounter);
        else // bin in other bin
            bin = item;

        for (var i = 0; i < bin.numBins; i++) // if bin contains bin(s) walk through them
            walkBins(bin.getBinAt(i));

        // log('Bin ' + bin.name + ' has ' + bin.numSequences + ' sequences ' );
        var seqCounter = -1;
        for (var j = 0; j < bin.numSequences; j++) {
            // if (objectIsSequence(item)) { // objects of type 1 can also be other objects such as titles, so check if it really is a sequence?
            // not needed because getSequenceAt apparently only looks at sequences already?
            var seq = bin.getSequenceAt(j);
            // log('\nSequence in bin, guid: ' + seq.guid );
            getClipNames(seq, sequences);
            //}
        }
    }

    // walk through sequences and video & audio tracks to find clip names in sequences
    function getClipNames(seq, sequences) {

        for (var k = 0; k < sequences.length; k++) {
            // log('getClipNames seq.guid ' + seq.guid );
            // log(' getClipNames sequences[k].id ' + sequences[k].sequenceID );
            if (seq.guid == sequences[k].sequenceID) {
                // log('Sequence ' + seq.name + ' has ' + app.project.sequences[k].videoTracks.numTracks + ' video tracks' );
                // log('Sequence ' + seq.name + ' has ' + app.project.sequences[k].audioTracks.numTracks + ' audio tracks' );

                // VIDEO CLIPS IN SEQUENCES
                for (var l = 0; l < sequences[k].videoTracks.numTracks; l++) {
                    var videoTrack = seq.getVideoTrackAt(l);
                    // log(seq.name + ' has video track ' + videoTrack.name + ' at index ' + l);

                    var clipCounter = 0;
                    var numOfClips = app.project.sequences[k].videoTracks[l].clips.numTracks;
                    // log('\n' + bin.name + ' ' + seq.name + ' ' + videoTrack.name + ' has ' + numOfClips + ' clips');
                    for (var m = 0; m < numOfClips; m++) {
                        var clip = app.project.sequences[k].videoTracks[l].clips[m];
                        // log('clips in video tracks: ' + m + ' - ' + clip); // TrackItem, doesn't have name property
                        // if a clip was deleted and another one added, the index of the new one is one or more higher
                        while (clipCounter < numOfClips) // undefined because of old clips
                        {
                            if (videoTrack.getItemAt(m).name) {
                                clipCounter++;
                                // log('getClipNames ' + seq.name + ' ' + videoTrack.name + ' has ' + videoTrack.getItemAt(m).name); // Object

                                for (var s = 0; s < sequences.length; s++)
                                    if (seq.guid == sequences[s].sequenceID)
                                        sequences[s].clipNames.push(videoTrack.getItemAt(m).name);
                            }
                            m++;
                        }
                    }
                }
                // log('jsx after video loop clipsInSequences:' + clipsInSequences);

                // AUDIO CLIPS IN SEQUENCES
                for (var l = 0; l < sequences[k].audioTracks.numTracks; l++) {
                    var audioTrack = seq.getAudioTrackAt(l);
                    // log(bin.name + ' ' + seq.name + ' has audio track ' + audioTrack.name + ' at index ' + l);
                    // log('\n' + bin.name + ' ' + seq.name + ' ' + audioTrack.name + ' has ' + app.project.sequences[k].audioTracks[l].clips.numTracks + ' clips');
                    var clipCounter = 0;
                    var numOfClips = app.project.sequences[k].audioTracks[l].clips.numTracks;

                    for (var m = 0; m < numOfClips; m++) {
                        var clip = app.project.sequences[k].audioTracks[l].clips[m];
                        // log('clips in audio tracks: ' + m + ' - ' + clip);
                        // if a clip was deleted and another one added, the index of the new one is one or more higher
                        while (clipCounter < numOfClips) // undefined because of old clips
                        {
                            if (audioTrack.getItemAt(m).name) {
                                clipCounter++;
                                // log(seq.name + ' ' + audioTrack.name + ' has ' + audioTrack.getItemAt(m).name);

                                for (var s = 0; s < sequences.length; s++)
                                    if (seq.guid == sequences[s].sequenceID)
                                        sequences[s].clipNames.push(audioTrack.getItemAt(m).name);
                            }
                            m++;
                        }
                    }
                }

            } // end if
        } // end for
    } // end getClipNames

    log('sequences returned:' + sequences);
    // return result to ReplaceService.js
    var obj = {
        data: sequences
    };
    // log('jsx getClipNames obj:' + obj);
    return JSON.stringify(obj);
}

// getSequenceItems();

function getProjectItems() {
    projectItems = [];
    app.enableQE();
    qe.project.init();

    var rootFolder = app.project.rootItem;
    // walk through root folder of project to differentiate between bins, sequences and clips
    for (var i = 0; i < rootFolder.children.numItems; i++) {
        // log('\nroot item at ' + i + " is of type " + rootFolder.children[i].type);
        var item = rootFolder.children[i];

        if (item.type == 2) { // bin
            walkBins(item);
        } else if (item.type == 1 && item.getMediaPath()) // clip in root
        {
            // log('Root folder has ' + item + ' ' + item.name);
            projectItems.push(item);
        }
    }

    // walk through bins recursively
    function walkBins(bin) {
        app.enableQE();

        // $.writeln(bin.name + ' has ' + bin.children.numItems);
        for (var i = 0; i < bin.children.numItems; i++) {
            var object = bin.children[i];
            // log(bin.name + ' has ' + object + ' ' + object.name + ' of type ' + object.type + ' and has mediapath ' + object.getMediaPath() );
            if (object.type == 2) { // bin
                // log(object.name + ' has ' + object.children.numItems );
                for (var j = 0; j < object.children.numItems; j++) {
                    var obj = object.children[j];
                    if (obj.type == 1 && obj.getMediaPath()) { // clip in sub bin
                        // log(object.name + ' has ' + obj + ' ' + obj.name );
                        projectItems.push(obj);
                    } else if (obj.type == 2) { // bin
                        walkBins(obj);
                    }
                }
            } else if (object.type == 1 && object.getMediaPath()) // clip in bin in root
            {
                // log(bin.name + ' has ' + object + ' ' + object.name );
                projectItems.push(object);
            }
        }
    }

    log('\nprojectItems:' + projectItems.length + ' ' + projectItems);
    return projectItems;
}

function replaceClips(obj) {

    log('num of projectItems:' + projectItems.length);
    var hiresVOs = obj.hiresOnFS;
    for (var i = 0; i < hiresVOs.length; i++) {
        log('hires vo name: ' + hiresVOs[i].name);
        log('hires vo id: ' + hiresVOs[i].id);
        log('hires vo path: ' + hiresVOs[i].path);
        log('hires vo replace: ' + hiresVOs[i].replace);

        for (var j = 0; j < projectItems.length; j++) {
            // log('projectItem id: ' + projectItems[j].name.split(' ')[0] + ' ' + hiresVOs[i].id + ' can change path ' + projectItems[j].canChangeMediaPath() );
            if (projectItems[j].name.split(' ')[0] == hiresVOs[i].id && hiresVOs[i].replace && projectItems[j].canChangeMediaPath()) {
                log('replace: ' + projectItems[j].name + ' with ' + hiresVOs[i].name);
                projectItems[j].name = hiresVOs[i].name;
                projectItems[j].changeMediaPath(hiresVOs[i].path);
            }
        }
    }
}

function log(info) {
    try {
        var xLib = new ExternalObject("lib:PlugPlugExternalObject");
    } catch (e) {
        alert(e);
    }

    if (xLib) {
        var eventObj = new CSXSEvent();
        eventObj.type = "LogEvent";
        eventObj.data = info;
        eventObj.dispatch();
    }
}

function message(msg) {
    $.writeln(msg); // Using '$' object will invoke ExtendScript Toolkit, if installed.
}
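
// Usage sketch (illustrative, not part of the deleted file above): a CEP
// panel would call these ExtendScript entry points through the standard
// CSInterface bridge; the callback shape below is an assumption.
var cs = new CSInterface();
cs.evalScript('getEnv()', function (result) {
    var env = JSON.parse(result);           // {os, name, path} as built in getEnv()
    console.log('host project:', env.name);
});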

@@ -1 +0,0 @@
application/vnd.adobe.air-ucf-package+zip

83 pype/premiere/extensions/com.pond5.ppro/node_modules/.bin/decompress-zip generated vendored
@@ -1,83 +0,0 @@
#!/usr/bin/env node
'use strict';
var nopt = require('nopt');
var path = require('path');
var version = require('../package.json').version;

var knownOptions = {
    'list': Boolean,
    'extract': Boolean,
    'path': path
};

var shortcuts = {
    'x': ['--extract'],
    'l': ['--list'],
    'p': ['--path'],
    'v': ['--version']
};

var parsedOptions = nopt(knownOptions, shortcuts);

var pad = function (string, length) {
    string = String(string);

    if (length <= string.length) {
        return string;
    }

    return string + (new Array(length - string.length).join(' '));
};

var octal = function (number, digits) {
    var result = '';

    for (var i = 0; i < digits; i++) {
        result = (number & 0x07) + result;
        number >>= 3;
    }

    return result;
};

var DecompressZip = require('../lib/decompress-zip');
var zip = new DecompressZip(parsedOptions.argv.remain[0]);

zip.on('file', function (file) {
    console.log([octal(file.mode, 4), pad(file.type, 13), pad(file.compressedSize, 10), pad(file.uncompressedSize, 10), file.path].join(' '));
});

zip.on('list', function (fileList) {
    // console.log(fileList);
});

zip.on('extract', function (result) {
    console.log(result);
});

zip.on('error', function (error) {
    console.error(error.message, error.stack);
});

if (parsedOptions.version) {
    console.log('version ' + version);
} else if (parsedOptions.list) {
    console.log('Mode Type Zip size Full size Path');
    console.log('---- ---- -------- --------- ----');
    zip.list();
} else if (parsedOptions.extract) {
    var options = {};

    if (parsedOptions.path) {
        options.path = parsedOptions.path;
    }

    zip.extract(options);
} else {
    console.log('Usage: decompress-zip <options> <file>');
    console.log('  -x, --extract        extract the given file');
    console.log('  -l, --list           list the contents of the given file');
    console.log('  -v, --version        show the version number');
    console.log('  -p, --path <path>    extract the file into <path>');
    console.log('  -h, --help           show this message');
}
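
// Usage sketch (hypothetical archive path): the library this CLI wraps can be
// driven programmatically; the events mirror the handlers registered above.
var DecompressZip = require('decompress-zip');
var unzipper = new DecompressZip('archive.zip');
unzipper.on('extract', function () { console.log('done'); });
unzipper.extract({ path: './out' });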

33 pype/premiere/extensions/com.pond5.ppro/node_modules/.bin/mkdirp generated vendored
@@ -1,33 +0,0 @@
#!/usr/bin/env node

var mkdirp = require('../');
var minimist = require('minimist');
var fs = require('fs');

var argv = minimist(process.argv.slice(2), {
    alias: { m: 'mode', h: 'help' },
    string: [ 'mode' ]
});
if (argv.help) {
    fs.createReadStream(__dirname + '/usage.txt').pipe(process.stdout);
    return;
}

var paths = argv._.slice();
var mode = argv.mode ? parseInt(argv.mode, 8) : undefined;

(function next () {
    if (paths.length === 0) return;
    var p = paths.shift();

    if (mode === undefined) mkdirp(p, cb)
    else mkdirp(p, mode, cb)

    function cb (err) {
        if (err) {
            console.error(err.message);
            process.exit(1);
        }
        else next();
    }
})();
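
// Usage sketch (hypothetical path): the library behind the CLI above; the
// mode argument is parsed as octal, exactly like the --mode flag.
var mkdirp = require('mkdirp');
mkdirp('./build/tmp/cache', parseInt('0755', 8), function (err) {
    if (err) console.error(err.message);
});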

54 pype/premiere/extensions/com.pond5.ppro/node_modules/.bin/nopt generated vendored
@@ -1,54 +0,0 @@
#!/usr/bin/env node
var nopt = require("../lib/nopt")
  , path = require("path")
  , types = { num: Number
            , bool: Boolean
            , help: Boolean
            , list: Array
            , "num-list": [Number, Array]
            , "str-list": [String, Array]
            , "bool-list": [Boolean, Array]
            , str: String
            , clear: Boolean
            , config: Boolean
            , length: Number
            , file: path
            }
  , shorthands = { s: [ "--str", "astring" ]
                 , b: [ "--bool" ]
                 , nb: [ "--no-bool" ]
                 , tft: [ "--bool-list", "--no-bool-list", "--bool-list", "true" ]
                 , "?": ["--help"]
                 , h: ["--help"]
                 , H: ["--help"]
                 , n: [ "--num", "125" ]
                 , c: ["--config"]
                 , l: ["--length"]
                 , f: ["--file"]
                 }
  , parsed = nopt( types
                 , shorthands
                 , process.argv
                 , 2 )

console.log("parsed", parsed)

if (parsed.help) {
  console.log("")
  console.log("nopt cli tester")
  console.log("")
  console.log("types")
  console.log(Object.keys(types).map(function M (t) {
    var type = types[t]
    if (Array.isArray(type)) {
      return [t, type.map(function (type) { return type.name })]
    }
    return [t, type && type.name]
  }).reduce(function (s, i) {
    s[i[0]] = i[1]
    return s
  }, {}))
  console.log("")
  console.log("shorthands")
  console.log(shorthands)
}
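
// Usage sketch: nopt parsed programmatically with the same signature the
// tester above uses (types, shorthands, argv, slice index); values are
// illustrative.
var noptLib = require('nopt');
var parsedDemo = noptLib({ num: Number, str: String },
    { n: ['--num', '125'] },
    ['node', 'x', '-n', '--str', 'hi'], 2);
// parsedDemo => { num: 125, str: 'hi', argv: { ... } }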

50 pype/premiere/extensions/com.pond5.ppro/node_modules/.bin/rimraf generated vendored
@@ -1,50 +0,0 @@
#!/usr/bin/env node

var rimraf = require('./')

var help = false
var dashdash = false
var noglob = false
var args = process.argv.slice(2).filter(function(arg) {
  if (dashdash)
    return !!arg
  else if (arg === '--')
    dashdash = true
  else if (arg === '--no-glob' || arg === '-G')
    noglob = true
  else if (arg === '--glob' || arg === '-g')
    noglob = false
  else if (arg.match(/^(-+|\/)(h(elp)?|\?)$/))
    help = true
  else
    return !!arg
})

if (help || args.length === 0) {
  // If they didn't ask for help, then this is not a "success"
  var log = help ? console.log : console.error
  log('Usage: rimraf <path> [<path> ...]')
  log('')
  log('  Deletes all files and folders at "path" recursively.')
  log('')
  log('Options:')
  log('')
  log('  -h, --help     Display this usage info')
  log('  -G, --no-glob  Do not expand glob patterns in arguments')
  log('  -g, --glob     Expand glob patterns in arguments (default)')
  process.exit(help ? 0 : 1)
} else
  go(0)

function go (n) {
  if (n >= args.length)
    return
  var options = {}
  if (noglob)
    options = { glob: false }
  rimraf(args[n], options, function (er) {
    if (er)
      throw er
    go(n+1)
  })
}
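
// Usage sketch (hypothetical path): the library form of the CLI above; glob
// expansion can be disabled per call, matching the -G flag.
var rimrafLib = require('rimraf');
rimrafLib('build/tmp', { glob: false }, function (er) {
    if (er) throw er;
});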

201 pype/premiere/extensions/com.pond5.ppro/node_modules/.bin/sshpk-conv generated vendored
@@ -1,201 +0,0 @@
#!/usr/bin/env node
// -*- mode: js -*-
// vim: set filetype=javascript :
// Copyright 2015 Joyent, Inc.  All rights reserved.

var dashdash = require('dashdash');
var sshpk = require('../lib/index');
var fs = require('fs');
var path = require('path');
var tty = require('tty');
var readline = require('readline');
var getPassword = require('getpass').getPass;

var options = [
    {
        names: ['outformat', 't'],
        type: 'string',
        help: 'Output format'
    },
    {
        names: ['informat', 'T'],
        type: 'string',
        help: 'Input format'
    },
    {
        names: ['file', 'f'],
        type: 'string',
        help: 'Input file name (default stdin)'
    },
    {
        names: ['out', 'o'],
        type: 'string',
        help: 'Output file name (default stdout)'
    },
    {
        names: ['private', 'p'],
        type: 'bool',
        help: 'Produce a private key as output'
    },
    {
        names: ['derive', 'd'],
        type: 'string',
        help: 'Output a new key derived from this one, with given algo'
    },
    {
        names: ['identify', 'i'],
        type: 'bool',
        help: 'Print key metadata instead of converting'
    },
    {
        names: ['comment', 'c'],
        type: 'string',
        help: 'Set key comment, if output format supports'
    },
    {
        names: ['help', 'h'],
        type: 'bool',
        help: 'Shows this help text'
    }
];

if (require.main === module) {
    var parser = dashdash.createParser({
        options: options
    });

    try {
        var opts = parser.parse(process.argv);
    } catch (e) {
        console.error('sshpk-conv: error: %s', e.message);
        process.exit(1);
    }

    if (opts.help || opts._args.length > 1) {
        var help = parser.help({}).trimRight();
        console.error('sshpk-conv: converts between SSH key formats\n');
        console.error(help);
        console.error('\navailable formats:');
        console.error('  - pem, pkcs1     eg id_rsa');
        console.error('  - ssh            eg id_rsa.pub');
        console.error('  - pkcs8          format you want for openssl');
        console.error('  - openssh        like output of ssh-keygen -o');
        console.error('  - rfc4253        raw OpenSSH wire format');
        process.exit(1);
    }

    /*
     * Key derivation can only be done on private keys, so use of the -d
     * option necessarily implies -p.
     */
    if (opts.derive)
        opts.private = true;

    var inFile = process.stdin;
    var inFileName = 'stdin';

    var inFilePath;
    if (opts.file) {
        inFilePath = opts.file;
    } else if (opts._args.length === 1) {
        inFilePath = opts._args[0];
    }

    if (inFilePath)
        inFileName = path.basename(inFilePath);

    try {
        if (inFilePath) {
            fs.accessSync(inFilePath, fs.R_OK);
            inFile = fs.createReadStream(inFilePath);
        }
    } catch (e) {
        console.error('sshpk-conv: error opening input file' +
            ': ' + e.name + ': ' + e.message);
        process.exit(1);
    }

    var outFile = process.stdout;

    try {
        if (opts.out && !opts.identify) {
            fs.accessSync(path.dirname(opts.out), fs.W_OK);
            outFile = fs.createWriteStream(opts.out);
        }
    } catch (e) {
        console.error('sshpk-conv: error opening output file' +
            ': ' + e.name + ': ' + e.message);
        process.exit(1);
    }

    var bufs = [];
    inFile.on('readable', function () {
        var data;
        while ((data = inFile.read()))
            bufs.push(data);
    });
    var parseOpts = {};
    parseOpts.filename = inFileName;
    inFile.on('end', function processKey() {
        var buf = Buffer.concat(bufs);
        var fmt = 'auto';
        if (opts.informat)
            fmt = opts.informat;
        var f = sshpk.parseKey;
        if (opts.private)
            f = sshpk.parsePrivateKey;
        try {
            var key = f(buf, fmt, parseOpts);
        } catch (e) {
            if (e.name === 'KeyEncryptedError') {
                getPassword(function (err, pw) {
                    if (err) {
                        console.log('sshpk-conv: ' +
                            err.name + ': ' +
                            err.message);
                        process.exit(1);
                    }
                    parseOpts.passphrase = pw;
                    processKey();
                });
                return;
            }
            console.error('sshpk-conv: ' +
                e.name + ': ' + e.message);
            process.exit(1);
        }

        if (opts.derive)
            key = key.derive(opts.derive);

        if (opts.comment)
            key.comment = opts.comment;

        if (!opts.identify) {
            fmt = undefined;
            if (opts.outformat)
                fmt = opts.outformat;
            outFile.write(key.toBuffer(fmt));
            if (fmt === 'ssh' ||
                (!opts.private && fmt === undefined))
                outFile.write('\n');
            outFile.once('drain', function () {
                process.exit(0);
            });
        } else {
            var kind = 'public';
            if (sshpk.PrivateKey.isPrivateKey(key))
                kind = 'private';
            console.log('%s: a %d bit %s %s key', inFileName,
                key.size, key.type.toUpperCase(), kind);
            if (key.type === 'ecdsa')
                console.log('ECDSA curve: %s', key.curve);
            if (key.comment)
                console.log('Comment: %s', key.comment);
            console.log('Fingerprint:');
            console.log('  ' + key.fingerprint().toString());
            console.log('  ' + key.fingerprint('md5').toString());
            process.exit(0);
        }
    });
}
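
// Usage sketch (hypothetical key file): the same conversion the CLI performs,
// done directly against the sshpk API.
var sshpkLib = require('sshpk');
var fsLib = require('fs');
var pubKey = sshpkLib.parseKey(fsLib.readFileSync('id_rsa.pub'), 'auto');
process.stdout.write(pubKey.toBuffer('pkcs8'));   // e.g. for use with openssl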

191 pype/premiere/extensions/com.pond5.ppro/node_modules/.bin/sshpk-sign generated vendored
@@ -1,191 +0,0 @@
#!/usr/bin/env node
// -*- mode: js -*-
// vim: set filetype=javascript :
// Copyright 2015 Joyent, Inc.  All rights reserved.

var dashdash = require('dashdash');
var sshpk = require('../lib/index');
var fs = require('fs');
var path = require('path');
var getPassword = require('getpass').getPass;

var options = [
    {
        names: ['hash', 'H'],
        type: 'string',
        help: 'Hash algorithm (sha1, sha256, sha384, sha512)'
    },
    {
        names: ['verbose', 'v'],
        type: 'bool',
        help: 'Display verbose info about key and hash used'
    },
    {
        names: ['identity', 'i'],
        type: 'string',
        help: 'Path to key to use'
    },
    {
        names: ['file', 'f'],
        type: 'string',
        help: 'Input filename'
    },
    {
        names: ['out', 'o'],
        type: 'string',
        help: 'Output filename'
    },
    {
        names: ['format', 't'],
        type: 'string',
        help: 'Signature format (asn1, ssh, raw)'
    },
    {
        names: ['binary', 'b'],
        type: 'bool',
        help: 'Output raw binary instead of base64'
    },
    {
        names: ['help', 'h'],
        type: 'bool',
        help: 'Shows this help text'
    }
];

var parseOpts = {};

if (require.main === module) {
    var parser = dashdash.createParser({
        options: options
    });

    try {
        var opts = parser.parse(process.argv);
    } catch (e) {
        console.error('sshpk-sign: error: %s', e.message);
        process.exit(1);
    }

    if (opts.help || opts._args.length > 1) {
        var help = parser.help({}).trimRight();
        console.error('sshpk-sign: sign data using an SSH key\n');
        console.error(help);
        process.exit(1);
    }

    if (!opts.identity) {
        var help = parser.help({}).trimRight();
        console.error('sshpk-sign: the -i or --identity option ' +
            'is required\n');
        console.error(help);
        process.exit(1);
    }

    var keyData = fs.readFileSync(opts.identity);
    parseOpts.filename = opts.identity;

    run();
}

function run() {
    var key;
    try {
        key = sshpk.parsePrivateKey(keyData, 'auto', parseOpts);
    } catch (e) {
        if (e.name === 'KeyEncryptedError') {
            getPassword(function (err, pw) {
                parseOpts.passphrase = pw;
                run();
            });
            return;
        }
        console.error('sshpk-sign: error loading private key "' +
            opts.identity + '": ' + e.name + ': ' + e.message);
        process.exit(1);
    }

    var hash = opts.hash || key.defaultHashAlgorithm();

    var signer;
    try {
        signer = key.createSign(hash);
    } catch (e) {
        console.error('sshpk-sign: error creating signer: ' +
            e.name + ': ' + e.message);
        process.exit(1);
    }

    if (opts.verbose) {
        console.error('sshpk-sign: using %s-%s with a %d bit key',
            key.type, hash, key.size);
    }

    var inFile = process.stdin;
    var inFileName = 'stdin';

    var inFilePath;
    if (opts.file) {
        inFilePath = opts.file;
    } else if (opts._args.length === 1) {
        inFilePath = opts._args[0];
    }

    if (inFilePath)
        inFileName = path.basename(inFilePath);

    try {
        if (inFilePath) {
            fs.accessSync(inFilePath, fs.R_OK);
            inFile = fs.createReadStream(inFilePath);
        }
    } catch (e) {
        console.error('sshpk-sign: error opening input file' +
            ': ' + e.name + ': ' + e.message);
        process.exit(1);
    }

    var outFile = process.stdout;

    try {
        if (opts.out && !opts.identify) {
            fs.accessSync(path.dirname(opts.out), fs.W_OK);
            outFile = fs.createWriteStream(opts.out);
        }
    } catch (e) {
        console.error('sshpk-sign: error opening output file' +
            ': ' + e.name + ': ' + e.message);
        process.exit(1);
    }

    inFile.pipe(signer);
    inFile.on('end', function () {
        var sig;
        try {
            sig = signer.sign();
        } catch (e) {
            console.error('sshpk-sign: error signing data: ' +
                e.name + ': ' + e.message);
            process.exit(1);
        }

        var fmt = opts.format || 'asn1';
        var output;
        try {
            output = sig.toBuffer(fmt);
            if (!opts.binary)
                output = output.toString('base64');
        } catch (e) {
            console.error('sshpk-sign: error converting signature' +
                ' to ' + fmt + ' format: ' + e.name + ': ' +
                e.message);
            process.exit(1);
        }

        outFile.write(output);
        if (!opts.binary)
            outFile.write('\n');
        outFile.once('drain', function () {
            process.exit(0);
        });
    });
}
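
// Usage sketch (hypothetical files): the same signing flow as the CLI above,
// done directly against the sshpk API.
var sshpkSign = require('sshpk');
var fsSign = require('fs');
var privKey = sshpkSign.parsePrivateKey(fsSign.readFileSync('id_rsa'), 'auto');
var sigSigner = privKey.createSign('sha256');
sigSigner.update(fsSign.readFileSync('release.tar.gz'));
console.log(sigSigner.sign().toBuffer('asn1').toString('base64'));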

166 pype/premiere/extensions/com.pond5.ppro/node_modules/.bin/sshpk-verify generated vendored
@@ -1,166 +0,0 @@
#!/usr/bin/env node
// -*- mode: js -*-
// vim: set filetype=javascript :
// Copyright 2015 Joyent, Inc.  All rights reserved.

var dashdash = require('dashdash');
var sshpk = require('../lib/index');
var fs = require('fs');
var path = require('path');

var options = [
    {
        names: ['hash', 'H'],
        type: 'string',
        help: 'Hash algorithm (sha1, sha256, sha384, sha512)'
    },
    {
        names: ['verbose', 'v'],
        type: 'bool',
        help: 'Display verbose info about key and hash used'
    },
    {
        names: ['identity', 'i'],
        type: 'string',
        help: 'Path to (public) key to use'
    },
    {
        names: ['file', 'f'],
        type: 'string',
        help: 'Input filename'
    },
    {
        names: ['format', 't'],
        type: 'string',
        help: 'Signature format (asn1, ssh, raw)'
    },
    {
        names: ['signature', 's'],
        type: 'string',
        help: 'base64-encoded signature data'
    },
    {
        names: ['help', 'h'],
        type: 'bool',
        help: 'Shows this help text'
    }
];

if (require.main === module) {
    var parser = dashdash.createParser({
        options: options
    });

    try {
        var opts = parser.parse(process.argv);
    } catch (e) {
        console.error('sshpk-verify: error: %s', e.message);
        process.exit(3);
    }

    if (opts.help || opts._args.length > 1) {
        var help = parser.help({}).trimRight();
        console.error('sshpk-verify: verify data signed with an SSH key\n');
        console.error(help);
        process.exit(3);
    }

    if (!opts.identity) {
        var help = parser.help({}).trimRight();
        console.error('sshpk-verify: the -i or --identity option ' +
            'is required\n');
        console.error(help);
        process.exit(3);
    }

    if (!opts.signature) {
        var help = parser.help({}).trimRight();
        console.error('sshpk-verify: the -s or --signature option ' +
            'is required\n');
        console.error(help);
        process.exit(3);
    }

    var keyData = fs.readFileSync(opts.identity);

    var key;
    try {
        key = sshpk.parseKey(keyData);
    } catch (e) {
        console.error('sshpk-verify: error loading key "' +
            opts.identity + '": ' + e.name + ': ' + e.message);
        process.exit(2);
    }

    var fmt = opts.format || 'asn1';
    var sigData = new Buffer(opts.signature, 'base64');

    var sig;
    try {
        sig = sshpk.parseSignature(sigData, key.type, fmt);
    } catch (e) {
        console.error('sshpk-verify: error parsing signature: ' +
            e.name + ': ' + e.message);
        process.exit(2);
    }

    var hash = opts.hash || key.defaultHashAlgorithm();

    var verifier;
    try {
        verifier = key.createVerify(hash);
    } catch (e) {
        console.error('sshpk-verify: error creating verifier: ' +
            e.name + ': ' + e.message);
        process.exit(2);
    }

    if (opts.verbose) {
        console.error('sshpk-verify: using %s-%s with a %d bit key',
            key.type, hash, key.size);
    }

    var inFile = process.stdin;
    var inFileName = 'stdin';

    var inFilePath;
    if (opts.file) {
        inFilePath = opts.file;
    } else if (opts._args.length === 1) {
        inFilePath = opts._args[0];
    }

    if (inFilePath)
        inFileName = path.basename(inFilePath);

    try {
        if (inFilePath) {
            fs.accessSync(inFilePath, fs.R_OK);
            inFile = fs.createReadStream(inFilePath);
        }
    } catch (e) {
        console.error('sshpk-verify: error opening input file' +
            ': ' + e.name + ': ' + e.message);
        process.exit(2);
    }

    inFile.pipe(verifier);
    inFile.on('end', function () {
        var ret;
        try {
            ret = verifier.verify(sig);
        } catch (e) {
            console.error('sshpk-verify: error verifying data: ' +
                e.name + ': ' + e.message);
            process.exit(1);
        }

        if (ret) {
            console.error('OK');
            process.exit(0);
        }

        console.error('NOT OK');
        process.exit(1);
    });
}
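
// Usage sketch (hypothetical files), mirroring the CLI above; note its exit
// codes: 0 = OK, 1 = NOT OK, 2 = bad key/signature, 3 = usage error.
var sshpkVerify = require('sshpk');
var fsVerify = require('fs');
var verifyKey = sshpkVerify.parseKey(fsVerify.readFileSync('id_rsa.pub'));
var sigB64 = fsVerify.readFileSync('release.sig', 'ascii');
var parsedSig = sshpkVerify.parseSignature(
    new Buffer(sigB64, 'base64'), verifyKey.type, 'asn1');
var v = verifyKey.createVerify('sha256');
v.update(fsVerify.readFileSync('release.tar.gz'));
console.log(v.verify(parsedSig) ? 'OK' : 'NOT OK');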

50 pype/premiere/extensions/com.pond5.ppro/node_modules/.bin/uuid generated vendored
@@ -1,50 +0,0 @@
#!/usr/bin/env node
var assert = require('assert');

function usage() {
    console.log('Usage:');
    console.log('  uuid');
    console.log('  uuid v1');
    console.log('  uuid v4');
    console.log('  uuid v5 <name> <namespace uuid>');
    console.log('  uuid --help');
    console.log('\nNote: <namespace uuid> may be "URL" or "DNS" to use the corresponding UUIDs defined by RFC4122');
}

var args = process.argv.slice(2);

if (args.indexOf('--help') >= 0) {
    usage();
    process.exit(0);
}
var version = args.shift() || 'v4';

switch (version) {
    case 'v1':
        var uuidV1 = require('../v1');
        console.log(uuidV1());
        break;

    case 'v4':
        var uuidV4 = require('../v4');
        console.log(uuidV4());
        break;

    case 'v5':
        var uuidV5 = require('../v5');

        var name = args.shift();
        var namespace = args.shift();
        assert(name != null, 'v5 name not specified');
        assert(namespace != null, 'v5 namespace not specified');

        if (namespace == 'URL') namespace = uuidV5.URL;
        if (namespace == 'DNS') namespace = uuidV5.DNS;

        console.log(uuidV5(name, namespace));
        break;

    default:
        usage();
        process.exit(1);
}
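
// Usage sketch: the CLI above maps directly onto the uuid module's versioned
// entry points; v5 is deterministic for a given name and namespace.
var v5 = require('uuid/v5');
console.log(v5('example.com', v5.DNS));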

1 pype/premiere/extensions/com.pond5.ppro/node_modules/.gitignore generated vendored
@@ -1 +0,0 @@
*.DS_Store

15 pype/premiere/extensions/com.pond5.ppro/node_modules/abbrev/LICENSE generated vendored
@@ -1,15 +0,0 @@
The ISC License

Copyright (c) Isaac Z. Schlueter and Contributors

Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.

THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

23 pype/premiere/extensions/com.pond5.ppro/node_modules/abbrev/README.md generated vendored
@@ -1,23 +0,0 @@
# abbrev-js

Just like [ruby's Abbrev](http://apidock.com/ruby/Abbrev).

Usage:

    var abbrev = require("abbrev");
    abbrev("foo", "fool", "folding", "flop");

    // returns:
    { fl: 'flop'
    , flo: 'flop'
    , flop: 'flop'
    , fol: 'folding'
    , fold: 'folding'
    , foldi: 'folding'
    , foldin: 'folding'
    , folding: 'folding'
    , foo: 'foo'
    , fool: 'fool'
    }

This is handy for command-line scripts, or other cases where you want to be able to accept shorthands.

61 pype/premiere/extensions/com.pond5.ppro/node_modules/abbrev/abbrev.js generated vendored
@@ -1,61 +0,0 @@
module.exports = exports = abbrev.abbrev = abbrev

abbrev.monkeyPatch = monkeyPatch

function monkeyPatch () {
  Object.defineProperty(Array.prototype, 'abbrev', {
    value: function () { return abbrev(this) },
    enumerable: false, configurable: true, writable: true
  })

  Object.defineProperty(Object.prototype, 'abbrev', {
    value: function () { return abbrev(Object.keys(this)) },
    enumerable: false, configurable: true, writable: true
  })
}

function abbrev (list) {
  if (arguments.length !== 1 || !Array.isArray(list)) {
    list = Array.prototype.slice.call(arguments, 0)
  }
  for (var i = 0, l = list.length, args = [] ; i < l ; i ++) {
    args[i] = typeof list[i] === "string" ? list[i] : String(list[i])
  }

  // sort them lexicographically, so that they're next to their nearest kin
  args = args.sort(lexSort)

  // walk through each, seeing how much it has in common with the next and previous
  var abbrevs = {}
    , prev = ""
  for (var i = 0, l = args.length ; i < l ; i ++) {
    var current = args[i]
      , next = args[i + 1] || ""
      , nextMatches = true
      , prevMatches = true
    if (current === next) continue
    for (var j = 0, cl = current.length ; j < cl ; j ++) {
      var curChar = current.charAt(j)
      nextMatches = nextMatches && curChar === next.charAt(j)
      prevMatches = prevMatches && curChar === prev.charAt(j)
      if (!nextMatches && !prevMatches) {
        j ++
        break
      }
    }
    prev = current
    if (j === cl) {
      abbrevs[current] = current
      continue
    }
    for (var a = current.substr(0, j) ; j <= cl ; j ++) {
      abbrevs[a] = current
      a += current.charAt(j)
    }
  }
  return abbrevs
}

function lexSort (a, b) {
  return a === b ? 0 : a > b ? 1 : -1
}
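
// Usage sketch: abbrev also accepts a single array; unambiguous prefixes map
// to their full word, ambiguous ones are omitted.
var abbrevLib = require('abbrev')
abbrevLib(['start', 'status', 'stop'])
// => { star: 'start', start: 'start', stat: 'status', statu: 'status',
//      status: 'status', sto: 'stop', stop: 'stop' }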

93 pype/premiere/extensions/com.pond5.ppro/node_modules/abbrev/package.json generated vendored
@@ -1,93 +0,0 @@
{
  "_args": [
    [
      {
        "raw": "abbrev@1",
        "scope": null,
        "escapedName": "abbrev",
        "name": "abbrev",
        "rawSpec": "1",
        "spec": ">=1.0.0 <2.0.0",
        "type": "range"
      },
      "/Library/Application Support/Adobe/CEP/extensions/aeft/node_modules/nopt"
    ]
  ],
  "_from": "abbrev@>=1.0.0 <2.0.0",
  "_id": "abbrev@1.1.0",
  "_inCache": true,
  "_location": "/abbrev",
  "_nodeVersion": "8.0.0-pre",
  "_npmOperationalInternal": {
    "host": "packages-12-west.internal.npmjs.com",
    "tmp": "tmp/abbrev-1.1.0.tgz_1487054000015_0.9229173036292195"
  },
  "_npmUser": {
    "name": "isaacs",
    "email": "i@izs.me"
  },
  "_npmVersion": "4.3.0",
  "_phantomChildren": {},
  "_requested": {
    "raw": "abbrev@1",
    "scope": null,
    "escapedName": "abbrev",
    "name": "abbrev",
    "rawSpec": "1",
    "spec": ">=1.0.0 <2.0.0",
    "type": "range"
  },
  "_requiredBy": [
    "/nopt",
    "/touch/nopt"
  ],
  "_resolved": "https://registry.npmjs.org/abbrev/-/abbrev-1.1.0.tgz",
  "_shasum": "d0554c2256636e2f56e7c2e5ad183f859428d81f",
  "_shrinkwrap": null,
  "_spec": "abbrev@1",
  "_where": "/Library/Application Support/Adobe/CEP/extensions/aeft/node_modules/nopt",
  "author": {
    "name": "Isaac Z. Schlueter",
    "email": "i@izs.me"
  },
  "bugs": {
    "url": "https://github.com/isaacs/abbrev-js/issues"
  },
  "dependencies": {},
  "description": "Like ruby's abbrev module, but in js",
  "devDependencies": {
    "tap": "^10.1"
  },
  "directories": {},
  "dist": {
    "shasum": "d0554c2256636e2f56e7c2e5ad183f859428d81f",
    "tarball": "https://registry.npmjs.org/abbrev/-/abbrev-1.1.0.tgz"
  },
  "files": [
    "abbrev.js"
  ],
  "gitHead": "7136d4d95449dc44115d4f78b80ec907724f64e0",
  "homepage": "https://github.com/isaacs/abbrev-js#readme",
  "license": "ISC",
  "main": "abbrev.js",
  "maintainers": [
    {
      "name": "isaacs",
      "email": "i@izs.me"
    }
  ],
  "name": "abbrev",
  "optionalDependencies": {},
  "readme": "ERROR: No README data found!",
  "repository": {
    "type": "git",
    "url": "git+ssh://git@github.com/isaacs/abbrev-js.git"
  },
  "scripts": {
    "postpublish": "git push origin --all; git push origin --tags",
    "postversion": "npm publish",
    "preversion": "npm test",
    "test": "tap test.js --100"
  },
  "version": "1.1.0"
}

20 pype/premiere/extensions/com.pond5.ppro/node_modules/ajv/.tonic_example.js generated vendored
@@ -1,20 +0,0 @@
var Ajv = require('ajv');
var ajv = Ajv({allErrors: true});

var schema = {
  "properties": {
    "foo": { "type": "string" },
    "bar": { "type": "number", "maximum": 3 }
  }
};

var validate = ajv.compile(schema);

test({"foo": "abc", "bar": 2});
test({"foo": 2, "bar": 4});

function test(data) {
  var valid = validate(data);
  if (valid) console.log('Valid!');
  else console.log('Invalid: ' + ajv.errorsText(validate.errors));
}

22 pype/premiere/extensions/com.pond5.ppro/node_modules/ajv/LICENSE generated vendored
@@ -1,22 +0,0 @@
The MIT License (MIT)

Copyright (c) 2015 Evgeny Poberezkin

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

1213 pype/premiere/extensions/com.pond5.ppro/node_modules/ajv/README.md generated vendored
File diff suppressed because it is too large
8023 pype/premiere/extensions/com.pond5.ppro/node_modules/ajv/dist/ajv.bundle.js generated vendored
File diff suppressed because it is too large
6 pype/premiere/extensions/com.pond5.ppro/node_modules/ajv/dist/ajv.min.js generated vendored
File diff suppressed because one or more lines are too long
1 pype/premiere/extensions/com.pond5.ppro/node_modules/ajv/dist/ajv.min.js.map generated vendored
File diff suppressed because one or more lines are too long
8 pype/premiere/extensions/com.pond5.ppro/node_modules/ajv/dist/nodent.min.js generated vendored
File diff suppressed because one or more lines are too long
32 pype/premiere/extensions/com.pond5.ppro/node_modules/ajv/dist/regenerator.min.js generated vendored
File diff suppressed because one or more lines are too long
284 pype/premiere/extensions/com.pond5.ppro/node_modules/ajv/lib/ajv.d.ts generated vendored
@@ -1,284 +0,0 @@
declare var ajv: {
    (options?: ajv.Options): ajv.Ajv;
    new (options?: ajv.Options): ajv.Ajv;
}

declare namespace ajv {
    interface Ajv {
        /**
         * Validate data using schema
         * Schema will be compiled and cached (using serialized JSON as key. [json-stable-stringify](https://github.com/substack/json-stable-stringify) is used to serialize.
         * @param  {String|Object} schemaKeyRef key, ref or schema object
         * @param  {Any} data to be validated
         * @return {Boolean} validation result. Errors from the last validation will be available in `ajv.errors` (and also in compiled schema: `schema.errors`).
         */
        validate(schemaKeyRef: Object | string, data: any): boolean;
        /**
         * Create validating function for passed schema.
         * @param {Object} schema schema object
         * @return {Function} validating function
         */
        compile(schema: Object): ValidateFunction;
        /**
         * Creates validating function for passed schema with asynchronous loading of missing schemas.
         * `loadSchema` option should be a function that accepts schema uri and node-style callback.
         * @this Ajv
         * @param {Object} schema schema object
         * @param {Function} callback node-style callback, it is always called with 2 parameters: error (or null) and validating function.
         */
        compileAsync(schema: Object, callback: (err: Error, validate: ValidateFunction) => any): void;
        /**
         * Adds schema to the instance.
         * @param {Object|Array} schema schema or array of schemas. If array is passed, `key` and other parameters will be ignored.
         * @param {String} key Optional schema key. Can be passed to `validate` method instead of schema object or id/ref. One schema per instance can have empty `id` and `key`.
         */
        addSchema(schema: Array<Object> | Object, key?: string): void;
        /**
         * Add schema that will be used to validate other schemas
         * options in META_IGNORE_OPTIONS are alway set to false
         * @param {Object} schema schema object
         * @param {String} key optional schema key
         */
        addMetaSchema(schema: Object, key?: string): void;
        /**
         * Validate schema
         * @param {Object} schema schema to validate
         * @return {Boolean} true if schema is valid
         */
        validateSchema(schema: Object): boolean;
        /**
         * Get compiled schema from the instance by `key` or `ref`.
         * @param  {String} keyRef `key` that was passed to `addSchema` or full schema reference (`schema.id` or resolved id).
         * @return {Function} schema validating function (with property `schema`).
         */
        getSchema(keyRef: string): ValidateFunction;
        /**
         * Remove cached schema(s).
         * If no parameter is passed all schemas but meta-schemas are removed.
         * If RegExp is passed all schemas with key/id matching pattern but meta-schemas are removed.
         * Even if schema is referenced by other schemas it still can be removed as other schemas have local references.
         * @param  {String|Object|RegExp} schemaKeyRef key, ref, pattern to match key/ref or schema object
         */
        removeSchema(schemaKeyRef?: Object | string | RegExp): void;
        /**
         * Add custom format
         * @param {String} name format name
         * @param {String|RegExp|Function} format string is converted to RegExp; function should return boolean (true when valid)
         */
        addFormat(name: string, format: FormatValidator | FormatDefinition): void;
        /**
         * Define custom keyword
         * @this  Ajv
         * @param {String} keyword custom keyword, should be a valid identifier, should be different from all standard, custom and macro keywords.
         * @param {Object} definition keyword definition object with properties `type` (type(s) which the keyword applies to), `validate` or `compile`.
         */
        addKeyword(keyword: string, definition: KeywordDefinition): void;
        /**
         * Get keyword definition
         * @this  Ajv
         * @param {String} keyword pre-defined or custom keyword.
         * @return {Object|Boolean} custom keyword definition, `true` if it is a predefined keyword, `false` otherwise.
         */
        getKeyword(keyword: string): Object | boolean;
        /**
         * Remove keyword
         * @this  Ajv
         * @param {String} keyword pre-defined or custom keyword.
         */
        removeKeyword(keyword: string): void;
        /**
         * Convert array of error message objects to string
         * @param  {Array<Object>} errors optional array of validation errors, if not passed errors from the instance are used.
         * @param  {Object} options optional options with properties `separator` and `dataVar`.
         * @return {String} human readable string with all errors descriptions
         */
        errorsText(errors?: Array<ErrorObject>, options?: ErrorsTextOptions): string;
        errors?: Array<ErrorObject>;
    }

    interface Thenable <R> {
        then <U> (onFulfilled?: (value: R) => U | Thenable<U>, onRejected?: (error: any) => U | Thenable<U>): Thenable<U>;
    }

    interface ValidateFunction {
        (
            data: any,
            dataPath?: string,
            parentData?: Object | Array<any>,
            parentDataProperty?: string | number,
            rootData?: Object | Array<any>
        ): boolean | Thenable<boolean>;
        errors?: Array<ErrorObject>;
        schema?: Object;
    }

    interface Options {
        v5?: boolean;
        allErrors?: boolean;
        verbose?: boolean;
        jsonPointers?: boolean;
        uniqueItems?: boolean;
        unicode?: boolean;
        format?: string;
        formats?: Object;
        unknownFormats?: boolean | string | Array<string>;
        schemas?: Array<Object> | Object;
        ownProperties?: boolean;
        missingRefs?: boolean | string;
        extendRefs?: boolean | string;
        loadSchema?: (uri: string, cb: (err: Error, schema: Object) => any) => any;
        removeAdditional?: boolean | string;
        useDefaults?: boolean | string;
        coerceTypes?: boolean | string;
        async?: boolean | string;
        transpile?: string | ((code: string) => string);
        meta?: boolean | Object;
        validateSchema?: boolean | string;
        addUsedSchema?: boolean;
        inlineRefs?: boolean | number;
        passContext?: boolean;
        loopRequired?: number;
        multipleOfPrecision?: number;
        errorDataPath?: string;
        messages?: boolean;
        sourceCode?: boolean;
        beautify?: boolean | Object;
        cache?: Object;
    }

    type FormatValidator = string | RegExp | ((data: string) => boolean);

    interface FormatDefinition {
        validate: FormatValidator;
        compare: (data1: string, data2: string) => number;
        async?: boolean;
    }

    interface KeywordDefinition {
        type?: string | Array<string>;
        async?: boolean;
        errors?: boolean | string;
        // schema: false makes validate not to expect schema (ValidateFunction)
        schema?: boolean;
        modifying?: boolean;
        valid?: boolean;
        // one and only one of the following properties should be present
        validate?: ValidateFunction | SchemaValidateFunction;
        compile?: (schema: Object, parentSchema: Object) => ValidateFunction;
        macro?: (schema: Object, parentSchema: Object) => Object;
        inline?: (it: Object, keyword: string, schema: Object, parentSchema: Object) => string;
    }

    interface SchemaValidateFunction {
        (
            schema: Object,
            data: any,
            parentSchema?: Object,
            dataPath?: string,
            parentData?: Object | Array<any>,
            parentDataProperty?: string | number
        ): boolean | Thenable<boolean>;
        errors?: Array<ErrorObject>;
|
||||
}
|
||||
|
||||
interface ErrorsTextOptions {
|
||||
separator?: string;
|
||||
dataVar?: string;
|
||||
}
|
||||
|
||||
interface ErrorObject {
|
||||
keyword: string;
|
||||
dataPath: string;
|
||||
schemaPath: string;
|
||||
params: ErrorParameters;
|
||||
// Excluded if messages set to false.
|
||||
message?: string;
|
||||
// These are added with the `verbose` option.
|
||||
schema?: Object;
|
||||
parentSchema?: Object;
|
||||
data?: any;
|
||||
}
|
||||
|
||||
type ErrorParameters = RefParams | LimitParams | AdditionalPropertiesParams |
|
||||
DependenciesParams | FormatParams | ComparisonParams |
|
||||
MultipleOfParams | PatternParams | RequiredParams |
|
||||
TypeParams | UniqueItemsParams | CustomParams |
|
||||
PatternGroupsParams | PatternRequiredParams |
|
||||
SwitchParams | NoParams | EnumParams;
|
||||
|
||||
interface RefParams {
|
||||
ref: string;
|
||||
}
|
||||
|
||||
interface LimitParams {
|
||||
limit: number;
|
||||
}
|
||||
|
||||
interface AdditionalPropertiesParams {
|
||||
additionalProperty: string;
|
||||
}
|
||||
|
||||
interface DependenciesParams {
|
||||
property: string;
|
||||
missingProperty: string;
|
||||
depsCount: number;
|
||||
deps: string;
|
||||
}
|
||||
|
||||
interface FormatParams {
|
||||
format: string
|
||||
}
|
||||
|
||||
interface ComparisonParams {
|
||||
comparison: string;
|
||||
limit: number | string;
|
||||
exclusive: boolean;
|
||||
}
|
||||
|
||||
interface MultipleOfParams {
|
||||
multipleOf: number;
|
||||
}
|
||||
|
||||
interface PatternParams {
|
||||
pattern: string;
|
||||
}
|
||||
|
||||
interface RequiredParams {
|
||||
missingProperty: string;
|
||||
}
|
||||
|
||||
interface TypeParams {
|
||||
type: string;
|
||||
}
|
||||
|
||||
interface UniqueItemsParams {
|
||||
i: number;
|
||||
j: number;
|
||||
}
|
||||
|
||||
interface CustomParams {
|
||||
keyword: string;
|
||||
}
|
||||
|
||||
interface PatternGroupsParams {
|
||||
reason: string;
|
||||
limit: number;
|
||||
pattern: string;
|
||||
}
|
||||
|
||||
interface PatternRequiredParams {
|
||||
missingPattern: string;
|
||||
}
|
||||
|
||||
interface SwitchParams {
|
||||
caseIndex: number;
|
||||
}
|
||||
|
||||
interface NoParams {}
|
||||
|
||||
interface EnumParams {
|
||||
allowedValues: Array<any>;
|
||||
}
|
||||
}
|
||||
|
||||
export = ajv;
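The typings above map one-to-one onto the runtime API in lib/ajv.js below. As a minimal sketch of how they are exercised (the schema and data here are invented for illustration, assuming the ajv package is installed):

var Ajv = require('ajv');
var ajv = new Ajv({ allErrors: true }); // Options.allErrors: collect all errors, not just the first

// compile() returns a ValidateFunction; calling it populates its .errors on failure
var validate = ajv.compile({
  type: 'object',
  properties: { port: { type: 'integer' } },
  required: ['port']
});

if (!validate({ port: 'not-a-number' })) {
  // errorsText() joins ErrorObject entries with the default ', ' separator
  console.log(ajv.errorsText(validate.errors));
}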
420 pype/premiere/extensions/com.pond5.ppro/node_modules/ajv/lib/ajv.js generated vendored

@@ -1,420 +0,0 @@
'use strict';

var compileSchema = require('./compile')
  , resolve = require('./compile/resolve')
  , Cache = require('./cache')
  , SchemaObject = require('./compile/schema_obj')
  , stableStringify = require('json-stable-stringify')
  , formats = require('./compile/formats')
  , rules = require('./compile/rules')
  , v5 = require('./v5')
  , util = require('./compile/util')
  , async = require('./async')
  , co = require('co');

module.exports = Ajv;

Ajv.prototype.compileAsync = async.compile;

var customKeyword = require('./keyword');
Ajv.prototype.addKeyword = customKeyword.add;
Ajv.prototype.getKeyword = customKeyword.get;
Ajv.prototype.removeKeyword = customKeyword.remove;
Ajv.ValidationError = require('./compile/validation_error');

var META_SCHEMA_ID = 'http://json-schema.org/draft-04/schema';
var SCHEMA_URI_FORMAT = /^(?:(?:[a-z][a-z0-9+-.]*:)?\/\/)?[^\s]*$/i;
function SCHEMA_URI_FORMAT_FUNC(str) {
  return SCHEMA_URI_FORMAT.test(str);
}

var META_IGNORE_OPTIONS = [ 'removeAdditional', 'useDefaults', 'coerceTypes' ];

/**
 * Creates validator instance.
 * Usage: `Ajv(opts)`
 * @param {Object} opts optional options
 * @return {Object} ajv instance
 */
function Ajv(opts) {
  if (!(this instanceof Ajv)) return new Ajv(opts);
  var self = this;

  opts = this._opts = util.copy(opts) || {};
  this._schemas = {};
  this._refs = {};
  this._fragments = {};
  this._formats = formats(opts.format);
  this._cache = opts.cache || new Cache;
  this._loadingSchemas = {};
  this._compilations = [];
  this.RULES = rules();

  // this is done on purpose, so that methods are bound to the instance
  // (without using bind) so that they can be used without the instance
  this.validate = validate;
  this.compile = compile;
  this.addSchema = addSchema;
  this.addMetaSchema = addMetaSchema;
  this.validateSchema = validateSchema;
  this.getSchema = getSchema;
  this.removeSchema = removeSchema;
  this.addFormat = addFormat;
  this.errorsText = errorsText;

  this._addSchema = _addSchema;
  this._compile = _compile;

  opts.loopRequired = opts.loopRequired || Infinity;
  if (opts.async || opts.transpile) async.setup(opts);
  if (opts.beautify === true) opts.beautify = { indent_size: 2 };
  if (opts.errorDataPath == 'property') opts._errorDataPathProperty = true;
  this._metaOpts = getMetaSchemaOptions();

  if (opts.formats) addInitialFormats();
  addDraft4MetaSchema();
  if (opts.v5) v5.enable(this);
  if (typeof opts.meta == 'object') addMetaSchema(opts.meta);
  addInitialSchemas();


  /**
   * Validate data using schema
   * Schema will be compiled and cached (using serialized JSON as the key; [json-stable-stringify](https://github.com/substack/json-stable-stringify) is used to serialize).
   * @param {String|Object} schemaKeyRef key, ref or schema object
   * @param {Any} data to be validated
   * @return {Boolean} validation result. Errors from the last validation will be available in `ajv.errors` (and also in compiled schema: `schema.errors`).
   */
  function validate(schemaKeyRef, data) {
    var v;
    if (typeof schemaKeyRef == 'string') {
      v = getSchema(schemaKeyRef);
      if (!v) throw new Error('no schema with key or ref "' + schemaKeyRef + '"');
    } else {
      var schemaObj = _addSchema(schemaKeyRef);
      v = schemaObj.validate || _compile(schemaObj);
    }

    var valid = v(data);
    if (v.$async === true)
      return self._opts.async == '*' ? co(valid) : valid;
    self.errors = v.errors;
    return valid;
  }


  /**
   * Create validating function for passed schema.
   * @param {Object} schema schema object
   * @param {Boolean} _meta true if schema is a meta-schema. Used internally to compile meta schemas of custom keywords.
   * @return {Function} validating function
   */
  function compile(schema, _meta) {
    var schemaObj = _addSchema(schema, undefined, _meta);
    return schemaObj.validate || _compile(schemaObj);
  }


  /**
   * Adds schema to the instance.
   * @param {Object|Array} schema schema or array of schemas. If array is passed, `key` and other parameters will be ignored.
   * @param {String} key Optional schema key. Can be passed to `validate` method instead of schema object or id/ref. One schema per instance can have empty `id` and `key`.
   * @param {Boolean} _skipValidation true to skip schema validation. Used internally, option validateSchema should be used instead.
   * @param {Boolean} _meta true if schema is a meta-schema. Used internally, addMetaSchema should be used instead.
   */
  function addSchema(schema, key, _skipValidation, _meta) {
    if (Array.isArray(schema)) {
      for (var i=0; i<schema.length; i++) addSchema(schema[i], undefined, _skipValidation, _meta);
      return;
    }
    // can key/id have # inside?
    key = resolve.normalizeId(key || schema.id);
    checkUnique(key);
    self._schemas[key] = _addSchema(schema, _skipValidation, _meta, true);
  }


  /**
   * Add schema that will be used to validate other schemas
   * options in META_IGNORE_OPTIONS are always set to false
   * @param {Object} schema schema object
   * @param {String} key optional schema key
   * @param {Boolean} skipValidation true to skip schema validation, can be used to override validateSchema option for meta-schema
   */
  function addMetaSchema(schema, key, skipValidation) {
    addSchema(schema, key, skipValidation, true);
  }


  /**
   * Validate schema
   * @param {Object} schema schema to validate
   * @param {Boolean} throwOrLogError pass true to throw (or log) an error if invalid
   * @return {Boolean} true if schema is valid
   */
  function validateSchema(schema, throwOrLogError) {
    var $schema = schema.$schema || self._opts.defaultMeta || defaultMeta();
    var currentUriFormat = self._formats.uri;
    self._formats.uri = typeof currentUriFormat == 'function'
                          ? SCHEMA_URI_FORMAT_FUNC
                          : SCHEMA_URI_FORMAT;
    var valid;
    try { valid = validate($schema, schema); }
    finally { self._formats.uri = currentUriFormat; }
    if (!valid && throwOrLogError) {
      var message = 'schema is invalid: ' + errorsText();
      if (self._opts.validateSchema == 'log') console.error(message);
      else throw new Error(message);
    }
    return valid;
  }


  function defaultMeta() {
    var meta = self._opts.meta;
    self._opts.defaultMeta = typeof meta == 'object'
                              ? meta.id || meta
                              : self._opts.v5
                                ? v5.META_SCHEMA_ID
                                : META_SCHEMA_ID;
    return self._opts.defaultMeta;
  }


  /**
   * Get compiled schema from the instance by `key` or `ref`.
   * @param {String} keyRef `key` that was passed to `addSchema` or full schema reference (`schema.id` or resolved id).
   * @return {Function} schema validating function (with property `schema`).
   */
  function getSchema(keyRef) {
    var schemaObj = _getSchemaObj(keyRef);
    switch (typeof schemaObj) {
      case 'object': return schemaObj.validate || _compile(schemaObj);
      case 'string': return getSchema(schemaObj);
      case 'undefined': return _getSchemaFragment(keyRef);
    }
  }


  function _getSchemaFragment(ref) {
    var res = resolve.schema.call(self, { schema: {} }, ref);
    if (res) {
      var schema = res.schema
        , root = res.root
        , baseId = res.baseId;
      var v = compileSchema.call(self, schema, root, undefined, baseId);
      self._fragments[ref] = new SchemaObject({
        ref: ref,
        fragment: true,
        schema: schema,
        root: root,
        baseId: baseId,
        validate: v
      });
      return v;
    }
  }


  function _getSchemaObj(keyRef) {
    keyRef = resolve.normalizeId(keyRef);
    return self._schemas[keyRef] || self._refs[keyRef] || self._fragments[keyRef];
  }


  /**
   * Remove cached schema(s).
   * If no parameter is passed all schemas but meta-schemas are removed.
   * If RegExp is passed all schemas with key/id matching pattern but meta-schemas are removed.
   * Even if schema is referenced by other schemas it still can be removed as other schemas have local references.
   * @param {String|Object|RegExp} schemaKeyRef key, ref, pattern to match key/ref or schema object
   */
  function removeSchema(schemaKeyRef) {
    if (schemaKeyRef instanceof RegExp) {
      _removeAllSchemas(self._schemas, schemaKeyRef);
      _removeAllSchemas(self._refs, schemaKeyRef);
      return;
    }
    switch (typeof schemaKeyRef) {
      case 'undefined':
        _removeAllSchemas(self._schemas);
        _removeAllSchemas(self._refs);
        self._cache.clear();
        return;
      case 'string':
        var schemaObj = _getSchemaObj(schemaKeyRef);
        if (schemaObj) self._cache.del(schemaObj.jsonStr);
        delete self._schemas[schemaKeyRef];
        delete self._refs[schemaKeyRef];
        return;
      case 'object':
        var jsonStr = stableStringify(schemaKeyRef);
        self._cache.del(jsonStr);
        var id = schemaKeyRef.id;
        if (id) {
          id = resolve.normalizeId(id);
          delete self._schemas[id];
          delete self._refs[id];
        }
    }
  }


  function _removeAllSchemas(schemas, regex) {
    for (var keyRef in schemas) {
      var schemaObj = schemas[keyRef];
      if (!schemaObj.meta && (!regex || regex.test(keyRef))) {
        self._cache.del(schemaObj.jsonStr);
        delete schemas[keyRef];
      }
    }
  }


  function _addSchema(schema, skipValidation, meta, shouldAddSchema) {
    if (typeof schema != 'object') throw new Error('schema should be object');
    var jsonStr = stableStringify(schema);
    var cached = self._cache.get(jsonStr);
    if (cached) return cached;

    shouldAddSchema = shouldAddSchema || self._opts.addUsedSchema !== false;

    var id = resolve.normalizeId(schema.id);
    if (id && shouldAddSchema) checkUnique(id);

    var willValidate = self._opts.validateSchema !== false && !skipValidation;
    var recursiveMeta;
    if (willValidate && !(recursiveMeta = schema.id && schema.id == schema.$schema))
      validateSchema(schema, true);

    var localRefs = resolve.ids.call(self, schema);

    var schemaObj = new SchemaObject({
      id: id,
      schema: schema,
      localRefs: localRefs,
      jsonStr: jsonStr,
      meta: meta
    });

    if (id[0] != '#' && shouldAddSchema) self._refs[id] = schemaObj;
    self._cache.put(jsonStr, schemaObj);

    if (willValidate && recursiveMeta) validateSchema(schema, true);

    return schemaObj;
  }


  function _compile(schemaObj, root) {
    if (schemaObj.compiling) {
      schemaObj.validate = callValidate;
      callValidate.schema = schemaObj.schema;
      callValidate.errors = null;
      callValidate.root = root ? root : callValidate;
      if (schemaObj.schema.$async === true)
        callValidate.$async = true;
      return callValidate;
    }
    schemaObj.compiling = true;

    var currentOpts;
    if (schemaObj.meta) {
      currentOpts = self._opts;
      self._opts = self._metaOpts;
    }

    var v;
    try { v = compileSchema.call(self, schemaObj.schema, root, schemaObj.localRefs); }
    finally {
      schemaObj.compiling = false;
      if (schemaObj.meta) self._opts = currentOpts;
    }

    schemaObj.validate = v;
    schemaObj.refs = v.refs;
    schemaObj.refVal = v.refVal;
    schemaObj.root = v.root;
    return v;


    function callValidate() {
      var _validate = schemaObj.validate;
      var result = _validate.apply(null, arguments);
      callValidate.errors = _validate.errors;
      return result;
    }
  }


  /**
   * Convert array of error message objects to string
   * @param {Array<Object>} errors optional array of validation errors, if not passed errors from the instance are used.
   * @param {Object} options optional options with properties `separator` and `dataVar`.
   * @return {String} human readable string with all errors descriptions
   */
  function errorsText(errors, options) {
    errors = errors || self.errors;
    if (!errors) return 'No errors';
    options = options || {};
    var separator = options.separator === undefined ? ', ' : options.separator;
    var dataVar = options.dataVar === undefined ? 'data' : options.dataVar;

    var text = '';
    for (var i=0; i<errors.length; i++) {
      var e = errors[i];
      if (e) text += dataVar + e.dataPath + ' ' + e.message + separator;
    }
    return text.slice(0, -separator.length);
  }


  /**
   * Add custom format
   * @param {String} name format name
   * @param {String|RegExp|Function} format string is converted to RegExp; function should return boolean (true when valid)
   */
  function addFormat(name, format) {
    if (typeof format == 'string') format = new RegExp(format);
    self._formats[name] = format;
  }


  function addDraft4MetaSchema() {
    if (self._opts.meta !== false) {
      var metaSchema = require('./refs/json-schema-draft-04.json');
      addMetaSchema(metaSchema, META_SCHEMA_ID, true);
      self._refs['http://json-schema.org/schema'] = META_SCHEMA_ID;
    }
  }


  function addInitialSchemas() {
    var optsSchemas = self._opts.schemas;
    if (!optsSchemas) return;
    if (Array.isArray(optsSchemas)) addSchema(optsSchemas);
    else for (var key in optsSchemas) addSchema(optsSchemas[key], key);
  }


  function addInitialFormats() {
    for (var name in self._opts.formats) {
      var format = self._opts.formats[name];
      addFormat(name, format);
    }
  }


  function checkUnique(id) {
    if (self._schemas[id] || self._refs[id])
      throw new Error('schema with key or id "' + id + '" already exists');
  }


  function getMetaSchemaOptions() {
    var metaOpts = util.copy(self._opts);
    for (var i=0; i<META_IGNORE_OPTIONS.length; i++)
      delete metaOpts[META_IGNORE_OPTIONS[i]];
    return metaOpts;
  }
}
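Note how the constructor assigns closures (validate, compile, addSchema, ...) rather than prototype methods: as the inline comment says, this binds them to the instance without bind, so they still work when detached. A small sketch of what that allows (schema invented for illustration):

var ajv = new Ajv();
var compile = ajv.compile; // detached reference, still bound to `ajv` via closure

var validate = compile({ type: 'string' });
console.log(validate('abc')); // true
console.log(validate(42));    // false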
218 pype/premiere/extensions/com.pond5.ppro/node_modules/ajv/lib/async.js generated vendored

@@ -1,218 +0,0 @@
'use strict';

module.exports = {
  setup: setupAsync,
  compile: compileAsync
};


var util = require('./compile/util');

var ASYNC = {
  '*': checkGenerators,
  'co*': checkGenerators,
  'es7': checkAsyncFunction
};

var TRANSPILE = {
  'nodent': getNodent,
  'regenerator': getRegenerator
};

var MODES = [
  { async: 'co*' },
  { async: 'es7', transpile: 'nodent' },
  { async: 'co*', transpile: 'regenerator' }
];


var regenerator, nodent;


function setupAsync(opts, required) {
  if (required !== false) required = true;
  var async = opts.async
    , transpile = opts.transpile
    , check;

  switch (typeof transpile) {
    case 'string':
      var get = TRANSPILE[transpile];
      if (!get) throw new Error('bad transpiler: ' + transpile);
      return (opts._transpileFunc = get(opts, required));
    case 'undefined':
    case 'boolean':
      if (typeof async == 'string') {
        check = ASYNC[async];
        if (!check) throw new Error('bad async mode: ' + async);
        return (opts.transpile = check(opts, required));
      }

      for (var i=0; i<MODES.length; i++) {
        var _opts = MODES[i];
        if (setupAsync(_opts, false)) {
          util.copy(_opts, opts);
          return opts.transpile;
        }
      }
      /* istanbul ignore next */
      throw new Error('generators, nodent and regenerator are not available');
    case 'function':
      return (opts._transpileFunc = opts.transpile);
    default:
      throw new Error('bad transpiler: ' + transpile);
  }
}


function checkGenerators(opts, required) {
  /* jshint evil: true */
  try {
    (new Function('(function*(){})()'))();
    return true;
  } catch(e) {
    /* istanbul ignore next */
    if (required) throw new Error('generators not supported');
  }
}


function checkAsyncFunction(opts, required) {
  /* jshint evil: true */
  try {
    (new Function('(async function(){})()'))();
    /* istanbul ignore next */
    return true;
  } catch(e) {
    if (required) throw new Error('es7 async functions not supported');
  }
}


function getRegenerator(opts, required) {
  try {
    if (!regenerator) {
      var name = 'regenerator';
      regenerator = require(name);
      regenerator.runtime();
    }
    if (!opts.async || opts.async === true)
      opts.async = 'es7';
    return regeneratorTranspile;
  } catch(e) {
    /* istanbul ignore next */
    if (required) throw new Error('regenerator not available');
  }
}


function regeneratorTranspile(code) {
  return regenerator.compile(code).code;
}


function getNodent(opts, required) {
  /* jshint evil: true */
  try {
    if (!nodent) {
      var name = 'nodent';
      nodent = require(name)({ log: false, dontInstallRequireHook: true });
    }
    if (opts.async != 'es7') {
      if (opts.async && opts.async !== true) console.warn('nodent transpiles only es7 async functions');
      opts.async = 'es7';
    }
    return nodentTranspile;
  } catch(e) {
    /* istanbul ignore next */
    if (required) throw new Error('nodent not available');
  }
}


function nodentTranspile(code) {
  return nodent.compile(code, '', { promises: true, sourcemap: false }).code;
}


/**
 * Creates validating function for passed schema with asynchronous loading of missing schemas.
 * `loadSchema` option should be a function that accepts schema uri and node-style callback.
 * @this Ajv
 * @param {Object} schema schema object
 * @param {Function} callback node-style callback, it is always called with 2 parameters: error (or null) and validating function.
 */
function compileAsync(schema, callback) {
  /* eslint no-shadow: 0 */
  /* jshint validthis: true */
  var schemaObj;
  var self = this;
  try {
    schemaObj = this._addSchema(schema);
  } catch(e) {
    setTimeout(function() { callback(e); });
    return;
  }
  if (schemaObj.validate) {
    setTimeout(function() { callback(null, schemaObj.validate); });
  } else {
    if (typeof this._opts.loadSchema != 'function')
      throw new Error('options.loadSchema should be a function');
    _compileAsync(schema, callback, true);
  }


  function _compileAsync(schema, callback, firstCall) {
    var validate;
    try { validate = self.compile(schema); }
    catch(e) {
      if (e.missingSchema) loadMissingSchema(e);
      else deferCallback(e);
      return;
    }
    deferCallback(null, validate);

    function loadMissingSchema(e) {
      var ref = e.missingSchema;
      if (self._refs[ref] || self._schemas[ref])
        return callback(new Error('Schema ' + ref + ' is loaded but ' + e.missingRef + ' cannot be resolved'));
      var _callbacks = self._loadingSchemas[ref];
      if (_callbacks) {
        if (typeof _callbacks == 'function')
          self._loadingSchemas[ref] = [_callbacks, schemaLoaded];
        else
          _callbacks[_callbacks.length] = schemaLoaded;
      } else {
        self._loadingSchemas[ref] = schemaLoaded;
        self._opts.loadSchema(ref, function (err, sch) {
          var _callbacks = self._loadingSchemas[ref];
          delete self._loadingSchemas[ref];
          if (typeof _callbacks == 'function') {
            _callbacks(err, sch);
          } else {
            for (var i=0; i<_callbacks.length; i++)
              _callbacks[i](err, sch);
          }
        });
      }

      function schemaLoaded(err, sch) {
        if (err) return callback(err);
        if (!(self._refs[ref] || self._schemas[ref])) {
          try {
            self.addSchema(sch, ref);
          } catch(e) {
            callback(e);
            return;
          }
        }
        _compileAsync(schema, callback);
      }
    }

    function deferCallback(err, validate) {
      if (firstCall) setTimeout(function() { callback(err, validate); });
      else return callback(err, validate);
    }
  }
}
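compileAsync() above only kicks in when compile() throws with a missingSchema property; each missing $ref is fetched once via the loadSchema option and registered with addSchema before retrying. A hedged sketch, assuming a hypothetical fetchJson() helper that retrieves a schema by URI (the URI and schema here are invented):

var ajv = new Ajv({
  // node-style callback, as compileAsync requires
  loadSchema: function (uri, callback) {
    fetchJson(uri, function (err, schemaJson) { // fetchJson is assumed, not part of ajv
      callback(err, schemaJson);
    });
  }
});

ajv.compileAsync({ $ref: 'http://example.com/defs.json#/positiveInteger' },
  function (err, validate) {
    if (err) throw err;
    console.log(validate(17));
  });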
26 pype/premiere/extensions/com.pond5.ppro/node_modules/ajv/lib/cache.js generated vendored

@@ -1,26 +0,0 @@
'use strict';


var Cache = module.exports = function Cache() {
  this._cache = {};
};


Cache.prototype.put = function Cache_put(key, value) {
  this._cache[key] = value;
};


Cache.prototype.get = function Cache_get(key) {
  return this._cache[key];
};


Cache.prototype.del = function Cache_del(key) {
  delete this._cache[key];
};


Cache.prototype.clear = function Cache_clear() {
  this._cache = {};
};
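Since the Ajv constructor does this._cache = opts.cache || new Cache, anything implementing the same put/get/del/clear contract can be swapped in via the cache option. A minimal sketch of a counting cache, purely illustrative:

function CountingCache() {
  this._cache = {};
  this.hits = 0;
}
CountingCache.prototype.put = function (key, value) { this._cache[key] = value; };
CountingCache.prototype.get = function (key) {
  if (key in this._cache) this.hits++; // track lookups that found an entry
  return this._cache[key];
};
CountingCache.prototype.del = function (key) { delete this._cache[key]; };
CountingCache.prototype.clear = function () { this._cache = {}; };

var ajv = new Ajv({ cache: new CountingCache() });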
28 pype/premiere/extensions/com.pond5.ppro/node_modules/ajv/lib/compile/_rules.js generated vendored

@@ -1,28 +0,0 @@
'use strict';

// all requires must be explicit because browserify won't work with dynamic requires
module.exports = {
  '$ref': require('../dotjs/ref'),
  allOf: require('../dotjs/allOf'),
  anyOf: require('../dotjs/anyOf'),
  dependencies: require('../dotjs/dependencies'),
  'enum': require('../dotjs/enum'),
  format: require('../dotjs/format'),
  items: require('../dotjs/items'),
  maximum: require('../dotjs/_limit'),
  minimum: require('../dotjs/_limit'),
  maxItems: require('../dotjs/_limitItems'),
  minItems: require('../dotjs/_limitItems'),
  maxLength: require('../dotjs/_limitLength'),
  minLength: require('../dotjs/_limitLength'),
  maxProperties: require('../dotjs/_limitProperties'),
  minProperties: require('../dotjs/_limitProperties'),
  multipleOf: require('../dotjs/multipleOf'),
  not: require('../dotjs/not'),
  oneOf: require('../dotjs/oneOf'),
  pattern: require('../dotjs/pattern'),
  properties: require('../dotjs/properties'),
  required: require('../dotjs/required'),
  uniqueItems: require('../dotjs/uniqueItems'),
  validate: require('../dotjs/validate')
};
45 pype/premiere/extensions/com.pond5.ppro/node_modules/ajv/lib/compile/equal.js generated vendored

@@ -1,45 +0,0 @@
'use strict';

/*eslint complexity: 0*/

module.exports = function equal(a, b) {
  if (a === b) return true;

  var arrA = Array.isArray(a)
    , arrB = Array.isArray(b)
    , i;

  if (arrA && arrB) {
    if (a.length != b.length) return false;
    for (i = 0; i < a.length; i++)
      if (!equal(a[i], b[i])) return false;
    return true;
  }

  if (arrA != arrB) return false;

  if (a && b && typeof a === 'object' && typeof b === 'object') {
    var keys = Object.keys(a);
    if (keys.length !== Object.keys(b).length) return false;

    var dateA = a instanceof Date
      , dateB = b instanceof Date;
    if (dateA && dateB) return a.getTime() == b.getTime();
    if (dateA != dateB) return false;

    var regexpA = a instanceof RegExp
      , regexpB = b instanceof RegExp;
    if (regexpA && regexpB) return a.toString() == b.toString();
    if (regexpA != regexpB) return false;

    for (i = 0; i < keys.length; i++)
      if (!Object.prototype.hasOwnProperty.call(b, keys[i])) return false;

    for (i = 0; i < keys.length; i++)
      if (!equal(a[keys[i]], b[keys[i]])) return false;

    return true;
  }

  return false;
};
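This deep-equal treats arrays and plain objects structurally, Dates by timestamp, and RegExps by source text; anything else falls back to ===. It is what backs keywords such as enum and uniqueItems. For example (values invented for illustration):

var equal = require('ajv/lib/compile/equal');

console.log(equal({ a: [1, 2] }, { a: [1, 2] }));    // true: recursive key/element comparison
console.log(equal(new Date(0), new Date(0)));        // true: compared via getTime()
console.log(equal(/ab+/i, new RegExp('ab+', 'i')));  // true: compared via toString()
console.log(equal({ a: 1 }, { a: 1, b: undefined })); // false: key counts differ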
164 pype/premiere/extensions/com.pond5.ppro/node_modules/ajv/lib/compile/formats.js generated vendored

@@ -1,164 +0,0 @@
'use strict';

var util = require('./util');

var DATE = /^\d\d\d\d-(\d\d)-(\d\d)$/;
var DAYS = [0,31,29,31,30,31,30,31,31,30,31,30,31];
var TIME = /^(\d\d):(\d\d):(\d\d)(\.\d+)?(z|[+-]\d\d:\d\d)?$/i;
var HOSTNAME = /^[a-z0-9](?:[a-z0-9-]{0,61}[a-z0-9])?(?:\.[a-z0-9](?:[-0-9a-z]{0,61}[0-9a-z])?)*$/i;
var URI = /^(?:[a-z][a-z0-9+\-.]*:)?(?:\/?\/(?:(?:[a-z0-9\-._~!$&'()*+,;=:]|%[0-9a-f]{2})*@)?(?:\[(?:(?:(?:(?:[0-9a-f]{1,4}:){6}|::(?:[0-9a-f]{1,4}:){5}|(?:[0-9a-f]{1,4})?::(?:[0-9a-f]{1,4}:){4}|(?:(?:[0-9a-f]{1,4}:){0,1}[0-9a-f]{1,4})?::(?:[0-9a-f]{1,4}:){3}|(?:(?:[0-9a-f]{1,4}:){0,2}[0-9a-f]{1,4})?::(?:[0-9a-f]{1,4}:){2}|(?:(?:[0-9a-f]{1,4}:){0,3}[0-9a-f]{1,4})?::[0-9a-f]{1,4}:|(?:(?:[0-9a-f]{1,4}:){0,4}[0-9a-f]{1,4})?::)(?:[0-9a-f]{1,4}:[0-9a-f]{1,4}|(?:(?:25[0-5]|2[0-4]\d|[01]?\d\d?)\.){3}(?:25[0-5]|2[0-4]\d|[01]?\d\d?))|(?:(?:[0-9a-f]{1,4}:){0,5}[0-9a-f]{1,4})?::[0-9a-f]{1,4}|(?:(?:[0-9a-f]{1,4}:){0,6}[0-9a-f]{1,4})?::)|[Vv][0-9a-f]+\.[a-z0-9\-._~!$&'()*+,;=:]+)\]|(?:(?:25[0-5]|2[0-4]\d|[01]?\d\d?)\.){3}(?:25[0-5]|2[0-4]\d|[01]?\d\d?)|(?:[a-z0-9\-._~!$&'()*+,;=]|%[0-9a-f]{2})*)(?::\d*)?(?:\/(?:[a-z0-9\-._~!$&'()*+,;=:@]|%[0-9a-f]{2})*)*|\/(?:(?:[a-z0-9\-._~!$&'()*+,;=:@]|%[0-9a-f]{2})+(?:\/(?:[a-z0-9\-._~!$&'()*+,;=:@]|%[0-9a-f]{2})*)*)?|(?:[a-z0-9\-._~!$&'()*+,;=:@]|%[0-9a-f]{2})+(?:\/(?:[a-z0-9\-._~!$&'()*+,;=:@]|%[0-9a-f]{2})*)*)(?:\?(?:[a-z0-9\-._~!$&'()*+,;=:@\/?]|%[0-9a-f]{2})*)?(?:\#(?:[a-z0-9\-._~!$&'()*+,;=:@\/?]|%[0-9a-f]{2})*)?$/i;
var UUID = /^(?:urn\:uuid\:)?[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12}$/i;
var JSON_POINTER = /^(?:\/(?:[^~\/]|~0|~1)*)*$|^\#(?:\/(?:[a-z0-9_\-\.!$&'()*+,;:=@]|%[0-9a-f]{2}|~0|~1)*)*$/i;
var RELATIVE_JSON_POINTER = /^(?:0|[1-9][0-9]*)(?:\#|(?:\/(?:[^~\/]|~0|~1)*)*)$/;


module.exports = formats;

function formats(mode) {
  mode = mode == 'full' ? 'full' : 'fast';
  var formatDefs = util.copy(formats[mode]);
  for (var fName in formats.compare) {
    formatDefs[fName] = {
      validate: formatDefs[fName],
      compare: formats.compare[fName]
    };
  }
  return formatDefs;
}


formats.fast = {
  // date: http://tools.ietf.org/html/rfc3339#section-5.6
  date: /^\d\d\d\d-[0-1]\d-[0-3]\d$/,
  // date-time: http://tools.ietf.org/html/rfc3339#section-5.6
  time: /^[0-2]\d:[0-5]\d:[0-5]\d(?:\.\d+)?(?:z|[+-]\d\d:\d\d)?$/i,
  'date-time': /^\d\d\d\d-[0-1]\d-[0-3]\d[t\s][0-2]\d:[0-5]\d:[0-5]\d(?:\.\d+)?(?:z|[+-]\d\d:\d\d)$/i,
  // uri: https://github.com/mafintosh/is-my-json-valid/blob/master/formats.js
  uri: /^(?:[a-z][a-z0-9+-.]*)?(?:\:|\/)\/?[^\s]*$/i,
  // email (sources from jsen validator):
  // http://stackoverflow.com/questions/201323/using-a-regular-expression-to-validate-an-email-address#answer-8829363
  // http://www.w3.org/TR/html5/forms.html#valid-e-mail-address (search for 'willful violation')
  email: /^[a-z0-9.!#$%&'*+\/=?^_`{|}~-]+@[a-z0-9](?:[a-z0-9-]{0,61}[a-z0-9])?(?:\.[a-z0-9](?:[a-z0-9-]{0,61}[a-z0-9])?)*$/i,
  hostname: HOSTNAME,
  // optimized https://www.safaribooksonline.com/library/view/regular-expressions-cookbook/9780596802837/ch07s16.html
  ipv4: /^(?:(?:25[0-5]|2[0-4]\d|[01]?\d\d?)\.){3}(?:25[0-5]|2[0-4]\d|[01]?\d\d?)$/,
  // optimized http://stackoverflow.com/questions/53497/regular-expression-that-matches-valid-ipv6-addresses
  ipv6: /^\s*(?:(?:(?:[0-9a-f]{1,4}:){7}(?:[0-9a-f]{1,4}|:))|(?:(?:[0-9a-f]{1,4}:){6}(?::[0-9a-f]{1,4}|(?:(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(?:\.(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3})|:))|(?:(?:[0-9a-f]{1,4}:){5}(?:(?:(?::[0-9a-f]{1,4}){1,2})|:(?:(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(?:\.(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3})|:))|(?:(?:[0-9a-f]{1,4}:){4}(?:(?:(?::[0-9a-f]{1,4}){1,3})|(?:(?::[0-9a-f]{1,4})?:(?:(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(?:\.(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(?:(?:[0-9a-f]{1,4}:){3}(?:(?:(?::[0-9a-f]{1,4}){1,4})|(?:(?::[0-9a-f]{1,4}){0,2}:(?:(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(?:\.(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(?:(?:[0-9a-f]{1,4}:){2}(?:(?:(?::[0-9a-f]{1,4}){1,5})|(?:(?::[0-9a-f]{1,4}){0,3}:(?:(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(?:\.(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(?:(?:[0-9a-f]{1,4}:){1}(?:(?:(?::[0-9a-f]{1,4}){1,6})|(?:(?::[0-9a-f]{1,4}){0,4}:(?:(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(?:\.(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(?::(?:(?:(?::[0-9a-f]{1,4}){1,7})|(?:(?::[0-9a-f]{1,4}){0,5}:(?:(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(?:\.(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:)))(?:%.+)?\s*$/i,
  regex: regex,
  // uuid: http://tools.ietf.org/html/rfc4122
  uuid: UUID,
  // JSON-pointer: https://tools.ietf.org/html/rfc6901
  // uri fragment: https://tools.ietf.org/html/rfc3986#appendix-A
  'json-pointer': JSON_POINTER,
  // relative JSON-pointer: http://tools.ietf.org/html/draft-luff-relative-json-pointer-00
  'relative-json-pointer': RELATIVE_JSON_POINTER
};


formats.full = {
  date: date,
  time: time,
  'date-time': date_time,
  uri: uri,
  email: /^[a-z0-9!#$%&'*+\/=?^_`{|}~-]+(?:\.[a-z0-9!#$%&''*+\/=?^_`{|}~-]+)*@(?:[a-z0-9](?:[a-z0-9-]*[a-z0-9])?\.)+[a-z0-9](?:[a-z0-9-]*[a-z0-9])?$/i,
  hostname: hostname,
  ipv4: /^(?:(?:25[0-5]|2[0-4]\d|[01]?\d\d?)\.){3}(?:25[0-5]|2[0-4]\d|[01]?\d\d?)$/,
  ipv6: /^\s*(?:(?:(?:[0-9a-f]{1,4}:){7}(?:[0-9a-f]{1,4}|:))|(?:(?:[0-9a-f]{1,4}:){6}(?::[0-9a-f]{1,4}|(?:(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(?:\.(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3})|:))|(?:(?:[0-9a-f]{1,4}:){5}(?:(?:(?::[0-9a-f]{1,4}){1,2})|:(?:(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(?:\.(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3})|:))|(?:(?:[0-9a-f]{1,4}:){4}(?:(?:(?::[0-9a-f]{1,4}){1,3})|(?:(?::[0-9a-f]{1,4})?:(?:(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(?:\.(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(?:(?:[0-9a-f]{1,4}:){3}(?:(?:(?::[0-9a-f]{1,4}){1,4})|(?:(?::[0-9a-f]{1,4}){0,2}:(?:(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(?:\.(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(?:(?:[0-9a-f]{1,4}:){2}(?:(?:(?::[0-9a-f]{1,4}){1,5})|(?:(?::[0-9a-f]{1,4}){0,3}:(?:(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(?:\.(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(?:(?:[0-9a-f]{1,4}:){1}(?:(?:(?::[0-9a-f]{1,4}){1,6})|(?:(?::[0-9a-f]{1,4}){0,4}:(?:(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(?:\.(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(?::(?:(?:(?::[0-9a-f]{1,4}){1,7})|(?:(?::[0-9a-f]{1,4}){0,5}:(?:(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(?:\.(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:)))(?:%.+)?\s*$/i,
  regex: regex,
  uuid: UUID,
  'json-pointer': JSON_POINTER,
  'relative-json-pointer': RELATIVE_JSON_POINTER
};


formats.compare = {
  date: compareDate,
  time: compareTime,
  'date-time': compareDateTime
};


function date(str) {
  // full-date from http://tools.ietf.org/html/rfc3339#section-5.6
  var matches = str.match(DATE);
  if (!matches) return false;

  var month = +matches[1];
  var day = +matches[2];
  return month >= 1 && month <= 12 && day >= 1 && day <= DAYS[month];
}


function time(str, full) {
  var matches = str.match(TIME);
  if (!matches) return false;

  var hour = matches[1];
  var minute = matches[2];
  var second = matches[3];
  var timeZone = matches[5];
  return hour <= 23 && minute <= 59 && second <= 59 && (!full || timeZone);
}


var DATE_TIME_SEPARATOR = /t|\s/i;
function date_time(str) {
  // http://tools.ietf.org/html/rfc3339#section-5.6
  var dateTime = str.split(DATE_TIME_SEPARATOR);
  return dateTime.length == 2 && date(dateTime[0]) && time(dateTime[1], true);
}


function hostname(str) {
  // https://tools.ietf.org/html/rfc1034#section-3.5
  // https://tools.ietf.org/html/rfc1123#section-2
  return str.length <= 255 && HOSTNAME.test(str);
}


var NOT_URI_FRAGMENT = /\/|\:/;
function uri(str) {
  // http://jmrware.com/articles/2009/uri_regexp/URI_regex.html + optional protocol + required "."
  return NOT_URI_FRAGMENT.test(str) && URI.test(str);
}


function regex(str) {
  try {
    new RegExp(str);
    return true;
  } catch(e) {
    return false;
  }
}


function compareDate(d1, d2) {
  if (!(d1 && d2)) return;
  if (d1 > d2) return 1;
  if (d1 < d2) return -1;
  if (d1 === d2) return 0;
}


function compareTime(t1, t2) {
  if (!(t1 && t2)) return;
  t1 = t1.match(TIME);
  t2 = t2.match(TIME);
  if (!(t1 && t2)) return;
  t1 = t1[1] + t1[2] + t1[3] + (t1[4]||'');
  t2 = t2[1] + t2[2] + t2[3] + (t2[4]||'');
  if (t1 > t2) return 1;
  if (t1 < t2) return -1;
  if (t1 === t2) return 0;
}


function compareDateTime(dt1, dt2) {
  if (!(dt1 && dt2)) return;
  dt1 = dt1.split(DATE_TIME_SEPARATOR);
  dt2 = dt2.split(DATE_TIME_SEPARATOR);
  var res = compareDate(dt1[0], dt2[0]);
  if (res === undefined) return;
  return res || compareTime(dt1[1], dt2[1]);
}
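The practical difference between the two tables: formats.fast validates dates purely by regex, while formats.full also range-checks month and day against the DAYS table via date(). A small sketch of the difference, using the format option documented above:

var fast = new Ajv({ format: 'fast' });
var full = new Ajv({ format: 'full' });

var schema = { type: 'string', format: 'date' };

// '2018-02-31' matches the fast regex but fails full's DAYS range check
console.log(fast.validate(schema, '2018-02-31')); // true
console.log(full.validate(schema, '2018-02-31')); // false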
390 pype/premiere/extensions/com.pond5.ppro/node_modules/ajv/lib/compile/index.js generated vendored

@@ -1,390 +0,0 @@
'use strict';

var resolve = require('./resolve')
  , util = require('./util')
  , stableStringify = require('json-stable-stringify')
  , async = require('../async');

var beautify;

function loadBeautify() {
  if (beautify === undefined) {
    var name = 'js-beautify';
    try { beautify = require(name).js_beautify; }
    catch(e) { beautify = false; }
  }
}

var validateGenerator = require('../dotjs/validate');

/**
 * Functions below are used inside compiled validation functions
 */

var co = require('co');
var ucs2length = util.ucs2length;
var equal = require('./equal');

// this error is thrown by async schemas to return validation errors via exception
var ValidationError = require('./validation_error');

module.exports = compile;


/**
 * Compiles schema to validation function
 * @this Ajv
 * @param {Object} schema schema object
 * @param {Object} root object with information about the root schema for this schema
 * @param {Object} localRefs the hash of local references inside the schema (created by resolve.id), used for inline resolution
 * @param {String} baseId base ID for IDs in the schema
 * @return {Function} validation function
 */
function compile(schema, root, localRefs, baseId) {
  /* jshint validthis: true, evil: true */
  /* eslint no-shadow: 0 */
  var self = this
    , opts = this._opts
    , refVal = [ undefined ]
    , refs = {}
    , patterns = []
    , patternsHash = {}
    , defaults = []
    , defaultsHash = {}
    , customRules = []
    , keepSourceCode = opts.sourceCode !== false;

  root = root || { schema: schema, refVal: refVal, refs: refs };

  var c = checkCompiling.call(this, schema, root, baseId);
  var compilation = this._compilations[c.index];
  if (c.compiling) return (compilation.callValidate = callValidate);

  var formats = this._formats;
  var RULES = this.RULES;

  try {
    var v = localCompile(schema, root, localRefs, baseId);
    compilation.validate = v;
    var cv = compilation.callValidate;
    if (cv) {
      cv.schema = v.schema;
      cv.errors = null;
      cv.refs = v.refs;
      cv.refVal = v.refVal;
      cv.root = v.root;
      cv.$async = v.$async;
      if (keepSourceCode) cv.sourceCode = v.sourceCode;
    }
    return v;
  } finally {
    endCompiling.call(this, schema, root, baseId);
  }

  function callValidate() {
    var validate = compilation.validate;
    var result = validate.apply(null, arguments);
    callValidate.errors = validate.errors;
    return result;
  }

  function localCompile(_schema, _root, localRefs, baseId) {
    var isRoot = !_root || (_root && _root.schema == _schema);
    if (_root.schema != root.schema)
      return compile.call(self, _schema, _root, localRefs, baseId);

    var $async = _schema.$async === true;
    if ($async && !opts.transpile) async.setup(opts);

    var sourceCode = validateGenerator({
      isTop: true,
      schema: _schema,
      isRoot: isRoot,
      baseId: baseId,
      root: _root,
      schemaPath: '',
      errSchemaPath: '#',
      errorPath: '""',
      RULES: RULES,
      validate: validateGenerator,
      util: util,
      resolve: resolve,
      resolveRef: resolveRef,
      usePattern: usePattern,
      useDefault: useDefault,
      useCustomRule: useCustomRule,
      opts: opts,
      formats: formats,
      self: self
    });

    sourceCode = vars(refVal, refValCode) + vars(patterns, patternCode)
                   + vars(defaults, defaultCode) + vars(customRules, customRuleCode)
                   + sourceCode;

    if (opts.beautify) {
      loadBeautify();
      /* istanbul ignore else */
      if (beautify) sourceCode = beautify(sourceCode, opts.beautify);
      else console.error('"npm install js-beautify" to use beautify option');
    }
    // console.log('\n\n\n *** \n', sourceCode);
    var validate, validateCode
      , transpile = opts._transpileFunc;
    try {
      validateCode = $async && transpile
                      ? transpile(sourceCode)
                      : sourceCode;

      var makeValidate = new Function(
        'self',
        'RULES',
        'formats',
        'root',
        'refVal',
        'defaults',
        'customRules',
        'co',
        'equal',
        'ucs2length',
        'ValidationError',
        validateCode
      );

      validate = makeValidate(
        self,
        RULES,
        formats,
        root,
        refVal,
        defaults,
        customRules,
        co,
        equal,
        ucs2length,
        ValidationError
      );

      refVal[0] = validate;
    } catch(e) {
      console.error('Error compiling schema, function code:', validateCode);
      throw e;
    }

    validate.schema = _schema;
    validate.errors = null;
    validate.refs = refs;
    validate.refVal = refVal;
    validate.root = isRoot ? validate : _root;
    if ($async) validate.$async = true;
    if (keepSourceCode) validate.sourceCode = sourceCode;
    if (opts.sourceCode === true) {
      validate.source = {
        patterns: patterns,
        defaults: defaults
      };
    }

    return validate;
  }

  function resolveRef(baseId, ref, isRoot) {
    ref = resolve.url(baseId, ref);
    var refIndex = refs[ref];
    var _refVal, refCode;
    if (refIndex !== undefined) {
      _refVal = refVal[refIndex];
      refCode = 'refVal[' + refIndex + ']';
      return resolvedRef(_refVal, refCode);
    }
    if (!isRoot && root.refs) {
      var rootRefId = root.refs[ref];
      if (rootRefId !== undefined) {
        _refVal = root.refVal[rootRefId];
        refCode = addLocalRef(ref, _refVal);
        return resolvedRef(_refVal, refCode);
      }
    }

    refCode = addLocalRef(ref);
    var v = resolve.call(self, localCompile, root, ref);
    if (!v) {
      var localSchema = localRefs && localRefs[ref];
      if (localSchema) {
        v = resolve.inlineRef(localSchema, opts.inlineRefs)
             ? localSchema
             : compile.call(self, localSchema, root, localRefs, baseId);
      }
    }

    if (v) {
      replaceLocalRef(ref, v);
      return resolvedRef(v, refCode);
    }
  }

  function addLocalRef(ref, v) {
    var refId = refVal.length;
    refVal[refId] = v;
    refs[ref] = refId;
    return 'refVal' + refId;
  }

  function replaceLocalRef(ref, v) {
    var refId = refs[ref];
    refVal[refId] = v;
  }

  function resolvedRef(refVal, code) {
    return typeof refVal == 'object'
            ? { code: code, schema: refVal, inline: true }
            : { code: code, $async: refVal && refVal.$async };
  }

  function usePattern(regexStr) {
    var index = patternsHash[regexStr];
    if (index === undefined) {
      index = patternsHash[regexStr] = patterns.length;
      patterns[index] = regexStr;
    }
    return 'pattern' + index;
  }

  function useDefault(value) {
    switch (typeof value) {
      case 'boolean':
      case 'number':
        return '' + value;
      case 'string':
        return util.toQuotedString(value);
      case 'object':
        if (value === null) return 'null';
        var valueStr = stableStringify(value);
        var index = defaultsHash[valueStr];
        if (index === undefined) {
          index = defaultsHash[valueStr] = defaults.length;
          defaults[index] = value;
        }
        return 'default' + index;
    }
  }

  function useCustomRule(rule, schema, parentSchema, it) {
    var validateSchema = rule.definition.validateSchema;
    if (validateSchema && self._opts.validateSchema !== false) {
      var valid = validateSchema(schema);
      if (!valid) {
        var message = 'keyword schema is invalid: ' + self.errorsText(validateSchema.errors);
        if (self._opts.validateSchema == 'log') console.error(message);
        else throw new Error(message);
      }
    }

    var compile = rule.definition.compile
      , inline = rule.definition.inline
      , macro = rule.definition.macro;

    var validate;
    if (compile) {
      validate = compile.call(self, schema, parentSchema, it);
    } else if (macro) {
      validate = macro.call(self, schema, parentSchema, it);
      if (opts.validateSchema !== false) self.validateSchema(validate, true);
    } else if (inline) {
      validate = inline.call(self, it, rule.keyword, schema, parentSchema);
    } else {
      validate = rule.definition.validate;
    }

    var index = customRules.length;
    customRules[index] = validate;

    return {
      code: 'customRule' + index,
      validate: validate
    };
  }
}


/**
 * Checks if the schema is currently compiled
 * @this Ajv
 * @param {Object} schema schema to compile
 * @param {Object} root root object
 * @param {String} baseId base schema ID
 * @return {Object} object with properties "index" (compilation index) and "compiling" (boolean)
 */
function checkCompiling(schema, root, baseId) {
  /* jshint validthis: true */
  var index = compIndex.call(this, schema, root, baseId);
  if (index >= 0) return { index: index, compiling: true };
  index = this._compilations.length;
  this._compilations[index] = {
    schema: schema,
    root: root,
    baseId: baseId
  };
  return { index: index, compiling: false };
}


/**
 * Removes the schema from the currently compiled list
 * @this Ajv
 * @param {Object} schema schema to compile
 * @param {Object} root root object
 * @param {String} baseId base schema ID
 */
function endCompiling(schema, root, baseId) {
  /* jshint validthis: true */
  var i = compIndex.call(this, schema, root, baseId);
  if (i >= 0) this._compilations.splice(i, 1);
}


/**
 * Index of schema compilation in the currently compiled list
 * @this Ajv
 * @param {Object} schema schema to compile
 * @param {Object} root root object
 * @param {String} baseId base schema ID
 * @return {Integer} compilation index
 */
function compIndex(schema, root, baseId) {
  /* jshint validthis: true */
  for (var i=0; i<this._compilations.length; i++) {
    var c = this._compilations[i];
    if (c.schema == schema && c.root == root && c.baseId == baseId) return i;
  }
  return -1;
}


function patternCode(i, patterns) {
  return 'var pattern' + i + ' = new RegExp(' + util.toQuotedString(patterns[i]) + ');';
}


function defaultCode(i) {
  return 'var default' + i + ' = defaults[' + i + '];';
}


function refValCode(i, refVal) {
  return refVal[i] ? 'var refVal' + i + ' = refVal[' + i + '];' : '';
}


function customRuleCode(i) {
  return 'var customRule' + i + ' = customRules[' + i + '];';
}


function vars(arr, statement) {
  if (!arr.length) return '';
  var code = '';
  for (var i=0; i<arr.length; i++)
    code += statement(i, arr);
  return code;
}
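Because keepSourceCode defaults to true, the generated function body survives on the returned validator, which is handy for inspecting what validateGenerator and the vars() prologue emitted. A sketch (schema invented for illustration):

var ajv = new Ajv({ sourceCode: true }); // also exposes validate.source (patterns, defaults)
var validate = ajv.compile({ type: 'number', minimum: 0 });

// the string assembled by localCompile above, before it went through new Function
console.log(validate.sourceCode);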
267 pype/premiere/extensions/com.pond5.ppro/node_modules/ajv/lib/compile/resolve.js generated vendored

@@ -1,267 +0,0 @@
'use strict';

var url = require('url')
  , equal = require('./equal')
  , util = require('./util')
  , SchemaObject = require('./schema_obj');

module.exports = resolve;

resolve.normalizeId = normalizeId;
resolve.fullPath = getFullPath;
resolve.url = resolveUrl;
resolve.ids = resolveIds;
resolve.inlineRef = inlineRef;
resolve.schema = resolveSchema;

/**
 * [resolve and compile the references ($ref)]
 * @this Ajv
 * @param {Function} compile reference to schema compilation function (localCompile)
 * @param {Object} root object with information about the root schema for the current schema
 * @param {String} ref reference to resolve
 * @return {Object|Function} schema object (if the schema can be inlined) or validation function
 */
function resolve(compile, root, ref) {
  /* jshint validthis: true */
  var refVal = this._refs[ref];
  if (typeof refVal == 'string') {
    if (this._refs[refVal]) refVal = this._refs[refVal];
    else return resolve.call(this, compile, root, refVal);
  }

  refVal = refVal || this._schemas[ref];
  if (refVal instanceof SchemaObject) {
    return inlineRef(refVal.schema, this._opts.inlineRefs)
      ? refVal.schema
      : refVal.validate || this._compile(refVal);
  }

  var res = resolveSchema.call(this, root, ref);
  var schema, v, baseId;
  if (res) {
    schema = res.schema;
    root = res.root;
    baseId = res.baseId;
  }

  if (schema instanceof SchemaObject) {
    v = schema.validate || compile.call(this, schema.schema, root, undefined, baseId);
  } else if (schema) {
    v = inlineRef(schema, this._opts.inlineRefs)
      ? schema
      : compile.call(this, schema, root, undefined, baseId);
  }

  return v;
}


/**
 * Resolve schema, its root and baseId
 * @this Ajv
 * @param {Object} root root object with properties schema, refVal, refs
 * @param {String} ref reference to resolve
 * @return {Object} object with properties schema, root, baseId
 */
function resolveSchema(root, ref) {
  /* jshint validthis: true */
  var p = url.parse(ref, false, true)
    , refPath = _getFullPath(p)
    , baseId = getFullPath(root.schema.id);
  if (refPath !== baseId) {
    var id = normalizeId(refPath);
    var refVal = this._refs[id];
    if (typeof refVal == 'string') {
      return resolveRecursive.call(this, root, refVal, p);
    } else if (refVal instanceof SchemaObject) {
      if (!refVal.validate) this._compile(refVal);
      root = refVal;
    } else {
      refVal = this._schemas[id];
      if (refVal instanceof SchemaObject) {
        if (!refVal.validate) this._compile(refVal);
        if (id == normalizeId(ref))
          return { schema: refVal, root: root, baseId: baseId };
        root = refVal;
      } else {
        return;
      }
    }
    if (!root.schema) return;
    baseId = getFullPath(root.schema.id);
  }
  return getJsonPointer.call(this, p, baseId, root.schema, root);
}


/* @this Ajv */
function resolveRecursive(root, ref, parsedRef) {
  /* jshint validthis: true */
  var res = resolveSchema.call(this, root, ref);
  if (res) {
    var schema = res.schema;
    var baseId = res.baseId;
    root = res.root;
    if (schema.id) baseId = resolveUrl(baseId, schema.id);
    return getJsonPointer.call(this, parsedRef, baseId, schema, root);
  }
}


var PREVENT_SCOPE_CHANGE = util.toHash(['properties', 'patternProperties', 'enum', 'dependencies', 'definitions']);
/* @this Ajv */
function getJsonPointer(parsedRef, baseId, schema, root) {
  /* jshint validthis: true */
  parsedRef.hash = parsedRef.hash || '';
  if (parsedRef.hash.slice(0,2) != '#/') return;
  var parts = parsedRef.hash.split('/');

  for (var i = 1; i < parts.length; i++) {
    var part = parts[i];
    if (part) {
      part = util.unescapeFragment(part);
      schema = schema[part];
      if (!schema) break;
      if (schema.id && !PREVENT_SCOPE_CHANGE[part]) baseId = resolveUrl(baseId, schema.id);
      if (schema.$ref) {
        var $ref = resolveUrl(baseId, schema.$ref);
        var res = resolveSchema.call(this, root, $ref);
        if (res) {
          schema = res.schema;
          root = res.root;
          baseId = res.baseId;
        }
      }
    }
  }
  if (schema && schema != root.schema)
    return { schema: schema, root: root, baseId: baseId };
}


var SIMPLE_INLINED = util.toHash([
  'type', 'format', 'pattern',
  'maxLength', 'minLength',
  'maxProperties', 'minProperties',
  'maxItems', 'minItems',
  'maximum', 'minimum',
  'uniqueItems', 'multipleOf',
  'required', 'enum'
]);
function inlineRef(schema, limit) {
  if (limit === false) return false;
  if (limit === undefined || limit === true) return checkNoRef(schema);
  else if (limit) return countKeys(schema) <= limit;
}


function checkNoRef(schema) {
  var item;
  if (Array.isArray(schema)) {
    for (var i=0; i<schema.length; i++) {
      item = schema[i];
      if (typeof item == 'object' && !checkNoRef(item)) return false;
    }
  } else {
    for (var key in schema) {
      if (key == '$ref') return false;
      item = schema[key];
      if (typeof item == 'object' && !checkNoRef(item)) return false;
    }
  }
  return true;
}


function countKeys(schema) {
  var count = 0, item;
  if (Array.isArray(schema)) {
    for (var i=0; i<schema.length; i++) {
      item = schema[i];
      if (typeof item == 'object') count += countKeys(item);
      if (count == Infinity) return Infinity;
    }
  } else {
    for (var key in schema) {
      if (key == '$ref') return Infinity;
      if (SIMPLE_INLINED[key]) {
        count++;
      } else {
        item = schema[key];
        if (typeof item == 'object') count += countKeys(item) + 1;
        if (count == Infinity) return Infinity;
      }
    }
  }
  return count;
}


function getFullPath(id, normalize) {
  if (normalize !== false) id = normalizeId(id);
  var p = url.parse(id, false, true);
|
||||
return _getFullPath(p);
|
||||
}
|
||||
|
||||
|
||||
function _getFullPath(p) {
|
||||
var protocolSeparator = p.protocol || p.href.slice(0,2) == '//' ? '//' : '';
|
||||
return (p.protocol||'') + protocolSeparator + (p.host||'') + (p.path||'') + '#';
|
||||
}
|
||||
|
||||
|
||||
var TRAILING_SLASH_HASH = /#\/?$/;
|
||||
function normalizeId(id) {
|
||||
return id ? id.replace(TRAILING_SLASH_HASH, '') : '';
|
||||
}
|
||||
|
||||
|
||||
function resolveUrl(baseId, id) {
|
||||
id = normalizeId(id);
|
||||
return url.resolve(baseId, id);
|
||||
}
|
||||
|
||||
|
||||
/* @this Ajv */
|
||||
function resolveIds(schema) {
|
||||
/* eslint no-shadow: 0 */
|
||||
/* jshint validthis: true */
|
||||
var id = normalizeId(schema.id);
|
||||
var localRefs = {};
|
||||
_resolveIds.call(this, schema, getFullPath(id, false), id);
|
||||
return localRefs;
|
||||
|
||||
/* @this Ajv */
|
||||
function _resolveIds(schema, fullPath, baseId) {
|
||||
/* jshint validthis: true */
|
||||
if (Array.isArray(schema)) {
|
||||
for (var i=0; i<schema.length; i++)
|
||||
_resolveIds.call(this, schema[i], fullPath+'/'+i, baseId);
|
||||
} else if (schema && typeof schema == 'object') {
|
||||
if (typeof schema.id == 'string') {
|
||||
var id = baseId = baseId
|
||||
? url.resolve(baseId, schema.id)
|
||||
: schema.id;
|
||||
id = normalizeId(id);
|
||||
|
||||
var refVal = this._refs[id];
|
||||
if (typeof refVal == 'string') refVal = this._refs[refVal];
|
||||
if (refVal && refVal.schema) {
|
||||
if (!equal(schema, refVal.schema))
|
||||
throw new Error('id "' + id + '" resolves to more than one schema');
|
||||
} else if (id != normalizeId(fullPath)) {
|
||||
if (id[0] == '#') {
|
||||
if (localRefs[id] && !equal(schema, localRefs[id]))
|
||||
throw new Error('id "' + id + '" resolves to more than one schema');
|
||||
localRefs[id] = schema;
|
||||
} else {
|
||||
this._refs[id] = fullPath;
|
||||
}
|
||||
}
|
||||
}
|
||||
for (var key in schema)
|
||||
_resolveIds.call(this, schema[key], fullPath+'/'+util.escapeFragment(key), baseId);
|
||||
}
|
||||
}
|
||||
}
|
||||
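Aside: resolve.js is the piece of the vendored ajv that lets a $ref find a schema registered under an id (normalizeId strips a trailing '#', resolveUrl joins ids against the base). A minimal sketch of that behaviour through ajv's public 4.x-era API — the example id and schemas are hypothetical:

// Sketch only: assumes the ajv 4.x draft-04 API of the vendored copy above.
var Ajv = require('ajv');
var ajv = new Ajv();

// Registered under an id; resolve.js indexes it for later $ref lookups.
ajv.addSchema({ id: 'http://example.com/int.json', type: 'integer' });

// Compiling this schema exercises resolve()/resolveSchema() for the $ref.
var validate = ajv.compile({
  type: 'object',
  properties: { n: { $ref: 'http://example.com/int.json' } }
});

console.log(validate({ n: 5 }));   // true
console.log(validate({ n: 'x' })); // false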
40  pype/premiere/extensions/com.pond5.ppro/node_modules/ajv/lib/compile/rules.js  generated  vendored
@@ -1,40 +0,0 @@
'use strict';

var ruleModules = require('./_rules')
  , toHash = require('./util').toHash;

module.exports = function rules() {
  var RULES = [
    { type: 'number',
      rules: [ 'maximum', 'minimum', 'multipleOf'] },
    { type: 'string',
      rules: [ 'maxLength', 'minLength', 'pattern', 'format' ] },
    { type: 'array',
      rules: [ 'maxItems', 'minItems', 'uniqueItems', 'items' ] },
    { type: 'object',
      rules: [ 'maxProperties', 'minProperties', 'required', 'dependencies', 'properties' ] },
    { rules: [ '$ref', 'enum', 'not', 'anyOf', 'oneOf', 'allOf' ] }
  ];

  var ALL = [ 'type', 'additionalProperties', 'patternProperties' ];
  var KEYWORDS = [ 'additionalItems', '$schema', 'id', 'title', 'description', 'default' ];
  var TYPES = [ 'number', 'integer', 'string', 'array', 'object', 'boolean', 'null' ];
  RULES.all = toHash(ALL);

  RULES.forEach(function (group) {
    group.rules = group.rules.map(function (keyword) {
      ALL.push(keyword);
      var rule = RULES.all[keyword] = {
        keyword: keyword,
        code: ruleModules[keyword]
      };
      return rule;
    });
  });

  RULES.keywords = toHash(ALL.concat(KEYWORDS));
  RULES.types = toHash(TYPES);
  RULES.custom = {};

  return RULES;
};
9  pype/premiere/extensions/com.pond5.ppro/node_modules/ajv/lib/compile/schema_obj.js  generated  vendored
@@ -1,9 +0,0 @@
'use strict';

var util = require('./util');

module.exports = SchemaObject;

function SchemaObject(obj) {
  util.copy(obj, this);
}
20  pype/premiere/extensions/com.pond5.ppro/node_modules/ajv/lib/compile/ucs2length.js  generated  vendored
@@ -1,20 +0,0 @@
'use strict';

// https://mathiasbynens.be/notes/javascript-encoding
// https://github.com/bestiejs/punycode.js - punycode.ucs2.decode
module.exports = function ucs2length(str) {
  var length = 0
    , len = str.length
    , pos = 0
    , value;
  while (pos < len) {
    length++;
    value = str.charCodeAt(pos++);
    if (value >= 0xD800 && value <= 0xDBFF && pos < len) {
      // high surrogate, and there is a next character
      value = str.charCodeAt(pos);
      if ((value & 0xFC00) == 0xDC00) pos++; // low surrogate
    }
  }
  return length;
};
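Aside: ucs2length exists because String.prototype.length counts UTF-16 code units, while JSON Schema's maxLength/minLength are defined over code points. A quick check — the require path mirrors the vendored layout above:

var ucs2length = require('ajv/lib/compile/ucs2length');

var astral = '\uD835\uDCB3';      // one code point encoded as a surrogate pair
console.log(astral.length);       // 2 -- UTF-16 units
console.log(ucs2length(astral));  // 1 -- code points, as the spec requires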
257  pype/premiere/extensions/com.pond5.ppro/node_modules/ajv/lib/compile/util.js  generated  vendored
@@ -1,257 +0,0 @@
'use strict';


module.exports = {
  copy: copy,
  checkDataType: checkDataType,
  checkDataTypes: checkDataTypes,
  coerceToTypes: coerceToTypes,
  toHash: toHash,
  getProperty: getProperty,
  escapeQuotes: escapeQuotes,
  ucs2length: require('./ucs2length'),
  varOccurences: varOccurences,
  varReplace: varReplace,
  cleanUpCode: cleanUpCode,
  cleanUpVarErrors: cleanUpVarErrors,
  schemaHasRules: schemaHasRules,
  schemaHasRulesExcept: schemaHasRulesExcept,
  stableStringify: require('json-stable-stringify'),
  toQuotedString: toQuotedString,
  getPathExpr: getPathExpr,
  getPath: getPath,
  getData: getData,
  unescapeFragment: unescapeFragment,
  escapeFragment: escapeFragment,
  escapeJsonPointer: escapeJsonPointer
};


function copy(o, to) {
  to = to || {};
  for (var key in o) to[key] = o[key];
  return to;
}


function checkDataType(dataType, data, negate) {
  var EQUAL = negate ? ' !== ' : ' === '
    , AND = negate ? ' || ' : ' && '
    , OK = negate ? '!' : ''
    , NOT = negate ? '' : '!';
  switch (dataType) {
    case 'null': return data + EQUAL + 'null';
    case 'array': return OK + 'Array.isArray(' + data + ')';
    case 'object': return '(' + OK + data + AND +
                          'typeof ' + data + EQUAL + '"object"' + AND +
                          NOT + 'Array.isArray(' + data + '))';
    case 'integer': return '(typeof ' + data + EQUAL + '"number"' + AND +
                           NOT + '(' + data + ' % 1)' +
                           AND + data + EQUAL + data + ')';
    default: return 'typeof ' + data + EQUAL + '"' + dataType + '"';
  }
}


function checkDataTypes(dataTypes, data) {
  switch (dataTypes.length) {
    case 1: return checkDataType(dataTypes[0], data, true);
    default:
      var code = '';
      var types = toHash(dataTypes);
      if (types.array && types.object) {
        code = types.null ? '(': '(!' + data + ' || ';
        code += 'typeof ' + data + ' !== "object")';
        delete types.null;
        delete types.array;
        delete types.object;
      }
      if (types.number) delete types.integer;
      for (var t in types)
        code += (code ? ' && ' : '' ) + checkDataType(t, data, true);

      return code;
  }
}


var COERCE_TO_TYPES = toHash([ 'string', 'number', 'integer', 'boolean', 'null' ]);
function coerceToTypes(optionCoerceTypes, dataTypes) {
  if (Array.isArray(dataTypes)) {
    var types = [];
    for (var i=0; i<dataTypes.length; i++) {
      var t = dataTypes[i];
      if (COERCE_TO_TYPES[t]) types[types.length] = t;
      else if (optionCoerceTypes === 'array' && t === 'array') types[types.length] = t;
    }
    if (types.length) return types;
  } else if (COERCE_TO_TYPES[dataTypes]) {
    return [dataTypes];
  } else if (optionCoerceTypes === 'array' && dataTypes === 'array') {
    return ['array'];
  }
}


function toHash(arr) {
  var hash = {};
  for (var i=0; i<arr.length; i++) hash[arr[i]] = true;
  return hash;
}


var IDENTIFIER = /^[a-z$_][a-z$_0-9]*$/i;
var SINGLE_QUOTE = /'|\\/g;
function getProperty(key) {
  return typeof key == 'number'
          ? '[' + key + ']'
          : IDENTIFIER.test(key)
            ? '.' + key
            : "['" + escapeQuotes(key) + "']";
}


function escapeQuotes(str) {
  return str.replace(SINGLE_QUOTE, '\\$&')
            .replace(/\n/g, '\\n')
            .replace(/\r/g, '\\r')
            .replace(/\f/g, '\\f')
            .replace(/\t/g, '\\t');
}


function varOccurences(str, dataVar) {
  dataVar += '[^0-9]';
  var matches = str.match(new RegExp(dataVar, 'g'));
  return matches ? matches.length : 0;
}


function varReplace(str, dataVar, expr) {
  dataVar += '([^0-9])';
  expr = expr.replace(/\$/g, '$$$$');
  return str.replace(new RegExp(dataVar, 'g'), expr + '$1');
}


var EMPTY_ELSE = /else\s*{\s*}/g
  , EMPTY_IF_NO_ELSE = /if\s*\([^)]+\)\s*\{\s*\}(?!\s*else)/g
  , EMPTY_IF_WITH_ELSE = /if\s*\(([^)]+)\)\s*\{\s*\}\s*else(?!\s*if)/g;
function cleanUpCode(out) {
  return out.replace(EMPTY_ELSE, '')
            .replace(EMPTY_IF_NO_ELSE, '')
            .replace(EMPTY_IF_WITH_ELSE, 'if (!($1))');
}


var ERRORS_REGEXP = /[^v\.]errors/g
  , REMOVE_ERRORS = /var errors = 0;|var vErrors = null;|validate.errors = vErrors;/g
  , REMOVE_ERRORS_ASYNC = /var errors = 0;|var vErrors = null;/g
  , RETURN_VALID = 'return errors === 0;'
  , RETURN_TRUE = 'validate.errors = null; return true;'
  , RETURN_ASYNC = /if \(errors === 0\) return true;\s*else throw new ValidationError\(vErrors\);/
  , RETURN_TRUE_ASYNC = 'return true;';

function cleanUpVarErrors(out, async) {
  var matches = out.match(ERRORS_REGEXP);
  if (!matches || matches.length !== 2) return out;
  return async
          ? out.replace(REMOVE_ERRORS_ASYNC, '')
               .replace(RETURN_ASYNC, RETURN_TRUE_ASYNC)
          : out.replace(REMOVE_ERRORS, '')
               .replace(RETURN_VALID, RETURN_TRUE);
}


function schemaHasRules(schema, rules) {
  for (var key in schema) if (rules[key]) return true;
}


function schemaHasRulesExcept(schema, rules, exceptKeyword) {
  for (var key in schema) if (key != exceptKeyword && rules[key]) return true;
}


function toQuotedString(str) {
  return '\'' + escapeQuotes(str) + '\'';
}


function getPathExpr(currentPath, expr, jsonPointers, isNumber) {
  var path = jsonPointers // false by default
              ? '\'/\' + ' + expr + (isNumber ? '' : '.replace(/~/g, \'~0\').replace(/\\//g, \'~1\')')
              : (isNumber ? '\'[\' + ' + expr + ' + \']\'' : '\'[\\\'\' + ' + expr + ' + \'\\\']\'');
  return joinPaths(currentPath, path);
}


function getPath(currentPath, prop, jsonPointers) {
  var path = jsonPointers // false by default
              ? toQuotedString('/' + escapeJsonPointer(prop))
              : toQuotedString(getProperty(prop));
  return joinPaths(currentPath, path);
}


var JSON_POINTER = /^\/(?:[^~]|~0|~1)*$/;
var RELATIVE_JSON_POINTER = /^([0-9]+)(#|\/(?:[^~]|~0|~1)*)?$/;
function getData($data, lvl, paths) {
  var up, jsonPointer, data, matches;
  if ($data === '') return 'rootData';
  if ($data[0] == '/') {
    if (!JSON_POINTER.test($data)) throw new Error('Invalid JSON-pointer: ' + $data);
    jsonPointer = $data;
    data = 'rootData';
  } else {
    matches = $data.match(RELATIVE_JSON_POINTER);
    if (!matches) throw new Error('Invalid JSON-pointer: ' + $data);
    up = +matches[1];
    jsonPointer = matches[2];
    if (jsonPointer == '#') {
      if (up >= lvl) throw new Error('Cannot access property/index ' + up + ' levels up, current level is ' + lvl);
      return paths[lvl - up];
    }

    if (up > lvl) throw new Error('Cannot access data ' + up + ' levels up, current level is ' + lvl);
    data = 'data' + ((lvl - up) || '');
    if (!jsonPointer) return data;
  }

  var expr = data;
  var segments = jsonPointer.split('/');
  for (var i=0; i<segments.length; i++) {
    var segment = segments[i];
    if (segment) {
      data += getProperty(unescapeJsonPointer(segment));
      expr += ' && ' + data;
    }
  }
  return expr;
}


function joinPaths (a, b) {
  if (a == '""') return b;
  return (a + ' + ' + b).replace(/' \+ '/g, '');
}


function unescapeFragment(str) {
  return unescapeJsonPointer(decodeURIComponent(str));
}


function escapeFragment(str) {
  return encodeURIComponent(escapeJsonPointer(str));
}


function escapeJsonPointer(str) {
  return str.replace(/~/g, '~0').replace(/\//g, '~1');
}


function unescapeJsonPointer(str) {
  return str.replace(/~1/g, '/').replace(/~0/g, '~');
}
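Aside: most of util.js builds JavaScript source as strings for the code generator; getProperty picks dot vs. bracket access, and escapeJsonPointer/unescapeJsonPointer implement RFC 6901 escaping. A short demonstration, assuming the vendored module layout above:

var util = require('ajv/lib/compile/util');

console.log(util.getProperty('foo'));          // ".foo"    -- valid identifier, dot access
console.log(util.getProperty('a b'));          // "['a b']" -- falls back to bracket access
console.log(util.getProperty(3));              // "[3]"
console.log(util.escapeJsonPointer('a/b~c'));  // "a~1b~0c" -- RFC 6901: '~' -> '~0', '/' -> '~1'

// getData turns a relative JSON pointer into a guarded property expression:
console.log(util.getData('2/foo', 3, []));     // "data1 && data1.foo"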
14  pype/premiere/extensions/com.pond5.ppro/node_modules/ajv/lib/compile/validation_error.js  generated  vendored
@@ -1,14 +0,0 @@
'use strict';

module.exports = ValidationError;


function ValidationError(errors) {
  this.message = 'validation failed';
  this.errors = errors;
  this.ajv = this.validation = true;
}


ValidationError.prototype = Object.create(Error.prototype);
ValidationError.prototype.constructor = ValidationError;
49  pype/premiere/extensions/com.pond5.ppro/node_modules/ajv/lib/dot/_limit.jst  generated  vendored
@@ -1,49 +0,0 @@
{{# def.definitions }}
{{# def.errors }}
{{# def.setupKeyword }}
{{# def.$data }}

{{
  var $isMax = $keyword == 'maximum'
    , $exclusiveKeyword = $isMax ? 'exclusiveMaximum' : 'exclusiveMinimum'
    , $schemaExcl = it.schema[$exclusiveKeyword]
    , $isDataExcl = it.opts.v5 && $schemaExcl && $schemaExcl.$data
    , $op = $isMax ? '<' : '>'
    , $notOp = $isMax ? '>' : '<';
}}

{{? $isDataExcl }}
  {{
    var $schemaValueExcl = it.util.getData($schemaExcl.$data, $dataLvl, it.dataPathArr)
      , $exclusive = 'exclusive' + $lvl
      , $opExpr = 'op' + $lvl
      , $opStr = '\' + ' + $opExpr + ' + \'';
  }}
  var schemaExcl{{=$lvl}} = {{=$schemaValueExcl}};
  {{ $schemaValueExcl = 'schemaExcl' + $lvl; }}

  var exclusive{{=$lvl}};
  if (typeof {{=$schemaValueExcl}} != 'boolean' && typeof {{=$schemaValueExcl}} != 'undefined') {
    {{ var $errorKeyword = $exclusiveKeyword; }}
    {{# def.error:'_exclusiveLimit' }}
  } else if({{# def.$dataNotType:'number' }}
            ((exclusive{{=$lvl}} = {{=$schemaValueExcl}} === true)
              ? {{=$data}} {{=$notOp}}= {{=$schemaValue}}
              : {{=$data}} {{=$notOp}} {{=$schemaValue}})
            || {{=$data}} !== {{=$data}}) {
    var op{{=$lvl}} = exclusive{{=$lvl}} ? '{{=$op}}' : '{{=$op}}=';
{{??}}
  {{
    var $exclusive = $schemaExcl === true
      , $opStr = $op; /*used in error*/
    if (!$exclusive) $opStr += '=';
    var $opExpr = '\'' + $opStr + '\''; /*used in error*/
  }}

  if ({{# def.$dataNotType:'number' }}
      {{=$data}} {{=$notOp}}{{?$exclusive}}={{?}} {{=$schemaValue}}
      || {{=$data}} !== {{=$data}}) {
{{?}}
  {{ var $errorKeyword = $keyword; }}
  {{# def.error:'_limit' }}
} {{? $breakOnError }} else { {{?}}
10  pype/premiere/extensions/com.pond5.ppro/node_modules/ajv/lib/dot/_limitItems.jst  generated  vendored
@@ -1,10 +0,0 @@
{{# def.definitions }}
{{# def.errors }}
{{# def.setupKeyword }}
{{# def.$data }}

{{ var $op = $keyword == 'maxItems' ? '>' : '<'; }}
if ({{# def.$dataNotType:'number' }} {{=$data}}.length {{=$op}} {{=$schemaValue}}) {
  {{ var $errorKeyword = $keyword; }}
  {{# def.error:'_limitItems' }}
} {{? $breakOnError }} else { {{?}}
10  pype/premiere/extensions/com.pond5.ppro/node_modules/ajv/lib/dot/_limitLength.jst  generated  vendored
@@ -1,10 +0,0 @@
{{# def.definitions }}
{{# def.errors }}
{{# def.setupKeyword }}
{{# def.$data }}

{{ var $op = $keyword == 'maxLength' ? '>' : '<'; }}
if ({{# def.$dataNotType:'number' }} {{# def.strLength }} {{=$op}} {{=$schemaValue}}) {
  {{ var $errorKeyword = $keyword; }}
  {{# def.error:'_limitLength' }}
} {{? $breakOnError }} else { {{?}}
10  pype/premiere/extensions/com.pond5.ppro/node_modules/ajv/lib/dot/_limitProperties.jst  generated  vendored
@@ -1,10 +0,0 @@
{{# def.definitions }}
{{# def.errors }}
{{# def.setupKeyword }}
{{# def.$data }}

{{ var $op = $keyword == 'maxProperties' ? '>' : '<'; }}
if ({{# def.$dataNotType:'number' }} Object.keys({{=$data}}).length {{=$op}} {{=$schemaValue}}) {
  {{ var $errorKeyword = $keyword; }}
  {{# def.error:'_limitProperties' }}
} {{? $breakOnError }} else { {{?}}
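Aside: the four _limit* templates all expand to a single generated if statement. Note that _limitLength goes through def.strLength, i.e. ucs2length by default, so maxLength counts code points. A sketch against the public API:

var Ajv = require('ajv');
var ajv = new Ajv();
var validate = ajv.compile({ type: 'string', maxLength: 3 });

console.log(validate('abc'));   // true
console.log(validate('abcd'));  // false
console.log(validate('\uD835\uDCB3\uD835\uDCB3\uD835\uDCB3')); // true: 3 code points, 6 UTF-16 units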
34  pype/premiere/extensions/com.pond5.ppro/node_modules/ajv/lib/dot/allOf.jst  generated  vendored
@@ -1,34 +0,0 @@
{{# def.definitions }}
{{# def.errors }}
{{# def.setupKeyword }}
{{# def.setupNextLevel }}

{{
  var $currentBaseId = $it.baseId
    , $allSchemasEmpty = true;
}}

{{~ $schema:$sch:$i }}
  {{? {{# def.nonEmptySchema:$sch }} }}
    {{
      $allSchemasEmpty = false;
      $it.schema = $sch;
      $it.schemaPath = $schemaPath + '[' + $i + ']';
      $it.errSchemaPath = $errSchemaPath + '/' + $i;
    }}

    {{# def.insertSubschemaCode }}

    {{# def.ifResultValid }}
  {{?}}
{{~}}

{{? $breakOnError }}
  {{? $allSchemasEmpty }}
    if (true) {
  {{??}}
    {{= $closingBraces.slice(0,-1) }}
  {{?}}
{{?}}

{{# def.cleanUp }}
48  pype/premiere/extensions/com.pond5.ppro/node_modules/ajv/lib/dot/anyOf.jst  generated  vendored
@@ -1,48 +0,0 @@
{{# def.definitions }}
{{# def.errors }}
{{# def.setupKeyword }}
{{# def.setupNextLevel }}

{{
  var $noEmptySchema = $schema.every(function($sch) {
    return {{# def.nonEmptySchema:$sch }};
  });
}}
{{? $noEmptySchema }}
  {{ var $currentBaseId = $it.baseId; }}
  var {{=$errs}} = errors;
  var {{=$valid}} = false;

  {{# def.setCompositeRule }}

  {{~ $schema:$sch:$i }}
    {{
      $it.schema = $sch;
      $it.schemaPath = $schemaPath + '[' + $i + ']';
      $it.errSchemaPath = $errSchemaPath + '/' + $i;
    }}

    {{# def.insertSubschemaCode }}

    {{=$valid}} = {{=$valid}} || {{=$nextValid}};

    if (!{{=$valid}}) {
    {{ $closingBraces += '}'; }}
  {{~}}

  {{# def.resetCompositeRule }}

  {{= $closingBraces }}

  if (!{{=$valid}}) {
    {{# def.addError:'anyOf' }}
  } else {
    {{# def.resetErrors }}
  {{? it.opts.allErrors }} } {{?}}

  {{# def.cleanUp }}
{{??}}
  {{? $breakOnError }}
    if (true) {
  {{?}}
{{?}}
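Aside: the generated anyOf code snapshots the error count ({{=$errs}}) and calls def.resetErrors once a branch passes, so errors from failed branches do not leak into the result. Sketch:

var validate = ajv.compile({
  anyOf: [ { type: 'number' }, { type: 'string', maxLength: 3 } ]
});

console.log(validate('ab'));    // true  -- second branch matched, branch errors discarded
console.log(validate('abcd'));  // false -- validate.errors keeps both branch errors plus 'anyOf'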
61  pype/premiere/extensions/com.pond5.ppro/node_modules/ajv/lib/dot/coerce.def  generated  vendored
@@ -1,61 +0,0 @@
{{## def.coerceType:
  {{
    var $dataType = 'dataType' + $lvl
      , $coerced = 'coerced' + $lvl;
  }}
  var {{=$dataType}} = typeof {{=$data}};
  {{? it.opts.coerceTypes == 'array'}}
    if ({{=$dataType}} == 'object' && Array.isArray({{=$data}})) {{=$dataType}} = 'array';
  {{?}}

  var {{=$coerced}} = undefined;

  {{ var $bracesCoercion = ''; }}
  {{~ $coerceToTypes:$type:$i }}
    {{? $i }}
      if ({{=$coerced}} === undefined) {
      {{ $bracesCoercion += '}'; }}
    {{?}}

    {{? it.opts.coerceTypes == 'array' && $type != 'array' }}
      if ({{=$dataType}} == 'array' && {{=$data}}.length == 1) {
        {{=$coerced}} = {{=$data}} = {{=$data}}[0];
        {{=$dataType}} = typeof {{=$data}};
        /*if ({{=$dataType}} == 'object' && Array.isArray({{=$data}})) {{=$dataType}} = 'array';*/
      }
    {{?}}

    {{? $type == 'string' }}
      if ({{=$dataType}} == 'number' || {{=$dataType}} == 'boolean')
        {{=$coerced}} = '' + {{=$data}};
      else if ({{=$data}} === null) {{=$coerced}} = '';
    {{?? $type == 'number' || $type == 'integer' }}
      if ({{=$dataType}} == 'boolean' || {{=$data}} === null
          || ({{=$dataType}} == 'string' && {{=$data}} && {{=$data}} == +{{=$data}}
          {{? $type == 'integer' }} && !({{=$data}} % 1){{?}}))
        {{=$coerced}} = +{{=$data}};
    {{?? $type == 'boolean' }}
      if ({{=$data}} === 'false' || {{=$data}} === 0 || {{=$data}} === null)
        {{=$coerced}} = false;
      else if ({{=$data}} === 'true' || {{=$data}} === 1)
        {{=$coerced}} = true;
    {{?? $type == 'null' }}
      if ({{=$data}} === '' || {{=$data}} === 0 || {{=$data}} === false)
        {{=$coerced}} = null;
    {{?? it.opts.coerceTypes == 'array' && $type == 'array' }}
      if ({{=$dataType}} == 'string' || {{=$dataType}} == 'number' || {{=$dataType}} == 'boolean' || {{=$data}} == null)
        {{=$coerced}} = [{{=$data}}];
    {{?}}
  {{~}}

  {{= $bracesCoercion }}

  if ({{=$coerced}} === undefined) {
    {{# def.error:'type' }}
  } else {
    {{# def.setParentData }}
    {{=$data}} = {{=$coerced}};
    {{? !$dataLvl }}if ({{=$parentData}} !== undefined){{?}}
      {{=$parentData}}[{{=$parentDataProperty}}] = {{=$coerced}};
  }
#}}
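Aside: coerce.def only runs when the coerceTypes option is on; it writes the coerced value back through $parentData[$parentDataProperty], so coercion mutates the input in place. Sketch:

var ajv = new Ajv({ coerceTypes: true });
var validate = ajv.compile({
  type: 'object',
  properties: { n: { type: 'number' } }
});

var data = { n: '42' };
console.log(validate(data)); // true
console.log(data.n);         // 42 -- '42' was coerced and written back into data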
184  pype/premiere/extensions/com.pond5.ppro/node_modules/ajv/lib/dot/custom.jst  generated  vendored
@@ -1,184 +0,0 @@
{{# def.definitions }}
{{# def.errors }}
{{# def.setupKeyword }}
{{# def.$data }}

{{
  var $rule = this
    , $definition = 'definition' + $lvl
    , $rDef = $rule.definition;
  var $validate = $rDef.validate;
  var $compile, $inline, $macro, $ruleValidate, $validateCode;
}}

{{? $isData && $rDef.$data }}
  {{
    $validateCode = 'keywordValidate' + $lvl;
    var $validateSchema = $rDef.validateSchema;
  }}
  var {{=$definition}} = RULES.custom['{{=$keyword}}'].definition;
  var {{=$validateCode}} = {{=$definition}}.validate;
{{??}}
  {{
    $ruleValidate = it.useCustomRule($rule, $schema, it.schema, it);
    $schemaValue = 'validate.schema' + $schemaPath;
    $validateCode = $ruleValidate.code;
    $compile = $rDef.compile;
    $inline = $rDef.inline;
    $macro = $rDef.macro;
  }}
{{?}}

{{
  var $ruleErrs = $validateCode + '.errors'
    , $i = 'i' + $lvl
    , $ruleErr = 'ruleErr' + $lvl
    , $asyncKeyword = $rDef.async;

  if ($asyncKeyword && !it.async)
    throw new Error('async keyword in sync schema');
}}


{{? !($inline || $macro) }}{{=$ruleErrs}} = null;{{?}}
var {{=$errs}} = errors;
var {{=$valid}};

{{## def.callRuleValidate:
  {{=$validateCode}}.call(
    {{? it.opts.passContext }}this{{??}}self{{?}}
    {{? $compile || $rDef.schema === false }}
      , {{=$data}}
    {{??}}
      , {{=$schemaValue}}
      , {{=$data}}
      , validate.schema{{=it.schemaPath}}
    {{?}}
    , {{# def.dataPath }}
    {{# def.passParentData }}
    , rootData
  )
#}}

{{## def.extendErrors:_inline:
  for (var {{=$i}}={{=$errs}}; {{=$i}}<errors; {{=$i}}++) {
    var {{=$ruleErr}} = vErrors[{{=$i}}];
    if ({{=$ruleErr}}.dataPath === undefined)
      {{=$ruleErr}}.dataPath = (dataPath || '') + {{= it.errorPath }};
    {{# _inline ? 'if (\{\{=$ruleErr\}\}.schemaPath === undefined) {' : '' }}
      {{=$ruleErr}}.schemaPath = "{{=$errSchemaPath}}";
    {{# _inline ? '}' : '' }}
    {{? it.opts.verbose }}
      {{=$ruleErr}}.schema = {{=$schemaValue}};
      {{=$ruleErr}}.data = {{=$data}};
    {{?}}
  }
#}}


{{? $validateSchema }}
  {{=$valid}} = {{=$definition}}.validateSchema({{=$schemaValue}});
  if ({{=$valid}}) {
{{?}}

{{? $inline }}
  {{? $rDef.statements }}
    {{= $ruleValidate.validate }}
  {{??}}
    {{=$valid}} = {{= $ruleValidate.validate }};
  {{?}}
{{?? $macro }}
  {{# def.setupNextLevel }}
  {{
    $it.schema = $ruleValidate.validate;
    $it.schemaPath = '';
  }}
  {{# def.setCompositeRule }}
  {{ var $code = it.validate($it).replace(/validate\.schema/g, $validateCode); }}
  {{# def.resetCompositeRule }}
  {{= $code }}
{{??}}
  {{# def.beginDefOut}}
  {{# def.callRuleValidate }}
  {{# def.storeDefOut:def_callRuleValidate }}

  {{? $rDef.errors === false }}
    {{=$valid}} = {{? $asyncKeyword }}{{=it.yieldAwait}}{{?}}{{= def_callRuleValidate }};
  {{??}}
    {{? $asyncKeyword }}
      {{ $ruleErrs = 'customErrors' + $lvl; }}
      var {{=$ruleErrs}} = null;
      try {
        {{=$valid}} = {{=it.yieldAwait}}{{= def_callRuleValidate }};
      } catch (e) {
        {{=$valid}} = false;
        if (e instanceof ValidationError) {{=$ruleErrs}} = e.errors;
        else throw e;
      }
    {{??}}
      {{=$ruleErrs}} = null;
      {{=$valid}} = {{= def_callRuleValidate }};
    {{?}}
  {{?}}
{{?}}

{{? $rDef.modifying }}
  {{=$data}} = {{=$parentData}}[{{=$parentDataProperty}}];
{{?}}

{{? $validateSchema }}
  }
{{?}}

{{## def.notValidationResult:
  {{? $rDef.valid === undefined }}
    !{{? $macro }}{{=$nextValid}}{{??}}{{=$valid}}{{?}}
  {{??}}
    {{= !$rDef.valid }}
  {{?}}
#}}

{{? $rDef.valid }}
  {{? $breakOnError }} if (true) { {{?}}
{{??}}
  if ({{# def.notValidationResult }}) {
    {{ $errorKeyword = $rule.keyword; }}
    {{# def.beginDefOut}}
    {{# def.error:'custom' }}
    {{# def.storeDefOut:def_customError }}

    {{? $inline }}
      {{? $rDef.errors }}
        {{? $rDef.errors != 'full' }}
          {{# def.extendErrors:true }}
        {{?}}
      {{??}}
        {{? $rDef.errors === false}}
          {{= def_customError }}
        {{??}}
          if ({{=$errs}} == errors) {
            {{= def_customError }}
          } else {
            {{# def.extendErrors:true }}
          }
        {{?}}
      {{?}}
    {{?? $macro }}
      {{# def.extraError:'custom' }}
    {{??}}
      {{? $rDef.errors === false}}
        {{= def_customError }}
      {{??}}
        if (Array.isArray({{=$ruleErrs}})) {
          if (vErrors === null) vErrors = {{=$ruleErrs}};
          else vErrors = vErrors.concat({{=$ruleErrs}});
          errors = vErrors.length;
          {{# def.extendErrors:false }}
        } else {
          {{= def_customError }}
        }
      {{?}}
    {{?}}

  } {{? $breakOnError }} else { {{?}}
{{?}}
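Aside: custom.jst is the code path behind addKeyword; the definition may supply validate, compile, inline or macro, and the template picks the matching branch. A minimal compile-style keyword — the 'range' keyword here is hypothetical:

ajv.addKeyword('range', {
  type: 'number',
  compile: function (schema) {        // schema is e.g. [2, 4]
    return function (data) {
      return data >= schema[0] && data <= schema[1];
    };
  }
});

var validate = ajv.compile({ type: 'number', range: [2, 4] });
console.log(validate(3)); // true
console.log(validate(5)); // false -- reported with keyword 'range' via the 'custom' error template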
32  pype/premiere/extensions/com.pond5.ppro/node_modules/ajv/lib/dot/defaults.def  generated  vendored
@@ -1,32 +0,0 @@
{{## def.assignDefault:
  if ({{=$passData}} === undefined)
    {{=$passData}} = {{? it.opts.useDefaults == 'shared' }}
                       {{= it.useDefault($sch.default) }}
                     {{??}}
                       {{= JSON.stringify($sch.default) }}
                     {{?}};
#}}


{{## def.defaultProperties:
  {{
    var $schema = it.schema.properties
      , $schemaKeys = Object.keys($schema); }}
  {{~ $schemaKeys:$propertyKey }}
    {{ var $sch = $schema[$propertyKey]; }}
    {{? $sch.default !== undefined }}
      {{ var $passData = $data + it.util.getProperty($propertyKey); }}
      {{# def.assignDefault }}
    {{?}}
  {{~}}
#}}


{{## def.defaultItems:
  {{~ it.schema.items:$sch:$i }}
    {{? $sch.default !== undefined }}
      {{ var $passData = $data + '[' + $i + ']'; }}
      {{# def.assignDefault }}
    {{?}}
  {{~}}
#}}
182  pype/premiere/extensions/com.pond5.ppro/node_modules/ajv/lib/dot/definitions.def  generated  vendored
@@ -1,182 +0,0 @@
{{## def.setupKeyword:
  {{
    var $lvl = it.level;
    var $dataLvl = it.dataLevel;
    var $schema = it.schema[$keyword];
    var $schemaPath = it.schemaPath + it.util.getProperty($keyword);
    var $errSchemaPath = it.errSchemaPath + '/' + $keyword;
    var $breakOnError = !it.opts.allErrors;
    var $errorKeyword;

    var $data = 'data' + ($dataLvl || '');
    var $valid = 'valid' + $lvl;
    var $errs = 'errs__' + $lvl;
  }}
#}}


{{## def.setCompositeRule:
  {{
    var $wasComposite = it.compositeRule;
    it.compositeRule = $it.compositeRule = true;
  }}
#}}


{{## def.resetCompositeRule:
  {{ it.compositeRule = $it.compositeRule = $wasComposite; }}
#}}


{{## def.setupNextLevel:
  {{
    var $it = it.util.copy(it);
    var $closingBraces = '';
    $it.level++;
    var $nextValid = 'valid' + $it.level;
  }}
#}}


{{## def.ifValid:
  {{? $breakOnError }}
    if ({{=$valid}}) {
    {{ $closingBraces += '}'; }}
  {{?}}
#}}


{{## def.ifResultValid:
  {{? $breakOnError }}
    if ({{=$nextValid}}) {
    {{ $closingBraces += '}'; }}
  {{?}}
#}}


{{## def.elseIfValid:
  {{? $breakOnError }}
    {{ $closingBraces += '}'; }}
    else {
  {{?}}
#}}


{{## def.nonEmptySchema:_schema:
  it.util.schemaHasRules(_schema, it.RULES.all)
#}}


{{## def.strLength:
  {{? it.opts.unicode === false }}
    {{=$data}}.length
  {{??}}
    ucs2length({{=$data}})
  {{?}}
#}}


{{## def.willOptimize:
  it.util.varOccurences($code, $nextData) < 2
#}}


{{## def.generateSubschemaCode:
  {{
    var $code = it.validate($it);
    $it.baseId = $currentBaseId;
  }}
#}}


{{## def.insertSubschemaCode:
  {{= it.validate($it) }}
  {{ $it.baseId = $currentBaseId; }}
#}}


{{## def._optimizeValidate:
  it.util.varReplace($code, $nextData, $passData)
#}}


{{## def.optimizeValidate:
  {{? {{# def.willOptimize}} }}
    {{= {{# def._optimizeValidate }} }}
  {{??}}
    var {{=$nextData}} = {{=$passData}};
    {{= $code }}
  {{?}}
#}}


{{## def.cleanUp: {{ out = it.util.cleanUpCode(out); }} #}}


{{## def.cleanUpVarErrors: {{ out = it.util.cleanUpVarErrors(out, $async); }} #}}


{{## def.$data:
  {{
    var $isData = it.opts.v5 && $schema && $schema.$data
      , $schemaValue;
  }}
  {{? $isData }}
    var schema{{=$lvl}} = {{= it.util.getData($schema.$data, $dataLvl, it.dataPathArr) }};
    {{ $schemaValue = 'schema' + $lvl; }}
  {{??}}
    {{ $schemaValue = $schema; }}
  {{?}}
#}}


{{## def.$dataNotType:_type:
  {{?$isData}} ({{=$schemaValue}} !== undefined && typeof {{=$schemaValue}} != _type) || {{?}}
#}}


{{## def.check$dataIsArray:
  if (schema{{=$lvl}} === undefined) {{=$valid}} = true;
  else if (!Array.isArray(schema{{=$lvl}})) {{=$valid}} = false;
  else {
#}}


{{## def.beginDefOut:
  {{
    var $$outStack = $$outStack || [];
    $$outStack.push(out);
    out = '';
  }}
#}}


{{## def.storeDefOut:_variable:
  {{
    var _variable = out;
    out = $$outStack.pop();
  }}
#}}


{{## def.dataPath:(dataPath || ''){{? it.errorPath != '""'}} + {{= it.errorPath }}{{?}}#}}

{{## def.setParentData:
  {{
    var $parentData = $dataLvl ? 'data' + (($dataLvl-1)||'') : 'parentData'
      , $parentDataProperty = $dataLvl ? it.dataPathArr[$dataLvl] : 'parentDataProperty';
  }}
#}}

{{## def.passParentData:
  {{# def.setParentData }}
  , {{= $parentData }}
  , {{= $parentDataProperty }}
#}}


{{## def.checkOwnProperty:
  {{? $ownProperties }}
    if (!Object.prototype.hasOwnProperty.call({{=$data}}, {{=$key}})) continue;
  {{?}}
#}}
69  pype/premiere/extensions/com.pond5.ppro/node_modules/ajv/lib/dot/dependencies.jst  generated  vendored
@@ -1,69 +0,0 @@
{{# def.definitions }}
{{# def.errors }}
{{# def.missing }}
{{# def.setupKeyword }}
{{# def.setupNextLevel }}


{{
  var $schemaDeps = {}
    , $propertyDeps = {};

  for ($property in $schema) {
    var $sch = $schema[$property];
    var $deps = Array.isArray($sch) ? $propertyDeps : $schemaDeps;
    $deps[$property] = $sch;
  }
}}

var {{=$errs}} = errors;

{{ var $currentErrorPath = it.errorPath; }}

var missing{{=$lvl}};
{{ for (var $property in $propertyDeps) { }}
  {{ $deps = $propertyDeps[$property]; }}
  if ({{=$data}}{{= it.util.getProperty($property) }} !== undefined
    {{? $breakOnError }}
      && ({{# def.checkMissingProperty:$deps }})) {
        {{# def.errorMissingProperty:'dependencies' }}
    {{??}}
      ) {
        {{~ $deps:$reqProperty }}
          {{# def.allErrorsMissingProperty:'dependencies' }}
        {{~}}
    {{?}}
  } {{# def.elseIfValid }}
{{ } }}

{{
  it.errorPath = $currentErrorPath;
  var $currentBaseId = $it.baseId;
}}


{{ for (var $property in $schemaDeps) { }}
  {{ var $sch = $schemaDeps[$property]; }}
  {{? {{# def.nonEmptySchema:$sch }} }}
    {{=$nextValid}} = true;

    if ({{=$data}}{{= it.util.getProperty($property) }} !== undefined) {
      {{
        $it.schema = $sch;
        $it.schemaPath = $schemaPath + it.util.getProperty($property);
        $it.errSchemaPath = $errSchemaPath + '/' + it.util.escapeFragment($property);
      }}

      {{# def.insertSubschemaCode }}
    }

    {{# def.ifResultValid }}
  {{?}}
{{ } }}

{{? $breakOnError }}
  {{= $closingBraces }}
  if ({{=$errs}} == errors) {
{{?}}

{{# def.cleanUp }}
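Aside: the template above splits the keyword into $propertyDeps (array values: the listed properties become required) and $schemaDeps (object values: the subschema must hold when the property is present). Property-dependency sketch:

var validate = ajv.compile({
  type: 'object',
  dependencies: { creditCard: ['billingAddress'] }
});

console.log(validate({ name: 'a' }));                               // true  -- creditCard absent
console.log(validate({ creditCard: '4111' }));                      // false -- billingAddress missing
console.log(validate({ creditCard: '4111', billingAddress: 'x' })); // true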
30  pype/premiere/extensions/com.pond5.ppro/node_modules/ajv/lib/dot/enum.jst  generated  vendored
@@ -1,30 +0,0 @@
{{# def.definitions }}
{{# def.errors }}
{{# def.setupKeyword }}
{{# def.$data }}

{{
  var $i = 'i' + $lvl
    , $vSchema = 'schema' + $lvl;
}}

{{? !$isData }}
  var {{=$vSchema}} = validate.schema{{=$schemaPath}};
{{?}}
var {{=$valid}};

{{?$isData}}{{# def.check$dataIsArray }}{{?}}

{{=$valid}} = false;

for (var {{=$i}}=0; {{=$i}}<{{=$vSchema}}.length; {{=$i}}++)
  if (equal({{=$data}}, {{=$vSchema}}[{{=$i}}])) {
    {{=$valid}} = true;
    break;
  }

{{? $isData }} } {{?}}

{{# def.checkError:'enum' }}

{{? $breakOnError }} else { {{?}}
185  pype/premiere/extensions/com.pond5.ppro/node_modules/ajv/lib/dot/errors.def  generated  vendored
@@ -1,185 +0,0 @@
{{# def.definitions }}

{{## def._error:_rule:
  {{ 'istanbul ignore else'; }}
  {{? it.createErrors !== false }}
    {
      keyword: '{{= $errorKeyword || _rule }}'
      , dataPath: (dataPath || '') + {{= it.errorPath }}
      , schemaPath: {{=it.util.toQuotedString($errSchemaPath)}}
      , params: {{# def._errorParams[_rule] }}
      {{? it.opts.messages !== false }}
        , message: {{# def._errorMessages[_rule] }}
      {{?}}
      {{? it.opts.verbose }}
        , schema: {{# def._errorSchemas[_rule] }}
        , parentSchema: validate.schema{{=it.schemaPath}}
        , data: {{=$data}}
      {{?}}
    }
  {{??}}
    {}
  {{?}}
#}}


{{## def._addError:_rule:
  if (vErrors === null) vErrors = [err];
  else vErrors.push(err);
  errors++;
#}}


{{## def.addError:_rule:
  var err = {{# def._error:_rule }};
  {{# def._addError:_rule }}
#}}


{{## def.error:_rule:
  {{# def.beginDefOut}}
  {{# def._error:_rule }}
  {{# def.storeDefOut:__err }}

  {{? !it.compositeRule && $breakOnError }}
    {{ 'istanbul ignore if'; }}
    {{? it.async }}
      throw new ValidationError([{{=__err}}]);
    {{??}}
      validate.errors = [{{=__err}}];
      return false;
    {{?}}
  {{??}}
    var err = {{=__err}};
    {{# def._addError:_rule }}
  {{?}}
#}}


{{## def.extraError:_rule:
  {{# def.addError:_rule}}
  {{? !it.compositeRule && $breakOnError }}
    {{ 'istanbul ignore if'; }}
    {{? it.async }}
      throw new ValidationError(vErrors);
    {{??}}
      validate.errors = vErrors;
      return false;
    {{?}}
  {{?}}
#}}


{{## def.checkError:_rule:
  if (!{{=$valid}}) {
    {{# def.error:_rule }}
  }
#}}


{{## def.resetErrors:
  errors = {{=$errs}};
  if (vErrors !== null) {
    if ({{=$errs}}) vErrors.length = {{=$errs}};
    else vErrors = null;
  }
#}}


{{## def.concatSchema:{{?$isData}}' + {{=$schemaValue}} + '{{??}}{{=$schema}}{{?}}#}}
{{## def.appendSchema:{{?$isData}}' + {{=$schemaValue}}{{??}}{{=$schema}}'{{?}}#}}
{{## def.concatSchemaEQ:{{?$isData}}' + {{=$schemaValue}} + '{{??}}{{=it.util.escapeQuotes($schema)}}{{?}}#}}

{{## def._errorMessages = {
  $ref: "'can\\\'t resolve reference {{=it.util.escapeQuotes($schema)}}'",
  additionalItems: "'should NOT have more than {{=$schema.length}} items'",
  additionalProperties: "'should NOT have additional properties'",
  anyOf: "'should match some schema in anyOf'",
  dependencies: "'should have {{? $deps.length == 1 }}property {{= it.util.escapeQuotes($deps[0]) }}{{??}}properties {{= it.util.escapeQuotes($deps.join(\", \")) }}{{?}} when property {{= it.util.escapeQuotes($property) }} is present'",
  'enum': "'should be equal to one of the allowed values'",
  format: "'should match format \"{{#def.concatSchemaEQ}}\"'",
  _limit: "'should be {{=$opStr}} {{#def.appendSchema}}",
  _exclusiveLimit: "'{{=$exclusiveKeyword}} should be boolean'",
  _limitItems: "'should NOT have {{?$keyword=='maxItems'}}more{{??}}less{{?}} than {{#def.concatSchema}} items'",
  _limitLength: "'should NOT be {{?$keyword=='maxLength'}}longer{{??}}shorter{{?}} than {{#def.concatSchema}} characters'",
  _limitProperties:"'should NOT have {{?$keyword=='maxProperties'}}more{{??}}less{{?}} than {{#def.concatSchema}} properties'",
  multipleOf: "'should be multiple of {{#def.appendSchema}}",
  not: "'should NOT be valid'",
  oneOf: "'should match exactly one schema in oneOf'",
  pattern: "'should match pattern \"{{#def.concatSchemaEQ}}\"'",
  required: "'{{? it.opts._errorDataPathProperty }}is a required property{{??}}should have required property \\'{{=$missingProperty}}\\'{{?}}'",
  type: "'should be {{? $typeIsArray }}{{= $typeSchema.join(\",\") }}{{??}}{{=$typeSchema}}{{?}}'",
  uniqueItems: "'should NOT have duplicate items (items ## ' + j + ' and ' + i + ' are identical)'",
  custom: "'should pass \"{{=$rule.keyword}}\" keyword validation'",
  patternGroups: "'should NOT have {{=$moreOrLess}} than {{=$limit}} properties matching pattern \"{{=it.util.escapeQuotes($pgProperty)}}\"'",
  patternRequired: "'should have property matching pattern \\'{{=$missingPattern}}\\''",
  switch: "'should pass \"switch\" keyword validation'",
  constant: "'should be equal to constant'",
  _formatLimit: "'should be {{=$opStr}} \"{{#def.concatSchemaEQ}}\"'",
  _formatExclusiveLimit: "'{{=$exclusiveKeyword}} should be boolean'"
} #}}


{{## def.schemaRefOrVal: {{?$isData}}validate.schema{{=$schemaPath}}{{??}}{{=$schema}}{{?}} #}}
{{## def.schemaRefOrQS: {{?$isData}}validate.schema{{=$schemaPath}}{{??}}{{=it.util.toQuotedString($schema)}}{{?}} #}}

{{## def._errorSchemas = {
  $ref: "{{=it.util.toQuotedString($schema)}}",
  additionalItems: "false",
  additionalProperties: "false",
  anyOf: "validate.schema{{=$schemaPath}}",
  dependencies: "validate.schema{{=$schemaPath}}",
  'enum': "validate.schema{{=$schemaPath}}",
  format: "{{#def.schemaRefOrQS}}",
  _limit: "{{#def.schemaRefOrVal}}",
  _exclusiveLimit: "validate.schema{{=$schemaPath}}",
  _limitItems: "{{#def.schemaRefOrVal}}",
  _limitLength: "{{#def.schemaRefOrVal}}",
  _limitProperties:"{{#def.schemaRefOrVal}}",
  multipleOf: "{{#def.schemaRefOrVal}}",
  not: "validate.schema{{=$schemaPath}}",
  oneOf: "validate.schema{{=$schemaPath}}",
  pattern: "{{#def.schemaRefOrQS}}",
  required: "validate.schema{{=$schemaPath}}",
  type: "validate.schema{{=$schemaPath}}",
  uniqueItems: "{{#def.schemaRefOrVal}}",
  custom: "validate.schema{{=$schemaPath}}",
  patternGroups: "validate.schema{{=$schemaPath}}",
  patternRequired: "validate.schema{{=$schemaPath}}",
  switch: "validate.schema{{=$schemaPath}}",
  constant: "validate.schema{{=$schemaPath}}",
  _formatLimit: "{{#def.schemaRefOrQS}}",
  _formatExclusiveLimit: "validate.schema{{=$schemaPath}}"
} #}}


{{## def.schemaValueQS: {{?$isData}}{{=$schemaValue}}{{??}}{{=it.util.toQuotedString($schema)}}{{?}} #}}

{{## def._errorParams = {
  $ref: "{ ref: '{{=it.util.escapeQuotes($schema)}}' }",
  additionalItems: "{ limit: {{=$schema.length}} }",
  additionalProperties: "{ additionalProperty: '{{=$additionalProperty}}' }",
  anyOf: "{}",
  dependencies: "{ property: '{{= it.util.escapeQuotes($property) }}', missingProperty: '{{=$missingProperty}}', depsCount: {{=$deps.length}}, deps: '{{= it.util.escapeQuotes($deps.length==1 ? $deps[0] : $deps.join(\", \")) }}' }",
  'enum': "{ allowedValues: schema{{=$lvl}} }",
  format: "{ format: {{#def.schemaValueQS}} }",
  _limit: "{ comparison: {{=$opExpr}}, limit: {{=$schemaValue}}, exclusive: {{=$exclusive}} }",
  _exclusiveLimit: "{}",
  _limitItems: "{ limit: {{=$schemaValue}} }",
  _limitLength: "{ limit: {{=$schemaValue}} }",
  _limitProperties:"{ limit: {{=$schemaValue}} }",
  multipleOf: "{ multipleOf: {{=$schemaValue}} }",
  not: "{}",
  oneOf: "{}",
  pattern: "{ pattern: {{#def.schemaValueQS}} }",
  required: "{ missingProperty: '{{=$missingProperty}}' }",
  type: "{ type: '{{? $typeIsArray }}{{= $typeSchema.join(\",\") }}{{??}}{{=$typeSchema}}{{?}}' }",
  uniqueItems: "{ i: i, j: j }",
  custom: "{ keyword: '{{=$rule.keyword}}' }",
  patternGroups: "{ reason: '{{=$reason}}', limit: {{=$limit}}, pattern: '{{=it.util.escapeQuotes($pgProperty)}}' }",
  patternRequired: "{ missingPattern: '{{=$missingPattern}}' }",
  switch: "{ caseIndex: {{=$caseIndex}} }",
  constant: "{}",
  _formatLimit: "{ comparison: {{=$opExpr}}, limit: {{#def.schemaValueQS}}, exclusive: {{=$exclusive}} }",
  _formatExclusiveLimit: "{}"
} #}}
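Aside: errors.def defines the shape of every entry in validate.errors: keyword, dataPath, schemaPath, params and (unless messages: false) a message built from _errorMessages. For example:

var validate = ajv.compile({ type: 'object', required: ['name'] });
if (!validate({})) console.log(validate.errors);
// [ { keyword: 'required',
//     dataPath: '',
//     schemaPath: '#/required',
//     params: { missingProperty: 'name' },
//     message: 'should have required property \'name\'' } ]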
100  pype/premiere/extensions/com.pond5.ppro/node_modules/ajv/lib/dot/format.jst  generated  vendored
@@ -1,100 +0,0 @@
{{# def.definitions }}
{{# def.errors }}
{{# def.setupKeyword }}

{{## def.skipFormat:
  {{? $breakOnError }} if (true) { {{?}}
  {{ return out; }}
#}}

{{? it.opts.format === false }}{{# def.skipFormat }}{{?}}


{{# def.$data }}


{{## def.$dataCheckFormat:
  {{# def.$dataNotType:'string' }}
  ({{? $unknownFormats === true || $allowUnknown }}
    ({{=$schemaValue}} && !{{=$format}}
    {{? $allowUnknown }}
      && self._opts.unknownFormats.indexOf({{=$schemaValue}}) == -1
    {{?}}) ||
  {{?}}
    ({{=$format}} && !(typeof {{=$format}} == 'function'
      ? {{? it.async}}
          (async{{=$lvl}} ? {{=it.yieldAwait}} {{=$format}}({{=$data}}) : {{=$format}}({{=$data}}))
        {{??}}
          {{=$format}}({{=$data}})
        {{?}}
      : {{=$format}}.test({{=$data}}))))
#}}

{{## def.checkFormat:
  {{
    var $formatRef = 'formats' + it.util.getProperty($schema);
    if ($isObject) $formatRef += '.validate';
  }}
  {{? typeof $format == 'function' }}
    {{=$formatRef}}({{=$data}})
  {{??}}
    {{=$formatRef}}.test({{=$data}})
  {{?}}
#}}


{{
  var $unknownFormats = it.opts.unknownFormats
    , $allowUnknown = Array.isArray($unknownFormats);
}}

{{? $isData }}
  {{ var $format = 'format' + $lvl; }}
  var {{=$format}} = formats[{{=$schemaValue}}];
  var isObject{{=$lvl}} = typeof {{=$format}} == 'object'
                          && !({{=$format}} instanceof RegExp)
                          && {{=$format}}.validate;
  if (isObject{{=$lvl}}) {
    {{? it.async}}
      var async{{=$lvl}} = {{=$format}}.async;
    {{?}}
    {{=$format}} = {{=$format}}.validate;
  }
  if ({{# def.$dataCheckFormat }}) {
{{??}}
  {{ var $format = it.formats[$schema]; }}
  {{? !$format }}
    {{? $unknownFormats === true || ($allowUnknown && $unknownFormats.indexOf($schema) == -1) }}
      {{ throw new Error('unknown format "' + $schema + '" is used in schema at path "' + it.errSchemaPath + '"'); }}
    {{??}}
      {{
        if (!$allowUnknown) {
          console.warn('unknown format "' + $schema + '" ignored in schema at path "' + it.errSchemaPath + '"');
          if ($unknownFormats !== 'ignore')
            console.warn('In the next major version it will throw exception. See option unknownFormats for more information');
        }
      }}
      {{# def.skipFormat }}
    {{?}}
  {{?}}
  {{
    var $isObject = typeof $format == 'object'
                    && !($format instanceof RegExp)
                    && $format.validate;
    if ($isObject) {
      var $async = $format.async === true;
      $format = $format.validate;
    }
  }}
  {{? $async }}
    {{
      if (!it.async) throw new Error('async format in sync schema');
      var $formatRef = 'formats' + it.util.getProperty($schema) + '.validate';
    }}
    if (!({{=it.yieldAwait}} {{=$formatRef}}({{=$data}}))) {
  {{??}}
    if (!{{# def.checkFormat }}) {
  {{?}}
{{?}}
  {{# def.error:'format' }}
} {{? $breakOnError }} else { {{?}}
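Aside: format.jst accepts a RegExp, a function, or an object with a validate property, and the unknownFormats option decides whether an unregistered format throws, warns, or is ignored. Registering a custom format — the 'hex-color' name is made up for the example:

ajv.addFormat('hex-color', /^#[0-9a-f]{6}$/i);
var validate = ajv.compile({ type: 'string', format: 'hex-color' });

console.log(validate('#0aBbCc')); // true
console.log(validate('red'));     // false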
101  pype/premiere/extensions/com.pond5.ppro/node_modules/ajv/lib/dot/items.jst  generated  vendored
@@ -1,101 +0,0 @@
{{# def.definitions }}
{{# def.errors }}
{{# def.setupKeyword }}
{{# def.setupNextLevel }}


{{## def.validateItems:startFrom:
  for (var {{=$idx}} = {{=startFrom}}; {{=$idx}} < {{=$data}}.length; {{=$idx}}++) {
    {{
      $it.errorPath = it.util.getPathExpr(it.errorPath, $idx, it.opts.jsonPointers, true);
      var $passData = $data + '[' + $idx + ']';
      $it.dataPathArr[$dataNxt] = $idx;
    }}

    {{# def.generateSubschemaCode }}
    {{# def.optimizeValidate }}

    {{? $breakOnError }}
      if (!{{=$nextValid}}) break;
    {{?}}
  }
#}}

{{
  var $idx = 'i' + $lvl
    , $dataNxt = $it.dataLevel = it.dataLevel + 1
    , $nextData = 'data' + $dataNxt
    , $currentBaseId = it.baseId;
}}

var {{=$errs}} = errors;
var {{=$valid}};

{{? Array.isArray($schema) }}
  {{ /* 'items' is an array of schemas */}}
  {{ var $additionalItems = it.schema.additionalItems; }}
  {{? $additionalItems === false }}
    {{=$valid}} = {{=$data}}.length <= {{= $schema.length }};
    {{
      var $currErrSchemaPath = $errSchemaPath;
      $errSchemaPath = it.errSchemaPath + '/additionalItems';
    }}
    {{# def.checkError:'additionalItems' }}
    {{ $errSchemaPath = $currErrSchemaPath; }}
    {{# def.elseIfValid}}
  {{?}}

  {{~ $schema:$sch:$i }}
    {{? {{# def.nonEmptySchema:$sch }} }}
      {{=$nextValid}} = true;

      if ({{=$data}}.length > {{=$i}}) {
        {{
          var $passData = $data + '[' + $i + ']';
          $it.schema = $sch;
          $it.schemaPath = $schemaPath + '[' + $i + ']';
          $it.errSchemaPath = $errSchemaPath + '/' + $i;
          $it.errorPath = it.util.getPathExpr(it.errorPath, $i, it.opts.jsonPointers, true);
          $it.dataPathArr[$dataNxt] = $i;
        }}

        {{# def.generateSubschemaCode }}
        {{# def.optimizeValidate }}
      }

      {{# def.ifResultValid }}
    {{?}}
  {{~}}

  {{? typeof $additionalItems == 'object' && {{# def.nonEmptySchema:$additionalItems }} }}
    {{
      $it.schema = $additionalItems;
      $it.schemaPath = it.schemaPath + '.additionalItems';
      $it.errSchemaPath = it.errSchemaPath + '/additionalItems';
    }}
    {{=$nextValid}} = true;

    if ({{=$data}}.length > {{= $schema.length }}) {
      {{# def.validateItems: $schema.length }}
    }

    {{# def.ifResultValid }}
  {{?}}

{{?? {{# def.nonEmptySchema:$schema }} }}
  {{ /* 'items' is a single schema */}}
  {{
    $it.schema = $schema;
    $it.schemaPath = $schemaPath;
    $it.errSchemaPath = $errSchemaPath;
  }}
  {{# def.validateItems: 0 }}
  {{# def.ifResultValid }}
{{?}}

{{? $breakOnError }}
  {{= $closingBraces }}
  if ({{=$errs}} == errors) {
{{?}}

{{# def.cleanUp }}
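Aside: the two branches above correspond to the two forms of items: an array of schemas validates positionally (with additionalItems capping the length), while a single schema validates every element. Tuple-form sketch:

var validate = ajv.compile({
  type: 'array',
  items: [ { type: 'number' }, { type: 'string' } ],
  additionalItems: false
});

console.log(validate([1, 'a']));    // true
console.log(validate([1, 2]));      // false -- items[1] wants a string
console.log(validate([1, 'a', 2])); // false -- additionalItems: false caps the length at 2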
34  pype/premiere/extensions/com.pond5.ppro/node_modules/ajv/lib/dot/missing.def  generated  vendored
@@ -1,34 +0,0 @@
{{## def.checkMissingProperty:_properties:
  {{~ _properties:_$property:$i }}
    {{?$i}} || {{?}}
    {{ var $prop = it.util.getProperty(_$property); }}
    ( {{=$data}}{{=$prop}} === undefined && (missing{{=$lvl}} = {{= it.util.toQuotedString(it.opts.jsonPointers ? _$property : $prop) }}) )
  {{~}}
#}}


{{## def.errorMissingProperty:_error:
  {{
    var $propertyPath = 'missing' + $lvl
      , $missingProperty = '\' + ' + $propertyPath + ' + \'';
    if (it.opts._errorDataPathProperty) {
      it.errorPath = it.opts.jsonPointers
                      ? it.util.getPathExpr($currentErrorPath, $propertyPath, true)
                      : $currentErrorPath + ' + ' + $propertyPath;
    }
  }}
  {{# def.error:_error }}
#}}

{{## def.allErrorsMissingProperty:_error:
  {{
    var $prop = it.util.getProperty($reqProperty)
      , $missingProperty = it.util.escapeQuotes($reqProperty);
    if (it.opts._errorDataPathProperty) {
      it.errorPath = it.util.getPath($currentErrorPath, $reqProperty, it.opts.jsonPointers);
    }
  }}
  if ({{=$data}}{{=$prop}} === undefined) {
    {{# def.addError:_error }}
  }
#}}
20 pype/premiere/extensions/com.pond5.ppro/node_modules/ajv/lib/dot/multipleOf.jst generated vendored
@@ -1,20 +0,0 @@
{{# def.definitions }}
{{# def.errors }}
{{# def.setupKeyword }}
{{# def.$data }}

var division{{=$lvl}};
if ({{?$isData}}
{{=$schemaValue}} !== undefined && (
typeof {{=$schemaValue}} != 'number' ||
{{?}}
(division{{=$lvl}} = {{=$data}} / {{=$schemaValue}},
{{? it.opts.multipleOfPrecision }}
Math.abs(Math.round(division{{=$lvl}}) - division{{=$lvl}}) > 1e-{{=it.opts.multipleOfPrecision}}
{{??}}
division{{=$lvl}} !== parseInt(division{{=$lvl}})
{{?}}
)
{{?$isData}} ) {{?}} ) {
{{# def.error:'multipleOf' }}
} {{? $breakOnError }} else { {{?}}
43 pype/premiere/extensions/com.pond5.ppro/node_modules/ajv/lib/dot/not.jst generated vendored
@@ -1,43 +0,0 @@
{{# def.definitions }}
{{# def.errors }}
{{# def.setupKeyword }}
{{# def.setupNextLevel }}

{{? {{# def.nonEmptySchema:$schema }} }}
{{
$it.schema = $schema;
$it.schemaPath = $schemaPath;
$it.errSchemaPath = $errSchemaPath;
}}

var {{=$errs}} = errors;

{{# def.setCompositeRule }}

{{
$it.createErrors = false;
var $allErrorsOption;
if ($it.opts.allErrors) {
$allErrorsOption = $it.opts.allErrors;
$it.opts.allErrors = false;
}
}}
{{= it.validate($it) }}
{{
$it.createErrors = true;
if ($allErrorsOption) $it.opts.allErrors = $allErrorsOption;
}}

{{# def.resetCompositeRule }}

if ({{=$nextValid}}) {
{{# def.error:'not' }}
} else {
{{# def.resetErrors }}
{{? it.opts.allErrors }} } {{?}}
{{??}}
{{# def.addError:'not' }}
{{? $breakOnError}}
if (false) {
{{?}}
{{?}}
44 pype/premiere/extensions/com.pond5.ppro/node_modules/ajv/lib/dot/oneOf.jst generated vendored
@@ -1,44 +0,0 @@
{{# def.definitions }}
{{# def.errors }}
{{# def.setupKeyword }}
{{# def.setupNextLevel }}

var {{=$errs}} = errors;
var prevValid{{=$lvl}} = false;
var {{=$valid}} = false;

{{ var $currentBaseId = $it.baseId; }}
{{# def.setCompositeRule }}

{{~ $schema:$sch:$i }}
{{? {{# def.nonEmptySchema:$sch }} }}
{{
$it.schema = $sch;
$it.schemaPath = $schemaPath + '[' + $i + ']';
$it.errSchemaPath = $errSchemaPath + '/' + $i;
}}

{{# def.insertSubschemaCode }}
{{??}}
var {{=$nextValid}} = true;
{{?}}

{{? $i }}
if ({{=$nextValid}} && prevValid{{=$lvl}})
{{=$valid}} = false;
else {
{{ $closingBraces += '}'; }}
{{?}}

if ({{=$nextValid}}) {{=$valid}} = prevValid{{=$lvl}} = true;
{{~}}

{{# def.resetCompositeRule }}

{{= $closingBraces }}

if (!{{=$valid}}) {
{{# def.error:'oneOf' }}
} else {
{{# def.resetErrors }}
{{? it.opts.allErrors }} } {{?}}
14 pype/premiere/extensions/com.pond5.ppro/node_modules/ajv/lib/dot/pattern.jst generated vendored
@@ -1,14 +0,0 @@
{{# def.definitions }}
{{# def.errors }}
{{# def.setupKeyword }}
{{# def.$data }}

{{
var $regexp = $isData
? '(new RegExp(' + $schemaValue + '))'
: it.usePattern($schema);
}}

if ({{# def.$dataNotType:'string' }} !{{=$regexp}}.test({{=$data}}) ) {
{{# def.error:'pattern' }}
} {{? $breakOnError }} else { {{?}}
319 pype/premiere/extensions/com.pond5.ppro/node_modules/ajv/lib/dot/properties.jst generated vendored
@@ -1,319 +0,0 @@
{{# def.definitions }}
{{# def.errors }}
{{# def.setupKeyword }}
{{# def.setupNextLevel }}


{{## def.validateAdditional:
{{ /* additionalProperties is schema */
$it.schema = $aProperties;
$it.schemaPath = it.schemaPath + '.additionalProperties';
$it.errSchemaPath = it.errSchemaPath + '/additionalProperties';
$it.errorPath = it.opts._errorDataPathProperty
? it.errorPath
: it.util.getPathExpr(it.errorPath, $key, it.opts.jsonPointers);
var $passData = $data + '[' + $key + ']';
$it.dataPathArr[$dataNxt] = $key;
}}

{{# def.generateSubschemaCode }}
{{# def.optimizeValidate }}
#}}


{{
var $key = 'key' + $lvl
, $dataNxt = $it.dataLevel = it.dataLevel + 1
, $nextData = 'data' + $dataNxt;

var $schemaKeys = Object.keys($schema || {})
, $pProperties = it.schema.patternProperties || {}
, $pPropertyKeys = Object.keys($pProperties)
, $aProperties = it.schema.additionalProperties
, $someProperties = $schemaKeys.length || $pPropertyKeys.length
, $noAdditional = $aProperties === false
, $additionalIsSchema = typeof $aProperties == 'object'
&& Object.keys($aProperties).length
, $removeAdditional = it.opts.removeAdditional
, $checkAdditional = $noAdditional || $additionalIsSchema || $removeAdditional
, $ownProperties = it.opts.ownProperties
, $currentBaseId = it.baseId;

var $required = it.schema.required;
if ($required && !(it.opts.v5 && $required.$data) && $required.length < it.opts.loopRequired)
var $requiredHash = it.util.toHash($required);

if (it.opts.v5) {
var $pgProperties = it.schema.patternGroups || {}
, $pgPropertyKeys = Object.keys($pgProperties);
}
}}


var {{=$errs}} = errors;
var {{=$nextValid}} = true;

{{? $checkAdditional }}
for (var {{=$key}} in {{=$data}}) {
{{# def.checkOwnProperty }}
{{? $someProperties }}
var isAdditional{{=$lvl}} = !(false
{{? $schemaKeys.length }}
{{? $schemaKeys.length > 5 }}
|| validate.schema{{=$schemaPath}}[{{=$key}}]
{{??}}
{{~ $schemaKeys:$propertyKey }}
|| {{=$key}} == {{= it.util.toQuotedString($propertyKey) }}
{{~}}
{{?}}
{{?}}
{{? $pPropertyKeys.length }}
{{~ $pPropertyKeys:$pProperty:$i }}
|| {{= it.usePattern($pProperty) }}.test({{=$key}})
{{~}}
{{?}}
{{? it.opts.v5 && $pgPropertyKeys && $pgPropertyKeys.length }}
{{~ $pgPropertyKeys:$pgProperty:$i }}
|| {{= it.usePattern($pgProperty) }}.test({{=$key}})
{{~}}
{{?}}
);

if (isAdditional{{=$lvl}}) {
{{?}}
{{? $removeAdditional == 'all' }}
delete {{=$data}}[{{=$key}}];
{{??}}
{{
var $currentErrorPath = it.errorPath;
var $additionalProperty = '\' + ' + $key + ' + \'';
if (it.opts._errorDataPathProperty) {
it.errorPath = it.util.getPathExpr(it.errorPath, $key, it.opts.jsonPointers);
}
}}
{{? $noAdditional }}
{{? $removeAdditional }}
delete {{=$data}}[{{=$key}}];
{{??}}
{{=$nextValid}} = false;
{{
var $currErrSchemaPath = $errSchemaPath;
$errSchemaPath = it.errSchemaPath + '/additionalProperties';
}}
{{# def.error:'additionalProperties' }}
{{ $errSchemaPath = $currErrSchemaPath; }}
{{? $breakOnError }} break; {{?}}
{{?}}
{{?? $additionalIsSchema }}
{{? $removeAdditional == 'failing' }}
var {{=$errs}} = errors;
{{# def.setCompositeRule }}

{{# def.validateAdditional }}

if (!{{=$nextValid}}) {
errors = {{=$errs}};
if (validate.errors !== null) {
if (errors) validate.errors.length = errors;
else validate.errors = null;
}
delete {{=$data}}[{{=$key}}];
}

{{# def.resetCompositeRule }}
{{??}}
{{# def.validateAdditional }}
{{? $breakOnError }} if (!{{=$nextValid}}) break; {{?}}
{{?}}
{{?}}
{{ it.errorPath = $currentErrorPath; }}
{{?}}
{{? $someProperties }}
}
{{?}}
}

{{# def.ifResultValid }}
{{?}}

{{ var $useDefaults = it.opts.useDefaults && !it.compositeRule; }}

{{? $schemaKeys.length }}
{{~ $schemaKeys:$propertyKey }}
{{ var $sch = $schema[$propertyKey]; }}

{{? {{# def.nonEmptySchema:$sch}} }}
{{
var $prop = it.util.getProperty($propertyKey)
, $passData = $data + $prop
, $hasDefault = $useDefaults && $sch.default !== undefined;
$it.schema = $sch;
$it.schemaPath = $schemaPath + $prop;
$it.errSchemaPath = $errSchemaPath + '/' + it.util.escapeFragment($propertyKey);
$it.errorPath = it.util.getPath(it.errorPath, $propertyKey, it.opts.jsonPointers);
$it.dataPathArr[$dataNxt] = it.util.toQuotedString($propertyKey);
}}

{{# def.generateSubschemaCode }}

{{? {{# def.willOptimize }} }}
{{
$code = {{# def._optimizeValidate }};
var $useData = $passData;
}}
{{??}}
{{ var $useData = $nextData; }}
var {{=$nextData}} = {{=$passData}};
{{?}}

{{? $hasDefault }}
{{= $code }}
{{??}}
{{? $requiredHash && $requiredHash[$propertyKey] }}
if ({{=$useData}} === undefined) {
{{=$nextValid}} = false;
{{
var $currentErrorPath = it.errorPath
, $currErrSchemaPath = $errSchemaPath
, $missingProperty = it.util.escapeQuotes($propertyKey);
if (it.opts._errorDataPathProperty) {
it.errorPath = it.util.getPath($currentErrorPath, $propertyKey, it.opts.jsonPointers);
}
$errSchemaPath = it.errSchemaPath + '/required';
}}
{{# def.error:'required' }}
{{ $errSchemaPath = $currErrSchemaPath; }}
{{ it.errorPath = $currentErrorPath; }}
} else {
{{??}}
{{? $breakOnError }}
if ({{=$useData}} === undefined) {
{{=$nextValid}} = true;
} else {
{{??}}
if ({{=$useData}} !== undefined) {
{{?}}
{{?}}

{{= $code }}
}
{{?}} {{ /* $hasDefault */ }}
{{?}} {{ /* def.nonEmptySchema */ }}

{{# def.ifResultValid }}
{{~}}
{{?}}

{{~ $pPropertyKeys:$pProperty }}
{{ var $sch = $pProperties[$pProperty]; }}

{{? {{# def.nonEmptySchema:$sch}} }}
{{
$it.schema = $sch;
$it.schemaPath = it.schemaPath + '.patternProperties' + it.util.getProperty($pProperty);
$it.errSchemaPath = it.errSchemaPath + '/patternProperties/'
+ it.util.escapeFragment($pProperty);
}}

for (var {{=$key}} in {{=$data}}) {
{{# def.checkOwnProperty }}
if ({{= it.usePattern($pProperty) }}.test({{=$key}})) {
{{
$it.errorPath = it.util.getPathExpr(it.errorPath, $key, it.opts.jsonPointers);
var $passData = $data + '[' + $key + ']';
$it.dataPathArr[$dataNxt] = $key;
}}

{{# def.generateSubschemaCode }}
{{# def.optimizeValidate }}

{{? $breakOnError }} if (!{{=$nextValid}}) break; {{?}}
}
{{? $breakOnError }} else {{=$nextValid}} = true; {{?}}
}

{{# def.ifResultValid }}
{{?}} {{ /* def.nonEmptySchema */ }}
{{~}}


{{? it.opts.v5 }}
{{~ $pgPropertyKeys:$pgProperty }}
{{
var $pgSchema = $pgProperties[$pgProperty]
, $sch = $pgSchema.schema;
}}

{{? {{# def.nonEmptySchema:$sch}} }}
{{
$it.schema = $sch;
$it.schemaPath = it.schemaPath + '.patternGroups' + it.util.getProperty($pgProperty) + '.schema';
$it.errSchemaPath = it.errSchemaPath + '/patternGroups/'
+ it.util.escapeFragment($pgProperty)
+ '/schema';
}}

var pgPropCount{{=$lvl}} = 0;

for (var {{=$key}} in {{=$data}}) {
{{# def.checkOwnProperty }}
if ({{= it.usePattern($pgProperty) }}.test({{=$key}})) {
pgPropCount{{=$lvl}}++;

{{
$it.errorPath = it.util.getPathExpr(it.errorPath, $key, it.opts.jsonPointers);
var $passData = $data + '[' + $key + ']';
$it.dataPathArr[$dataNxt] = $key;
}}

{{# def.generateSubschemaCode }}
{{# def.optimizeValidate }}

{{? $breakOnError }} if (!{{=$nextValid}}) break; {{?}}
}
{{? $breakOnError }} else {{=$nextValid}} = true; {{?}}
}

{{# def.ifResultValid }}

{{
var $pgMin = $pgSchema.minimum
, $pgMax = $pgSchema.maximum;
}}
{{? $pgMin !== undefined || $pgMax !== undefined }}
var {{=$valid}} = true;

{{ var $currErrSchemaPath = $errSchemaPath; }}

{{? $pgMin !== undefined }}
{{ var $limit = $pgMin, $reason = 'minimum', $moreOrLess = 'less'; }}
{{=$valid}} = pgPropCount{{=$lvl}} >= {{=$pgMin}};
{{ $errSchemaPath = it.errSchemaPath + '/patternGroups/minimum'; }}
{{# def.checkError:'patternGroups' }}
{{? $pgMax !== undefined }}
else
{{?}}
{{?}}

{{? $pgMax !== undefined }}
{{ var $limit = $pgMax, $reason = 'maximum', $moreOrLess = 'more'; }}
{{=$valid}} = pgPropCount{{=$lvl}} <= {{=$pgMax}};
{{ $errSchemaPath = it.errSchemaPath + '/patternGroups/maximum'; }}
{{# def.checkError:'patternGroups' }}
{{?}}

{{ $errSchemaPath = $currErrSchemaPath; }}

{{# def.ifValid }}
{{?}}
{{?}} {{ /* def.nonEmptySchema */ }}
{{~}}
{{?}}


{{? $breakOnError }}
{{= $closingBraces }}
if ({{=$errs}} == errors) {
{{?}}

{{# def.cleanUp }}
86 pype/premiere/extensions/com.pond5.ppro/node_modules/ajv/lib/dot/ref.jst generated vendored
@@ -1,86 +0,0 @@
{{# def.definitions }}
{{# def.errors }}
{{# def.setupKeyword }}

{{## def._validateRef:_v:
{{? it.opts.passContext }}
{{=_v}}.call(this,
{{??}}
{{=_v}}(
{{?}}
{{=$data}}, {{# def.dataPath }}{{# def.passParentData }}, rootData)
#}}

{{ var $async, $refCode; }}
{{? $schema == '#' || $schema == '#/' }}
{{
if (it.isRoot) {
$async = it.async;
$refCode = 'validate';
} else {
$async = it.root.schema.$async === true;
$refCode = 'root.refVal[0]';
}
}}
{{??}}
{{ var $refVal = it.resolveRef(it.baseId, $schema, it.isRoot); }}
{{? $refVal === undefined }}
{{ var $message = 'can\'t resolve reference ' + $schema + ' from id ' + it.baseId; }}
{{? it.opts.missingRefs == 'fail' }}
{{ console.log($message); }}
{{# def.error:'$ref' }}
{{? $breakOnError }} if (false) { {{?}}
{{?? it.opts.missingRefs == 'ignore' }}
{{ console.log($message); }}
{{? $breakOnError }} if (true) { {{?}}
{{??}}
{{
var $error = new Error($message);
$error.missingRef = it.resolve.url(it.baseId, $schema);
$error.missingSchema = it.resolve.normalizeId(it.resolve.fullPath($error.missingRef));
throw $error;
}}
{{?}}
{{?? $refVal.inline }}
{{# def.setupNextLevel }}
{{
$it.schema = $refVal.schema;
$it.schemaPath = '';
$it.errSchemaPath = $schema;
}}
{{ var $code = it.validate($it).replace(/validate\.schema/g, $refVal.code); }}
{{= $code }}
{{? $breakOnError}}
if ({{=$nextValid}}) {
{{?}}
{{??}}
{{
$async = $refVal.$async === true;
$refCode = $refVal.code;
}}
{{?}}
{{?}}

{{? $refCode }}
{{# def.beginDefOut}}
{{# def._validateRef:$refCode }}
{{# def.storeDefOut:__callValidate }}

{{? $async }}
{{ if (!it.async) throw new Error('async schema referenced by sync schema'); }}
try { {{? $breakOnError }}var {{=$valid}} ={{?}} {{=it.yieldAwait}} {{=__callValidate}}; }
catch (e) {
if (!(e instanceof ValidationError)) throw e;
if (vErrors === null) vErrors = e.errors;
else vErrors = vErrors.concat(e.errors);
errors = vErrors.length;
}
{{? $breakOnError }} if ({{=$valid}}) { {{?}}
{{??}}
if (!{{=__callValidate}}) {
if (vErrors === null) vErrors = {{=$refCode}}.errors;
else vErrors = vErrors.concat({{=$refCode}}.errors);
errors = vErrors.length;
} {{? $breakOnError }} else { {{?}}
{{?}}
{{?}}
96 pype/premiere/extensions/com.pond5.ppro/node_modules/ajv/lib/dot/required.jst generated vendored
@@ -1,96 +0,0 @@
{{# def.definitions }}
{{# def.errors }}
{{# def.missing }}
{{# def.setupKeyword }}
{{# def.$data }}

{{ var $vSchema = 'schema' + $lvl; }}

{{## def.setupLoop:
{{? !$isData }}
var {{=$vSchema}} = validate.schema{{=$schemaPath}};
{{?}}

{{
var $i = 'i' + $lvl
, $propertyPath = 'schema' + $lvl + '[' + $i + ']'
, $missingProperty = '\' + ' + $propertyPath + ' + \'';
if (it.opts._errorDataPathProperty) {
it.errorPath = it.util.getPathExpr($currentErrorPath, $propertyPath, it.opts.jsonPointers);
}
}}
#}}


{{? !$isData }}
{{? $schema.length < it.opts.loopRequired &&
it.schema.properties && Object.keys(it.schema.properties).length }}
{{ var $required = []; }}
{{~ $schema:$property }}
{{ var $propertySch = it.schema.properties[$property]; }}
{{? !($propertySch && {{# def.nonEmptySchema:$propertySch}}) }}
{{ $required[$required.length] = $property; }}
{{?}}
{{~}}
{{??}}
{{ var $required = $schema; }}
{{?}}
{{?}}


{{? $isData || $required.length }}
{{
var $currentErrorPath = it.errorPath
, $loopRequired = $isData || $required.length >= it.opts.loopRequired;
}}

{{? $breakOnError }}
var missing{{=$lvl}};
{{? $loopRequired }}
{{# def.setupLoop }}
var {{=$valid}} = true;

{{?$isData}}{{# def.check$dataIsArray }}{{?}}

for (var {{=$i}} = 0; {{=$i}} < {{=$vSchema}}.length; {{=$i}}++) {
{{=$valid}} = {{=$data}}[{{=$vSchema}}[{{=$i}}]] !== undefined;
if (!{{=$valid}}) break;
}

{{? $isData }} } {{?}}

{{# def.checkError:'required' }}
else {
{{??}}
if ({{# def.checkMissingProperty:$required }}) {
{{# def.errorMissingProperty:'required' }}
} else {
{{?}}
{{??}}
{{? $loopRequired }}
{{# def.setupLoop }}
{{? $isData }}
if ({{=$vSchema}} && !Array.isArray({{=$vSchema}})) {
{{# def.addError:'required' }}
} else if ({{=$vSchema}} !== undefined) {
{{?}}

for (var {{=$i}} = 0; {{=$i}} < {{=$vSchema}}.length; {{=$i}}++) {
if ({{=$data}}[{{=$vSchema}}[{{=$i}}]] === undefined) {
{{# def.addError:'required' }}
}
}

{{? $isData }} } {{?}}
{{??}}
{{~ $required:$reqProperty }}
{{# def.allErrorsMissingProperty:'required' }}
{{~}}
{{?}}
{{?}}

{{ it.errorPath = $currentErrorPath; }}

{{?? $breakOnError }}
if (true) {
{{?}}
38 pype/premiere/extensions/com.pond5.ppro/node_modules/ajv/lib/dot/uniqueItems.jst generated vendored
@@ -1,38 +0,0 @@
{{# def.definitions }}
{{# def.errors }}
{{# def.setupKeyword }}
{{# def.$data }}


{{? ($schema || $isData) && it.opts.uniqueItems !== false }}
{{? $isData }}
var {{=$valid}};
if ({{=$schemaValue}} === false || {{=$schemaValue}} === undefined)
{{=$valid}} = true;
else if (typeof {{=$schemaValue}} != 'boolean')
{{=$valid}} = false;
else {
{{?}}

var {{=$valid}} = true;
if ({{=$data}}.length > 1) {
var i = {{=$data}}.length, j;
outer:
for (;i--;) {
for (j = i; j--;) {
if (equal({{=$data}}[i], {{=$data}}[j])) {
{{=$valid}} = false;
break outer;
}
}
}
}

{{? $isData }} } {{?}}

if (!{{=$valid}}) {
{{# def.error:'uniqueItems' }}
} {{? $breakOnError }} else { {{?}}
{{??}}
{{? $breakOnError }} if (true) { {{?}}
{{?}}
116 pype/premiere/extensions/com.pond5.ppro/node_modules/ajv/lib/dot/v5/_formatLimit.jst generated vendored
@@ -1,116 +0,0 @@
{{# def.definitions }}
{{# def.errors }}
{{# def.setupKeyword }}

var {{=$valid}} = undefined;

{{## def.skipFormatLimit:
{{=$valid}} = true;
{{ return out; }}
#}}

{{## def.compareFormat:
{{? $isData }}
if ({{=$schemaValue}} === undefined) {{=$valid}} = true;
else if (typeof {{=$schemaValue}} != 'string') {{=$valid}} = false;
else {
{{ $closingBraces += '}'; }}
{{?}}

{{? $isDataFormat }}
if (!{{=$compare}}) {{=$valid}} = true;
else {
{{ $closingBraces += '}'; }}
{{?}}

var {{=$result}} = {{=$compare}}({{=$data}}, {{# def.schemaValueQS }});

if ({{=$result}} === undefined) {{=$valid}} = false;
#}}


{{? it.opts.format === false }}{{# def.skipFormatLimit }}{{?}}

{{
var $schemaFormat = it.schema.format
, $isDataFormat = it.opts.v5 && $schemaFormat.$data
, $closingBraces = '';
}}

{{? $isDataFormat }}
{{
var $schemaValueFormat = it.util.getData($schemaFormat.$data, $dataLvl, it.dataPathArr)
, $format = 'format' + $lvl
, $compare = 'compare' + $lvl;
}}

var {{=$format}} = formats[{{=$schemaValueFormat}}]
, {{=$compare}} = {{=$format}} && {{=$format}}.compare;
{{??}}
{{ var $format = it.formats[$schemaFormat]; }}
{{? !($format && $format.compare) }}
{{# def.skipFormatLimit }}
{{?}}
{{ var $compare = 'formats' + it.util.getProperty($schemaFormat) + '.compare'; }}
{{?}}

{{
var $isMax = $keyword == 'formatMaximum'
, $exclusiveKeyword = 'formatExclusive' + ($isMax ? 'Maximum' : 'Minimum')
, $schemaExcl = it.schema[$exclusiveKeyword]
, $isDataExcl = it.opts.v5 && $schemaExcl && $schemaExcl.$data
, $op = $isMax ? '<' : '>'
, $result = 'result' + $lvl;
}}

{{# def.$data }}


{{? $isDataExcl }}
{{
var $schemaValueExcl = it.util.getData($schemaExcl.$data, $dataLvl, it.dataPathArr)
, $exclusive = 'exclusive' + $lvl
, $opExpr = 'op' + $lvl
, $opStr = '\' + ' + $opExpr + ' + \'';
}}
var schemaExcl{{=$lvl}} = {{=$schemaValueExcl}};
{{ $schemaValueExcl = 'schemaExcl' + $lvl; }}

if (typeof {{=$schemaValueExcl}} != 'boolean' && {{=$schemaValueExcl}} !== undefined) {
{{=$valid}} = false;
{{ var $errorKeyword = $exclusiveKeyword; }}
{{# def.error:'_formatExclusiveLimit' }}
}

{{# def.elseIfValid }}

{{# def.compareFormat }}
var {{=$exclusive}} = {{=$schemaValueExcl}} === true;

if ({{=$valid}} === undefined) {
{{=$valid}} = {{=$exclusive}}
? {{=$result}} {{=$op}} 0
: {{=$result}} {{=$op}}= 0;
}

if (!{{=$valid}}) var op{{=$lvl}} = {{=$exclusive}} ? '{{=$op}}' : '{{=$op}}=';
{{??}}
{{
var $exclusive = $schemaExcl === true
, $opStr = $op; /*used in error*/
if (!$exclusive) $opStr += '=';
var $opExpr = '\'' + $opStr + '\''; /*used in error*/
}}

{{# def.compareFormat }}

if ({{=$valid}} === undefined)
{{=$valid}} = {{=$result}} {{=$op}}{{?!$exclusive}}={{?}} 0;
{{?}}

{{= $closingBraces }}

if (!{{=$valid}}) {
{{ var $errorKeyword = $keyword; }}
{{# def.error:'_formatLimit' }}
}
10 pype/premiere/extensions/com.pond5.ppro/node_modules/ajv/lib/dot/v5/constant.jst generated vendored
@@ -1,10 +0,0 @@
{{# def.definitions }}
{{# def.errors }}
{{# def.setupKeyword }}
{{# def.$data }}

{{? !$isData }}
var schema{{=$lvl}} = validate.schema{{=$schemaPath}};
{{?}}
var {{=$valid}} = equal({{=$data}}, schema{{=$lvl}});
{{# def.checkError:'constant' }}
28 pype/premiere/extensions/com.pond5.ppro/node_modules/ajv/lib/dot/v5/patternRequired.jst generated vendored
@@ -1,28 +0,0 @@
{{# def.definitions }}
{{# def.errors }}
{{# def.setupKeyword }}

{{
var $key = 'key' + $lvl
, $matched = 'patternMatched' + $lvl
, $closingBraces = ''
, $ownProperties = it.opts.ownProperties;
}}

var {{=$valid}} = true;
{{~ $schema:$pProperty }}
var {{=$matched}} = false;
for (var {{=$key}} in {{=$data}}) {
{{# def.checkOwnProperty }}
{{=$matched}} = {{= it.usePattern($pProperty) }}.test({{=$key}});
if ({{=$matched}}) break;
}

{{ var $missingPattern = it.util.escapeQuotes($pProperty); }}
if (!{{=$matched}}) {
{{=$valid}} = false;
{{# def.addError:'patternRequired' }}
} {{# def.elseIfValid }}
{{~}}

{{= $closingBraces }}
Some files were not shown because too many files have changed in this diff.