Merge branch 'develop' into feature/PYPE-762_multi_root

iLLiCiTiT 2020-05-11 16:34:09 +02:00
commit 8438de5816
131 changed files with 32461 additions and 11610 deletions

.gitignore vendored
View file

@ -33,3 +33,5 @@ coverage.xml
##################
node_modules/
package-lock.json
pype/premiere/ppro/js/debug.log

View file

@ -11,6 +11,7 @@ log = logging.getLogger(__name__)
PROJECT_PLUGINS_PATH = os.environ.get("PYPE_PROJECT_PLUGINS")
STUDIO_PLUGINS_PATH = os.environ.get("PYPE_STUDIO_PLUGINS")
PACKAGE_DIR = os.path.dirname(__file__)
PLUGINS_DIR = os.path.join(PACKAGE_DIR, "plugins")
@ -83,6 +84,20 @@ def install():
pyblish.register_plugin_path(plugin_path)
avalon.register_plugin_path(avalon.Loader, plugin_path)
avalon.register_plugin_path(avalon.Creator, plugin_path)
avalon.register_plugin_path(
avalon.InventoryAction, plugin_path
)
# Register studio specific plugins
if STUDIO_PLUGINS_PATH and project_name:
for path in STUDIO_PLUGINS_PATH.split(os.pathsep):
if not path:
continue
if os.path.exists(path):
pyblish.register_plugin_path(path)
avalon.register_plugin_path(avalon.Loader, path)
avalon.register_plugin_path(avalon.Creator, path)
avalon.register_plugin_path(avalon.InventoryAction, path)
if project_name:
root_obj = Roots(project_name)
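The added block above registers every existing path from the PYPE_STUDIO_PLUGINS environment variable with both pyblish and avalon. A minimal, self-contained sketch of that registration pattern, assuming the same pyblish/avalon APIs used in the diff (the helper name and default variable name are illustrative only):
import os
from avalon import api as avalon
from pyblish import api as pyblish

def register_studio_plugin_paths(env_var="PYPE_STUDIO_PLUGINS"):
    # Register every existing path from an os.pathsep separated env variable.
    for path in (os.environ.get(env_var) or "").split(os.pathsep):
        if not path or not os.path.exists(path):
            continue
        pyblish.register_plugin_path(path)
        for plugin_type in (avalon.Loader, avalon.Creator, avalon.InventoryAction):
            avalon.register_plugin_path(plugin_type, path)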

View file

@ -1,122 +0,0 @@
import os
import sys
from avalon import api as avalon
from pyblish import api as pyblish
from app import api as app
from .. import api
t = app.Templates()
log = api.Logger.getLogger(__name__, "aport")
AVALON_CONFIG = os.getenv("AVALON_CONFIG", "pype")
ADDITIONAL_PLUGINS = ['ftrack']
PARENT_DIR = os.path.dirname(__file__)
PACKAGE_DIR = os.path.dirname(PARENT_DIR)
PLUGINS_DIR = os.path.join(PACKAGE_DIR, "plugins")
PUBLISH_PATH = os.path.join(
PLUGINS_DIR, "aport", "publish"
).replace("\\", "/")
if os.getenv("PUBLISH_PATH", None):
os.environ["PUBLISH_PATH"] = os.pathsep.join(
os.environ["PUBLISH_PATH"].split(os.pathsep) +
[PUBLISH_PATH]
)
else:
os.environ["PUBLISH_PATH"] = PUBLISH_PATH
LOAD_PATH = os.path.join(PLUGINS_DIR, "aport", "load")
CREATE_PATH = os.path.join(PLUGINS_DIR, "aport", "create")
INVENTORY_PATH = os.path.join(PLUGINS_DIR, "aport", "inventory")
def register_additional_plugin_paths():
'''Add publish plugin paths for additional hosts.
'''
for host in ADDITIONAL_PLUGINS:
publish_path = os.path.join(
PLUGINS_DIR, host, "publish").replace("\\", "/")
pyblish.register_plugin_path(publish_path)
# adding path to PUBLISH_PATH environment
if os.getenv("PUBLISH_PATH", None):
os.environ["PUBLISH_PATH"] = os.pathsep.join(
os.environ["PUBLISH_PATH"].split(os.pathsep) +
[publish_path]
)
else:
os.environ["PUBLISH_PATH"] = publish_path
log.info(
"Registered additional plugin path: "
"{}".format(publish_path))
def install():
# api.set_avalon_workdir()
log.info("Registering Aport plug-ins..")
pyblish.register_plugin_path(PUBLISH_PATH)
avalon.register_plugin_path(avalon.Loader, LOAD_PATH)
avalon.register_plugin_path(avalon.Creator, CREATE_PATH)
avalon.register_plugin_path(avalon.InventoryAction, INVENTORY_PATH)
# additional plugins
register_additional_plugin_paths()
# Disable all families except for the ones we explicitly want to see
family_states = [
"imagesequence",
"mov"
]
avalon.data["familiesStateDefault"] = False
avalon.data["familiesStateToggled"] = family_states
# load data from templates
api.load_data_from_templates()
# launch pico server
pico_server_launch()
def uninstall():
log.info("Deregistering Aport plug-ins..")
pyblish.deregister_plugin_path(PUBLISH_PATH)
avalon.deregister_plugin_path(avalon.Loader, LOAD_PATH)
avalon.deregister_plugin_path(avalon.Creator, CREATE_PATH)
# reset data from templates
api.reset_data_from_templates()
def pico_server_launch():
# path = "C:/Users/hubert/CODE/github/pico/examples/everything"
path = os.path.join(
os.path.dirname(__file__),
# "package"
)
os.chdir(path)
print(os.getcwd())
print(os.listdir(path))
try:
args = [sys.executable, "-m", "pico.server",
# "pipeline",
"api"
]
app.forward(
args,
cwd=path
)
except Exception as e:
log.error(e)
log.error(sys.exc_info())
# sys.exit(returncode)

View file

@ -1,225 +0,0 @@
# api.py
import os
import sys
import pico
from pico import PicoApp
from app.api import forward, Logger
import pipeline as ppl
log = Logger.getLogger(__name__, "aport")
@pico.expose()
def get_session():
ppl.AVALON_PROJECT = os.getenv("AVALON_PROJECT", None)
ppl.AVALON_ASSET = os.getenv("AVALON_ASSET", None)
ppl.AVALON_TASK = os.getenv("AVALON_TASK", None)
ppl.AVALON_SILO = os.getenv("AVALON_SILO", None)
return ppl.get_session()
@pico.expose()
def load_representations(project, representations):
'''Query data from MongoDB for the defined representations.
Args:
project (str): name of the project
representations (list): representations which are required
Returns:
data (dict): representations in last versions
# testing url:
http://localhost:4242/api/load_representations?project=jakub_projectx&representations=[{%22asset%22:%22e09s031_0040%22,%22subset%22:%22referenceDefault%22,%22representation%22:%22mp4%22},%20{%22asset%22:%22e09s031_0030%22,%22subset%22:%22referenceDefault%22,%22representation%22:%22mp4%22}]
# returning:
{"e09s031_0040_referenceDefault":{"_id":"5c6dabaa2af61756b02f7f32","schema":"pype:representation-2.0","type":"representation","parent":"5c6dabaa2af61756b02f7f31","name":"mp4","data":{"path":"C:\\Users\\hubert\\_PYPE_testing\\projects\\jakub_projectx\\thisFolder\\e09\\s031\\e09s031_0040\\publish\\clip\\referenceDefault\\v019\\jkprx_e09s031_0040_referenceDefault_v019.mp4","template":"{publish.root}/{publish.folder}/{version.main}/{publish.file}"},"dependencies":[],"context":{"root":"C:\\Users\\hubert\\_PYPE_testing\\projects","project":{"name":"jakub_projectx","code":"jkprx"},"task":"edit","silo":"thisFolder","asset":"e09s031_0040","family":"clip","subset":"referenceDefault","VERSION":19,"hierarchy":"thisFolder\\e09\\s031","representation":"mp4"}}}
'''
data = {}
# log.info("___project: {}".format(project))
# ppl.io.activate_project(project)
#
# from_mongo = ppl.io.find({"name": repr['representation'],
# "type": "representation"})[:]
for repr in representations:
log.info("asset: {}".format(repr['asset']))
# set context for each asset individually
context(project, repr['asset'], '')
# query data from mongo db for the asset's subset representation
related_repr = [r for r in ppl.io.find({"name": repr['representation'],
"type": "representation",
"context.asset": repr['asset']})[:]]
versions_dict = {r['context']['version']: i
for i, r in enumerate(related_repr)}
versions_list = [v for v in versions_dict.keys()]
sorted(versions_list)
version_index_last = versions_dict[max(versions_list)]
log.info("version_index_last: {}".format(version_index_last))
# create name which will be used on timeline clip
name = '_'.join([repr['asset'], repr['subset']])
# log.info("___related_repr: {}".format(related_repr))
# assign data for the clip representation
version = ppl.io.find_one(
{'_id': related_repr[version_index_last]['parent']})
log.info("version: {}".format(version))
# fix path workaround
if '.#####.mxf' in related_repr[version_index_last]['data']['path']:
related_repr[version_index_last]['data']['path'] = related_repr[version_index_last]['data']['path'].replace(
'.#####.mxf', '.mxf')
related_repr[version_index_last]['version'] = version
related_repr[version_index_last]['parentClip'] = repr['parentClip']
data[name] = related_repr[version_index_last]
return data
@pico.expose()
def publish(send_json_path, get_json_path, gui):
"""
Runs standalone pyblish and adds a link to
data in an external json file.
It is necessary to run `register_plugin_path` if a particular
host is needed.
Args:
send_json_path (string): path to temp json file with
sending context data
get_json_path (string): path to temp json file with
returning context data
Returns:
dict: get_json_path
Raises:
Exception: description
"""
log.info("avalon.session is: \n{}".format(ppl.SESSION))
log.info("PUBLISH_PATH: \n{}".format(os.environ["PUBLISH_PATH"]))
pype_start = os.path.join(os.getenv('PYPE_SETUP_ROOT'),
"app", "pype-start.py")
args = [pype_start,
"--root", os.environ['AVALON_PROJECTS'], "--publish-gui",
"-pp", os.environ["PUBLISH_PATH"],
"-d", "rqst_json_data_path", send_json_path,
"-d", "post_json_data_path", get_json_path
]
log.debug(args)
log.info("_aport.api Variable `AVALON_PROJECTS` had changed to `{0}`.".format(
os.environ['AVALON_PROJECTS']))
forward([
sys.executable, "-u"
] + args,
# cwd=cwd
)
return {"get_json_path": get_json_path}
@pico.expose()
def context(project, asset, task, app='aport'):
os.environ["AVALON_PROJECT"] = ppl.AVALON_PROJECT = project
os.environ["AVALON_ASSET"] = ppl.AVALON_ASSET = asset
os.environ["AVALON_TASK"] = ppl.AVALON_TASK = task
os.environ["AVALON_SILO"] = ppl.AVALON_SILO = ''
ppl.get_session()
# log.info('ppl.SESSION: {}'.format(ppl.SESSION))
# http://localhost:4242/pipeline/context?project=this&asset=shot01&task=comp
ppl.update_current_task(task, asset, app)
project_code = ppl.io.find_one({"type": "project"})["data"].get("code", '')
os.environ["AVALON_PROJECTCODE"] = \
ppl.SESSION["AVALON_PROJECTCODE"] = project_code
parents = ppl.io.find_one({"type": 'asset',
"name": ppl.AVALON_ASSET})['data']['parents']
if parents and len(parents) > 0:
# hierarchy = os.path.sep.join(hierarchy)
hierarchy = os.path.join(*parents).replace("\\", "/")
os.environ["AVALON_HIERARCHY"] = \
ppl.SESSION["AVALON_HIERARCHY"] = hierarchy
fix_paths = {k: v.replace("\\", "/") for k, v in ppl.SESSION.items()
if isinstance(v, str)}
ppl.SESSION.update(fix_paths)
ppl.SESSION.update({"AVALON_HIERARCHY": hierarchy,
"AVALON_PROJECTCODE": project_code,
"current_dir": os.getcwd().replace("\\", "/")
})
return ppl.SESSION
@pico.expose()
def anatomy_fill(data):
from pype import api as pype
pype.load_data_from_templates()
anatomy = pype.Anatomy
return anatomy.format(data)
@pico.expose()
def deregister_plugin_path():
if os.getenv("PUBLISH_PATH", None):
aport_plugin_path = os.pathsep.join(
[p.replace("\\", "/")
for p in os.environ["PUBLISH_PATH"].split(os.pathsep)
if "aport" in p or
"ftrack" in p])
os.environ["PUBLISH_PATH"] = aport_plugin_path
else:
log.warning("deregister_plugin_path(): No PUBLISH_PATH is registred")
return "Publish path deregistered"
@pico.expose()
def register_plugin_path(publish_path):
deregister_plugin_path()
if os.getenv("PUBLISH_PATH", None):
os.environ["PUBLISH_PATH"] = os.pathsep.join(
os.environ["PUBLISH_PATH"].split(os.pathsep)
+ [publish_path.replace("\\", "/")]
)
else:
os.environ["PUBLISH_PATH"] = publish_path
log.info(os.environ["PUBLISH_PATH"].split(os.pathsep))
return "Publish registered paths: {}".format(
os.environ["PUBLISH_PATH"].split(os.pathsep)
)
app = PicoApp()
app.register_module(__name__)
# remove all Handlers created by pico
for name, handler in [(handler.get_name(), handler)
for handler in Logger.logging.root.handlers[:]]:
if "pype" not in str(name).lower():
Logger.logging.root.removeHandler(handler)
# SPLASH.hide_splash()
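For context on the deleted integration: it exposed its functions over HTTP with the pico framework, where any function decorated with @pico.expose() in a module registered on a PicoApp becomes an endpoint. A tiny hedged sketch of that pattern (the function name and return value are made up):
import pico
from pico import PicoApp

@pico.expose()
def ping(name="aport"):
    # reachable roughly as /<module_name>/ping?name=...
    return {"message": "pong from {}".format(name)}

app = PicoApp()
app.register_module(__name__)
# the deleted code served such a module with: python -m pico.server <module_name>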

View file

@ -1,26 +0,0 @@
import pype.api as pype
def get_anatomy(**kwarg):
return pype.Anatomy
def format_anatomy(data):
from .templates import (
get_anatomy
)
file = script_name()
anatomy = get_anatomy()
# TODO: perhaps should be in try!
padding = anatomy.render.padding
data.update({
"hierarchy": pype.get_hierarchy(),
"frame": "#" * padding,
"VERSION": pype.get_version_from_workfile(file)
})
# log.info("format_anatomy:anatomy: {}".format(anatomy))
return anatomy.format(data)

View file

@ -1,130 +0,0 @@
import sys
import os
import getpass
from app.api import Logger
from io_nonsingleton import DbConnector
io = DbConnector()
log = Logger.getLogger(__name__, "aport")
self = sys.modules[__name__]
self.SESSION = None
self._registered_root = {"_": ""}
self.AVALON_PROJECT = os.getenv("AVALON_PROJECT", None)
self.AVALON_ASSET = os.getenv("AVALON_ASSET", None)
self.AVALON_TASK = os.getenv("AVALON_TASK", None)
self.AVALON_SILO = os.getenv("AVALON_SILO", None)
def get_session():
if not self.SESSION:
io.install()
self.SESSION = io.Session
# for k, v in os.environ.items():
# if 'AVALON' in k:
# print(str((k, v)))
return self.SESSION
def update_current_task(task=None, asset=None, app=None):
"""Update active Session to a new task work area.
This updates the live Session to a different `asset`, `task` or `app`.
Args:
task (str): The task to set.
asset (str): The asset to set.
app (str): The app to set.
Returns:
dict: The changed key, values in the current Session.
"""
mapping = {
"AVALON_ASSET": asset,
"AVALON_TASK": task,
"AVALON_APP": app,
}
changed = {key: value for key, value in mapping.items() if value}
if not changed:
return
# Update silo when asset changed
if "AVALON_ASSET" in changed:
asset_document = io.find_one({"name": changed["AVALON_ASSET"],
"type": "asset"})
assert asset_document, "Asset must exist"
silo = asset_document["silo"]
if silo is None:
silo = asset_document["name"]
changed["AVALON_SILO"] = silo
parents = asset_document['data']['parents']
hierarchy = ""
if len(parents) > 0:
hierarchy = os.path.sep.join(parents)
changed['AVALON_HIERARCHY'] = hierarchy
# Compute work directory (with the temporary changed session so far)
project = io.find_one({"type": "project"},
projection={"config.template.work": True})
template = project["config"]["template"]["work"]
_session = self.SESSION.copy()
_session.update(changed)
changed["AVALON_WORKDIR"] = _format_work_template(template, _session)
# Update the full session in one go to avoid half updates
self.SESSION.update(changed)
# Update the environment
os.environ.update(changed)
return changed
def _format_work_template(template, session=None):
"""Return a formatted configuration template with a Session.
Note: This *cannot* format the templates for published files since the
session does not hold the context for a published file. Instead use
`get_representation_path` to parse the full path to a published file.
Args:
template (str): The template to format.
session (dict, Optional): The Session to use. If not provided use the
currently active global Session.
Returns:
str: The fully formatted path.
"""
if session is None:
session = self.SESSION
project = io.find_one({'type': 'project'})
return template.format(**{
"root": registered_root(),
"project": {
"name": project.get("name", session["AVALON_PROJECT"]),
"code": project["data"].get("code", ''),
},
"silo": session["AVALON_SILO"],
"hierarchy": session['AVALON_HIERARCHY'],
"asset": session["AVALON_ASSET"],
"task": session["AVALON_TASK"],
"app": session["AVALON_APP"],
"user": session.get("AVALON_USER", getpass.getuser())
})
def registered_root():
"""Return currently registered root"""
return os.path.normpath(
self._registered_root["_"]
or self.SESSION.get("AVALON_PROJECTS") or ""
)
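The deleted `_format_work_template` simply fills the project's work template with Session keys. A worked illustration of that formatting with a made-up template and session values (only the project and asset names are borrowed from examples elsewhere in this diff):
template = "{root}/{project[name]}/{hierarchy}/{asset}/work/{task}/{app}"
session = {
    "root": "/mnt/projects",
    "project": {"name": "jakub_projectx", "code": "jkprx"},
    "hierarchy": "thisFolder/e09/s031",
    "asset": "e09s031_0040",
    "task": "compositing",
    "app": "premiere",
}
workdir = template.format(**session)
# -> /mnt/projects/jakub_projectx/thisFolder/e09/s031/e09s031_0040/work/compositing/premiere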

File diff suppressed because it is too large.

View file

@ -1,149 +0,0 @@
<!DOCTYPE html>
<html>
<head>
<meta name="viewport" content="width=device-width, initial-scale=1">
<title>Pype extension</title>
<!-- Load the pico Javascript client, always automatically available at /pico.js -->
<script src="/pico.js"></script>
<!-- Or load our module proxy -->
<script src="/api.js"></script>
<script>
if (typeof module === 'object') {
window.module = module;
module = undefined;
}
</script>
<script src="./build.js"></script>
<script>
if (window.module) module = window.module;
</script>
<!-- <link rel="stylesheet" href="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.6/css/bootstrap.min.css" integrity="sha384-1q8mTJOASx8j1Au+a5WDVnPi2lkFfwwEAa8hDDdjZlpLegxhjVME1fgjWPGmkzs7" crossorigin="anonymous">
<link rel="stylesheet" href="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.6/css/bootstrap-theme.min.css" integrity="sha384-fLW2N01lMqjakBkx3l/M9EahuwpSfeNvV63J5ezn3uZzapT0u7EYsXMjQV+0En5r" crossorigin="anonymous">
<link rel="stylesheet" href="//cdnjs.cloudflare.com/ajax/libs/highlight.js/9.6.0/styles/default.min.css">
<script src="//cdnjs.cloudflare.com/ajax/libs/highlight.js/9.6.0/highlight.min.js"></script>
<script></script> -->
<style type="text/css">
html,
body {
height: 100%;
margin: 0px;
padding: 0px;
}
div {
padding: 5px;
}
#container {
height: 100%;
}
#header {
height: 5%;
}
#main {
height: 70%;
}
#output {
background-color: #333;
color: #aaa;
min-height: 15%;
overflow-y: scroll;
padding: 20px;
position: fixed;
bottom: 0px;
width: 100%;
}
.error {
color: #f00 !important;
}
#examples li {
padding: 10px;
margin: 10px;
background-color: silver;
}
code {
border-radius: 0;
margin: 5px;
white-space: pre !important;
}
#source {
height: 100%;
}
#examples {
height: 100%;
}
#spacer {
height: 20%;
}
.highlight {
background-color: yellow;
}
</style>
</head>
<body onresize="resizePanel()">
<a href="javascript:history.go(0)">Refresh panel</a>
<div id="container">
<div class="row row-eq-height" id="main">
<div class="col-md-6" id="examples">
<ol>
<li id="context">
<h4>Set context here</h4>
<pre><code class="js"></code></pre>
Project<input type="text" name="project" value="jakub_projectx" />Asset<input type="text" name="asset" value="shot01" />task<input type="text" name="task" value="compositing" />app<input type="text" name="app" value="premiera" />
<button class="btn btn-default btn-sm" type="button" onclick="context()">Set context</button>
</li>
<li id="deregister">
<h4>deregister_plugin_path</h4>
<pre><code class="js"></code></pre>
<button class="btn btn-default btn-sm" type="button" onclick="deregister()">Deregister</button>
</li>
<li id="register">
<h4>register_plugin_path</h4>
<pre><code class="js"></code></pre>
Path: <input type="text" name="path" value="C:/Users/hubertCODE/pype-setup/repos/pype-config/pype/plugins/premiere/publish" />
<button class="btn btn-default btn-sm" type="button" onclick="register()">Register path</button>
</li>
<li id="publish">
<h4>Publish</h4>
<pre><code class="js"></code></pre>
Json path: <input type="text" name="path" value="C:/Users/hubert/CODE/pype-setup/repos/pype-config/pype/premiere/example_publish_reqst.json" />
Gui<input type="checkbox" name="gui" value="True" checked>
<button class="btn btn-default btn-sm" type="button" onclick="publish()">Publish</button>
</li>
</ol>
<div id="spacer">
</div>
</div>
<div class="col-md-6" id="source">
<!-- <pre>
<code class="python"></code>
</pre> -->
</div>
</div>
<div class="row" id="output">
</div>
</div>
<script src="script.js"></script>
</body>
</html>

View file

@ -1,214 +0,0 @@
var api = pico.importModule('api');
var output = document.getElementById('output');
function querySelector(parent){
return function(child){
return document.querySelector(parent).querySelector(child)
};
}
var defs = {}
function jumpTo(name){
var e = defs[name];
document.querySelectorAll('.highlight').forEach(function(el){
el.classList.remove('highlight');
});
e.classList.add('highlight');
return false;
}
function displayResult(r){
output.classList.remove("error");
output.innerText = JSON.stringify(r);
}
function displayError(e){
output.classList.add("error");
output.innerText = e.message;
}
function unindent(code){
var lines = code.split('\n');
var margin = -1;
for(var j=0; j < lines.length; j++){
var l = lines[j];
for(i=0; i < l.length; i++){
if(l[i] != " "){
margin = i;
break;
}
}
if(margin > -1){
break;
}
}
lines = lines.slice(j);
return lines.map(function(s){ return s.substr(margin)}).join('\n');
}
function deregister(){
var $ = querySelector("#deregister");
api.deregister_plugin_path().then(displayResult);
}
function register(){
var $ = querySelector("#register");
var path = $("input[name=path]").value;
api.register_plugin_path(path).then(displayResult);
}
function publish(){
var $ = querySelector("#publish");
var path = $("input[name=path]").value;
var gui = $("input[name=gui]").checked;
api.publish(path, gui).then(displayResult);
}
function context(){
var $ = querySelector("#context");
var project = $("input[name=project]").value;
var asset = $("input[name=asset]").value;
var task = $("input[name=task]").value;
var app = $("input[name=app]").value;
api.context(project,asset,task,app).then(displayResult);
}
//
// function example1(){
// var $ = querySelector("#example1");
// var name = $("input[name=name]").value;
// api.hello(name).then(displayResult);
// }
//
//
// function example2(){
// var $ = querySelector("#example2");
// var x = $("input[name=x]").valueAsNumber;
// var y = $("#example2 input[name=y]").valueAsNumber;
// api.multiply(x, y).then(displayResult);
// }
//
// function example3(){
// var $ = querySelector("#example3");
// var file = $("input[name=upload]").files[0];
// api.upload(file, file.name).then(displayResult).catch(displayError);
// }
//
// function example4(){
// var $ = querySelector("#example4");
// api.my_ip().then(displayResult)
// }
//
// function example5(){
// var $ = querySelector("#example5");
// var username = $("input[name=username]").value;
// var password = $("input[name=password]").value;
// pico.setAuthentication(api, username, password);
// api.current_user().then(displayResult).catch(displayError);
// pico.clearAuthentication(api);
// }
//
// function example6(){
// var $ = querySelector("#example6");
// api.start_session().then(function(){
// api.session_id().then(displayResult).then(function(){
// api.end_session();
// })
// })
// }
//
// function example7(){
// var $ = querySelector("#example7");
// var session_id = "4242";
// pico.setRequestHook(api, 'session', function(req) {
// req.headers.set('X-SESSION-ID', session_id)
// })
// api.session_id2().then(displayResult)
// pico.clearRequestHook(api, 'session');
// }
//
// function example8(){
// var $ = querySelector("#example8");
// api.countdown(10).each(displayResult).then(function(){
// displayResult("Boom!");
// });
// }
//
// function example9(){
// var $ = querySelector("#example9");
// var user = {
// name: "Bob",
// age: 30,
// occupation: "Software Engineer",
// }
// api.user_description(user).then(displayResult);
// }
//
// function example10(){
// var $ = querySelector("#example10");
// api.fail().then(displayResult).catch(displayError);
// }
//
// function example11(){
// var $ = querySelector("#example11");
// api.make_coffee().then(displayResult).catch(displayError);
// }
//
//
// function example12(){
// var $ = querySelector("#example12");
// var form = $("form");
// api.multiply.submitFormData(new FormData(form)).then(displayResult).catch(displayError);
// }
//
// function example13(){
// var $ = querySelector("#example13");
// var data = {
// x: 6,
// y: 7,
// }
// api.multiply.submitJSON(data).then(displayResult).catch(displayError);
// }
// api.show_source().then(function(s){
// document.querySelector('#source code').innerText = s;
// }).then(ready);
function ready(){
// // set the <code> element of each example to the corresponding functions source
// document.querySelectorAll('li pre code.js').forEach(function(e){
// var id = e.parentElement.parentElement.id;
// var f = window[id];
// var code = f.toString().split('\n').slice(2, -1).join('\n');
// e.innerText = unindent(code);
// })
document.querySelectorAll('li pre code.html').forEach(function(e){
var html = e.parentElement.parentElement.querySelector('div.example').innerHTML;
e.innerText = unindent(html);
})
hljs.initHighlighting();
// // find all the elements representing the function definitions in the python source
// document.querySelectorAll('.python .hljs-function .hljs-title').forEach(function(e){
// var a = document.createElement('a');
// a.name = e.innerText;
// e.parentElement.insertBefore(a, e)
// return defs[e.innerText] = e.parentElement;
// });
// convert all 'api.X' strings to hyperlinks to jump to python source
document.querySelectorAll('.js').forEach(function(e){
var code = e.innerHTML;
Object.keys(defs).forEach(function(k){
code = code.replace('api.' + k + '(', '<a href="#' + k + '" onclick="jumpTo(\'' + k + '\')">api.' + k + '</a>(');
})
e.innerHTML = code;
})
}

View file

@ -1,41 +0,0 @@
from pype import api as pype
log = pype.Logger.getLogger(__name__, "aport")
def get_anatomy(**kwarg):
return pype.Anatomy
def get_dataflow(**kwarg):
log.info(kwarg)
host = kwarg.get("host", "aport")
cls = kwarg.get("class", None)
preset = kwarg.get("preset", None)
assert any([host, cls]), log.error("aport.templates.get_dataflow():"
"Missing mandatory kwargs `host`, `cls`")
aport_dataflow = getattr(pype.Dataflow, str(host), None)
aport_dataflow_node = getattr(aport_dataflow.nodes, str(cls), None)
if preset:
aport_dataflow_node = getattr(aport_dataflow_node, str(preset), None)
log.info("Dataflow: {}".format(aport_dataflow_node))
return aport_dataflow_node
def get_colorspace(**kwarg):
log.info(kwarg)
host = kwarg.get("host", "aport")
cls = kwarg.get("class", None)
preset = kwarg.get("preset", None)
assert any([host, cls]), log.error("aport.templates.get_colorspace():"
"Missing mandatory kwargs `host`, `cls`")
aport_colorspace = getattr(pype.Colorspace, str(host), None)
aport_colorspace_node = getattr(aport_colorspace, str(cls), None)
if preset:
aport_colorspace_node = getattr(aport_colorspace_node, str(preset), None)
log.info("Colorspace: {}".format(aport_colorspace_node))
return aport_colorspace_node

View file

@ -23,7 +23,9 @@ class AvalonRestApi(RestApi):
if not project_name:
output = {}
for project_name in self.dbcon.tables():
project = self.dbcon[project_name].find_one({"type": "project"})
project = self.dbcon[project_name].find_one({
"type": "project"
})
output[project_name] = project
return CallbackResult(data=self.result_to_json(output))
@ -44,7 +46,7 @@ class AvalonRestApi(RestApi):
if not self.dbcon.exist_table(_project_name):
abort(404, "Project \"{}\" was not found in database".format(
project_name
_project_name
))
if not _asset:
@ -65,9 +67,27 @@ class AvalonRestApi(RestApi):
return asset
abort(404, "Asset \"{}\" with {} was not found in project {}".format(
_asset, identificator, project_name
_asset, identificator, _project_name
))
@RestApi.route("/publish/<asset_name>",
url_prefix="/premiere", methods="GET")
def publish(self, request):
"""
http://localhost:8021/premiere/publish/shot021?json_in=this/path/file_in.json&json_out=this/path/file_out.json
"""
asset_name = request.url_data["asset_name"]
query = request.query
data = request.request_data
output = {
"message": "Got your data. Thanks.",
"your_data": data,
"your_query": query,
"your_asset_is": asset_name
}
return CallbackResult(data=self.result_to_json(output))
def result_to_json(self, result):
""" Converts result of MongoDB query to dict without $oid (ObjectId)
keys with help of regex matching.
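The new /premiere/publish/<asset_name> route simply echoes the received query and body back to the caller. A hedged call sketch based on the example URL in its docstring (host, port and the json_in/json_out values are taken from that example and are illustrative):
import requests

response = requests.get(
    "http://localhost:8021/premiere/publish/shot021",
    params={
        "json_in": "this/path/file_in.json",
        "json_out": "this/path/file_out.json",
    },
)
print(response.json())  # e.g. {"message": "Got your data. Thanks.", ...}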

View file

@ -0,0 +1,107 @@
import os
import collections
import ftrack_api
from pype.ftrack import BaseAction
from pype.ftrack.lib.avalon_sync import get_avalon_attr
class CleanHierarchicalAttrsAction(BaseAction):
identifier = "clean.hierarchical.attr"
label = "Pype Admin"
variant = "- Clean hierarchical custom attributes"
description = "Unset empty hierarchical attribute values."
role_list = ["Pypeclub", "Administrator", "Project Manager"]
icon = "{}/ftrack/action_icons/PypeAdmin.svg".format(
os.environ.get("PYPE_STATICS_SERVER", "")
)
all_project_entities_query = (
"select id, name, parent_id, link"
" from TypedContext where project_id is \"{}\""
)
cust_attr_query = (
"select value, entity_id from CustomAttributeValue "
"where entity_id in ({}) and configuration_id is \"{}\""
)
def discover(self, session, entities, event):
"""Show only on project entity."""
if len(entities) == 1 and entities[0].entity_type.lower() == "project":
return True
return False
def launch(self, session, entities, event):
project = entities[0]
user_message = "This may take some time"
self.show_message(event, user_message, result=True)
self.log.debug("Preparing entities for cleanup.")
all_entities = session.query(
self.all_project_entities_query.format(project["id"])
).all()
all_entities_ids = [
"\"{}\"".format(entity["id"])
for entity in all_entities
if entity.entity_type.lower() != "task"
]
self.log.debug(
"Collected {} entities to process.".format(len(all_entities_ids))
)
entity_ids_joined = ", ".join(all_entities_ids)
attrs, hier_attrs = get_avalon_attr(session)
for attr in hier_attrs:
configuration_key = attr["key"]
self.log.debug(
"Looking for cleanup of custom attribute \"{}\"".format(
configuration_key
)
)
configuration_id = attr["id"]
call_expr = [{
"action": "query",
"expression": self.cust_attr_query.format(
entity_ids_joined, configuration_id
)
}]
[values] = self.session.call(call_expr)
data = {}
for item in values["data"]:
value = item["value"]
if value is None:
data[item["entity_id"]] = value
if not data:
self.log.debug(
"Nothing to clean for \"{}\".".format(configuration_key)
)
continue
self.log.debug("Cleaning up {} values for \"{}\".".format(
len(data), configuration_key
))
for entity_id, value in data.items():
entity_key = collections.OrderedDict({
"configuration_id": configuration_id,
"entity_id": entity_id
})
session.recorded_operations.push(
ftrack_api.operation.DeleteEntityOperation(
"CustomAttributeValue",
entity_key
)
)
session.commit()
return True
def register(session, plugins_presets={}):
'''Register plugin. Called when used as a plugin.'''
CleanHierarchicalAttrsAction(session, plugins_presets).register()

View file

@ -49,27 +49,23 @@ class DeleteAssetSubset(BaseAction):
def _launch(self, event):
try:
args = self._translate_event(
self.session, event
)
entities = self._translate_event(event)
if "values" not in event["data"]:
self.dbcon.install()
return self._interface(self.session, *args)
return self._interface(self.session, entities, event)
confirmation = self.confirm_delete(*args)
confirmation = self.confirm_delete(entities, event)
if confirmation:
return confirmation
self.dbcon.install()
response = self.launch(
self.session, *args
self.session, entities, event
)
finally:
self.dbcon.uninstall()
return self._handle_result(
self.session, response, *args
)
return self._handle_result(response)
def interface(self, session, entities, event):
self.show_message(event, "Preparing data...", True)

View file

@ -55,6 +55,8 @@ class SeedDebugProject(BaseAction):
# Define how many shots will be created for each sequence
default_shots_count = 10
max_entities_created_at_one_commit = 50
existing_projects = None
new_project_item = "< New Project >"
current_project_item = "< Current Project >"
@ -284,21 +286,28 @@ class SeedDebugProject(BaseAction):
int(asset_count / available_assets) +
(asset_count % available_assets > 0)
)
created_assets = 0
index = 0
created_entities = 0
to_create_length = asset_count + (asset_count * len(self.asset_tasks))
for _asset_name in self.assets:
if created_assets >= asset_count:
if created_entities >= to_create_length:
break
for asset_num in range(1, repetitive_times + 1):
if created_assets >= asset_count:
if created_entities >= asset_count:
break
asset_name = "%s_%02d" % (_asset_name, asset_num)
asset = self.session.create("AssetBuild", {
"name": asset_name,
"parent": main_entity
})
created_assets += 1
self.log.debug("- Assets/{}".format(asset_name))
created_entities += 1
index += 1
if self.temp_commit(index, created_entities, to_create_length):
index = 0
for task_name in self.asset_tasks:
self.session.create("Task", {
"name": task_name,
@ -309,7 +318,17 @@ class SeedDebugProject(BaseAction):
asset_name, task_name
))
created_entities += 1
index += 1
if self.temp_commit(
index, created_entities, to_create_length
):
index = 0
self.log.debug("*** Commiting Assets")
self.log.debug("Commiting entities. {}/{}".format(
created_entities, to_create_length
))
self.session.commit()
def create_shots(self, project, seq_count, shots_count):
@ -345,7 +364,14 @@ class SeedDebugProject(BaseAction):
})
self.log.debug("- Shots")
for seq_num in range(1, seq_count+1):
index = 0
created_entities = 0
to_create_length = (
seq_count
+ (seq_count * shots_count)
+ (seq_count * shots_count * len(self.shot_tasks))
)
for seq_num in range(1, seq_count + 1):
seq_name = "sq%03d" % seq_num
seq = self.session.create("Sequence", {
"name": seq_name,
@ -353,14 +379,24 @@ class SeedDebugProject(BaseAction):
})
self.log.debug("- Shots/{}".format(seq_name))
for shot_num in range(1, shots_count+1):
shot_name = "%ssh%04d" % (seq_name, (shot_num*10))
created_entities += 1
index += 1
if self.temp_commit(index, created_entities, to_create_length):
index = 0
for shot_num in range(1, shots_count + 1):
shot_name = "%ssh%04d" % (seq_name, (shot_num * 10))
shot = self.session.create("Shot", {
"name": shot_name,
"parent": seq
})
self.log.debug("- Shots/{}/{}".format(seq_name, shot_name))
created_entities += 1
index += 1
if self.temp_commit(index, created_entities, to_create_length):
index = 0
for task_name in self.shot_tasks:
self.session.create("Task", {
"name": task_name,
@ -371,9 +407,27 @@ class SeedDebugProject(BaseAction):
seq_name, shot_name, task_name
))
created_entities += 1
index += 1
if self.temp_commit(
index, created_entities, to_create_length
):
index = 0
self.log.debug("*** Commiting Shots")
self.log.debug("Commiting entities. {}/{}".format(
created_entities, to_create_length
))
self.session.commit()
def temp_commit(self, index, created_entities, to_create_length):
if index < self.max_entities_created_at_one_commit:
return False
self.log.debug("Commiting {} entities. {}/{}".format(
index, created_entities, to_create_length
))
self.session.commit()
return True
def register(session, plugins_presets={}):
'''Register plugin. Called when used as a plugin.'''
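The reworked seeding loops above count every created entity and commit the session as soon as max_entities_created_at_one_commit creations have accumulated, rather than committing everything at the end. A stripped-down sketch of that batching pattern (session and create_one are placeholders for an ftrack session and an entity factory):
def create_in_batches(session, create_one, total, batch_size=50):
    # Create `total` entities, committing after every `batch_size` creations.
    pending = 0
    for index in range(total):
        create_one(index)
        pending += 1
        if pending >= batch_size:
            session.commit()  # flush the current batch
            pending = 0
    if pending:
        session.commit()  # commit the remainder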

View file

@ -5,13 +5,11 @@ import json
from bson.objectid import ObjectId
from pype.ftrack import BaseAction
from pype.ftrack.lib import (
get_project_from_entity,
get_avalon_entities_for_assetversion
)
from pypeapp import Anatomy
from pype.ftrack.lib.io_nonsingleton import DbConnector
from pype.ftrack.lib.avalon_sync import CustAttrIdKey
class StoreThumbnailsToAvalon(BaseAction):
# Action identifier
@ -189,7 +187,7 @@ class StoreThumbnailsToAvalon(BaseAction):
).format(entity["id"]))
continue
avalon_ents_result = get_avalon_entities_for_assetversion(
avalon_ents_result = self.get_avalon_entities_for_assetversion(
entity, self.db_con
)
version_full_path = (
@ -348,6 +346,119 @@ class StoreThumbnailsToAvalon(BaseAction):
file_open.close()
return True
def get_avalon_entities_for_assetversion(self, asset_version, db_con):
output = {
"success": True,
"message": None,
"project": None,
"project_name": None,
"asset": None,
"asset_name": None,
"asset_path": None,
"subset": None,
"subset_name": None,
"version": None,
"version_name": None,
"representations": None
}
db_con.install()
ft_asset = asset_version["asset"]
subset_name = ft_asset["name"]
version = asset_version["version"]
parent = ft_asset["parent"]
ent_path = "/".join(
[ent["name"] for ent in parent["link"]]
)
project = self.get_project_from_entity(asset_version)
project_name = project["full_name"]
output["project_name"] = project_name
output["asset_name"] = parent["name"]
output["asset_path"] = ent_path
output["subset_name"] = subset_name
output["version_name"] = version
db_con.Session["AVALON_PROJECT"] = project_name
avalon_project = db_con.find_one({"type": "project"})
output["project"] = avalon_project
if not avalon_project:
output["success"] = False
output["message"] = (
"Project not synchronized to avalon `{}`".format(project_name)
)
return output
asset_ent = None
asset_mongo_id = parent["custom_attributes"].get(CustAttrIdKey)
if asset_mongo_id:
try:
asset_mongo_id = ObjectId(asset_mongo_id)
asset_ent = db_con.find_one({
"type": "asset",
"_id": asset_mongo_id
})
except Exception:
pass
if not asset_ent:
asset_ent = db_con.find_one({
"type": "asset",
"data.ftrackId": parent["id"]
})
output["asset"] = asset_ent
if not asset_ent:
output["success"] = False
output["message"] = (
"Not synchronized entity to avalon `{}`".format(ent_path)
)
return output
asset_mongo_id = asset_ent["_id"]
subset_ent = db_con.find_one({
"type": "subset",
"parent": asset_mongo_id,
"name": subset_name
})
output["subset"] = subset_ent
if not subset_ent:
output["success"] = False
output["message"] = (
"Subset `{}` does not exist under Asset `{}`"
).format(subset_name, ent_path)
return output
version_ent = db_con.find_one({
"type": "version",
"name": version,
"parent": subset_ent["_id"]
})
output["version"] = version_ent
if not version_ent:
output["success"] = False
output["message"] = (
"Version `{}` does not exist under Subset `{}` | Asset `{}`"
).format(version, subset_name, ent_path)
return output
repre_ents = list(db_con.find({
"type": "representation",
"parent": version_ent["_id"]
}))
output["representations"] = repre_ents
return output
def register(session, plugins_presets={}):
StoreThumbnailsToAvalon(session, plugins_presets).register()
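The method moved onto the action returns a dict whose "success" flag and "message" describe how far the ftrack AssetVersion could be resolved against avalon. A hedged usage sketch from inside the action's launch (asset_version stands for an ftrack AssetVersion entity):
result = self.get_avalon_entities_for_assetversion(asset_version, self.db_con)
if not result["success"]:
    self.log.warning(result["message"])
else:
    version_doc = result["version"]          # avalon version document
    repre_docs = result["representations"]   # list of representation documents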

View file

@ -1,11 +1,15 @@
from . import avalon_sync
from . import credentials
from .ftrack_app_handler import *
from .ftrack_event_handler import *
from .ftrack_action_handler import *
from .ftrack_base_handler import *
from .ftrack_base_handler import BaseHandler
from .ftrack_event_handler import BaseEvent
from .ftrack_action_handler import BaseAction
from .ftrack_app_handler import AppAction
from .lib import (
get_project_from_entity,
get_avalon_entities_for_assetversion
)
__all__ = [
"avalon_sync",
"credentials",
"BaseHandler",
"BaseEvent",
"BaseAction",
"AppAction"
]

View file

@ -23,17 +23,13 @@ class BaseAction(BaseHandler):
def __init__(self, session, plugins_presets={}):
'''Expects a ftrack_api.Session instance'''
super().__init__(session, plugins_presets)
if self.label is None:
raise ValueError(
'Action missing label.'
)
raise ValueError('Action missing label.')
elif self.identifier is None:
raise ValueError(
'Action missing identifier.'
)
if self.identifier is None:
raise ValueError('Action missing identifier.')
super().__init__(session, plugins_presets)
def register(self):
'''
@ -61,66 +57,131 @@ class BaseAction(BaseHandler):
self._launch
)
def _launch(self, event):
args = self._translate_event(
self.session, event
def _discover(self, event):
entities = self._translate_event(event)
accepts = self.discover(self.session, entities, event)
if not accepts:
return
self.log.debug(u'Discovering action with selection: {0}'.format(
event['data'].get('selection', [])
))
return {
'items': [{
'label': self.label,
'variant': self.variant,
'description': self.description,
'actionIdentifier': self.identifier,
'icon': self.icon,
}]
}
def discover(self, session, entities, event):
'''Return true if we can handle the selected entities.
*session* is a `ftrack_api.Session` instance
*entities* is a list of tuples each containing the entity type and the
entity id. If the entity is hierarchical you will always get the
entity type TypedContext; once retrieved through a get operation you
will have the "real" entity type, i.e. Shot, Sequence
or Asset Build.
*event* the unmodified original event
'''
return False
def _interface(self, session, entities, event):
interface = self.interface(session, entities, event)
if not interface:
return
if isinstance(interface, (tuple, list)):
return {"items": interface}
if isinstance(interface, dict):
if (
"items" in interface
or ("success" in interface and "message" in interface)
):
return interface
raise ValueError((
"Invalid interface output expected key: \"items\" or keys:"
" \"success\" and \"message\". Got: \"{}\""
).format(str(interface)))
raise ValueError(
"Invalid interface output type \"{}\"".format(
str(type(interface))
)
)
def interface(self, session, entities, event):
'''Return an interface if applicable or None
*session* is a `ftrack_api.Session` instance
*entities* is a list of tuples each containing the entity type and
the entity id. If the entity is hierarchical you will always get the
entity type TypedContext; once retrieved through a get operation you
will have the "real" entity type, i.e. Shot, Sequence
or Asset Build.
*event* the unmodified original event
'''
return None
def _launch(self, event):
entities = self._translate_event(event)
preactions_launched = self._handle_preactions(self.session, event)
if preactions_launched is False:
return
interface = self._interface(
self.session, *args
self.session, entities, event
)
if interface:
return interface
response = self.launch(
self.session, *args
self.session, entities, event
)
return self._handle_result(
self.session, response, *args
)
return self._handle_result(response)
def _handle_result(self, session, result, entities, event):
def _handle_result(self, result):
'''Validate the returned result from the action callback'''
if isinstance(result, bool):
if result is True:
result = {
'success': result,
'message': (
'{0} launched successfully.'.format(self.label)
)
}
msg = 'Action {0} finished.'
else:
result = {
'success': result,
'message': (
'{0} launch failed.'.format(self.label)
)
}
msg = 'Action {0} failed.'
elif isinstance(result, dict):
return {
'success': result,
'message': msg.format(self.label)
}
if isinstance(result, dict):
if 'items' in result:
items = result['items']
if not isinstance(items, list):
if not isinstance(result['items'], list):
raise ValueError('Invalid items format, must be list!')
else:
for key in ('success', 'message'):
if key in result:
continue
if key not in result:
raise KeyError('Missing required key: {0}.'.format(key))
return result
raise KeyError(
'Missing required key: {0}.'.format(key)
)
else:
self.log.error(
'Invalid result type must be bool or dictionary!'
)
self.log.warning((
'Invalid result type \"{}\" must be bool or dictionary!'
).format(str(type(result))))
return result
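Taken together, _discover, _interface and _handle_result define the contract a subclass must follow: discover returns a bool, interface returns None, a list of items or a dict with "items" (or "success"/"message"), and launch returns a bool or a "success"/"message" dict. A minimal hedged subclass sketch against that contract (the identifier, label and form item are placeholders):
class HelloAction(BaseAction):
    identifier = "hello.world"
    label = "Hello World"

    def discover(self, session, entities, event):
        # offer the action only when exactly one entity is selected
        return len(entities) == 1

    def interface(self, session, entities, event):
        if event["data"].get("values"):
            return None  # the form was already submitted, continue to launch
        return [{"type": "label", "value": "Say hello?"}]

    def launch(self, session, entities, event):
        return {"success": True, "message": "Hello from {}".format(self.label)}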

View file

@ -2,43 +2,35 @@ import os
import sys
import copy
import platform
from avalon import lib as avalonlib
import avalon.lib
import acre
from pype import lib as pypelib
from pypeapp import config
from .ftrack_base_handler import BaseHandler
from .ftrack_action_handler import BaseAction
from pypeapp import Anatomy
class AppAction(BaseHandler):
'''Custom Action base class
class AppAction(BaseAction):
"""Application Action class.
<label> - a descriptive string identifying your action.
<variant> - To group actions together, give them the same
label and specify a unique variant per action.
<identifier> - a unique identifier for the app.
<description> - a verbose descriptive text for your action
<icon> - icon in ftrack
'''
Args:
session (ftrack_api.Session): Session where action will be registered.
label (str): A descriptive string identifying your action.
variant (str, optional): To group actions together, give them the same
label and specify a unique variant per action.
identifier (str): A unique identifier for the app.
description (str): A verbose descriptive text for your action.
icon (str): Url path to icon which will be shown in Ftrack web.
"""
type = 'Application'
preactions = ['start.timer']
type = "Application"
preactions = ["start.timer"]
def __init__(
self, session, label, name, executable, variant=None,
icon=None, description=None, preactions=[], plugins_presets={}
):
super().__init__(session, plugins_presets)
'''Expects a ftrack_api.Session instance'''
if label is None:
raise ValueError('Action missing label.')
elif name is None:
raise ValueError('Action missing identifier.')
elif executable is None:
raise ValueError('Action missing executable.')
self.label = label
self.identifier = name
self.executable = executable
@ -47,11 +39,19 @@ class AppAction(BaseHandler):
self.description = description
self.preactions.extend(preactions)
super().__init__(session, plugins_presets)
if label is None:
raise ValueError("Action missing label.")
if name is None:
raise ValueError("Action missing identifier.")
if executable is None:
raise ValueError("Action missing executable.")
def register(self):
'''Registers the action, subscribing the discover and launch topics.'''
"""Registers the action, subscribing the discover and launch topics."""
discovery_subscription = (
'topic=ftrack.action.discover and source.user.username={0}'
"topic=ftrack.action.discover and source.user.username={0}"
).format(self.session.api_user)
self.session.event_hub.subscribe(
@ -61,9 +61,9 @@ class AppAction(BaseHandler):
)
launch_subscription = (
'topic=ftrack.action.launch'
' and data.actionIdentifier={0}'
' and source.user.username={1}'
"topic=ftrack.action.launch"
" and data.actionIdentifier={0}"
" and source.user.username={1}"
).format(
self.identifier,
self.session.api_user
@ -74,7 +74,61 @@ class AppAction(BaseHandler):
)
def discover(self, session, entities, event):
'''Return true if we can handle the selected entities.
"""Return true if we can handle the selected entities.
Args:
session (ftrack_api.Session): Helps to query necessary data.
entities (list): Object of selected entities.
event (ftrack_api.Event): Ftrack event causing discover callback.
"""
if (
len(entities) != 1
or entities[0].entity_type.lower() != 'task'
):
return False
entity = entities[0]
if entity["parent"].entity_type.lower() == "project":
return False
ft_project = self.get_project_from_entity(entity)
database = pypelib.get_avalon_database()
project_name = ft_project["full_name"]
avalon_project = database[project_name].find_one({
"type": "project"
})
if not avalon_project:
return False
project_apps = avalon_project["config"].get("apps", [])
apps = [app["name"] for app in project_apps]
if self.identifier in apps:
return True
return False
def _launch(self, event):
entities = self._translate_event(event)
preactions_launched = self._handle_preactions(
self.session, event
)
if preactions_launched is False:
return
response = self.launch(self.session, entities, event)
return self._handle_result(response)
def launch(self, session, entities, event):
"""Callback method for the custom action.
return either a bool (True if successful or False if the action failed)
or a dictionary with the keys `message` and `success`, the message
should be a string and will be displayed as feedback to the user,
success should be a bool, True if successful or False if the action
failed.
*session* is a `ftrack_api.Session` instance
@ -85,76 +139,7 @@ class AppAction(BaseHandler):
or Asset Build.
*event* the unmodified original event
'''
if (
len(entities) != 1
or entities[0].entity_type.lower() != 'task'
):
return False
if entities[0]['parent'].entity_type.lower() == 'project':
return False
ft_project = entities[0]['project']
database = pypelib.get_avalon_database()
project_name = ft_project['full_name']
avalon_project = database[project_name].find_one({
"type": "project"
})
if avalon_project is None:
return False
else:
apps = [app['name'] for app in avalon_project['config'].get(
'apps', []
)]
if self.identifier not in apps:
return False
return True
def _launch(self, event):
args = self._translate_event(
self.session, event
)
preactions_launched = self._handle_preactions(
self.session, event
)
if preactions_launched is False:
return
response = self.launch(
self.session, *args
)
return self._handle_result(
self.session, response, *args
)
def launch(self, session, entities, event):
'''Callback method for the custom action.
return either a bool ( True if successful or False if the action failed )
or a dictionary with the keys `message` and `success`, the message should be a
string and will be displayed as feedback to the user, success should be a bool,
True if successful or False if the action failed.
*session* is a `ftrack_api.Session` instance
*entities* is a list of tuples each containing the entity type and the entity id.
If the entity is hierarchical you will always get the entity
type TypedContext; once retrieved through a get operation you
will have the "real" entity type, i.e. Shot, Sequence
or Asset Build.
*event* the unmodified original event
'''
"""
entity = entities[0]
project_name = entity["project"]["full_name"]
@ -172,7 +157,7 @@ class AppAction(BaseHandler):
if len(asset_doc_parents) > 0:
hierarchy = os.path.join(*asset_doc_parents)
application = avalonlib.get_application(self.identifier)
application = avalon.lib.get_application(self.identifier)
data = {
"project": {
"name": entity["project"]["full_name"],
@ -270,7 +255,7 @@ class AppAction(BaseHandler):
)
}
popen = avalonlib.launch(
popen = avalon.lib.launch(
executable=execfile, args=[], environment=env
)
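AppAction is now constructed with explicit label/name/executable arguments and shows up only on Task entities whose avalon project lists the identifier among its configured apps. A hedged construction sketch (session and every concrete value are placeholders):
action = AppAction(
    session,
    label="Maya",
    name="maya_2020",
    executable="maya_2020",
    variant="2020",
    icon="https://example.com/icons/maya.png",
    description="Launch Autodesk Maya 2020",
)
action.register()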

View file

@ -192,50 +192,10 @@ class BaseHandler(object):
raise NotImplementedError()
def _discover(self, event):
items = {
'items': [{
'label': self.label,
'variant': self.variant,
'description': self.description,
'actionIdentifier': self.identifier,
'icon': self.icon,
}]
}
args = self._translate_event(
self.session, event
)
accepts = self.discover(
self.session, *args
)
if accepts is True:
self.log.debug(u'Discovering action with selection: {0}'.format(
event['data'].get('selection', [])))
return items
def discover(self, session, entities, event):
'''Return true if we can handle the selected entities.
*session* is a `ftrack_api.Session` instance
*entities* is a list of tuples each containing the entity type and the entity id.
If the entity is hierarchical you will always get the entity
type TypedContext; once retrieved through a get operation you
will have the "real" entity type, i.e. Shot, Sequence
or Asset Build.
*event* the unmodified original event
'''
return False
def _translate_event(self, session, event):
def _translate_event(self, event, session=None):
'''Return *event* translated structure to be used with the API.'''
if session is None:
session = self.session
_entities = event['data'].get('entities_object', None)
if (
@ -245,25 +205,40 @@ class BaseHandler(object):
) == ftrack_api.symbol.NOT_SET
):
_entities = self._get_entities(event)
event['data']['entities_object'] = _entities
return [
_entities,
event
]
return _entities
def _get_entities(self, event, session=None, ignore=None):
entities = []
selection = event['data'].get('selection')
if not selection:
return entities
if ignore is None:
ignore = []
elif isinstance(ignore, str):
ignore = [ignore]
filtered_selection = []
for entity in selection:
if entity['entityType'] not in ignore:
filtered_selection.append(entity)
if not filtered_selection:
return entities
def _get_entities(self, event, session=None):
if session is None:
session = self.session
session._local_cache.clear()
selection = event['data'].get('selection') or []
_entities = []
for entity in selection:
_entities.append(session.get(
for entity in filtered_selection:
entities.append(session.get(
self._get_entity_type(entity, session),
entity.get('entityId')
))
event['data']['entities_object'] = _entities
return _entities
return entities
def _get_entity_type(self, entity, session=None):
'''Return translated entity type that can be used with API.'''
@ -292,30 +267,12 @@ class BaseHandler(object):
)
def _launch(self, event):
args = self._translate_event(
self.session, event
)
self.session.rollback()
self.session._local_cache.clear()
preactions_launched = self._handle_preactions(self.session, event)
if preactions_launched is False:
return
self.launch(self.session, event)
interface = self._interface(
self.session, *args
)
if interface:
return interface
response = self.launch(
self.session, *args
)
return self._handle_result(
self.session, response, *args
)
def launch(self, session, entities, event):
def launch(self, session, event):
'''Callback method for the custom action.
return either a bool ( True if successful or False if the action failed )
@ -360,35 +317,7 @@ class BaseHandler(object):
return False
def _interface(self, *args):
interface = self.interface(*args)
if interface:
if (
'items' in interface or
('success' in interface and 'message' in interface)
):
return interface
return {
'items': interface
}
def interface(self, session, entities, event):
'''Return an interface if applicable or None
*session* is a `ftrack_api.Session` instance
*entities* is a list of tuples each containing the entity type and the entity id.
If the entity is hierarchical you will always get the entity
type TypedContext; once retrieved through a get operation you
will have the "real" entity type, i.e. Shot, Sequence
or Asset Build.
*event* the unmodified original event
'''
return None
def _handle_result(self, session, result, entities, event):
def _handle_result(self, result):
'''Validate the returned result from the action callback'''
if isinstance(result, bool):
if result is True:
@ -417,11 +346,6 @@ class BaseHandler(object):
'Missing required key: {0}.'.format(key)
)
else:
self.log.error(
'Invalid result type must be bool or dictionary!'
)
return result
def show_message(self, event, input_message, result=False):
@ -629,11 +553,20 @@ class BaseHandler(object):
if low_entity_type == "project":
return entity
if low_entity_type == "reviewsession":
if "project" in entity:
# reviewsession, task(Task, Shot, Sequence,...)
return entity["project"]
if low_entity_type == "filecomponent":
entity = entity["version"]
low_entity_type = entity.entity_type.lower()
if low_entity_type == "assetversion":
asset = entity["asset"]
if asset:
parent = asset["parent"]
if parent:
return parent["project"]
project_data = entity["link"][0]
return self.session.query(

View file

@ -43,35 +43,10 @@ class BaseEvent(BaseHandler):
priority=self.priority
)
def _launch(self, event):
self.session.rollback()
self.session._local_cache.clear()
self.launch(self.session, event)
def _translate_event(self, session, event):
def _translate_event(self, event, session=None):
'''Return *event* translated structure to be used with the API.'''
return [
self._get_entities(session, event),
event
]
def _get_entities(
self, session, event, ignore=['socialfeed', 'socialnotification']
):
_selection = event['data'].get('entities', [])
_entities = list()
if isinstance(ignore, str):
ignore = list(ignore)
for entity in _selection:
if entity['entityType'] in ignore:
continue
_entities.append(
(
session.get(
self._get_entity_type(entity),
entity.get('entityId')
)
)
)
return _entities
return self._get_entities(
event,
session,
ignore=['socialfeed', 'socialnotification']
)

View file

@ -1,135 +0,0 @@
from bson.objectid import ObjectId
from .avalon_sync import CustAttrIdKey
import avalon.io
def get_project_from_entity(entity):
# TODO add more entities
ent_type_lowered = entity.entity_type.lower()
if ent_type_lowered == "project":
return entity
elif ent_type_lowered == "assetversion":
return entity["asset"]["parent"]["project"]
elif "project" in entity:
return entity["project"]
return None
def get_avalon_entities_for_assetversion(asset_version, db_con=None):
output = {
"success": True,
"message": None,
"project": None,
"project_name": None,
"asset": None,
"asset_name": None,
"asset_path": None,
"subset": None,
"subset_name": None,
"version": None,
"version_name": None,
"representations": None
}
if db_con is None:
db_con = avalon.io
db_con.install()
ft_asset = asset_version["asset"]
subset_name = ft_asset["name"]
version = asset_version["version"]
parent = ft_asset["parent"]
ent_path = "/".join(
[ent["name"] for ent in parent["link"]]
)
project = get_project_from_entity(asset_version)
project_name = project["full_name"]
output["project_name"] = project_name
output["asset_name"] = parent["name"]
output["asset_path"] = ent_path
output["subset_name"] = subset_name
output["version_name"] = version
db_con.Session["AVALON_PROJECT"] = project_name
avalon_project = db_con.find_one({"type": "project"})
output["project"] = avalon_project
if not avalon_project:
output["success"] = False
output["message"] = "Project not synchronized to avalon `{}`".format(
project_name
)
return output
asset_ent = None
asset_mongo_id = parent["custom_attributes"].get(CustAttrIdKey)
if asset_mongo_id:
try:
asset_mongo_id = ObjectId(asset_mongo_id)
asset_ent = db_con.find_one({
"type": "asset",
"_id": asset_mongo_id
})
except Exception:
pass
if not asset_ent:
asset_ent = db_con.find_one({
"type": "asset",
"data.ftrackId": parent["id"]
})
output["asset"] = asset_ent
if not asset_ent:
output["success"] = False
output["message"] = "Not synchronized entity to avalon `{}`".format(
ent_path
)
return output
asset_mongo_id = asset_ent["_id"]
subset_ent = db_con.find_one({
"type": "subset",
"parent": asset_mongo_id,
"name": subset_name
})
output["subset"] = subset_ent
if not subset_ent:
output["success"] = False
output["message"] = (
"Subset `{}` does not exist under Asset `{}`"
).format(subset_name, ent_path)
return output
version_ent = db_con.find_one({
"type": "version",
"name": version,
"parent": subset_ent["_id"]
})
output["version"] = version_ent
if not version_ent:
output["success"] = False
output["message"] = (
"Version `{}` does not exist under Subset `{}` | Asset `{}`"
).format(version, subset_name, ent_path)
return output
repre_ents = list(db_con.find({
"type": "representation",
"parent": version_ent["_id"]
}))
output["representations"] = repre_ents
return output

View file

@ -0,0 +1,42 @@
import os
import traceback
from pype.lib import PypeHook
from pypeapp import Logger
from pype.premiere import lib as prlib
class PremierePrelaunch(PypeHook):
"""
This hook will check if the current workfile path has an Adobe Premiere
project inside. If not, it initializes one and finally passes the
path to the project to the Premiere launcher shell script via an
environment variable.
"""
def __init__(self, logger=None):
if not logger:
self.log = Logger().get_logger(self.__class__.__name__)
else:
self.log = logger
self.signature = "( {} )".format(self.__class__.__name__)
def execute(self, *args, env: dict = None) -> bool:
if not env:
env = os.environ
try:
__import__("pype.premiere")
__import__("pyblish")
except ImportError as e:
print(traceback.format_exc())
print("pyblish: Could not load integration: %s " % e)
else:
# Premiere Setup integration
# importlib.reload(prlib)
prlib.setup(env)
return True

View file

@ -0,0 +1,92 @@
import os
import pyblish.api
from avalon import (
io,
api as avalon
)
import json
from pathlib import Path
class CollectContextDataFromAport(pyblish.api.ContextPlugin):
"""
Collecting temp json data sent from a host context
and the path for returning json data back to the host itself.
Setting avalon session into the correct context
Args:
context (obj): pyblish context session
"""
label = "AdobeCommunicator Collect Context"
order = pyblish.api.CollectorOrder - 0.49
def process(self, context):
self.log.info(
"registred_hosts: `{}`".format(pyblish.api.registered_hosts()))
io.install()
# get json paths from data
input_json_path = os.environ.get("AC_PUBLISH_INPATH")
output_json_path = os.environ.get("AC_PUBLISH_OUTPATH")
rqst_json_data_path = Path(input_json_path)
post_json_data_path = Path(output_json_path)
context.data['post_json_data_path'] = str(post_json_data_path)
# get avalon session data and convert \ to /
_S = avalon.session
projects = Path(_S["AVALON_PROJECTS"]).resolve()
asset = _S["AVALON_ASSET"]
workdir = Path(_S["AVALON_WORKDIR"]).resolve()
_S["AVALON_PROJECTS"] = str(projects)
_S["AVALON_WORKDIR"] = str(workdir)
context.data["avalonSession"] = _S
self.log.info(f"__ avalonSession: `{_S}`")
# get staging directory from received path to json
context.data["stagingDir"] = post_json_data_path.parent
# get data from json file received
with rqst_json_data_path.open(mode='r') as f:
context.data["jsonData"] = json_data = json.load(f)
assert json_data, "No `data` in json file"
# get and check host type
host = json_data.get("host", None)
host_version = json_data.get("hostVersion", None)
assert host, "No `host` data in json file"
assert host_version, "No `hostVersion` data in json file"
context.data["host"] = _S["AVALON_APP"] = host
context.data["hostVersion"] = \
_S["AVALON_APP_VERSION"] = host_version
# get current file
current_file = json_data.get("currentFile", None)
assert current_file, "No `currentFile` data in json file"
context.data["currentFile"] = str(Path(current_file).resolve())
# get project data from avalon
project_data = io.find_one({'type': 'project'})
assert project_data, "No `project_data` data in avalon db"
context.data["projectData"] = project_data
self.log.debug("project_data: {}".format(project_data))
# get asset data from avalon and fix all paths
asset_data = io.find_one({
"type": 'asset',
"name": asset
})["data"]
assert asset_data, "No `asset_data` data in avalon db"
context.data["assetData"] = asset_data
self.log.debug("asset_data: {}".format(asset_data))
self.log.info("rqst_json_data_path is: {}".format(rqst_json_data_path))
self.log.info("post_json_data_path is: {}".format(post_json_data_path))
# self.log.info("avalon.session is: {}".format(avalon.session))

View file

@ -1,12 +1,5 @@
import os
import json
import pyblish.api
from avalon import (
io,
api as avalon
)
from pype import api as pype
class CollectInstancesFromJson(pyblish.api.ContextPlugin):
@ -26,7 +19,11 @@ class CollectInstancesFromJson(pyblish.api.ContextPlugin):
def process(self, context):
a_session = context.data.get("avalonSession")
_S = context.data["avalonSession"]
asset = _S["AVALON_ASSET"]
task = _S["AVALON_TASK"]
host = _S["AVALON_APP"]
json_data = context.data.get("jsonData", None)
assert json_data, "No `json_data` data in json file"
@ -36,96 +33,91 @@ class CollectInstancesFromJson(pyblish.api.ContextPlugin):
staging_dir = json_data.get("stagingDir", None)
assert staging_dir, "No `stagingDir` path in json file"
presets = context.data["presets"]
rules_tasks = presets["rules_tasks"]
ftrack_types = rules_tasks["ftrackTypes"]
assert ftrack_types, "No `ftrack_types` data in `/templates/presets/[host]/rules_tasks.json` file"
host = context.data["host"]
presets = context.data["presets"][host]
context.data["ftrackTypes"] = ftrack_types
rules_tasks = presets["rules_tasks"]
asset_default = presets["asset_default"]
assert asset_default, "No `asset_default` data in `/templates/presets/[host]/asset_default.json` file"
asset_name = a_session["AVALON_ASSET"]
entity = io.find_one({"name": asset_name,
"type": "asset"})
assert asset_default, ("No `asset_default` data in"
"`/presets/[host]/asset_default.json` file")
# get frame start > first try from asset data
frame_start = context.data["assetData"].get("fstart", None)
frame_start = context.data["assetData"].get("frameStart", None)
if not frame_start:
self.log.debug("frame_start not on assetData")
# get frame start > second try from parent data
frame_start = pype.get_data_hierarchical_attr(entity, "fstart")
if not frame_start:
self.log.debug("frame_start not on any parent entity")
# get frame start > third try from parent data
frame_start = asset_default["fstart"]
self.log.debug("frame_start not on any parent entity")
# get frame start > third try from parent data
frame_start = asset_default["frameStart"]
assert frame_start, "No `frame_start` data found, "
"please set `fstart` on asset"
self.log.debug("frame_start: `{}`".format(frame_start))
# get handles > first try from asset data
handles = context.data["assetData"].get("handles", None)
if not handles:
handle_start = context.data["assetData"].get("handleStart", None)
handle_end = context.data["assetData"].get("handleEnd", None)
if (handle_start is None) or (handle_end is None):
# get frame start > second try from parent data
handles = pype.get_data_hierarchical_attr(entity, "handles")
if not handles:
# get frame start > third try from parent data
handles = asset_default["handles"]
handle_start = asset_default.get("handleStart", None)
handle_end = asset_default.get("handleEnd", None)
assert handles, "No `handles` data found, "
"please set `fstart` on asset"
self.log.debug("handles: `{}`".format(handles))
assert (
(handle_start is not None) or (
handle_end is not None)), (
"No `handle_start, handle_end` data found")
instances = []
task = a_session["AVALON_TASK"]
current_file = os.path.basename(context.data.get("currentFile"))
name, ext = os.path.splitext(current_file)
# get current file host
host = a_session["AVALON_APP"]
family = "projectfile"
families = "filesave"
family = "workfile"
subset_name = "{0}{1}".format(task, 'Default')
instance_name = "{0}_{1}_{2}".format(name,
family,
subset_name)
# Set label
label = "{0} - {1} > {2}".format(name, task, families)
label = "{0} - {1}".format(name, task)
# get project file instance Data
pf_instance = [inst for inst in instances_data
if inst.get("family", None) in 'projectfile']
self.log.debug('pf_instance: {}'.format(pf_instance))
# get working file into instance for publishing
instance = context.create_instance(instance_name)
if pf_instance:
instance.data.update(pf_instance[0])
instance.data.update({
"subset": subset_name,
"stagingDir": staging_dir,
"task": task,
"representation": ext[1:],
"host": host,
"asset": asset_name,
"label": label,
"name": name,
# "hierarchy": hierarchy,
# "parents": parents,
"family": family,
"families": [families, 'ftrack'],
"publish": True,
# "files": files_list
})
instances.append(instance)
wf_instance = next((inst for inst in instances_data
if inst.get("family", None) in 'workfile'), None)
if wf_instance:
self.log.debug('wf_instance: {}'.format(wf_instance))
version = int(wf_instance.get("version", None))
# get working file into instance for publishing
instance = context.create_instance(instance_name)
instance.data.update(wf_instance)
instance.data.update({
"subset": subset_name,
"stagingDir": staging_dir,
"task": task,
"representations": [{
"files": current_file,
'stagingDir': staging_dir,
'name': "projectfile",
'ext': ext[1:]
}],
"host": host,
"asset": asset,
"label": label,
"name": name,
"family": family,
"families": ["ftrack"],
"publish": True,
"version": version
})
instances.append(instance)
for inst in instances_data:
# for key, value in inst.items():
# self.log.debug('instance[key]: {}'.format(key))
#
version = inst.get("version", None)
version = int(inst.get("version", None))
assert version, "No `version` string in json file"
name = asset = inst.get("name", None)
@ -135,14 +127,14 @@ class CollectInstancesFromJson(pyblish.api.ContextPlugin):
assert family, "No `family` key in json_data.instance: {}".format(
inst)
if family in 'projectfile':
if family in 'workfile':
continue
files_list = inst.get("files", None)
assert files_list, "`files` are empty in json file"
hierarchy = inst.get("hierarchy", None)
assert hierarchy, "No `hierarchy` data in json file"
assert hierarchy, f"No `hierarchy` data in json file for {name}"
parents = inst.get("parents", None)
assert parents, "No `parents` data in json file"
@ -161,17 +153,12 @@ class CollectInstancesFromJson(pyblish.api.ContextPlugin):
# create list of tasks for creation
if not inst.get('tasks', None):
inst['tasks'] = list()
if not inst.get('tasksTypes', None):
inst['tasksTypes'] = {}
# append task into list for later hierarchy creation
ftrack_task_type = ftrack_types[task]
if task not in inst['tasks']:
inst['tasks'].append(task)
inst['tasksTypes'][task] = ftrack_task_type
host = rules_tasks["taskHost"][task]
subsets = rules_tasks["taskSubsets"][task]
subsets = rules_tasks["taskToSubsets"][task]
for sub in subsets:
self.log.debug(sub)
try:
@ -184,8 +171,8 @@ class CollectInstancesFromJson(pyblish.api.ContextPlugin):
subset_lst.extend([s for s in subsets if s not in subset_lst])
for subset in subset_lst:
if inst["representations"].get(subset, None):
repr = inst["representations"][subset]
if inst["subsetToRepresentations"].get(subset, None):
repr = inst["subsetToRepresentations"][subset]
ext = repr['representation']
else:
continue
@ -197,7 +184,7 @@ class CollectInstancesFromJson(pyblish.api.ContextPlugin):
family = subset
subset_name = "{0}{1}".format(subset, "Main")
elif "reference" in subset:
family ="render"
family = "review"
subset_name = "{0}{1}".format(family, "Reference")
else:
subset_name = "{0}{1}".format(subset, 'Default')
@ -209,17 +196,15 @@ class CollectInstancesFromJson(pyblish.api.ContextPlugin):
instance = context.create_instance(name)
files = [f for f in files_list
if subset in f or "thumbnail" in f
]
if subset in f or "thumbnail" in f]
instance.data.update({
"subset": subset_name,
"stagingDir": staging_dir,
"tasks": subset_dict[subset],
"taskTypes": inst['tasksTypes'],
"fstart": frame_start,
"handles": handles,
"host": host,
"frameStart": frame_start,
"handleStart": handle_start,
"handleEnd": handle_end,
"asset": asset,
"hierarchy": hierarchy,
"parents": parents,
@ -230,6 +215,8 @@ class CollectInstancesFromJson(pyblish.api.ContextPlugin):
"family": family,
"families": [subset, inst["family"], 'ftrack'],
"jsonData": inst,
"jsonReprSubset": subset,
"jsonReprExt": ext,
"publish": True,
"version": version})
self.log.info(
@ -238,9 +225,6 @@ class CollectInstancesFromJson(pyblish.api.ContextPlugin):
context.data["instances"] = instances
# Sort/grouped by family (preserving local index)
# context[:] = sorted(context, key=self.sort_by_task)
self.log.debug("context: {}".format(context))
def sort_by_task(self, instance):

View file

@ -2,7 +2,7 @@
import json
import clique
import pyblish.api
from pypeapp import Anatomy
class ExtractJSON(pyblish.api.ContextPlugin):
""" Extract all instances to a serialized json file. """
@ -14,28 +14,27 @@ class ExtractJSON(pyblish.api.ContextPlugin):
json_path = context.data['post_json_data_path']
data = dict(self.serialize(context.data()))
# self.log.info(data)
instances_data = []
for instance in context:
iData = {}
for key, value in instance.data.items():
if isinstance(value, clique.Collection):
value = value.format()
try:
json.dumps(value)
iData[key] = value
except KeyError:
msg = "\"{0}\"".format(value)
msg += " in instance.data[\"{0}\"]".format(key)
msg += " could not be serialized."
self.log.debug(msg)
instances_data.append(iData)
data["instances"] = instances_data
# instances_data = []
# for instance in context:
#
# iData = {}
# for key, value in instance.data.items():
# if isinstance(value, clique.Collection):
# value = value.format()
#
# try:
# json.dumps(value)
# iData[key] = value
# except KeyError:
# msg = "\"{0}\"".format(value)
# msg += " in instance.data[\"{0}\"]".format(key)
# msg += " could not be serialized."
# self.log.debug(msg)
#
# instances_data.append(iData)
#
# data["instances"] = instances_data
with open(json_path, "w") as outfile:
outfile.write(json.dumps(data, indent=4, sort_keys=True))
@ -60,6 +59,9 @@ class ExtractJSON(pyblish.api.ContextPlugin):
# self.log.info("1: {}".format(data))
if isinstance(data, Anatomy):
return
if not isinstance(data, dict):
# self.log.info("2: {}".format(data))
return data
@ -88,6 +90,9 @@ class ExtractJSON(pyblish.api.ContextPlugin):
# loops if dictionary
data[key] = self.serialize(value)
if isinstance(value, Anatomy):
continue
if isinstance(value, (list, tuple)):
# loops if list or tuple
for i, item in enumerate(value):

View file

@ -1,104 +0,0 @@
import os
import pyblish.api
from avalon import api as avalon
from pype import api as pype
import json
from pathlib import Path
class CollectContextDataFromAport(pyblish.api.ContextPlugin):
"""
Collecting temp json data sent from a host context
and path for returning json data back to the host itself.
Setting avalon session into correct context
Args:
context (obj): pyblish context session
"""
label = "Collect Aport Context"
order = pyblish.api.CollectorOrder - 0.49
def process(self, context):
# get json paths from data
rqst_json_data_path = Path(context.data['rqst_json_data_path'])
post_json_data_path = Path(context.data['post_json_data_path'])
# get avalon session data and convert \ to /
session = avalon.session
self.log.info(os.environ['AVALON_PROJECTS'])
projects = Path(session['AVALON_PROJECTS']).resolve()
wd = Path(session['AVALON_WORKDIR']).resolve()
session['AVALON_PROJECTS'] = str(projects)
session['AVALON_WORKDIR'] = str(wd)
context.data["avalonSession"] = session
self.log.debug("avalonSession: {}".format(session))
# get staging directory from received path to json
context.data["stagingDir"] = staging_dir = post_json_data_path.parent
# get data from json file received
with rqst_json_data_path.open(mode='r') as f:
context.data['jsonData'] = json_data = json.load(f)
assert json_data, "No `data` in json file"
# get and check host type
host = json_data.get("host", None)
host_version = json_data.get("hostVersion", None)
assert host, "No `host` data in json file"
assert host_version, "No `hostVersion` data in json file"
context.data["host"] = session["AVALON_APP"] = host
context.data["hostVersion"] = \
session["AVALON_APP_VERSION"] = host_version
# register pyblish for filtering of hosts in plugins
pyblish.api.deregister_all_hosts()
pyblish.api.register_host(host)
# get path to studio templates
templates_dir = os.getenv("PYPE_STUDIO_TEMPLATES", None)
assert templates_dir, "Missing `PYPE_STUDIO_TEMPLATES` in os.environ"
# get presets for host
presets_dir = os.path.join(templates_dir, "presets", host)
assert os.path.exists(
presets_dir), "Required path `{}` doesn't exist".format(presets_dir)
# load all available preset json files
preset_data = dict()
for file in os.listdir(presets_dir):
name, ext = os.path.splitext(file)
with open(os.path.join(presets_dir, file)) as prst:
preset_data[name] = json.load(prst)
context.data['presets'] = preset_data
assert preset_data, "No `presets` data in json file"
self.log.debug("preset_data: {}".format(preset_data))
# get current file
current_file = json_data.get("currentFile", None)
assert current_file, "No `currentFile` data in json file"
context.data["currentFile"] = Path(current_file).resolve()
# get project data from avalon
project_data = pype.get_project_data()
assert project_data, "No `project_data` data in avalon db"
context.data["projectData"] = project_data
self.log.debug("project_data: {}".format(project_data))
# get asset data from avalon and fix all paths
asset_data = pype.get_asset_data()
assert asset_data, "No `asset_data` data in avalon db"
asset_data = {k: v.replace("\\", "/") for k, v in asset_data.items()
if isinstance(v, str)}
context.data["assetData"] = asset_data
self.log.debug("asset_data: {}".format(asset_data))
self.log.info("rqst_json_data_path is: {}".format(rqst_json_data_path))
self.log.info("post_json_data_path is: {}".format(post_json_data_path))
# self.log.info("avalon.session is: {}".format(avalon.session))

View file

@ -89,7 +89,7 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):
# CUSTOM ATTRIBUTES
custom_attributes = entity_data.get('custom_attributes', [])
instances = [
i for i in self.context[:] if i.data['asset'] in entity['name']
i for i in self.context if i.data['asset'] in entity['name']
]
for key in custom_attributes:
assert (key in entity['custom_attributes']), (

View file

@ -51,10 +51,26 @@ class CollectAvalonEntities(pyblish.api.ContextPlugin):
context.data["frameStart"] = data.get("frameStart")
context.data["frameEnd"] = data.get("frameEnd")
handles = int(data.get("handles") or 0)
context.data["handles"] = handles
context.data["handleStart"] = int(data.get("handleStart", handles))
context.data["handleEnd"] = int(data.get("handleEnd", handles))
handles = data.get("handles") or 0
handle_start = data.get("handleStart")
if handle_start is None:
handle_start = handles
self.log.info((
"Key \"handleStart\" is not set."
" Using value from \"handles\" key {}."
).format(handle_start))
handle_end = data.get("handleEnd")
if handle_end is None:
handle_end = handles
self.log.info((
"Key \"handleEnd\" is not set."
" Using value from \"handles\" key {}."
).format(handle_end))
context.data["handles"] = int(handles)
context.data["handleStart"] = int(handle_start)
context.data["handleEnd"] = int(handle_end)
frame_start_h = data.get("frameStart") - context.data["handleStart"]
frame_end_h = data.get("frameEnd") + context.data["handleEnd"]

View file

@ -18,7 +18,7 @@ class ExtractBurnin(pype.api.Extractor):
label = "Extract burnins"
order = pyblish.api.ExtractorOrder + 0.03
families = ["review", "burnin"]
hosts = ["nuke", "maya", "shell"]
hosts = ["nuke", "maya", "shell", "premiere"]
optional = True
def process(self, instance):
@ -193,6 +193,7 @@ class ExtractBurnin(pype.api.Extractor):
self.log.debug("Output: {}".format(output))
repre_update = {
"anatomy_template": "render",
"files": movieFileBurnin,
"name": repre["name"],
"tags": [x for x in repre["tags"] if x != "delete"]

View file

@ -20,7 +20,7 @@ class ExtractReview(pyblish.api.InstancePlugin):
label = "Extract Review"
order = pyblish.api.ExtractorOrder + 0.02
families = ["review"]
hosts = ["nuke", "maya", "shell"]
hosts = ["nuke", "maya", "shell", "premiere"]
outputs = {}
ext_filter = []

View file

@ -5,6 +5,7 @@ import sys
import copy
import clique
import errno
import six
from pymongo import DeleteOne, InsertOne
import pyblish.api
@ -551,7 +552,12 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
# copy file with speedcopy and check if sizes of source and destination match
while True:
copyfile(src, dst)
try:
copyfile(src, dst)
except OSError as e:
self.log.critical("Cannot copy {} to {}".format(src, dst))
self.log.critical(e)
six.reraise(*sys.exc_info())
if str(getsize(src)) in str(getsize(dst)):
break
@ -589,7 +595,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
"name": subset_name,
"data": {
"families": instance.data.get('families')
},
},
"parent": asset["_id"]
}).inserted_id

View file

@ -237,6 +237,11 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
environment["PYPE_METADATA_FILE"] = metadata_path
environment["AVALON_PROJECT"] = io.Session["AVALON_PROJECT"]
environment["PYPE_LOG_NO_COLORS"] = "1"
try:
environment["PYPE_PYTHON_EXE"] = os.environ["PYPE_PYTHON_EXE"]
except KeyError:
# PYPE_PYTHON_EXE not set
pass
i = 0
for index, key in enumerate(environment):
if key.upper() in self.enviro_filter:

View file

@ -11,6 +11,7 @@ class CreateYetiCache(avalon.maya.Creator):
label = "Yeti Cache"
family = "yeticache"
icon = "pagelines"
defaults = ["Main"]
def __init__(self, *args, **kwargs):
super(CreateYetiCache, self).__init__(*args, **kwargs)

View file

@ -10,6 +10,7 @@ class CreateYetiRig(avalon.maya.Creator):
label = "Yeti Rig"
family = "yetiRig"
icon = "usb"
defaults = ["Main"]
def process(self):

View file

@ -18,17 +18,17 @@ class LookLoader(pype.maya.plugin.ReferenceLoader):
def process_reference(self, context, name, namespace, options):
"""
Load and try to assign Lookdev to nodes based on relationship data
Load and try to assign Lookdev to nodes based on relationship data.
Args:
name:
namespace:
context:
data:
options:
Returns:
"""
import maya.cmds as cmds
from avalon import maya

View file

@ -65,8 +65,10 @@ class ReferenceLoader(pype.maya.plugin.ReferenceLoader):
roots.add(pm.PyNode(node).getAllParents()[-2])
except: # noqa: E722
pass
for root in roots:
root.setParent(world=True)
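# keep the original parenting for layout, setdress and mayaAscii
# references; everything else gets re-parented to world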
if family not in ["layout", "setdress", "mayaAscii"]:
for root in roots:
root.setParent(world=True)
groupNode.zeroTransformPivots()
for root in roots:

View file

@ -21,7 +21,8 @@ class YetiRigLoader(pype.maya.plugin.ReferenceLoader):
icon = "code-fork"
color = "orange"
def process_reference(self, context, name=None, namespace=None, data=None):
def process_reference(
self, context, name=None, namespace=None, options=None):
import maya.cmds as cmds
from avalon import maya

View file

@ -250,7 +250,8 @@ class CollectLook(pyblish.api.InstancePlugin):
# Remove sets that didn't have any members assigned in the end
# Thus the data will be limited to only what we need.
if not sets[objset]["members"]:
self.log.info("objset {}".format(sets[objset]))
if not sets[objset]["members"] or (not objset.endswith("SG")):
self.log.info("Removing redundant set information: "
"%s" % objset)
sets.pop(objset, None)

View file

@ -157,6 +157,9 @@ class CollectMayaRender(pyblish.api.ContextPlugin):
attachTo = []
if sets:
for s in sets:
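# skip sets that do not carry a "family" attribute (not avalon instances)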
if "family" not in cmds.listAttr(s):
continue
attachTo.append(
{
"version": None, # we need integrator for that

View file

@ -56,7 +56,8 @@ class ExtractAnimation(pype.api.Extractor):
"writeCreases": True,
"uvWrite": True,
"selection": True,
"worldSpace": instance.data.get("worldSpace", True)
"worldSpace": instance.data.get("worldSpace", True),
"writeColorSets": instance.data.get("writeColorSets", False)
}
if not instance.data.get("includeParentHierarchy", True):

View file

@ -147,7 +147,7 @@ class ExtractYetiRig(pype.api.Extractor):
nodes = instance.data["setMembers"]
resources = instance.data.get("resources", {})
with disconnect_plugs(settings, members):
with yetigraph_attribute_values(destination_folder, resources):
with yetigraph_attribute_values(resources_dir, resources):
with maya.attribute_values(attr_value):
cmds.select(nodes, noExpand=True)
cmds.file(maya_path,

View file

@ -48,6 +48,14 @@ class CreateWritePrerender(plugin.PypeCreator):
self.log.error(msg)
nuke.message(msg)
if len(nodes) == 0:
msg = (
"No nodes selected. Please select a single node to connect"
" to or tick off `Use selection`"
)
self.log.error(msg)
nuke.message(msg)
selected_node = nodes[0]
inputs = [selected_node]
outputs = selected_node.dependent()

View file

@ -49,6 +49,14 @@ class CreateWriteRender(plugin.PypeCreator):
self.log.error(msg)
nuke.message(msg)
if len(nodes) == 0:
msg = (
"No nodes selected. Please select a single node to connect"
" to or tick off `Use selection`"
)
self.log.error(msg)
nuke.message(msg)
selected_node = nodes[0]
inputs = [selected_node]
outputs = selected_node.dependent()

View file

@ -125,6 +125,7 @@ class ExtractThumbnail(pype.api.Extractor):
repre = {
'name': name,
'ext': "jpeg",
"outputName": "thumb",
'files': file,
"stagingDir": staging_dir,
"frameStart": first_frame,

View file

@ -13,5 +13,5 @@ class CollectAudioVersion(pyblish.api.InstancePlugin):
def process(self, instance):
self.log.info('Audio: {}'.format(instance.data['name']))
instance.data['version'] = '001'
instance.data['version'] = 1
self.log.info('Audio version to: {}'.format(instance.data['version']))

View file

@ -1,12 +0,0 @@
import pyblish.api
class CollectContextDataPremiera(pyblish.api.ContextPlugin):
"""Collecting data from temp json sent from premiera context"""
label = "Collect Premiera Context"
order = pyblish.api.CollectorOrder + 0.1
def process(self, context):
data_path = context.data['rqst_json_data_path']
self.log.info("Context is: {}".format(data_path))

View file

@ -19,16 +19,18 @@ class CollectFrameranges(pyblish.api.InstancePlugin):
metadata = instance.data.get('jsonData').get('metadata')
# getting important metadata time calculation
fps = metadata['ppro.timeline.fps']
fps = float(metadata['ppro.timeline.fps'])
sec_start = metadata['ppro.clip.start']
sec_end = metadata['ppro.clip.end']
fstart = instance.data.get('fstart')
fstart = instance.data.get('frameStart')
fend = fstart + (sec_end * fps) - (sec_start * fps) - 1
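# Worked example (hypothetical values): fps=25.0, sec_start=10.0,
# sec_end=14.0, frameStart=1001 -> fend = 1001 + 350.0 - 250.0 - 1 = 1100.0,
# i.e. the clip covers 100 frames including the first one.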
self.log.debug("instance: {}, fps: {}\nsec_start: {}\nsec_end: {}\nfstart: {}\nfend: {}\n".format(
instance.data['name'],
fps, sec_start, sec_end, fstart, fend))
instance.data['startFrame'] = fstart
instance.data['endFrame'] = fend
instance.data['frameStart'] = fstart
instance.data['frameEnd'] = fend
instance.data['handleStart'] = instance.context.data['handleStart']
instance.data['handleEnd'] = instance.context.data['handleEnd']
instance.data['fps'] = metadata['ppro.timeline.fps']

View file

@ -26,7 +26,7 @@ class CollectHierarchyContext(pyblish.api.ContextPlugin):
json_data = context.data.get("jsonData", None)
temp_context = {}
for instance in json_data['instances']:
if instance['family'] in 'projectfile':
if instance['family'] in 'workfile':
continue
in_info = {}
@ -35,10 +35,13 @@ class CollectHierarchyContext(pyblish.api.ContextPlugin):
in_info['entity_type'] = 'Shot'
instance_pyblish = [
i for i in context.data["instances"] if i.data['asset'] in name][0]
i for i in context.data["instances"]
if i.data['asset'] in name][0]
in_info['custom_attributes'] = {
'fend': instance_pyblish.data['endFrame'],
'fstart': instance_pyblish.data['startFrame'],
'frameStart': instance_pyblish.data['frameStart'],
'frameEnd': instance_pyblish.data['frameEnd'],
'handleStart': instance_pyblish.data['handleStart'],
'handleEnd': instance_pyblish.data['handleEnd'],
'fps': instance_pyblish.data['fps']
}

View file

@ -0,0 +1,83 @@
import os
import pyblish.api
class CollectClipRepresentations(pyblish.api.InstancePlugin):
"""
Collecting clip representations (video and thumbnail) for integration
Args:
instance (obj): pyblish instance
"""
label = "Collect Clip Representations"
order = pyblish.api.CollectorOrder
families = ['clip']
def process(self, instance):
# add to representations
if not instance.data.get("representations"):
instance.data["representations"] = list()
ins_d = instance.data
staging_dir = ins_d["stagingDir"]
frame_start = ins_d["frameStart"]
frame_end = ins_d["frameEnd"]
handle_start = ins_d["handleStart"]
handle_end = ins_d["handleEnd"]
fps = ins_d["fps"]
files_list = ins_d.get("files")
if not files_list:
return
json_repr_ext = ins_d["jsonReprExt"]
json_repr_subset = ins_d["jsonReprSubset"]
if files_list:
file = next((f for f in files_list
if json_repr_subset in f), None)
else:
return
if json_repr_ext in ["mov", "mp4"]:
representation = {
"files": file,
"stagingDir": staging_dir,
"frameStart": frame_start,
"frameEnd": frame_end,
"frameStartFtrack": frame_start - handle_start,
"frameEndFtrack": frame_end - handle_end,
"step": 1,
"fps": fps,
"name": json_repr_subset,
"ext": json_repr_ext,
"tags": ["review", "delete"]
}
else:
representation = {
"files": file,
"stagingDir": staging_dir,
"step": 1,
"fps": fps,
"name": json_repr_subset,
"ext": json_repr_ext,
"tags": ["review"]
}
self.log.debug("representation: {}".format(representation))
instance.data["representations"].append(representation)
thumb = next((f for f in files_list
if "thumbnail" in f), None)
if thumb:
thumb_representation = {
'files': thumb,
'stagingDir': staging_dir,
'name': "thumbnail",
'thumbnail': True,
'ext': os.path.splitext(thumb)[-1].replace(".", "")
}
self.log.debug("representation: {}".format(thumb_representation))
instance.data["representations"].append(
thumb_representation)

View file

@ -0,0 +1,31 @@
import pyblish.api
class CollectResolution(pyblish.api.InstancePlugin):
"""
Collecting clip resolution and pixel aspect ratio from json metadata
Args:
instance (obj): pyblish instance
"""
label = "Collect Clip Resolution"
order = pyblish.api.CollectorOrder
families = ['clip']
def process(self, instance):
# getting metadata from jsonData key
metadata = instance.data.get('jsonData').get('metadata')
# getting important metadata time calculation
pixel_aspect = float(metadata['ppro.format.pixelaspect'])
res_width = metadata['ppro.format.width']
res_height = metadata['ppro.format.height']
instance.data['pixelAspect'] = pixel_aspect
instance.data['resolutionWidth'] = res_width
instance.data['resolutionHeight'] = res_height
self.log.info(f"Resolution was set to: `{res_width}x{res_height}`,"
f" and pixel aspect ration to: `{pixel_aspect}`")

View file

@ -1,144 +0,0 @@
import pyblish.api
import os
from avalon import io, api
class IntegrateAssumedDestination(pyblish.api.InstancePlugin):
"""Generate the assumed destination path where the file will be stored"""
label = "Integrate Assumed Destination"
order = pyblish.api.IntegratorOrder - 0.05
families = ["clip", "projectfile"]
def process(self, instance):
self.create_destination_template(instance)
template_data = instance.data["assumedTemplateData"]
# template = instance.data["template"]
anatomy = instance.context.data['anatomy']
# template = anatomy.publish.path
anatomy_filled = anatomy.format(template_data)
mock_template = anatomy_filled.publish.path
# For now assume resources end up in a "resources" folder in the
# published folder
mock_destination = os.path.join(os.path.dirname(mock_template),
"resources")
# Clean the path
mock_destination = os.path.abspath(os.path.normpath(mock_destination))
# Define resource destination and transfers
resources = instance.data.get("resources", list())
transfers = instance.data.get("transfers", list())
for resource in resources:
# Add destination to the resource
source_filename = os.path.basename(resource["source"])
destination = os.path.join(mock_destination, source_filename)
# Force forward slashes to fix issue with software unable
# to work correctly with backslashes in specific scenarios
# (e.g. escape characters in PLN-151 V-Ray UDIM)
destination = destination.replace("\\", "/")
resource['destination'] = destination
# Collect transfers for the individual files of the resource
# e.g. all individual files of a cache or UDIM textures.
files = resource['files']
for fsrc in files:
fname = os.path.basename(fsrc)
fdest = os.path.join(mock_destination, fname)
transfers.append([fsrc, fdest])
instance.data["resources"] = resources
instance.data["transfers"] = transfers
def create_destination_template(self, instance):
"""Create a filepath based on the current data available
Example template:
{root}/{project}/{silo}/{asset}/publish/{subset}/v{version:0>3}/
{subset}.{representation}
Args:
instance: the instance to publish
Returns:
file path (str)
"""
# get all the stuff from the database
subset_name = instance.data["subset"]
self.log.info(subset_name)
asset_name = instance.data["asset"]
project_name = api.Session["AVALON_PROJECT"]
project = io.find_one(
{
"type": "project",
"name": project_name
},
projection={"config": True, "data": True}
)
template = project["config"]["template"]["publish"]
# anatomy = instance.context.data['anatomy']
asset = io.find_one({
"type": "asset",
"name": asset_name,
"parent": project["_id"]
})
assert asset, ("No asset found by the name '{}' "
"in project '{}'".format(asset_name, project_name))
silo = asset.get('silo')
subset = io.find_one({
"type": "subset",
"name": subset_name,
"parent": asset["_id"]
})
# assume there is no version yet, we start at `1`
version = None
version_number = 1
if subset is not None:
version = io.find_one(
{
"type": "version",
"parent": subset["_id"]
},
sort=[("name", -1)]
)
# if there is a subset there ought to be version
if version is not None:
version_number += version["name"]
if instance.data.get('version'):
version_number = int(instance.data.get('version'))
hierarchy = asset['data']['parents']
if hierarchy:
# hierarchy = os.path.sep.join(hierarchy)
hierarchy = os.path.join(*hierarchy)
template_data = {"root": api.Session["AVALON_PROJECTS"],
"project": {"name": project_name,
"code": project['data']['code']},
"silo": silo,
"family": instance.data['family'],
"asset": asset_name,
"subset": subset_name,
"version": version_number,
"hierarchy": hierarchy,
"representation": "TEMP"}
instance.data["assumedTemplateData"] = template_data
self.log.info(template_data)
instance.data["template"] = template

View file

@ -1,140 +0,0 @@
import pyblish.api
from avalon import io
class IntegrateHierarchyToAvalon(pyblish.api.ContextPlugin):
"""
Create entities in ftrack based on collected data from premiere
"""
order = pyblish.api.IntegratorOrder - 0.1
label = 'Integrate Hierarchy To Avalon'
families = ['clip']
def process(self, context):
if "hierarchyContext" not in context.data:
return
self.db = io
if not self.db.Session:
self.db.install()
input_data = context.data["hierarchyContext"]
self.import_to_avalon(input_data)
def import_to_avalon(self, input_data, parent=None):
for name in input_data:
self.log.info('input_data[name]: {}'.format(input_data[name]))
entity_data = input_data[name]
entity_type = entity_data['entity_type']
data = {}
# Process project
if entity_type.lower() == 'project':
entity = self.db.find_one({'type': 'project'})
# TODO: should be in validator?
assert (entity is not None), "Didn't find project in DB"
# get data from already existing project
for key, value in entity.get('data', {}).items():
data[key] = value
self.av_project = entity
# Raise error if project or parent are not set
elif self.av_project is None or parent is None:
raise AssertionError(
"Collected items are not in right order!"
)
# Else process assset
else:
entity = self.db.find_one({'type': 'asset', 'name': name})
# Create entity if doesn't exist
if entity is None:
if self.av_project['_id'] == parent['_id']:
silo = None
elif parent['silo'] is None:
silo = parent['name']
else:
silo = parent['silo']
entity = self.create_avalon_asset(name, silo)
self.log.info('entity: {}'.format(entity))
self.log.info('data: {}'.format(entity.get('data', {})))
self.log.info('____1____')
data['entityType'] = entity_type
# TASKS
tasks = entity_data.get('tasks', [])
if tasks is not None or len(tasks) > 0:
data['tasks'] = tasks
parents = []
visualParent = None
data = input_data[name]
if self.av_project['_id'] != parent['_id']:
visualParent = parent['_id']
parents.extend(parent.get('data', {}).get('parents', []))
parents.append(parent['name'])
data['visualParent'] = visualParent
data['parents'] = parents
self.db.update_many(
{'_id': entity['_id']},
{'$set': {
'data': data,
}})
entity = self.db.find_one({'type': 'asset', 'name': name})
self.log.info('entity: {}'.format(entity))
self.log.info('data: {}'.format(entity.get('data', {})))
self.log.info('____2____')
# Else get data from already existing
else:
self.log.info('entity: {}'.format(entity))
self.log.info('data: {}'.format(entity.get('data', {})))
self.log.info('________')
for key, value in entity.get('data', {}).items():
data[key] = value
data['entityType'] = entity_type
# TASKS
tasks = entity_data.get('tasks', [])
if tasks is not None or len(tasks) > 0:
data['tasks'] = tasks
parents = []
visualParent = None
# do not store project's id as visualParent (silo asset)
if self.av_project['_id'] != parent['_id']:
visualParent = parent['_id']
parents.extend(parent.get('data', {}).get('parents', []))
parents.append(parent['name'])
data['visualParent'] = visualParent
data['parents'] = parents
# CUSTOM ATTRIBUTES
for k, val in entity_data.get('custom_attributes', {}).items():
data[k] = val
# Update entity data with input data
self.db.update_many(
{'_id': entity['_id']},
{'$set': {
'data': data,
}})
if 'childs' in entity_data:
self.import_to_avalon(entity_data['childs'], entity)
def create_avalon_asset(self, name, silo):
item = {
'schema': 'avalon-core:asset-2.0',
'name': name,
'silo': silo,
'parent': self.av_project['_id'],
'type': 'asset',
'data': {}
}
entity_id = self.db.insert_one(item).inserted_id
return self.db.find_one({'_id': entity_id})

View file

@ -1,171 +0,0 @@
import sys
import pyblish.api
import six
class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):
"""
Create entities in ftrack based on collected data from premiere
Example of entry data:
{
"ProjectXS": {
"entity_type": "Project",
"custom_attributes": {
"fps": 24,...
},
"tasks": [
"Compositing",
"Lighting",... *task must exist as task type in project schema*
],
"childs": {
"sq01": {
"entity_type": "Sequence",
...
}
}
}
}
"""
order = pyblish.api.IntegratorOrder
label = 'Integrate Hierarchy To Ftrack'
families = ["clip"]
optional = False
def process(self, context):
self.context = context
if "hierarchyContext" not in context.data:
return
self.ft_project = None
self.session = context.data["ftrackSession"]
input_data = context.data["hierarchyContext"]
# adding ftrack types from presets
ftrack_types = context.data['ftrackTypes']
self.import_to_ftrack(input_data, ftrack_types)
def import_to_ftrack(self, input_data, ftrack_types, parent=None):
for entity_name in input_data:
entity_data = input_data[entity_name]
entity_type = entity_data['entity_type'].capitalize()
if entity_type.lower() == 'project':
query = 'Project where full_name is "{}"'.format(entity_name)
entity = self.session.query(query).one()
self.ft_project = entity
self.task_types = self.get_all_task_types(entity)
elif self.ft_project is None or parent is None:
raise AssertionError(
"Collected items are not in right order!"
)
# try to find if entity already exists
else:
query = '{} where name is "{}" and parent_id is "{}"'.format(
entity_type, entity_name, parent['id']
)
try:
entity = self.session.query(query).one()
except Exception:
entity = None
# Create entity if not exists
if entity is None:
entity = self.create_entity(
name=entity_name,
type=entity_type,
parent=parent
)
# self.log.info('entity: {}'.format(dict(entity)))
# CUSTOM ATTRIBUTES
custom_attributes = entity_data.get('custom_attributes', [])
instances = [
i for i in self.context.data["instances"] if i.data['asset'] in entity['name']]
for key in custom_attributes:
assert (key in entity['custom_attributes']), (
'Missing custom attribute')
entity['custom_attributes'][key] = custom_attributes[key]
for instance in instances:
instance.data['ftrackShotId'] = entity['id']
try:
self.session.commit()
except Exception:
tp, value, tb = sys.exc_info()
self.session.rollback()
six.reraise(tp, value, tb)
# TASKS
tasks = entity_data.get('tasks', [])
existing_tasks = []
tasks_to_create = []
for child in entity['children']:
if child.entity_type.lower() == 'task':
existing_tasks.append(child['name'])
# existing_tasks.append(child['type']['name'])
for task in tasks:
if task in existing_tasks:
print("Task {} already exists".format(task))
continue
tasks_to_create.append(task)
for task in tasks_to_create:
self.create_task(
name=task,
task_type=ftrack_types[task],
parent=entity
)
if 'childs' in entity_data:
self.import_to_ftrack(
entity_data['childs'], ftrack_types, entity)
def get_all_task_types(self, project):
tasks = {}
proj_template = project['project_schema']
temp_task_types = proj_template['_task_type_schema']['types']
for type in temp_task_types:
if type['name'] not in tasks:
tasks[type['name']] = type
return tasks
def create_task(self, name, task_type, parent):
task = self.session.create('Task', {
'name': name,
'parent': parent
})
# TODO not secured!!! - check if task_type exists
self.log.info(task_type)
self.log.info(self.task_types)
task['type'] = self.task_types[task_type]
try:
self.session.commit()
except Exception:
tp, value, tb = sys.exc_info()
self.session.rollback()
six.reraise(tp, value, tb)
return task
def create_entity(self, name, type, parent):
entity = self.session.create(type, {
'name': name,
'parent': parent
})
try:
self.session.commit()
except Exception:
tp, value, tb = sys.exc_info()
self.session.rollback()
six.reraise(tp, value, tb)
return entity

View file

@ -1,160 +1,71 @@
import os
import sys
import shutil
from pysync import walktree
from avalon import api as avalon
from avalon.lib import launch
from pyblish import api as pyblish
from app import api as app
from pprint import pprint
from .. import api
from pypeapp import Logger
import requests
from .lib import (
setup,
reload_pipeline,
ls,
LOAD_PATH,
CREATE_PATH,
PUBLISH_PATH
)
__all__ = [
"setup",
"reload_pipeline",
"ls"
]
log = api.Logger.getLogger(__name__, "premiere")
AVALON_CONFIG = os.getenv("AVALON_CONFIG", "pype")
EXTENSIONS_PATH_LOCAL = os.getenv("EXTENSIONS_PATH", None)
EXTENSIONS_CACHE_PATH = os.getenv("EXTENSIONS_CACHE_PATH", None)
EXTENSIONS_PATH_REMOTE = os.path.join(os.path.dirname(__file__), "extensions")
PARENT_DIR = os.path.dirname(__file__)
PACKAGE_DIR = os.path.dirname(PARENT_DIR)
PLUGINS_DIR = os.path.join(PACKAGE_DIR, "plugins")
_clearing_cache = ["com.pype.rename", "com.pype.avalon"]
PUBLISH_PATH = os.path.join(
PLUGINS_DIR, "premiere", "publish"
).replace("\\", "/")
if os.getenv("PUBLISH_PATH", None):
os.environ["PUBLISH_PATH"] = os.pathsep.join(
os.environ["PUBLISH_PATH"].split(os.pathsep) +
[PUBLISH_PATH]
)
else:
os.environ["PUBLISH_PATH"] = PUBLISH_PATH
LOAD_PATH = os.path.join(PLUGINS_DIR, "premiere", "load")
CREATE_PATH = os.path.join(PLUGINS_DIR, "premiere", "create")
INVENTORY_PATH = os.path.join(PLUGINS_DIR, "premiere", "inventory")
def clearing_caches_ui():
'''Before every start of premiere it will make sure there is no
outdated stuff in cep_cache dir'''
for d in os.listdir(EXTENSIONS_CACHE_PATH):
match = [p for p in _clearing_cache
if str(p) in d]
if match:
try:
path = os.path.normpath(os.path.join(EXTENSIONS_CACHE_PATH, d))
log.info("Removing dir: {}".format(path))
shutil.rmtree(path, ignore_errors=True)
except Exception as e:
log.debug("problem: {}".format(e))
def request_aport(url_path, data={}):
try:
api.add_tool_to_environment(["aport_0.1"])
ip = os.getenv("PICO_IP", None)
if ip and ip.startswith('http'):
ip = ip.replace("http://", "")
port = int(os.getenv("PICO_PORT", None))
url = "http://{0}:{1}{2}".format(ip, port, url_path)
req = requests.post(url, data=data).text
return req
except Exception as e:
api.message(title="Premiere Aport Server",
message="Before you can run Premiere, start Aport Server. \n Error: {}".format(
e),
level="critical")
def extensions_sync():
# import time
process_pairs = list()
# get extensions dir in pype.premiere.extensions
# build dir path to premiere cep extensions
for name in os.listdir(EXTENSIONS_PATH_REMOTE):
print(name)
src = os.path.join(EXTENSIONS_PATH_REMOTE, name)
dst = os.path.join(EXTENSIONS_PATH_LOCAL, name)
process_pairs.append((name, src, dst))
# synchronize all extensions
for name, src, dst in process_pairs:
if not os.path.exists(dst):
os.makedirs(dst, mode=0o777)
walktree(source=src, target=dst, options_input=["y", ">"])
log.info("Extension {0} from `{1}` coppied to `{2}`".format(
name, src, dst
))
# time.sleep(10)
return
log = Logger().get_logger(__name__, "premiere")
def install():
api.set_avalon_workdir()
log.info("Registering Premiera plug-ins..")
reg_paths = request_aport("/api/register_plugin_path",
{"publish_path": PUBLISH_PATH})
"""Install Premiere-specific functionality of avalon-core.
# avalon.register_plugin_path(avalon.Loader, LOAD_PATH)
# avalon.register_plugin_path(avalon.Creator, CREATE_PATH)
# avalon.register_plugin_path(avalon.InventoryAction, INVENTORY_PATH)
This is where you install menus and register families, data
and loaders into Premiere.
It is called automatically when installing via `api.install(premiere)`.
See the Maya equivalent for inspiration on how to implement this.
"""
# Disable all families except for the ones we explicitly want to see
# family_states = [
# "imagesequence",
# "mov"
#
# ]
# avalon.data["familiesStateDefault"] = False
# avalon.data["familiesStateToggled"] = family_states
family_states = [
"imagesequence",
"mov"
]
avalon.data["familiesStateDefault"] = False
avalon.data["familiesStateToggled"] = family_states
# load data from templates
api.load_data_from_templates()
log.info("pype.premiere installed")
# remove cep_cache from user temp dir
clearing_caches_ui()
pyblish.register_host("premiere")
pyblish.register_plugin_path(PUBLISH_PATH)
log.info("Registering Premiera plug-ins..")
# synchronize extensions
extensions_sync()
message = "The Pype extension has been installed. " \
"\nThe following publishing paths has been registered: " \
"\n\n{}".format(
reg_paths)
api.message(title="pyblish_paths", message=message, level="info")
# launching premiere
exe = r"C:\Program Files\Adobe\Adobe Premiere Pro CC 2019\Adobe Premiere Pro.exe".replace(
"\\", "/")
log.info("____path exists: {}".format(os.path.exists(exe)))
app.forward(args=[exe],
silent=False,
cwd=os.getcwd(),
env=dict(os.environ),
shell=None)
avalon.register_plugin_path(avalon.Loader, LOAD_PATH)
avalon.register_plugin_path(avalon.Creator, CREATE_PATH)
def uninstall():
log.info("Deregistering Premiera plug-ins..")
"""Uninstall all tha was installed
This is where you undo everything that was done in `install()`.
That means, removing menus, deregistering families and data
and everything. It should be as though `install()` was never run,
because odds are calling this function means the user is interested
in re-installing shortly afterwards. If, for example, they have been
modifying the menu or registered families.
"""
pyblish.deregister_host("premiere")
pyblish.deregister_plugin_path(PUBLISH_PATH)
log.info("Deregistering Premiera plug-ins..")
avalon.deregister_plugin_path(avalon.Loader, LOAD_PATH)
avalon.deregister_plugin_path(avalon.Creator, CREATE_PATH)
# reset data from templates
api.reset_data_from_templates()

View file

@ -0,0 +1,20 @@
@echo off
rem You need https://github.com/Adobe-CEP/CEP-Resources/raw/master/ZXPSignCMD/4.1.1/win64/ZXPSignCmd.exe
rem You need https://partners.adobe.com/exchangeprogram/creativecloud/support/exman-com-line-tool.html
rem !!! make sure you run Windows PowerShell as admin
set pwd="12PPROext581"
echo ">>> creating certificate ..."
.\ZXPSignCmd -selfSignedCert CZ Prague OrbiTools "Signing robot" %pwd% certificate.p12
echo ">>> building com.pype"
.\ZXPSignCmd -sign com.pype/ pype.zxp certificate.p12 %pwd%
echo ">>> building com.pype.rename"
.\ZXPSignCmd -sign com.pype.rename/ pype_rename.zxp certificate.p12 %pwd%
echo ">>> installing com.pype"
.\ExManCmd.exe /install .\pype.zxp
echo ">>> installing com.pype.rename"
.\ExManCmd.exe /install .\pype_rename.zxp
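rem After a successful run this folder should contain certificate.p12,
rem pype.zxp and pype_rename.zxp (ZXPSignCmd.exe and ExManCmd.exe are
rem assumed to sit next to this script).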

View file

@ -1,8 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<ExtensionList>
<Extension Id="com.pype.avalon">
<HostList>
<Host Name="PPRO" Port="7778"/>
</HostList>
</Extension>
</ExtensionList>

View file

@ -1,17 +0,0 @@
<!doctype html>
<html lang="en">
<head>
<meta charset="utf-8">
<title>Avalon</title>
<link rel="stylesheet" type="text/css" href="css/app.css">
</head>
<body onLoad="onLoaded()">
</body>
<script src="js/app.js"></script>
<script src="js/json2.js"></script>
</html>

View file

@ -1,60 +0,0 @@
/*************************************************************************
* ADOBE CONFIDENTIAL
* ___________________
*
* Copyright 2014 Adobe
* All Rights Reserved.
*
* NOTICE: Adobe permits you to use, modify, and distribute this file in
* accordance with the terms of the Adobe license agreement accompanying
* it. If you have received this file from a source other than Adobe,
* then your use, modification, or distribution of it requires the prior
* written permission of Adobe.
**************************************************************************/
// time display types
TIMEDISPLAY_24Timecode = 100;
TIMEDISPLAY_25Timecode = 101;
TIMEDISPLAY_2997DropTimecode = 102;
TIMEDISPLAY_2997NonDropTimecode = 103;
TIMEDISPLAY_30Timecode = 104;
TIMEDISPLAY_50Timecode = 105;
TIMEDISPLAY_5994DropTimecode = 106;
TIMEDISPLAY_5994NonDropTimecode = 107;
TIMEDISPLAY_60Timecode = 108;
TIMEDISPLAY_Frames = 109;
TIMEDISPLAY_23976Timecode = 110;
TIMEDISPLAY_16mmFeetFrames = 111;
TIMEDISPLAY_35mmFeetFrames = 112;
TIMEDISPLAY_48Timecode = 113;
TIMEDISPLAY_AudioSamplesTimecode = 200;
TIMEDISPLAY_AudioMsTimecode = 201;
// field type constants
FIELDTYPE_Progressive = 0;
FIELDTYPE_UpperFirst = 1;
FIELDTYPE_LowerFirst = 2;
// audio channel types
AUDIOCHANNELTYPE_Mono = 0;
AUDIOCHANNELTYPE_Stereo = 1;
AUDIOCHANNELTYPE_51 = 2;
AUDIOCHANNELTYPE_Multichannel = 3;
AUDIOCHANNELTYPE_4Channel = 4;
AUDIOCHANNELTYPE_8Channel = 5;
// vr projection type
VRPROJECTIONTYPE_None = 0;
VRPROJECTIONTYPE_Equirectangular = 1;
// vr stereoscopic type
VRSTEREOSCOPICTYPE_Monoscopic = 0;
VRSTEREOSCOPICTYPE_OverUnder = 1;
VRSTEREOSCOPICTYPE_SideBySide = 2;
NOT_SET = -400000;

File diff suppressed because it is too large

View file

@ -1,178 +0,0 @@
/* global app, XMPMeta, ExternalObject, CSXSEvent, Folder */
/* --------------------------------------
-. == [ part 0f PyPE CluB ] == .-
_______________.___._____________________
\______ \__ | |\______ \_ _____/
| ___// | | | ___/| __)_
| | \____ | | | | \
|____| / ______| |____| /_______ /
\/ \/
.. __/ CliP R3N4M3R \__ ..
*/
// variable br is defined in pypeAvalon.jsx
br = {
getSelectedVideoTrackItems: function () {
var seq = app.project.activeSequence;
var selected = [];
var videoTracks = seq.videoTracks;
var numOfVideoTracks = videoTracks.numTracks;
// VIDEO CLIPS IN SEQUENCES
for (var l = 0; l < numOfVideoTracks; l++) {
var videoTrack = seq.videoTracks[l];
if (videoTrack.isTargeted()) {
$.writeln(videoTrack.name);
var numOfClips = videoTrack.clips.numTracks;
for (var m = 0; m < numOfClips; m++) {
var clip = videoTrack.clips[m];
selected.push({
'name': clip.name,
'clip': clip,
'sequence': seq,
'videoTrack': videoTrack
});
}
}
}
var names = [];
var items = {};
var sorted = [];
for (var c = 0; c < selected.length; c++) {
items[selected[c].name] = selected[c];
names.push(selected[c].name);
}
names.sort();
for (var cl = 0; cl < names.length; cl++) {
sorted.push(items[names[cl]]);
}
return sorted;
},
/**
* Set Pype metadata into sequence metadata using XMP.
* This is `hackish` way to get over premiere lack of addressing unique clip on timeline,
* so we cannot store data directly per clip.
*
* @param {Object} sequence - sequence object
* @param {Object} data - to be serialized and saved
*/
setSequencePypeMetadata: function (sequence, data) { // eslint-disable-line no-unused-vars
var kPProPrivateProjectMetadataURI = 'http://ns.adobe.com/premierePrivateProjectMetaData/1.0/';
var metadata = sequence.projectItem.getProjectMetadata();
var pypeData = 'pypeData';
var xmp = new XMPMeta(metadata);
app.project.addPropertyToProjectMetadataSchema(pypeData, 'Pype Data', 2);
xmp.setProperty(kPProPrivateProjectMetadataURI, pypeData, JSON.stringify(data));
var str = xmp.serialize();
sequence.projectItem.setProjectMetadata(str, [pypeData]);
// test
var newMetadata = sequence.projectItem.getProjectMetadata();
var newXMP = new XMPMeta(newMetadata);
var found = newXMP.doesPropertyExist(kPProPrivateProjectMetadataURI, pypeData);
if (!found) {
app.setSDKEventMessage('metadata not set', 'error');
}
},
/**
* Get Pype metadata from sequence using XMP.
* @param {Object} sequence
* @return {Object}
*/
getSequencePypeMetadata: function (sequence) { // eslint-disable-line no-unused-vars
var kPProPrivateProjectMetadataURI = 'http://ns.adobe.com/premierePrivateProjectMetaData/1.0/';
var metadata = sequence.projectItem.getProjectMetadata();
var pypeData = 'pypeData';
var pypeDataN = 'Pype Data';
var xmp = new XMPMeta(metadata);
app.project.addPropertyToProjectMetadataSchema(pypeData, pypeDataN, 2);
var pypeDataValue = xmp.getProperty(kPProPrivateProjectMetadataURI, pypeData);
$.writeln('pypeDataValue');
$.writeln(pypeDataValue);
if (pypeDataValue === undefined) {
var pyMeta = {
clips: {},
tags: {}
};
br.setSequencePypeMetadata(sequence, pyMeta);
pypeDataValue = xmp.getProperty(kPProPrivateProjectMetadataURI, pypeData);
return br.getSequencePypeMetadata(sequence);
} else {
return JSON.parse(pypeDataValue);
}
},
renameTargetedTextLayer: function (data) {
$.writeln(data);
var selected = br.getSelectedVideoTrackItems();
var seq = app.project.activeSequence;
var metadata = br.getSequencePypeMetadata(seq);
var startCount = 10;
var stepCount = 10;
var padding = 3;
var newItems = {};
var episode = data.ep;
var episodeSuf = data.epSuffix;
var shotPref = 'sh';
var count = 0;
var seqCheck = '';
for (var c = 0; c < selected.length; c++) {
// fill in hierarchy if set
var parents = [];
var hierarchy = [];
var name = selected[c].name;
var sequenceName = name.slice(0, 5);
var shotNum = Number(name.slice((name.length - 3), name.length));
// if (sequenceName !== seqCheck) {
// seqCheck = sequenceName;
// count = 0;
// };
//
// var seqCount = (count * stepCount) + startCount;
// count += 1;
var newName = episode + sequenceName + shotPref + (shotNum).pad(padding);
$.writeln(newName);
selected[c].clip.name = newName;
parents.push({
'entityType': 'episode',
'entityName': episode + '_' + episodeSuf
});
hierarchy.push(episode + '_' + episodeSuf);
parents.push({
'entityType': 'sequence',
'entityName': episode + sequenceName
});
hierarchy.push(episode + sequenceName);
newItems[newName] = {
'parents': parents,
'hierarchy': hierarchy.join('/')
};
}
metadata.clips = newItems;
br.setSequencePypeMetadata(seq, metadata);
return JSON.stringify(metadata);
}
};
Number.prototype.pad = function (size) {
var s = String(this);
while (s.length < (size || 2)) {
s = "0" + s;
}
return s;
}

File diff suppressed because it is too large

View file

@ -1 +0,0 @@
application/vnd.adobe.air-ucf-package+zip

View file

@ -1,3 +0,0 @@
body{background-color:#323238;color:#eeeeee}#output{background:#121212;color:#eeeeee;padding:2em;font-family:monospace;font-weight:bold;min-height:8em}.dark>.list-group-item{background:#454747}
/*# sourceMappingURL=avalon.min.css.map */

View file

@ -1,162 +0,0 @@
<!DOCTYPE html>
<html>
<html lang="en">
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1, shrink-to-fit=no">
<title>Pype extention</title>
<!-- Load the pico Javascript client, always automatically available at /pico.js -->
<script src="/pico.js"></script>
<!-- Or load our module proxy -->
<script src="/api.js"></script>
<link href="./css/bootstrap.min.css" type="text/css" rel="stylesheet">
<link href="./css/avalon.min.css" type="text/css" rel="stylesheet">
<script>
if (typeof module === 'object') {
window.module = module;
module = undefined;
}
</script>
<script src="./js/vendor/jquery-3.3.1.min.js"></script>
<script src="./js/vendor/CSInterface-8.js"></script>
<script src="./js/vendor/popper.min.js"></script>
<script src="./js/vendor/bootstrap.min.js"></script>
<script src="./js/vendor/json2.js"></script>
<script>
if (window.module) module = window.module;
var ENV;
</script>
</head>
<body>
<div id="section"><a href="javascript:history.go(0)">Refresh panel</a>
<ul class="list-group list-group-flush dark">
<li class="list-group-item" id="rename">
<div class="input-group input-group-sm mb-1">
<div class="input-group-prepend">
<span class="input-group-text" id="basic-addon6">Rename targeted text layers<br /> converts sc010sh020 <br />to lbb201sc010sh020<br />and creates ftrack metadata</span>
<div class="input-group-text">
<input type="text" name="episode" placeholder="lbb2" aria-label="episode" aria-describedby="basic-addon5" value="" style="width:75px;">
</div>
<div class="input-group-text">
<input type="text" name="ep_suffix" placeholder="nameofepisode" aria-label="Name of episode" aria-describedby="basic-addon5" value="">
</div>
</div>
<div class="input-group-append">
<button id="btn-rename" type="button" class="btn btn-info btn-sm btn-block">Rename</button>
</div>
</div>
</li>
<li class="list-group-item" id="publish">
<h5>Publish</h5>
<pre><code class="js"></code></pre>
<div class="input-group input-group-lg mb-4">
<!-- <div class="input-group-prepend">
<span class="input-group-text" id="basic-addon6">GUI</span>
<div class="input-group-text">
<input type="checkbox" name="gui" checked="checked" aria-label="Checkbox for following text input">
</div>
</div> -->
<div class="input-group-prepend">
<span class="input-group-text" id="basic-addon6">Version Up</span>
<div class="input-group-text">
<input type="checkbox" name="version-up" checked="checked" aria-label="Checkbox for following text input">
</div>
<span class="input-group-text" id="basic-addon6">
Audio Only</span>
<div class="input-group-text">
<input type="checkbox" name="audio-only" aria-label="Check if you want to export only audio">
</div>
</div>
<div class="input-group-append">
<button id="btn-publish" type="button" class="btn btn-info btn-sm btn-block">Publish</button>
</div>
</div>
<div class="input-group input-group-sm mb-1">
<div class="input-group-prepend">
<span class="input-group-text" id="basic-addon6">Path to sending data json</span>
<div class="input-group-text">
<input type="text" name="send-path" placeholder="Path" aria-label="Path" aria-describedby="basic-addon5" value="">
</div>
</div>
<div class="input-group-append">
<button id="btn-send-reset" type="button" class="btn btn-info btn-sm btn-block">Reset</button>
</div>
</div>
<div class="input-group input-group-sm mb-3">
<div class="input-group-prepend">
<span class="input-group-text" id="basic-addon6">Path to getting data json</span>
<div class="input-group-text">
<input type="text" name="get-path" placeholder="Path" aria-label="Path" aria-describedby="basic-addon5" value="">
</div>
</div>
<div class="input-group-prepend">
<button id="btn-get-reset" type="button" class="btn btn-info btn-sm btn-block">Reset</button>
</div>
<div class="input-group-append">
<button id="btn-metadata" type="button" class="btn btn-info btn-sm btn-block">Injest metadata</button>
</div>
</div>
</li>
<li class="list-group-item"><button type="button" class="btn btn-info btn-sm btn-block" id="btn-newWorkfileVersion">Save next workfile version</button></li>
<li class="list-group-item"><button type="button" class="btn btn-info btn-sm btn-block" id="btn-get-frame">Get screen grab</button></li>
<li class="list-group-item" id="load">
<h5>Load/Update assets to timeline</h5>
<pre><code class="js"></code></pre>
<div class="input-group-prepend">
<span class="input-group-text" id="basic-addon6">Type</span>
<div class="input-group-text">
<input type="text" name="type" placeholder="renderAnimation" aria-label="Asset Type" aria-describedby="basic-addon5" value="renderAnimation">
</div>
<span class="input-group-text" id="basic-addon6">Ext</span>
<div class="input-group-text">
<input type="text" name="ext" placeholder="mxf" aria-label="File Extension" aria-describedby="basic-addon5" value="mxf">
</div>
<div class="input-group-append">
<button type="button" class="btn btn-info btn-sm btn-block" id="btn-getRernderAnimation">DO IT!</button>
</div>
</div>
</li>
</ul>
<hr />
<div class="col-md-6" id="source">
<!-- <pre>
<code class="python"></code>
</pre> -->
</div>
<h5>Output</h5>
<div class="row" id="output">
</div>
<script src="./js/pico_client.js"></script>
<script src="./js/avalon.js"></script>
</body>
</html>

View file

@ -1,367 +0,0 @@
/* global CSInterface, $, querySelector, api, displayResult */
var csi = new CSInterface();
var output = document.getElementById('output');
var rootFolderPath = csi.getSystemPath(SystemPath.EXTENSION);
var timecodes = cep_node.require('node-timecodes');
var process = cep_node.require('process');
function getEnv() {
csi.evalScript('pype.getProjectFileData();', function (result) {
process.env.EXTENSION_PATH = rootFolderPath;
window.ENV = process.env;
var resultData = JSON.parse(result);
for (var key in resultData) {
window.ENV[key] = resultData[key];
};
csi.evalScript('pype.setEnvs(' + JSON.stringify(window.ENV) + ')');
});
}
function renderClips() {
csi.evalScript('pype.transcodeExternal(' + rootFolderPath + ');', function (result) {
displayResult(result);
});
}
function displayResult(r) {
console.log(r);
csi.evalScript('$.writeln( ' + JSON.stringify(r) + ' )');
output.classList.remove("error");
output.innerText = r;
}
function displayError(e) {
output.classList.add("error");
output.innerText = e.message;
}
function loadJSX() {
// get the appName of the currently used app. For Premiere Pro it's "PPRO"
var appName = csi.hostEnvironment.appName;
var extensionPath = csi.getSystemPath(SystemPath.EXTENSION);
// load general JSX script independent of appName
var extensionRootGeneral = extensionPath + '/jsx/';
csi.evalScript('$._ext.evalFiles("' + extensionRootGeneral + '")');
// load JSX scripts based on appName
var extensionRootApp = extensionPath + '/jsx/' + appName + '/';
csi.evalScript('$._ext.evalFiles("' + extensionRootApp + '")');
// csi.evalScript('$._PPP_.logConsoleOutput()');
getEnv();
csi.evalScript('$._PPP_.updateEventPanel( "' + "all plugins are loaded" + '" )');
csi.evalScript('$._PPP_.updateEventPanel( "' + "testing function done" + '" )');
}
// run all at loading
loadJSX()
function loadAnimationRendersToTimeline() {
// it will get type of asset and extension from input
// and start loading script from jsx
var $ = querySelector('#load');
var data = {};
data.subset = $('input[name=type]').value;
data.subsetExt = $('input[name=ext]').value;
var requestList = [];
// get all selected clips
csi.evalScript('pype.getClipsForLoadingSubsets( "' + data.subset + '" )', function (result) {
// TODO: need to check if the clips are already created and this is just updating to last versions
var resultObj = JSON.parse(result);
var instances = resultObj[0];
var numTracks = resultObj[1];
var key = '';
// creating requesting list of dictionaries
for (key in instances) {
var clipData = {};
clipData.parentClip = instances[key];
clipData.asset = key;
clipData.subset = data.subset;
clipData.representation = data.subsetExt;
requestList.push(clipData);
}
// gets data from mongodb
api.load_representations(window.ENV['AVALON_PROJECT'], requestList).then(
function (avalonData) {
// creates or updates data on timeline
var makeData = {};
makeData.binHierarchy = data.subset + '/' + data.subsetExt;
makeData.clips = avalonData;
makeData.numTracks = numTracks;
csi.evalScript('pype.importFiles( ' + JSON.stringify(makeData) + ' )');
}
);
});
}
function evalScript(script) {
var callback = function (result) {
displayResult(result);
};
csi.evalScript(script, callback);
}
function deregister() {
api.deregister_plugin_path().then(displayResult);
}
function register() {
var $ = querySelector('#register');
var path = $('input[name=path]').value;
api.register_plugin_path(path).then(displayResult);
}
function getStagingDir() {
// create stagingDir
const fs = require('fs-extra');
const os = require('os');
const path = require('path');
const UUID = require('pure-uuid');
const id = new UUID(4).format();
const stagingDir = path.join(os.tmpdir(), id);
fs.mkdirs(stagingDir);
return stagingDir;
}
function convertPathString(path) {
return path.replace(
new RegExp('\\\\', 'g'), '/').replace(new RegExp('//\\?/', 'g'), '');
}
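// Usage sketch (illustrative): the two helpers above combine to give a
// forward-slash staging path whose last segment is a random UUID under the
// OS temp directory (the path below is only an example):
//   var stagingDir = convertPathString(getStagingDir());
//   // -> e.g. "C:/Users/<user>/AppData/Local/Temp/<uuid>" on Windows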
function publish() {
var $ = querySelector('#publish');
// var gui = $('input[name=gui]').checked;
var gui = true;
var versionUp = $('input[name=version-up]').checked;
var audioOnly = $('input[name=audio-only]').checked;
var jsonSendPath = $('input[name=send-path]').value;
var jsonGetPath = $('input[name=get-path]').value;
var publish_path = window.ENV['PUBLISH_PATH'];
if (jsonSendPath == '') {
// create temp staging directory on local
var stagingDir = convertPathString(getStagingDir());
// copy project file to stagingDir
const fs = require('fs-extra');
const path = require('path');
csi.evalScript('pype.getProjectFileData();', function (result) {
displayResult(result);
var data = JSON.parse(result);
displayResult(stagingDir);
displayResult(data.projectfile);
var destination = convertPathString(path.join(stagingDir, data.projectfile));
displayResult('copy project file');
displayResult(data.projectfile);
displayResult(destination);
fs.copyFile(data.projectpath, destination);
displayResult('project file coppied!');
});
// publishing file
csi.evalScript('pype.getPyblishRequest("' + stagingDir + '", ' + audioOnly + ');', function (r) {
var request = JSON.parse(r);
displayResult(JSON.stringify(request));
csi.evalScript('pype.encodeRepresentation(' + JSON.stringify(request) + ');', function (result) {
// create json for pyblish
var jsonfile = require('jsonfile');
var jsonSendPath = stagingDir + '_send.json'
var jsonGetPath = stagingDir + '_get.json'
$('input[name=send-path]').value = jsonSendPath;
$('input[name=get-path]').value = jsonGetPath;
var jsonContent = JSON.parse(result);
jsonfile.writeFile(jsonSendPath, jsonContent);
var checkingFile = function (path) {
var timeout = 1000;
setTimeout(function () {
if (fs.existsSync(path)) {
// register publish path
api.register_plugin_path(publish_path).then(displayResult);
// send json to pyblish
api.publish(jsonSendPath, jsonGetPath, gui).then(function (result) {
// check if resulted path exists as file
if (fs.existsSync(result.get_json_path)) {
// read json data from resulted path
displayResult('Updating metadata of clips after publishing');
jsonfile.readFile(result.get_json_path, function (err, json) {
csi.evalScript('pype.dumpPublishedInstancesToMetadata(' + JSON.stringify(json) + ');');
})
// version up project
if (versionUp) {
displayResult('Saving new version of the project file');
csi.evalScript('pype.versionUpWorkFile();');
};
} else {
// if resulted path file not existing
displayResult('Publish has not been finished correctly. Hit Publish again to publish from already rendered data, or Reset to render all again.');
};
});
} else {
displayResult('waiting');
checkingFile(path);
};
},
timeout)
};
checkingFile(jsonContent.waitingFor)
});
});
} else {
// modules used in this branch (the branch above requires them only locally)
const fs = require('fs-extra');
var jsonfile = require('jsonfile');
// register publish path
api.register_plugin_path(publish_path).then(displayResult);
// send json to pyblish
api.publish(jsonSendPath, jsonGetPath, gui).then(function (result) {
// check if resulted path exists as file
if (fs.existsSync(result.get_json_path)) {
// read json data from resulted path
displayResult('Updating metadata of clips after publishing');
jsonfile.readFile(result.get_json_path, function (err, json) {
csi.evalScript('pype.dumpPublishedInstancesToMetadata(' + JSON.stringify(json) + ');');
})
// version up project
if (versionUp) {
displayResult('Saving new version of the project file');
csi.evalScript('pype.versionUpWorkFile();');
};
} else {
// if resulted path file not existing
displayResult('Publish has not been finished correctly. Hit Publish again to publish from already rendered data, or Reset to render all again.');
};
});
};
// $('input[name=send-path]').value = '';
// $('input[name=get-path]').value = '';
}
function context() {
var $ = querySelector('#context');
var project = $('input[name=project]').value;
var asset = $('input[name=asset]').value;
var task = $('input[name=task]').value;
var app = $('input[name=app]').value;
api.context(project, asset, task, app).then(displayResult);
}
function tc(timecode) {
var seconds = timecodes.toSeconds(timecode);
var timec = timecodes.fromSeconds(seconds);
displayResult(seconds);
displayResult(timec);
}
function rename() {
var $ = querySelector('#rename');
var data = {};
data.ep = $('input[name=episode]').value;
data.epSuffix = $('input[name=ep_suffix]').value;
if (!data.ep) {
csi.evalScript('pype.alert_message("' + 'Need to fill episode code' + '")');
return;
};
if (!data.epSuffix) {
csi.evalScript('pype.alert_message("' + 'Need to fill episode longer suffix' + '")');
return;
};
csi.evalScript('br.renameTargetedTextLayer( ' + JSON.stringify(data) + ' );', function (result) {
displayResult(result);
});
}
// bind buttons
$('#btn-getRernderAnimation').click(function () {
loadAnimationRendersToTimeline();
});
$('#btn-rename').click(function () {
rename();
});
$('#btn-set-context').click(function () {
context();
});
$('#btn-register').click(function () {
register();
});
$('#btn-deregister').click(function () {
deregister();
});
$('#btn-publish').click(function () {
publish();
});
$('#btn-send-reset').click(function () {
var $ = querySelector('#publish');
$('input[name=send-path]').value = '';
});
$('#btn-get-reset').click(function () {
var $ = querySelector('#publish');
$('input[name=get-path]').value = '';
});
$('#btn-get-active-sequence').click(function () {
evalScript('pype.getActiveSequence();');
});
$('#btn-get-selected').click(function () {
$('#output').html('getting selected clips info ...');
evalScript('pype.getSelectedItems();');
});
$('#btn-get-env').click(function () {
displayResult(window.ENV);
});
$('#btn-get-projectitems').click(function () {
evalScript('pype.getProjectItems();');
});
$('#btn-metadata').click(function () {
var $ = querySelector('#publish');
var path = $('input[name=get-path]').value;
var jsonfile = require('jsonfile');
displayResult(path);
jsonfile.readFile(path, function (err, json) {
csi.evalScript('pype.dumpPublishedInstancesToMetadata(' + JSON.stringify(json) + ');');
displayResult('Metadata of clips after publishing were updated');
})
});
$('#btn-get-frame').click(function () {
evalScript('$._PPP_.exportCurrentFrameAsPNG();');
});
$('#btn-tc').click(function () {
tc('00:23:47:10');
});
$('#btn-generateRequest').click(function () {
evalScript('pype.getPyblishRequest();');
});
$('#btn-newWorkfileVersion').click(function () {
evalScript('pype.versionUpWorkFile();');
});

View file

@ -1,75 +0,0 @@
// pico connection to the Python module named 'api'
var api = pico.importModule('api');
function querySelector(parent) {
return function (child) {
return document.querySelector(parent).querySelector(child)
};
}
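// Usage sketch (illustrative): scoped lookups inside one panel section,
// matching how the publish() handler reads its inputs:
//   var $publish = querySelector('#publish');
//   var sendPath = $publish('input[name=send-path]').value;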
var defs = {}
function jumpTo(name) {
var e = defs[name];
document.querySelectorAll('.highlight').forEach(function (el) {
el.classList.remove('highlight');
});
e.classList.add('highlight');
return false;
}
function unindent(code) {
var lines = code.split('\n');
var margin = -1;
for (var j = 0; j < lines.length; j++) {
var l = lines[j];
for (var i = 0; i < l.length; i++) {
if (l[i] != " ") {
margin = i;
break;
}
}
if (margin > -1) {
break;
}
}
lines = lines.slice(j);
return lines.map(function (s) {
return s.substr(margin)
}).join('\n');
}
function ready() {
// // set the <code> element of each example to the corresponding functions source
// document.querySelectorAll('li pre code.js').forEach(function(e){
// var id = e.parentElement.parentElement.id;
// var f = window[id];
// var code = f.toString().split('\n').slice(2, -1).join('\n');
// e.innerText = unindent(code);
// })
document.querySelectorAll('li pre code.html').forEach(function (e) {
var html = e.parentElement.parentElement.querySelector('div.example').innerHTML;
e.innerText = unindent(html);
})
hljs.initHighlighting();
// // find all the elements representing the function definitions in the python source
// document.querySelectorAll('.python .hljs-function .hljs-title').forEach(function(e){
// var a = document.createElement('a');
// a.name = e.innerText;
// e.parentElement.insertBefore(a, e)
// return defs[e.innerText] = e.parentElement;
// });
// convert all 'api.X' strings to hyperlinks to jump to python source
document.querySelectorAll('.js').forEach(function (e) {
var code = e.innerHTML;
Object.keys(defs).forEach(function (k) {
code = code.replace('api.' + k + '(', '<a href="#' + k + '" onclick="jumpTo(\'' + k + '\')">api.' + k + '</a>(');
})
e.innerHTML = code;
})
}

View file

@ -1,92 +0,0 @@
/*************************************************************************
* ADOBE CONFIDENTIAL
* ___________________
*
* Copyright 2014 Adobe
* All Rights Reserved.
*
* NOTICE: Adobe permits you to use, modify, and distribute this file in
* accordance with the terms of the Adobe license agreement accompanying
* it. If you have received this file from a source other than Adobe,
* then your use, modification, or distribution of it requires the prior
* written permission of Adobe.
**************************************************************************/
var json2 = '~/AppData/Roaming/Adobe/CEP/extensions/com.pype.avalon/js/json2.js';
$.evalFile(json2);
if (typeof ($) == 'undefined') {
$ = {};
}
if (typeof (pype) == 'undefined') {
var pype = {};
}
if (typeof (br) == 'undefined') {
var br = {};
}
function keepExtention() {
return app.setExtensionPersistent("com.pype.avalon", 0);
}
keepExtention()
$._ext = {
// Evaluate a file and catch the exception.
evalFile: function (path) {
try {
$.evalFile(path);
$.writeln(path);
} catch (e) {
$.writeln(e);
alert("Exception:" + e);
}
},
// Evaluate all the files in the given folder
evalFiles: function (jsxFolderPath) {
var folder = new Folder(jsxFolderPath);
if (folder.exists) {
var jsxFiles = folder.getFiles("*.jsx");
for (var i = 0; i < jsxFiles.length; i++) {
var jsxFile = jsxFiles[i];
$._ext.evalFile(jsxFile);
}
}
},
// entry-point function to call scripts more easily & reliably
callScript: function (dataStr) {
try {
var dataObj = JSON.parse(decodeURIComponent(dataStr));
if (
!dataObj ||
!dataObj.namespace ||
!dataObj.scriptName ||
!dataObj.args
) {
throw new Error('Did not provide all needed info to callScript!');
}
// call the specified jsx-function
var result = $[dataObj.namespace][dataObj.scriptName].apply(
null,
dataObj.args
);
// build the payload-object to return
var payload = {
err: 0,
result: result
};
return encodeURIComponent(JSON.stringify(payload));
} catch (err) {
payload = {
err: err
};
return encodeURIComponent(JSON.stringify(payload));
}
}
};
// var dalsiJsxFile = 'C:\\Users\\hubert\\CODE\\pype-setup\\repos\\pype-config\\pype\\premiere\\extensions\\com.pype.avalon\\jsx\\pype.jsx';
// // $._ext.evalFile(dalsiJsxFile);
// $.evalFile(dalsiJsxFile);
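// Usage sketch (illustrative, panel side): a CSInterface call into $._ext.callScript.
// The namespace and function name below reuse names seen elsewhere in this panel
// ($._PPP_.updateEventPanel) but are placeholders here; `csi` is assumed to be a
// CSInterface instance on the panel side.
var exampleCallPayload = encodeURIComponent(JSON.stringify({
  namespace: '_PPP_',                 // assumed namespace object on $
  scriptName: 'updateEventPanel',     // assumed function in that namespace
  args: ['hello from the panel']
}));
// csi.evalScript('$._ext.callScript("' + exampleCallPayload + '")', function (encoded) {
//   var payload = JSON.parse(decodeURIComponent(encoded)); // { err: 0, result: ... } on success
// });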

View file

@ -0,0 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<ExtensionList>
<Extension Id="com.pype">
<HostList>
<Host Name="PPRO" Port="7766"/>
</HostList>
</Extension>
</ExtensionList>

View file

@ -2,9 +2,9 @@
<!-- /************************************************************************* * ADOBE CONFIDENTIAL * ___________________ * * Copyright 2014 Adobe * All Rights Reserved. * * NOTICE: Adobe permits you to use, modify, and distribute this file in *
accordance with the terms of the Adobe license agreement accompanying * it. If you have received this file from a source other than Adobe, * then your use, modification, or distribution of it requires the prior * written permission of Adobe.
**************************************************************************/ -->
<ExtensionManifest Version="5.0" ExtensionBundleId="com.pype.avalon" ExtensionBundleVersion="11.1" ExtensionBundleName="Pype Avalon Panel" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<ExtensionManifest Version="5.0" ExtensionBundleId="com.pype" ExtensionBundleVersion="11.1" ExtensionBundleName="Pype Panel" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<ExtensionList>
<Extension Id="com.pype.avalon" Version="10.3.0"/>
<Extension Id="com.pype" Version="10.3.0"/>
</ExtensionList>
<ExecutionEnvironment>
<HostList>
@ -19,11 +19,11 @@ accordance with the terms of the Adobe license agreement accompanying * it. If y
</ExecutionEnvironment>
<DispatchInfoList>
<Extension Id="com.pype.avalon">
<Extension Id="com.pype">
<DispatchInfo >
<Resources>
<MainPath>./index_remote.html</MainPath>
<ScriptPath>./pypeAvalon.jsx</ScriptPath>
<ScriptPath>./pypeApp.jsx</ScriptPath>
<CEFCommandLine>
<Parameter>--enable-nodejs</Parameter>
<Parameter>--mixed-context</Parameter>
@ -35,7 +35,7 @@ accordance with the terms of the Adobe license agreement accompanying * it. If y
</Lifecycle>
<UI>
<Type>Panel</Type>
<Menu>Avalon</Menu>
<Menu>Pype</Menu>
<Geometry>
<Size>
<Width>600</Width>

File diff suppressed because it is too large

View file

@ -0,0 +1,21 @@
<!doctype html>
<html lang="en">
<head>
<meta charset="utf-8">
<title>Avalon</title>
<link rel="stylesheet" type="text/css" href="css/app.css">
</head>
<body onLoad="onLoaded()">
</body>
<script src="./lib/app.js"></script>
<script src="./lib/json2.js"></script>
<script src="./lib/CSInterface.js"></script>
<script src="./lib/CEPEngine_extensions.js"></script>
<script src="./lib/jquery-1.9.1.js"></script>
<script src="./lib/Vulcan.js"></script>
</html>

File diff suppressed because it is too large

File diff suppressed because it is too large

View file

@ -0,0 +1,91 @@
/// <reference path="JavaScript.d.ts" />
interface ExternalObjectConstructor {
readonly prototype: ExternalObject
/**
* Creates a new ExternalObject object.
*/
new (lib: string): ExternalObject
(lib: string): ExternalObject
}
declare const ExternalObject: ExternalObjectConstructor
interface ExternalObject {
/**
* Set to true to write status information to standard output (the
* JavaScript Console in the ExtendScript Toolkit). Set to false to turn
* logging off. Default is false.
*/
log: boolean
/**
* A set of alternate paths in which to search for the shared library files, a
* single string with multiple path specifications delimited by semicolons
* (;). Paths can be absolute or relative to the Folder.startup location.
*/
searchFolders: string
/**
* The version of the library, as returned by ESGetVersion()
*/
version: number
/**
* Reports whether a compiled C/C++ library can be found, but does not load it. If logging is on, the
* paths searched are reported to the JavaScript Console in the ExtendScript Toolkit.
* Returns true if the library is found, false otherwise.
* @param spec The file specification for the compiled library, with or without path information.
*/
search(spec: string): boolean
/**
* Explicitly shuts down the ExternalObject dynamic library wrapped by this instance.
* It can be helpful to force a shutdown of the external library if termination of external libraries during
* the shutdown of the hosting application does not occur in the correct order.
*/
terminate(): undefined
}
interface CSXSEventConstructor {
readonly prototype: CSXSEvent
/**
* Creates a new CSXSEvent object.
*/
new (type?: string, scope?: string, data?: string): CSXSEvent
(type?: string, scope?: string, data?: string): CSXSEvent
}
declare const CSXSEvent: CSXSEventConstructor
interface CSXSEvent {
/**
* Retrieves the unique identifier of the application from which this event was dispatched.
*/
readonly appId: string
/**
* Retrieves or sets the payload of this event.
*/
data: string
/**
* Retrieves the unique identifier of the extension from which this event was dispatched.
*/
readonly extensionId: string
/**
* Retrieves the scope of this event.
*/
scope: string
/**
* Retrieves the type of this event.
*/
type: string
/**
* Dispatch the event
*/
dispatch(): void
}

File diff suppressed because it is too large

View file

@ -357,3 +357,7 @@ if (ExternalObject.AdobeXMPScript === undefined) {
// var seq = app.project.activeSequence;
// renamer.getSequencePypeMetadata(seq);
var messageText = 'this module is loaded> PypeRename.jsx';
$._PPP_.updateEventPanel(messageText);
$.writeln(messageText);

View file

@ -0,0 +1,51 @@
// A commonly used construct for loading XMPScript into
// ExtendScript contexts.
interface ExternalObjectConstructor {
AdobeXMPScript: ExternalObject | undefined;
}
interface XMPMetaConstructor {
/** Creates an empty object. */
new (): XMPMetaInstance;
/**
* @param packet A String containing an XML file or an XMP packet.
*/
new (packet: string): XMPMetaInstance;
/**
* @param buffer The UTF-8 or UTF-16 encoded bytes of an XML file
* or an XMP packet. This array is the result of a call to `serializeToArray`
* on an `XMPMeta` instance.
*/
new (buffer: number[]): XMPMetaInstance;
// Class stuff.
}
interface XMPMetaInstance {
doesPropertyExist(namespace:String, value:String): Boolean
getProperty(namespace:String, property:String): XMPProperty
setProperty(namespace:String, property:String, value:String): Boolean
countArrayItems(namespace:String, property:String): Number
getArrayItem(namespace:String, property:String, itemIndex:Number): XMPProperty
deleteProperty(namespace:String, property:String): Boolean
appendArrayItem(namespace:String, property:String, arrayOptions:String, valueToAppend:String, valueOptions:String): Boolean
dumpObject():String
serialize(): String
// Instance stuff.
}
declare const XMPMeta: XMPMetaConstructor | undefined;
interface XMPConstConstructor {
new (): XMPConstInstance;
NS_DM: string;
NS_DC: string;
ARRAY_IS_ORDERED: string;
// Class stuff.
}
interface XMPConstInstance {
// Instance stuff.
}
declare const XMPConst: XMPConstConstructor | undefined;

View file

@ -0,0 +1,116 @@
/* global $, JSON, app, XMPMeta, ExternalObject, CSXSEvent, Folder */
/* --------------------------------------
-. == [ part 0f PyPE CluB ] == .-
_______________.___._____________________
\______ \__ | |\______ \_ _____/
| ___// | | | ___/| __)_
| | \____ | | | | \
|____| / ______| |____| /_______ /
\/ \/
.. __/ CliP R3N4M3R \__ ..
*/
var BatchRenamer = {
getSelectedVideoTrackItems: function() {
var seq = app.project.activeSequence;
var selected = [];
var videoTracks = seq.videoTracks;
var numOfVideoTracks = videoTracks.numTracks;
// VIDEO CLIPS IN SEQUENCES
for (var l = 0; l < numOfVideoTracks; l++) {
var videoTrack = seq.videoTracks[l];
if (videoTrack.isTargeted()) {
$.writeln(videoTrack.name);
// var numOfClips = videoTrack.clips.numTracks;
var numOfClips = videoTrack.clips.numItems;
for (var m = 0; m < numOfClips; m++) {
var clip = videoTrack.clips[m];
selected.push({
name: clip.name,
clip: clip,
sequence: seq,
videoTrack: videoTrack
});
}
}
}
var names = [];
var items = {};
var sorted = [];
for (var c = 0; c < selected.length; c++) {
items[selected[c].name] = selected[c];
names.push(selected[c].name);
}
names.sort();
for (var cl = 0; cl < names.length; cl++) {
sorted.push(items[names[cl]]);
}
return sorted;
},
renameTargetedTextLayer: function (data) {
$.bp(true);
$.writeln(data);
var selected = BatchRenamer.getSelectedVideoTrackItems();
var seq = app.project.activeSequence;
var metadata = $.pype.getSequencePypeMetadata(seq, true);
var startCount = 10;
var stepCount = 10;
var padding = 3;
var newItems = {};
var episode = data.ep;
var episodeSuf = data.epSuffix;
var shotPref = 'sh';
var count = 0;
var seqCheck = '';
for (var c = 0; c < selected.length; c++) {
// fill in hierarchy if set
var parents = [];
var hierarchy = [];
var name = selected[c].name;
var sequenceName = name.slice(0, 5);
var shotNum = Number(name.slice((name.length - 3), name.length));
// if (sequenceName !== seqCheck) {
// seqCheck = sequenceName;
// count = 0;
// };
//
// var seqCount = (count * stepCount) + startCount;
// count += 1;
var newName = episode + sequenceName + shotPref + (shotNum).pad(padding);
$.pype.log(newName);
selected[c].clip.name = newName;
parents.push({
'entityType': 'Episode',
'entityName': episode + '_' + episodeSuf
});
hierarchy.push(episode + '_' + episodeSuf);
parents.push({
'entityType': 'Sequence',
'entityName': episode + sequenceName
});
hierarchy.push(episode + sequenceName);
newItems[newName] = {
'parents': parents,
'hierarchy': hierarchy.join('/')
};
}
metadata.clips = newItems;
$.pype.setSequencePypeMetadata(seq, metadata);
return JSON.stringify(metadata);
}
}
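// Usage sketch (illustrative, panel side): assuming this object is exposed to the
// panel as `br` (as the older avalon.js bindings above did), a rename call looks like:
//   var data = { ep: 'lbb2', epSuffix: 'nameofepisode' };
//   csi.evalScript('br.renameTargetedTextLayer(' + JSON.stringify(data) + ');', function (result) {
//     displayResult(result); // JSON string with the updated clip metadata
//   });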

File diff suppressed because it is too large

View file

@ -0,0 +1,149 @@
/**
* The global BridgeTalk object.
*/
declare var BridgeTalk: any
/**
* The Infinity global property is a predefined variable with the value for infinity.
*/
declare var Infinity: number
/**
* The NaN global property is a predefined variable with the value NaN (Not-a-Number), as specified by the IEEE-754 standard.
*/
declare var NaN: number
/**
* The application object
*/
declare var app: Application
declare interface Application {}
/**
* Displays an alert box
* @param message The text to display
* @param title The title of the alert; ignored on the Macintosh
* @param errorIcon Display an Error icon; ignored on the Macintosh
*/
declare function alert(message: string, title?: string, errorIcon?: boolean): void
/**
* Displays an alert box with Yes and No buttons; returns true for Yes
* @param message The text to display
* @param noAsDefault Set to true to set the No button as the default button
* @param title The title of the alert; ignored on the Macintosh
*/
declare function confirm(message: string, noAsDefault?: boolean, title?: string): boolean
/**
* Decodes a string created with encodeURI().
* @param uri The text to decode.
*/
declare function decodeURI(uri: string): string
/**
* Decodes a string created with encodeURIComponent().
* @param uri The text to decode.
*/
declare function decodeURIComponent(uri: string): string
/**
* Encodes a string after RFC2396.
* Create an UTF-8 ASCII encoded version of this string. The string is converted into UTF-8. Every non-alphanumeric character is encoded as a percent escape
* character of the form %xx, where xx is the hex value of the character. After the conversion to UTF-8 encoding and escaping, it is guaranteed that the string does not contain character codes greater than 127. The list of characters not to be encoded is -_.!~*'();/?:@&=+$,#. The method returns false on errors.
* @param text The text to encode.
*/
declare function encodeURI(text: string): string
/**
* Encodes a string after RFC2396.
* Create an UTF-8 ASCII encoded version of this string. The string is converted into UTF-8. Every non-alphanumeric character is encoded as a percent escape
* character of the form %xx, where xx is the hex value of the character. After the conversion to UTF-8 encoding and escaping, it is guaranteed that the string does not contain character codes greater than 127. The list of characters not to be encoded is -_.!~*'(). The method returns false on errors.
* @param text The text to encode.
*/
declare function encodeURIComponent(text: string): string
/**
* Creates a URL-encoded string from aString.
* In the new string, characters of aString that require URL encoding are replaced with the format %xx, where xx is the hexadecimal value of the character code in the Unicode character set.This format is used to transmit information appended to a URL during, for example, execution of the GET method.Use the unescape() global function to translate the string back into its original format. Returns a string which is aString URL-encoded.
* @param aString The string to be encoded.
*/
declare function escape(aString: string): string
/**
* Evaluates its argument as a JavaScript script, and returns the result of evaluation.
* You can pass the result of an object's toSource() method to reconstruct that object.
* @param stringExpression The string to evaluate.
*/
declare function eval(stringExpression: string): any
/**
* Evaluates an expression and reports whether the result is a finite number.
* Returns true if the expression is a finite number, false otherwise. False if the value is infinity or negative infinity.
* @param expression Any valid JavaScript expression.
*/
declare function isFinite(expression: number): boolean
/**
* Evaluates an expression and reports whether the result is "Not-a-Number" (NaN).
* Returns true if the result of evaluation is not a number (NaN), false if the value is a number.
* @param expression Any valid JavaScript expression.
*/
declare function isNaN(expression: number): boolean
/**
* Returns true if the supplied string is a valid XML name.
* @param name The XML name to test.
*/
declare function isXMLName(name: string): boolean
/**
* Localizes a ZString-encoded string and merges additional arguments into the string.
* @param what The string to localize. A ZString-encoded string that can contain placeholder for additional arguments in the form %1 to %n.
* @param arguments Optional argument(s) to be merged into the string. There may be more than one argument.
*/
declare function localize(what: string, ...arguments: any[]): string
/**
* Extracts a floating-point number from a string.
* Parses a string to find the first set of characters that can be converted to a floating point number, and returns that number, or NaN if it does not encounter characters that it can converted to a number.The function supports exponential notation.
* @param text The string from which to extract a floating point number.
*/
declare function parseFloat(text: string): number
/**
* Extracts an integer from a string.
* Parses a string to find the first set of characters, in a specified base, that can be converted to an integer, and returns that integer, or NaN if it does not encounter characters that it can convert to a number.
* @param text The string from which to extract an integer.
* @param base The base of the string to parse (from base 2 to base 36). If not supplied, base is determined by the format of string.
*/
declare function parseInt(text: string, base?: number): number
/**
* Displays a dialog allowing the user to enter text
* Returns null if the user cancelled the dialog, the text otherwise
* @param prompt The text to display
* @param default_ The default text to preset the edit field with
* @param title The title of the dialog;
*/
declare function prompt(prompt: string, default_?: string, title?: string): string
/**
* Defines the default XML namespace.
* This is a replacement function for the standard JavaScript statement set default xml namespace.
* @param namespace The namespace to use. Omit this parameter to return to the empty namespace. This is either a Namespace object or a string.
*/
declare function setDefaultXMLNamespace(namespace: Namespace): void
/**
* Translates URL-encoded string into a regular string, and returns that string.
* Use the escape() global function to URL-encode strings.
* @param stringExpression The URL-encoded string to convert.
*/
declare function unescape(stringExpression: string): string
/**
* Creates a source code representation of the supplied argument, and returns it as a string.
* @param what The object to uneval.
*/
declare function uneval(what: any): string

View file

@ -0,0 +1,9 @@
{
"compilerOptions": {
"noLib" : true,
"checkJs": true
},
"include": [
"**/*"
]
}

File diff suppressed because it is too large

View file

@ -0,0 +1,11 @@
var extensionFolder = new Folder(
'C:/Users/jezsc/CODE/pype-setup/repos/pype/pype/premiere/extensions/com.pype');
$.writeln(extensionFolder);
var mainJsx = extensionFolder + '/pypeApp.jsx';
$.evalFile(mainJsx);
var appName = 'PPRO';
$._ext.evalJSXFiles(extensionFolder, appName);
$._ext.evalJSFiles(extensionFolder);
///////////////////////////////////////////////////////////////////////////////////////////////////////////
$.pype.getSequencePypeMetadata(app.project.activeSequence);

View file

@ -0,0 +1,699 @@
/**************************************************************************************************
*
* ADOBE SYSTEMS INCORPORATED
* Copyright 2013 Adobe Systems Incorporated
* All Rights Reserved.
*
* NOTICE: Adobe permits you to use, modify, and distribute this file in accordance with the
* terms of the Adobe license agreement accompanying it. If you have received this file from a
* source other than Adobe, then your use, modification, or distribution of it requires the prior
* written permission of Adobe.
*
**************************************************************************************************/
// This is the JavaScript code for bridging to native functionality
// See CEPEngine_extensions.cpp for implementation of native methods.
//
// Note: So far all native file i/o functions are synchronous, and asynchronous file i/o is TBD.
/** Version v8.0.0 */
/*jslint vars: true, plusplus: true, devel: true, browser: true, nomen: true, indent: 4, forin: true, maxerr: 50, regexp: true */
/*global define, native */
var cep;
if (!cep) {
cep = {};
}
if (!cep.fs) {
cep.fs = {};
}
if (!cep.process) {
cep.process = {};
}
if (!cep.encoding) {
cep.encoding = {};
}
if (!cep.util) {
cep.util = {};
}
(function () {
// Internal function to get the last error code.
native function GetLastError();
function getLastError() {
return GetLastError();
}
function getErrorResult(){
var result = {err: getLastError()};
return result;
}
// Error values. These MUST be in sync with the error values
// at the top of CEPEngine_extensions.cpp
/**
* @constant No error.
*/
cep.fs.NO_ERROR = 0;
/**
* @constant Unknown error occurred.
*/
cep.fs.ERR_UNKNOWN = 1;
/**
* @constant Invalid parameters passed to function.
*/
cep.fs.ERR_INVALID_PARAMS = 2;
/**
* @constant File or directory was not found.
*/
cep.fs.ERR_NOT_FOUND = 3;
/**
* @constant File or directory could not be read.
*/
cep.fs.ERR_CANT_READ = 4;
/**
* @constant An unsupported encoding value was specified.
*/
cep.fs.ERR_UNSUPPORTED_ENCODING = 5;
/**
* @constant File could not be written.
*/
cep.fs.ERR_CANT_WRITE = 6;
/**
* @constant Target directory is out of space. File could not be written.
*/
cep.fs.ERR_OUT_OF_SPACE = 7;
/**
* @constant Specified path does not point to a file.
*/
cep.fs.ERR_NOT_FILE = 8;
/**
* @constant Specified path does not point to a directory.
*/
cep.fs.ERR_NOT_DIRECTORY = 9;
/**
* @constant Specified file already exists.
*/
cep.fs.ERR_FILE_EXISTS = 10;
/**
* @constant The maximum number of processes has been exceeded.
*/
cep.process.ERR_EXCEED_MAX_NUM_PROCESS = 101;
/**
* @constant Invalid URL.
*/
cep.util.ERR_INVALID_URL = 201;
/**
* @constant deprecated API.
*/
cep.util.DEPRECATED_API = 202;
/**
* @constant UTF8 encoding type.
*/
cep.encoding.UTF8 = "UTF-8";
/**
* @constant Base64 encoding type.
*/
cep.encoding.Base64 = "Base64";
/**
* Displays the OS File Open dialog, allowing the user to select files or directories.
*
* @param allowMultipleSelection {boolean} When true, multiple files/folders can be selected.
* @param chooseDirectory {boolean} When true, only folders can be selected. When false, only
* files can be selected.
* @param title {string} Title of the open dialog.
* @param initialPath {string} Initial path to display in the dialog. Pass NULL or "" to
* display the last path chosen.
* @param fileTypes {Array.<string>} The file extensions (without the dot) for the types
* of files that can be selected. Ignored when chooseDirectory=true.
*
* @return An object with these properties:
* <ul><li>"data": An array of the full names of the selected files.</li>
* <li>"err": The status of the operation, one of
* <br>NO_ERROR
* <br>ERR_INVALID_PARAMS </li>
* </ul>
**/
native function ShowOpenDialog();
cep.fs.showOpenDialog = function (allowMultipleSelection, chooseDirectory, title, initialPath, fileTypes) {
var resultString = ShowOpenDialog(allowMultipleSelection, chooseDirectory,
title || 'Open', initialPath || '',
fileTypes ? fileTypes.join(' ') : '');
var result = {data: JSON.parse(resultString || '[]'), err: getLastError() };
return result;
};
/**
* Displays the OS File Open dialog, allowing the user to select files or directories.
*
* @param allowMultipleSelection {boolean} When true, multiple files/folders can be selected.
* @param chooseDirectory {boolean} When true, only folders can be selected. When false, only
* files can be selected.
* @param title {string} Title of the open dialog.
* @param initialPath {string} Initial path to display in the dialog. Pass NULL or "" to
* display the last path chosen.
* @param fileTypes {Array.<string>} The file extensions (without the dot) for the types
* of files that can be selected. Ignored when chooseDirectory=true.
* @param friendlyFilePrefix {string} String to put in front of the extensions
* of files that can be selected. Ignored when chooseDirectory=true. (win only)
* For example:
* fileTypes = ["gif", "jpg", "jpeg", "png", "bmp", "webp", "svg"];
* friendlyFilePrefix = "Images (*.gif;*.jpg;*.jpeg;*.png;*.bmp;*.webp;*.svg)";
* @param prompt {string} String for OK button (mac only, default is "Open" on mac, "Open" or "Select Folder" on win).
*
* @return An object with these properties:
* <ul><li>"data": An array of the full names of the selected files.</li>
* <li>"err": The status of the operation, one of
* <br>NO_ERROR
* <br>ERR_INVALID_PARAMS </li>
* </ul>
**/
native function ShowOpenDialogEx();
cep.fs.showOpenDialogEx = function (allowMultipleSelection, chooseDirectory, title, initialPath, fileTypes,
friendlyFilePrefix, prompt) {
var resultString = ShowOpenDialogEx(allowMultipleSelection, chooseDirectory,
title || 'Open', initialPath || '',
fileTypes ? fileTypes.join(' ') : '', friendlyFilePrefix || '',
prompt || '');
var result = {data: JSON.parse(resultString || '[]'), err: getLastError() };
return result;
};
/**
* Displays the OS File Save dialog, allowing the user to type in a file name.
*
* @param title {string} Title of the save dialog.
* @param initialPath {string} Initial path to display in the dialog. Pass NULL or "" to
* display the last path chosen.
* @param fileTypes {Array.<string>} The file extensions (without the dot) for the types
* of files that can be selected.
* @param defaultName {string} String to start with for the file name.
* @param friendlyFilePrefix {string} String to put in front of the extensions of files that can be selected. (win only)
* For example:
* fileTypes = ["gif", "jpg", "jpeg", "png", "bmp", "webp", "svg"];
* friendlyFilePrefix = "Images (*.gif;*.jpg;*.jpeg;*.png;*.bmp;*.webp;*.svg)";
* @param prompt {string} String for Save button (mac only, default is "Save" on mac and win).
* @param nameFieldLabel {string} String displayed in front of the file name text field (mac only, "File name:" on win).
*
* @return An object with these properties:
* <ul><li>"data": The file path selected to save at or "" if canceled</li>
* <li>"err": The status of the operation, one of
* <br>NO_ERROR
* <br>ERR_INVALID_PARAMS </li>
* </ul>
**/
native function ShowSaveDialogEx();
cep.fs.showSaveDialogEx = function (title, initialPath, fileTypes, defaultName, friendlyFilePrefix, prompt, nameFieldLabel) {
var resultString = ShowSaveDialogEx(title || '', initialPath || '',
fileTypes ? fileTypes.join(' ') : '', defaultName || '',
friendlyFilePrefix || '', prompt || '', nameFieldLabel || '');
var result = {data: resultString || '', err: getLastError() };
return result;
};
/**
* Reads the contents of a folder.
*
* @param path {string} The path of the folder to read.
*
* @return An object with these properties:
* <ul><li>"data": An array of the names of the contained files (excluding '.' and '..'.</li>
* <li>"err": The status of the operation, one of:
* <br>NO_ERROR
* <br>ERR_UNKNOWN
* <br>ERR_INVALID_PARAMS
* <br>ERR_NOT_FOUND
* <br>ERR_CANT_READ </li></ul>
**/
native function ReadDir();
cep.fs.readdir = function (path) {
var resultString = ReadDir(path);
var result = {data: JSON.parse(resultString || '[]'), err: getLastError() };
return result;
};
/**
* Creates a new folder.
*
* @param path {string} The path of the folder to create.
*
* @return An object with this property:
* <ul><li>"err": The status of the operation, one of:
* <br>NO_ERROR
* <br>ERR_UNKNOWN
* <br>ERR_INVALID_PARAMS</li></ul>
**/
native function MakeDir();
cep.fs.makedir = function (path) {
MakeDir(path);
return getErrorResult();
};
/**
* Renames a file or folder.
*
* @param oldPath {string} The old name of the file or folder.
* @param newPath {string} The new name of the file or folder.
*
* @return An object with this property:
* <ul><li>"err": The status of the operation, one of:
* <br>NO_ERROR
* <br>ERR_UNKNOWN
* <br>ERR_INVALID_PARAMS
* <br>ERR_NOT_FOUND
* <br>ERR_FILE_EXISTS </li></ul>
**/
native function Rename();
cep.fs.rename = function(oldPath, newPath) {
Rename(oldPath, newPath);
return getErrorResult();
};
/**
* Reports whether an item is a file or folder.
*
* @param path {string} The path of the file or folder.
*
* @return An object with these properties:
* <ul><li>"data": An object with properties
* <br>isFile (boolean)
* <br>isDirectory (boolean)
* <br>mtime (modification DateTime) </li>
* <li>"err": The status of the operation, one of
* <br>NO_ERROR
* <br>ERR_UNKNOWN
* <br>ERR_INVALID_PARAMS
* <br>ERR_NOT_FOUND </li>
* </ul>
**/
native function IsDirectory();
native function GetFileModificationTime();
cep.fs.stat = function (path) {
var isDir = IsDirectory(path);
var modtime = GetFileModificationTime(path);
var result = {
data: {
isFile: function () {
return !isDir;
},
isDirectory: function () {
return isDir;
},
mtime: modtime
},
err: getLastError()
};
return result;
};
/**
* Reads the entire contents of a file.
*
* @param path {string} The path of the file to read.
* @param encoding {string} The encoding of the contents of file, one of
* UTF8 (the default) or Base64.
*
* @return An object with these properties:
* <ul><li>"data": The file contents. </li>
* <li>"err": The status of the operation, one of
* <br>NO_ERROR
* <br>ERR_UNKNOWN
* <br>ERR_INVALID_PARAMS
* <br>ERR_NOT_FOUND
* <br>ERR_CANT_READ
* <br>ERR_UNSUPPORTED_ENCODING </li>
* </ul>
**/
native function ReadFile();
cep.fs.readFile = function (path, encoding) {
encoding = encoding ? encoding : cep.encoding.UTF8;
var contents = ReadFile(path, encoding);
var result = {data: contents, err: getLastError() };
return result;
};
/**
* Writes data to a file, replacing the file if it already exists.
*
* @param path {string} The path of the file to write.
* @param data {string} The data to write to the file.
* @param encoding {string} The encoding of the contents of file, one of
* UTF8 (the default) or Base64.
*
* @return An object with this property:
* <ul><li>"err": The status of the operation, one of:
* <br>NO_ERROR
* <br>ERR_UNKNOWN
* <br>ERR_INVALID_PARAMS
* <br>ERR_UNSUPPORTED_ENCODING
* <br>ERR_CANT_WRITE
* <br>ERR_OUT_OF_SPACE </li></ul>
**/
native function WriteFile();
cep.fs.writeFile = function (path, data, encoding) {
encoding = encoding ? encoding : cep.encoding.UTF8;
WriteFile(path, data, encoding);
return getErrorResult();
};
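// Usage sketch (illustrative; the path is a hypothetical example): write a small
// JSON file with the helper above and read it back.
//   var writeRes = cep.fs.writeFile('/tmp/pype_example.json', JSON.stringify({ok: true}));
//   if (writeRes.err === cep.fs.NO_ERROR) {
//       var readRes = cep.fs.readFile('/tmp/pype_example.json'); // UTF-8 by default
//       var parsed = JSON.parse(readRes.data);
//   }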
/**
* Sets permissions for a file or folder.
*
* @param path {string} The path of the file or folder.
* @param mode {number} The permissions in numeric format (for example, 0777).
*
* @return An object with this property:
* <ul><li>"err": The status of the operation, one of:
* <br>NO_ERROR
* <br>ERR_UNKNOWN
* <br>ERR_INVALID_PARAMS
* <br>ERR_CANT_WRITE </li></ul>
**/
native function SetPosixPermissions();
cep.fs.chmod = function (path, mode) {
SetPosixPermissions(path, mode);
return getErrorResult();
};
/**
* Deletes a file.
*
* @param path {string} The path of the file to delete.
*
* @return An object with this property:
* <ul><li>"err": The status of the operation, one of:
* <br>NO_ERROR
* <br>ERR_UNKNOWN
* <br>ERR_INVALID_PARAMS
* <br>ERR_NOT_FOUND
* <br>ERR_NOT_FILE </li></ul>
**/
native function DeleteFileOrDirectory();
native function IsDirectory();
cep.fs.deleteFile = function (path) {
if (IsDirectory(path)) {
var result = {err: cep.fs.ERR_NOT_FILE};
return result;
}
DeleteFileOrDirectory(path);
return getErrorResult();
};
/**
* Creates a process.
*
* @param arguments {list} The arguments to create process. The first one is the full path of the executable,
* followed by the arguments of the executable.
*
* @return An object with these properties:
* <ul><li>"data": The pid of the process, or -1 on error. </li>
* <li>"err": The status of the operation, one of
* <br>NO_ERROR
* <br>ERR_UNKNOWN
* <br>ERR_INVALID_PARAMS
* <br>ERR_EXCEED_MAX_NUM_PROCESS
* <br>ERR_NOT_FOUND
* <br>ERR_NOT_FILE</li>
* </ul>
**/
native function CreateProcess();
cep.process.createProcess = function () {
var args = Array.prototype.slice.call(arguments);
var pid = CreateProcess(args);
var result = {data: pid, err: getLastError()};
return result;
};
/**
* Registers a standard-output handler for a process.
*
* @param pid {int} The pid of the process.
* @param callback {function} The handler function for the standard output callback.
*
* @return An object with this property:
* <ul><li>"err": The status of the operation, one of:
* <br>NO_ERROR
* <br>ERR_UNKNOWN
* <br>ERR_INVALID_PARAMS
* <br>ERR_INVALID_PROCESS_ID </li></ul>
**/
native function SetupStdOutHandler();
cep.process.stdout = function (pid, callback) {
SetupStdOutHandler(pid, callback);
return getErrorResult();
};
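// Usage sketch (illustrative; the executable path is a hypothetical example):
// spawn a process and stream its standard output through the handler above.
//   var proc = cep.process.createProcess('C:/Python27/python.exe', '--version');
//   if (proc.err === cep.fs.NO_ERROR) {
//       cep.process.stdout(proc.data, function (line) { console.log(line); });
//   }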
/**
* Registers a standard-error handler for a process.
*
* @param pid {int} The pid of the process.
* @param callback {function} The handler function for the standard error callback.
*
* @return An object with this property:
* <ul><li>"err": The status of the operation, one of:
* <br>NO_ERROR
* <br>ERR_UNKNOWN
* <br>ERR_INVALID_PARAMS
* <br>ERR_INVALID_PROCESS_ID </li></ul>
**/
native function SetupStdErrHandler();
cep.process.stderr = function (pid, callback) {
SetupStdErrHandler(pid, callback);
return getErrorResult();
};
/**
* Writes data to the standard input of a process.
*
* @param pid {int} The pid of the process
* @param data {string} The data to write.
*
* @return An object with this property:
* <ul><li>"err": The status of the operation, one of:
* <br>NO_ERROR
* <br>ERR_UNKNOWN
* <br>ERR_INVALID_PARAMS
* <br>ERR_INVALID_PROCESS_ID </li></ul>
**/
native function WriteStdIn();
cep.process.stdin = function (pid, data) {
WriteStdIn(pid, data);
return getErrorResult();
};
/**
* Retrieves the working directory of a process.
*
* @param pid {int} The pid of the process.
*
* @return An object with these properties:
* <ul><li>"data": The path of the working directory. </li>
* <li>"err": The status of the operation, one of
* <br>NO_ERROR
* <br>ERR_UNKNOWN
* <br>ERR_INVALID_PARAMS
* <br>ERR_INVALID_PROCESS_ID </li></ul>
**/
native function GetWorkingDirectory();
cep.process.getWorkingDirectory = function (pid) {
var wd = GetWorkingDirectory(pid);
var result = {data: wd, err: getLastError()};
return result;
};
/**
* Waits for a process to quit.
*
* @param pid {int} The pid of the process.
*
* @return An object with this property:
* <ul><li>"err": The status of the operation, one of:
* <br>NO_ERROR
* <br>ERR_UNKNOWN
* <br>ERR_INVALID_PARAMS
* <br>ERR_INVALID_PROCESS_ID </li></ul>
**/
native function WaitFor();
cep.process.waitfor = function (pid) {
WaitFor(pid);
return getErrorResult();
};
/**
* Registers a handler for the onquit callback of a process.
*
* @param pid {int} The pid of the process.
* @param callback {function} The handler function.
*
* @return An object with this property:
* <ul><li>"err": The status of the operation, one of:
* <br>NO_ERROR
* <br>ERR_UNKNOWN
* <br>ERR_INVALID_PARAMS
* <br>ERR_INVALID_PROCESS_ID </li></ul>
**/
native function OnQuit();
cep.process.onquit = function (pid, callback) {
OnQuit(pid, callback);
return getErrorResult();
};
/**
* Reports whether a process is currently running.
*
* @param pid {int} The pid of the process.
*
* @return An object with these properties:
* <ul><li>"data": True if the process is running, false otherwise. </li>
* <li>"err": The status of the operation, one of
* <br>NO_ERROR
* <br>ERR_UNKNOWN
* <br>ERR_INVALID_PARAMS
* <br>ERR_INVALID_PROCESS_ID </li></ul>
**/
native function IsRunning();
cep.process.isRunning = function (pid) {
var isRunning = IsRunning(pid);
var result = {data: isRunning, err: getLastError()};
return result;
};
/**
* Terminates a process.
*
* @param pid {int} The pid of the process
*
* @return An object with this property:
* <ul><li>"err": The status of the operation, one of:
* <br>NO_ERROR
* <br>ERR_UNKNOWN
* <br>ERR_INVALID_PARAMS
* <br>ERR_INVALID_PROCESS_ID </li></ul>
**/
native function Terminate();
cep.process.terminate = function (pid) {
Terminate(pid);
return getErrorResult();
};
/**
* Encoding conversions.
*
*/
cep.encoding.convertion =
{
utf8_to_b64: function(str) {
return window.btoa(unescape(encodeURIComponent(str)));
},
b64_to_utf8: function(base64str) {
// If a base64 string contains any whitespace character, DOM Exception 5 occurs during window.atob, please see
// http://stackoverflow.com/questions/14695988/dom-exception-5-invalid-character-error-on-valid-base64-image-string-in-javascri
base64str = base64str.replace(/\s/g, '');
return decodeURIComponent(escape(window.atob(base64str)));
},
binary_to_b64: function(binary) {
return window.btoa(binary);
},
b64_to_binary: function(base64str) {
return window.atob(base64str);
},
ascii_to_b64: function(ascii) {
return window.btoa(ascii);
},
b64_to_ascii: function(base64str) {
return window.atob(base64str);
}
};
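// Usage sketch (illustrative): round-tripping a UTF-8 string through Base64
// with the conversion helpers above.
//   var b64 = cep.encoding.convertion.utf8_to_b64('Pype');
//   cep.encoding.convertion.b64_to_utf8(b64); // -> 'Pype'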
/**
* Opens a page in the default system browser.
*
* @param url {string} The URL of the page/file to open, or the email address.
* Must use HTTP/HTTPS/file/mailto. For example:
* "http://www.adobe.com"
* "https://github.com"
* "file:///C:/log.txt"
* "mailto:test@adobe.com"
*
* @return An object with this property:
* <ul><li>"err": The status of the operation, one of:
* <br>NO_ERROR
* <br>ERR_UNKNOWN
* <br>ERR_INVALID_PARAMS</li></ul>
**/
native function OpenURLInDefaultBrowser();
cep.util.openURLInDefaultBrowser = function (url) {
if (url && (url.indexOf("http://") === 0 ||
url.indexOf("https://") === 0 ||
url.indexOf("file://") === 0 ||
url.indexOf("mailto:") === 0)) {
OpenURLInDefaultBrowser(url);
return getErrorResult();
} else {
return { err : cep.util.ERR_INVALID_URL };
}
};
/**
* Registers a callback function for extension unload. If called more than once,
* the last callback that is successfully registered is used.
*
* @deprecated since version 6.0.0
*
* @param callback {function} The handler function.
*
* @return An object with this property:
* <ul><li>"err": The status of the operation, one of:
* <br>NO_ERROR
* <br>ERR_INVALID_PARAMS</li></ul>
**/
native function RegisterExtensionUnloadCallback();
cep.util.registerExtensionUnloadCallback = function (callback) {
return { err : cep.util.DEPRECATED_API };
};
/**
* Stores the user's proxy credentials
*
* @param username {string} proxy username
* @param password {string} proxy password
*
* @return An object with this property:
* <ul><li>"err": The status of the operation, one of
* <br>NO_ERROR
* <br>ERR_INVALID_PARAMS </li>
* </ul>
**/
native function StoreProxyCredentials();
cep.util.storeProxyCredentials = function (username, password) {
StoreProxyCredentials(username, password);
return getErrorResult();
};
})();

File diff suppressed because it is too large

View file

@ -0,0 +1,459 @@
/**************************************************************************************************
*
* ADOBE SYSTEMS INCORPORATED
* Copyright 2013 Adobe Systems Incorporated
* All Rights Reserved.
*
* NOTICE: Adobe permits you to use, modify, and distribute this file in accordance with the
* terms of the Adobe license agreement accompanying it. If you have received this file from a
* source other than Adobe, then your use, modification, or distribution of it requires the prior
* written permission of Adobe.
*
**************************************************************************************************/
/** Vulcan - v9.2.0 */
/**
* @class Vulcan
*
* The singleton instance, <tt>VulcanInterface</tt>, provides an interface
* to the Vulcan. Allows you to launch CC applications
* and discover information about them.
*/
function Vulcan()
{
}
/**
* Gets all available application specifiers on the local machine.
*
* @return The array of all available application specifiers.
*/
Vulcan.prototype.getTargetSpecifiers = function()
{
var params = {};
return JSON.parse(window.__adobe_cep__.invokeSync("vulcanGetTargetSpecifiers", JSON.stringify(params)));
};
/**
* Launches a CC application on the local machine, if it is not already running.
*
* @param targetSpecifier The application specifier; for example "indesign".
*
* Note: In Windows 7 64-bit or Windows 8 64-bit system, some target applications (like Photoshop and Illustrator) have both 32-bit version
* and 64-bit version. Therefore, we need to specify the version by this parameter with "photoshop-70.032" or "photoshop-70.064". If you
* installed Photoshop 32-bit and 64-bit on one Windows 64-bit system and invoke this interface with parameter "photoshop-70.032", you may
* receive wrong result.
* The specifiers for Illustrator are "illustrator-17.032", "illustrator-17.064", "illustrator-17" and "illustrator".
*
* In other platforms there is no such issue, so we can use "photoshop" or "photoshop-70" as specifier.
* @param focus True to launch in foreground, or false to launch in the background.
* @param cmdLine Optional, command-line parameters to supply to the launch command.
* @return True if the app can be launched, false otherwise.
*/
Vulcan.prototype.launchApp = function(targetSpecifier, focus, cmdLine)
{
if(!requiredParamsValid(targetSpecifier))
{
return false;
}
var params = {};
params.targetSpecifier = targetSpecifier;
params.focus = focus ? "true" : "false";
params.cmdLine = requiredParamsValid(cmdLine) ? cmdLine : "";
return JSON.parse(window.__adobe_cep__.invokeSync("vulcanLaunchApp", JSON.stringify(params))).result;
};
/**
* Checks whether a CC application is running on the local machine.
*
* @param targetSpecifier The application specifier; for example "indesign".
*
 * Note: On 64-bit Windows 7 or Windows 8 systems, some target applications (such as Photoshop and Illustrator) are available in both a 32-bit
 * and a 64-bit version. In that case the version must be specified in this parameter, for example "photoshop-70.032" or "photoshop-70.064". If both
 * the 32-bit and 64-bit versions of Photoshop are installed on one 64-bit Windows system and this interface is invoked with the parameter "photoshop-70.032", you may
 * receive the wrong result.
 * The specifiers for Illustrator are "illustrator-17.032", "illustrator-17.064", "illustrator-17" and "illustrator".
 *
 * On other platforms there is no such issue, so "photoshop" or "photoshop-70" can be used as the specifier.
* @return True if the app is running, false otherwise.
*/
Vulcan.prototype.isAppRunning = function(targetSpecifier)
{
if(!requiredParamsValid(targetSpecifier))
{
return false;
}
var params = {};
params.targetSpecifier = targetSpecifier;
return JSON.parse(window.__adobe_cep__.invokeSync("vulcanIsAppRunning", JSON.stringify(params))).result;
};
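// Illustrative sketch, not part of the Adobe source: checks whether a target application is
// running before talking to it. The default "photoshop" specifier is an assumption; adjust
// it per the platform note above.
function exampleIsRunning(targetSpecifier)
{
    var specifier = targetSpecifier || "photoshop";
    var running = VulcanInterface.isAppRunning(specifier);
    console.log(specifier + " running: " + running);
    return running;
}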
/**
* Checks whether a CC application is installed on the local machine.
*
* @param targetSpecifier The application specifier; for example "indesign".
*
 * Note: On 64-bit Windows 7 or Windows 8 systems, some target applications (such as Photoshop and Illustrator) are available in both a 32-bit
 * and a 64-bit version. In that case the version must be specified in this parameter, for example "photoshop-70.032" or "photoshop-70.064". If both
 * the 32-bit and 64-bit versions of Photoshop are installed on one 64-bit Windows system and this interface is invoked with the parameter "photoshop-70.032", you may
 * receive the wrong result.
 * The specifiers for Illustrator are "illustrator-17.032", "illustrator-17.064", "illustrator-17" and "illustrator".
 *
 * On other platforms there is no such issue, so "photoshop" or "photoshop-70" can be used as the specifier.
* @return True if the app is installed, false otherwise.
*/
Vulcan.prototype.isAppInstalled = function(targetSpecifier)
{
if(!requiredParamsValid(targetSpecifier))
{
return false;
}
var params = {};
params.targetSpecifier = targetSpecifier;
return JSON.parse(window.__adobe_cep__.invokeSync("vulcanIsAppInstalled", JSON.stringify(params))).result;
};
/**
* Retrieves the local install path of a CC application.
*
* @param targetSpecifier The application specifier; for example "indesign".
*
 * Note: On 64-bit Windows 7 or Windows 8 systems, some target applications (such as Photoshop and Illustrator) are available in both a 32-bit
 * and a 64-bit version. In that case the version must be specified in this parameter, for example "photoshop-70.032" or "photoshop-70.064". If both
 * the 32-bit and 64-bit versions of Photoshop are installed on one 64-bit Windows system and this interface is invoked with the parameter "photoshop-70.032", you may
 * receive the wrong result.
 * The specifiers for Illustrator are "illustrator-17.032", "illustrator-17.064", "illustrator-17" and "illustrator".
 *
 * On other platforms there is no such issue, so "photoshop" or "photoshop-70" can be used as the specifier.
* @return The path string if the application is found, "" otherwise.
*/
Vulcan.prototype.getAppPath = function(targetSpecifier)
{
if(!requiredParamsValid(targetSpecifier))
{
return "";
}
var params = {};
params.targetSpecifier = targetSpecifier;
return JSON.parse(window.__adobe_cep__.invokeSync("vulcanGetAppPath", JSON.stringify(params))).result;
};
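// Illustrative sketch, not part of the Adobe source, combining the two calls above: resolve
// the install path only when the application is actually installed. "exampleResolveAppPath"
// is a hypothetical helper name.
function exampleResolveAppPath(targetSpecifier)
{
    if(!VulcanInterface.isAppInstalled(targetSpecifier))
    {
        return "";
    }
    return VulcanInterface.getAppPath(targetSpecifier);
}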
/**
* Registers a message listener callback function for a Vulcan message.
*
* @param type The message type.
* @param callback The callback function that handles the message.
* Takes one argument, the message object.
* @param obj Optional, the object containing the callback method, if any.
* Default is null.
*/
Vulcan.prototype.addMessageListener = function(type, callback, obj)
{
if(!requiredParamsValid(type, callback) || !strStartsWith(type, VulcanMessage.TYPE_PREFIX))
{
return;
}
var params = {};
params.type = type;
window.__adobe_cep__.invokeAsync("vulcanAddMessageListener", JSON.stringify(params), callback, obj);
};
/**
* Removes a registered message listener callback function for a Vulcan message.
*
* @param type The message type.
* @param callback The callback function that was registered.
* Takes one argument, the message object.
* @param obj Optional, the object containing the callback method, if any.
* Default is null.
*/
Vulcan.prototype.removeMessageListener = function(type, callback, obj)
{
if(!requiredParamsValid(type, callback) || !strStartsWith(type, VulcanMessage.TYPE_PREFIX))
{
return;
}
var params = {};
params.type = type;
window.__adobe_cep__.invokeAsync("vulcanRemoveMessageListener", JSON.stringify(params), callback, obj);
};
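// Illustrative sketch, not part of the Adobe source, showing the listener lifecycle for the
// two calls above. The message type is a made-up example; it only has to start with the
// "vulcan.SuiteMessage." prefix (VulcanMessage.TYPE_PREFIX, defined later in this file).
function exampleOnSuiteMessage(message)
{
    console.log("Received payload: " + VulcanInterface.getPayload(message));
}
function exampleRegisterListener()
{
    VulcanInterface.addMessageListener("vulcan.SuiteMessage.com.pype.example", exampleOnSuiteMessage);
}
function exampleUnregisterListener()
{
    // Removal requires the same type string and the same function reference.
    VulcanInterface.removeMessageListener("vulcan.SuiteMessage.com.pype.example", exampleOnSuiteMessage);
}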
/**
* Dispatches a Vulcan message.
*
* @param vulcanMessage The message object.
*/
Vulcan.prototype.dispatchMessage = function(vulcanMessage)
{
if(!requiredParamsValid(vulcanMessage) || !strStartsWith(vulcanMessage.type, VulcanMessage.TYPE_PREFIX))
{
return;
}
var params = {};
var message = new VulcanMessage(vulcanMessage.type);
message.initialize(vulcanMessage);
params.vulcanMessage = message;
window.__adobe_cep__.invokeSync("vulcanDispatchMessage", JSON.stringify(params));
};
/**
* Retrieves the message payload of a Vulcan message for the registered message listener callback function.
*
* @param vulcanMessage The message object.
* @return A string containing the message payload.
*/
Vulcan.prototype.getPayload = function(vulcanMessage)
{
if(!requiredParamsValid(vulcanMessage) || !strStartsWith(vulcanMessage.type, VulcanMessage.TYPE_PREFIX))
{
return null;
}
var message = new VulcanMessage(vulcanMessage.type);
message.initialize(vulcanMessage);
return message.getPayload();
};
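// Illustrative sketch, not part of the Adobe source: build a suite message, attach a payload
// and broadcast it with dispatchMessage. The type string and payload are placeholders.
function exampleBroadcast(payloadText)
{
    var message = new VulcanMessage("vulcan.SuiteMessage.com.pype.example");
    message.setPayload(payloadText || "hello from pype");
    VulcanInterface.dispatchMessage(message);
}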
/**
* Gets all available endpoints of the running Vulcan-enabled applications.
*
* Since 7.0.0
*
* @return The array of all available endpoints.
* An example endpoint string:
* <endPoint>
* <appId>PHXS</appId>
* <appVersion>16.1.0</appVersion>
* </endPoint>
*/
Vulcan.prototype.getEndPoints = function()
{
var params = {};
return JSON.parse(window.__adobe_cep__.invokeSync("vulcanGetEndPoints", JSON.stringify(params)));
};
/**
* Gets the endpoint for itself.
*
* Since 7.0.0
*
* @return The endpoint string for itself.
*/
Vulcan.prototype.getSelfEndPoint = function()
{
var params = {};
return window.__adobe_cep__.invokeSync("vulcanGetSelfEndPoint", JSON.stringify(params));
};
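// Illustrative sketch, not part of the Adobe source: log this extension's own endpoint next
// to the endpoints of every running Vulcan-enabled application.
function exampleListEndPoints()
{
    console.log("Self endpoint: " + VulcanInterface.getSelfEndPoint());
    var endPoints = VulcanInterface.getEndPoints();
    console.log("All endpoints: " + JSON.stringify(endPoints));
    return endPoints;
}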
/** Singleton instance of Vulcan **/
var VulcanInterface = new Vulcan();
//--------------------------------- Vulcan Message ------------------------------
/**
* @class VulcanMessage
* Message type for sending messages between host applications.
* A message of this type can be sent to the designated destination
* when appId and appVersion are provided and valid. Otherwise,
* the message is broadcast to all running Vulcan-enabled applications.
*
* To send a message between extensions running within one
* application, use the <code>CSEvent</code> type in CSInterface.js.
*
* @param type The message type.
* @param appId The peer appId.
* @param appVersion The peer appVersion.
*
*/
function VulcanMessage(type, appId, appVersion)
{
this.type = type;
this.scope = VulcanMessage.SCOPE_SUITE;
this.appId = requiredParamsValid(appId) ? appId : VulcanMessage.DEFAULT_APP_ID;
this.appVersion = requiredParamsValid(appVersion) ? appVersion : VulcanMessage.DEFAULT_APP_VERSION;
this.data = VulcanMessage.DEFAULT_DATA;
}
VulcanMessage.TYPE_PREFIX = "vulcan.SuiteMessage.";
VulcanMessage.SCOPE_SUITE = "GLOBAL";
VulcanMessage.DEFAULT_APP_ID = "UNKNOWN";
VulcanMessage.DEFAULT_APP_VERSION = "UNKNOWN";
VulcanMessage.DEFAULT_DATA = "<data><payload></payload></data>";
VulcanMessage.dataTemplate = "<data>{0}</data>";
VulcanMessage.payloadTemplate = "<payload>{0}</payload>";
/**
* Initializes this message instance.
*
 * @param message A message instance to use for initialization.
*/
VulcanMessage.prototype.initialize = function(message)
{
this.type = message.type;
this.scope = message.scope;
this.appId = message.appId;
this.appVersion = message.appVersion;
this.data = message.data;
};
/**
* Retrieves the message data.
*
* @return A data string in XML format.
*/
VulcanMessage.prototype.xmlData = function()
{
if(this.data === undefined)
{
var str = "";
str = String.format(VulcanMessage.payloadTemplate, str);
this.data = String.format(VulcanMessage.dataTemplate, str);
}
return this.data;
};
/**
* Sets the message payload of this message.
*
* @param payload A string containing the message payload.
*/
VulcanMessage.prototype.setPayload = function(payload)
{
var str = cep.encoding.convertion.utf8_to_b64(payload);
str = String.format(VulcanMessage.payloadTemplate, str);
this.data = String.format(VulcanMessage.dataTemplate, str);
};
/**
* Retrieves the message payload of this message.
*
* @return A string containing the message payload.
*/
VulcanMessage.prototype.getPayload = function()
{
var str = GetValueByKey(this.data, "payload");
if(str !== null)
{
return cep.encoding.convertion.b64_to_utf8(str);
}
return null;
};
/**
* Converts the properties of this instance to a string.
*
* @return The string version of this instance.
*/
VulcanMessage.prototype.toString = function()
{
var str = "type=" + this.type;
str += ", scope=" + this.scope;
str += ", appId=" + this.appId;
str += ", appVersion=" + this.appVersion;
str += ", data=" + this.xmlData();
return str;
};
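// Illustrative sketch, not part of the Adobe source: toString() (and xmlData() behind it) is
// handy when debugging suite messaging. The type and payload are placeholder values.
function exampleInspectMessage()
{
    var message = new VulcanMessage("vulcan.SuiteMessage.com.pype.example");
    message.setPayload("debug me");
    console.log(message.toString());
    return message.getPayload(); // "debug me"
}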
//--------------------------------------- Util --------------------------------
/**
* Formats a string based on a template.
*
* @param src The format template.
*
* @return The formatted string
*/
String.format = function(src)
{
if (arguments.length === 0)
{
return null;
}
var args = Array.prototype.slice.call(arguments, 1);
return src.replace(/\{(\d+)\}/g, function(m, i){
return args[i];
});
};
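// Illustrative sketch, not part of the Adobe source: numbered placeholders in the template
// are replaced by the positional arguments that follow it.
function exampleFormat()
{
    return String.format("<payload>{0}</payload>", "encoded-text"); // "<payload>encoded-text</payload>"
}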
/**
* Retrieves the content of an XML element.
*
* @param xmlStr The XML string.
* @param key The name of the tag.
*
* @return The content of the tag, or the empty string
 * if such a tag is not found or the tag has no content.
*/
function GetValueByKey(xmlStr, key)
{
if(window.DOMParser)
{
var parser = new window.DOMParser();
try
{
var xmlDoc = parser.parseFromString(xmlStr, "text/xml");
var node = xmlDoc.getElementsByTagName(key)[0];
if(node && node.childNodes[0])
{
return node.childNodes[0].nodeValue;
}
}
catch(e)
{
//log the error
}
}
return "";
}
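// Illustrative sketch, not part of the Adobe source: extract the "payload" element from a
// data string shaped like VulcanMessage.DEFAULT_DATA.
function exampleGetPayloadElement()
{
    return GetValueByKey("<data><payload>abc</payload></data>", "payload"); // "abc"
}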
/**
* Reports whether required parameters are valid.
*
* @return True if all required parameters are valid,
* false if any of the required parameters are invalid.
*/
function requiredParamsValid()
{
for(var i = 0; i < arguments.length; i++)
{
var argument = arguments[i];
if(argument === undefined || argument === null)
{
return false;
}
}
return true;
}
/**
* Reports whether a string has a given prefix.
*
* @param str The target string.
* @param prefix The specific prefix string.
*
* @return True if the string has the prefix, false if not.
*/
function strStartsWith(str, prefix)
{
if(typeof str != "string")
{
return false;
}
return str.indexOf(prefix) === 0;
}
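// Illustrative sketch, not part of the Adobe source: the two validation helpers above are
// typically combined to vet a message type before use.
function exampleValidateType(type)
{
    return requiredParamsValid(type) && strStartsWith(type, "vulcan.SuiteMessage.");
}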

View file

@ -1,4 +1,4 @@
// switch between live and local code
function onLoaded() {
window.location.href = "http://localhost:4242/ppro/index.html";
window.location.href = "http://localhost:8021/ppro/index.html";
}

File diff suppressed because it is too large

View file

@ -0,0 +1,30 @@
{
"name": "com.pype",
"version": "1.0.0",
"description": "pype avalon integration",
"license": "ISC",
"main": "CSXS\\manifest.xml",
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1"
},
"dependencies": {
"bluebird": "^3.7.2",
"decompress-zip": "^0.2.2",
"fs": "^0.0.1-security",
"jsonfile": "^6.0.1",
"junk": "^3.1.0",
"mkdirp": "^1.0.4",
"node-fetch": "^2.6.0",
"node-timecodes": "^2.5.0",
"opn": "^6.0.0",
"os": "^0.1.1",
"path": "^0.12.7",
"process": "^0.11.10",
"pure-uuid": "^1.6.0",
"rimraf": "^3.0.2",
"url": "^0.11.0",
"walk": "^2.3.14",
"xml2js": "^0.4.23"
},
"devDependencies": {}
}

View file

@ -0,0 +1,15 @@
/* global $, File, Folder, alert */
// Provide empty fallbacks so the script can be evaluated outside a host application.
if (typeof ($) === 'undefined') {
    var $ = {};
}
if (typeof (app) === 'undefined') {
    var app = {};
}
// Register the 'com.pype' extension with the host's setExtensionPersistent call so the
// panel is kept loaded (the exact behaviour of the numeric flag depends on the host).
function keepExtention () {
    return app.setExtensionPersistent('com.pype', 0);
}
keepExtention();

Some files were not shown because too many files have changed in this diff