Merge branch 'develop' into feature/93-flexible_template_assignment

This commit is contained in:
Milan Kolar 2020-05-12 22:45:02 +02:00
commit f8652e9401
156 changed files with 33857 additions and 13060 deletions

.gitignore

@@ -33,3 +33,5 @@ coverage.xml
##################
node_modules/
package-lock.json
pype/premiere/ppro/js/debug.log


@@ -15,14 +15,11 @@
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))
import sys
import os
from pprint import pprint
from pypeapp.pypeLauncher import PypeLauncher
from pypeapp.storage import Storage
from pypeapp.deployment import Deployment
pype_setup = os.getenv('PYPE_ROOT')
pype_setup = os.getenv('PYPE_SETUP_PATH')
d = Deployment(pype_setup)
launcher = PypeLauncher()
@@ -32,7 +29,6 @@ os.environ['PYPE_CONFIG'] = config_path
os.environ['TOOL_ENV'] = os.path.normpath(os.path.join(config_path,
'environments'))
launcher._add_modules()
Storage().update_environment()
launcher._load_default_environments(tools=tools)
# -- Project information -----------------------------------------------------


@@ -25,15 +25,15 @@ set PYTHONPATH=%%d;!PYTHONPATH!
echo ^>^>^> Setting PYPE_CONFIG
call :ResolvePath pypeconfig "..\pype-config"
set PYPE_CONFIG=%pypeconfig%
echo ^>^>^> Setting PYPE_ROOT
echo ^>^>^> Setting PYPE_SETUP_PATH
call :ResolvePath pyperoot "..\..\"
set PYPE_ROOT=%pyperoot%
set PYTHONPATH=%PYPE_ROOT%;%PYTHONPATH%
set PYPE_SETUP_PATH=%pyperoot%
set PYTHONPATH=%PYPE_SETUP_PATH%;%PYTHONPATH%
echo ^>^>^> Setting PYPE_ENV
set PYPE_ENV="C:\Users\Public\pype_env2"
call "docs\make.bat" clean
sphinx-apidoc -M -f -d 6 --ext-autodoc --ext-intersphinx --ext-viewcode -o docs\source pype %PYPE_ROOT%\repos\pype\pype\vendor\*
sphinx-apidoc -M -f -d 6 --ext-autodoc --ext-intersphinx --ext-viewcode -o docs\source pype %PYPE_SETUP_PATH%\repos\pype\pype\vendor\*
call "docs\make.bat" html
echo ^>^>^> Doing cleanup ...
set PYTHONPATH=%_OLD_PYTHONPATH%


@@ -3,7 +3,7 @@ import os
from pyblish import api as pyblish
from avalon import api as avalon
from .lib import filter_pyblish_plugins
from pypeapp import config
from pypeapp import config, Roots
import logging
@@ -11,6 +11,7 @@ log = logging.getLogger(__name__)
PROJECT_PLUGINS_PATH = os.environ.get("PYPE_PROJECT_PLUGINS")
STUDIO_PLUGINS_PATH = os.environ.get("PYPE_STUDIO_PLUGINS")
PACKAGE_DIR = os.path.dirname(__file__)
PLUGINS_DIR = os.path.join(PACKAGE_DIR, "plugins")
@@ -83,7 +84,24 @@ def install():
pyblish.register_plugin_path(plugin_path)
avalon.register_plugin_path(avalon.Loader, plugin_path)
avalon.register_plugin_path(avalon.Creator, plugin_path)
avalon.register_plugin_path(
avalon.InventoryAction, plugin_path
)
# Register studio specific plugins
if STUDIO_PLUGINS_PATH and project_name:
for path in STUDIO_PLUGINS_PATH.split(os.pathsep):
if not path:
continue
if os.path.exists(path):
pyblish.register_plugin_path(path)
avalon.register_plugin_path(avalon.Loader, path)
avalon.register_plugin_path(avalon.Creator, path)
avalon.register_plugin_path(avalon.InventoryAction, path)
if project_name:
root_obj = Roots(project_name)
avalon.register_root(root_obj.roots)
# apply monkey patched discover to original one
avalon.discover = patched_discover


@@ -1,122 +0,0 @@
import os
import sys
from avalon import api as avalon
from pyblish import api as pyblish
from app import api as app
from .. import api
t = app.Templates()
log = api.Logger.getLogger(__name__, "aport")
AVALON_CONFIG = os.getenv("AVALON_CONFIG", "pype")
ADDITIONAL_PLUGINS = ['ftrack']
PARENT_DIR = os.path.dirname(__file__)
PACKAGE_DIR = os.path.dirname(PARENT_DIR)
PLUGINS_DIR = os.path.join(PACKAGE_DIR, "plugins")
PUBLISH_PATH = os.path.join(
PLUGINS_DIR, "aport", "publish"
).replace("\\", "/")
if os.getenv("PUBLISH_PATH", None):
os.environ["PUBLISH_PATH"] = os.pathsep.join(
os.environ["PUBLISH_PATH"].split(os.pathsep) +
[PUBLISH_PATH]
)
else:
os.environ["PUBLISH_PATH"] = PUBLISH_PATH
LOAD_PATH = os.path.join(PLUGINS_DIR, "aport", "load")
CREATE_PATH = os.path.join(PLUGINS_DIR, "aport", "create")
INVENTORY_PATH = os.path.join(PLUGINS_DIR, "aport", "inventory")
def register_additional_plugin_paths():
'''Adding additional host plugins
'''
for host in ADDITIONAL_PLUGINS:
publish_path = os.path.join(
PLUGINS_DIR, host, "publish").replace("\\", "/")
pyblish.register_plugin_path(publish_path)
# adding path to PUBLISH_PATH environment
if os.getenv("PUBLISH_PATH", None):
os.environ["PUBLISH_PATH"] = os.pathsep.join(
os.environ["PUBLISH_PATH"].split(os.pathsep) +
[publish_path]
)
else:
os.environ["PUBLISH_PATH"] = publish_path
log.info(
"Registered additional plugin path: "
"{}".format(publish_path))
def install():
# api.set_avalon_workdir()
log.info("Registering Aport plug-ins..")
pyblish.register_plugin_path(PUBLISH_PATH)
avalon.register_plugin_path(avalon.Loader, LOAD_PATH)
avalon.register_plugin_path(avalon.Creator, CREATE_PATH)
avalon.register_plugin_path(avalon.InventoryAction, INVENTORY_PATH)
# additional plugins
register_additional_plugin_paths()
# Disable all families except for the ones we explicitly want to see
family_states = [
"imagesequence",
"mov"
]
avalon.data["familiesStateDefault"] = False
avalon.data["familiesStateToggled"] = family_states
# load data from templates
api.load_data_from_templates()
# launch pico server
pico_server_launch()
def uninstall():
log.info("Deregistering Aport plug-ins..")
pyblish.deregister_plugin_path(PUBLISH_PATH)
avalon.deregister_plugin_path(avalon.Loader, LOAD_PATH)
avalon.deregister_plugin_path(avalon.Creator, CREATE_PATH)
# reset data from templates
api.reset_data_from_templates()
def pico_server_launch():
# path = "C:/Users/hubert/CODE/github/pico/examples/everything"
path = os.path.join(
os.path.dirname(__file__),
# "package"
)
os.chdir(path)
print(os.getcwd())
print(os.listdir(path))
try:
args = [sys.executable, "-m", "pico.server",
# "pipeline",
"api"
]
app.forward(
args,
cwd=path
)
except Exception as e:
log.error(e)
log.error(sys.exc_info())
# sys.exit(returncode)


@@ -1,225 +0,0 @@
# api.py
import os
import sys
import pico
from pico import PicoApp
from app.api import forward, Logger
import pipeline as ppl
log = Logger.getLogger(__name__, "aport")
@pico.expose()
def get_session():
ppl.AVALON_PROJECT = os.getenv("AVALON_PROJECT", None)
ppl.AVALON_ASSET = os.getenv("AVALON_ASSET", None)
ppl.AVALON_TASK = os.getenv("AVALON_TASK", None)
ppl.AVALON_SILO = os.getenv("AVALON_SILO", None)
return ppl.get_session()
@pico.expose()
def load_representations(project, representations):
'''Query data from MongoDB for defined representations.
Args:
project (str): name of the project
representations (list): representations which are required
Returns:
data (dict): representations in last versions
# testing url:
http://localhost:4242/api/load_representations?project=jakub_projectx&representations=[{%22asset%22:%22e09s031_0040%22,%22subset%22:%22referenceDefault%22,%22representation%22:%22mp4%22},%20{%22asset%22:%22e09s031_0030%22,%22subset%22:%22referenceDefault%22,%22representation%22:%22mp4%22}]
# returning:
{"e09s031_0040_referenceDefault":{"_id":"5c6dabaa2af61756b02f7f32","schema":"pype:representation-2.0","type":"representation","parent":"5c6dabaa2af61756b02f7f31","name":"mp4","data":{"path":"C:\\Users\\hubert\\_PYPE_testing\\projects\\jakub_projectx\\thisFolder\\e09\\s031\\e09s031_0040\\publish\\clip\\referenceDefault\\v019\\jkprx_e09s031_0040_referenceDefault_v019.mp4","template":"{publish.root}/{publish.folder}/{version.main}/{publish.file}"},"dependencies":[],"context":{"root":"C:\\Users\\hubert\\_PYPE_testing\\projects","project":{"name":"jakub_projectx","code":"jkprx"},"task":"edit","silo":"thisFolder","asset":"e09s031_0040","family":"clip","subset":"referenceDefault","VERSION":19,"hierarchy":"thisFolder\\e09\\s031","representation":"mp4"}}}
'''
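# Illustrative example of the expected input (values are hypothetical).
# Note that each item also carries a "parentClip" key, which is attached
# to the returned representation further below:
#   representations = [
#       {"asset": "e09s031_0040", "subset": "referenceDefault",
#        "representation": "mp4", "parentClip": "clip01"},
#   ]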
data = {}
# log.info("___project: {}".format(project))
# ppl.io.activate_project(project)
#
# from_mongo = ppl.io.find({"name": repr['representation'],
# "type": "representation"})[:]
for repr in representations:
log.info("asset: {}".format(repr['asset']))
# set context for each asset individually
context(project, repr['asset'], '')
# query data from mongo db for the asset's subset representation
related_repr = [r for r in ppl.io.find({"name": repr['representation'],
"type": "representation",
"context.asset": repr['asset']})[:]]
versions_dict = {r['context']['version']: i
for i, r in enumerate(related_repr)}
versions_list = [v for v in versions_dict.keys()]
versions_list.sort()
version_index_last = versions_dict[max(versions_list)]
log.info("version_index_last: {}".format(version_index_last))
# create name which will be used on timeline clip
name = '_'.join([repr['asset'], repr['subset']])
# log.info("___related_repr: {}".format(related_repr))
# assign data for the clip representation
version = ppl.io.find_one(
{'_id': related_repr[version_index_last]['parent']})
log.info("version: {}".format(version))
# fix path workaround
if '.#####.mxf' in related_repr[version_index_last]['data']['path']:
related_repr[version_index_last]['data']['path'] = related_repr[version_index_last]['data']['path'].replace(
'.#####.mxf', '.mxf')
related_repr[version_index_last]['version'] = version
related_repr[version_index_last]['parentClip'] = repr['parentClip']
data[name] = related_repr[version_index_last]
return data
@pico.expose()
def publish(send_json_path, get_json_path, gui):
"""
Runs standalone pyblish and adds a link to
data in an external json file
It is necessary to run `register_plugin_path` if particular
host is needed
Args:
send_json_path (string): path to temp json file with
sending context data
get_json_path (string): path to temp json file with
returning context data
Returns:
dict: get_json_path
Raises:
Exception: description
"""
log.info("avalon.session is: \n{}".format(ppl.SESSION))
log.info("PUBLISH_PATH: \n{}".format(os.environ["PUBLISH_PATH"]))
pype_start = os.path.join(os.getenv('PYPE_SETUP_ROOT'),
"app", "pype-start.py")
args = [pype_start,
"--root", os.environ['AVALON_PROJECTS'], "--publish-gui",
"-pp", os.environ["PUBLISH_PATH"],
"-d", "rqst_json_data_path", send_json_path,
"-d", "post_json_data_path", get_json_path
]
log.debug(args)
log.info("_aport.api Variable `AVALON_PROJECTS` had changed to `{0}`.".format(
os.environ['AVALON_PROJECTS']))
forward([
sys.executable, "-u"
] + args,
# cwd=cwd
)
return {"get_json_path": get_json_path}
@pico.expose()
def context(project, asset, task, app='aport'):
os.environ["AVALON_PROJECT"] = ppl.AVALON_PROJECT = project
os.environ["AVALON_ASSET"] = ppl.AVALON_ASSET = asset
os.environ["AVALON_TASK"] = ppl.AVALON_TASK = task
os.environ["AVALON_SILO"] = ppl.AVALON_SILO = ''
ppl.get_session()
# log.info('ppl.SESSION: {}'.format(ppl.SESSION))
# http://localhost:4242/pipeline/context?project=this&asset=shot01&task=comp
ppl.update_current_task(task, asset, app)
project_code = ppl.io.find_one({"type": "project"})["data"].get("code", '')
os.environ["AVALON_PROJECTCODE"] = \
ppl.SESSION["AVALON_PROJECTCODE"] = project_code
parents = ppl.io.find_one({"type": 'asset',
"name": ppl.AVALON_ASSET})['data']['parents']
if parents and len(parents) > 0:
# hierarchy = os.path.sep.join(hierarchy)
hierarchy = os.path.join(*parents).replace("\\", "/")
os.environ["AVALON_HIERARCHY"] = \
ppl.SESSION["AVALON_HIERARCHY"] = hierarchy
fix_paths = {k: v.replace("\\", "/") for k, v in ppl.SESSION.items()
if isinstance(v, str)}
ppl.SESSION.update(fix_paths)
ppl.SESSION.update({"AVALON_HIERARCHY": hierarchy,
"AVALON_PROJECTCODE": project_code,
"current_dir": os.getcwd().replace("\\", "/")
})
return ppl.SESSION
@pico.expose()
def anatomy_fill(data):
from pype import api as pype
pype.load_data_from_templates()
anatomy = pype.Anatomy
return anatomy.format(data)
@pico.expose()
def deregister_plugin_path():
if os.getenv("PUBLISH_PATH", None):
aport_plugin_path = os.pathsep.join(
[p.replace("\\", "/")
for p in os.environ["PUBLISH_PATH"].split(os.pathsep)
if "aport" in p or
"ftrack" in p])
os.environ["PUBLISH_PATH"] = aport_plugin_path
else:
log.warning("deregister_plugin_path(): No PUBLISH_PATH is registred")
return "Publish path deregistered"
@pico.expose()
def register_plugin_path(publish_path):
deregister_plugin_path()
if os.getenv("PUBLISH_PATH", None):
os.environ["PUBLISH_PATH"] = os.pathsep.join(
os.environ["PUBLISH_PATH"].split(os.pathsep)
+ [publish_path.replace("\\", "/")]
)
else:
os.environ["PUBLISH_PATH"] = publish_path
log.info(os.environ["PUBLISH_PATH"].split(os.pathsep))
return "Publish registered paths: {}".format(
os.environ["PUBLISH_PATH"].split(os.pathsep)
)
app = PicoApp()
app.register_module(__name__)
# remove all Handlers created by pico
for name, handler in [(handler.get_name(), handler)
for handler in Logger.logging.root.handlers[:]]:
if "pype" not in str(name).lower():
Logger.logging.root.removeHandler(handler)
# SPLASH.hide_splash()


@@ -1,26 +0,0 @@
import pype.api as pype
def get_anatomy(**kwarg):
return pype.Anatomy
def format_anatomy(data):
from .templates import (
get_anatomy
)
file = script_name()
anatomy = get_anatomy()
# TODO: perhaps should be in try!
padding = anatomy.render.padding
data.update({
"hierarchy": pype.get_hierarchy(),
"frame": "#" * padding,
"VERSION": pype.get_version_from_workfile(file)
})
# log.info("format_anatomy:anatomy: {}".format(anatomy))
return anatomy.format(data)


@@ -1,130 +0,0 @@
import sys
import os
import getpass
from app.api import Logger
from io_nonsingleton import DbConnector
io = DbConnector()
log = Logger.getLogger(__name__, "aport")
self = sys.modules[__name__]
self.SESSION = None
self._registered_root = {"_": ""}
self.AVALON_PROJECT = os.getenv("AVALON_PROJECT", None)
self.AVALON_ASSET = os.getenv("AVALON_ASSET", None)
self.AVALON_TASK = os.getenv("AVALON_TASK", None)
self.AVALON_SILO = os.getenv("AVALON_SILO", None)
def get_session():
if not self.SESSION:
io.install()
self.SESSION = io.Session
# for k, v in os.environ.items():
# if 'AVALON' in k:
# print(str((k, v)))
return self.SESSION
def update_current_task(task=None, asset=None, app=None):
"""Update active Session to a new task work area.
This updates the live Session to a different `asset`, `task` or `app`.
Args:
task (str): The task to set.
asset (str): The asset to set.
app (str): The app to set.
Returns:
dict: The changed key, values in the current Session.
"""
mapping = {
"AVALON_ASSET": asset,
"AVALON_TASK": task,
"AVALON_APP": app,
}
changed = {key: value for key, value in mapping.items() if value}
if not changed:
return
# Update silo when asset changed
if "AVALON_ASSET" in changed:
asset_document = io.find_one({"name": changed["AVALON_ASSET"],
"type": "asset"})
assert asset_document, "Asset must exist"
silo = asset_document["silo"]
if silo is None:
silo = asset_document["name"]
changed["AVALON_SILO"] = silo
parents = asset_document['data']['parents']
hierarchy = ""
if len(parents) > 0:
hierarchy = os.path.sep.join(parents)
changed['AVALON_HIERARCHY'] = hierarchy
# Compute work directory (with the temporary changed session so far)
project = io.find_one({"type": "project"},
projection={"config.template.work": True})
template = project["config"]["template"]["work"]
_session = self.SESSION.copy()
_session.update(changed)
changed["AVALON_WORKDIR"] = _format_work_template(template, _session)
# Update the full session in one go to avoid half updates
self.SESSION.update(changed)
# Update the environment
os.environ.update(changed)
return changed
def _format_work_template(template, session=None):
"""Return a formatted configuration template with a Session.
Note: This *cannot* format the templates for published files since the
session does not hold the context for a published file. Instead use
`get_representation_path` to parse the full path to a published file.
Args:
template (str): The template to format.
session (dict, Optional): The Session to use. If not provided use the
currently active global Session.
Returns:
str: The fully formatted path.
"""
if session is None:
session = self.SESSION
project = io.find_one({'type': 'project'})
return template.format(**{
"root": registered_root(),
"project": {
"name": project.get("name", session["AVALON_PROJECT"]),
"code": project["data"].get("code", ''),
},
"silo": session["AVALON_SILO"],
"hierarchy": session['AVALON_HIERARCHY'],
"asset": session["AVALON_ASSET"],
"task": session["AVALON_TASK"],
"app": session["AVALON_APP"],
"user": session.get("AVALON_USER", getpass.getuser())
})
def registered_root():
"""Return currently registered root"""
return os.path.normpath(
self._registered_root["_"]
or self.SESSION.get("AVALON_PROJECTS") or ""
)

File diff suppressed because it is too large.


@@ -1,149 +0,0 @@
<!DOCTYPE html>
<html>
<head>
<meta name="viewport" content="width=device-width, initial-scale=1">
<title>Pype extention</title>
<!-- Load the pico Javascript client, always automatically available at /pico.js -->
<script src="/pico.js"></script>
<!-- Or load our module proxy -->
<script src="/api.js"></script>
<script>
if (typeof module === 'object') {
window.module = module;
module = undefined;
}
</script>
<script src="./build.js"></script>
<script>
if (window.module) module = window.module;
</script>
<!-- <link rel="stylesheet" href="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.6/css/bootstrap.min.css" integrity="sha384-1q8mTJOASx8j1Au+a5WDVnPi2lkFfwwEAa8hDDdjZlpLegxhjVME1fgjWPGmkzs7" crossorigin="anonymous">
<link rel="stylesheet" href="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.6/css/bootstrap-theme.min.css" integrity="sha384-fLW2N01lMqjakBkx3l/M9EahuwpSfeNvV63J5ezn3uZzapT0u7EYsXMjQV+0En5r" crossorigin="anonymous">
<link rel="stylesheet" href="//cdnjs.cloudflare.com/ajax/libs/highlight.js/9.6.0/styles/default.min.css">
<script src="//cdnjs.cloudflare.com/ajax/libs/highlight.js/9.6.0/highlight.min.js"></script>
<script></script> -->
<style type="text/css">
html,
body {
height: 100%;
margin: 0px;
padding: 0px;
}
div {
padding: 5px;
}
#container {
height: 100%;
}
#header {
height: 5%;
}
#main {
height: 70%;
}
#output {
background-color: #333;
color: #aaa;
min-height: 15%;
overflow-y: scroll;
padding: 20px;
position: fixed;
bottom: 0px;
width: 100%;
}
.error {
color: #f00 !important;
}
#examples li {
padding: 10px;
margin: 10px;
background-color: silver;
}
code {
border-radius: 0;
margin: 5px;
white-space: pre !important;
}
#source {
height: 100%;
}
#examples {
height: 100%;
}
#spacer {
height: 20%;
}
.highlight {
background-color: yellow;
}
</style>
</head>
<body onresize="resizePanel()">
<a href="javascript:history.go(0)">Refresh panel</a>
<div id="container">
<div class="row row-eq-height" id="main">
<div class="col-md-6" id="examples">
<ol>
<li id="context">
<h4>Set context here</h4>
<pre><code class="js"></code></pre>
Project<input type="text" name="project" value="jakub_projectx" />Asset<input type="text" name="asset" value="shot01" />task<input type="text" name="task" value="compositing" />app<input type="text" name="app" value="premiera" />
<button class="btn btn-default btn-sm" type="button" onclick="context()">Set context</button>
</li>
<li id="deregister">
<h4>deregister_plugin_path</h4>
<pre><code class="js"></code></pre>
<button class="btn btn-default btn-sm" type="button" onclick="deregister()">Deregister</button>
</li>
<li id="register">
<h4>register_plugin_path</h4>
<pre><code class="js"></code></pre>
Path: <input type="text" name="path" value="C:/Users/hubertCODE/pype-setup/repos/pype-config/pype/plugins/premiere/publish" />
<button class="btn btn-default btn-sm" type="button" onclick="register()">Register path</button>
</li>
<li id="publish">
<h4>Publish</h4>
<pre><code class="js"></code></pre>
Json path: <input type="text" name="path" value="C:/Users/hubert/CODE/pype-setup/repos/pype-config/pype/premiere/example_publish_reqst.json" />
Gui<input type="checkbox" name="gui" value="True" checked>
<button class="btn btn-default btn-sm" type="button" onclick="publish()">Publish</button>
</li>
</ol>
<div id="spacer">
</div>
</div>
<div class="col-md-6" id="source">
<!-- <pre>
<code class="python"></code>
</pre> -->
</div>
</div>
<div class="row" id="output">
</div>
</div>
<script src="script.js"></script>
</body>
</html>


@@ -1,214 +0,0 @@
var api = pico.importModule('api');
var output = document.getElementById('output');
function querySelector(parent){
return function(child){
return document.querySelector(parent).querySelector(child)
};
}
var defs = {}
function jumpTo(name){
var e = defs[name];
document.querySelectorAll('.highlight').forEach(function(el){
el.classList.remove('highlight');
});
e.classList.add('highlight');
return false;
}
function displayResult(r){
output.classList.remove("error");
output.innerText = JSON.stringify(r);
}
function displayError(e){
output.classList.add("error");
output.innerText = e.message;
}
function unindent(code){
var lines = code.split('\n');
var margin = -1;
for(var j=0; j < lines.length; j++){
var l = lines[j];
for(i=0; i < l.length; i++){
if(l[i] != " "){
margin = i;
break;
}
}
if(margin > -1){
break;
}
}
lines = lines.slice(j);
return lines.map(function(s){ return s.substr(margin)}).join('\n');
}
function deregister(){
var $ = querySelector("#deregister");
api.deregister_plugin_path().then(displayResult);
}
function register(){
var $ = querySelector("#register");
var path = $("input[name=path]").value;
api.register_plugin_path(path).then(displayResult);
}
function publish(){
var $ = querySelector("#publish");
var path = $("input[name=path]").value;
var gui = $("input[name=gui]").checked;
api.publish(path, gui).then(displayResult);
}
function context(){
var $ = querySelector("#context");
var project = $("input[name=project]").value;
var asset = $("input[name=asset]").value;
var task = $("input[name=task]").value;
var app = $("input[name=app]").value;
api.context(project,asset,task,app).then(displayResult);
}
//
// function example1(){
// var $ = querySelector("#example1");
// var name = $("input[name=name]").value;
// api.hello(name).then(displayResult);
// }
//
//
// function example2(){
// var $ = querySelector("#example2");
// var x = $("input[name=x]").valueAsNumber;
// var y = $("#example2 input[name=y]").valueAsNumber;
// api.multiply(x, y).then(displayResult);
// }
//
// function example3(){
// var $ = querySelector("#example3");
// var file = $("input[name=upload]").files[0];
// api.upload(file, file.name).then(displayResult).catch(displayError);
// }
//
// function example4(){
// var $ = querySelector("#example4");
// api.my_ip().then(displayResult)
// }
//
// function example5(){
// var $ = querySelector("#example5");
// var username = $("input[name=username]").value;
// var password = $("input[name=password]").value;
// pico.setAuthentication(api, username, password);
// api.current_user().then(displayResult).catch(displayError);
// pico.clearAuthentication(api);
// }
//
// function example6(){
// var $ = querySelector("#example6");
// api.start_session().then(function(){
// api.session_id().then(displayResult).then(function(){
// api.end_session();
// })
// })
// }
//
// function example7(){
// var $ = querySelector("#example7");
// var session_id = "4242";
// pico.setRequestHook(api, 'session', function(req) {
// req.headers.set('X-SESSION-ID', session_id)
// })
// api.session_id2().then(displayResult)
// pico.clearRequestHook(api, 'session');
// }
//
// function example8(){
// var $ = querySelector("#example8");
// api.countdown(10).each(displayResult).then(function(){
// displayResult("Boom!");
// });
// }
//
// function example9(){
// var $ = querySelector("#example9");
// var user = {
// name: "Bob",
// age: 30,
// occupation: "Software Engineer",
// }
// api.user_description(user).then(displayResult);
// }
//
// function example10(){
// var $ = querySelector("#example10");
// api.fail().then(displayResult).catch(displayError);
// }
//
// function example11(){
// var $ = querySelector("#example11");
// api.make_coffee().then(displayResult).catch(displayError);
// }
//
//
// function example12(){
// var $ = querySelector("#example12");
// var form = $("form");
// api.multiply.submitFormData(new FormData(form)).then(displayResult).catch(displayError);
// }
//
// function example13(){
// var $ = querySelector("#example13");
// var data = {
// x: 6,
// y: 7,
// }
// api.multiply.submitJSON(data).then(displayResult).catch(displayError);
// }
// api.show_source().then(function(s){
// document.querySelector('#source code').innerText = s;
// }).then(ready);
function ready(){
// // set the <code> element of each example to the corresponding functions source
// document.querySelectorAll('li pre code.js').forEach(function(e){
// var id = e.parentElement.parentElement.id;
// var f = window[id];
// var code = f.toString().split('\n').slice(2, -1).join('\n');
// e.innerText = unindent(code);
// })
document.querySelectorAll('li pre code.html').forEach(function(e){
var html = e.parentElement.parentElement.querySelector('div.example').innerHTML;
e.innerText = unindent(html);
})
hljs.initHighlighting();
// // find all the elements representing the function definitions in the python source
// document.querySelectorAll('.python .hljs-function .hljs-title').forEach(function(e){
// var a = document.createElement('a');
// a.name = e.innerText;
// e.parentElement.insertBefore(a, e)
// return defs[e.innerText] = e.parentElement;
// });
// convert all 'api.X' strings to hyperlinks to jump to python source
document.querySelectorAll('.js').forEach(function(e){
var code = e.innerHTML;
Object.keys(defs).forEach(function(k){
code = code.replace('api.' + k + '(', '<a href="#' + k + '" onclick="jumpTo(\'' + k + '\')">api.' + k + '</a>(');
})
e.innerHTML = code;
})
}


@@ -1,41 +0,0 @@
from pype import api as pype
log = pype.Logger.getLogger(__name__, "aport")
def get_anatomy(**kwarg):
return pype.Anatomy
def get_dataflow(**kwarg):
log.info(kwarg)
host = kwarg.get("host", "aport")
cls = kwarg.get("class", None)
preset = kwarg.get("preset", None)
assert any([host, cls]), log.error("aport.templates.get_dataflow():"
"Missing mandatory kwargs `host`, `cls`")
aport_dataflow = getattr(pype.Dataflow, str(host), None)
aport_dataflow_node = getattr(aport_dataflow.nodes, str(cls), None)
if preset:
aport_dataflow_node = getattr(aport_dataflow_node, str(preset), None)
log.info("Dataflow: {}".format(aport_dataflow_node))
return aport_dataflow_node
def get_colorspace(**kwarg):
log.info(kwarg)
host = kwarg.get("host", "aport")
cls = kwarg.get("class", None)
preset = kwarg.get("preset", None)
assert any([host, cls]), log.error("aport.templates.get_colorspace():"
"Missing mandatory kwargs `host`, `cls`")
aport_colorspace = getattr(pype.Colorspace, str(host), None)
aport_colorspace_node = getattr(aport_colorspace, str(cls), None)
if preset:
aport_colorspace_node = getattr(aport_colorspace_node, str(preset), None)
log.info("Colorspace: {}".format(aport_colorspace_node))
return aport_colorspace_node


@@ -23,7 +23,9 @@ class AvalonRestApi(RestApi):
if not project_name:
output = {}
for project_name in self.dbcon.tables():
project = self.dbcon[project_name].find_one({"type": "project"})
project = self.dbcon[project_name].find_one({
"type": "project"
})
output[project_name] = project
return CallbackResult(data=self.result_to_json(output))
@@ -44,7 +46,7 @@ class AvalonRestApi(RestApi):
if not self.dbcon.exist_table(_project_name):
abort(404, "Project \"{}\" was not found in database".format(
project_name
_project_name
))
if not _asset:
@@ -65,7 +67,7 @@ class AvalonRestApi(RestApi):
return asset
abort(404, "Asset \"{}\" with {} was not found in project {}".format(
_asset, identificator, project_name
_asset, identificator, _project_name
))
def result_to_json(self, result):


@@ -26,7 +26,7 @@ class ClockifySettings(QtWidgets.QWidget):
elif hasattr(parent, 'parent') and hasattr(parent.parent, 'icon'):
self.setWindowIcon(self.parent.parent.icon)
else:
pype_setup = os.getenv('PYPE_ROOT')
pype_setup = os.getenv('PYPE_SETUP_PATH')
items = [pype_setup, "app", "resources", "icon.png"]
fname = os.path.sep.join(items)
icon = QtGui.QIcon(fname)


@@ -0,0 +1,107 @@
import os
import collections
import ftrack_api
from pype.ftrack import BaseAction
from pype.ftrack.lib.avalon_sync import get_avalon_attr
class CleanHierarchicalAttrsAction(BaseAction):
identifier = "clean.hierarchical.attr"
label = "Pype Admin"
variant = "- Clean hierarchical custom attributes"
description = "Unset empty hierarchical attribute values."
role_list = ["Pypeclub", "Administrator", "Project Manager"]
icon = "{}/ftrack/action_icons/PypeAdmin.svg".format(
os.environ.get("PYPE_STATICS_SERVER", "")
)
all_project_entities_query = (
"select id, name, parent_id, link"
" from TypedContext where project_id is \"{}\""
)
cust_attr_query = (
"select value, entity_id from CustomAttributeValue "
"where entity_id in ({}) and configuration_id is \"{}\""
)
def discover(self, session, entities, event):
"""Show only on project entity."""
if len(entities) == 1 and entities[0].entity_type.lower() == "project":
return True
return False
def launch(self, session, entities, event):
project = entities[0]
user_message = "This may take some time"
self.show_message(event, user_message, result=True)
self.log.debug("Preparing entities for cleanup.")
all_entities = session.query(
self.all_project_entities_query.format(project["id"])
).all()
all_entities_ids = [
"\"{}\"".format(entity["id"])
for entity in all_entities
if entity.entity_type.lower() != "task"
]
self.log.debug(
"Collected {} entities to process.".format(len(all_entities_ids))
)
entity_ids_joined = ", ".join(all_entities_ids)
attrs, hier_attrs = get_avalon_attr(session)
for attr in hier_attrs:
configuration_key = attr["key"]
self.log.debug(
"Looking for cleanup of custom attribute \"{}\"".format(
configuration_key
)
)
configuration_id = attr["id"]
call_expr = [{
"action": "query",
"expression": self.cust_attr_query.format(
entity_ids_joined, configuration_id
)
}]
[values] = self.session.call(call_expr)
data = {}
for item in values["data"]:
value = item["value"]
if value is None:
data[item["entity_id"]] = value
if not data:
self.log.debug(
"Nothing to clean for \"{}\".".format(configuration_key)
)
continue
self.log.debug("Cleaning up {} values for \"{}\".".format(
len(data), configuration_key
))
for entity_id, value in data.items():
entity_key = collections.OrderedDict({
"configuration_id": configuration_id,
"entity_id": entity_id
})
session.recorded_operations.push(
ftrack_api.operation.DeleteEntityOperation(
"CustomAttributeValue",
entity_key
)
)
session.commit()
return True
def register(session, plugins_presets={}):
'''Register plugin. Called when used as a plugin.'''
CleanHierarchicalAttrsAction(session, plugins_presets).register()


@@ -1,14 +1,11 @@
import os
import sys
import argparse
import collections
import json
import arrow
import logging
import ftrack_api
from pype.ftrack import BaseAction
from pype.ftrack.lib.avalon_sync import CustAttrIdKey
from pypeapp import config
from ftrack_api.exception import NoResultFoundError
"""
This action creates/updates custom attributes.
@@ -135,11 +132,6 @@ class CustomAttributes(BaseAction):
return True
def launch(self, session, entities, event):
self.types = {}
self.object_type_ids = {}
self.groups = {}
self.security_roles = {}
# JOB SETTINGS
userId = event['source']['user']['id']
user = session.query('User where id is ' + userId).one()
@@ -153,7 +145,8 @@
})
session.commit()
try:
self.avalon_mongo_id_attributes(session)
self.prepare_global_data(session)
self.avalon_mongo_id_attributes(session, event)
self.custom_attributes_from_file(session, event)
job['status'] = 'done'
@@ -170,60 +163,180 @@
return True
def avalon_mongo_id_attributes(self, session):
def prepare_global_data(self, session):
self.types_per_name = {
attr_type["name"].lower(): attr_type
for attr_type in session.query("CustomAttributeType").all()
}
self.security_roles = {
role["name"].lower(): role
for role in session.query("SecurityRole").all()
}
object_types = session.query("ObjectType").all()
self.object_types_per_id = {
object_type["id"]: object_type for object_type in object_types
}
self.object_types_per_name = {
object_type["name"].lower(): object_type
for object_type in object_types
}
self.groups = {}
def avalon_mongo_id_attributes(self, session, event):
hierarchical_attr, object_type_attrs = (
self.mongo_id_custom_attributes(session)
)
if hierarchical_attr is None:
self.create_hierarchical_mongo_attr(session)
hierarchical_attr, object_type_attrs = (
self.mongo_id_custom_attributes(session)
)
if hierarchical_attr is None:
return
if object_type_attrs:
self.convert_mongo_id_to_hierarchical(
hierarchical_attr, object_type_attrs, session, event
)
def mongo_id_custom_attributes(self, session):
cust_attrs_query = (
"select id, entity_type, object_type_id, is_hierarchical, default"
" from CustomAttributeConfiguration"
" where key = \"{}\""
).format(CustAttrIdKey)
mongo_id_avalon_attr = session.query(cust_attrs_query).all()
hierarchical_attr = None
object_type_attrs = []
for cust_attr in mongo_id_avalon_attr:
if cust_attr["is_hierarchical"]:
hierarchical_attr = cust_attr
else:
object_type_attrs.append(cust_attr)
return hierarchical_attr, object_type_attrs
def create_hierarchical_mongo_attr(self, session):
# Attribute Name and Label
cust_attr_label = 'Avalon/Mongo Id'
# Types that don't need object_type_id
base = {'show'}
# Don't create custom attribute on these entity types:
exceptions = ['task', 'milestone']
exceptions.extend(base)
# Get all possible object types
all_obj_types = session.query('ObjectType').all()
# Filter object types by exceptions
filtered_types_id = set()
for obj_type in all_obj_types:
name = obj_type['name']
if " " in name:
name = name.replace(' ', '')
if obj_type['name'] not in self.object_type_ids:
self.object_type_ids[name] = obj_type['id']
if name.lower() not in exceptions:
filtered_types_id.add(obj_type['id'])
cust_attr_label = "Avalon/Mongo ID"
# Set security roles for attribute
role_list = ['API', 'Administrator']
roles = self.get_security_role(role_list)
role_list = ("API", "Administrator", "Pypeclub")
roles = self.get_security_roles(role_list)
# Set Text type of Attribute
custom_attribute_type = self.get_type('text')
custom_attribute_type = self.types_per_name["text"]
# Set group to 'avalon'
group = self.get_group('avalon')
group = self.get_group("avalon")
data = {}
data['key'] = CustAttrIdKey
data['label'] = cust_attr_label
data['type'] = custom_attribute_type
data['default'] = ''
data['write_security_roles'] = roles
data['read_security_roles'] = roles
data['group'] = group
data['config'] = json.dumps({'markdown': False})
data = {
"key": CustAttrIdKey,
"label": cust_attr_label,
"type": custom_attribute_type,
"default": "",
"write_security_roles": roles,
"read_security_roles": roles,
"group": group,
"is_hierarchical": True,
"entity_type": "show",
"config": json.dumps({"markdown": False})
}
for entity_type in base:
data['entity_type'] = entity_type
self.process_attribute(data)
self.process_attribute(data)
data['entity_type'] = 'task'
for object_type_id in filtered_types_id:
data['object_type_id'] = str(object_type_id)
self.process_attribute(data)
def convert_mongo_id_to_hierarchical(
self, hierarchical_attr, object_type_attrs, session, event
):
user_msg = "Converting old custom attributes. This may take some time."
self.show_message(event, user_msg, True)
self.log.info(user_msg)
object_types_per_id = {
object_type["id"]: object_type
for object_type in session.query("ObjectType").all()
}
cust_attr_query = (
"select value, entity_id from ContextCustomAttributeValue "
"where configuration_id is {}"
)
for attr_def in object_type_attrs:
attr_ent_type = attr_def["entity_type"]
if attr_ent_type == "show":
entity_type_label = "Project"
elif attr_ent_type == "task":
entity_type_label = (
object_types_per_id[attr_def["object_type_id"]]["name"]
)
else:
self.log.warning(
"Unsupported entity type: \"{}\". Skipping.".format(
attr_ent_type
)
)
continue
self.log.debug((
"Converting Avalon MongoID attr for Entity type \"{}\"."
).format(entity_type_label))
call_expr = [{
"action": "query",
"expression": cust_attr_query.format(attr_def["id"])
}]
if hasattr(session, "call"):
[values] = session.call(call_expr)
else:
[values] = session._call(call_expr)
for value in values["data"]:
table_values = collections.OrderedDict({
"configuration_id": hierarchical_attr["id"],
"entity_id": value["entity_id"]
})
session.recorded_operations.push(
ftrack_api.operation.UpdateEntityOperation(
"ContextCustomAttributeValue",
table_values,
"value",
ftrack_api.symbol.NOT_SET,
value["value"]
)
)
try:
session.commit()
except Exception:
session.rollback()
self.log.warning(
(
"Couldn't transfer Avalon Mongo ID"
" attribute for entity type \"{}\"."
).format(entity_type_label),
exc_info=True
)
try:
session.delete(attr_def)
session.commit()
except Exception:
session.rollback()
self.log.warning(
(
"Couldn't delete Avalon Mongo ID"
" attribute for entity type \"{}\"."
).format(entity_type_label),
exc_info=True
)
def custom_attributes_from_file(self, session, event):
presets = config.get_presets()['ftrack']['ftrack_custom_attributes']
@@ -317,11 +430,11 @@
'Type {} is not valid'.format(attr['type'])
)
type_name = attr['type'].lower()
output['key'] = attr['key']
output['label'] = attr['label']
output['type'] = self.get_type(type_name)
type_name = attr['type'].lower()
output['type'] = self.types_per_name[type_name]
config = None
if type_name == 'number':
@@ -382,15 +495,15 @@
config = json.dumps({
'multiSelect': multiSelect,
'data': json.dumps(data)
})
})
return config
def get_group(self, attr):
if isinstance(attr, str):
group_name = attr
else:
if isinstance(attr, dict):
group_name = attr['group'].lower()
else:
group_name = attr
if group_name in self.groups:
return self.groups[group_name]
@@ -416,48 +529,30 @@
'Found more than one group "{}"'.format(group_name)
)
def get_role_ALL(self):
role_name = 'ALL'
if role_name in self.security_roles:
all_roles = self.security_roles[role_name]
else:
all_roles = self.session.query('SecurityRole').all()
self.security_roles[role_name] = all_roles
for role in all_roles:
if role['name'] not in self.security_roles:
self.security_roles[role['name']] = role
return all_roles
def get_security_roles(self, security_roles):
security_roles_lowered = tuple(name.lower() for name in security_roles)
if (
len(security_roles_lowered) == 0
or "all" in security_roles_lowered
):
return list(self.security_roles.values())
output = []
if security_roles_lowered[0] == "except":
excepts = security_roles_lowered[1:]
for role_name, role in self.security_roles.items():
if role_name not in excepts:
output.append(role)
def get_security_role(self, security_roles):
roles = []
security_roles_lowered = [role.lower() for role in security_roles]
if len(security_roles) == 0 or 'all' in security_roles_lowered:
roles = self.get_role_ALL()
elif security_roles_lowered[0] == 'except':
excepts = security_roles[1:]
all = self.get_role_ALL()
for role in all:
if role['name'] not in excepts:
roles.append(role)
if role['name'] not in self.security_roles:
self.security_roles[role['name']] = role
else:
for role_name in security_roles:
for role_name in security_roles_lowered:
if role_name in self.security_roles:
roles.append(self.security_roles[role_name])
continue
try:
query = 'SecurityRole where name is "{}"'.format(role_name)
role = self.session.query(query).one()
self.security_roles[role_name] = role
roles.append(role)
except NoResultFoundError:
output.append(self.security_roles[role_name])
else:
raise CustAttrException((
'Security role "{}" does not exist'
"Security role \"{}\" was not found in Ftrack."
).format(role_name))
return roles
return output
def get_default(self, attr):
type = attr['type']
@@ -512,32 +607,17 @@
roles_read = attr['read_security_roles']
if 'read_security_roles' in output:
roles_write = attr['write_security_roles']
output['read_security_roles'] = self.get_security_role(roles_read)
output['write_security_roles'] = self.get_security_role(roles_write)
output['read_security_roles'] = self.get_security_roles(roles_read)
output['write_security_roles'] = self.get_security_roles(roles_write)
return output
def get_type(self, type_name):
if type_name in self.types:
return self.types[type_name]
query = 'CustomAttributeType where name is "{}"'.format(type_name)
type = self.session.query(query).one()
self.types[type_name] = type
return type
def get_entity_type(self, attr):
if 'is_hierarchical' in attr:
if attr['is_hierarchical'] is True:
type = 'show'
if 'entity_type' in attr:
type = attr['entity_type']
return {
'is_hierarchical': True,
'entity_type': type
}
if attr.get("is_hierarchical", False):
return {
"is_hierarchical": True,
"entity_type": attr.get("entity_type") or "show"
}
if 'entity_type' not in attr:
raise CustAttrException('Missing entity_type')
@@ -549,23 +629,16 @@
raise CustAttrException('Missing object_type')
object_type_name = attr['object_type']
if object_type_name not in self.object_type_ids:
try:
query = 'ObjectType where name is "{}"'.format(
object_type_name
)
object_type_id = self.session.query(query).one()['id']
except Exception:
raise CustAttrException((
'Object type with name "{}" don\'t exist'
).format(object_type_name))
self.object_type_ids[object_type_name] = object_type_id
else:
object_type_id = self.object_type_ids[object_type_name]
object_type_name_low = object_type_name.lower()
object_type = self.object_types_per_name.get(object_type_name_low)
if not object_type:
raise CustAttrException((
'Object type with name "{}" don\'t exist'
).format(object_type_name))
return {
'entity_type': attr['entity_type'],
'object_type_id': object_type_id
'object_type_id': object_type["id"]
}
@@ -573,42 +646,3 @@ def register(session, plugins_presets={}):
'''Register plugin. Called when used as a plugin.'''
CustomAttributes(session, plugins_presets).register()
def main(arguments=None):
'''Set up logging and register action.'''
if arguments is None:
arguments = []
parser = argparse.ArgumentParser()
# Allow setting of logging level from arguments.
loggingLevels = {}
for level in (
logging.NOTSET, logging.DEBUG, logging.INFO, logging.WARNING,
logging.ERROR, logging.CRITICAL
):
loggingLevels[logging.getLevelName(level).lower()] = level
parser.add_argument(
'-v', '--verbosity',
help='Set the logging output verbosity.',
choices=loggingLevels.keys(),
default='info'
)
namespace = parser.parse_args(arguments)
# Set up basic logging
logging.basicConfig(level=loggingLevels[namespace.verbosity])
session = ftrack_api.Session()
register(session)
# Wait for events
logging.info(
'Registered actions and listening for events. Use Ctrl-C to abort.'
)
session.event_hub.wait()
if __name__ == '__main__':
raise SystemExit(main(sys.argv[1:]))


@@ -1,369 +1,240 @@
import os
import sys
import logging
import argparse
import re
import ftrack_api
from pype.ftrack import BaseAction
from avalon import lib as avalonlib
from pype.ftrack.lib.io_nonsingleton import DbConnector
from pypeapp import config, Anatomy
class CreateFolders(BaseAction):
'''Custom action.'''
#: Action identifier.
identifier = 'create.folders'
#: Action label.
label = 'Create Folders'
#: Action Icon.
icon = '{}/ftrack/action_icons/CreateFolders.svg'.format(
os.environ.get('PYPE_STATICS_SERVER', '')
identifier = "create.folders"
label = "Create Folders"
icon = "{}/ftrack/action_icons/CreateFolders.svg".format(
os.environ.get("PYPE_STATICS_SERVER", "")
)
db = DbConnector()
def discover(self, session, entities, event):
''' Validation '''
if len(entities) != 1:
return False
not_allowed = ['assetversion', 'project']
not_allowed = ["assetversion", "project"]
if entities[0].entity_type.lower() in not_allowed:
return False
return True
def interface(self, session, entities, event):
if event['data'].get('values', {}):
if event["data"].get("values", {}):
return
entity = entities[0]
without_interface = True
for child in entity['children']:
if child['object_type']['name'].lower() != 'task':
for child in entity["children"]:
if child["object_type"]["name"].lower() != "task":
without_interface = False
break
self.without_interface = without_interface
if without_interface:
return
title = 'Create folders'
title = "Create folders"
entity_name = entity['name']
entity_name = entity["name"]
msg = (
'<h2>Do you want to create folders also'
' for all children of "{}"?</h2>'
"<h2>Do you want to create folders also"
" for all children of \"{}\"?</h2>"
)
if entity.entity_type.lower() == 'project':
entity_name = entity['full_name']
msg = msg.replace(' also', '')
msg += '<h3>(Project root won\'t be created if not checked)</h3>'
if entity.entity_type.lower() == "project":
entity_name = entity["full_name"]
msg = msg.replace(" also", "")
msg += "<h3>(Project root won't be created if not checked)</h3>"
items = []
item_msg = {
'type': 'label',
'value': msg.format(entity_name)
"type": "label",
"value": msg.format(entity_name)
}
item_label = {
'type': 'label',
'value': 'With all children entities'
"type": "label",
"value": "With all children entities"
}
item = {
'name': 'children_included',
'type': 'boolean',
'value': False
"name": "children_included",
"type": "boolean",
"value": False
}
items.append(item_msg)
items.append(item_label)
items.append(item)
if len(items) == 0:
return {
'success': False,
'message': 'Didn\'t find any running jobs'
}
else:
return {
'items': items,
'title': title
}
return {
"items": items,
"title": title
}
def launch(self, session, entities, event):
'''Callback method for custom action.'''
with_childrens = True
if self.without_interface is False:
if 'values' not in event['data']:
if "values" not in event["data"]:
return
with_childrens = event['data']['values']['children_included']
with_childrens = event["data"]["values"]["children_included"]
entity = entities[0]
if entity.entity_type.lower() == 'project':
if entity.entity_type.lower() == "project":
proj = entity
else:
proj = entity['project']
project_name = proj['full_name']
project_code = proj['name']
if entity.entity_type.lower() == 'project' and with_childrens == False:
proj = entity["project"]
project_name = proj["full_name"]
project_code = proj["name"]
if entity.entity_type.lower() == 'project' and with_childrens is False:
return {
'success': True,
'message': 'Nothing was created'
}
data = {
"root": os.environ["AVALON_PROJECTS"],
"project": {
"name": project_name,
"code": project_code
}
}
all_entities = []
all_entities.append(entity)
if with_childrens:
all_entities = self.get_notask_children(entity)
av_project = None
try:
self.db.install()
self.db.Session['AVALON_PROJECT'] = project_name
av_project = self.db.find_one({'type': 'project'})
template_work = av_project['config']['template']['work']
template_publish = av_project['config']['template']['publish']
self.db.uninstall()
except Exception:
templates = Anatomy().templates
template_work = templates["avalon"]["work"]
template_publish = templates["avalon"]["publish"]
anatomy = Anatomy(project_name)
work_keys = ["work", "folder"]
work_template = anatomy.templates
for key in work_keys:
work_template = work_template[key]
work_has_apps = "{app" in work_template
publish_keys = ["publish", "folder"]
publish_template = anatomy.templates
for key in publish_keys:
publish_template = publish_template[key]
publish_has_apps = "{app" in publish_template
presets = config.get_presets()
app_presets = presets.get("tools", {}).get("sw_folders")
cached_apps = {}
collected_paths = []
presets = config.get_presets()['tools']['sw_folders']
for entity in all_entities:
if entity.entity_type.lower() == 'project':
if entity.entity_type.lower() == "project":
continue
ent_data = data.copy()
ent_data = {
"project": {
"name": project_name,
"code": project_code
}
}
asset_name = entity['name']
ent_data['asset'] = asset_name
ent_data["asset"] = entity["name"]
parents = entity['link']
hierarchy_names = [p['name'] for p in parents[1:-1]]
hierarchy = ''
parents = entity["link"][1:-1]
hierarchy_names = [p["name"] for p in parents]
hierarchy = ""
if hierarchy_names:
hierarchy = os.path.sep.join(hierarchy_names)
ent_data['hierarchy'] = hierarchy
ent_data["hierarchy"] = hierarchy
tasks_created = False
if entity['children']:
for child in entity['children']:
if child['object_type']['name'].lower() != 'task':
continue
tasks_created = True
task_type_name = child['type']['name'].lower()
task_data = ent_data.copy()
task_data['task'] = child['name']
possible_apps = presets.get(task_type_name, [])
template_work_created = False
template_publish_created = False
apps = []
for child in entity["children"]:
if child["object_type"]["name"].lower() != "task":
continue
tasks_created = True
task_type_name = child["type"]["name"].lower()
task_data = ent_data.copy()
task_data["task"] = child["name"]
apps = []
if app_presets and (work_has_apps or publish_has_apps):
possible_apps = app_presets.get(task_type_name, [])
for app in possible_apps:
try:
app_data = avalonlib.get_application(app)
app_dir = app_data['application_dir']
except ValueError:
app_dir = app
if app in cached_apps:
app_dir = cached_apps[app]
else:
try:
app_data = avalonlib.get_application(app)
app_dir = app_data["application_dir"]
except ValueError:
app_dir = app
cached_apps[app] = app_dir
apps.append(app_dir)
# Template work
if '{app}' in template_work:
for app in apps:
template_work_created = True
app_data = task_data.copy()
app_data['app'] = app
collected_paths.append(
self.compute_template(
template_work, app_data
)
)
if template_work_created is False:
collected_paths.append(
self.compute_template(template_work, task_data)
)
# Template publish
if '{app}' in template_publish:
for app in apps:
template_publish_created = True
app_data = task_data.copy()
app_data['app'] = app
collected_paths.append(
self.compute_template(
template_publish, app_data, True
)
)
if template_publish_created is False:
collected_paths.append(
self.compute_template(
template_publish, task_data, True
)
)
# Template work
if work_has_apps:
app_data = task_data.copy()
for app in apps:
app_data["app"] = app
collected_paths.append(self.compute_template(
anatomy, app_data, work_keys
))
else:
collected_paths.append(self.compute_template(
anatomy, task_data, work_keys
))
# Template publish
if publish_has_apps:
app_data = task_data.copy()
for app in apps:
app_data["app"] = app
collected_paths.append(self.compute_template(
anatomy, app_data, publish_keys
))
else:
collected_paths.append(self.compute_template(
anatomy, task_data, publish_keys
))
if not tasks_created:
# create path for entity
collected_paths.append(
self.compute_template(template_work, ent_data)
)
collected_paths.append(
self.compute_template(template_publish, ent_data)
)
if len(collected_paths) > 0:
self.log.info('Creating folders:')
collected_paths.append(self.compute_template(
anatomy, ent_data, work_keys
))
collected_paths.append(self.compute_template(
anatomy, ent_data, publish_keys
))
if len(collected_paths) == 0:
return {
"success": True,
"message": "No project folders to create."
}
self.log.info("Creating folders:")
for path in set(collected_paths):
self.log.info(path)
if not os.path.exists(path):
os.makedirs(path)
return {
'success': True,
'message': 'Created Folders Successfully!'
"success": True,
"message": "Successfully created project folders."
}
def get_notask_children(self, entity):
output = []
if entity.get('object_type', {}).get(
'name', entity.entity_type
).lower() == 'task':
if entity.entity_type.lower() == "task":
return output
else:
output.append(entity)
if entity['children']:
for child in entity['children']:
output.extend(self.get_notask_children(child))
output.append(entity)
for child in entity["children"]:
output.extend(self.get_notask_children(child))
return output
def template_format(self, template, data):
def compute_template(self, anatomy, data, anatomy_keys):
filled_template = anatomy.format_all(data)
for key in anatomy_keys:
filled_template = filled_template[key]
partial_data = PartialDict(data)
if filled_template.solved:
return os.path.normpath(filled_template)
# remove subdict items from string (like 'project[name]')
subdict = PartialDict()
count = 1
store_pattern = 5*'_'+'{:0>3}'
regex_pattern = r"\{\w*\[[^\}]*\]\}"
matches = re.findall(regex_pattern, template)
for match in matches:
key = store_pattern.format(count)
subdict[key] = match
template = template.replace(match, '{'+key+'}')
count += 1
# solve fillind keys with optional keys
solved = self._solve_with_optional(template, partial_data)
# try to solve subdict and replace them back to string
for k, v in subdict.items():
try:
v = v.format_map(data)
except (KeyError, TypeError):
pass
subdict[k] = v
return solved.format_map(subdict)
def _solve_with_optional(self, template, data):
# Remove optional missing keys
pattern = re.compile(r"(<.*?[^{0]*>)[^0-9]*?")
invalid_optionals = []
for group in pattern.findall(template):
try:
group.format(**data)
except KeyError:
invalid_optionals.append(group)
for group in invalid_optionals:
template = template.replace(group, "")
solved = template.format_map(data)
# solving after format optional in second round
for catch in re.compile(r"(<.*?[^{0]*>)[^0-9]*?").findall(solved):
if "{" in catch:
# remove all optional
solved = solved.replace(catch, "")
else:
# Remove optional symbols
solved = solved.replace(catch, catch[1:-1])
return solved
def compute_template(self, str, data, task=False):
first_result = self.template_format(str, data)
if first_result == first_result.split('{')[0]:
return os.path.normpath(first_result)
if task:
return os.path.normpath(first_result.split('{')[0])
index = first_result.index('{')
regex = r'\{\w*[^\}]*\}'
match = re.findall(regex, first_result[index:])[0]
without_missing = str.split(match)[0].split('}')
output_items = []
for part in without_missing:
if '{' in part:
output_items.append(part + '}')
return os.path.normpath(
self.template_format(''.join(output_items), data)
self.log.warning(
"Template \"{}\" was not fully filled \"{}\"".format(
filled_template.template, filled_template
)
)
class PartialDict(dict):
def __getitem__(self, item):
out = super().__getitem__(item)
if isinstance(out, dict):
return '{'+item+'}'
return out
def __missing__(self, key):
return '{'+key+'}'
return os.path.normpath(filled_template.split("{")[0])
def register(session, plugins_presets={}):
'''Register plugin. Called when used as a plugin.'''
"""Register plugin. Called when used as a plugin."""
CreateFolders(session, plugins_presets).register()
def main(arguments=None):
'''Set up logging and register action.'''
if arguments is None:
arguments = []
parser = argparse.ArgumentParser()
# Allow setting of logging level from arguments.
loggingLevels = {}
for level in (
logging.NOTSET, logging.DEBUG, logging.INFO, logging.WARNING,
logging.ERROR, logging.CRITICAL
):
loggingLevels[logging.getLevelName(level).lower()] = level
parser.add_argument(
'-v', '--verbosity',
help='Set the logging output verbosity.',
choices=loggingLevels.keys(),
default='info'
)
namespace = parser.parse_args(arguments)
# Set up basic logging
logging.basicConfig(level=loggingLevels[namespace.verbosity])
session = ftrack_api.Session()
register(session)
# Wait for events
logging.info(
'Registered actions and listening for events. Use Ctrl-C to abort.'
)
session.event_hub.wait()
if __name__ == '__main__':
raise SystemExit(main(sys.argv[1:]))


@@ -1,36 +1,67 @@
import os
import sys
import re
import argparse
import logging
import ftrack_api
from pype.ftrack import BaseAction
from pypeapp import config
from pypeapp import config, Anatomy
class CreateProjectFolders(BaseAction):
'''Edit meta data action.'''
"""Action create folder structure and may create hierarchy in Ftrack.
#: Action identifier.
identifier = 'create.project.structure'
#: Action label.
label = 'Create Project Structure'
#: Action description.
description = 'Creates folder structure'
#: roles that are allowed to register this action
role_list = ['Pypeclub', 'Administrator', 'Project Manager']
icon = '{}/ftrack/action_icons/CreateProjectFolders.svg'.format(
os.environ.get('PYPE_STATICS_SERVER', '')
Creation of folder structure and hierarchy in Ftrack is based on presets.
These presets are located in:
`~/pype-config/presets/tools/project_folder_structure.json`
Example of content:
```json
{
"__project_root__": {
"prod" : {},
"resources" : {
"footage": {
"plates": {},
"offline": {}
},
"audio": {},
"art_dept": {}
},
"editorial" : {},
"assets[ftrack.Library]": {
"characters[ftrack]": {},
"locations[ftrack]": {}
},
"shots[ftrack.Sequence]": {
"scripts": {},
"editorial[ftrack.Folder]": {}
}
}
}
```
Key "__project_root__" indicates root folder (or entity). Each key in
dictionary represents folder name. Value may contain another dictionary
with subfolders.
Identifier `[ftrack]` in name says that this should be also created in
Ftrack hierarchy. It is possible to specify entity type of item with "." .
If key is `assets[ftrack.Library]` then in ftrack will be created entity
with name "assets" and entity type "Library". It is expected Library entity
type exist in Ftrack.
"""
identifier = "create.project.structure"
label = "Create Project Structure"
description = "Creates folder structure"
role_list = ["Pypeclub", "Administrator", "Project Manager"]
icon = "{}/ftrack/action_icons/CreateProjectFolders.svg".format(
os.environ.get("PYPE_STATICS_SERVER", "")
)
pattern_array = re.compile('\[.*\]')
pattern_ftrack = '.*\[[.]*ftrack[.]*'
pattern_ent_ftrack = 'ftrack\.[^.,\],\s,]*'
project_root_key = '__project_root__'
pattern_array = re.compile(r"\[.*\]")
pattern_ftrack = re.compile(r".*\[[.]*ftrack[.]*")
pattern_ent_ftrack = re.compile(r"ftrack\.[^.,\],\s,]*")
project_root_key = "__project_root__"
def discover(self, session, entities, event):
''' Validation '''
if len(entities) != 1:
return False
@ -41,22 +72,30 @@ class CreateProjectFolders(BaseAction):
def launch(self, session, entities, event):
entity = entities[0]
if entity.entity_type.lower() == 'project':
project = entity
else:
project = entity['project']
project = self.get_project_from_entity(entity)
project_folder_presets = (
config.get_presets()
.get("tools", {})
.get("project_folder_structure")
)
if not project_folder_presets:
return {
"success": False,
"message": "Project structure presets are not set."
}
presets = config.get_presets()['tools']['project_folder_structure']
try:
# Get paths based on presets
basic_paths = self.get_path_items(presets)
self.create_folders(basic_paths, entity)
basic_paths = self.get_path_items(project_folder_presets)
anatomy = Anatomy(project["full_name"])
self.create_folders(basic_paths, entity, project, anatomy)
self.create_ftrack_entities(basic_paths, project)
except Exception as e:
except Exception as exc:
session.rollback()
return {
'success': False,
'message': str(e)
"success": False,
"message": str(exc)
}
return True
@ -113,15 +152,15 @@ class CreateProjectFolders(BaseAction):
def trigger_creation(self, separation, parent):
for item, subvalues in separation.items():
matches = re.findall(self.pattern_array, item)
ent_type = 'Folder'
ent_type = "Folder"
if len(matches) == 0:
name = item
else:
match = matches[0]
name = item.replace(match, '')
name = item.replace(match, "")
ent_type_match = re.findall(self.pattern_ent_ftrack, match)
if len(ent_type_match) > 0:
ent_type_split = ent_type_match[0].split('.')
ent_type_split = ent_type_match[0].split(".")
if len(ent_type_split) == 2:
ent_type = ent_type_split[1]
new_parent = self.create_ftrack_entity(name, ent_type, parent)
@ -130,22 +169,22 @@ class CreateProjectFolders(BaseAction):
self.trigger_creation(subvalue, new_parent)
def create_ftrack_entity(self, name, ent_type, parent):
for children in parent['children']:
if children['name'] == name:
for children in parent["children"]:
if children["name"] == name:
return children
data = {
'name': name,
'parent_id': parent['id']
"name": name,
"parent_id": parent["id"]
}
if parent.entity_type.lower() == 'project':
data['project_id'] = parent['id']
if parent.entity_type.lower() == "project":
data["project_id"] = parent["id"]
else:
data['project_id'] = parent['project']['id']
data["project_id"] = parent["project"]["id"]
existing_entity = self.session.query((
"TypedContext where name is \"{}\" and "
"parent_id is \"{}\" and project_id is \"{}\""
).format(name, data['parent_id'], data['project_id'])).first()
).format(name, data["parent_id"], data["project_id"])).first()
if existing_entity:
return existing_entity
@ -161,12 +200,11 @@ class CreateProjectFolders(BaseAction):
else:
paths = self.get_path_items(value)
for path in paths:
if isinstance(path, str):
output.append([key, path])
else:
p = [key]
p.extend(path)
output.append(p)
if not isinstance(path, (list, tuple)):
path = [path]
output.append([key, *path])
return output
def compute_paths(self, basic_paths_items, project_root):
@ -176,72 +214,30 @@ class CreateProjectFolders(BaseAction):
for path_item in path_items:
matches = re.findall(self.pattern_array, path_item)
if len(matches) > 0:
path_item = path_item.replace(matches[0], '')
path_item = path_item.replace(matches[0], "")
if path_item == self.project_root_key:
path_item = project_root
clean_items.append(path_item)
output.append(os.path.normpath(os.path.sep.join(clean_items)))
return output
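A rough sketch, reusing the hypothetical preset from the class docstring above, of how the nested dictionary is flattened by `get_path_items` and resolved by `compute_paths` (the exact output shape may differ):

```python
presets = {"__project_root__": {"prod": {}, "resources": {"audio": {}}}}

# get_path_items(presets) would yield items along the lines of:
#   ["__project_root__", "prod"]
#   ["__project_root__", "resources", "audio"]
#
# compute_paths then swaps "__project_root__" for the real project root
# (e.g. "/mnt/projects/demo") and joins the parts:
#   /mnt/projects/demo/prod
#   /mnt/projects/demo/resources/audio
```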
def create_folders(self, basic_paths, entity):
# Set project root folder
if entity.entity_type.lower() == 'project':
project_name = entity['full_name']
def create_folders(self, basic_paths, entity, project, anatomy):
roots_paths = []
if isinstance(anatomy.roots, dict):
for root in anatomy.roots:
roots_paths.append(root.value)
else:
project_name = entity['project']['full_name']
project_root_items = [os.environ['AVALON_PROJECTS'], project_name]
project_root = os.path.sep.join(project_root_items)
full_paths = self.compute_paths(basic_paths, project_root)
#Create folders
for path in full_paths:
if os.path.exists(path):
continue
os.makedirs(path.format(project_root=project_root))
roots_paths.append(anatomy.roots.value)
for root_path in roots_paths:
project_root = os.path.join(root_path, project["full_name"])
full_paths = self.compute_paths(basic_paths, project_root)
# Create folders
for path in full_paths:
if os.path.exists(path):
continue
os.makedirs(path.format(project_root=project_root))
def register(session, plugins_presets={}):
'''Register plugin. Called when used as an plugin.'''
CreateProjectFolders(session, plugins_presets).register()
def main(arguments=None):
'''Set up logging and register action.'''
if arguments is None:
arguments = []
parser = argparse.ArgumentParser()
# Allow setting of logging level from arguments.
loggingLevels = {}
for level in (
logging.NOTSET, logging.DEBUG, logging.INFO, logging.WARNING,
logging.ERROR, logging.CRITICAL
):
loggingLevels[logging.getLevelName(level).lower()] = level
parser.add_argument(
'-v', '--verbosity',
help='Set the logging output verbosity.',
choices=loggingLevels.keys(),
default='info'
)
namespace = parser.parse_args(arguments)
# Set up basic logging
logging.basicConfig(level=loggingLevels[namespace.verbosity])
session = ftrack_api.Session()
register(session)
# Wait for events
logging.info(
'Registered actions and listening for events. Use Ctrl-C to abort.'
)
session.event_hub.wait()
if __name__ == '__main__':
raise SystemExit(main(sys.argv[1:]))

View file

@ -49,27 +49,23 @@ class DeleteAssetSubset(BaseAction):
def _launch(self, event):
try:
args = self._translate_event(
self.session, event
)
entities = self._translate_event(event)
if "values" not in event["data"]:
self.dbcon.install()
return self._interface(self.session, *args)
return self._interface(self.session, entities, event)
confirmation = self.confirm_delete(*args)
confirmation = self.confirm_delete(entities, event)
if confirmation:
return confirmation
self.dbcon.install()
response = self.launch(
self.session, *args
self.session, entities, event
)
finally:
self.dbcon.uninstall()
return self._handle_result(
self.session, response, *args
)
return self._handle_result(response)
def interface(self, session, entities, event):
self.show_message(event, "Preparing data...", True)

View file

@ -7,6 +7,7 @@ from pymongo import UpdateOne
from pype.ftrack import BaseAction
from pype.ftrack.lib.io_nonsingleton import DbConnector
from pypeapp import Anatomy
import avalon.pipeline
@ -21,8 +22,8 @@ class DeleteOldVersions(BaseAction):
" archived with only lates versions."
)
role_list = ["Pypeclub", "Project Manager", "Administrator"]
icon = '{}/ftrack/action_icons/PypeAdmin.svg'.format(
os.environ.get('PYPE_STATICS_SERVER', '')
icon = "{}/ftrack/action_icons/PypeAdmin.svg".format(
os.environ.get("PYPE_STATICS_SERVER", "")
)
dbcon = DbConnector()
@ -194,6 +195,7 @@ class DeleteOldVersions(BaseAction):
# Set Mongo collection
project_name = project["full_name"]
anatomy = Anatomy(project_name)
self.dbcon.Session["AVALON_PROJECT"] = project_name
self.log.debug("Project is set to {}".format(project_name))
@ -307,7 +309,7 @@ class DeleteOldVersions(BaseAction):
dir_paths = {}
file_paths_by_dir = collections.defaultdict(list)
for repre in repres:
file_path, seq_path = self.path_from_represenation(repre)
file_path, seq_path = self.path_from_represenation(repre, anatomy)
if file_path is None:
self.log.warning((
"Could not format path for represenation \"{}\""
@ -495,21 +497,17 @@ class DeleteOldVersions(BaseAction):
self.log.debug("Removed folder: {}".format(dir_path))
os.rmdir(dir_path)
def path_from_represenation(self, representation):
def path_from_represenation(self, representation, anatomy):
try:
template = representation["data"]["template"]
except KeyError:
return (None, None)
root = os.environ["AVALON_PROJECTS"]
if not root:
return (None, None)
sequence_path = None
try:
context = representation["context"]
context["root"] = root
context["root"] = anatomy.roots
path = avalon.pipeline.format_template_with_optional_keys(
context, template
)
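A hedged sketch of that formatting step with made-up values, assuming the Anatomy roots object formats as a filesystem path inside the template (`format_template_with_optional_keys` is the avalon helper used above):

```python
import avalon.pipeline
from pypeapp import Anatomy

anatomy = Anatomy("demo")  # hypothetical project name
template = "{root}/{project[name]}/publish/{subset}/v{version:0>3}"
context = {
    "root": anatomy.roots,            # filled from project root settings
    "project": {"name": "demo"},
    "subset": "modelMain",
    "version": 1,
}
path = avalon.pipeline.format_template_with_optional_keys(context, template)
# -> e.g. "/mnt/projects/demo/publish/modelMain/v001", depending on roots
```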

View file

@ -2,7 +2,6 @@ import os
import copy
import shutil
import collections
import string
import clique
from bson.objectid import ObjectId
@ -17,24 +16,18 @@ from pype.ftrack.lib.avalon_sync import CustAttrIdKey
class Delivery(BaseAction):
'''Edit meta data action.'''
#: Action identifier.
identifier = "delivery.action"
#: Action label.
label = "Delivery"
#: Action description.
description = "Deliver data to client"
#: roles that are allowed to register this action
role_list = ["Pypeclub", "Administrator", "Project manager"]
icon = '{}/ftrack/action_icons/Delivery.svg'.format(
os.environ.get('PYPE_STATICS_SERVER', '')
icon = "{}/ftrack/action_icons/Delivery.svg".format(
os.environ.get("PYPE_STATICS_SERVER", "")
)
db_con = DbConnector()
def discover(self, session, entities, event):
''' Validation '''
for entity in entities:
if entity.entity_type.lower() == "assetversion":
return True
@ -301,17 +294,10 @@ class Delivery(BaseAction):
repre = repres_by_name.get(comp_name)
repres_to_deliver.append(repre)
if not location_path:
location_path = os.environ.get("AVALON_PROJECTS") or ""
print(location_path)
anatomy = Anatomy(project_name)
for repre in repres_to_deliver:
# Get destination repre path
anatomy_data = copy.deepcopy(repre["context"])
anatomy_data["root"] = location_path
anatomy_filled = anatomy.format_all(anatomy_data)
test_path = anatomy_filled["delivery"][anatomy_name]
@ -341,7 +327,7 @@ class Delivery(BaseAction):
self.report_items[msg].append(sub_msg)
self.log.warning(
"{} Representation: \"{}\" Filled: <{}>".format(
msg, str(repre["_id"]), str(result)
msg, str(repre["_id"]), str(test_path)
)
)
continue
@ -352,7 +338,7 @@ class Delivery(BaseAction):
if frame:
repre["context"]["frame"] = len(str(frame)) * "#"
repre_path = self.path_from_represenation(repre)
repre_path = self.path_from_represenation(repre, anatomy)
# TODO add backup solution where root of path from component
# is replaced with AVALON_PROJECTS root
if not frame:
@ -452,7 +438,7 @@ class Delivery(BaseAction):
self.copy_file(src, dst)
def path_from_represenation(self, representation):
def path_from_represenation(self, representation, anatomy):
try:
template = representation["data"]["template"]
@ -461,7 +447,7 @@ class Delivery(BaseAction):
try:
context = representation["context"]
context["root"] = os.environ.get("AVALON_PROJECTS") or ""
context["root"] = anatomy.roots
path = pipeline.format_template_with_optional_keys(
context, template
)

View file

@ -1,9 +1,8 @@
import os
import json
from ruamel import yaml
from pype.ftrack import BaseAction
from pypeapp import config
from pypeapp import config, Anatomy, project_overrides_dir_path
from pype.ftrack.lib.avalon_sync import get_avalon_attr
@ -24,6 +23,7 @@ class PrepareProject(BaseAction):
# Key to store info about trigerring create folder structure
create_project_structure_key = "create_folder_structure"
item_splitter = {'type': 'label', 'value': '---'}
def discover(self, session, entities, event):
''' Validation '''
@ -41,15 +41,190 @@ class PrepareProject(BaseAction):
# Inform user that this may take a while
self.show_message(event, "Preparing data... Please wait", True)
self.log.debug("Preparing data which will be shown")
self.log.debug("Loading custom attributes")
cust_attrs, hier_cust_attrs = get_avalon_attr(session, True)
project_defaults = config.get_presets(
entities[0]["full_name"]
).get("ftrack", {}).get("project_defaults", {})
self.log.debug("Preparing data which will be shown")
project_name = entities[0]["full_name"]
project_defaults = (
config.get_presets(project_name)
.get("ftrack", {})
.get("project_defaults", {})
)
anatomy = Anatomy(project_name)
if not anatomy.roots:
return {
"success": False,
"message": (
"Have issues with loading Roots for project \"{}\"."
).format(anatomy.project_name)
}
root_items = self.prepare_root_items(anatomy)
ca_items, multiselect_enumerators = (
self.prepare_custom_attribute_items(project_defaults)
)
self.log.debug("Heavy items are ready. Preparing last items group.")
title = "Prepare Project"
items = []
# Add root items
items.extend(root_items)
items.append(self.item_splitter)
# Ask if want to trigger Action Create Folder Structure
items.append({
"type": "label",
"value": "<h3>Want to create basic Folder Structure?</h3>"
})
items.append({
"name": self.create_project_structure_key,
"type": "boolean",
"value": False,
"label": "Check if Yes"
})
items.append(self.item_splitter)
items.append({
"type": "label",
"value": "<h3>Set basic Attributes:</h3>"
})
items.extend(ca_items)
# This item will be last (before enumerators)
# - sets value of auto synchronization
auto_sync_name = "avalon_auto_sync"
auto_sync_item = {
"name": auto_sync_name,
"type": "boolean",
"value": project_defaults.get(auto_sync_name, False),
"label": "AutoSync to Avalon"
}
# Add autosync attribute
items.append(auto_sync_item)
# Add enumerator items at the end
for item in multiselect_enumerators:
items.append(item)
return {
"items": items,
"title": title
}
def prepare_root_items(self, anatomy):
root_items = []
self.log.debug("Root items preparation begins.")
root_names = anatomy.root_names()
roots = anatomy.roots
root_items.append({
"type": "label",
"value": "<h3>Check your Project root settings</h3>"
})
root_items.append({
"type": "label",
"value": (
"<p><i>NOTE: Roots are <b>crutial</b> for path filling"
" (and creating folder structure).</i></p>"
)
})
root_items.append({
"type": "label",
"value": (
"<p><i>WARNING: Do not change roots on running project,"
" that <b>will cause workflow issues</b>.</i></p>"
)
})
default_roots = anatomy.roots
while isinstance(default_roots, dict):
key = tuple(default_roots.keys())[0]
default_roots = default_roots[key]
empty_text = "Enter root path here..."
# Root names is None when anatomy templates contain "{root}"
all_platforms = ["windows", "linux", "darwin"]
if root_names is None:
root_items.append(self.item_splitter)
# find first possible key
for platform in all_platforms:
value = default_roots.raw_data.get(platform) or ""
root_items.append({
"label": platform,
"name": "__root__{}".format(platform),
"type": "text",
"value": value,
"empty_text": empty_text
})
return root_items
root_name_data = {}
missing_roots = []
for root_name in root_names:
root_name_data[root_name] = {}
if not isinstance(roots, dict):
missing_roots.append(root_name)
continue
root_item = roots.get(root_name)
if not root_item:
missing_roots.append(root_name)
continue
for platform in all_platforms:
root_name_data[root_name][platform] = (
root_item.raw_data.get(platform) or ""
)
if missing_roots:
default_values = {}
for platform in all_platforms:
default_values[platform] = (
default_roots.raw_data.get(platform) or ""
)
for root_name in missing_roots:
root_name_data[root_name] = default_values
root_names = list(root_name_data.keys())
root_items.append({
"type": "hidden",
"name": "__rootnames__",
"value": json.dumps(root_names)
})
for root_name, values in root_name_data.items():
root_items.append(self.item_splitter)
root_items.append({
"type": "label",
"value": "Root: \"{}\"".format(root_name)
})
for platform, value in values.items():
root_items.append({
"label": platform,
"name": "__root__{}{}".format(root_name, platform),
"type": "text",
"value": value,
"empty_text": empty_text
})
self.log.debug("Root items preparation ended.")
return root_items
def _attributes_to_set(self, project_defaults):
attributes_to_set = {}
cust_attrs, hier_cust_attrs = get_avalon_attr(self.session, True)
for attr in hier_cust_attrs:
key = attr["key"]
if key.startswith("avalon_"):
@ -77,45 +252,17 @@ class PrepareProject(BaseAction):
attributes_to_set.items(),
key=lambda x: x[1]["label"]
))
return attributes_to_set
def prepare_custom_attribute_items(self, project_defaults):
items = []
multiselect_enumerators = []
attributes_to_set = self._attributes_to_set(project_defaults)
self.log.debug("Preparing interface for keys: \"{}\"".format(
str([key for key in attributes_to_set])
))
item_splitter = {'type': 'label', 'value': '---'}
title = "Prepare Project"
items = []
# Ask if want to trigger Action Create Folder Structure
items.append({
"type": "label",
"value": "<h3>Want to create basic Folder Structure?</h3>"
})
items.append({
"name": self.create_project_structure_key,
"type": "boolean",
"value": False,
"label": "Check if Yes"
})
items.append(item_splitter)
items.append({
"type": "label",
"value": "<h3>Set basic Attributes:</h3>"
})
multiselect_enumerators = []
# This item will be last (before enumerators)
# - sets value of auto synchronization
auto_sync_name = "avalon_auto_sync"
auto_sync_item = {
"name": auto_sync_name,
"type": "boolean",
"value": project_defaults.get(auto_sync_name, False),
"label": "AutoSync to Avalon"
}
for key, in_data in attributes_to_set.items():
attr = in_data["object"]
@ -139,8 +286,7 @@ class PrepareProject(BaseAction):
attr_config_data = json.loads(attr_config["data"])
if attr_config["multiSelect"] is True:
multiselect_enumerators.append(item_splitter)
multiselect_enumerators.append(self.item_splitter)
multiselect_enumerators.append({
"type": "label",
"value": in_data["label"]
@ -160,10 +306,7 @@ class PrepareProject(BaseAction):
"label": "- {}".format(option["menu"])
}
if default:
if (
isinstance(default, list) or
isinstance(default, tuple)
):
if isinstance(default, (list, tuple)):
if name in default:
item["value"] = True
else:
@ -204,17 +347,7 @@ class PrepareProject(BaseAction):
items.append(item)
# Add autosync attribute
items.append(auto_sync_item)
# Add enumerator items at the end
for item in multiselect_enumerators:
items.append(item)
return {
'items': items,
'title': title
}
return items, multiselect_enumerators
def launch(self, session, entities, event):
if not event['data'].get('values', {}):
@ -222,6 +355,35 @@ class PrepareProject(BaseAction):
in_data = event['data']['values']
root_values = {}
root_key = "__root__"
for key in tuple(in_data.keys()):
if key.startswith(root_key):
_key = key[len(root_key):]
root_values[_key] = in_data.pop(key)
root_names = in_data.pop("__rootnames__", None)
root_data = {}
if root_names:
for root_name in json.loads(root_names):
root_data[root_name] = {}
for key, value in tuple(root_values.items()):
if key.startswith(root_name):
_key = key[len(root_name):]
root_data[root_name][_key] = value
else:
for key, value in root_values.items():
root_data[key] = value
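# Example (hypothetical submitted values): with
# in_data == {"__root__workwindows": "C:/projects", ...} and
# "__rootnames__" == '["work"]', root_values becomes
# {"workwindows": "C:/projects"} and root_data ends up as
# {"work": {"windows": "C:/projects"}}.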
project_name = entities[0]["full_name"]
anatomy = Anatomy(project_name)
anatomy.templates_obj.save_project_overrides(project_name)
anatomy.roots_obj.save_project_overrides(
project_name, root_data, override=True
)
anatomy.reset()
# pop out info about creating project structure
create_proj_struct = in_data.pop(self.create_project_structure_key)
@ -269,94 +431,22 @@ class PrepareProject(BaseAction):
def create_project_specific_config(self, project_name, json_data):
self.log.debug("*** Creating project specifig configs ***")
path_proj_configs = os.environ.get('PYPE_PROJECT_CONFIGS', "")
# Skip if PYPE_PROJECT_CONFIGS is not set
# TODO show user OS message
if not path_proj_configs:
self.log.warning((
"Environment variable \"PYPE_PROJECT_CONFIGS\" is not set."
" Project specific config can't be set."
))
return
path_proj_configs = os.path.normpath(path_proj_configs)
# Skip if path does not exist
# TODO create if not exist?!!!
if not os.path.exists(path_proj_configs):
self.log.warning((
"Path set in Environment variable \"PYPE_PROJECT_CONFIGS\""
" Does not exist."
))
return
project_specific_path = os.path.normpath(
os.path.join(path_proj_configs, project_name)
)
project_specific_path = project_overrides_dir_path(project_name)
if not os.path.exists(project_specific_path):
os.makedirs(project_specific_path)
self.log.debug((
"Project specific config folder for project \"{}\" created."
).format(project_name))
# Anatomy ####################################
self.log.debug("--- Processing Anatomy Begins: ---")
anatomy_dir = os.path.normpath(os.path.join(
project_specific_path, "anatomy"
))
anatomy_path = os.path.normpath(os.path.join(
anatomy_dir, "default.yaml"
))
anatomy = None
if os.path.exists(anatomy_path):
self.log.debug(
"Anatomy file already exist. Trying to read: \"{}\"".format(
anatomy_path
)
)
# Try to load data
with open(anatomy_path, 'r') as file_stream:
try:
anatomy = yaml.load(file_stream, Loader=yaml.loader.Loader)
self.log.debug("Reading Anatomy file was successful")
except yaml.YAMLError as exc:
self.log.warning(
"Reading Yaml file failed: \"{}\"".format(anatomy_path),
exc_info=True
)
if not anatomy:
self.log.debug("Anatomy is not set. Duplicating default.")
# Create Anatomy folder
if not os.path.exists(anatomy_dir):
self.log.debug(
"Creating Anatomy folder: \"{}\"".format(anatomy_dir)
)
os.makedirs(anatomy_dir)
source_items = [
os.environ["PYPE_CONFIG"], "anatomy", "default.yaml"
]
source_path = os.path.normpath(os.path.join(*source_items))
with open(source_path, 'r') as file_stream:
source_data = file_stream.read()
with open(anatomy_path, 'w') as file_stream:
file_stream.write(source_data)
# Presets ####################################
self.log.debug("--- Processing Presets Begins: ---")
project_defaults_dir = os.path.normpath(os.path.join(*[
project_defaults_dir = os.path.normpath(os.path.join(
project_specific_path, "presets", "ftrack"
]))
project_defaults_path = os.path.normpath(os.path.join(*[
))
project_defaults_path = os.path.normpath(os.path.join(
project_defaults_dir, "project_defaults.json"
]))
))
# Create folder if not exist
if not os.path.exists(project_defaults_dir):
self.log.debug("Creating Ftrack Presets folder: \"{}\"".format(
@ -372,5 +462,4 @@ class PrepareProject(BaseAction):
def register(session, plugins_presets={}):
'''Register plugin. Called when used as an plugin.'''
PrepareProject(session, plugins_presets).register()

View file

@ -55,6 +55,8 @@ class SeedDebugProject(BaseAction):
# Define how much shots will be created for each sequence
default_shots_count = 10
max_entities_created_at_one_commit = 50
existing_projects = None
new_project_item = "< New Project >"
current_project_item = "< Current Project >"
@ -284,21 +286,28 @@ class SeedDebugProject(BaseAction):
int(asset_count / available_assets) +
(asset_count % available_assets > 0)
)
created_assets = 0
index = 0
created_entities = 0
to_create_length = asset_count + (asset_count * len(self.asset_tasks))
for _asset_name in self.assets:
if created_assets >= asset_count:
if created_entities >= to_create_length:
break
for asset_num in range(1, repetitive_times + 1):
if created_assets >= asset_count:
if created_entities >= asset_count:
break
asset_name = "%s_%02d" % (_asset_name, asset_num)
asset = self.session.create("AssetBuild", {
"name": asset_name,
"parent": main_entity
})
created_assets += 1
self.log.debug("- Assets/{}".format(asset_name))
created_entities += 1
index += 1
if self.temp_commit(index, created_entities, to_create_length):
index = 0
for task_name in self.asset_tasks:
self.session.create("Task", {
"name": task_name,
@ -309,7 +318,17 @@ class SeedDebugProject(BaseAction):
asset_name, task_name
))
created_entities += 1
index += 1
if self.temp_commit(
index, created_entities, to_create_length
):
index = 0
self.log.debug("*** Commiting Assets")
self.log.debug("Commiting entities. {}/{}".format(
created_entities, to_create_length
))
self.session.commit()
def create_shots(self, project, seq_count, shots_count):
@ -345,7 +364,14 @@ class SeedDebugProject(BaseAction):
})
self.log.debug("- Shots")
for seq_num in range(1, seq_count+1):
index = 0
created_entities = 0
to_create_length = (
seq_count
+ (seq_count * shots_count)
+ (seq_count * shots_count * len(self.shot_tasks))
)
for seq_num in range(1, seq_count + 1):
seq_name = "sq%03d" % seq_num
seq = self.session.create("Sequence", {
"name": seq_name,
@ -353,14 +379,24 @@ class SeedDebugProject(BaseAction):
})
self.log.debug("- Shots/{}".format(seq_name))
for shot_num in range(1, shots_count+1):
shot_name = "%ssh%04d" % (seq_name, (shot_num*10))
created_entities += 1
index += 1
if self.temp_commit(index, created_entities, to_create_length):
index = 0
for shot_num in range(1, shots_count + 1):
shot_name = "%ssh%04d" % (seq_name, (shot_num * 10))
shot = self.session.create("Shot", {
"name": shot_name,
"parent": seq
})
self.log.debug("- Shots/{}/{}".format(seq_name, shot_name))
created_entities += 1
index += 1
if self.temp_commit(index, created_entities, to_create_length):
index = 0
for task_name in self.shot_tasks:
self.session.create("Task", {
"name": task_name,
@ -371,9 +407,27 @@ class SeedDebugProject(BaseAction):
seq_name, shot_name, task_name
))
created_entities += 1
index += 1
if self.temp_commit(
index, created_entities, to_create_length
):
index = 0
self.log.debug("*** Commiting Shots")
self.log.debug("Commiting entities. {}/{}".format(
created_entities, to_create_length
))
self.session.commit()
def temp_commit(self, index, created_entities, to_create_length):
if index < self.max_entities_created_at_one_commit:
return False
self.log.debug("Commiting {} entities. {}/{}".format(
index, created_entities, to_create_length
))
self.session.commit()
return True
def register(session, plugins_presets={}):
'''Register plugin. Called when used as an plugin.'''

View file

@ -5,13 +5,11 @@ import json
from bson.objectid import ObjectId
from pype.ftrack import BaseAction
from pype.ftrack.lib import (
get_project_from_entity,
get_avalon_entities_for_assetversion
)
from pypeapp import Anatomy
from pype.ftrack.lib.io_nonsingleton import DbConnector
from pype.ftrack.lib.avalon_sync import CustAttrIdKey
class StoreThumbnailsToAvalon(BaseAction):
# Action identifier
@ -54,8 +52,44 @@ class StoreThumbnailsToAvalon(BaseAction):
})
session.commit()
project = self.get_project_from_entity(entities[0])
project_name = project["full_name"]
anatomy = Anatomy(project_name)
if "publish" not in anatomy.templates:
msg = "Anatomy does not have set publish key!"
action_job["status"] = "failed"
session.commit()
self.log.warning(msg)
return {
"success": False,
"message": msg
}
if "thumbnail" not in anatomy.templates["publish"]:
msg = (
"There is not set \"thumbnail\""
" template in Antomy for project \"{}\""
).format(project_name)
action_job["status"] = "failed"
session.commit()
self.log.warning(msg)
return {
"success": False,
"message": msg
}
thumbnail_roots = os.environ.get(self.thumbnail_key)
if not thumbnail_roots:
if (
"{thumbnail_root}" in anatomy.templates["publish"]["thumbnail"]
and not thumbnail_roots
):
msg = "`{}` environment is not set".format(self.thumbnail_key)
action_job["status"] = "failed"
@ -89,39 +123,6 @@ class StoreThumbnailsToAvalon(BaseAction):
"message": msg
}
project = get_project_from_entity(entities[0])
project_name = project["full_name"]
anatomy = Anatomy(project_name)
if "publish" not in anatomy.templates:
msg = "Anatomy does not have set publish key!"
action_job["status"] = "failed"
session.commit()
self.log.warning(msg)
return {
"success": False,
"message": msg
}
if "thumbnail" not in anatomy.templates["publish"]:
msg = (
"There is not set \"thumbnail\""
" template in Antomy for project \"{}\""
).format(project_name)
action_job["status"] = "failed"
session.commit()
self.log.warning(msg)
return {
"success": False,
"message": msg
}
example_template_data = {
"_id": "ID",
"thumbnail_root": "THUBMNAIL_ROOT",
@ -186,7 +187,7 @@ class StoreThumbnailsToAvalon(BaseAction):
).format(entity["id"]))
continue
avalon_ents_result = get_avalon_entities_for_assetversion(
avalon_ents_result = self.get_avalon_entities_for_assetversion(
entity, self.db_con
)
version_full_path = (
@ -345,6 +346,119 @@ class StoreThumbnailsToAvalon(BaseAction):
file_open.close()
return True
def get_avalon_entities_for_assetversion(self, asset_version, db_con):
output = {
"success": True,
"message": None,
"project": None,
"project_name": None,
"asset": None,
"asset_name": None,
"asset_path": None,
"subset": None,
"subset_name": None,
"version": None,
"version_name": None,
"representations": None
}
db_con.install()
ft_asset = asset_version["asset"]
subset_name = ft_asset["name"]
version = asset_version["version"]
parent = ft_asset["parent"]
ent_path = "/".join(
[ent["name"] for ent in parent["link"]]
)
project = self.get_project_from_entity(asset_version)
project_name = project["full_name"]
output["project_name"] = project_name
output["asset_name"] = parent["name"]
output["asset_path"] = ent_path
output["subset_name"] = subset_name
output["version_name"] = version
db_con.Session["AVALON_PROJECT"] = project_name
avalon_project = db_con.find_one({"type": "project"})
output["project"] = avalon_project
if not avalon_project:
output["success"] = False
output["message"] = (
"Project not synchronized to avalon `{}`".format(project_name)
)
return output
asset_ent = None
asset_mongo_id = parent["custom_attributes"].get(CustAttrIdKey)
if asset_mongo_id:
try:
asset_mongo_id = ObjectId(asset_mongo_id)
asset_ent = db_con.find_one({
"type": "asset",
"_id": asset_mongo_id
})
except Exception:
pass
if not asset_ent:
asset_ent = db_con.find_one({
"type": "asset",
"data.ftrackId": parent["id"]
})
output["asset"] = asset_ent
if not asset_ent:
output["success"] = False
output["message"] = (
"Not synchronized entity to avalon `{}`".format(ent_path)
)
return output
asset_mongo_id = asset_ent["_id"]
subset_ent = db_con.find_one({
"type": "subset",
"parent": asset_mongo_id,
"name": subset_name
})
output["subset"] = subset_ent
if not subset_ent:
output["success"] = False
output["message"] = (
"Subset `{}` does not exist under Asset `{}`"
).format(subset_name, ent_path)
return output
version_ent = db_con.find_one({
"type": "version",
"name": version,
"parent": subset_ent["_id"]
})
output["version"] = version_ent
if not version_ent:
output["success"] = False
output["message"] = (
"Version `{}` does not exist under Subset `{}` | Asset `{}`"
).format(version, subset_name, ent_path)
return output
repre_ents = list(db_con.find({
"type": "representation",
"parent": version_ent["_id"]
}))
output["representations"] = repre_ents
return output
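For orientation, a brief sketch of how the helper's output is typically consumed inside an action (objects are illustrative):

```python
result = self.get_avalon_entities_for_assetversion(asset_version, self.db_con)
if not result["success"]:
    self.log.warning(result["message"])       # e.g. project not synchronized
else:
    version_doc = result["version"]           # avalon version document
    repre_docs = result["representations"]    # representation documents
```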
def register(session, plugins_presets={}):
StoreThumbnailsToAvalon(session, plugins_presets).register()

View file

@ -1244,6 +1244,8 @@ class SyncToAvalonEvent(BaseEvent):
self.process_session, entity, hier_keys, defaults
)
for key, val in hier_values.items():
if key == CustAttrIdKey:
continue
output[key] = val
return output

View file

@ -2,57 +2,42 @@ from pype.ftrack import BaseEvent
class ThumbnailEvents(BaseEvent):
def launch(self, session, event):
'''just a testing event'''
"""Updates thumbnails of entities from new AssetVersion."""
# self.log.info(event)
# start of event procedure ----------------------------------
for entity in event['data'].get('entities', []):
for entity in event["data"].get("entities", []):
if (
entity["action"] == "remove"
or entity["entityType"].lower() != "assetversion"
or "thumbid" not in (entity.get("keys") or [])
):
continue
# update created task thumbnail with first parent thumbnail
if entity['entityType'] == 'task' and entity['action'] == 'add':
version = session.get("AssetVersion", entity["entityId"])
if not version:
continue
task = session.get('TypedContext', entity['entityId'])
parent = task['parent']
thumbnail = version.get("thumbnail")
if not thumbnail:
continue
if parent.get('thumbnail') and not task.get('thumbnail'):
task['thumbnail'] = parent['thumbnail']
self.log.info('>>> Updated thumbnail on [ %s/%s ]'.format(
parent['name'], task['name']
))
parent = version["asset"]["parent"]
task = version["task"]
parent["thumbnail_id"] = version["thumbnail_id"]
if parent.entity_type.lower() == "project":
name = parent["full_name"]
else:
name = parent["name"]
# Update task thumbnail from published version
# if (entity['entityType'] == 'assetversion' and
# entity['action'] == 'encoded'):
elif (
entity['entityType'] == 'assetversion' and
entity['action'] != 'remove' and
'thumbid' in (entity.get('keys') or [])
):
task_msg = ""
if task:
task["thumbnail_id"] = version["thumbnail_id"]
task_msg = " and task [ {} ]".format(task["name"])
version = session.get('AssetVersion', entity['entityId'])
if not version:
continue
thumbnail = version.get('thumbnail')
if not thumbnail:
continue
parent = version['asset']['parent']
task = version['task']
parent['thumbnail_id'] = version['thumbnail_id']
if parent.entity_type.lower() == "project":
name = parent["full_name"]
else:
name = parent["name"]
msg = '>>> Updating thumbnail for shot [ {} ]'.format(name)
if task:
task['thumbnail_id'] = version['thumbnail_id']
msg += " and task [ {} ]".format(task["name"])
self.log.info(msg)
self.log.info(">>> Updating thumbnail for shot [ {} ]{}".format(
name, task_msg
))
try:
session.commit()
@ -61,5 +46,4 @@ class ThumbnailEvents(BaseEvent):
def register(session, plugins_presets):
'''Register plugin. Called when used as an plugin.'''
ThumbnailEvents(session, plugins_presets).register()

View file

@ -158,20 +158,10 @@ class UserAssigmentEvent(BaseEvent):
"""
project_name = task['project']['full_name']
project_code = task['project']['name']
try:
root = os.environ['PYPE_STUDIO_PROJECTS_PATH']
except KeyError:
msg = 'Project ({}) root not set'.format(project_name)
self.log.error(msg)
return {
'success': False,
'message': msg
}
# fill in template data
asset = self._get_asset(task)
t_data = {
'root': root,
'project': {
'name': project_name,
'code': project_code
@ -204,11 +194,12 @@ class UserAssigmentEvent(BaseEvent):
data = self._get_template_data(task)
# format directories to pass to shell script
anatomy = Anatomy(data["project"]["name"])
anatomy_filled = anatomy.format(data)
# formatting work dir is easiest part as we can use whole path
work_dir = anatomy.format(data)['avalon']['work']
work_dir = anatomy_filled["work"]["folder"]
# we also need publish but not whole
filled_all = anatomy.format_all(data)
publish = filled_all['avalon']['publish']
anatomy_filled.strict = False
publish = anatomy_filled["publish"]["folder"]
# now find path to {asset}
m = re.search("(^.+?{})".format(data['asset']),

View file

@ -1,11 +1,15 @@
from . import avalon_sync
from . import credentials
from .ftrack_app_handler import *
from .ftrack_event_handler import *
from .ftrack_action_handler import *
from .ftrack_base_handler import *
from .ftrack_base_handler import BaseHandler
from .ftrack_event_handler import BaseEvent
from .ftrack_action_handler import BaseAction
from .ftrack_app_handler import AppAction
from .lib import (
get_project_from_entity,
get_avalon_entities_for_assetversion
)
__all__ = [
"avalon_sync",
"credentials",
"BaseHandler",
"BaseEvent",
"BaseAction",
"AppAction"
]

View file

@ -291,6 +291,8 @@ class SyncEntitiesFactory:
self.filtered_ids = []
self.not_selected_ids = []
self.hier_cust_attr_ids_by_key = {}
self._ent_paths_by_ftrack_id = {}
self.ftrack_avalon_mapper = None
@ -690,7 +692,6 @@ class SyncEntitiesFactory:
ent_type["name"]: ent_type["id"] for ent_type in ent_types
}
attrs = set()
# store default values per entity type
attrs_per_entity_type = collections.defaultdict(dict)
avalon_attrs = collections.defaultdict(dict)
@ -698,9 +699,10 @@ class SyncEntitiesFactory:
attrs_per_entity_type_ca_id = collections.defaultdict(dict)
avalon_attrs_ca_id = collections.defaultdict(dict)
attribute_key_by_id = {}
for cust_attr in custom_attrs:
key = cust_attr["key"]
attrs.add(cust_attr["id"])
attribute_key_by_id[cust_attr["id"]] = key
ca_ent_type = cust_attr["entity_type"]
if key.startswith("avalon_"):
if ca_ent_type == "show":
@ -774,7 +776,7 @@ class SyncEntitiesFactory:
"\"{}\"".format(id) for id in sync_ids
])
attributes_joined = ", ".join([
"\"{}\"".format(name) for name in attrs
"\"{}\"".format(attr_id) for attr_id in attribute_key_by_id.keys()
])
cust_attr_query = (
@ -792,13 +794,13 @@ class SyncEntitiesFactory:
else:
[values] = self.session._call(call_expr)
for value in values["data"]:
entity_id = value["entity_id"]
key = value["configuration"]["key"]
for item in values["data"]:
entity_id = item["entity_id"]
key = attribute_key_by_id[item["configuration_id"]]
store_key = "custom_attributes"
if key.startswith("avalon_"):
store_key = "avalon_attrs"
self.entities_dict[entity_id][store_key][key] = value["value"]
self.entities_dict[entity_id][store_key][key] = item["value"]
# process hierarchical attributes
self.set_hierarchical_attribute(hier_attrs, sync_ids)
@ -812,6 +814,7 @@ class SyncEntitiesFactory:
key = attr["key"]
attribute_key_by_id[attr["id"]] = key
attributes_by_key[key] = attr
self.hier_cust_attr_ids_by_key[key] = attr["id"]
store_key = "hier_attrs"
if key.startswith("avalon_"):
@ -821,6 +824,21 @@ class SyncEntitiesFactory:
attr["default"]
)
# Add attribute ids to entities dictionary
avalon_attribute_id_by_key = {
attr_key: attr_id
for attr_id, attr_key in attribute_key_by_id.items()
if attr_key.startswith("avalon_")
}
for entity_id in self.entities_dict.keys():
if "avalon_attrs_id" not in self.entities_dict[entity_id]:
self.entities_dict[entity_id]["avalon_attrs_id"] = {}
for attr_key, attr_id in avalon_attribute_id_by_key.items():
self.entities_dict[entity_id]["avalon_attrs_id"][attr_key] = (
attr_id
)
# Prepare dict with all hier keys and None values
prepare_dict = {}
prepare_dict_avalon = {}
@ -842,32 +860,34 @@ class SyncEntitiesFactory:
entity_ids_joined = ", ".join([
"\"{}\"".format(id) for id in sync_ids
])
attributes_joined = ", ".join([
"\"{}\"".format(attr_id) for attr_id in attribute_key_by_id.keys()
])
avalon_hier = []
for configuration_id in attribute_key_by_id.keys():
call_expr = [{
"action": "query",
"expression": (
"select value, entity_id from CustomAttributeValue "
"where entity_id in ({}) and configuration_id is \"{}\""
).format(entity_ids_joined, configuration_id)
}]
if hasattr(self.session, "call"):
[values] = self.session.call(call_expr)
else:
[values] = self.session._call(call_expr)
call_expr = [{
"action": "query",
"expression": (
"select value, entity_id from ContextCustomAttributeValue "
"where entity_id in ({}) and configuration_id in ({})"
).format(entity_ids_joined, attributes_joined)
}]
if hasattr(self.session, "call"):
[values] = self.session.call(call_expr)
else:
[values] = self.session._call(call_expr)
for value in values["data"]:
if value["value"] is None:
continue
entity_id = value["entity_id"]
key = attribute_key_by_id[value["configuration_id"]]
if key.startswith("avalon_"):
store_key = "avalon_attrs"
avalon_hier.append(key)
else:
store_key = "hier_attrs"
self.entities_dict[entity_id][store_key][key] = value["value"]
for item in values["data"]:
value = item["value"]
if value is None:
continue
entity_id = item["entity_id"]
key = attribute_key_by_id[item["configuration_id"]]
if key.startswith("avalon_"):
store_key = "avalon_attrs"
avalon_hier.append(key)
else:
store_key = "hier_attrs"
self.entities_dict[entity_id][store_key][key] = value
# Get dictionary with not None hierarchical values to pull to childs
top_id = self.ft_project_id
@ -877,6 +897,8 @@ class SyncEntitiesFactory:
project_values[key] = value
for key in avalon_hier:
if key == CustAttrIdKey:
continue
value = self.entities_dict[top_id]["avalon_attrs"][key]
if value is not None:
project_values[key] = value
@ -1593,9 +1615,16 @@ class SyncEntitiesFactory:
if current_id != new_id_str:
# store mongo id to ftrack entity
configuration_id = self.entities_dict[ftrack_id][
"avalon_attrs_id"
][CustAttrIdKey]
configuration_id = self.hier_cust_attr_ids_by_key.get(
CustAttrIdKey
)
if not configuration_id:
# NOTE this is for cases when CustAttrIdKey key is not
# hierarchical custom attribute but per entity type
configuration_id = self.entities_dict[ftrack_id][
"avalon_attrs_id"
][CustAttrIdKey]
_entity_key = collections.OrderedDict({
"configuration_id": configuration_id,
"entity_id": ftrack_id
@ -1712,14 +1741,10 @@ class SyncEntitiesFactory:
except InvalidId:
new_id = ObjectId()
project_name = self.entities_dict[self.ft_project_id]["name"]
project_item["_id"] = new_id
project_item["parent"] = None
project_item["schema"] = EntitySchemas["project"]
project_item["config"]["schema"] = EntitySchemas["config"]
project_item["config"]["template"] = (
get_avalon_project_template(project_name)
)
self.ftrack_avalon_mapper[self.ft_project_id] = new_id
self.avalon_ftrack_mapper[new_id] = self.ft_project_id

View file

@ -23,17 +23,13 @@ class BaseAction(BaseHandler):
def __init__(self, session, plugins_presets={}):
'''Expects a ftrack_api.Session instance'''
super().__init__(session, plugins_presets)
if self.label is None:
raise ValueError(
'Action missing label.'
)
raise ValueError('Action missing label.')
elif self.identifier is None:
raise ValueError(
'Action missing identifier.'
)
if self.identifier is None:
raise ValueError('Action missing identifier.')
super().__init__(session, plugins_presets)
def register(self):
'''
@ -61,66 +57,131 @@ class BaseAction(BaseHandler):
self._launch
)
def _launch(self, event):
args = self._translate_event(
self.session, event
def _discover(self, event):
entities = self._translate_event(event)
accepts = self.discover(self.session, entities, event)
if not accepts:
return
self.log.debug(u'Discovering action with selection: {0}'.format(
event['data'].get('selection', [])
))
return {
'items': [{
'label': self.label,
'variant': self.variant,
'description': self.description,
'actionIdentifier': self.identifier,
'icon': self.icon,
}]
}
def discover(self, session, entities, event):
'''Return true if we can handle the selected entities.
*session* is a `ftrack_api.Session` instance
*entities* is a list of tuples each containing the entity type and the
entity id. If the entity is a hierarchical you will always get the
entity type TypedContext, once retrieved through a get operation you
will have the "real" entity type ie. example Shot, Sequence
or Asset Build.
*event* the unmodified original event
'''
return False
def _interface(self, session, entities, event):
interface = self.interface(session, entities, event)
if not interface:
return
if isinstance(interface, (tuple, list)):
return {"items": interface}
if isinstance(interface, dict):
if (
"items" in interface
or ("success" in interface and "message" in interface)
):
return interface
raise ValueError((
"Invalid interface output expected key: \"items\" or keys:"
" \"success\" and \"message\". Got: \"{}\""
).format(str(interface)))
raise ValueError(
"Invalid interface output type \"{}\"".format(
str(type(interface))
)
)
def interface(self, session, entities, event):
'''Return a interface if applicable or None
*session* is a `ftrack_api.Session` instance
*entities* is a list of tuples each containing the entity type and
the entity id. If the entity is a hierarchical you will always get the
entity type TypedContext, once retrieved through a get operation you
will have the "real" entity type ie. example Shot, Sequence
or Asset Build.
*event* the unmodified original event
'''
return None
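For example, a subclass could return a simple form from `interface` (field names invented); `_interface` wraps a plain list into the expected `{"items": ...}` payload:

```python
def interface(self, session, entities, event):
    # Only show the form before the user has submitted values.
    if event["data"].get("values"):
        return None
    return [
        {"type": "label", "value": "<h3>Add a note</h3>"},
        {"name": "note", "type": "text", "value": ""}
    ]
```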
def _launch(self, event):
entities = self._translate_event(event)
preactions_launched = self._handle_preactions(self.session, event)
if preactions_launched is False:
return
interface = self._interface(
self.session, *args
self.session, entities, event
)
if interface:
return interface
response = self.launch(
self.session, *args
self.session, entities, event
)
return self._handle_result(
self.session, response, *args
)
return self._handle_result(response)
def _handle_result(self, session, result, entities, event):
def _handle_result(self, result):
'''Validate the returned result from the action callback'''
if isinstance(result, bool):
if result is True:
result = {
'success': result,
'message': (
'{0} launched successfully.'.format(self.label)
)
}
msg = 'Action {0} finished.'
else:
result = {
'success': result,
'message': (
'{0} launch failed.'.format(self.label)
)
}
msg = 'Action {0} failed.'
elif isinstance(result, dict):
return {
'success': result,
'message': msg.format(self.label)
}
if isinstance(result, dict):
if 'items' in result:
items = result['items']
if not isinstance(items, list):
if not isinstance(result['items'], list):
raise ValueError('Invalid items format, must be list!')
else:
for key in ('success', 'message'):
if key in result:
continue
if key not in result:
raise KeyError('Missing required key: {0}.'.format(key))
return result
raise KeyError(
'Missing required key: {0}.'.format(key)
)
else:
self.log.error(
'Invalid result type must be bool or dictionary!'
)
self.log.warning((
'Invalid result type \"{}\" must be bool or dictionary!'
).format(str(type(result))))
return result
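Summarized, `launch` may return any of these shapes (illustrative values); `_handle_result` converts the boolean forms into message dictionaries and passes valid dictionaries through:

```python
result_ok = True                                     # -> "Action ... finished."
result_fail = False                                  # -> "Action ... failed."
result_dict = {"success": True, "message": "Done."}  # passed through as-is
result_form = {"items": [{"type": "label", "value": "Hi"}]}
```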

View file

@ -1,44 +1,36 @@
import os
import sys
import copy
import platform
from avalon import lib as avalonlib
import avalon.lib
import acre
from pype import api as pype
from pype import lib as pypelib
from pypeapp import config
from .ftrack_base_handler import BaseHandler
from .ftrack_action_handler import BaseAction
from pypeapp import Anatomy
class AppAction(BaseHandler):
'''Custom Action base class
class AppAction(BaseAction):
"""Application Action class.
<label> - a descriptive string identifing your action.
<varaint> - To group actions together, give them the same
label and specify a unique variant per action.
<identifier> - a unique identifier for app.
<description> - a verbose descriptive text for you action
<icon> - icon in ftrack
'''
Args:
session (ftrack_api.Session): Session where action will be registered.
label (str): A descriptive string identifying your action.
variant (str, optional): To group actions together, give them the same
label and specify a unique variant per action.
identifier (str): A unique identifier for the app.
description (str): A verbose descriptive text for your action.
icon (str): Url path to icon which will be shown in Ftrack web.
"""
type = 'Application'
preactions = ['start.timer']
type = "Application"
preactions = ["start.timer"]
def __init__(
self, session, label, name, executable, variant=None,
icon=None, description=None, preactions=[], plugins_presets={}
):
super().__init__(session, plugins_presets)
'''Expects a ftrack_api.Session instance'''
if label is None:
raise ValueError('Action missing label.')
elif name is None:
raise ValueError('Action missing identifier.')
elif executable is None:
raise ValueError('Action missing executable.')
self.label = label
self.identifier = name
self.executable = executable
@ -47,11 +39,19 @@ class AppAction(BaseHandler):
self.description = description
self.preactions.extend(preactions)
super().__init__(session, plugins_presets)
if label is None:
raise ValueError("Action missing label.")
if name is None:
raise ValueError("Action missing identifier.")
if executable is None:
raise ValueError("Action missing executable.")
def register(self):
'''Registers the action, subscribing the discover and launch topics.'''
"""Registers the action, subscribing the discover and launch topics."""
discovery_subscription = (
'topic=ftrack.action.discover and source.user.username={0}'
"topic=ftrack.action.discover and source.user.username={0}"
).format(self.session.api_user)
self.session.event_hub.subscribe(
@ -61,9 +61,9 @@ class AppAction(BaseHandler):
)
launch_subscription = (
'topic=ftrack.action.launch'
' and data.actionIdentifier={0}'
' and source.user.username={1}'
"topic=ftrack.action.launch"
" and data.actionIdentifier={0}"
" and source.user.username={1}"
).format(
self.identifier,
self.session.api_user
@ -74,7 +74,61 @@ class AppAction(BaseHandler):
)
def discover(self, session, entities, event):
'''Return true if we can handle the selected entities.
"""Return true if we can handle the selected entities.
Args:
session (ftrack_api.Session): Helps to query necessary data.
entities (list): Object of selected entities.
event (ftrack_api.Event): Ftrack event causing discover callback.
"""
if (
len(entities) != 1
or entities[0].entity_type.lower() != 'task'
):
return False
entity = entities[0]
if entity["parent"].entity_type.lower() == "project":
return False
ft_project = self.get_project_from_entity(entity)
database = pypelib.get_avalon_database()
project_name = ft_project["full_name"]
avalon_project = database[project_name].find_one({
"type": "project"
})
if not avalon_project:
return False
project_apps = avalon_project["config"].get("apps", [])
apps = [app["name"] for app in project_apps]
if self.identifier in apps:
return True
return False
def _launch(self, event):
entities = self._translate_event(event)
preactions_launched = self._handle_preactions(
self.session, event
)
if preactions_launched is False:
return
response = self.launch(self.session, entities, event)
return self._handle_result(response)
def launch(self, session, entities, event):
"""Callback method for the custom action.
return either a bool (True if successful or False if the action failed)
or a dictionary with the keys `message` and `success`; the message
should be a string and will be displayed as feedback to the user,
success should be a bool, True if successful or False if the action
failed.
*session* is a `ftrack_api.Session` instance
@ -85,185 +139,91 @@ class AppAction(BaseHandler):
or Asset Build.
*event* the unmodified original event
'''
if (
len(entities) != 1 or
entities[0].entity_type.lower() != 'task'
):
return False
if entities[0]['parent'].entity_type.lower() == 'project':
return False
ft_project = entities[0]['project']
database = pypelib.get_avalon_database()
project_name = ft_project['full_name']
avalon_project = database[project_name].find_one({
"type": "project"
})
if avalon_project is None:
return False
else:
apps = [app['name'] for app in avalon_project['config'].get(
'apps', []
)]
if self.identifier not in apps:
return False
return True
def _launch(self, event):
args = self._translate_event(
self.session, event
)
preactions_launched = self._handle_preactions(
self.session, event
)
if preactions_launched is False:
return
response = self.launch(
self.session, *args
)
return self._handle_result(
self.session, response, *args
)
def launch(self, session, entities, event):
'''Callback method for the custom action.
return either a bool ( True if successful or False if the action failed )
or a dictionary with they keys `message` and `success`, the message should be a
string and will be displayed as feedback to the user, success should be a bool,
True if successful or False if the action failed.
*session* is a `ftrack_api.Session` instance
*entities* is a list of tuples each containing the entity type and the entity id.
If the entity is a hierarchical you will always get the entity
type TypedContext, once retrieved through a get operation you
will have the "real" entity type ie. example Shot, Sequence
or Asset Build.
*event* the unmodified original event
'''
"""
entity = entities[0]
project_name = entity['project']['full_name']
project_name = entity["project"]["full_name"]
database = pypelib.get_avalon_database()
# Get current environments
env_list = [
'AVALON_PROJECT',
'AVALON_SILO',
'AVALON_ASSET',
'AVALON_TASK',
'AVALON_APP',
'AVALON_APP_NAME'
]
env_origin = {}
for env in env_list:
env_origin[env] = os.environ.get(env, None)
# set environments for Avalon
os.environ["AVALON_PROJECT"] = project_name
os.environ["AVALON_SILO"] = entity['ancestors'][0]['name']
os.environ["AVALON_ASSET"] = entity['parent']['name']
os.environ["AVALON_TASK"] = entity['name']
os.environ["AVALON_APP"] = self.identifier.split("_")[0]
os.environ["AVALON_APP_NAME"] = self.identifier
anatomy = Anatomy()
asset_name = entity["parent"]["name"]
asset_document = database[project_name].find_one({
"type": "asset",
"name": asset_name
})
hierarchy = ""
parents = database[project_name].find_one({
"type": 'asset',
"name": entity['parent']['name']
})['data']['parents']
if parents:
hierarchy = os.path.join(*parents)
os.environ["AVALON_HIERARCHY"] = hierarchy
application = avalonlib.get_application(os.environ["AVALON_APP_NAME"])
asset_doc_parents = asset_document["data"].get("parents")
if len(asset_doc_parents) > 0:
hierarchy = os.path.join(*asset_doc_parents)
application = avalon.lib.get_application(self.identifier)
data = {
"root": os.environ.get("PYPE_STUDIO_PROJECTS_MOUNT"),
"project": {
"name": entity['project']['full_name'],
"code": entity['project']['name']
"name": entity["project"]["full_name"],
"code": entity["project"]["name"]
},
"task": entity['name'],
"asset": entity['parent']['name'],
"task": entity["name"],
"asset": asset_name,
"app": application["application_dir"],
"hierarchy": hierarchy,
"hierarchy": hierarchy
}
av_project = database[project_name].find_one({"type": 'project'})
templates = None
if av_project:
work_template = av_project.get('config', {}).get('template', {}).get(
'work', None
)
work_template = None
try:
work_template = work_template.format(**data)
except Exception:
try:
anatomy = anatomy.format(data)
work_template = anatomy["work"]["folder"]
anatomy = Anatomy(project_name)
anatomy_filled = anatomy.format(data)
workdir = os.path.normpath(anatomy_filled["work"]["folder"])
except Exception as exc:
msg = "{} Error in anatomy.format: {}".format(
__name__, str(exc)
)
self.log.error(msg, exc_info=True)
return {
'success': False,
'message': msg
}
except Exception as exc:
msg = "Error in anatomy.format: {}".format(
str(exc)
)
self.log.error(msg, exc_info=True)
return {
"success": False,
"message": msg
}
workdir = os.path.normpath(work_template)
os.environ["AVALON_WORKDIR"] = workdir
try:
os.makedirs(workdir)
except FileExistsError:
pass
# set environments for Avalon
prep_env = copy.deepcopy(os.environ)
prep_env.update({
"AVALON_PROJECT": project_name,
"AVALON_ASSET": asset_name,
"AVALON_TASK": entity["name"],
"AVALON_APP": self.identifier.split("_")[0],
"AVALON_APP_NAME": self.identifier,
"AVALON_HIERARCHY": hierarchy,
"AVALON_WORKDIR": workdir
})
prep_env.update(anatomy.roots_obj.root_environments())
# collect all parents from the task
parents = []
for item in entity['link']:
parents.append(session.get(item['type'], item['id']))
# collect all the 'environment' attributes from parents
tools_attr = [os.environ["AVALON_APP"], os.environ["AVALON_APP_NAME"]]
for parent in reversed(parents):
# check if the attribute is empty, if not use it
if parent['custom_attributes']['tools_env']:
tools_attr.extend(parent['custom_attributes']['tools_env'])
break
tools_attr = [prep_env["AVALON_APP"], prep_env["AVALON_APP_NAME"]]
tools_env = asset_document["data"].get("tools_env") or []
tools_attr.extend(tools_env)
tools_env = acre.get_tools(tools_attr)
env = acre.compute(tools_env)
env = acre.merge(env, current_env=dict(os.environ))
env = acre.append(dict(os.environ), env)
env = acre.merge(env, current_env=dict(prep_env))
env = acre.append(dict(prep_env), env)
# Get path to execute
st_temp_path = os.environ['PYPE_CONFIG']
st_temp_path = os.environ["PYPE_CONFIG"]
os_plat = platform.system().lower()
# Path to folder with launchers
path = os.path.join(st_temp_path, 'launchers', os_plat)
path = os.path.join(st_temp_path, "launchers", os_plat)
# Full path to executable launcher
execfile = None
@ -280,94 +240,92 @@ class AppAction(BaseHandler):
}
if sys.platform == "win32":
for ext in os.environ["PATHEXT"].split(os.pathsep):
fpath = os.path.join(path.strip('"'), self.executable + ext)
if os.path.isfile(fpath) and os.access(fpath, os.X_OK):
execfile = fpath
break
pass
# Run SW if executable was found
if execfile is not None:
# Store subprocess to a variable. This is due to a Blender launch
# bug. Please make sure Blender >=2.81 can be launched before
# removing the `_popen` variable.
_popen = avalonlib.launch(
executable=execfile, args=[], environment=env
)
else:
if execfile is None:
return {
'success': False,
'message': "We didn't found launcher for {0}"
.format(self.label)
"success": False,
"message": "We didn't find launcher for {0}".format(
self.label
)
}
elif (sys.platform.startswith("linux")
        or sys.platform.startswith("darwin")):
    execfile = os.path.join(path.strip('"'), self.executable)
    if not os.path.isfile(execfile):
        msg = "Launcher doesn't exist - {}".format(execfile)
        self.log.error(msg)
        return {
            "success": False,
            "message": msg
        }

    try:
        fp = open(execfile)
    except PermissionError as perm_exc:
        msg = "Access denied on launcher {} - {}".format(
            execfile, perm_exc
        )
        self.log.exception(msg, exc_info=True)
        return {
            "success": False,
            "message": msg
        }
    fp.close()

    # check executable permission
    if not os.access(execfile, os.X_OK):
        msg = "No executable permission - {}".format(execfile)
        self.log.error(msg)
        return {
            "success": False,
            "message": msg
        }

    # Store subprocess to a variable. This is due to a Blender launch
    # bug. Please make sure Blender >=2.81 can be launched before
    # removing the `popen` variable.
    popen = avalon.lib.launch(  # noqa: F841
        "/usr/bin/env", args=["bash", execfile], environment=env
    )
# Change status of task to In progress
presets = config.get_presets()["ftrack"]["ftrack_config"]
if "status_update" in presets:
    statuses = presets["status_update"]
    actual_status = entity["status"]["name"].lower()
    already_tested = []
    ent_path = "/".join(
        [ent["name"] for ent in entity["link"]]
    )
while True:
    next_status_name = None
    for key, value in statuses.items():
        if key in already_tested:
            continue
        if actual_status in value or "_any_" in value:
            if key != "_ignore_":
                next_status_name = key
            already_tested.append(key)
            break
@ -377,12 +335,12 @@ class AppAction(BaseHandler):
break
try:
    query = "Status where name is \"{}\"".format(
        next_status_name
    )
    status = session.query(query).one()
    entity["status"] = status
    session.commit()
self.log.debug("Changing status to \"{}\" <{}>".format(
next_status_name, ent_path
@ -392,18 +350,12 @@ class AppAction(BaseHandler):
except Exception:
session.rollback()
msg = (
    "Status \"{}\" in presets wasn't found"
    " on Ftrack entity type \"{}\""
).format(next_status_name, entity.entity_type)
self.log.warning(msg)
# Set origin avalon environments
for key, value in env_origin.items():
if value is None:
value = ""
os.environ[key] = value
return {
    "success": True,
    "message": "Launching {0}".format(self.label)
}

View file

@ -192,50 +192,10 @@ class BaseHandler(object):
raise NotImplementedError()
def _discover(self, event):
items = {
'items': [{
'label': self.label,
'variant': self.variant,
'description': self.description,
'actionIdentifier': self.identifier,
'icon': self.icon,
}]
}
args = self._translate_event(
self.session, event
)
accepts = self.discover(
self.session, *args
)
if accepts is True:
self.log.debug(u'Discovering action with selection: {0}'.format(
event['data'].get('selection', [])))
return items
def discover(self, session, entities, event):
'''Return true if we can handle the selected entities.
*session* is a `ftrack_api.Session` instance
*entities* is a list of tuples each containing the entity type and the entity id.
If the entity is a hierarchical you will always get the entity
type TypedContext, once retrieved through a get operation you
will have the "real" entity type ie. example Shot, Sequence
or Asset Build.
*event* the unmodified original event
'''
return False
def _translate_event(self, session, event):
def _translate_event(self, event, session=None):
'''Return *event* translated structure to be used with the API.'''
if session is None:
session = self.session
_entities = event['data'].get('entities_object', None)
if (
@ -245,25 +205,40 @@ class BaseHandler(object):
) == ftrack_api.symbol.NOT_SET
):
_entities = self._get_entities(event)
event['data']['entities_object'] = _entities
return _entities
def _get_entities(self, event, session=None, ignore=None):
    entities = []
    selection = event['data'].get('selection')
    if not selection:
        return entities

    if ignore is None:
        ignore = []
    elif isinstance(ignore, str):
        ignore = [ignore]

    filtered_selection = []
    for entity in selection:
        if entity['entityType'] not in ignore:
            filtered_selection.append(entity)

    if not filtered_selection:
        return entities

    if session is None:
        session = self.session
        session._local_cache.clear()

    for entity in filtered_selection:
        entities.append(session.get(
            self._get_entity_type(entity, session),
            entity.get('entityId')
        ))
    return entities
def _get_entity_type(self, entity, session=None):
'''Return translated entity type that can be used with API.'''
@ -292,30 +267,12 @@ class BaseHandler(object):
)
def _launch(self, event):
    self.session.rollback()
    self.session._local_cache.clear()

    preactions_launched = self._handle_preactions(self.session, event)
    if preactions_launched is False:
        return

    self.launch(self.session, event)
def launch(self, session, entities, event):
def launch(self, session, event):
'''Callback method for the custom action.
return either a bool (True if successful, False if the action failed)
@ -360,35 +317,7 @@ class BaseHandler(object):
return False
def _interface(self, *args):
interface = self.interface(*args)
if interface:
if (
'items' in interface or
('success' in interface and 'message' in interface)
):
return interface
return {
'items': interface
}
def interface(self, session, entities, event):
'''Return an interface if applicable or None
*session* is a `ftrack_api.Session` instance
*entities* is a list of tuples each containing the entity type and the entity id.
If the entity is a hierarchical you will always get the entity
type TypedContext, once retrieved through a get operation you
will have the "real" entity type ie. example Shot, Sequence
or Asset Build.
*event* the unmodified original event
'''
return None
def _handle_result(self, session, result, entities, event):
def _handle_result(self, result):
'''Validate the returned result from the action callback'''
if isinstance(result, bool):
if result is True:
@ -417,11 +346,6 @@ class BaseHandler(object):
'Missing required key: {0}.'.format(key)
)
else:
self.log.error(
'Invalid result type must be bool or dictionary!'
)
return result
def show_message(self, event, input_message, result=False):
@ -623,3 +547,28 @@ class BaseHandler(object):
self.log.debug((
"Publishing event: {}"
).format(str(event.__dict__)))
def get_project_from_entity(self, entity):
low_entity_type = entity.entity_type.lower()
if low_entity_type == "project":
return entity
if "project" in entity:
# reviewsession, task(Task, Shot, Sequence,...)
return entity["project"]
if low_entity_type == "filecomponent":
entity = entity["version"]
low_entity_type = entity.entity_type.lower()
if low_entity_type == "assetversion":
asset = entity["asset"]
if asset:
parent = asset["parent"]
if parent:
return parent["project"]
project_data = entity["link"][0]
return self.session.query(
"Project where id is {}".format(project_data["id"])
).one()
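A rough usage sketch of the helper above (entity values are hypothetical; it assumes the handler holds a connected ftrack session):

    task = self.session.query("Task").first()
    project = self.get_project_from_entity(task)
    self.log.info(project["full_name"])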

View file

@ -43,35 +43,10 @@ class BaseEvent(BaseHandler):
priority=self.priority
)
def _launch(self, event):
self.session.rollback()
self.session._local_cache.clear()
self.launch(self.session, event)
def _translate_event(self, session, event):
def _translate_event(self, event, session=None):
'''Return *event* translated structure to be used with the API.'''
return [
self._get_entities(session, event),
event
]
def _get_entities(
self, session, event, ignore=['socialfeed', 'socialnotification']
):
_selection = event['data'].get('entities', [])
_entities = list()
if isinstance(ignore, str):
ignore = list(ignore)
for entity in _selection:
if entity['entityType'] in ignore:
continue
_entities.append(
(
session.get(
self._get_entity_type(entity),
entity.get('entityId')
)
)
)
return _entities
return self._get_entities(
event,
session,
ignore=['socialfeed', 'socialnotification']
)
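In effect, selections coming from social widgets never reach the handlers. A sketch of the filtering with a hypothetical event payload:

    event['data']['entities'] = [
        {'entityType': 'socialfeed', 'entityId': 'aaa111'},  # dropped by ignore list
        {'entityType': 'task', 'entityId': 'bbb222'}  # resolved via session.get
    ]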

View file

@ -1,135 +0,0 @@
from bson.objectid import ObjectId
from .avalon_sync import CustAttrIdKey
import avalon.io
def get_project_from_entity(entity):
# TODO add more entities
ent_type_lowered = entity.entity_type.lower()
if ent_type_lowered == "project":
return entity
elif ent_type_lowered == "assetversion":
return entity["asset"]["parent"]["project"]
elif "project" in entity:
return entity["project"]
return None
def get_avalon_entities_for_assetversion(asset_version, db_con=None):
output = {
"success": True,
"message": None,
"project": None,
"project_name": None,
"asset": None,
"asset_name": None,
"asset_path": None,
"subset": None,
"subset_name": None,
"version": None,
"version_name": None,
"representations": None
}
if db_con is None:
db_con = avalon.io
db_con.install()
ft_asset = asset_version["asset"]
subset_name = ft_asset["name"]
version = asset_version["version"]
parent = ft_asset["parent"]
ent_path = "/".join(
[ent["name"] for ent in parent["link"]]
)
project = get_project_from_entity(asset_version)
project_name = project["full_name"]
output["project_name"] = project_name
output["asset_name"] = parent["name"]
output["asset_path"] = ent_path
output["subset_name"] = subset_name
output["version_name"] = version
db_con.Session["AVALON_PROJECT"] = project_name
avalon_project = db_con.find_one({"type": "project"})
output["project"] = avalon_project
if not avalon_project:
output["success"] = False
output["message"] = "Project not synchronized to avalon `{}`".format(
project_name
)
return output
asset_ent = None
asset_mongo_id = parent["custom_attributes"].get(CustAttrIdKey)
if asset_mongo_id:
try:
asset_mongo_id = ObjectId(asset_mongo_id)
asset_ent = db_con.find_one({
"type": "asset",
"_id": asset_mongo_id
})
except Exception:
pass
if not asset_ent:
asset_ent = db_con.find_one({
"type": "asset",
"data.ftrackId": parent["id"]
})
output["asset"] = asset_ent
if not asset_ent:
output["success"] = False
output["message"] = "Not synchronized entity to avalon `{}`".format(
ent_path
)
return output
asset_mongo_id = asset_ent["_id"]
subset_ent = db_con.find_one({
"type": "subset",
"parent": asset_mongo_id,
"name": subset_name
})
output["subset"] = subset_ent
if not subset_ent:
output["success"] = False
output["message"] = (
"Subset `{}` does not exist under Asset `{}`"
).format(subset_name, ent_path)
return output
version_ent = db_con.find_one({
"type": "version",
"name": version,
"parent": subset_ent["_id"]
})
output["version"] = version_ent
if not version_ent:
output["success"] = False
output["message"] = (
"Version `{}` does not exist under Subset `{}` | Asset `{}`"
).format(version, subset_name, ent_path)
return output
repre_ents = list(db_con.find({
"type": "representation",
"parent": version_ent["_id"]
}))
output["representations"] = repre_ents
return output

View file

@ -29,7 +29,7 @@ class Login_Dialog_ui(QtWidgets.QWidget):
elif hasattr(parent, 'parent') and hasattr(parent.parent, 'icon'):
self.setWindowIcon(self.parent.parent.icon)
else:
pype_setup = os.getenv('PYPE_ROOT')
pype_setup = os.getenv('PYPE_SETUP_PATH')
items = [pype_setup, "app", "resources", "icon.png"]
fname = os.path.sep.join(items)
icon = QtGui.QIcon(fname)

View file

@ -0,0 +1,42 @@
import os
import traceback
from pype.lib import PypeHook
from pypeapp import Logger
from pype.premiere import lib as prlib
class PremierePrelaunch(PypeHook):
"""
This hook will check if the current workfile path has an Adobe Premiere
project inside. If not, it initializes one and finally passes the path
to the project via an environment variable to the Premiere launcher
shell script.
"""
def __init__(self, logger=None):
if not logger:
self.log = Logger().get_logger(self.__class__.__name__)
else:
self.log = logger
self.signature = "( {} )".format(self.__class__.__name__)
def execute(self, *args, env: dict = None) -> bool:
if not env:
env = os.environ
try:
__import__("pype.premiere")
__import__("pyblish")
except ImportError as e:
print(traceback.format_exc())
print("pyblish: Could not load integration: %s " % e)
else:
# Premiere Setup integration
# importlib.reload(prlib)
prlib.setup(env)
return True
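A rough sketch of how such a hook is run (illustrative only; in production the launcher builds and passes the task environment):

    hook = PremierePrelaunch()
    launched = hook.execute(env=dict(os.environ))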

View file

@ -659,7 +659,7 @@ def execute_hook(hook, *args, **kwargs):
This will load hook file, instantiate class and call `execute` method
on it. Hook must be in a form:
`$PYPE_ROOT/repos/pype/path/to/hook.py/HookClass`
`$PYPE_SETUP_PATH/repos/pype/path/to/hook.py/HookClass`
This will load `hook.py`, instantiate HookClass and then call
`execute(*args, **kwargs)` on it.
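A minimal usage sketch (the hook path below is hypothetical):

    execute_hook(
        "pype/hooks/premiere/prelaunch.py/PremierePrelaunch",
        env=dict(os.environ)
    )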
@ -670,7 +670,7 @@ def execute_hook(hook, *args, **kwargs):
class_name = hook.split("/")[-1]
abspath = os.path.join(os.getenv('PYPE_ROOT'),
abspath = os.path.join(os.getenv('PYPE_SETUP_PATH'),
'repos', 'pype', *hook.split("/")[:-1])
mod_name, mod_ext = os.path.splitext(os.path.basename(abspath))

View file

@ -397,7 +397,7 @@ class LogDetailWidget(QtWidgets.QWidget):
layout = QtWidgets.QVBoxLayout(self)
label = QtWidgets.QLabel("Detail")
detail_widget = LogDetailTextEdit()
detail_widget = QtWidgets.QTextEdit()
detail_widget.setReadOnly(True)
layout.addWidget(label)
layout.addWidget(detail_widget)
@ -420,66 +420,3 @@ class LogDetailWidget(QtWidgets.QWidget):
self.detail_widget.setHtml(self.html_text.format(**data))
class LogDetailTextEdit(QtWidgets.QTextEdit):
"""QTextEdit that displays version specific information.
This also overrides the context menu to add actions like copying
source path to clipboard or copying the raw data of the version
to clipboard.
"""
def __init__(self, parent=None):
super(LogDetailTextEdit, self).__init__(parent=parent)
# self.data = {
# "source": None,
# "raw": None
# }
#
# def contextMenuEvent(self, event):
# """Context menu with additional actions"""
# menu = self.createStandardContextMenu()
#
# # Add additional actions when any text so we can assume
# # the version is set.
# if self.toPlainText().strip():
#
# menu.addSeparator()
# action = QtWidgets.QAction("Copy source path to clipboard",
# menu)
# action.triggered.connect(self.on_copy_source)
# menu.addAction(action)
#
# action = QtWidgets.QAction("Copy raw data to clipboard",
# menu)
# action.triggered.connect(self.on_copy_raw)
# menu.addAction(action)
#
# menu.exec_(event.globalPos())
# del menu
#
# def on_copy_source(self):
# """Copy formatted source path to clipboard"""
# source = self.data.get("source", None)
# if not source:
# return
#
# # path = source.format(root=api.registered_root())
# # clipboard = QtWidgets.QApplication.clipboard()
# # clipboard.setText(path)
#
# def on_copy_raw(self):
# """Copy raw version data to clipboard
#
# The data is string formatted with `pprint.pformat`.
#
# """
# raw = self.data.get("raw", None)
# if not raw:
# return
#
# raw_text = pprint.pformat(raw)
# clipboard = QtWidgets.QApplication.clipboard()
# clipboard.setText(raw_text)

View file

@ -23,7 +23,7 @@ class MusterLogin(QtWidgets.QWidget):
elif hasattr(parent, 'parent') and hasattr(parent.parent, 'icon'):
self.setWindowIcon(parent.parent.icon)
else:
pype_setup = os.getenv('PYPE_ROOT')
pype_setup = os.getenv('PYPE_SETUP_PATH')
items = [pype_setup, "app", "resources", "icon.png"]
fname = os.path.sep.join(items)
icon = QtGui.QIcon(fname)

View file

@ -61,7 +61,6 @@ def reload_config():
reload(module)
def install():
''' Installing all requirements for Nuke host
'''
@ -72,6 +71,9 @@ def install():
avalon.register_plugin_path(avalon.Creator, CREATE_PATH)
avalon.register_plugin_path(avalon.InventoryAction, INVENTORY_PATH)
# Register Avalon event for workfiles loading.
avalon.on("workio.open_file", lib.check_inventory_versions)
pyblish.register_callback("instanceToggled", on_pyblish_instance_toggled)
workfile_settings = lib.WorkfileSettings()
# Disable all families except for the ones we explicitly want to see

View file

@ -192,7 +192,6 @@ def format_anatomy(data):
data["version"] = pype.get_version_from_path(file)
project_document = pype.get_project()
data.update({
"root": api.Session["AVALON_PROJECTS"],
"subset": data["avalon"]["subset"],
"asset": data["avalon"]["asset"],
"task": api.Session["AVALON_TASK"],
@ -313,7 +312,7 @@ def create_write_node(name, data, input=None, prenodes=None, review=True):
if input:
# if connected input node was defined
connections.append({
"node": input,
"node": input,
"inputName": input.name()})
prev_node = nuke.createNode(
"Input", "name {}".format(input.name()))
@ -369,7 +368,7 @@ def create_write_node(name, data, input=None, prenodes=None, review=True):
write_node = now_node = avalon.nuke.lib.add_write_node(
"inside_{}".format(name),
**_data
)
# connect to previous node
now_node.setInput(0, prev_node)
@ -393,11 +392,13 @@ def create_write_node(name, data, input=None, prenodes=None, review=True):
if review:
add_review_knob(GN)
# add render button
lnk = nuke.Link_Knob("Render")
lnk.makeLink(write_node.name(), "Render")
lnk.setName("Render")
GN.addKnob(lnk)
# Add linked knobs.
linked_knob_names = ["Render", "use_limit", "first", "last"]
for name in linked_knob_names:
link = nuke.Link_Knob(name)
link.makeLink(write_node.name(), name)
link.setName(name)
GN.addKnob(link)
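# The linked knobs above expose the inner Write node's render button
# and frame-limit controls directly on the published group node.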
divider = nuke.Text_Knob('')
GN.addKnob(divider)
@ -408,7 +409,6 @@ def create_write_node(name, data, input=None, prenodes=None, review=True):
# Deadline tab.
add_deadline_tab(GN)
# set tile color
tile_color = _data.get("tile_color", "0xff0000ff")
GN["tile_color"].setValue(tile_color)
@ -436,6 +436,7 @@ def add_rendering_knobs(node):
node.addKnob(knob)
return node
def add_review_knob(node):
''' Adds additional review knob to given node
@ -645,8 +646,9 @@ class WorkfileSettings(object):
if root_dict.get("customOCIOConfigPath"):
self._root_node["customOCIOConfigPath"].setValue(
str(root_dict["customOCIOConfigPath"]).format(
**os.environ
).replace("\\", "/")
)
log.debug("nuke.root()['{}'] changed to: {}".format(
"customOCIOConfigPath", root_dict["customOCIOConfigPath"]))
root_dict.pop("customOCIOConfigPath")
@ -750,10 +752,9 @@ class WorkfileSettings(object):
if changes:
msg = "Read nodes are not set to correct colospace:\n\n"
for nname, knobs in changes.items():
msg += str(" - node: '{0}' is now '{1}' "
"but should be '{2}'\n").format(
nname, knobs["from"], knobs["to"]
)
msg += str(
" - node: '{0}' is now '{1}' but should be '{2}'\n"
).format(nname, knobs["from"], knobs["to"])
msg += "\nWould you like to change it?"
@ -1111,15 +1112,15 @@ class BuildWorkfile(WorkfileSettings):
self.to_script = to_script
# collect data for formatting
self.data_tmp = {
    "project": {"name": self._project["name"],
                "code": self._project["data"].get("code", "")},
    "asset": self._asset or os.environ["AVALON_ASSET"],
    "task": kwargs.get("task") or api.Session["AVALON_TASK"],
    "hierarchy": kwargs.get("hierarchy") or pype.get_hierarchy(),
    "version": kwargs.get("version", {}).get("name", 1),
    "user": getpass.getuser(),
    "comment": "firstBuild",
    "ext": "nk"
}
# get presets from anatomy
@ -1128,8 +1129,8 @@ class BuildWorkfile(WorkfileSettings):
anatomy_filled = anatomy.format(self.data_tmp)
# get dir and file for workfile
self.work_dir = anatomy_filled["avalon"]["work"]
self.work_file = anatomy_filled["avalon"]["workfile"] + ".nk"
self.work_dir = anatomy_filled["work"]["folder"]
self.work_file = anatomy_filled["work"]["file"]
def save_script_as(self, path=None):
# first clear anything in open window
@ -1419,7 +1420,7 @@ class ExporterReview:
repre.update({
"frameStart": self.first_frame,
"frameEnd": self.last_frame,
})
self.data["representations"].append(repre)
@ -1654,7 +1655,7 @@ class ExporterReviewMov(ExporterReview):
if not self.viewer_lut_raw:
colorspaces = [
self.bake_colorspace_main, self.bake_colorspace_fallback
]
if any(colorspaces):
# OCIOColorSpace with controlled output
@ -1708,7 +1709,7 @@ class ExporterReviewMov(ExporterReview):
self.get_representation_data(
tags=["review", "delete"],
range=True
)
self.log.debug("Representation... `{}`".format(self.data))
@ -1743,14 +1744,14 @@ def get_dependent_nodes(nodes):
if test_in:
connections_in.update({
node: test_in
})
# collect all outputs outside
test_out = [i for i in outputs if i.name() not in node_names]
if test_out:
# only one dependent node is allowed
connections_out.update({
node: test_out[-1]
})
return connections_in, connections_out

View file

@ -0,0 +1,92 @@
import os
import pyblish.api
from avalon import (
io,
api as avalon
)
import json
from pathlib import Path
class CollectContextDataFromAport(pyblish.api.ContextPlugin):
"""
Collecting temp json data sent from a host context
and path for returning json data back to the host itself.
Setting avalon session into correct context
Args:
context (obj): pyblish context session
"""
label = "AdobeCommunicator Collect Context"
order = pyblish.api.CollectorOrder - 0.49
def process(self, context):
self.log.info(
"registred_hosts: `{}`".format(pyblish.api.registered_hosts()))
io.install()
# get json paths from data
input_json_path = os.environ.get("AC_PUBLISH_INPATH")
output_json_path = os.environ.get("AC_PUBLISH_OUTPATH")
rqst_json_data_path = Path(input_json_path)
post_json_data_path = Path(output_json_path)
context.data['post_json_data_path'] = str(post_json_data_path)
# get avalon session data and convert \ to /
_S = avalon.session
projects = Path(_S["AVALON_PROJECTS"]).resolve()
asset = _S["AVALON_ASSET"]
workdir = Path(_S["AVALON_WORKDIR"]).resolve()
_S["AVALON_PROJECTS"] = str(projects)
_S["AVALON_WORKDIR"] = str(workdir)
context.data["avalonSession"] = _S
self.log.info(f"__ avalonSession: `{_S}`")
# get staging directory from received path to json
context.data["stagingDir"] = post_json_data_path.parent
# get data from received json file
with rqst_json_data_path.open(mode='r') as f:
context.data["jsonData"] = json_data = json.load(f)
assert json_data, "No `data` in json file"
# get and check host type
host = json_data.get("host", None)
host_version = json_data.get("hostVersion", None)
assert host, "No `host` data in json file"
assert host_version, "No `hostVersion` data in json file"
context.data["host"] = _S["AVALON_APP"] = host
context.data["hostVersion"] = \
_S["AVALON_APP_VERSION"] = host_version
# get current file
current_file = json_data.get("currentFile", None)
assert current_file, "No `currentFile` data in json file"
context.data["currentFile"] = str(Path(current_file).resolve())
# get project data from avalon
project_data = io.find_one({'type': 'project'})
assert project_data, "No `project_data` data in avalon db"
context.data["projectData"] = project_data
self.log.debug("project_data: {}".format(project_data))
# get asset data from avalon and fix all paths
asset_data = io.find_one({
"type": 'asset',
"name": asset
})["data"]
assert asset_data, "No `asset_data` data in avalon db"
context.data["assetData"] = asset_data
self.log.debug("asset_data: {}".format(asset_data))
self.log.info("rqst_json_data_path is: {}".format(rqst_json_data_path))
self.log.info("post_json_data_path is: {}".format(post_json_data_path))
# self.log.info("avalon.session is: {}".format(avalon.session))

View file

@ -1,12 +1,5 @@
import os
import json
import pyblish.api
from avalon import (
io,
api as avalon
)
from pype import api as pype
class CollectInstancesFromJson(pyblish.api.ContextPlugin):
@ -26,7 +19,11 @@ class CollectInstancesFromJson(pyblish.api.ContextPlugin):
def process(self, context):
a_session = context.data.get("avalonSession")
_S = context.data["avalonSession"]
asset = _S["AVALON_ASSET"]
task = _S["AVALON_TASK"]
host = _S["AVALON_APP"]
json_data = context.data.get("jsonData", None)
assert json_data, "No `json_data` data in json file"
@ -36,96 +33,91 @@ class CollectInstancesFromJson(pyblish.api.ContextPlugin):
staging_dir = json_data.get("stagingDir", None)
assert staging_dir, "No `stagingDir` path in json file"
presets = context.data["presets"]
rules_tasks = presets["rules_tasks"]
ftrack_types = rules_tasks["ftrackTypes"]
assert ftrack_types, "No `ftrack_types` data in `/templates/presets/[host]/rules_tasks.json` file"
host = context.data["host"]
presets = context.data["presets"][host]
context.data["ftrackTypes"] = ftrack_types
rules_tasks = presets["rules_tasks"]
asset_default = presets["asset_default"]
assert asset_default, "No `asset_default` data in `/templates/presets/[host]/asset_default.json` file"
asset_name = a_session["AVALON_ASSET"]
entity = io.find_one({"name": asset_name,
"type": "asset"})
assert asset_default, ("No `asset_default` data in"
"`/presets/[host]/asset_default.json` file")
# get frame start > first try from asset data
frame_start = context.data["assetData"].get("fstart", None)
frame_start = context.data["assetData"].get("frameStart", None)
if not frame_start:
self.log.debug("frame_start not on assetData")
# get frame start > second try from parent data
frame_start = pype.get_data_hierarchical_attr(entity, "fstart")
if not frame_start:
self.log.debug("frame_start not on any parent entity")
# get frame start > third try from parent data
frame_start = asset_default["fstart"]
self.log.debug("frame_start not on any parent entity")
# get frame start > third try from parent data
frame_start = asset_default["frameStart"]
assert frame_start, "No `frame_start` data found, "
"please set `fstart` on asset"
self.log.debug("frame_start: `{}`".format(frame_start))
# get handles > first try from asset data
handle_start = context.data["assetData"].get("handleStart", None)
handle_end = context.data["assetData"].get("handleEnd", None)
if (handle_start is None) or (handle_end is None):
    # get handles > second try from asset default preset
    handle_start = asset_default.get("handleStart", None)
    handle_end = asset_default.get("handleEnd", None)
assert (
    (handle_start is not None) and (handle_end is not None)
), "No `handle_start, handle_end` data found"
instances = []
task = a_session["AVALON_TASK"]
current_file = os.path.basename(context.data.get("currentFile"))
name, ext = os.path.splitext(current_file)
# get current file host
host = a_session["AVALON_APP"]
family = "projectfile"
families = "filesave"
family = "workfile"
subset_name = "{0}{1}".format(task, 'Default')
instance_name = "{0}_{1}_{2}".format(name,
family,
subset_name)
# Set label
label = "{0} - {1} > {2}".format(name, task, families)
label = "{0} - {1}".format(name, task)
# get project file instance Data
pf_instance = [inst for inst in instances_data
if inst.get("family", None) in 'projectfile']
self.log.debug('pf_instance: {}'.format(pf_instance))
# get working file into instance for publishing
instance = context.create_instance(instance_name)
if pf_instance:
instance.data.update(pf_instance[0])
instance.data.update({
"subset": subset_name,
"stagingDir": staging_dir,
"task": task,
"representation": ext[1:],
"host": host,
"asset": asset_name,
"label": label,
"name": name,
# "hierarchy": hierarchy,
# "parents": parents,
"family": family,
"families": [families, 'ftrack'],
"publish": True,
# "files": files_list
})
instances.append(instance)
wf_instance = next((inst for inst in instances_data
if inst.get("family", None) in 'workfile'), None)
if wf_instance:
self.log.debug('wf_instance: {}'.format(wf_instance))
version = int(wf_instance.get("version", None))
# get working file into instance for publishing
instance = context.create_instance(instance_name)
instance.data.update(wf_instance)
instance.data.update({
"subset": subset_name,
"stagingDir": staging_dir,
"task": task,
"representations": [{
"files": current_file,
'stagingDir': staging_dir,
'name': "projectfile",
'ext': ext[1:]
}],
"host": host,
"asset": asset,
"label": label,
"name": name,
"family": family,
"families": ["ftrack"],
"publish": True,
"version": version
})
instances.append(instance)
for inst in instances_data:
# for key, value in inst.items():
# self.log.debug('instance[key]: {}'.format(key))
#
version = inst.get("version", None)
version = int(inst.get("version", None))
assert version, "No `version` string in json file"
name = asset = inst.get("name", None)
@ -135,14 +127,14 @@ class CollectInstancesFromJson(pyblish.api.ContextPlugin):
assert family, "No `family` key in json_data.instance: {}".format(
inst)
if family in 'projectfile':
if family in 'workfile':
continue
files_list = inst.get("files", None)
assert files_list, "`files` are empty in json file"
hierarchy = inst.get("hierarchy", None)
assert hierarchy, "No `hierarchy` data in json file"
assert hierarchy, f"No `hierarchy` data in json file for {name}"
parents = inst.get("parents", None)
assert parents, "No `parents` data in json file"
@ -161,17 +153,12 @@ class CollectInstancesFromJson(pyblish.api.ContextPlugin):
# create list of tasks for creation
if not inst.get('tasks', None):
inst['tasks'] = list()
if not inst.get('tasksTypes', None):
inst['tasksTypes'] = {}
# append tasks into list for later hierarchy creation
ftrack_task_type = ftrack_types[task]
if task not in inst['tasks']:
inst['tasks'].append(task)
inst['tasksTypes'][task] = ftrack_task_type
host = rules_tasks["taskHost"][task]
subsets = rules_tasks["taskSubsets"][task]
subsets = rules_tasks["taskToSubsets"][task]
for sub in subsets:
self.log.debug(sub)
try:
@ -184,8 +171,8 @@ class CollectInstancesFromJson(pyblish.api.ContextPlugin):
subset_lst.extend([s for s in subsets if s not in subset_lst])
for subset in subset_lst:
if inst["representations"].get(subset, None):
repr = inst["representations"][subset]
if inst["subsetToRepresentations"].get(subset, None):
repr = inst["subsetToRepresentations"][subset]
ext = repr['representation']
else:
continue
@ -197,7 +184,7 @@ class CollectInstancesFromJson(pyblish.api.ContextPlugin):
family = subset
subset_name = "{0}{1}".format(subset, "Main")
elif "reference" in subset:
family ="render"
family = "review"
subset_name = "{0}{1}".format(family, "Reference")
else:
subset_name = "{0}{1}".format(subset, 'Default')
@ -209,17 +196,15 @@ class CollectInstancesFromJson(pyblish.api.ContextPlugin):
instance = context.create_instance(name)
files = [f for f in files_list
         if subset in f or "thumbnail" in f]
instance.data.update({
"subset": subset_name,
"stagingDir": staging_dir,
"tasks": subset_dict[subset],
"taskTypes": inst['tasksTypes'],
"fstart": frame_start,
"handles": handles,
"host": host,
"frameStart": frame_start,
"handleStart": handle_start,
"handleEnd": handle_end,
"asset": asset,
"hierarchy": hierarchy,
"parents": parents,
@ -230,6 +215,8 @@ class CollectInstancesFromJson(pyblish.api.ContextPlugin):
"family": family,
"families": [subset, inst["family"], 'ftrack'],
"jsonData": inst,
"jsonReprSubset": subset,
"jsonReprExt": ext,
"publish": True,
"version": version})
self.log.info(
@ -238,9 +225,6 @@ class CollectInstancesFromJson(pyblish.api.ContextPlugin):
context.data["instances"] = instances
# Sort/grouped by family (preserving local index)
# context[:] = sorted(context, key=self.sort_by_task)
self.log.debug("context: {}".format(context))
def sort_by_task(self, instance):

View file

@ -2,7 +2,7 @@
import json
import clique
import pyblish.api
from pypeapp import Anatomy
class ExtractJSON(pyblish.api.ContextPlugin):
""" Extract all instances to a serialized json file. """
@ -14,28 +14,27 @@ class ExtractJSON(pyblish.api.ContextPlugin):
json_path = context.data['post_json_data_path']
data = dict(self.serialize(context.data()))
# self.log.info(data)
instances_data = []
for instance in context:
iData = {}
for key, value in instance.data.items():
if isinstance(value, clique.Collection):
value = value.format()
try:
json.dumps(value)
iData[key] = value
except KeyError:
msg = "\"{0}\"".format(value)
msg += " in instance.data[\"{0}\"]".format(key)
msg += " could not be serialized."
self.log.debug(msg)
instances_data.append(iData)
data["instances"] = instances_data
# instances_data = []
# for instance in context:
#
# iData = {}
# for key, value in instance.data.items():
# if isinstance(value, clique.Collection):
# value = value.format()
#
# try:
# json.dumps(value)
# iData[key] = value
# except KeyError:
# msg = "\"{0}\"".format(value)
# msg += " in instance.data[\"{0}\"]".format(key)
# msg += " could not be serialized."
# self.log.debug(msg)
#
# instances_data.append(iData)
#
# data["instances"] = instances_data
with open(json_path, "w") as outfile:
outfile.write(json.dumps(data, indent=4, sort_keys=True))
@ -60,6 +59,9 @@ class ExtractJSON(pyblish.api.ContextPlugin):
# self.log.info("1: {}".format(data))
if isinstance(data, Anatomy):
return
if not isinstance(data, dict):
# self.log.info("2: {}".format(data))
return data
@ -88,6 +90,9 @@ class ExtractJSON(pyblish.api.ContextPlugin):
# loops if dictionary
data[key] = self.serialize(value)
if isinstance(value, Anatomy):
continue
if isinstance(value, (list, tuple)):
# loops if list or tuple
for i, item in enumerate(value):

View file

@ -1,104 +0,0 @@
import os
import pyblish.api
from avalon import api as avalon
from pype import api as pype
import json
from pathlib import Path
class CollectContextDataFromAport(pyblish.api.ContextPlugin):
"""
Collecting temp json data sent from a host context
and path for returning json data back to the host itself.
Setting avalon session into correct context
Args:
context (obj): pyblish context session
"""
label = "Collect Aport Context"
order = pyblish.api.CollectorOrder - 0.49
def process(self, context):
# get json paths from data
rqst_json_data_path = Path(context.data['rqst_json_data_path'])
post_json_data_path = Path(context.data['post_json_data_path'])
# get avalon session data and convert \ to /
session = avalon.session
self.log.info(os.environ['AVALON_PROJECTS'])
projects = Path(session['AVALON_PROJECTS']).resolve()
wd = Path(session['AVALON_WORKDIR']).resolve()
session['AVALON_PROJECTS'] = str(projects)
session['AVALON_WORKDIR'] = str(wd)
context.data["avalonSession"] = session
self.log.debug("avalonSession: {}".format(session))
# get staging directory from received path to json
context.data["stagingDir"] = staging_dir = post_json_data_path.parent
# get data from received json file
with rqst_json_data_path.open(mode='r') as f:
context.data['jsonData'] = json_data = json.load(f)
assert json_data, "No `data` in json file"
# get and check host type
host = json_data.get("host", None)
host_version = json_data.get("hostVersion", None)
assert host, "No `host` data in json file"
assert host_version, "No `hostVersion` data in json file"
context.data["host"] = session["AVALON_APP"] = host
context.data["hostVersion"] = \
session["AVALON_APP_VERSION"] = host_version
# register pyblish for filtering of hosts in plugins
pyblish.api.deregister_all_hosts()
pyblish.api.register_host(host)
# get path to studio templates
templates_dir = os.getenv("PYPE_STUDIO_TEMPLATES", None)
assert templates_dir, "Missing `PYPE_STUDIO_TEMPLATES` in os.environ"
# get presets for host
presets_dir = os.path.join(templates_dir, "presets", host)
assert os.path.exists(
presets_dir), "Required path `{}` doesn't exist".format(presets_dir)
# load all available preset json files
preset_data = dict()
for file in os.listdir(presets_dir):
name, ext = os.path.splitext(file)
with open(os.path.join(presets_dir, file)) as prst:
preset_data[name] = json.load(prst)
context.data['presets'] = preset_data
assert preset_data, "No `presets` data in json file"
self.log.debug("preset_data: {}".format(preset_data))
# get current file
current_file = json_data.get("currentFile", None)
assert current_file, "No `currentFile` data in json file"
context.data["currentFile"] = Path(current_file).resolve()
# get project data from avalon
project_data = pype.get_project_data()
assert project_data, "No `project_data` data in avalon db"
context.data["projectData"] = project_data
self.log.debug("project_data: {}".format(project_data))
# get asset data from avalon and fix all paths
asset_data = pype.get_asset_data()
assert asset_data, "No `asset_data` data in avalon db"
asset_data = {k: v.replace("\\", "/") for k, v in asset_data.items()
if isinstance(v, str)}
context.data["assetData"] = asset_data
self.log.debug("asset_data: {}".format(asset_data))
self.log.info("rqst_json_data_path is: {}".format(rqst_json_data_path))
self.log.info("post_json_data_path is: {}".format(post_json_data_path))
# self.log.info("avalon.session is: {}".format(avalon.session))

View file

@ -89,7 +89,7 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):
# CUSTOM ATTRIBUTES
custom_attributes = entity_data.get('custom_attributes', [])
instances = [
i for i in self.context[:] if i.data['asset'] in entity['name']
i for i in self.context if i.data['asset'] in entity['name']
]
for key in custom_attributes:
assert (key in entity['custom_attributes']), (

View file

@ -68,6 +68,9 @@ class FusionSubmitDeadline(pyblish.api.InstancePlugin):
# Top-level group name
"BatchName": filename,
# Asset dependency to wait for at least the scene file to sync.
"AssetDependency0": filepath,
# Job name, as seen in Monitor
"Name": filename,

View file

@ -1,13 +1,14 @@
"""Collect Anatomy and global anatomy data.
"""Collect global context Anatomy data.
Requires:
context -> anatomy
context -> projectEntity
context -> assetEntity
context -> username
context -> datetimeData
session -> AVALON_TASK
projectEntity, assetEntity -> collect_avalon_entities *(pyblish.api.CollectorOrder)
username -> collect_pype_user *(pyblish.api.CollectorOrder + 0.001)
datetimeData -> collect_datetime_data *(pyblish.api.CollectorOrder)
Provides:
context -> anatomy (pypeapp.Anatomy)
context -> anatomyData
"""
@ -15,45 +16,51 @@ import os
import json
from avalon import api, lib
from pypeapp import Anatomy
import pyblish.api
class CollectAnatomy(pyblish.api.ContextPlugin):
"""Collect Anatomy into Context"""
class CollectAnatomyContextData(pyblish.api.ContextPlugin):
"""Collect Anatomy Context data.
Example:
context.data["anatomyData"] = {
"project": {
"name": "MyProject",
"code": "myproj"
},
"asset": "AssetName",
"hierarchy": "path/to/asset",
"task": "Working",
"username": "MeDespicable",
*** OPTIONAL ***
"app": "maya" # Current application base name
+ mutliple keys from `datetimeData` # see it's collector
}
"""
order = pyblish.api.CollectorOrder + 0.002
label = "Collect Anatomy"
label = "Collect Anatomy Context Data"
def process(self, context):
root_path = api.registered_root()
task_name = api.Session["AVALON_TASK"]
project_entity = context.data["projectEntity"]
asset_entity = context.data["assetEntity"]
project_name = project_entity["name"]
context.data["anatomy"] = Anatomy(project_name)
self.log.info(
"Anatomy object collected for project \"{}\".".format(project_name)
)
hierarchy_items = asset_entity["data"]["parents"]
hierarchy = ""
if hierarchy_items:
hierarchy = os.path.join(*hierarchy_items)
context_data = {
"root": root_path,
"project": {
"name": project_name,
"name": project_entity["name"],
"code": project_entity["data"].get("code")
},
"asset": asset_entity["name"],
"hierarchy": hierarchy.replace("\\", "/"),
"task": task_name,
"username": context.data["user"]
}

View file

@ -28,11 +28,11 @@ from avalon import io
import pyblish.api
class CollectInstanceAnatomyData(pyblish.api.InstancePlugin):
"""Fill templates with data needed for publish"""
class CollectAnatomyInstanceData(pyblish.api.InstancePlugin):
"""Collect Instance specific Anatomy data."""
order = pyblish.api.CollectorOrder + 0.49
label = "Collect instance anatomy data"
label = "Collect Anatomy Instance data"
def process(self, instance):
# get all the stuff from the database

View file

@ -0,0 +1,32 @@
"""Collect Anatomy object.
Requires:
os.environ -> AVALON_PROJECT
Provides:
context -> anatomy (pypeapp.Anatomy)
"""
import os
from pypeapp import Anatomy
import pyblish.api
class CollectAnatomyObject(pyblish.api.ContextPlugin):
"""Collect Anatomy object into Context"""
order = pyblish.api.CollectorOrder - 0.4
label = "Collect Anatomy Object"
def process(self, context):
project_name = os.environ.get("AVALON_PROJECT")
if project_name is None:
raise AssertionError(
"Environment `AVALON_PROJECT` is not set."
"Could not initialize project's Anatomy."
)
context.data["anatomy"] = Anatomy(project_name)
self.log.info(
"Anatomy object collected for project \"{}\".".format(project_name)
)
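For orientation, a rough sketch of how the collected object is used later in publishing (project name, root key and paths are hypothetical; real templates and roots come from the anatomy presets):

    anatomy = context.data["anatomy"]
    filled = anatomy.format({
        "project": {"name": "MyProject", "code": "mypr"},
        "asset": "sh010",
        "task": "comp"
    })
    workdir = filled["work"]["folder"]
    # resolve a "{root[...]}" prefix stored in metadata back to a local path
    local_path = anatomy.fill_root("{root[work]}/MyProject/sh010")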

View file

@ -15,7 +15,7 @@ import pyblish.api
class CollectAvalonEntities(pyblish.api.ContextPlugin):
"""Collect Anatomy into Context"""
order = pyblish.api.CollectorOrder - 0.02
order = pyblish.api.CollectorOrder - 0.1
label = "Collect Avalon Entities"
def process(self, context):
@ -51,10 +51,26 @@ class CollectAvalonEntities(pyblish.api.ContextPlugin):
context.data["frameStart"] = data.get("frameStart")
context.data["frameEnd"] = data.get("frameEnd")
handles = int(data.get("handles") or 0)
context.data["handles"] = handles
context.data["handleStart"] = int(data.get("handleStart", handles))
context.data["handleEnd"] = int(data.get("handleEnd", handles))
handles = data.get("handles") or 0
handle_start = data.get("handleStart")
if handle_start is None:
handle_start = handles
self.log.info((
"Key \"handleStart\" is not set."
" Using value from \"handles\" key {}."
).format(handle_start))
handle_end = data.get("handleEnd")
if handle_end is None:
handle_end = handles
self.log.info((
"Key \"handleEnd\" is not set."
" Using value from \"handles\" key {}."
).format(handle_end))
context.data["handles"] = int(handles)
context.data["handleStart"] = int(handle_start)
context.data["handleEnd"] = int(handle_end)
frame_start_h = data.get("frameStart") - context.data["handleStart"]
frame_end_h = data.get("frameEnd") + context.data["handleEnd"]
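For example, with asset data {"frameStart": 1001, "frameEnd": 1100, "handles": 5} and no explicit handleStart/handleEnd, both handle values fall back to 5 and the handle-extended range becomes 996-1105.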

View file

@ -1,11 +1,18 @@
"""Loads publishing context from json and continues in publish process.
Requires:
anatomy -> context["anatomy"] *(pyblish.api.CollectorOrder - 0.11)
Provides:
context, instances -> All data from previous publishing process.
"""
import os
import json
import pyblish.api
from avalon import api
from pypeapp import PypeLauncher
class CollectRenderedFiles(pyblish.api.ContextPlugin):
"""
@ -13,14 +20,17 @@ class CollectRenderedFiles(pyblish.api.ContextPlugin):
`PYPE_PUBLISH_DATA`. Those files _MUST_ share same context.
"""
order = pyblish.api.CollectorOrder - 0.1
order = pyblish.api.CollectorOrder - 0.2
targets = ["filesequence"]
label = "Collect rendered frames"
_context = None
def _load_json(self, path):
assert os.path.isfile(path), ("path to json file doesn't exist")
path = path.strip('\"')
assert os.path.isfile(path), (
"Path to json file doesn't exist. \"{}\"".format(path)
)
data = None
with open(path, "r") as json_file:
try:
@ -32,7 +42,12 @@ class CollectRenderedFiles(pyblish.api.ContextPlugin):
)
return data
def _process_path(self, data):
def _fill_staging_dir(self, data_object, anatomy):
staging_dir = data_object.get("stagingDir")
if staging_dir:
data_object["stagingDir"] = anatomy.fill_root(staging_dir)
def _process_path(self, data, anatomy):
# validate basic necessary data
data_err = "invalid json file - missing data"
required = ["asset", "user", "comment",
@ -66,14 +81,23 @@ class CollectRenderedFiles(pyblish.api.ContextPlugin):
os.environ["FTRACK_SERVER"] = ftrack["FTRACK_SERVER"]
# now we can just add instances from json file and we are done
for instance in data.get("instances"):
for instance_data in data.get("instances"):
self.log.info(" - processing instance for {}".format(
instance.get("subset")))
i = self._context.create_instance(instance.get("subset"))
self.log.info("remapping paths ...")
i.data["representations"] = [PypeLauncher().path_remapper(
data=r) for r in instance.get("representations")]
i.data.update(instance)
instance_data.get("subset")))
instance = self._context.create_instance(
instance_data.get("subset")
)
self.log.info("Filling stagignDir...")
self._fill_staging_dir(instance_data, anatomy)
instance.data.update(instance_data)
representations = []
for repre_data in instance_data.get("representations") or []:
self._fill_staging_dir(repre_data, anatomy)
representations.append(repre_data)
instance.data["representations"] = representations
def process(self, context):
self._context = context
@ -82,13 +106,39 @@ class CollectRenderedFiles(pyblish.api.ContextPlugin):
"Missing `PYPE_PUBLISH_DATA`")
paths = os.environ["PYPE_PUBLISH_DATA"].split(os.pathsep)
session_set = False
for path in paths:
data = self._load_json(path)
if not session_set:
self.log.info("Setting session using data from file")
api.Session.update(data.get("session"))
os.environ.update(data.get("session"))
session_set = True
assert data, "failed to load json file"
self._process_path(data)
project_name = os.environ.get("AVALON_PROJECT")
if project_name is None:
raise AssertionError(
"Environment `AVALON_PROJECT` was not found."
"Could not set project `root` which may cause issues."
)
# TODO root filling should happen after collect Anatomy
self.log.info("Getting root setting for project \"{}\"".format(
project_name
))
anatomy = context.data["anatomy"]
self.log.info("anatomy: {}".format(anatomy.roots))
try:
session_is_set = False
for path in paths:
path = anatomy.fill_root(path)
data = self._load_json(path)
assert data, "failed to load json file"
if not session_is_set:
session_data = data["session"]
remapped = anatomy.roots_obj.path_remapper(
session_data["AVALON_WORKDIR"]
)
if remapped:
session_data["AVALON_WORKDIR"] = remapped
self.log.info("Setting session using data from file")
api.Session.update(session_data)
os.environ.update(session_data)
session_is_set = True
self._process_path(data, anatomy)
except Exception as e:
self.log.error(e, exc_info=True)
raise Exception("Error") from e

View file

@ -18,7 +18,7 @@ class ExtractBurnin(pype.api.Extractor):
label = "Extract burnins"
order = pyblish.api.ExtractorOrder + 0.03
families = ["review", "burnin"]
hosts = ["nuke", "maya", "shell"]
hosts = ["nuke", "maya", "shell", "premiere"]
optional = True
def process(self, instance):

View file

@ -20,7 +20,7 @@ class ExtractReview(pyblish.api.InstancePlugin):
label = "Extract Review"
order = pyblish.api.ExtractorOrder + 0.02
families = ["review"]
hosts = ["nuke", "maya", "shell"]
hosts = ["nuke", "maya", "shell", "premiere"]
outputs = {}
ext_filter = []

View file

@ -481,9 +481,6 @@ class IntegrateMasterVersion(pyblish.api.InstancePlugin):
def copy_file(self, src_path, dst_path):
# TODO check drives if are the same to check if cas hardlink
dst_path = self.path_root_check(dst_path)
src_path = self.path_root_check(src_path)
dirname = os.path.dirname(dst_path)
try:
@ -513,75 +510,6 @@ class IntegrateMasterVersion(pyblish.api.InstancePlugin):
shutil.copy(src_path, dst_path)
def path_root_check(self, path):
normalized_path = os.path.normpath(path)
forward_slash_path = normalized_path.replace("\\", "/")
drive, _path = os.path.splitdrive(normalized_path)
if os.path.exists(drive + "/"):
key = "drive_check{}".format(drive)
if key not in self.path_checks:
self.log.debug(
"Drive \"{}\" exist. Nothing to change.".format(drive)
)
self.path_checks.append(key)
return normalized_path
path_env_key = "PYPE_STUDIO_PROJECTS_PATH"
mount_env_key = "PYPE_STUDIO_PROJECTS_MOUNT"
missing_envs = []
if path_env_key not in os.environ:
missing_envs.append(path_env_key)
if mount_env_key not in os.environ:
missing_envs.append(mount_env_key)
if missing_envs:
key = "missing_envs"
if key not in self.path_checks:
self.path_checks.append(key)
_add_s = ""
if len(missing_envs) > 1:
_add_s = "s"
self.log.warning((
"Can't replace MOUNT drive path to UNC path due to missing"
" environment variable{}: `{}`. This may cause issues"
" during publishing process."
).format(_add_s, ", ".join(missing_envs)))
return normalized_path
unc_root = os.environ[path_env_key].replace("\\", "/")
mount_root = os.environ[mount_env_key].replace("\\", "/")
# --- Remove slashes at the end of mount and unc roots ---
while unc_root.endswith("/"):
unc_root = unc_root[:-1]
while mount_root.endswith("/"):
mount_root = mount_root[:-1]
# ---
if forward_slash_path.startswith(unc_root):
self.log.debug((
"Path already starts with UNC root: \"{}\""
).format(unc_root))
return normalized_path
if not forward_slash_path.startswith(mount_root):
self.log.warning((
"Path do not start with MOUNT root \"{}\" "
"set in environment variable \"{}\""
).format(unc_root, mount_env_key))
return normalized_path
# Replace Mount root with Unc root
path = unc_root + forward_slash_path[len(mount_root):]
return os.path.normpath(path)
def version_from_representations(self, repres):
for repre in repres:
version = io.find_one({"_id": repre["parent"]})

View file

@ -5,6 +5,7 @@ import sys
import copy
import clique
import errno
import six
from pymongo import DeleteOne, InsertOne
import pyblish.api
@ -327,6 +328,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
test_dest_files.append(
os.path.normpath(template_filled)
)
template_data["frame"] = repre_context["frame"]
self.log.debug(
"test_dest_files: {}".format(str(test_dest_files)))
@ -390,7 +392,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
dst_start_frame,
dst_tail
).replace("..", ".")
repre['published_path'] = self.unc_convert(dst)
repre['published_path'] = dst
else:
# Single file
@ -418,7 +420,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
instance.data["transfers"].append([src, dst])
published_files.append(dst)
repre['published_path'] = self.unc_convert(dst)
repre['published_path'] = dst
self.log.debug("__ dst: {}".format(dst))
repre["publishedFiles"] = published_files
@ -522,23 +524,6 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
self.log.debug("Hardlinking file .. {} -> {}".format(src, dest))
self.hardlink_file(src, dest)
def unc_convert(self, path):
self.log.debug("> __ path: `{}`".format(path))
drive, _path = os.path.splitdrive(path)
self.log.debug("> __ drive, _path: `{}`, `{}`".format(drive, _path))
if not os.path.exists(drive + "/"):
self.log.info("Converting to unc from environments ..")
path_replace = os.getenv("PYPE_STUDIO_PROJECTS_PATH")
path_mount = os.getenv("PYPE_STUDIO_PROJECTS_MOUNT")
if "/" in path_mount:
path = path.replace(path_mount[0:-1], path_replace)
else:
path = path.replace(path_mount, path_replace)
return path
def copy_file(self, src, dst):
""" Copy given source to destination
@ -548,8 +533,6 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
Returns:
None
"""
src = self.unc_convert(src)
dst = self.unc_convert(dst)
src = os.path.normpath(src)
dst = os.path.normpath(dst)
self.log.debug("Copying file .. {} -> {}".format(src, dst))
@ -565,16 +548,18 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
# copy file with speedcopy and check that the file sizes match
while True:
try:
    copyfile(src, dst)
except OSError as e:
self.log.critical("Cannot copy {} to {}".format(src, dst))
self.log.critical(e)
six.reraise(*sys.exc_info())
if getsize(src) == getsize(dst):
break
def hardlink_file(self, src, dst):
dirname = os.path.dirname(dst)
src = self.unc_convert(src)
dst = self.unc_convert(dst)
try:
os.makedirs(dirname)
except OSError as e:
@ -606,7 +591,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
"name": subset_name,
"data": {
"families": instance.data.get('families')
},
"parent": asset["_id"]
}).inserted_id
@ -659,26 +644,35 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
families.append(instance_family)
families += current_families
self.log.debug("Registered root: {}".format(api.registered_root()))
# create relative source path for DB
if "source" in instance.data:
    source = instance.data["source"]
else:
    source = context.data["currentFile"]
anatomy = instance.context.data["anatomy"]
success, rootless_path = (
anatomy.roots_obj.find_root_template_from_path(source)
)
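# Example with hypothetical paths: "P:/projects/ep01/sh010/scene.nk"
# becomes "{root[work]}/projects/ep01/sh010/scene.nk" when the "work"
# root is mounted at "P:/"; actual roots come from the project Anatomy.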
if success:
source = rootless_path
else:
self.log.warning((
"Could not find root path for remapping \"{}\"."
" This may cause issues on farm."
).format(source))
self.log.debug("Source: {}".format(source))
version_data = {"families": families,
"time": context.data["time"],
"author": context.data["user"],
"source": source,
"comment": context.data.get("comment"),
"machine": context.data.get("machine"),
"fps": context.data.get(
"fps", instance.data.get("fps"))}
version_data = {
"families": families,
"time": context.data["time"],
"author": context.data["user"],
"source": source,
"comment": context.data.get("comment"),
"machine": context.data.get("machine"),
"fps": context.data.get(
"fps", instance.data.get("fps")
)
}
intent_value = instance.context.data.get("intent")
if intent_value and isinstance(intent_value, dict):
@ -720,7 +714,8 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
matching_profiles = None
highest_value = -1
for name, filters in self.template_name_profiles:
self.log.info(self.template_name_profiles)
for name, filters in self.template_name_profiles.items():
value = 0
families = filters.get("families")
if families:

View file

@ -1,3 +1,6 @@
# -*- coding: utf-8 -*-
"""Submit publishing job to farm."""
import os
import json
import re
@ -10,7 +13,7 @@ import pyblish.api
def _get_script():
"""Get path to the image sequence script"""
"""Get path to the image sequence script."""
try:
from pype.scripts import publish_filesequence
except Exception:
@ -20,17 +23,11 @@ def _get_script():
if module_path.endswith(".pyc"):
module_path = module_path[: -len(".pyc")] + ".py"
module_path = os.path.normpath(module_path)
mount_root = os.path.normpath(os.environ["PYPE_STUDIO_CORE_MOUNT"])
network_root = os.path.normpath(os.environ["PYPE_STUDIO_CORE_PATH"])
module_path = module_path.replace(mount_root, network_root)
return module_path
return os.path.normpath(module_path)
# Logic to retrieve latest files concerning extendFrames
def get_latest_version(asset_name, subset_name, family):
"""Retrieve latest files concerning extendFrame feature."""
# Get asset
asset_name = io.find_one(
{"type": "asset", "name": asset_name}, projection={"name": True}
@ -64,9 +61,7 @@ def get_latest_version(asset_name, subset_name, family):
def get_resources(version, extension=None):
"""
Get the files from the specific version
"""
"""Get the files from the specific version."""
query = {"type": "representation", "parent": version["_id"]}
if extension:
query["name"] = extension
@ -86,14 +81,25 @@ def get_resources(version, extension=None):
return resources
def get_resource_files(resources, frame_range, override=True):
def get_resource_files(resources, frame_range=None):
"""Get resource files at given path.
If `frame_range` is specified those outside will be removed.
Arguments:
resources (list): List of resources
frame_range (list): Frame range to apply override
Returns:
list of str: list of collected resources
"""
res_collections, _ = clique.assemble(resources)
assert len(res_collections) == 1, "Multiple collections found"
res_collection = res_collections[0]
# Remove any frames
if override:
if frame_range is not None:
for frame in frame_range:
if frame not in res_collection.indexes:
continue
@ -146,16 +152,12 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
aov_filter = {"maya": ["beauty"]}
enviro_filter = [
"PATH",
"PYTHONPATH",
"FTRACK_API_USER",
"FTRACK_API_KEY",
"FTRACK_SERVER",
"PYPE_ROOT",
"PYPE_METADATA_FILE",
"PYPE_STUDIO_PROJECTS_PATH",
"PYPE_STUDIO_PROJECTS_MOUNT",
"AVALON_PROJECT"
"AVALON_PROJECT",
"PYPE_LOG_NO_COLORS"
]
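The filter above acts as a whitelist when the job environment is copied over to the dependent job; a minimal sketch of the same idea:
import os
enviro_filter = ["PATH", "AVALON_PROJECT", "PYPE_LOG_NO_COLORS"]
environment = {
    key: value for key, value in os.environ.items()
    if key.upper() in enviro_filter
}
print(sorted(environment))  # only whitelisted keys survive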
# pool used to do the publishing job
@ -177,10 +179,12 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
families_transfer = ["render3d", "render2d", "ftrack", "slate"]
def _submit_deadline_post_job(self, instance, job):
"""
"""Submit publish job to Deadline.
Deadline-specific code separated from :meth:`process` for the sake of
more universal code. The Muster post job is sent directly by the Muster
submitter, so this type of code isn't necessary for it.
"""
data = instance.data.copy()
subset = data["subset"]
@ -188,14 +192,18 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
batch=job["Props"]["Name"], subset=subset
)
metadata_filename = "{}_metadata.json".format(subset)
output_dir = instance.data["outputDir"]
metadata_path = os.path.join(output_dir, metadata_filename)
metadata_path = os.path.normpath(metadata_path)
mount_root = os.path.normpath(os.environ["PYPE_STUDIO_PROJECTS_MOUNT"])
network_root = os.environ["PYPE_STUDIO_PROJECTS_PATH"]
metadata_path = metadata_path.replace(mount_root, network_root)
# Convert output dir to `{root}/rest/of/path/...` with Anatomy
success, rootless_path = (
self.anatomy.roots_obj.find_root_template_from_path(output_dir)
)
if not success:
# `rootless_path` is not set to `output_dir` if none of the roots match
self.log.warning((
"Could not find root path for remapping \"{}\"."
" This may cause issues on farm."
).format(output_dir))
rootless_path = output_dir
# Generate the payload for Deadline submission
payload = {
@ -222,9 +230,18 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
# Transfer the environment from the original job to this dependent
# job so they use the same environment
metadata_filename = "{}_metadata.json".format(subset)
metadata_path = os.path.join(rootless_path, metadata_filename)
environment = job["Props"].get("Env", {})
environment["PYPE_METADATA_FILE"] = metadata_path
environment["AVALON_PROJECT"] = io.Session["AVALON_PROJECT"]
environment["PYPE_LOG_NO_COLORS"] = "1"
try:
environment["PYPE_PYTHON_EXE"] = os.environ["PYPE_PYTHON_EXE"]
except KeyError:
# PYPE_PYTHON_EXE not set
pass
i = 0
for index, key in enumerate(environment):
if key.upper() in self.enviro_filter:
@ -250,14 +267,17 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
raise Exception(response.text)
def _copy_extend_frames(self, instance, representation):
"""
"""Copy existing frames from latest version.
This will copy all existing frames from the subset's latest version back
to the render directory and rename them to what the renderer expects.
:param instance: instance to get required data from
:type instance: pyblish.plugin.Instance
"""
Arguments:
instance (pyblish.plugin.Instance): instance to get required
data from
representation (dict): representation to operate on
"""
import speedcopy
self.log.info("Preparing to copy ...")
@ -297,9 +317,11 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
# type
assert fn is not None, "padding string wasn't found"
# list of tuples (source, destination)
staging = representation.get("stagingDir")
staging = self.anatomy.fill_roots(staging)
resource_files.append(
(frame,
os.path.join(representation.get("stagingDir"),
os.path.join(staging,
"{}{}{}".format(pre,
fn.group("frame"),
post)))
@ -319,19 +341,20 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
"Finished copying %i files" % len(resource_files))
def _create_instances_for_aov(self, instance_data, exp_files):
"""
"""Create instance for each AOV found.
This will create a new instance for every AOV it can detect in the
expected files list.
:param instance_data: skeleton data for instance (those needed) later
by collector
:type instance_data: pyblish.plugin.Instance
:param exp_files: list of expected files divided by aovs
:type exp_files: list
:returns: list of instances
:rtype: list(publish.plugin.Instance)
"""
Arguments:
instance_data (pyblish.plugin.Instance): skeleton data for instance
(those needed) later by collector
exp_files (list): list of expected files divided by aovs
Returns:
list of instances
"""
task = os.environ["AVALON_TASK"]
subset = instance_data["subset"]
instances = []
@ -355,6 +378,16 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
subset_name = '{}_{}'.format(group_name, aov)
staging = os.path.dirname(list(cols[0])[0])
success, rootless_staging_dir = (
self.anatomy.roots_obj.find_root_template_from_path(staging)
)
if success:
staging = rootless_staging_dir
else:
self.log.warning((
"Could not find root path for remapping \"{}\"."
" This may cause issues on farm."
).format(staging))
self.log.info("Creating data for: {}".format(subset_name))
@ -397,26 +430,28 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
return instances
def _get_representations(self, instance, exp_files):
"""
"""Create representations for file sequences.
This will return representations of expected files if they are not
in a hierarchy of AOVs. In most cases there should be only one sequence
of files, but if not, we create a representation for each of them.
:param instance: instance for which we are setting representations
:type instance: pyblish.plugin.Instance
:param exp_files: list of expected files
:type exp_files: list
:returns: list of representations
:rtype: list(dict)
"""
Arguments:
instance (pyblish.plugin.Instance): instance for which we are
setting representations
exp_files (list): list of expected files
Returns:
list of representations
"""
representations = []
cols, rem = clique.assemble(exp_files)
collections, remainders = clique.assemble(exp_files)
bake_render_path = instance.get("bakeRenderPath")
# create representation for every collected sequence
for c in cols:
ext = c.tail.lstrip(".")
for collection in collections:
ext = collection.tail.lstrip(".")
preview = False
# if filtered aov name is found in filename, toggle it for
# preview video rendering
@ -425,7 +460,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
for aov in self.aov_filter[app]:
if re.match(
r".+(?:\.|_)({})(?:\.|_).*".format(aov),
list(c)[0]
list(collection)[0]
):
preview = True
break
@ -434,14 +469,26 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
if bake_render_path:
preview = False
staging = os.path.dirname(list(collection)[0])
success, rootless_staging_dir = (
self.anatomy.roots_obj.find_root_template_from_path(staging)
)
if success:
staging = rootless_staging_dir
else:
self.log.warning((
"Could not find root path for remapping \"{}\"."
" This may cause issues on farm."
).format(staging))
rep = {
"name": ext,
"ext": ext,
"files": [os.path.basename(f) for f in list(c)],
"files": [os.path.basename(f) for f in list(collection)],
"frameStart": int(instance.get("frameStartHandle")),
"frameEnd": int(instance.get("frameEndHandle")),
# If expectedFile are absolute, we need only filenames
"stagingDir": os.path.dirname(list(c)[0]),
"stagingDir": staging,
"fps": instance.get("fps"),
"tags": ["review", "preview"] if preview else [],
}
@ -454,15 +501,28 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
self._solve_families(instance, preview)
# add remainders as representations
for r in rem:
ext = r.split(".")[-1]
for remainder in remainders:
ext = remainder.split(".")[-1]
staging = os.path.dirname(remainder)
success, rootless_staging_dir = (
self.anatomy.roots_obj.find_root_template_from_path(staging)
)
if success:
staging = rootless_staging_dir
else:
self.log.warning((
"Could not find root path for remapping \"{}\"."
" This may cause issues on farm."
).format(staging))
rep = {
"name": ext,
"ext": ext,
"files": os.path.basename(r),
"stagingDir": os.path.dirname(r)
"files": os.path.basename(remainder),
"stagingDir": os.path.dirname(remainder),
}
if r in bake_render_path:
if remainder in bake_render_path:
rep.update({
"fps": instance.get("fps"),
"tags": ["review", "delete"]
@ -486,7 +546,8 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
instance["families"] = families
def process(self, instance):
"""
"""Process plugin.
Detect the type of render farm submission and, in the case of Deadline,
create and post a dependent job. It creates a json file with metadata
needed for publishing in the render directory.
@ -497,6 +558,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
data = instance.data.copy()
context = instance.context
self.context = context
self.anatomy = instance.context.data["anatomy"]
if hasattr(instance, "_log"):
data['_log'] = instance._log
@ -556,11 +618,18 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
except KeyError:
source = context.data["currentFile"]
source = source.replace(
os.getenv("PYPE_STUDIO_PROJECTS_MOUNT"), api.registered_root()
success, rootless_path = (
self.anatomy.roots_obj.find_root_template_from_path(source)
)
relative_path = os.path.relpath(source, api.registered_root())
source = os.path.join("{root}", relative_path).replace("\\", "/")
if success:
source = rootless_path
else:
# `rootless_path` is not set to `source` if none of the roots match
self.log.warning((
"Could not find root path for remapping \"{}\"."
" This may cause issues."
).format(source))
families = ["render"]
@ -611,13 +680,29 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
# look into instance data if representations are not having any
# which are having tag `publish_on_farm` and include them
for r in instance.data.get("representations", []):
if "publish_on_farm" in r.get("tags"):
for repre in instance.data.get("representations", []):
staging_dir = repre.get("stagingDir")
if staging_dir:
success, rootless_staging_dir = (
self.anatomy.roots_obj.find_root_template_from_path(
staging_dir
)
)
if success:
repre["stagingDir"] = rootless_staging_dir
else:
self.log.warning((
"Could not find root path for remapping \"{}\"."
" This may cause issues on farm."
).format(staging_dir))
repre["stagingDir"] = staging_dir
if "publish_on_farm" in repre.get("tags"):
# create representations attribute if not there
if "representations" not in instance_skeleton_data.keys():
instance_skeleton_data["representations"] = []
instance_skeleton_data["representations"].append(r)
instance_skeleton_data["representations"].append(repre)
instances = None
assert data.get("expectedFiles"), ("Submission from old Pype version"
@ -754,12 +839,21 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
with open(metadata_path, "w") as f:
json.dump(publish_job, f, indent=4, sort_keys=True)
def _extend_frames(self, asset, subset, start, end, override):
"""
This will get latest version of asset and update frame range based
on minimum and maximuma values
"""
def _extend_frames(self, asset, subset, start, end):
"""Get latest version of asset nad update frame range.
Based on minimum and maximuma values.
Arguments:
asset (str): asset name
subset (str): subset name
start (int): start frame
end (int): end frame
Returns:
(int, int): updated frame start/end
"""
# Frame comparison
prev_start = None
prev_end = None
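A worked sketch of the min/max extension the docstring describes (hypothetical values):
def extend_range(start, end, prev_start, prev_end):
    # Return the union of the current and the previous frame range.
    if prev_start is None or prev_end is None:
        return start, end
    return min(start, prev_start), max(end, prev_end)
print(extend_range(1001, 1050, 990, 1020))  # -> (990, 1050)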

View file

@ -122,6 +122,7 @@ class CollectMayaRender(pyblish.api.ContextPlugin):
workspace = context.data["workspaceDir"]
self._rs = renderSetup.instance()
current_layer = self._rs.getVisibleRenderLayer()
maya_render_layers = {l.name(): l for l in self._rs.getRenderLayers()}
self.maya_layers = maya_render_layers
@ -157,6 +158,9 @@ class CollectMayaRender(pyblish.api.ContextPlugin):
attachTo = []
if sets:
for s in sets:
if "family" not in cmds.listAttr(s):
continue
attachTo.append(
{
"version": None, # we need integrator for that
@ -303,6 +307,10 @@ class CollectMayaRender(pyblish.api.ContextPlugin):
instance.data.update(data)
self.log.debug("data: {}".format(json.dumps(data, indent=4)))
# Restore current layer.
self.log.info("Restoring to {}".format(current_layer.name()))
self._rs.switchToLayer(current_layer)
def parse_options(self, render_globals):
"""Get all overrides with a value, skip those without
@ -397,6 +405,8 @@ class ExpectedFiles:
multipart = False
def get(self, renderer, layer):
renderSetup.instance().switchToLayerUsingLegacyName(layer)
if renderer.lower() == "arnold":
return self._get_files(ExpectedFilesArnold(layer))
elif renderer.lower() == "vray":

View file

@ -1,6 +1,17 @@
# -*- coding: utf-8 -*-
"""Submitting render job to Deadline.
This module takes care of submitting jobs from Maya to Deadline. It
creates the job and sets the correct environment. Its behavior is
controlled by the `DEADLINE_REST_URL` environment variable - pointing to
the Deadline Web Service - and the `MayaSubmitDeadline.use_published`
(bool) property telling Deadline whether or not to use the published
scene workfile.
"""
import os
import json
import getpass
import re
import clique
from maya import cmds
@ -14,7 +25,7 @@ import pype.maya.lib as lib
def get_renderer_variables(renderlayer=None):
"""Retrieve the extension which has been set in the VRay settings
"""Retrieve the extension which has been set in the VRay settings.
Will return None if the current renderer is not VRay
For Maya 2016.5 and up the renderSetup creates renderSetupLayer node which
@ -25,16 +36,21 @@ def get_renderer_variables(renderlayer=None):
Returns:
dict
"""
"""
renderer = lib.get_renderer(renderlayer or lib.get_current_renderlayer())
render_attrs = lib.RENDER_ATTRS.get(renderer, lib.RENDER_ATTRS["default"])
padding = cmds.getAttr("{}.{}".format(render_attrs["node"],
render_attrs["padding"]))
filename_0 = cmds.renderSettings(fullPath=True, firstImageName=True)[0]
filename_0 = cmds.renderSettings(
fullPath=True,
gin="#" * int(padding),
lut=True,
layer=renderlayer or lib.get_current_renderlayer())[0]
filename_0 = filename_0.replace('_<RenderPass>', '_beauty')
prefix_attr = "defaultRenderGlobals.imageFilePrefix"
if renderer == "vray":
# Maya's renderSettings function does not return V-Ray file extension
# so we get the extension from vraySettings
@ -46,62 +62,33 @@ def get_renderer_variables(renderlayer=None):
if extension is None:
extension = "png"
filename_prefix = "<Scene>/<Scene>_<Layer>/<Layer>"
if extension == "exr (multichannel)" or extension == "exr (deep)":
extension = "exr"
prefix_attr = "vraySettings.fileNamePrefix"
elif renderer == "renderman":
prefix_attr = "rmanGlobals.imageFileFormat"
elif renderer == "redshift":
# mapping redshift extension dropdown values to strings
ext_mapping = ["iff", "exr", "tif", "png", "tga", "jpg"]
extension = ext_mapping[
cmds.getAttr("redshiftOptions.imageFormat")
]
else:
# Get the extension, getAttr defaultRenderGlobals.imageFormat
# returns an index number.
filename_base = os.path.basename(filename_0)
extension = os.path.splitext(filename_base)[-1].strip(".")
filename_prefix = cmds.getAttr("defaultRenderGlobals.imageFilePrefix")
filename_prefix = cmds.getAttr(prefix_attr)
return {"ext": extension,
"filename_prefix": filename_prefix,
"padding": padding,
"filename_0": filename_0}
def preview_fname(folder, scene, layer, padding, ext):
"""Return output file path with #### for padding.
Deadline requires the path to be formatted with # in place of numbers.
For example `/path/to/render.####.png`
Args:
folder (str): The root output folder (image path)
scene (str): The scene name
layer (str): The layer name to be rendered
padding (int): The padding length
ext(str): The output file extension
Returns:
str
"""
fileprefix = cmds.getAttr("defaultRenderGlobals.imageFilePrefix")
output = fileprefix + ".{number}.{ext}"
# RenderPass is currently hardcoded to "beauty" because its not important
# for the deadline submission, but we will need something to replace
# "<RenderPass>".
mapping = {
"<Scene>": "{scene}",
"<RenderLayer>": "{layer}",
"RenderPass": "beauty"
}
for key, value in mapping.items():
output = output.replace(key, value)
output = output.format(
scene=scene,
layer=layer,
number="#" * padding,
ext=ext
)
return os.path.join(folder, output)
class MayaSubmitDeadline(pyblish.api.InstancePlugin):
"""Submit available render layers to Deadline
"""Submit available render layers to Deadline.
Renders are submitted to a Deadline Web Service as
supplied via the environment variable DEADLINE_REST_URL
@ -194,22 +181,22 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin):
filename = os.path.basename(filepath)
comment = context.data.get("comment", "")
scene = os.path.splitext(filename)[0]
dirname = os.path.join(workspace, "renders")
renderlayer = instance.data['setMembers'] # rs_beauty
renderlayer_name = instance.data['subset'] # beauty
# renderlayer_globals = instance.data["renderGlobals"]
# legacy_layers = renderlayer_globals["UseLegacyRenderLayers"]
deadline_user = context.data.get("deadlineUser", getpass.getuser())
jobname = "%s - %s" % (filename, instance.name)
# Get the variables depending on the renderer
render_variables = get_renderer_variables(renderlayer)
output_filename_0 = preview_fname(folder=dirname,
scene=scene,
layer=renderlayer_name,
padding=render_variables["padding"],
ext=render_variables["ext"])
filename_0 = render_variables["filename_0"]
if self.use_published:
new_scene = os.path.splitext(filename)[0]
orig_scene = os.path.splitext(
os.path.basename(context.data["currentFile"]))[0]
filename_0 = render_variables["filename_0"].replace(
orig_scene, new_scene)
output_filename_0 = filename_0
try:
# Ensure render folder exists
@ -226,6 +213,9 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin):
# Top-level group name
"BatchName": filename,
# Asset dependency to wait for at least the scene file to sync.
"AssetDependency0": filepath,
# Job name, as seen in Monitor
"Name": jobname,
@ -284,7 +274,7 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin):
for aov, files in exp[0].items():
col = clique.assemble(files)[0][0]
outputFile = col.format('{head}{padding}{tail}')
payload['JobInfo']['OutputFilename' + str(expIndex)] = outputFile
payload['JobInfo']['OutputFilename' + str(expIndex)] = outputFile # noqa: E501
OutputFilenames[expIndex] = outputFile
expIndex += 1
else:
@ -293,7 +283,6 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin):
payload['JobInfo']['OutputFilename' + str(expIndex)] = outputFile
# OutputFilenames[expIndex] = outputFile
# We need those to pass them to pype for it to set correct context
keys = [
"FTRACK_API_KEY",
@ -334,7 +323,7 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin):
raise Exception(response.text)
# Store output dir for unified publisher (filesequence)
instance.data["outputDir"] = os.path.dirname(output_filename_0)
instance.data["outputDir"] = os.path.dirname(filename_0)
instance.data["deadlineSubmissionJob"] = response.json()
def preflight_check(self, instance):

View file

@ -309,14 +309,7 @@ class MayaSubmitMuster(pyblish.api.InstancePlugin):
output_dir = instance.data["outputDir"]
metadata_path = os.path.join(output_dir, metadata_filename)
# replace path for UNC / network share paths, so PYPE is found
# over network. It assumes PYPE is located somewhere in
# PYPE_STUDIO_CORE_PATH
pype_root = os.environ["PYPE_ROOT"].replace(
os.path.normpath(
os.environ['PYPE_STUDIO_CORE_MOUNT']), # noqa
os.path.normpath(
os.environ['PYPE_STUDIO_CORE_PATH'])) # noqa
pype_root = os.environ["PYPE_SETUP_PATH"]
# we must provide either full path to executable or use musters own
# python named MPython.exe, residing directly in muster bin
@ -517,33 +510,25 @@ class MayaSubmitMuster(pyblish.api.InstancePlugin):
environment["PATH"] = os.environ["PATH"]
# self.log.debug("enviro: {}".format(environment['PYPE_SCRIPTS']))
clean_environment = {}
for key in environment:
for key, value in environment.items():
clean_path = ""
self.log.debug("key: {}".format(key))
to_process = environment[key]
if key == "PYPE_STUDIO_CORE_MOUNT":
clean_path = environment[key]
elif "://" in environment[key]:
clean_path = environment[key]
elif os.pathsep not in to_process:
try:
path = environment[key]
path.decode('UTF-8', 'strict')
clean_path = os.path.normpath(path)
except UnicodeDecodeError:
print('path contains non UTF characters')
if "://" in value:
clean_path = value
else:
for path in environment[key].split(os.pathsep):
valid_paths = []
for path in value.split(os.pathsep):
if not path:
continue
try:
path.decode('UTF-8', 'strict')
clean_path += os.path.normpath(path) + os.pathsep
valid_paths.append(os.path.normpath(path))
except UnicodeDecodeError:
print('path contains non UTF characters')
# this should replace paths so they are pointing to network share
clean_path = clean_path.replace(
os.path.normpath(environment['PYPE_STUDIO_CORE_MOUNT']),
os.path.normpath(environment['PYPE_STUDIO_CORE_PATH']))
if valid_paths:
clean_path = os.pathsep.join(valid_paths)
clean_environment[key] = clean_path
return clean_environment
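The cleanup loop reduces to a small helper; a self-contained sketch of the same normalization (URL-like values kept verbatim, empty entries dropped; the Python 2 decode guard is omitted here):
import os
def clean_value(value):
    # Normalize pathsep-joined entries; leave URL-like values untouched.
    if "://" in value:
        return value
    valid_paths = [os.path.normpath(p) for p in value.split(os.pathsep) if p]
    return os.pathsep.join(valid_paths)
print(clean_value("/usr//bin" + os.pathsep + os.pathsep + "/tmp/../var"))
# -> '/usr/bin:/var' on POSIX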

View file

@ -48,6 +48,14 @@ class CreateWritePrerender(plugin.PypeCreator):
self.log.error(msg)
nuke.message(msg)
if len(nodes) == 0:
msg = (
"No nodes selected. Please select a single node to connect"
" to or tick off `Use selection`"
)
self.log.error(msg)
nuke.message(msg)
return
selected_node = nodes[0]
inputs = [selected_node]
outputs = selected_node.dependent()

View file

@ -49,6 +49,14 @@ class CreateWriteRender(plugin.PypeCreator):
self.log.error(msg)
nuke.message(msg)
if len(nodes) == 0:
msg = (
"No nodes selected. Please select a single node to connect"
" to or tick off `Use selection`"
)
self.log.error(msg)
nuke.message(msg)
return
selected_node = nodes[0]
inputs = [selected_node]
outputs = selected_node.dependent()

View file

@ -237,7 +237,7 @@ class LoadSequence(api.Loader):
repr_cont = representation["context"]
file = self.fname
file = api.get_representation_path(representation)
if not file:
repr_id = representation["_id"]

View file

@ -128,6 +128,9 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin):
# Top-level group name
"BatchName": script_name,
# Asset dependency to wait for at least the scene file to sync.
"AssetDependency0": script_path,
# Job name, as seen in Monitor
"Name": jobname,
@ -201,40 +204,32 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin):
if path.lower().startswith('pype_'):
environment[path] = os.environ[path]
environment["PATH"] = os.environ["PATH"]
# environment["PATH"] = os.environ["PATH"]
# self.log.debug("enviro: {}".format(environment['PYPE_SCRIPTS']))
clean_environment = {}
for key in environment:
for key, value in environment.items():
clean_path = ""
self.log.debug("key: {}".format(key))
to_process = environment[key]
if key == "PYPE_STUDIO_CORE_MOUNT":
clean_path = environment[key]
elif "://" in environment[key]:
clean_path = environment[key]
elif os.pathsep not in to_process:
try:
path = environment[key]
path.decode('UTF-8', 'strict')
clean_path = os.path.normpath(path)
except UnicodeDecodeError:
print('path contains non UTF characters')
if "://" in value:
clean_path = value
else:
for path in environment[key].split(os.pathsep):
valid_paths = []
for path in value.split(os.pathsep):
if not path:
continue
try:
path.decode('UTF-8', 'strict')
clean_path += os.path.normpath(path) + os.pathsep
valid_paths.append(os.path.normpath(path))
except UnicodeDecodeError:
print('path contains non UTF characters')
if valid_paths:
clean_path = os.pathsep.join(valid_paths)
if key == "PYTHONPATH":
clean_path = clean_path.replace('python2', 'python3')
clean_path = clean_path.replace(
os.path.normpath(
environment['PYPE_STUDIO_CORE_MOUNT']), # noqa
os.path.normpath(
environment['PYPE_STUDIO_CORE_PATH'])) # noqa
self.log.debug("clean path: {}".format(clean_path))
clean_environment[key] = clean_path
environment = clean_environment

View file

@ -159,7 +159,7 @@ class CollectReviews(api.InstancePlugin):
version_data.update({k: instance.data[k] for k in transfer_data})
if 'version' in instance.data:
version_data["version"] = instance.data[version]
version_data["version"] = instance.data["version"]
# add to data of representation
version_data.update({

View file

@ -13,5 +13,5 @@ class CollectAudioVersion(pyblish.api.InstancePlugin):
def process(self, instance):
self.log.info('Audio: {}'.format(instance.data['name']))
instance.data['version'] = '001'
instance.data['version'] = 1
self.log.info('Audio version to: {}'.format(instance.data['version']))

View file

@ -1,12 +0,0 @@
import pyblish.api
class CollectContextDataPremiera(pyblish.api.ContextPlugin):
"""Collecting data from temp json sent from premiera context"""
label = "Collect Premiera Context"
order = pyblish.api.CollectorOrder + 0.1
def process(self, context):
data_path = context.data['rqst_json_data_path']
self.log.info("Context is: {}".format(data_path))

View file

@ -19,16 +19,18 @@ class CollectFrameranges(pyblish.api.InstancePlugin):
metadata = instance.data.get('jsonData').get('metadata')
# getting important metadata for time calculation
fps = metadata['ppro.timeline.fps']
fps = float(metadata['ppro.timeline.fps'])
sec_start = metadata['ppro.clip.start']
sec_end = metadata['ppro.clip.end']
fstart = instance.data.get('fstart')
fstart = instance.data.get('frameStart')
fend = fstart + (sec_end * fps) - (sec_start * fps) - 1
self.log.debug("instance: {}, fps: {}\nsec_start: {}\nsec_end: {}\nfstart: {}\nfend: {}\n".format(
instance.data['name'],
fps, sec_start, sec_end, fstart, fend))
instance.data['startFrame'] = fstart
instance.data['endFrame'] = fend
instance.data['frameStart'] = fstart
instance.data['frameEnd'] = fend
instance.data['handleStart'] = instance.context.data['handleStart']
instance.data['handleEnd'] = instance.context.data['handleEnd']
instance.data['fps'] = metadata['ppro.timeline.fps']
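A worked example of the frame-range arithmetic above (hypothetical values):
fps = 25.0
sec_start, sec_end = 10.0, 14.0  # clip bounds in seconds
fstart = 1001
fend = fstart + (sec_end * fps) - (sec_start * fps) - 1
print(fend)  # -> 1100.0, i.e. a 100-frame clip (1001-1100)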

View file

@ -26,7 +26,7 @@ class CollectHierarchyContext(pyblish.api.ContextPlugin):
json_data = context.data.get("jsonData", None)
temp_context = {}
for instance in json_data['instances']:
if instance['family'] in 'projectfile':
if instance['family'] in 'workfile':
continue
in_info = {}
@ -35,10 +35,13 @@ class CollectHierarchyContext(pyblish.api.ContextPlugin):
in_info['entity_type'] = 'Shot'
instance_pyblish = [
i for i in context.data["instances"] if i.data['asset'] in name][0]
i for i in context.data["instances"]
if i.data['asset'] in name][0]
in_info['custom_attributes'] = {
'fend': instance_pyblish.data['endFrame'],
'fstart': instance_pyblish.data['startFrame'],
'frameStart': instance_pyblish.data['frameStart'],
'frameEnd': instance_pyblish.data['frameEnd'],
'handleStart': instance_pyblish.data['handleStart'],
'handleEnd': instance_pyblish.data['handleEnd'],
'fps': instance_pyblish.data['fps']
}

View file

@ -0,0 +1,83 @@
import os
import pyblish.api
class CollectClipRepresentations(pyblish.api.InstancePlugin):
"""
Collecting clip representations.
Args:
instance (obj): pyblish instance
"""
label = "Collect Clip Representations"
order = pyblish.api.CollectorOrder
families = ['clip']
def process(self, instance):
# add to representations
if not instance.data.get("representations"):
instance.data["representations"] = list()
ins_d = instance.data
staging_dir = ins_d["stagingDir"]
frame_start = ins_d["frameStart"]
frame_end = ins_d["frameEnd"]
handle_start = ins_d["handleStart"]
handle_end = ins_d["handleEnd"]
fps = ins_d["fps"]
files_list = ins_d.get("files")
if not files_list:
return
json_repr_ext = ins_d["jsonReprExt"]
json_repr_subset = ins_d["jsonReprSubset"]
if files_list:
file = next((f for f in files_list
if json_repr_subset in f), None)
else:
return
if json_repr_ext in ["mov", "mp4"]:
representation = {
"files": file,
"stagingDir": staging_dir,
"frameStart": frame_start,
"frameEnd": frame_end,
"frameStartFtrack": frame_start - handle_start,
"frameEndFtrack": frame_end - handle_end,
"step": 1,
"fps": fps,
"name": json_repr_subset,
"ext": json_repr_ext,
"tags": ["review", "delete"]
}
else:
representation = {
"files": file,
"stagingDir": staging_dir,
"step": 1,
"fps": fps,
"name": json_repr_subset,
"ext": json_repr_ext,
"tags": ["review"]
}
self.log.debug("representation: {}".format(representation))
instance.data["representations"].append(representation)
thumb = next((f for f in files_list
if "thumbnail" in f), None)
if thumb:
thumb_representation = {
'files': thumb,
'stagingDir': staging_dir,
'name': "thumbnail",
'thumbnail': True,
'ext': os.path.splitext(thumb)[-1].replace(".", "")
}
self.log.debug("representation: {}".format(thumb_representation))
instance.data["representations"].append(
thumb_representation)

View file

@ -0,0 +1,31 @@
import pyblish.api
class CollectResolution(pyblish.api.InstancePlugin):
"""
Collecting clip resolution from collected json metadata.
Args:
instance (obj): pyblish instance
"""
label = "Collect Clip Resolution"
order = pyblish.api.CollectorOrder
families = ['clip']
def process(self, instance):
# getting metadata from jsonData key
metadata = instance.data.get('jsonData').get('metadata')
# getting resolution and pixel aspect metadata
pixel_aspect = float(metadata['ppro.format.pixelaspect'])
res_width = metadata['ppro.format.width']
res_height = metadata['ppro.format.height']
instance.data['pixelAspect'] = pixel_aspect
instance.data['resolutionWidth'] = res_width
instance.data['resolutionHeight'] = res_height
self.log.info(f"Resolution was set to: `{res_width}x{res_height}`,"
f" and pixel aspect ration to: `{pixel_aspect}`")

View file

@ -1,144 +0,0 @@
import pyblish.api
import os
from avalon import io, api
class IntegrateAssumedDestination(pyblish.api.InstancePlugin):
"""Generate the assumed destination path where the file will be stored"""
label = "Integrate Assumed Destination"
order = pyblish.api.IntegratorOrder - 0.05
families = ["clip", "projectfile"]
def process(self, instance):
self.create_destination_template(instance)
template_data = instance.data["assumedTemplateData"]
# template = instance.data["template"]
anatomy = instance.context.data['anatomy']
# template = anatomy.publish.path
anatomy_filled = anatomy.format(template_data)
mock_template = anatomy_filled.publish.path
# For now assume resources end up in a "resources" folder in the
# published folder
mock_destination = os.path.join(os.path.dirname(mock_template),
"resources")
# Clean the path
mock_destination = os.path.abspath(os.path.normpath(mock_destination))
# Define resource destination and transfers
resources = instance.data.get("resources", list())
transfers = instance.data.get("transfers", list())
for resource in resources:
# Add destination to the resource
source_filename = os.path.basename(resource["source"])
destination = os.path.join(mock_destination, source_filename)
# Force forward slashes to fix issue with software unable
# to work correctly with backslashes in specific scenarios
# (e.g. escape characters in PLN-151 V-Ray UDIM)
destination = destination.replace("\\", "/")
resource['destination'] = destination
# Collect transfers for the individual files of the resource
# e.g. all individual files of a cache or UDIM textures.
files = resource['files']
for fsrc in files:
fname = os.path.basename(fsrc)
fdest = os.path.join(mock_destination, fname)
transfers.append([fsrc, fdest])
instance.data["resources"] = resources
instance.data["transfers"] = transfers
def create_destination_template(self, instance):
"""Create a filepath based on the current data available
Example template:
{root}/{project}/{silo}/{asset}/publish/{subset}/v{version:0>3}/
{subset}.{representation}
Args:
instance: the instance to publish
Returns:
file path (str)
"""
# get all the stuff from the database
subset_name = instance.data["subset"]
self.log.info(subset_name)
asset_name = instance.data["asset"]
project_name = api.Session["AVALON_PROJECT"]
project = io.find_one(
{
"type": "project",
"name": project_name
},
projection={"config": True, "data": True}
)
template = project["config"]["template"]["publish"]
# anatomy = instance.context.data['anatomy']
asset = io.find_one({
"type": "asset",
"name": asset_name,
"parent": project["_id"]
})
assert asset, ("No asset found by the name '{}' "
"in project '{}'".format(asset_name, project_name))
silo = asset.get('silo')
subset = io.find_one({
"type": "subset",
"name": subset_name,
"parent": asset["_id"]
})
# assume there is no version yet, we start at `1`
version = None
version_number = 1
if subset is not None:
version = io.find_one(
{
"type": "version",
"parent": subset["_id"]
},
sort=[("name", -1)]
)
# if there is a subset there ought to be version
if version is not None:
version_number += version["name"]
if instance.data.get('version'):
version_number = int(instance.data.get('version'))
hierarchy = asset['data']['parents']
if hierarchy:
# hierarchy = os.path.sep.join(hierarchy)
hierarchy = os.path.join(*hierarchy)
template_data = {"root": api.Session["AVALON_PROJECTS"],
"project": {"name": project_name,
"code": project['data']['code']},
"silo": silo,
"family": instance.data['family'],
"asset": asset_name,
"subset": subset_name,
"version": version_number,
"hierarchy": hierarchy,
"representation": "TEMP"}
instance.data["assumedTemplateData"] = template_data
self.log.info(template_data)
instance.data["template"] = template

View file

@ -1,140 +0,0 @@
import pyblish.api
from avalon import io
class IntegrateHierarchyToAvalon(pyblish.api.ContextPlugin):
"""
Create entities in ftrack based on collected data from premiere
"""
order = pyblish.api.IntegratorOrder - 0.1
label = 'Integrate Hierarchy To Avalon'
families = ['clip']
def process(self, context):
if "hierarchyContext" not in context.data:
return
self.db = io
if not self.db.Session:
self.db.install()
input_data = context.data["hierarchyContext"]
self.import_to_avalon(input_data)
def import_to_avalon(self, input_data, parent=None):
for name in input_data:
self.log.info('input_data[name]: {}'.format(input_data[name]))
entity_data = input_data[name]
entity_type = entity_data['entity_type']
data = {}
# Process project
if entity_type.lower() == 'project':
entity = self.db.find_one({'type': 'project'})
# TODO: should be in validator?
assert (entity is not None), "Didn't find project in DB"
# get data from already existing project
for key, value in entity.get('data', {}).items():
data[key] = value
self.av_project = entity
# Raise error if project or parent are not set
elif self.av_project is None or parent is None:
raise AssertionError(
"Collected items are not in right order!"
)
# Else process assset
else:
entity = self.db.find_one({'type': 'asset', 'name': name})
# Create entity if doesn't exist
if entity is None:
if self.av_project['_id'] == parent['_id']:
silo = None
elif parent['silo'] is None:
silo = parent['name']
else:
silo = parent['silo']
entity = self.create_avalon_asset(name, silo)
self.log.info('entity: {}'.format(entity))
self.log.info('data: {}'.format(entity.get('data', {})))
self.log.info('____1____')
data['entityType'] = entity_type
# TASKS
tasks = entity_data.get('tasks', [])
if tasks is not None or len(tasks) > 0:
data['tasks'] = tasks
parents = []
visualParent = None
data = input_data[name]
if self.av_project['_id'] != parent['_id']:
visualParent = parent['_id']
parents.extend(parent.get('data', {}).get('parents', []))
parents.append(parent['name'])
data['visualParent'] = visualParent
data['parents'] = parents
self.db.update_many(
{'_id': entity['_id']},
{'$set': {
'data': data,
}})
entity = self.db.find_one({'type': 'asset', 'name': name})
self.log.info('entity: {}'.format(entity))
self.log.info('data: {}'.format(entity.get('data', {})))
self.log.info('____2____')
# Else get data from already existing
else:
self.log.info('entity: {}'.format(entity))
self.log.info('data: {}'.format(entity.get('data', {})))
self.log.info('________')
for key, value in entity.get('data', {}).items():
data[key] = value
data['entityType'] = entity_type
# TASKS
tasks = entity_data.get('tasks', [])
if tasks is not None or len(tasks) > 0:
data['tasks'] = tasks
parents = []
visualParent = None
# do not store project's id as visualParent (silo asset)
if self.av_project['_id'] != parent['_id']:
visualParent = parent['_id']
parents.extend(parent.get('data', {}).get('parents', []))
parents.append(parent['name'])
data['visualParent'] = visualParent
data['parents'] = parents
# CUSTOM ATTRIBUTES
for k, val in entity_data.get('custom_attributes', {}).items():
data[k] = val
# Update entity data with input data
self.db.update_many(
{'_id': entity['_id']},
{'$set': {
'data': data,
}})
if 'childs' in entity_data:
self.import_to_avalon(entity_data['childs'], entity)
def create_avalon_asset(self, name, silo):
item = {
'schema': 'avalon-core:asset-2.0',
'name': name,
'silo': silo,
'parent': self.av_project['_id'],
'type': 'asset',
'data': {}
}
entity_id = self.db.insert_one(item).inserted_id
return self.db.find_one({'_id': entity_id})

View file

@ -1,171 +0,0 @@
import sys
import pyblish.api
import six
class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):
"""
Create entities in ftrack based on collected data from premiere
Example of entry data:
{
"ProjectXS": {
"entity_type": "Project",
"custom_attributes": {
"fps": 24,...
},
"tasks": [
"Compositing",
"Lighting",... *task must exist as task type in project schema*
],
"childs": {
"sq01": {
"entity_type": "Sequence",
...
}
}
}
}
"""
order = pyblish.api.IntegratorOrder
label = 'Integrate Hierarchy To Ftrack'
families = ["clip"]
optional = False
def process(self, context):
self.context = context
if "hierarchyContext" not in context.data:
return
self.ft_project = None
self.session = context.data["ftrackSession"]
input_data = context.data["hierarchyContext"]
# adding ftrack types from presets
ftrack_types = context.data['ftrackTypes']
self.import_to_ftrack(input_data, ftrack_types)
def import_to_ftrack(self, input_data, ftrack_types, parent=None):
for entity_name in input_data:
entity_data = input_data[entity_name]
entity_type = entity_data['entity_type'].capitalize()
if entity_type.lower() == 'project':
query = 'Project where full_name is "{}"'.format(entity_name)
entity = self.session.query(query).one()
self.ft_project = entity
self.task_types = self.get_all_task_types(entity)
elif self.ft_project is None or parent is None:
raise AssertionError(
"Collected items are not in right order!"
)
# try to find if entity already exists
else:
query = '{} where name is "{}" and parent_id is "{}"'.format(
entity_type, entity_name, parent['id']
)
try:
entity = self.session.query(query).one()
except Exception:
entity = None
# Create entity if not exists
if entity is None:
entity = self.create_entity(
name=entity_name,
type=entity_type,
parent=parent
)
# self.log.info('entity: {}'.format(dict(entity)))
# CUSTOM ATTRIBUTES
custom_attributes = entity_data.get('custom_attributes', [])
instances = [
i for i in self.context.data["instances"] if i.data['asset'] in entity['name']]
for key in custom_attributes:
assert (key in entity['custom_attributes']), (
'Missing custom attribute')
entity['custom_attributes'][key] = custom_attributes[key]
for instance in instances:
instance.data['ftrackShotId'] = entity['id']
try:
self.session.commit()
except Exception:
tp, value, tb = sys.exc_info()
self.session.rollback()
six.reraise(tp, value, tb)
# TASKS
tasks = entity_data.get('tasks', [])
existing_tasks = []
tasks_to_create = []
for child in entity['children']:
if child.entity_type.lower() == 'task':
existing_tasks.append(child['name'])
# existing_tasks.append(child['type']['name'])
for task in tasks:
if task in existing_tasks:
print("Task {} already exists".format(task))
continue
tasks_to_create.append(task)
for task in tasks_to_create:
self.create_task(
name=task,
task_type=ftrack_types[task],
parent=entity
)
if 'childs' in entity_data:
self.import_to_ftrack(
entity_data['childs'], ftrack_types, entity)
def get_all_task_types(self, project):
tasks = {}
proj_template = project['project_schema']
temp_task_types = proj_template['_task_type_schema']['types']
for type in temp_task_types:
if type['name'] not in tasks:
tasks[type['name']] = type
return tasks
def create_task(self, name, task_type, parent):
task = self.session.create('Task', {
'name': name,
'parent': parent
})
# TODO not secured!!! - check if task_type exists
self.log.info(task_type)
self.log.info(self.task_types)
task['type'] = self.task_types[task_type]
try:
self.session.commit()
except Exception:
tp, value, tb = sys.exc_info()
self.session.rollback()
six.reraise(tp, value, tb)
return task
def create_entity(self, name, type, parent):
entity = self.session.create(type, {
'name': name,
'parent': parent
})
try:
self.session.commit()
except Exception:
tp, value, tb = sys.exc_info()
self.session.rollback()
six.reraise(tp, value, tb)
return entity

View file

@ -1,160 +1,71 @@
import os
import sys
import shutil
from pysync import walktree
from avalon import api as avalon
from avalon.lib import launch
from pyblish import api as pyblish
from app import api as app
from pprint import pprint
from .. import api
from pypeapp import Logger
import requests
from .lib import (
setup,
reload_pipeline,
ls,
LOAD_PATH,
CREATE_PATH,
PUBLISH_PATH
)
__all__ = [
"setup",
"reload_pipeline",
"ls"
]
log = api.Logger.getLogger(__name__, "premiere")
AVALON_CONFIG = os.getenv("AVALON_CONFIG", "pype")
EXTENSIONS_PATH_LOCAL = os.getenv("EXTENSIONS_PATH", None)
EXTENSIONS_CACHE_PATH = os.getenv("EXTENSIONS_CACHE_PATH", None)
EXTENSIONS_PATH_REMOTE = os.path.join(os.path.dirname(__file__), "extensions")
PARENT_DIR = os.path.dirname(__file__)
PACKAGE_DIR = os.path.dirname(PARENT_DIR)
PLUGINS_DIR = os.path.join(PACKAGE_DIR, "plugins")
_clearing_cache = ["com.pype.rename", "com.pype.avalon"]
PUBLISH_PATH = os.path.join(
PLUGINS_DIR, "premiere", "publish"
).replace("\\", "/")
if os.getenv("PUBLISH_PATH", None):
os.environ["PUBLISH_PATH"] = os.pathsep.join(
os.environ["PUBLISH_PATH"].split(os.pathsep) +
[PUBLISH_PATH]
)
else:
os.environ["PUBLISH_PATH"] = PUBLISH_PATH
LOAD_PATH = os.path.join(PLUGINS_DIR, "premiere", "load")
CREATE_PATH = os.path.join(PLUGINS_DIR, "premiere", "create")
INVENTORY_PATH = os.path.join(PLUGINS_DIR, "premiere", "inventory")
def clearing_caches_ui():
'''Before every start of Premiere, make sure there is no
outdated stuff in the cep_cache dir.'''
for d in os.listdir(EXTENSIONS_CACHE_PATH):
match = [p for p in _clearing_cache
if str(p) in d]
if match:
try:
path = os.path.normpath(os.path.join(EXTENSIONS_CACHE_PATH, d))
log.info("Removing dir: {}".format(path))
shutil.rmtree(path, ignore_errors=True)
except Exception as e:
log.debug("problem: {}".format(e))
def request_aport(url_path, data={}):
try:
api.add_tool_to_environment(["aport_0.1"])
ip = os.getenv("PICO_IP", None)
if ip and ip.startswith('http'):
ip = ip.replace("http://", "")
port = int(os.getenv("PICO_PORT", None))
url = "http://{0}:{1}{2}".format(ip, port, url_path)
req = requests.post(url, data=data).text
return req
except Exception as e:
api.message(title="Premiere Aport Server",
message="Before you can run Premiere, start Aport Server. \n Error: {}".format(
e),
level="critical")
def extensions_sync():
# import time
process_pairs = list()
# get extensions dir in pype.premiere.extensions
# build dir path to premiere cep extensions
for name in os.listdir(EXTENSIONS_PATH_REMOTE):
print(name)
src = os.path.join(EXTENSIONS_PATH_REMOTE, name)
dst = os.path.join(EXTENSIONS_PATH_LOCAL, name)
process_pairs.append((name, src, dst))
# synchronize all extensions
for name, src, dst in process_pairs:
if not os.path.exists(dst):
os.makedirs(dst, mode=0o777)
walktree(source=src, target=dst, options_input=["y", ">"])
log.info("Extension {0} from `{1}` coppied to `{2}`".format(
name, src, dst
))
# time.sleep(10)
return
log = Logger().get_logger(__name__, "premiere")
def install():
api.set_avalon_workdir()
log.info("Registering Premiera plug-ins..")
reg_paths = request_aport("/api/register_plugin_path",
{"publish_path": PUBLISH_PATH})
"""Install Premiere-specific functionality of avalon-core.
# avalon.register_plugin_path(avalon.Loader, LOAD_PATH)
# avalon.register_plugin_path(avalon.Creator, CREATE_PATH)
# avalon.register_plugin_path(avalon.InventoryAction, INVENTORY_PATH)
This is where you install menus and register families, data
and loaders into Premiere.
It is called automatically when installing via `api.install(premiere)`.
See the Maya equivalent for inspiration on how to implement this.
"""
# Disable all families except for the ones we explicitly want to see
# family_states = [
# "imagesequence",
# "mov"
#
# ]
# avalon.data["familiesStateDefault"] = False
# avalon.data["familiesStateToggled"] = family_states
family_states = [
"imagesequence",
"mov"
]
avalon.data["familiesStateDefault"] = False
avalon.data["familiesStateToggled"] = family_states
# load data from templates
api.load_data_from_templates()
log.info("pype.premiere installed")
# remove cep_cache from user temp dir
clearing_caches_ui()
pyblish.register_host("premiere")
pyblish.register_plugin_path(PUBLISH_PATH)
log.info("Registering Premiera plug-ins..")
# synchronize extensions
extensions_sync()
message = "The Pype extension has been installed. " \
"\nThe following publishing paths has been registered: " \
"\n\n{}".format(
reg_paths)
api.message(title="pyblish_paths", message=message, level="info")
# launching premiere
exe = r"C:\Program Files\Adobe\Adobe Premiere Pro CC 2019\Adobe Premiere Pro.exe".replace(
"\\", "/")
log.info("____path exists: {}".format(os.path.exists(exe)))
app.forward(args=[exe],
silent=False,
cwd=os.getcwd(),
env=dict(os.environ),
shell=None)
avalon.register_plugin_path(avalon.Loader, LOAD_PATH)
avalon.register_plugin_path(avalon.Creator, CREATE_PATH)
def uninstall():
log.info("Deregistering Premiera plug-ins..")
"""Uninstall all tha was installed
This is where you undo everything that was done in `install()`.
That means, removing menus, deregistering families and data
and everything. It should be as though `install()` was never run,
because odds are calling this function means the user is interested
in re-installing shortly afterwards, for example after modifying the
menu or registered families.
"""
pyblish.deregister_host("premiere")
pyblish.deregister_plugin_path(PUBLISH_PATH)
log.info("Deregistering Premiera plug-ins..")
avalon.deregister_plugin_path(avalon.Loader, LOAD_PATH)
avalon.deregister_plugin_path(avalon.Creator, CREATE_PATH)
# reset data from templates
api.reset_data_from_templates()

View file

@ -0,0 +1,20 @@
@echo off
rem You need https://github.com/Adobe-CEP/CEP-Resources/raw/master/ZXPSignCMD/4.1.1/win64/ZXPSignCmd.exe
rem You need https://partners.adobe.com/exchangeprogram/creativecloud/support/exman-com-line-tool.html
rem !!! make sure you run Windows PowerShell as admin
set pwd="12PPROext581"
echo ">>> creating certificate ..."
.\ZXPSignCmd -selfSignedCert CZ Prague OrbiTools "Signing robot" %pwd% certificate.p12
echo ">>> building com.pype"
.\ZXPSignCmd -sign com.pype/ pype.zxp certificate.p12 %pwd%
echo ">>> building com.pype.rename"
.\ZXPSignCmd -sign com.pype.rename/ pype_rename.zxp certificate.p12 %pwd%
echo ">>> installing com.pype"
.\ExManCmd.exe /install .\pype.zxp
echo ">>> installing com.pype.rename"
.\ExManCmd.exe /install .\pype_rename.zxp

View file

@ -1,8 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<ExtensionList>
<Extension Id="com.pype.avalon">
<HostList>
<Host Name="PPRO" Port="7778"/>
</HostList>
</Extension>
</ExtensionList>

View file

@ -1,17 +0,0 @@
<!doctype html>
<html lang="en">
<head>
<meta charset="utf-8">
<title>Avalon</title>
<link rel="stylesheet" type="text/css" href="css/app.css">
</head>
<body onLoad="onLoaded()">
</body>
<script src="js/app.js"></script>
<script src="js/json2.js"></script>
</html>

View file

@ -1,60 +0,0 @@
/*************************************************************************
* ADOBE CONFIDENTIAL
* ___________________
*
* Copyright 2014 Adobe
* All Rights Reserved.
*
* NOTICE: Adobe permits you to use, modify, and distribute this file in
* accordance with the terms of the Adobe license agreement accompanying
* it. If you have received this file from a source other than Adobe,
* then your use, modification, or distribution of it requires the prior
* written permission of Adobe.
**************************************************************************/
// time display types
TIMEDISPLAY_24Timecode = 100;
TIMEDISPLAY_25Timecode = 101;
TIMEDISPLAY_2997DropTimecode = 102;
TIMEDISPLAY_2997NonDropTimecode = 103;
TIMEDISPLAY_30Timecode = 104;
TIMEDISPLAY_50Timecode = 105;
TIMEDISPLAY_5994DropTimecode = 106;
TIMEDISPLAY_5994NonDropTimecode = 107;
TIMEDISPLAY_60Timecode = 108;
TIMEDISPLAY_Frames = 109;
TIMEDISPLAY_23976Timecode = 110;
TIMEDISPLAY_16mmFeetFrames = 111;
TIMEDISPLAY_35mmFeetFrames = 112;
TIMEDISPLAY_48Timecode = 113;
TIMEDISPLAY_AudioSamplesTimecode = 200;
TIMEDISPLAY_AudioMsTimecode = 201;
// field type constants
FIELDTYPE_Progressive = 0;
FIELDTYPE_UpperFirst = 1;
FIELDTYPE_LowerFirst = 2;
// audio channel types
AUDIOCHANNELTYPE_Mono = 0;
AUDIOCHANNELTYPE_Stereo = 1;
AUDIOCHANNELTYPE_51 = 2;
AUDIOCHANNELTYPE_Multichannel = 3;
AUDIOCHANNELTYPE_4Channel = 4;
AUDIOCHANNELTYPE_8Channel = 5;
// vr projection type
VRPROJECTIONTYPE_None = 0;
VRPROJECTIONTYPE_Equirectangular = 1;
// vr stereoscopic type
VRSTEREOSCOPICTYPE_Monoscopic = 0;
VRSTEREOSCOPICTYPE_OverUnder = 1;
VRSTEREOSCOPICTYPE_SideBySide = 2;
NOT_SET = -400000;

File diff suppressed because it is too large

View file

@ -1,178 +0,0 @@
/* global app, XMPMeta, ExternalObject, CSXSEvent, Folder */
/* --------------------------------------
-. == [ part 0f PyPE CluB ] == .-
_______________.___._____________________
\______ \__ | |\______ \_ _____/
| ___// | | | ___/| __)_
| | \____ | | | | \
|____| / ______| |____| /_______ /
\/ \/
.. __/ CliP R3N4M3R \__ ..
*/
// variable br is defined in pypeAvalon.jsx
br = {
getSelectedVideoTrackItems: function () {
var seq = app.project.activeSequence;
var selected = [];
var videoTracks = seq.videoTracks;
var numOfVideoTracks = videoTracks.numTracks;
// VIDEO CLIPS IN SEQUENCES
for (var l = 0; l < numOfVideoTracks; l++) {
var videoTrack = seq.videoTracks[l];
if (videoTrack.isTargeted()) {
$.writeln(videoTrack.name);
var numOfClips = videoTrack.clips.numTracks;
for (var m = 0; m < numOfClips; m++) {
var clip = videoTrack.clips[m];
selected.push({
'name': clip.name,
'clip': clip,
'sequence': seq,
'videoTrack': videoTrack
});
}
}
}
var names = [];
var items = {};
var sorted = [];
for (var c = 0; c < selected.length; c++) {
items[selected[c].name] = selected[c];
names.push(selected[c].name);
}
names.sort();
for (var cl = 0; cl < names.length; cl++) {
sorted.push(items[names[cl]]);
}
return sorted;
},
/**
* Set Pype metadata into sequence metadata using XMP.
* This is `hackish` way to get over premiere lack of addressing unique clip on timeline,
* so we cannot store data directly per clip.
*
* @param {Object} sequence - sequence object
* @param {Object} data - to be serialized and saved
*/
setSequencePypeMetadata: function (sequence, data) { // eslint-disable-line no-unused-vars
var kPProPrivateProjectMetadataURI = 'http://ns.adobe.com/premierePrivateProjectMetaData/1.0/';
var metadata = sequence.projectItem.getProjectMetadata();
var pypeData = 'pypeData';
var xmp = new XMPMeta(metadata);
app.project.addPropertyToProjectMetadataSchema(pypeData, 'Pype Data', 2);
xmp.setProperty(kPProPrivateProjectMetadataURI, pypeData, JSON.stringify(data));
var str = xmp.serialize();
sequence.projectItem.setProjectMetadata(str, [pypeData]);
// test
var newMetadata = sequence.projectItem.getProjectMetadata();
var newXMP = new XMPMeta(newMetadata);
var found = newXMP.doesPropertyExist(kPProPrivateProjectMetadataURI, pypeData);
if (!found) {
app.setSDKEventMessage('metadata not set', 'error');
}
},
/**
* Get Pype metadata from sequence using XMP.
* @param {Object} sequence
* @return {Object}
*/
getSequencePypeMetadata: function (sequence) { // eslint-disable-line no-unused-vars
var kPProPrivateProjectMetadataURI = 'http://ns.adobe.com/premierePrivateProjectMetaData/1.0/';
var metadata = sequence.projectItem.getProjectMetadata();
var pypeData = 'pypeData';
var pypeDataN = 'Pype Data';
var xmp = new XMPMeta(metadata);
app.project.addPropertyToProjectMetadataSchema(pypeData, pypeDataN, 2);
var pypeDataValue = xmp.getProperty(kPProPrivateProjectMetadataURI, pypeData);
$.writeln('pypeDataValue');
$.writeln(pypeDataValue);
if (pypeDataValue === undefined) {
var pyMeta = {
clips: {},
tags: {}
};
br.setSequencePypeMetadata(sequence, pyMeta);
pypeDataValue = xmp.getProperty(kPProPrivateProjectMetadataURI, pypeData);
return br.getSequencePypeMetadata(sequence);
} else {
return JSON.parse(pypeDataValue);
}
},
renameTargetedTextLayer: function (data) {
$.writeln(data);
var selected = br.getSelectedVideoTrackItems();
var seq = app.project.activeSequence;
var metadata = br.getSequencePypeMetadata(seq);
var startCount = 10;
var stepCount = 10;
var padding = 3;
var newItems = {};
var episode = data.ep;
var episodeSuf = data.epSuffix;
var shotPref = 'sh';
var count = 0;
var seqCheck = '';
for (var c = 0; c < selected.length; c++) {
// fill in hierarchy if set
var parents = [];
var hierarchy = [];
var name = selected[c].name;
var sequenceName = name.slice(0, 5);
var shotNum = Number(name.slice((name.length - 3), name.length));
// if (sequenceName !== seqCheck) {
// seqCheck = sequenceName;
// count = 0;
// };
//
// var seqCount = (count * stepCount) + startCount;
// count += 1;
var newName = episode + sequenceName + shotPref + (shotNum).pad(padding);
$.writeln(newName);
selected[c].clip.name = newName;
parents.push({
'entityType': 'episode',
'entityName': episode + '_' + episodeSuf
});
hierarchy.push(episode + '_' + episodeSuf);
parents.push({
'entityType': 'sequence',
'entityName': episode + sequenceName
});
hierarchy.push(episode + sequenceName);
newItems[newName] = {
'parents': parents,
'hierarchy': hierarchy.join('/')
};
}
metadata.clips = newItems;
br.setSequencePypeMetadata(seq, metadata);
return JSON.stringify(metadata);
}
};
Number.prototype.pad = function (size) {
var s = String(this);
while (s.length < (size || 2)) {
s = "0" + s;
}
return s;
}

File diff suppressed because it is too large

View file

@ -1 +0,0 @@
application/vnd.adobe.air-ucf-package+zip

View file

@ -1,3 +0,0 @@
body{background-color:#323238;color:#eeeeee}#output{background:#121212;color:#eeeeee;padding:2em;font-family:monospace;font-weight:bold;min-height:8em}.dark>.list-group-item{background:#454747}
/*# sourceMappingURL=avalon.min.css.map */

View file

@ -1,162 +0,0 @@
<!DOCTYPE html>
<html>
<html lang="en">
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1, shrink-to-fit=no">
<title>Pype extention</title>
<!-- Load the pico Javascript client, always automatically available at /pico.js -->
<script src="/pico.js"></script>
<!-- Or load our module proxy -->
<script src="/api.js"></script>
<link href="./css/bootstrap.min.css" type="text/css" rel="stylesheet">
<link href="./css/avalon.min.css" type="text/css" rel="stylesheet">
<script>
if (typeof module === 'object') {
window.module = module;
module = undefined;
}
</script>
<script src="./js/vendor/jquery-3.3.1.min.js"></script>
<script src="./js/vendor/CSInterface-8.js"></script>
<script src="./js/vendor/popper.min.js"></script>
<script src="./js/vendor/bootstrap.min.js"></script>
<script src="./js/vendor/json2.js"></script>
<script>
if (window.module) module = window.module;
var ENV;
</script>
</head>
<body>
<div id="section"><a href="javascript:history.go(0)">Refresh panel</a>
<ul class="list-group list-group-flush dark">
<li class="list-group-item" id="rename">
<div class="input-group input-group-sm mb-1">
<div class="input-group-prepend">
<span class="input-group-text" id="basic-addon6">Rename targeted text layers<br /> converts sc010sh020 <br />to lbb201sc010sh020<br />and creates ftrack metadata</span>
<div class="input-group-text">
<input type="text" name="episode" placeholder="lbb2" aria-label="episode" aria-describedby="basic-addon5" value="" style="width:75px;">
</div>
<div class="input-group-text">
<input type="text" name="ep_suffix" placeholder="nameofepisode" aria-label="Name of episode" aria-describedby="basic-addon5" value="">
</div>
</div>
<div class="input-group-append">
<button id="btn-rename" type="button" class="btn btn-info btn-sm btn-block">Rename</button>
</div>
</div>
</li>
<li class="list-group-item" id="publish">
<h5>Publish</h5>
<pre><code class="js"></code></pre>
<div class="input-group input-group-lg mb-4">
<!-- <div class="input-group-prepend">
<span class="input-group-text" id="basic-addon6">GUI</span>
<div class="input-group-text">
<input type="checkbox" name="gui" checked="checked" aria-label="Checkbox for following text input">
</div>
</div> -->
<div class="input-group-prepend">
<span class="input-group-text" id="basic-addon6">Version Up</span>
<div class="input-group-text">
<input type="checkbox" name="version-up" checked="checked" aria-label="Checkbox for following text input">
</div>
<span class="input-group-text" id="basic-addon6">
Audio Only</span>
<div class="input-group-text">
<input type="checkbox" name="audio-only" aria-label="Check if you want to export only audio">
</div>
</div>
<div class="input-group-append">
<button id="btn-publish" type="button" class="btn btn-info btn-sm btn-block">Publish</button>
</div>
</div>
<div class="input-group input-group-sm mb-1">
<div class="input-group-prepend">
<span class="input-group-text" id="basic-addon6">Path to sending data json</span>
<div class="input-group-text">
<input type="text" name="send-path" placeholder="Path" aria-label="Path" aria-describedby="basic-addon5" value="">
</div>
</div>
<div class="input-group-append">
<button id="btn-send-reset" type="button" class="btn btn-info btn-sm btn-block">Reset</button>
</div>
</div>
<div class="input-group input-group-sm mb-3">
<div class="input-group-prepend">
<span class="input-group-text" id="basic-addon6">Path to getting data json</span>
<div class="input-group-text">
<input type="text" name="get-path" placeholder="Path" aria-label="Path" aria-describedby="basic-addon5" value="">
</div>
</div>
<div class="input-group-prepend">
<button id="btn-get-reset" type="button" class="btn btn-info btn-sm btn-block">Reset</button>
</div>
<div class="input-group-append">
<button id="btn-metadata" type="button" class="btn btn-info btn-sm btn-block">Injest metadata</button>
</div>
</div>
</li>
<li class="list-group-item"><button type="button" class="btn btn-info btn-sm btn-block" id="btn-newWorkfileVersion">Save next workfile version</button></li>
<li class="list-group-item"><button type="button" class="btn btn-info btn-sm btn-block" id="btn-get-frame">Get screen grab</button></li>
<li class="list-group-item" id="load">
<h5>Load/Update assets to timeline</h5>
<pre><code class="js"></code></pre>
<div class="input-group-prepend">
<span class="input-group-text" id="basic-addon6">Type</span>
<div class="input-group-text">
<input type="text" name="type" placeholder="renderAnimation" aria-label="Asset Type" aria-describedby="basic-addon5" value="renderAnimation">
</div>
<span class="input-group-text" id="basic-addon6">Ext</span>
<div class="input-group-text">
<input type="text" name="ext" placeholder="mxf" aria-label="File Extension" aria-describedby="basic-addon5" value="mxf">
</div>
<div class="input-group-append">
<button type="button" class="btn btn-info btn-sm btn-block" id="btn-getRernderAnimation">DO IT!</button>
</div>
</div>
</li>
</ul>
<hr />
<div class="col-md-6" id="source">
<!-- <pre>
<code class="python"></code>
</pre> -->
</div>
<h5>Output</h5>
<div class="row" id="output">
</div>
<script src="./js/pico_client.js"></script>
<script src="./js/avalon.js"></script>
</body>
</html>

@ -1,367 +0,0 @@
/* global CSInterface, $, querySelector, api, displayResult */
var csi = new CSInterface();
var output = document.getElementById('output');
var rootFolderPath = csi.getSystemPath(SystemPath.EXTENSION);
var timecodes = cep_node.require('node-timecodes');
var process = cep_node.require('process');
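// getEnv merges the project data returned from the JSX layer into process.env,
// exposes it as window.ENV, and pushes the result back to the host via pype.setEnvs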
function getEnv() {
csi.evalScript('pype.getProjectFileData();', function (result) {
process.env.EXTENSION_PATH = rootFolderPath;
window.ENV = process.env;
var resultData = JSON.parse(result);
for (var key in resultData) {
window.ENV[key] = resultData[key];
}
csi.evalScript('pype.setEnvs(' + JSON.stringify(window.ENV) + ')');
});
}
function renderClips() {
// quote the path argument so the JSX side receives it as a string literal
csi.evalScript('pype.transcodeExternal("' + rootFolderPath + '");', function (result) {
displayResult(result);
});
}
function displayResult(r) {
console.log(r);
csi.evalScript('$.writeln( ' + JSON.stringify(r) + ' )');
output.classList.remove("error");
output.innerText = r;
}
function displayError(e) {
output.classList.add("error");
output.innerText = e.message;
}
function loadJSX() {
// get the appName of the currently used app. For Premiere Pro it's "PPRO"
var appName = csi.hostEnvironment.appName;
var extensionPath = csi.getSystemPath(SystemPath.EXTENSION);
// load general JSX script independent of appName
var extensionRootGeneral = extensionPath + '/jsx/';
csi.evalScript('$._ext.evalFiles("' + extensionRootGeneral + '")');
// load JSX scripts based on appName
var extensionRootApp = extensionPath + '/jsx/' + appName + '/';
csi.evalScript('$._ext.evalFiles("' + extensionRootApp + '")');
// csi.evalScript('$._PPP_.logConsoleOutput()');
getEnv();
csi.evalScript('$._PPP_.updateEventPanel( "' + "all plugins are loaded" + '" )');
csi.evalScript('$._PPP_.updateEventPanel( "' + "testing function done" + '" )');
}
// run all at loading
loadJSX()
function loadAnimationRendersToTimeline() {
// reads the asset type and file extension from the panel inputs
// and starts the loading script on the jsx side
var $ = querySelector('#load');
var data = {};
data.subset = $('input[name=type]').value;
data.subsetExt = $('input[name=ext]').value;
var requestList = [];
// get all selected clips
csi.evalScript('pype.getClipsForLoadingSubsets( "' + data.subset + '" )', function (result) {
// TODO: need to check if the clips are already created and this is just updating to last versions
var resultObj = JSON.parse(result);
var instances = resultObj[0];
var numTracks = resultObj[1];
var key = '';
// creating requesting list of dictionaries
for (key in instances) {
var clipData = {};
clipData.parentClip = instances[key];
clipData.asset = key;
clipData.subset = data.subset;
clipData.representation = data.subsetExt;
requestList.push(clipData);
}
// gets data from mongodb
api.load_representations(window.ENV['AVALON_PROJECT'], requestList).then(
function (avalonData) {
// creates or updates data on timeline
var makeData = {};
makeData.binHierarchy = data.subset + '/' + data.subsetExt;
makeData.clips = avalonData;
makeData.numTracks = numTracks;
csi.evalScript('pype.importFiles( ' + JSON.stringify(makeData) + ' )');
}
);
});
}
function evalScript(script) {
var callback = function (result) {
displayResult(result);
};
csi.evalScript(script, callback);
}
function deregister() {
api.deregister_plugin_path().then(displayResult);
}
function register() {
var $ = querySelector('#register');
var path = $('input[name=path]').value;
api.register_plugin_path(path).then(displayResult);
}
function getStagingDir() {
// create stagingDir
const fs = require('fs-extra');
const os = require('os');
const path = require('path');
const UUID = require('pure-uuid');
const id = new UUID(4).format();
const stagingDir = path.join(os.tmpdir(), id);
fs.mkdirsSync(stagingDir); // create synchronously so the directory exists before its path is returned
return stagingDir;
}
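// returns a fresh uuid-named folder under the OS temp dir,
// e.g. C:/Users/me/AppData/Local/Temp/1b4e28ba-... (illustrative path; separators depend on the OS)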
function convertPathString(path) {
return path.replace(
new RegExp('\\\\', 'g'), '/').replace(new RegExp('//\\?/', 'g'), '');
}
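// e.g. convertPathString('\\\\?\\C:\\proj\\file.mov') === 'C:/proj/file.mov'
// (flips backslashes to forward slashes and strips the Windows long-path prefix)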
function publish() {
var $ = querySelector('#publish');
// var gui = $('input[name=gui]').checked;
var gui = true;
var versionUp = $('input[name=version-up]').checked;
var audioOnly = $('input[name=audio-only]').checked;
var jsonSendPath = $('input[name=send-path]').value;
var jsonGetPath = $('input[name=get-path]').value;
var publish_path = window.ENV['PUBLISH_PATH'];
// require modules up front so both branches below can use them
// (fs and jsonfile were previously scoped to the first branch only,
// which left them undefined when publishing from an existing json path)
const fs = require('fs-extra');
const path = require('path');
const jsonfile = require('jsonfile');
if (jsonSendPath === '') {
// create a temp staging directory locally
var stagingDir = convertPathString(getStagingDir());
// copy project file to stagingDir
csi.evalScript('pype.getProjectFileData();', function (result) {
displayResult(result);
var data = JSON.parse(result);
displayResult(stagingDir);
displayResult(data.projectfile);
var destination = convertPathString(path.join(stagingDir, data.projectfile));
displayResult('copy project file');
displayResult(data.projectfile);
displayResult(destination);
fs.copyFile(data.projectpath, destination);
displayResult('project file copied!');
});
// publishing file
csi.evalScript('pype.getPyblishRequest("' + stagingDir + '", ' + audioOnly + ');', function (r) {
var request = JSON.parse(r);
displayResult(JSON.stringify(request));
csi.evalScript('pype.encodeRepresentation(' + JSON.stringify(request) + ');', function (result) {
// create json for pyblish
var jsonSendPath = stagingDir + '_send.json';
var jsonGetPath = stagingDir + '_get.json';
$('input[name=send-path]').value = jsonSendPath;
$('input[name=get-path]').value = jsonGetPath;
var jsonContent = JSON.parse(result);
jsonfile.writeFile(jsonSendPath, jsonContent);
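// checkingFile polls once a second until the file named by jsonContent.waitingFor
// appears on disk, then registers the publish plugin path and hands the json pair to pyblish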
var checkingFile = function (path) {
var timeout = 1000;
setTimeout(function () {
if (fs.existsSync(path)) {
// register publish path
api.register_plugin_path(publish_path).then(displayResult);
// send json to pyblish
api.publish(jsonSendPath, jsonGetPath, gui).then(function (result) {
// check if resulted path exists as file
if (fs.existsSync(result.get_json_path)) {
// read json data from resulted path
displayResult('Updating metadata of clips after publishing');
jsonfile.readFile(result.get_json_path, function (err, json) {
csi.evalScript('pype.dumpPublishedInstancesToMetadata(' + JSON.stringify(json) + ');');
})
// version up project
if (versionUp) {
displayResult('Saving new version of the project file');
csi.evalScript('pype.versionUpWorkFile();');
};
} else {
// if resulted path file not existing
displayResult('Publishing did not finish correctly. Hit Publish again to publish from the already rendered data, or Reset to render everything again.');
};
});
} else {
displayResult('waiting');
checkingFile(path);
};
},
timeout)
};
checkingFile(jsonContent.waitingFor)
});
});
} else {
// register publish path
api.register_plugin_path(publish_path).then(displayResult);
// send json to pyblish
api.publish(jsonSendPath, jsonGetPath, gui).then(function (result) {
// check if resulted path exists as file
if (fs.existsSync(result.get_json_path)) {
// read json data from resulted path
displayResult('Updating metadata of clips after publishing');
jsonfile.readFile(result.get_json_path, function (err, json) {
csi.evalScript('pype.dumpPublishedInstancesToMetadata(' + JSON.stringify(json) + ');');
})
// version up project
if (versionUp) {
displayResult('Saving new version of the project file');
csi.evalScript('pype.versionUpWorkFile();');
};
} else {
// if resulted path file not existing
displayResult('Publishing did not finish correctly. Hit Publish again to publish from the already rendered data, or Reset to render everything again.');
};
});
};
// $('input[name=send-path]').value = '';
// $('input[name=get-path]').value = '';
}
function context() {
var $ = querySelector('#context');
var project = $('input[name=project]').value;
var asset = $('input[name=asset]').value;
var task = $('input[name=task]').value;
var app = $('input[name=app]').value;
api.context(project, asset, task, app).then(displayResult);
}
function tc(timecode) {
var seconds = timecodes.toSeconds(timecode);
var timec = timecodes.fromSeconds(seconds);
displayResult(seconds);
displayResult(timec);
}
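// e.g. tc('00:23:47:10') displays the timecode converted to seconds and back again
// (a round-trip sanity check; wired to #btn-tc below)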
function rename() {
var $ = querySelector('#rename');
var data = {};
data.ep = $('input[name=episode]').value;
data.epSuffix = $('input[name=ep_suffix]').value;
if (!data.ep) {
csi.evalScript('pype.alert_message("' + 'Need to fill in the episode code' + '")');
return;
}
if (!data.epSuffix) {
csi.evalScript('pype.alert_message("' + 'Need to fill in the episode name suffix' + '")');
return;
}
csi.evalScript('br.renameTargetedTextLayer( ' + JSON.stringify(data) + ' );', function (result) {
displayResult(result);
});
}
// bind buttons
$('#btn-getRenderAnimation').click(function () {
loadAnimationRendersToTimeline();
});
$('#btn-rename').click(function () {
rename();
});
$('#btn-set-context').click(function () {
context();
});
$('#btn-register').click(function () {
register();
});
$('#btn-deregister').click(function () {
deregister();
});
$('#btn-publish').click(function () {
publish();
});
$('#btn-send-reset').click(function () {
var $ = querySelector('#publish');
$('input[name=send-path]').value = '';
});
$('#btn-get-reset').click(function () {
var $ = querySelector('#publish');
$('input[name=get-path]').value = '';
});
$('#btn-get-active-sequence').click(function () {
evalScript('pype.getActiveSequence();');
});
$('#btn-get-selected').click(function () {
$('#output').html('getting selected clips info ...');
evalScript('pype.getSelectedItems();');
});
$('#btn-get-env').click(function () {
displayResult(window.ENV);
});
$('#btn-get-projectitems').click(function () {
evalScript('pype.getProjectItems();');
});
$('#btn-metadata').click(function () {
var $ = querySelector('#publish');
var path = $('input[name=get-path]').value;
var jsonfile = require('jsonfile');
displayResult(path);
jsonfile.readFile(path, function (err, json) {
csi.evalScript('pype.dumpPublishedInstancesToMetadata(' + JSON.stringify(json) + ');');
displayResult('Clip metadata was updated after publishing');
})
});
$('#btn-get-frame').click(function () {
evalScript('$._PPP_.exportCurrentFrameAsPNG();');
});
$('#btn-tc').click(function () {
tc('00:23:47:10');
});
$('#btn-generateRequest').click(function () {
evalScript('pype.getPyblishRequest();');
});
$('#btn-newWorkfileVersion').click(function () {
evalScript('pype.versionUpWorkFile();');
});

@ -1,75 +0,0 @@
// pico client: import a proxy for the python module named 'api'
var api = pico.importModule('api');
function querySelector(parent) {
return function (child) {
return document.querySelector(parent).querySelector(child)
};
}
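// usage: var $ = querySelector('#publish'); $('input[name=send-path]') then looks up
// the input scoped to the #publish section only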
var defs = {}
function jumpTo(name) {
var e = defs[name];
document.querySelectorAll('.highlight').forEach(function (el) {
el.classList.remove('highlight');
});
e.classList.add('highlight');
return false;
}
function unindent(code) {
var lines = code.split('\n');
var margin = -1;
for (var j = 0; j < lines.length; j++) {
var l = lines[j];
for (var i = 0; i < l.length; i++) {
if (l[i] != " ") {
margin = i;
break;
}
}
if (margin > -1) {
break;
}
}
lines = lines.slice(j);
return lines.map(function (s) {
return s.substr(margin)
}).join('\n');
}
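// e.g. unindent('    a\n      b') returns 'a\n  b'
// (the first non-blank line's margin is stripped from every line)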
function ready() {
// // set the <code> element of each example to the corresponding functions source
// document.querySelectorAll('li pre code.js').forEach(function(e){
// var id = e.parentElement.parentElement.id;
// var f = window[id];
// var code = f.toString().split('\n').slice(2, -1).join('\n');
// e.innerText = unindent(code);
// })
document.querySelectorAll('li pre code.html').forEach(function (e) {
var html = e.parentElement.parentElement.querySelector('div.example').innerHTML;
e.innerText = unindent(html);
})
hljs.initHighlighting();
// // find all the elements representing the function definitions in the python source
// document.querySelectorAll('.python .hljs-function .hljs-title').forEach(function(e){
// var a = document.createElement('a');
// a.name = e.innerText;
// e.parentElement.insertBefore(a, e)
// return defs[e.innerText] = e.parentElement;
// });
// convert all 'api.X' strings to hyperlinks to jump to python source
document.querySelectorAll('.js').forEach(function (e) {
var code = e.innerHTML;
Object.keys(defs).forEach(function (k) {
code = code.replace('api.' + k + '(', '<a href="#' + k + '" onclick="jumpTo(\'' + k + '\')">api.' + k + '</a>(');
})
e.innerHTML = code;
})
}

@ -1,92 +0,0 @@
/*************************************************************************
* ADOBE CONFIDENTIAL
* ___________________
*
* Copyright 2014 Adobe
* All Rights Reserved.
*
* NOTICE: Adobe permits you to use, modify, and distribute this file in
* accordance with the terms of the Adobe license agreement accompanying
* it. If you have received this file from a source other than Adobe,
* then your use, modification, or distribution of it requires the prior
* written permission of Adobe.
**************************************************************************/
var json2 = '~/AppData/Roaming/Adobe/CEP/extensions/com.pype.avalon/js/json2.js';
$.evalFile(json2);
if (typeof ($) == 'undefined') {
$ = {};
}
if (typeof (pype) == 'undefined') {
var pype = {};
}
if (typeof (br) == 'undefined') {
var br = {};
}
function keepExtension() {
return app.setExtensionPersistent("com.pype.avalon", 0);
}
keepExtension();
$._ext = {
// Evaluate a file and catch the exception.
evalFile: function (path) {
try {
$.evalFile(path);
$.writeln(path);
} catch (e) {
$.writeln(e);
alert("Exception:" + e);
}
},
// Evaluate all the files in the given folder
evalFiles: function (jsxFolderPath) {
var folder = new Folder(jsxFolderPath);
if (folder.exists) {
var jsxFiles = folder.getFiles("*.jsx");
for (var i = 0; i < jsxFiles.length; i++) {
var jsxFile = jsxFiles[i];
$._ext.evalFile(jsxFile);
}
}
},
// entry-point function to call scripts more easily & reliably
callScript: function (dataStr) {
try {
var dataObj = JSON.parse(decodeURIComponent(dataStr));
if (
!dataObj ||
!dataObj.namespace ||
!dataObj.scriptName ||
!dataObj.args
) {
throw new Error('Did not provide all needed info to callScript!');
}
// call the specified jsx-function
var result = $[dataObj.namespace][dataObj.scriptName].apply(
null,
dataObj.args
);
// build the payload-object to return
var payload = {
err: 0,
result: result
};
return encodeURIComponent(JSON.stringify(payload));
} catch (err) {
payload = {
err: err
};
return encodeURIComponent(JSON.stringify(payload));
}
}
};
// var dalsiJsxFile = 'C:\\Users\\hubert\\CODE\\pype-setup\\repos\\pype-config\\pype\\premiere\\extensions\\com.pype.avalon\\jsx\\pype.jsx';
// // $._ext.evalFile(dalsiJsxFile);
// $.evalFile(dalsiJsxFile);
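// Usage sketch (argument values are hypothetical): from the panel side,
//   var payload = $._ext.callScript(encodeURIComponent(JSON.stringify({
//     namespace: '_PPP_', scriptName: 'updateEventPanel', args: ['hello']
//   })));
//   var result = JSON.parse(decodeURIComponent(payload)); // {err: 0, result: ...} on success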

@ -0,0 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<ExtensionList>
<Extension Id="com.pype">
<HostList>
<Host Name="PPRO" Port="7766"/>
</HostList>
</Extension>
</ExtensionList>
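<!-- A .debug file like this enables standard CEP remote debugging: with the panel
     open in Premiere Pro, point a Chromium-based browser at http://localhost:7766
     to inspect it. -->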

@ -2,9 +2,9 @@
<!-- /************************************************************************* * ADOBE CONFIDENTIAL * ___________________ * * Copyright 2014 Adobe * All Rights Reserved. * * NOTICE: Adobe permits you to use, modify, and distribute this file in *
accordance with the terms of the Adobe license agreement accompanying * it. If you have received this file from a source other than Adobe, * then your use, modification, or distribution of it requires the prior * written permission of Adobe.
**************************************************************************/ -->
<ExtensionManifest Version="5.0" ExtensionBundleId="com.pype.avalon" ExtensionBundleVersion="11.1" ExtensionBundleName="Pype Avalon Panel" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<ExtensionManifest Version="5.0" ExtensionBundleId="com.pype" ExtensionBundleVersion="11.1" ExtensionBundleName="Pype Panel" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<ExtensionList>
<Extension Id="com.pype.avalon" Version="10.3.0"/>
<Extension Id="com.pype" Version="10.3.0"/>
</ExtensionList>
<ExecutionEnvironment>
<HostList>
@ -19,11 +19,11 @@ accordance with the terms of the Adobe license agreement accompanying * it. If y
</ExecutionEnvironment>
<DispatchInfoList>
<Extension Id="com.pype.avalon">
<Extension Id="com.pype">
<DispatchInfo >
<Resources>
<MainPath>./index_remote.html</MainPath>
-<ScriptPath>./pypeAvalon.jsx</ScriptPath>
+<ScriptPath>./pypeApp.jsx</ScriptPath>
<CEFCommandLine>
<Parameter>--enable-nodejs</Parameter>
<Parameter>--mixed-context</Parameter>
@ -35,7 +35,7 @@ accordance with the terms of the Adobe license agreement accompanying * it. If y
</Lifecycle>
<UI>
<Type>Panel</Type>
-<Menu>Avalon</Menu>
+<Menu>Pype</Menu>
<Geometry>
<Size>
<Width>600</Width>

File diff suppressed because it is too large

(5 binary image files changed, 18 KiB each before and after; some files were not shown because too many files changed in this diff)