Mirror of https://github.com/ynput/ayon-core.git (synced 2025-12-24 21:04:40 +01:00)

Merge branch 'develop' into enhancement/AY-4919_Move-Resolve-client-code

Commit 2b95bb5e23
62 changed files with 303 additions and 40 deletions
15
server_addon/photoshop/client/ayon_photoshop/__init__.py
Normal file
@@ -0,0 +1,15 @@
from .version import __version__
from .addon import (
    PHOTOSHOP_ADDON_ROOT,
    PhotoshopAddon,
    get_launch_script_path,
)


__all__ = (
    "__version__",

    "PHOTOSHOP_ADDON_ROOT",
    "PhotoshopAddon",
    "get_launch_script_path",
)
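The names listed in `__all__` above are the package-level entry points other code is expected to use. A minimal sketch, assuming the `ayon_photoshop` client package is importable in the current environment:

```python
# Package-level imports re-exported by ayon_photoshop/__init__.py.
from ayon_photoshop import (
    __version__,
    PHOTOSHOP_ADDON_ROOT,
    PhotoshopAddon,
    get_launch_script_path,
)

print(__version__)               # addon version defined in version.py
print(PHOTOSHOP_ADDON_ROOT)      # directory of the installed client package
print(get_launch_script_path())  # .../api/launch_script.py (see addon.py below)
```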
38
server_addon/photoshop/client/ayon_photoshop/addon.py
Normal file
@@ -0,0 +1,38 @@
import os
from ayon_core.addon import AYONAddon, IHostAddon

from .version import __version__

PHOTOSHOP_ADDON_ROOT = os.path.dirname(os.path.abspath(__file__))


class PhotoshopAddon(AYONAddon, IHostAddon):
    name = "photoshop"
    version = __version__
    host_name = "photoshop"

    def add_implementation_envs(self, env, _app):
        """Modify environments to contain all required for implementation."""
        defaults = {
            "AYON_LOG_NO_COLORS": "1",
            "WEBSOCKET_URL": "ws://localhost:8099/ws/"
        }
        for key, value in defaults.items():
            if not env.get(key):
                env[key] = value

    def get_workfile_extensions(self):
        return [".psd", ".psb"]

    def get_launch_hook_paths(self, app):
        if app.host_name != self.host_name:
            return []
        return [
            os.path.join(PHOTOSHOP_ADDON_ROOT, "hooks")
        ]


def get_launch_script_path():
    return os.path.join(
        PHOTOSHOP_ADDON_ROOT, "api", "launch_script.py"
    )
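A point worth calling out in `add_implementation_envs` is that the defaults are applied only when a key is missing or empty, so values already present in the launch environment are kept. A minimal, self-contained sketch of that merge behaviour (a plain dict stands in for the real launch environment, since constructing the addon requires the AYON addon manager):

```python
# Mirrors the default-filling logic of PhotoshopAddon.add_implementation_envs:
# only unset (or empty) keys receive the built-in defaults.
def fill_photoshop_defaults(env):
    defaults = {
        "AYON_LOG_NO_COLORS": "1",
        "WEBSOCKET_URL": "ws://localhost:8099/ws/",
    }
    for key, value in defaults.items():
        if not env.get(key):
            env[key] = value
    return env


launch_env = {"WEBSOCKET_URL": "ws://localhost:9000/ws/"}  # pre-set elsewhere
fill_photoshop_defaults(launch_env)
print(launch_env["WEBSOCKET_URL"])       # keeps ws://localhost:9000/ws/
print(launch_env["AYON_LOG_NO_COLORS"])  # filled with the default "1"
```

The same `WEBSOCKET_URL` default is what the extension client script further down in this diff falls back to when the environment value comes back empty (`ws://localhost:8099/ws/`).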
257
server_addon/photoshop/client/ayon_photoshop/api/README.md
Normal file
@@ -0,0 +1,257 @@
# Photoshop Integration

## Setup

The Photoshop integration requires two components to work: `extension` and `server`.

### Extension

To install the extension, download the [Extension Manager Command Line tool (ExManCmd)](https://github.com/Adobe-CEP/Getting-Started-guides/tree/master/Package%20Distribute%20Install#option-2---exmancmd).

```
ExManCmd /install {path to addon}/api/extension.zxp
```

### Server

The easiest way to launch the server together with Photoshop is:

```
python -c ^"import ayon_photoshop;ayon_photoshop.launch(""C:\Program Files\Adobe\Adobe Photoshop 2020\Photoshop.exe"")^"
```

`ayon_photoshop.launch` starts the application and the server, and also shuts the server down when Photoshop exits.

## Usage

The Photoshop extension can be found under `Window > Extensions > Ayon`. Once launched, you should be presented with a panel like this:



## Developing

### Extension
When developing the extension you can load it [unsigned](https://github.com/Adobe-CEP/CEP-Resources/blob/master/CEP_9.x/Documentation/CEP%209.0%20HTML%20Extension%20Cookbook.md#debugging-unsigned-extensions).

When signing the extension you can use this [guide](https://github.com/Adobe-CEP/Getting-Started-guides/tree/master/Package%20Distribute%20Install#package-distribute-install-guide).

```
ZXPSignCmd -selfSignedCert NA NA Ayon Ayon-Photoshop Ayon extension.p12
ZXPSignCmd -sign {path to avalon-core}\avalon\photoshop\extension {path to avalon-core}\avalon\photoshop\extension.zxp extension.p12 avalon
```

### Plugin Examples

These plugins were made with the [polly config](https://github.com/mindbender-studio/config). To fully integrate and load, you will have to use this config and add `image` to the [integration plugin](https://github.com/mindbender-studio/config/blob/master/polly/plugins/publish/integrate_asset.py).
#### Creator Plugin
```python
from avalon import photoshop


class CreateImage(photoshop.Creator):
    """Image folder for publish."""

    name = "imageDefault"
    label = "Image"
    product_type = "image"

    def __init__(self, *args, **kwargs):
        super(CreateImage, self).__init__(*args, **kwargs)
```
#### Collector Plugin
```python
import pythoncom

import pyblish.api

# NOTE: the original example uses PhotoshopClientStub without importing it;
# the import path below is an assumption based on the addon layout.
from ayon_photoshop.api.ws_stub import PhotoshopClientStub


class CollectInstances(pyblish.api.ContextPlugin):
    """Gather instances by LayerSet and file metadata

    This collector takes into account assets that are associated with
    a LayerSet and marked with a unique identifier.

    Identifier:
        id (str): "ayon.create.instance"
    """

    label = "Instances"
    order = pyblish.api.CollectorOrder
    hosts = ["photoshop"]
    families_mapping = {
        "image": []
    }

    def process(self, context):
        # Necessary call when running in a different thread which pyblish-qml
        # can be.
        pythoncom.CoInitialize()

        photoshop_client = PhotoshopClientStub()
        layers = photoshop_client.get_layers()
        layers_meta = photoshop_client.get_layers_metadata()
        for layer in layers:
            layer_data = photoshop_client.read(layer, layers_meta)

            # Skip layers without metadata.
            if layer_data is None:
                continue

            # Skip containers.
            if "container" in layer_data["id"]:
                continue

            # child_layers = [*layer.Layers]
            # self.log.debug("child_layers {}".format(child_layers))
            # if not child_layers:
            #     self.log.info("%s skipped, it was empty." % layer.Name)
            #     continue

            instance = context.create_instance(layer.name)
            instance.append(layer)
            instance.data.update(layer_data)
            instance.data["families"] = self.families_mapping[
                layer_data["productType"]
            ]
            instance.data["publish"] = layer.visible

            # Produce diagnostic message for any graphical
            # user interface interested in visualising it.
            self.log.info("Found: \"%s\" " % instance.data["name"])
```
#### Extractor Plugin
```python
import os

from ayon_core.pipeline import publish
from ayon_photoshop import api as photoshop


class ExtractImage(publish.Extractor):
    """Produce a flattened image file from instance

    This plug-in takes into account only the layers in the group.
    """

    label = "Extract Image"
    hosts = ["photoshop"]
    families = ["image"]
    formats = ["png", "jpg"]

    def process(self, instance):

        staging_dir = self.staging_dir(instance)
        self.log.info("Outputting image to {}".format(staging_dir))

        # Perform extraction
        stub = photoshop.stub()
        files = {}
        with photoshop.maintained_selection():
            self.log.info("Extracting %s" % str(list(instance)))
            with photoshop.maintained_visibility():
                # Hide all other layers.
                extract_ids = set([ll.id for ll in stub.
                                  get_layers_in_layers([instance[0]])])

                for layer in stub.get_layers():
                    # limit unnecessary calls to client
                    if layer.visible and layer.id not in extract_ids:
                        stub.set_visible(layer.id, False)

                save_options = []
                if "png" in self.formats:
                    save_options.append('png')
                if "jpg" in self.formats:
                    save_options.append('jpg')

                file_basename = os.path.splitext(
                    stub.get_active_document_name()
                )[0]
                for extension in save_options:
                    _filename = "{}.{}".format(file_basename, extension)
                    files[extension] = _filename

                    full_filename = os.path.join(staging_dir, _filename)
                    stub.saveAs(full_filename, extension, True)

        representations = []
        for extension, filename in files.items():
            representations.append({
                "name": extension,
                "ext": extension,
                "files": filename,
                "stagingDir": staging_dir
            })
        instance.data["representations"] = representations
        instance.data["stagingDir"] = staging_dir

        self.log.info(f"Extracted {instance} to {staging_dir}")
```
#### Loader Plugin
```python
from avalon import api, photoshop
from ayon_core.pipeline import load, get_representation_path

stub = photoshop.stub()


class ImageLoader(load.LoaderPlugin):
    """Load images

    Stores the imported asset in a container named after the asset.
    """

    families = ["image"]
    representations = {"*"}

    def load(self, context, name=None, namespace=None, data=None):
        path = self.filepath_from_context(context)
        with photoshop.maintained_selection():
            layer = stub.import_smart_object(path)

        self[:] = [layer]

        return photoshop.containerise(
            name,
            namespace,
            layer,
            context,
            self.__class__.__name__
        )

    def update(self, container, context):
        layer = container.pop("layer")
        repre_entity = context["representation"]
        with photoshop.maintained_selection():
            stub.replace_smart_object(
                layer, get_representation_path(repre_entity)
            )

        stub.imprint(
            layer, {"representation": repre_entity["id"]}
        )

    def remove(self, container):
        container["layer"].Delete()

    def switch(self, container, context):
        self.update(container, context)
```
For easier debugging of JavaScript:
https://community.adobe.com/t5/download-install/adobe-extension-debuger-problem/td-p/10911704?page=1
Add `--enable-blink-features=ShadowDOMV0,CustomElementsV0` when starting Chrome,
then open `localhost:8078` (port set in `photoshop\extension\.debug`).

Or use Visual Studio Code https://medium.com/adobetech/extendscript-debugger-for-visual-studio-code-public-release-a2ff6161fa01

Or install the CEF client from https://github.com/Adobe-CEP/CEP-Resources/tree/master/CEP_9.x

## Resources
  - https://github.com/lohriialo/photoshop-scripting-python
  - https://www.adobe.com/devnet/photoshop/scripting.html
  - https://github.com/Adobe-CEP/Getting-Started-guides
  - https://github.com/Adobe-CEP/CEP-Resources
41
server_addon/photoshop/client/ayon_photoshop/api/__init__.py
Normal file
@@ -0,0 +1,41 @@
"""Public API

Anything that isn't defined here is INTERNAL and unreliable for external use.

"""

from .launch_logic import stub

from .pipeline import (
    PhotoshopHost,
    ls,
    containerise
)
from .plugin import (
    PhotoshopLoader,
    get_unique_layer_name
)


from .lib import (
    maintained_selection,
    maintained_visibility
)

__all__ = [
    # launch_logic
    "stub",

    # pipeline
    "PhotoshopHost",
    "ls",
    "containerise",

    # Plugin
    "PhotoshopLoader",
    "get_unique_layer_name",

    # lib
    "maintained_selection",
    "maintained_visibility",
]
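Taken together with the README plugin examples above, typical use of this public API from a publish or load plugin looks roughly like the sketch below. It assumes Photoshop is running with the AYON extension connected (otherwise `stub()` has nothing to talk to); the layer attributes used (`id`, `visible`) follow the README examples.

```python
from ayon_photoshop import api as photoshop

# The stub is the Python-side proxy for the RPC routes exposed by the
# CEP extension (get_layers, set_visible, saveAs, ...).
stub = photoshop.stub()

with photoshop.maintained_selection():
    with photoshop.maintained_visibility():
        for layer in stub.get_layers():
            print(layer.id, layer.visible)
# Selection and layer visibility are restored when the context managers exit.
```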
BIN
server_addon/photoshop/client/ayon_photoshop/api/extension.zxp
Normal file
Binary file not shown.
@@ -0,0 +1,9 @@
<?xml version="1.0" encoding="UTF-8"?>
<ExtensionList>
    <Extension Id="io.ynput.PS.panel">
        <HostList>
            <Host Name="PHXS" Port="8078"/>
            <Host Name="FLPR" Port="8078"/>
        </HostList>
    </Extension>
</ExtensionList>
@@ -0,0 +1,53 @@
<?xml version='1.0' encoding='UTF-8'?>
<ExtensionManifest ExtensionBundleId="io.ynput.PS.panel" ExtensionBundleVersion="1.1.0" Version="7.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
    <ExtensionList>
        <Extension Id="io.ynput.PS.panel" Version="1.0.1" />
    </ExtensionList>
    <ExecutionEnvironment>
        <HostList>
            <Host Name="PHSP" Version="19" />
            <Host Name="PHXS" Version="19" />
        </HostList>
        <LocaleList>
            <Locale Code="All" />
        </LocaleList>
        <RequiredRuntimeList>
            <RequiredRuntime Name="CSXS" Version="7.0" />
        </RequiredRuntimeList>
    </ExecutionEnvironment>
    <DispatchInfoList>
        <Extension Id="io.ynput.PS.panel">
            <DispatchInfo>
                <Resources>
                    <MainPath>./index.html</MainPath>
                    <CEFCommandLine />
                </Resources>
                <Lifecycle>
                    <AutoVisible>true</AutoVisible>
                    <StartOn>
                        <!-- Photoshop dispatches this event on startup -->
                        <Event>applicationActivate</Event>
                        <Event>com.adobe.csxs.events.ApplicationInitialized</Event>
                    </StartOn>
                </Lifecycle>
                <UI>
                    <Type>Panel</Type>
                    <Menu>AYON</Menu>
                    <Geometry>
                        <Size>
                            <Width>300</Width>
                            <Height>140</Height>
                        </Size>
                        <MaxSize>
                            <Width>400</Width>
                            <Height>200</Height>
                        </MaxSize>
                    </Geometry>
                    <Icons>
                        <Icon Type="Normal">./icons/ayon_logo.png</Icon>
                    </Icons>
                </UI>
            </DispatchInfo>
        </Extension>
    </DispatchInfoList>
</ExtensionManifest>
File diff suppressed because it is too large

@@ -0,0 +1,300 @@
// client facing part of extension, creates WSRPC client (jsx cannot
|
||||
// do that)
|
||||
// consumes RPC calls from server (OpenPype) calls ./host/index.jsx and
|
||||
// returns values back (in json format)
|
||||
|
||||
var logReturn = function(result){ log.warn('Result: ' + result);};
|
||||
|
||||
var csInterface = new CSInterface();
|
||||
|
||||
log.warn("script start");
|
||||
|
||||
WSRPC.DEBUG = false;
|
||||
WSRPC.TRACE = false;
|
||||
|
||||
function myCallBack(){
|
||||
log.warn("Triggered index.jsx");
|
||||
}
|
||||
// importing through manifest.xml isn't working because relative paths
|
||||
// possibly TODO
|
||||
jsx.evalFile('./host/index.jsx', myCallBack);
|
||||
|
||||
function runEvalScript(script) {
|
||||
// because of asynchronous nature of functions in jsx
|
||||
// this waits for response
|
||||
return new Promise(function(resolve, reject){
|
||||
csInterface.evalScript(script, resolve);
|
||||
});
|
||||
}
|
||||
|
||||
/** main entry point **/
|
||||
startUp("WEBSOCKET_URL");
|
||||
|
||||
// get websocket server url from environment value
|
||||
async function startUp(url){
|
||||
log.warn("url", url);
|
||||
promis = runEvalScript("getEnv('" + url + "')");
|
||||
|
||||
var res = await promis;
|
||||
// run rest only after resolved promise
|
||||
main(res);
|
||||
}
|
||||
|
||||
function get_extension_version(){
|
||||
/** Returns version number from extension manifest.xml **/
|
||||
log.debug("get_extension_version")
|
||||
var path = csInterface.getSystemPath(SystemPath.EXTENSION);
|
||||
log.debug("extension path " + path);
|
||||
|
||||
var result = window.cep.fs.readFile(path + "/CSXS/manifest.xml");
|
||||
var version = undefined;
|
||||
if(result.err === 0){
|
||||
if (window.DOMParser) {
|
||||
const parser = new DOMParser();
|
||||
const xmlDoc = parser.parseFromString(result.data.toString(), 'text/xml');
|
||||
const children = xmlDoc.children;
|
||||
|
||||
for (let i = 0; i <= children.length; i++) {
|
||||
if (children[i] && children[i].getAttribute('ExtensionBundleVersion')) {
|
||||
version = children[i].getAttribute('ExtensionBundleVersion');
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return version
|
||||
}
|
||||
|
||||
function main(websocket_url){
|
||||
// creates connection to 'websocket_url', registers routes
|
||||
log.warn("websocket_url", websocket_url);
|
||||
var default_url = 'ws://localhost:8099/ws/';
|
||||
|
||||
if (websocket_url == ''){
|
||||
websocket_url = default_url;
|
||||
}
|
||||
log.warn("connecting to:", websocket_url);
|
||||
RPC = new WSRPC(websocket_url, 5000); // spin connection
|
||||
|
||||
RPC.connect();
|
||||
|
||||
log.warn("connected");
|
||||
|
||||
function EscapeStringForJSX(str){
|
||||
// Replaces:
|
||||
// \ with \\
|
||||
// ' with \'
|
||||
// " with \"
|
||||
// See: https://stackoverflow.com/a/3967927/5285364
|
||||
return str.replace(/\\/g, '\\\\').replace(/'/g, "\\'").replace(/"/g, '\\"');
|
||||
}
|
||||
|
||||
RPC.addRoute('Photoshop.open', function (data) {
|
||||
log.warn('Server called client route "open":', data);
|
||||
var escapedPath = EscapeStringForJSX(data.path);
|
||||
return runEvalScript("fileOpen('" + escapedPath +"')")
|
||||
.then(function(result){
|
||||
log.warn("open: " + result);
|
||||
return result;
|
||||
});
|
||||
});
|
||||
|
||||
RPC.addRoute('Photoshop.read', function (data) {
|
||||
log.warn('Server called client route "read":', data);
|
||||
return runEvalScript("getHeadline()")
|
||||
.then(function(result){
|
||||
log.warn("getHeadline: " + result);
|
||||
return result;
|
||||
});
|
||||
});
|
||||
|
||||
RPC.addRoute('Photoshop.get_layers', function (data) {
|
||||
log.warn('Server called client route "get_layers":', data);
|
||||
return runEvalScript("getLayers()")
|
||||
.then(function(result){
|
||||
log.warn("getLayers: " + result);
|
||||
return result;
|
||||
});
|
||||
});
|
||||
|
||||
RPC.addRoute('Photoshop.set_visible', function (data) {
|
||||
log.warn('Server called client route "set_visible":', data);
|
||||
return runEvalScript("setVisible(" + data.layer_id + ", " +
|
||||
data.visibility + ")")
|
||||
.then(function(result){
|
||||
log.warn("setVisible: " + result);
|
||||
return result;
|
||||
});
|
||||
});
|
||||
|
||||
RPC.addRoute('Photoshop.get_active_document_name', function (data) {
|
||||
log.warn('Server called client route "get_active_document_name":',
|
||||
data);
|
||||
return runEvalScript("getActiveDocumentName()")
|
||||
.then(function(result){
|
||||
log.warn("save: " + result);
|
||||
return result;
|
||||
});
|
||||
});
|
||||
|
||||
RPC.addRoute('Photoshop.get_active_document_full_name', function (data) {
|
||||
log.warn('Server called client route ' +
|
||||
'"get_active_document_full_name":', data);
|
||||
return runEvalScript("getActiveDocumentFullName()")
|
||||
.then(function(result){
|
||||
log.warn("save: " + result);
|
||||
return result;
|
||||
});
|
||||
});
|
||||
|
||||
RPC.addRoute('Photoshop.save', function (data) {
|
||||
log.warn('Server called client route "save":', data);
|
||||
|
||||
return runEvalScript("save()")
|
||||
.then(function(result){
|
||||
log.warn("save: " + result);
|
||||
return result;
|
||||
});
|
||||
});
|
||||
|
||||
RPC.addRoute('Photoshop.get_selected_layers', function (data) {
|
||||
log.warn('Server called client route "get_selected_layers":', data);
|
||||
|
||||
return runEvalScript("getSelectedLayers()")
|
||||
.then(function(result){
|
||||
log.warn("get_selected_layers: " + result);
|
||||
return result;
|
||||
});
|
||||
});
|
||||
|
||||
RPC.addRoute('Photoshop.create_group', function (data) {
|
||||
log.warn('Server called client route "create_group":', data);
|
||||
|
||||
return runEvalScript("createGroup('" + data.name + "')")
|
||||
.then(function(result){
|
||||
log.warn("createGroup: " + result);
|
||||
return result;
|
||||
});
|
||||
});
|
||||
|
||||
RPC.addRoute('Photoshop.group_selected_layers', function (data) {
|
||||
log.warn('Server called client route "group_selected_layers":',
|
||||
data);
|
||||
|
||||
return runEvalScript("groupSelectedLayers(null, "+
|
||||
"'" + data.name +"')")
|
||||
.then(function(result){
|
||||
log.warn("group_selected_layers: " + result);
|
||||
return result;
|
||||
});
|
||||
});
|
||||
|
||||
RPC.addRoute('Photoshop.import_smart_object', function (data) {
|
||||
log.warn('Server called client "import_smart_object":', data);
|
||||
var escapedPath = EscapeStringForJSX(data.path);
|
||||
return runEvalScript("importSmartObject('" + escapedPath +"', " +
|
||||
"'"+ data.name +"',"+
|
||||
+ data.as_reference +")")
|
||||
.then(function(result){
|
||||
log.warn("import_smart_object: " + result);
|
||||
return result;
|
||||
});
|
||||
});
|
||||
|
||||
RPC.addRoute('Photoshop.replace_smart_object', function (data) {
|
||||
log.warn('Server called route "replace_smart_object":', data);
|
||||
var escapedPath = EscapeStringForJSX(data.path);
|
||||
return runEvalScript("replaceSmartObjects("+data.layer_id+"," +
|
||||
"'" + escapedPath +"',"+
|
||||
"'"+ data.name +"')")
|
||||
.then(function(result){
|
||||
log.warn("replaceSmartObjects: " + result);
|
||||
return result;
|
||||
});
|
||||
});
|
||||
|
||||
RPC.addRoute('Photoshop.delete_layer', function (data) {
|
||||
log.warn('Server called route "delete_layer":', data);
|
||||
return runEvalScript("deleteLayer("+data.layer_id+")")
|
||||
.then(function(result){
|
||||
log.warn("delete_layer: " + result);
|
||||
return result;
|
||||
});
|
||||
});
|
||||
|
||||
RPC.addRoute('Photoshop.rename_layer', function (data) {
|
||||
log.warn('Server called route "rename_layer":', data);
|
||||
return runEvalScript("renameLayer("+data.layer_id+", " +
|
||||
"'"+ data.name +"')")
|
||||
.then(function(result){
|
||||
log.warn("rename_layer: " + result);
|
||||
return result;
|
||||
});
|
||||
});
|
||||
|
||||
RPC.addRoute('Photoshop.select_layers', function (data) {
|
||||
log.warn('Server called client route "select_layers":', data);
|
||||
|
||||
return runEvalScript("selectLayers('" + data.layers +"')")
|
||||
.then(function(result){
|
||||
log.warn("select_layers: " + result);
|
||||
return result;
|
||||
});
|
||||
});
|
||||
|
||||
RPC.addRoute('Photoshop.is_saved', function (data) {
|
||||
log.warn('Server called client route "is_saved":', data);
|
||||
|
||||
return runEvalScript("isSaved()")
|
||||
.then(function(result){
|
||||
log.warn("is_saved: " + result);
|
||||
return result;
|
||||
});
|
||||
});
|
||||
|
||||
RPC.addRoute('Photoshop.saveAs', function (data) {
|
||||
log.warn('Server called client route "saveAsJPEG":', data);
|
||||
var escapedPath = EscapeStringForJSX(data.image_path);
|
||||
return runEvalScript("saveAs('" + escapedPath + "', " +
|
||||
"'" + data.ext + "', " +
|
||||
data.as_copy + ")")
|
||||
.then(function(result){
|
||||
log.warn("save: " + result);
|
||||
return result;
|
||||
});
|
||||
});
|
||||
|
||||
RPC.addRoute('Photoshop.imprint', function (data) {
|
||||
log.warn('Server called client route "imprint":', data);
|
||||
var escaped = data.payload.replace(/\n/g, "\\n");
|
||||
return runEvalScript("imprint('" + escaped + "')")
|
||||
.then(function(result){
|
||||
log.warn("imprint: " + result);
|
||||
return result;
|
||||
});
|
||||
});
|
||||
|
||||
RPC.addRoute('Photoshop.get_extension_version', function (data) {
|
||||
log.warn('Server called client route "get_extension_version":', data);
|
||||
return get_extension_version();
|
||||
});
|
||||
|
||||
RPC.addRoute('Photoshop.close', function (data) {
|
||||
log.warn('Server called client route "close":', data);
|
||||
return runEvalScript("close()");
|
||||
});
|
||||
|
||||
RPC.call('Photoshop.ping').then(function (data) {
|
||||
log.warn('Result for calling server route "ping": ', data);
|
||||
return runEvalScript("ping()")
|
||||
.then(function(result){
|
||||
log.warn("ping: " + result);
|
||||
return result;
|
||||
});
|
||||
|
||||
}, function (error) {
|
||||
log.warn(error);
|
||||
});
|
||||
|
||||
}
|
||||
|
||||
log.warn("end script");
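Each `RPC.addRoute('Photoshop.*', ...)` registration above is the client half of a round trip that starts on the Python side; the README plugins reach these routes through the stub. A hedged sketch of that server-side view, with method names taken from the README examples (the stub implementation itself is not part of this diff):

```python
from ayon_photoshop import api as photoshop

stub = photoshop.stub()

# Python stub call           -> extension route          -> host/index.jsx
# stub.get_layers()             'Photoshop.get_layers'      getLayers()
# stub.set_visible(id, b)       'Photoshop.set_visible'     setVisible(id, b)
# stub.saveAs(path, ext, c)     'Photoshop.saveAs'          saveAs(path, ext, c)
for layer in stub.get_layers():
    if not layer.visible:
        stub.set_visible(layer.id, True)
```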
2
server_addon/photoshop/client/ayon_photoshop/api/extension/client/loglevel.min.js
vendored
Normal file
@@ -0,0 +1,2 @@
/*! loglevel - v1.6.8 - https://github.com/pimterry/loglevel - (c) 2020 Tim Perry - licensed MIT */
|
||||
!function(a,b){"use strict";"function"==typeof define&&define.amd?define(b):"object"==typeof module&&module.exports?module.exports=b():a.log=b()}(this,function(){"use strict";function a(a,b){var c=a[b];if("function"==typeof c.bind)return c.bind(a);try{return Function.prototype.bind.call(c,a)}catch(b){return function(){return Function.prototype.apply.apply(c,[a,arguments])}}}function b(){console.log&&(console.log.apply?console.log.apply(console,arguments):Function.prototype.apply.apply(console.log,[console,arguments])),console.trace&&console.trace()}function c(c){return"debug"===c&&(c="log"),typeof console!==i&&("trace"===c&&j?b:void 0!==console[c]?a(console,c):void 0!==console.log?a(console,"log"):h)}function d(a,b){for(var c=0;c<k.length;c++){var d=k[c];this[d]=c<a?h:this.methodFactory(d,a,b)}this.log=this.debug}function e(a,b,c){return function(){typeof console!==i&&(d.call(this,b,c),this[a].apply(this,arguments))}}function f(a,b,d){return c(a)||e.apply(this,arguments)}function g(a,b,c){function e(a){var b=(k[a]||"silent").toUpperCase();if(typeof window!==i){try{return void(window.localStorage[l]=b)}catch(a){}try{window.document.cookie=encodeURIComponent(l)+"="+b+";"}catch(a){}}}function g(){var a;if(typeof window!==i){try{a=window.localStorage[l]}catch(a){}if(typeof a===i)try{var b=window.document.cookie,c=b.indexOf(encodeURIComponent(l)+"=");-1!==c&&(a=/^([^;]+)/.exec(b.slice(c))[1])}catch(a){}return void 0===j.levels[a]&&(a=void 0),a}}var h,j=this,l="loglevel";a&&(l+=":"+a),j.name=a,j.levels={TRACE:0,DEBUG:1,INFO:2,WARN:3,ERROR:4,SILENT:5},j.methodFactory=c||f,j.getLevel=function(){return h},j.setLevel=function(b,c){if("string"==typeof b&&void 0!==j.levels[b.toUpperCase()]&&(b=j.levels[b.toUpperCase()]),!("number"==typeof b&&b>=0&&b<=j.levels.SILENT))throw"log.setLevel() called with invalid level: "+b;if(h=b,!1!==c&&e(b),d.call(j,b,a),typeof console===i&&b<j.levels.SILENT)return"No console available for logging"},j.setDefaultLevel=function(a){g()||j.setLevel(a,!1)},j.enableAll=function(a){j.setLevel(j.levels.TRACE,a)},j.disableAll=function(a){j.setLevel(j.levels.SILENT,a)};var m=g();null==m&&(m=null==b?"WARN":b),j.setLevel(m,!1)}var h=function(){},i="undefined",j=typeof window!==i&&typeof window.navigator!==i&&/Trident\/|MSIE /.test(window.navigator.userAgent),k=["trace","debug","info","warn","error"],l=new g,m={};l.getLogger=function(a){if("string"!=typeof a||""===a)throw new TypeError("You must supply a name when creating a logger.");var b=m[a];return b||(b=m[a]=new g(a,l.getLevel(),l.methodFactory)),b};var n=typeof window!==i?window.log:void 0;return l.noConflict=function(){return typeof window!==i&&window.log===l&&(window.log=n),l},l.getLoggers=function(){return m},l});
|
||||
|
|
@ -0,0 +1,393 @@
|
|||
(function (global, factory) {
|
||||
typeof exports === 'object' && typeof module !== 'undefined' ? module.exports = factory() :
|
||||
typeof define === 'function' && define.amd ? define(factory) :
|
||||
(global = global || self, global.WSRPC = factory());
|
||||
}(this, function () { 'use strict';
|
||||
|
||||
function _classCallCheck(instance, Constructor) {
|
||||
if (!(instance instanceof Constructor)) {
|
||||
throw new TypeError("Cannot call a class as a function");
|
||||
}
|
||||
}
|
||||
|
||||
var Deferred = function Deferred() {
|
||||
_classCallCheck(this, Deferred);
|
||||
|
||||
var self = this;
|
||||
self.resolve = null;
|
||||
self.reject = null;
|
||||
self.done = false;
|
||||
|
||||
function wrapper(func) {
|
||||
return function () {
|
||||
if (self.done) throw new Error('Promise already done');
|
||||
self.done = true;
|
||||
return func.apply(this, arguments);
|
||||
};
|
||||
}
|
||||
|
||||
self.promise = new Promise(function (resolve, reject) {
|
||||
self.resolve = wrapper(resolve);
|
||||
self.reject = wrapper(reject);
|
||||
});
|
||||
|
||||
self.promise.isPending = function () {
|
||||
return !self.done;
|
||||
};
|
||||
|
||||
return self;
|
||||
};
|
||||
|
||||
function logGroup(group, level, args) {
|
||||
console.group(group);
|
||||
console[level].apply(this, args);
|
||||
console.groupEnd();
|
||||
}
|
||||
|
||||
function log() {
|
||||
if (!WSRPC.DEBUG) return;
|
||||
logGroup('WSRPC.DEBUG', 'trace', arguments);
|
||||
}
|
||||
|
||||
function trace(msg) {
|
||||
if (!WSRPC.TRACE) return;
|
||||
var payload = msg;
|
||||
if ('data' in msg) payload = JSON.parse(msg.data);
|
||||
logGroup("WSRPC.TRACE", 'trace', [payload]);
|
||||
}
|
||||
|
||||
function getAbsoluteWsUrl(url) {
|
||||
if (/^\w+:\/\//.test(url)) return url;
|
||||
if (typeof window == 'undefined' && window.location.host.length < 1) throw new Error("Can not construct absolute URL from ".concat(window.location));
|
||||
var scheme = window.location.protocol === "https:" ? "wss:" : "ws:";
|
||||
var port = window.location.port === '' ? ":".concat(window.location.port) : '';
|
||||
var host = window.location.host;
|
||||
var path = url.replace(/^\/+/gm, '');
|
||||
return "".concat(scheme, "//").concat(host).concat(port, "/").concat(path);
|
||||
}
|
||||
|
||||
var readyState = Object.freeze({
|
||||
0: 'CONNECTING',
|
||||
1: 'OPEN',
|
||||
2: 'CLOSING',
|
||||
3: 'CLOSED'
|
||||
});
|
||||
|
||||
var WSRPC = function WSRPC(URL) {
|
||||
var reconnectTimeout = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 1000;
|
||||
|
||||
_classCallCheck(this, WSRPC);
|
||||
|
||||
var self = this;
|
||||
URL = getAbsoluteWsUrl(URL);
|
||||
self.id = 1;
|
||||
self.eventId = 0;
|
||||
self.socketStarted = false;
|
||||
self.eventStore = {
|
||||
onconnect: {},
|
||||
onerror: {},
|
||||
onclose: {},
|
||||
onchange: {}
|
||||
};
|
||||
self.connectionNumber = 0;
|
||||
self.oneTimeEventStore = {
|
||||
onconnect: [],
|
||||
onerror: [],
|
||||
onclose: [],
|
||||
onchange: []
|
||||
};
|
||||
self.callQueue = [];
|
||||
|
||||
function createSocket() {
|
||||
var ws = new WebSocket(URL);
|
||||
|
||||
var rejectQueue = function rejectQueue() {
|
||||
self.connectionNumber++; // rejects incoming calls
|
||||
|
||||
var deferred; //reject all pending calls
|
||||
|
||||
while (0 < self.callQueue.length) {
|
||||
var callObj = self.callQueue.shift();
|
||||
deferred = self.store[callObj.id];
|
||||
delete self.store[callObj.id];
|
||||
|
||||
if (deferred && deferred.promise.isPending()) {
|
||||
deferred.reject('WebSocket error occurred');
|
||||
}
|
||||
} // reject all from the store
|
||||
|
||||
|
||||
for (var key in self.store) {
|
||||
if (!self.store.hasOwnProperty(key)) continue;
|
||||
deferred = self.store[key];
|
||||
|
||||
if (deferred && deferred.promise.isPending()) {
|
||||
deferred.reject('WebSocket error occurred');
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
function reconnect(callEvents) {
|
||||
setTimeout(function () {
|
||||
try {
|
||||
self.socket = createSocket();
|
||||
self.id = 1;
|
||||
} catch (exc) {
|
||||
callEvents('onerror', exc);
|
||||
delete self.socket;
|
||||
console.error(exc);
|
||||
}
|
||||
}, reconnectTimeout);
|
||||
}
|
||||
|
||||
ws.onclose = function (err) {
|
||||
log('ONCLOSE CALLED', 'STATE', self.public.state());
|
||||
trace(err);
|
||||
|
||||
for (var serial in self.store) {
|
||||
if (!self.store.hasOwnProperty(serial)) continue;
|
||||
|
||||
if (self.store[serial].hasOwnProperty('reject')) {
|
||||
self.store[serial].reject('Connection closed');
|
||||
}
|
||||
}
|
||||
|
||||
rejectQueue();
|
||||
callEvents('onclose', err);
|
||||
callEvents('onchange', err);
|
||||
reconnect(callEvents);
|
||||
};
|
||||
|
||||
ws.onerror = function (err) {
|
||||
log('ONERROR CALLED', 'STATE', self.public.state());
|
||||
trace(err);
|
||||
rejectQueue();
|
||||
callEvents('onerror', err);
|
||||
callEvents('onchange', err);
|
||||
log('WebSocket has been closed by error: ', err);
|
||||
};
|
||||
|
||||
function tryCallEvent(func, event) {
|
||||
try {
|
||||
return func(event);
|
||||
} catch (e) {
|
||||
if (e.hasOwnProperty('stack')) {
|
||||
log(e.stack);
|
||||
} else {
|
||||
log('Event function', func, 'raised unknown error:', e);
|
||||
}
|
||||
|
||||
console.error(e);
|
||||
}
|
||||
}
|
||||
|
||||
function callEvents(evName, event) {
|
||||
while (0 < self.oneTimeEventStore[evName].length) {
|
||||
var deferred = self.oneTimeEventStore[evName].shift();
|
||||
if (deferred.hasOwnProperty('resolve') && deferred.promise.isPending()) deferred.resolve();
|
||||
}
|
||||
|
||||
for (var i in self.eventStore[evName]) {
|
||||
if (!self.eventStore[evName].hasOwnProperty(i)) continue;
|
||||
var cur = self.eventStore[evName][i];
|
||||
tryCallEvent(cur, event);
|
||||
}
|
||||
}
|
||||
|
||||
ws.onopen = function (ev) {
|
||||
log('ONOPEN CALLED', 'STATE', self.public.state());
|
||||
trace(ev);
|
||||
|
||||
while (0 < self.callQueue.length) {
|
||||
// noinspection JSUnresolvedFunction
|
||||
self.socket.send(JSON.stringify(self.callQueue.shift(), 0, 1));
|
||||
}
|
||||
|
||||
callEvents('onconnect', ev);
|
||||
callEvents('onchange', ev);
|
||||
};
|
||||
|
||||
function handleCall(self, data) {
|
||||
if (!self.routes.hasOwnProperty(data.method)) throw new Error('Route not found');
|
||||
var connectionNumber = self.connectionNumber;
|
||||
var deferred = new Deferred();
|
||||
deferred.promise.then(function (result) {
|
||||
if (connectionNumber !== self.connectionNumber) return;
|
||||
self.socket.send(JSON.stringify({
|
||||
id: data.id,
|
||||
result: result
|
||||
}));
|
||||
}, function (error) {
|
||||
if (connectionNumber !== self.connectionNumber) return;
|
||||
self.socket.send(JSON.stringify({
|
||||
id: data.id,
|
||||
error: error
|
||||
}));
|
||||
});
|
||||
var func = self.routes[data.method];
|
||||
if (self.asyncRoutes[data.method]) return func.apply(deferred, [data.params]);
|
||||
|
||||
function badPromise() {
|
||||
throw new Error("You should register route with async flag.");
|
||||
}
|
||||
|
||||
var promiseMock = {
|
||||
resolve: badPromise,
|
||||
reject: badPromise
|
||||
};
|
||||
|
||||
try {
|
||||
deferred.resolve(func.apply(promiseMock, [data.params]));
|
||||
} catch (e) {
|
||||
deferred.reject(e);
|
||||
console.error(e);
|
||||
}
|
||||
}
|
||||
|
||||
function handleError(self, data) {
|
||||
if (!self.store.hasOwnProperty(data.id)) return log('Unknown callback');
|
||||
var deferred = self.store[data.id];
|
||||
if (typeof deferred === 'undefined') return log('Confirmation without handler');
|
||||
delete self.store[data.id];
|
||||
log('REJECTING', data.error);
|
||||
deferred.reject(data.error);
|
||||
}
|
||||
|
||||
function handleResult(self, data) {
|
||||
var deferred = self.store[data.id];
|
||||
if (typeof deferred === 'undefined') return log('Confirmation without handler');
|
||||
delete self.store[data.id];
|
||||
|
||||
if (data.hasOwnProperty('result')) {
|
||||
return deferred.resolve(data.result);
|
||||
}
|
||||
|
||||
return deferred.reject(data.error);
|
||||
}
|
||||
|
||||
ws.onmessage = function (message) {
|
||||
log('ONMESSAGE CALLED', 'STATE', self.public.state());
|
||||
trace(message);
|
||||
if (message.type !== 'message') return;
|
||||
var data;
|
||||
|
||||
try {
|
||||
data = JSON.parse(message.data);
|
||||
log(data);
|
||||
|
||||
if (data.hasOwnProperty('method')) {
|
||||
return handleCall(self, data);
|
||||
} else if (data.hasOwnProperty('error') && data.error === null) {
|
||||
return handleError(self, data);
|
||||
} else {
|
||||
return handleResult(self, data);
|
||||
}
|
||||
} catch (exception) {
|
||||
var err = {
|
||||
error: exception.message,
|
||||
result: null,
|
||||
id: data ? data.id : null
|
||||
};
|
||||
self.socket.send(JSON.stringify(err));
|
||||
console.error(exception);
|
||||
}
|
||||
};
|
||||
|
||||
return ws;
|
||||
}
|
||||
|
||||
function makeCall(func, args, params) {
|
||||
self.id += 2;
|
||||
var deferred = new Deferred();
|
||||
var callObj = Object.freeze({
|
||||
id: self.id,
|
||||
method: func,
|
||||
params: args
|
||||
});
|
||||
var state = self.public.state();
|
||||
|
||||
if (state === 'OPEN') {
|
||||
self.store[self.id] = deferred;
|
||||
self.socket.send(JSON.stringify(callObj));
|
||||
} else if (state === 'CONNECTING') {
|
||||
log('SOCKET IS', state);
|
||||
self.store[self.id] = deferred;
|
||||
self.callQueue.push(callObj);
|
||||
} else {
|
||||
log('SOCKET IS', state);
|
||||
|
||||
if (params && params['noWait']) {
|
||||
deferred.reject("Socket is: ".concat(state));
|
||||
} else {
|
||||
self.store[self.id] = deferred;
|
||||
self.callQueue.push(callObj);
|
||||
}
|
||||
}
|
||||
|
||||
return deferred.promise;
|
||||
}
|
||||
|
||||
self.asyncRoutes = {};
|
||||
self.routes = {};
|
||||
self.store = {};
|
||||
self.public = Object.freeze({
|
||||
call: function call(func, args, params) {
|
||||
return makeCall(func, args, params);
|
||||
},
|
||||
addRoute: function addRoute(route, callback, isAsync) {
|
||||
self.asyncRoutes[route] = isAsync || false;
|
||||
self.routes[route] = callback;
|
||||
},
|
||||
deleteRoute: function deleteRoute(route) {
|
||||
delete self.asyncRoutes[route];
|
||||
return delete self.routes[route];
|
||||
},
|
||||
addEventListener: function addEventListener(event, func) {
|
||||
var eventId = self.eventId++;
|
||||
self.eventStore[event][eventId] = func;
|
||||
return eventId;
|
||||
},
|
||||
removeEventListener: function removeEventListener(event, index) {
|
||||
if (self.eventStore[event].hasOwnProperty(index)) {
|
||||
delete self.eventStore[event][index];
|
||||
return true;
|
||||
} else {
|
||||
return false;
|
||||
}
|
||||
},
|
||||
onEvent: function onEvent(event) {
|
||||
var deferred = new Deferred();
|
||||
self.oneTimeEventStore[event].push(deferred);
|
||||
return deferred.promise;
|
||||
},
|
||||
destroy: function destroy() {
|
||||
return self.socket.close();
|
||||
},
|
||||
state: function state() {
|
||||
return readyState[this.stateCode()];
|
||||
},
|
||||
stateCode: function stateCode() {
|
||||
if (self.socketStarted && self.socket) return self.socket.readyState;
|
||||
return 3;
|
||||
},
|
||||
connect: function connect() {
|
||||
self.socketStarted = true;
|
||||
self.socket = createSocket();
|
||||
}
|
||||
});
|
||||
self.public.addRoute('log', function (argsObj) {
|
||||
//console.info("Websocket sent: ".concat(argsObj));
|
||||
});
|
||||
self.public.addRoute('ping', function (data) {
|
||||
return data;
|
||||
});
|
||||
return self.public;
|
||||
};
|
||||
|
||||
WSRPC.DEBUG = false;
|
||||
WSRPC.TRACE = false;
|
||||
|
||||
return WSRPC;
|
||||
|
||||
}));
|
||||
//# sourceMappingURL=wsrpc.js.map
|
||||
1
server_addon/photoshop/client/ayon_photoshop/api/extension/client/wsrpc.min.js
vendored
Normal file
File diff suppressed because one or more lines are too long
|
|
@ -0,0 +1,774 @@
|
|||
/*
|
||||
_ ______ __ _
|
||||
| / ___\ \/ / (_)___
|
||||
_ | \___ \\ / | / __|
|
||||
| |_| |___) / \ _ | \__ \
|
||||
\___/|____/_/\_(_)/ |___/
|
||||
|__/
|
||||
_ ____
|
||||
/\ /\___ _ __ ___(_) ___ _ __ |___ \
|
||||
\ \ / / _ \ '__/ __| |/ _ \| '_ \ __) |
|
||||
\ V / __/ | \__ \ | (_) | | | | / __/
|
||||
\_/ \___|_| |___/_|\___/|_| |_| |_____|
|
||||
*/
|
||||
|
||||
|
||||
//////////////////////////////////////////////////////////////////////////////////
|
||||
// JSX.js © and written by Trevor https://creative-scripts.com/jsx-js          //
|
||||
// If your turnover is less than $50,000,000 then you don't have to pay anything //
|
||||
// License MIT, don't complain, don't sue NO MATTER WHAT //
|
||||
// If your turnover is more than $50,000,000 then you DO have to pay            //
|
||||
// Contact me https://creative-scripts.com/contact for pricing and licensing //
|
||||
// Don't remove these commented lines //
|
||||
// For simple and effective calling of jsx from the js engine //
|
||||
// Version 2 last modified April 18 2018 //
|
||||
//////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
///////////////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
// Change log: //
|
||||
// JSX.js V2 is now independent of NodeJS and CSInterface.js <span class="wp-font-emots-emo-happy"></span> //
|
||||
// forceEval is now by default true //
|
||||
// It wraps the scripts in a try catch and an eval providing useful error handling //
|
||||
// One can set in the jsx engine $.includeStack = true to return the call stack in the event of an error //
|
||||
///////////////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
///////////////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
// JSX.js for calling jsx code from the js engine //
|
||||
// 2 methods included //
|
||||
// 1) jsx.evalScript AKA jsx.eval //
|
||||
// 2) jsx.evalFile AKA jsx.file //
|
||||
// Special features //
|
||||
// 1) Allows all changes in your jsx code to be reloaded into your extension at the click of a button //
|
||||
// 2) Can enable the $.fileName property to work and provides a $.__fileName() method as an alternative //
|
||||
// 3) Can force a callBack result from InDesign //
|
||||
// 4) No more csInterface.evalScript('alert("hello "' + title + " " + name + '");') //
|
||||
// use jsx.evalScript('alert("hello __title__ __name__");', {title: title, name: name}); //
|
||||
// 5) execute jsx files from your jsx folder like this jsx.evalFile('myFabJsxScript.jsx'); //
|
||||
// or from a relative path jsx.evalFile('../myFabScripts/myFabJsxScript.jsx'); //
|
||||
// or from an absolute url jsx.evalFile('/Path/to/my/FabJsxScript.jsx'); (mac) //
|
||||
// or from an absolute url jsx.evalFile('C:Path/to/my/FabJsxScript.jsx'); (windows) //
|
||||
// 6) Parameter can be entered in the from of a parameter list which can be in any order or as an object //
|
||||
// 7) Not camelCase sensitive (very useful for the illiterate) //
|
||||
// <span class="wp-font-emots-emo-sunglasses"></span> Dead easy to use BUT SPEND THE 3 TO 5 MINUTES IT SHOULD TAKE TO READ THE INSTRUCTIONS //
|
||||
///////////////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
/* jshint undef:true, unused:true, esversion:6 */
|
||||
|
||||
//////////////////////////////////////
|
||||
// jsx is the interface for the API //
|
||||
//////////////////////////////////////
|
||||
|
||||
var jsx;
|
||||
|
||||
// Wrap everything in an anonymous function to prevent leeks
|
||||
(function() {
|
||||
/////////////////////////////////////////////////////////////////////
|
||||
// Substitute some CSInterface functions to avoid dependency on it //
|
||||
/////////////////////////////////////////////////////////////////////
|
||||
|
||||
var __dirname = (function() {
|
||||
var path, isMac;
|
||||
path = decodeURI(window.__adobe_cep__.getSystemPath('extension'));
|
||||
isMac = navigator.platform[0] === 'M'; // [M]ac
|
||||
path = path.replace('file://' + (isMac ? '' : '/'), '');
|
||||
return path;
|
||||
})();
|
||||
|
||||
var evalScript = function(script, callback) {
|
||||
callback = callback || function() {};
|
||||
window.__adobe_cep__.evalScript(script, callback);
|
||||
};
|
||||
|
||||
|
||||
////////////////////////////////////////////
|
||||
// In place of using the node path module //
|
||||
////////////////////////////////////////////
|
||||
|
||||
// jshint undef: true, unused: true
|
||||
|
||||
// A very minified version of the NodeJs Path module!!
|
||||
// For use outside of NodeJs
|
||||
// Majorly nicked by Trevor from Joyent
|
||||
var path = (function() {
|
||||
|
||||
var isString = function(arg) {
|
||||
return typeof arg === 'string';
|
||||
};
|
||||
|
||||
// var isObject = function(arg) {
|
||||
// return typeof arg === 'object' && arg !== null;
|
||||
// };
|
||||
|
||||
var basename = function(path) {
|
||||
if (!isString(path)) {
|
||||
throw new TypeError('Argument to path.basename must be a string');
|
||||
}
|
||||
var bits = path.split(/[\/\\]/g);
|
||||
return bits[bits.length - 1];
|
||||
};
|
||||
|
||||
// jshint undef: true
|
||||
// Regex to split a windows path into three parts: [*, device, slash,
|
||||
// tail] windows-only
|
||||
var splitDeviceRe =
|
||||
/^([a-zA-Z]:|[\\\/]{2}[^\\\/]+[\\\/]+[^\\\/]+)?([\\\/])?([\s\S]*?)$/;
|
||||
|
||||
// Regex to split the tail part of the above into [*, dir, basename, ext]
|
||||
// var splitTailRe =
|
||||
// /^([\s\S]*?)((?:\.{1,2}|[^\\\/]+?|)(\.[^.\/\\]*|))(?:[\\\/]*)$/;
|
||||
|
||||
var win32 = {};
|
||||
// Function to split a filename into [root, dir, basename, ext]
|
||||
// var win32SplitPath = function(filename) {
|
||||
// // Separate device+slash from tail
|
||||
// var result = splitDeviceRe.exec(filename),
|
||||
// device = (result[1] || '') + (result[2] || ''),
|
||||
// tail = result[3] || '';
|
||||
// // Split the tail into dir, basename and extension
|
||||
// var result2 = splitTailRe.exec(tail),
|
||||
// dir = result2[1],
|
||||
// basename = result2[2],
|
||||
// ext = result2[3];
|
||||
// return [device, dir, basename, ext];
|
||||
// };
|
||||
|
||||
var win32StatPath = function(path) {
|
||||
var result = splitDeviceRe.exec(path),
|
||||
device = result[1] || '',
|
||||
isUnc = !!device && device[1] !== ':';
|
||||
return {
|
||||
device: device,
|
||||
isUnc: isUnc,
|
||||
isAbsolute: isUnc || !!result[2], // UNC paths are always absolute
|
||||
tail: result[3]
|
||||
};
|
||||
};
|
||||
|
||||
var normalizeUNCRoot = function(device) {
|
||||
return '\\\\' + device.replace(/^[\\\/]+/, '').replace(/[\\\/]+/g, '\\');
|
||||
};
|
||||
|
||||
var normalizeArray = function(parts, allowAboveRoot) {
|
||||
var res = [];
|
||||
for (var i = 0; i < parts.length; i++) {
|
||||
var p = parts[i];
|
||||
|
||||
// ignore empty parts
|
||||
if (!p || p === '.')
|
||||
continue;
|
||||
|
||||
if (p === '..') {
|
||||
if (res.length && res[res.length - 1] !== '..') {
|
||||
res.pop();
|
||||
} else if (allowAboveRoot) {
|
||||
res.push('..');
|
||||
}
|
||||
} else {
|
||||
res.push(p);
|
||||
}
|
||||
}
|
||||
|
||||
return res;
|
||||
};
|
||||
|
||||
win32.normalize = function(path) {
|
||||
var result = win32StatPath(path),
|
||||
device = result.device,
|
||||
isUnc = result.isUnc,
|
||||
isAbsolute = result.isAbsolute,
|
||||
tail = result.tail,
|
||||
trailingSlash = /[\\\/]$/.test(tail);
|
||||
|
||||
// Normalize the tail path
|
||||
tail = normalizeArray(tail.split(/[\\\/]+/), !isAbsolute).join('\\');
|
||||
|
||||
if (!tail && !isAbsolute) {
|
||||
tail = '.';
|
||||
}
|
||||
if (tail && trailingSlash) {
|
||||
tail += '\\';
|
||||
}
|
||||
|
||||
// Convert slashes to backslashes when `device` points to an UNC root.
|
||||
// Also squash multiple slashes into a single one where appropriate.
|
||||
if (isUnc) {
|
||||
device = normalizeUNCRoot(device);
|
||||
}
|
||||
|
||||
return device + (isAbsolute ? '\\' : '') + tail;
|
||||
};
|
||||
win32.join = function() {
|
||||
var paths = [];
|
||||
for (var i = 0; i < arguments.length; i++) {
|
||||
var arg = arguments[i];
|
||||
if (!isString(arg)) {
|
||||
throw new TypeError('Arguments to path.join must be strings');
|
||||
}
|
||||
if (arg) {
|
||||
paths.push(arg);
|
||||
}
|
||||
}
|
||||
|
||||
var joined = paths.join('\\');
|
||||
|
||||
// Make sure that the joined path doesn't start with two slashes, because
|
||||
// normalize() will mistake it for an UNC path then.
|
||||
//
|
||||
// This step is skipped when it is very clear that the user actually
|
||||
// intended to point at an UNC path. This is assumed when the first
|
||||
// non-empty string arguments starts with exactly two slashes followed by
|
||||
// at least one more non-slash character.
|
||||
//
|
||||
// Note that for normalize() to treat a path as an UNC path it needs to
|
||||
// have at least 2 components, so we don't filter for that here.
|
||||
// This means that the user can use join to construct UNC paths from
|
||||
// a server name and a share name; for example:
|
||||
// path.join('//server', 'share') -> '\\\\server\\share\')
|
||||
if (!/^[\\\/]{2}[^\\\/]/.test(paths[0])) {
|
||||
joined = joined.replace(/^[\\\/]{2,}/, '\\');
|
||||
}
|
||||
return win32.normalize(joined);
|
||||
};
|
||||
|
||||
var posix = {};
|
||||
|
||||
// posix version
|
||||
posix.join = function() {
|
||||
var path = '';
|
||||
for (var i = 0; i < arguments.length; i++) {
|
||||
var segment = arguments[i];
|
||||
if (!isString(segment)) {
|
||||
throw new TypeError('Arguments to path.join must be strings');
|
||||
}
|
||||
if (segment) {
|
||||
if (!path) {
|
||||
path += segment;
|
||||
} else {
|
||||
path += '/' + segment;
|
||||
}
|
||||
}
|
||||
}
|
||||
return posix.normalize(path);
|
||||
};
|
||||
|
||||
// path.normalize(path)
|
||||
// posix version
|
||||
posix.normalize = function(path) {
|
||||
var isAbsolute = path.charAt(0) === '/',
|
||||
trailingSlash = path && path[path.length - 1] === '/';
|
||||
|
||||
// Normalize the path
|
||||
path = normalizeArray(path.split('/'), !isAbsolute).join('/');
|
||||
|
||||
if (!path && !isAbsolute) {
|
||||
path = '.';
|
||||
}
|
||||
if (path && trailingSlash) {
|
||||
path += '/';
|
||||
}
|
||||
|
||||
return (isAbsolute ? '/' : '') + path;
|
||||
};
|
||||
|
||||
win32.basename = posix.basename = basename;
|
||||
|
||||
this.win32 = win32;
|
||||
this.posix = posix;
|
||||
return (navigator.platform[0] === 'M') ? posix : win32;
|
||||
})();
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
// The is the "main" function which is to be prototyped //
|
||||
// It run a small snippet in the jsx engine that //
|
||||
// 1) Assigns $.__dirname with the value of the extensions __dirname base path //
|
||||
// 2) Sets up a method $.__fileName() for retrieving from within the jsx script it's $.fileName value //
|
||||
// more on that method later //
|
||||
// At the end of the script the global declaration jsx = new Jsx(); has been made. //
|
||||
// If you like you can remove that and include in your relevant functions //
|
||||
// var jsx = new Jsx(); You would never call the Jsx function without the "new" declaration //
|
||||
////////////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
var Jsx = function() {
|
||||
var jsxScript;
|
||||
// Setup jsx function to enable the jsx scripts to easily retrieve their file location
|
||||
jsxScript = [
|
||||
'$.level = 0;',
|
||||
'if(!$.__fileNames){',
|
||||
' $.__fileNames = {};',
|
||||
' $.__dirname = "__dirname__";'.replace('__dirname__', __dirname),
|
||||
' $.__fileName = function(name){',
|
||||
' name = name || $.fileName;',
|
||||
' return ($.__fileNames && $.__fileNames[name]) || $.fileName;',
|
||||
' };',
|
||||
'}'
|
||||
].join('');
|
||||
evalScript(jsxScript);
|
||||
return this;
|
||||
};
|
||||
|
||||
/**
|
||||
* [evalScript] For calling jsx scripts from the js engine
|
||||
*
|
||||
* The jsx.evalScript method is used for calling jsx scripts directly from the js engine
|
||||
* Allows for easy replacement i.e. variable insertions and for forcing eval.
|
||||
* For convenience jsx.eval or jsx.script or jsx.evalscript can be used instead of calling jsx.evalScript
|
||||
*
|
||||
* @param {String} jsxScript
|
||||
* The string that makes up the jsx script
|
||||
* it can contain a simple template like syntax for replacements
|
||||
* 'alert("__foo__");'
|
||||
* the __foo__ will be replaced as per the replacements parameter
|
||||
*
|
||||
* @param {Function} callback
|
||||
* The callback function you want the jsx script to trigger on completion
|
||||
* The result of the jsx script is passed as the argument to that function
|
||||
* The function can exist in some other file.
|
||||
* Note that InDesign does not automatically pass the callBack as a string.
|
||||
* Either write your InDesign in a way that it returns a sting the form of
|
||||
* return 'this is my result surrounded by quotes'
|
||||
* or use the force eval option
|
||||
* [Optional DEFAULT no callBack]
|
||||
*
|
||||
* @param {Object} replacements
|
||||
* The replacements to make on the jsx script
|
||||
* given the following script (template)
|
||||
* 'alert("__message__: " + __val__);'
|
||||
* and we want to change the script to
|
||||
* 'alert("I was born in the year: " + 1234);'
|
||||
* we would pass the following object
|
||||
* {"message": 'I was born in the year', "val": 1234}
|
||||
* or if not using reserved words like do we can leave out the key quotes
|
||||
* {message: 'I was born in the year', val: 1234}
|
||||
* [Optional DEFAULT no replacements]
|
||||
*
|
||||
* @param {Bolean} forceEval
|
||||
* If the script should be wrapped in an eval and try catch
|
||||
* This will 1) provide useful error feedback if heaven forbid it is needed
|
||||
* 2) The result will be a string which is required for callback results in InDesign
|
||||
* [Optional DEFAULT true]
|
||||
*
|
||||
* Note 1) The order of the parameters is irrelevant
|
||||
* Note 2) One can pass the arguments as an object if desired
|
||||
* jsx.evalScript(myCallBackFunction, 'alert("__myMessage__");', true);
|
||||
* is the same as
|
||||
* jsx.evalScript({
|
||||
* script: 'alert("__myMessage__");',
|
||||
* replacements: {myMessage: 'Hi there'},
|
||||
* callBack: myCallBackFunction,
|
||||
* eval: true
|
||||
* });
|
||||
* note that either lower or camelCase key names are valid
|
||||
* i.e. both callback or callBack will work
|
||||
*
|
||||
* The following keys are the same jsx || script || jsxScript || jsxscript || file
|
||||
* The following keys are the same callBack || callback
|
||||
* The following keys are the same replacements || replace
|
||||
* The following keys are the same eval || forceEval || forceeval
|
||||
* The following keys are the same forceEvalScript || forceevalscript || evalScript || evalscript;
|
||||
*
|
||||
* @return {Boolean} if the jsxScript was executed or not
|
||||
*/
|
||||
|
||||
Jsx.prototype.evalScript = function() {
|
||||
var arg, i, key, replaceThis, withThis, args, callback, forceEval, replacements, jsxScript, isBin;
|
||||
|
||||
//////////////////////////////////////////////////////////////////////////////////////
|
||||
// sort out order which arguments into jsxScript, callback, replacements, forceEval //
|
||||
//////////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
args = arguments;
|
||||
|
||||
// Detect if the parameters were passed as an object and if so allow for various keys
|
||||
if (args.length === 1 && (arg = args[0]) instanceof Object) {
|
||||
jsxScript = arg.jsxScript || arg.jsx || arg.script || arg.file || arg.jsxscript;
|
||||
callback = arg.callBack || arg.callback;
|
||||
replacements = arg.replacements || arg.replace;
|
||||
forceEval = arg.eval || arg.forceEval || arg.forceeval;
|
||||
} else {
|
||||
for (i = 0; i < 4; i++) {
|
||||
arg = args[i];
|
||||
if (arg === undefined) {
|
||||
continue;
|
||||
}
|
||||
if (arg.constructor === String) {
|
||||
jsxScript = arg;
|
||||
continue;
|
||||
}
|
||||
if (arg.constructor === Object) {
|
||||
replacements = arg;
|
||||
continue;
|
||||
}
|
||||
if (arg.constructor === Function) {
|
||||
callback = arg;
|
||||
continue;
|
||||
}
|
||||
if (arg === false) {
|
||||
forceEval = false;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// If no script provide then not too much to do!
|
||||
if (!jsxScript) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Have changed the forceEval default to be true as I prefer the error handling
|
||||
if (forceEval !== false) {
|
||||
forceEval = true;
|
||||
}
|
||||
|
||||
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
// On Illustrator and other apps the result of the jsx script is automatically passed as a string //
|
||||
// if you have a "script" containing the single number 1 and nothing else then the callBack will register as "1" //
|
||||
// On InDesign that same script will provide a blank callBack //
|
||||
// Let's say we have a callBack function var callBack = function(result){alert(result);} //
|
||||
// On Ai you'll see the 1 in the alert                                                                                                   //
|
||||
// On ID you'll just see a blank alert                                                                                                   //
|
||||
// To see the 1 in the alert you need to convert the result to a string and then it will show //
|
||||
// So if we rewrite our 1 byte script to '1' i.e. surround the 1 in quotes then the call back alert will show 1                          //
|
||||
// If the script is planned with this in mind one can make sure that the result is always passed as a string (including errors)         //
|
||||
// otherwise one can wrap the script in an eval and then have the result passed as a string //
|
||||
// I have not gone through all the apps but can say //
|
||||
// for Ai you never need to set the forceEval to true //
|
||||
// for ID, if you have not coded your script appropriately and you want to send a result to the callBack then set forceEval to true     //
|
||||
// I changed this that even on Illustrator it applies the try catch, Note the try catch will fail if $.level is set to 1 //
|
||||
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
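// A brief sketch restating the note above (hypothetical one-token script '1', hypothetical callback cb):
// jsx.evalScript('1', cb, false);   // cb receives "1" on Illustrator but a blank result on InDesign
// jsx.evalScript('1', cb);          // default wrapping coerces the result to a string, so cb receives "1" on both
// jsx.evalScript("'1'", cb, false); // quoting inside the script itself achieves the same without the wrapper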
|
||||
|
||||
if (forceEval) {
|
||||
|
||||
isBin = (jsxScript.substring(0, 10) === '@JSXBIN@ES') ? '' : '\n';
|
||||
jsxScript = (
|
||||
// "\n''') + '';} catch(e){(function(e){var n, a=[]; for (n in e){a.push(n + ': ' + e[n])}; return a.join('\n')})(e)}");
|
||||
// "\n''') + '';} catch(e){e + (e.line ? ('\\nLine ' + (+e.line - 1)) : '')}");
|
||||
[
|
||||
"$.level = 0;",
|
||||
"try{eval('''" + isBin, // need to add an extra line otherwise #targetengine doesn't work ;-]
|
||||
jsxScript.replace(/\\/g, '\\\\').replace(/'/g, "\\'").replace(/"/g, '\\"') + "\n''') + '';",
|
||||
"} catch (e) {",
|
||||
" (function(e) {",
|
||||
" var line, sourceLine, name, description, ErrorMessage, fileName, start, end, bug;",
|
||||
" line = +e.line" + (isBin === '' ? ';' : ' - 1;'), // To take into account the extra line added
|
||||
" fileName = File(e.fileName).fsName;",
|
||||
" sourceLine = line && e.source.split(/[\\r\\n]/)[line];",
|
||||
" name = e.name;",
|
||||
" description = e.description;",
|
||||
" ErrorMessage = name + ' ' + e.number + ': ' + description;",
|
||||
" if (fileName.length && !(/[\\/\\\\]\\d+$/.test(fileName))) {",
|
||||
" ErrorMessage += '\\nFile: ' + fileName;",
|
||||
" line++;",
|
||||
" }",
|
||||
" if (line){",
|
||||
" ErrorMessage += '\\nLine: ' + line +",
|
||||
" '-> ' + ((sourceLine.length < 300) ? sourceLine : sourceLine.substring(0,300) + '...');",
|
||||
" }",
|
||||
" if (e.start) {ErrorMessage += '\\nBug: ' + e.source.substring(e.start - 1, e.end)}",
|
||||
" if ($.includeStack) {ErrorMessage += '\\nStack:' + $.stack;}",
|
||||
" return ErrorMessage;",
|
||||
" })(e);",
|
||||
"}"
|
||||
].join('')
|
||||
);
|
||||
|
||||
}
|
||||
|
||||
/////////////////////////////////////////////////////////////
|
||||
// deal with the replacements //
|
||||
// Note it's probably better to use ${template} `literals` //
|
||||
/////////////////////////////////////////////////////////////
|
||||
|
||||
if (replacements) {
|
||||
for (key in replacements) {
|
||||
if (replacements.hasOwnProperty(key)) {
|
||||
replaceThis = new RegExp('__' + key + '__', 'g');
|
||||
withThis = replacements[key];
|
||||
jsxScript = jsxScript.replace(replaceThis, withThis + '');
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
try {
|
||||
evalScript(jsxScript, callback);
|
||||
return true;
|
||||
} catch (err) {
|
||||
////////////////////////////////////////////////
|
||||
// Do whatever error handling you want here ! //
|
||||
////////////////////////////////////////////////
|
||||
var newErr;
|
||||
newErr = new Error(err);
|
||||
alert('Error Eek: ' + newErr.stack);
|
||||
return false;
|
||||
}
|
||||
|
||||
};
|
||||
|
||||
|
||||
/**
|
||||
* [evalFile] For calling jsx scripts from the js engine
|
||||
*
|
||||
 * The jsx.evalFile method is used for executing saved jsx scripts
|
||||
 * where the jsxScript parameter is a string of the jsx script's file location.
|
||||
* For convenience jsx.file or jsx.evalfile can be used instead of jsx.evalFile
|
||||
*
|
||||
* @param {String} file
|
||||
* The path to jsx script
|
||||
 * If only the base name is provided then the path will be presumed to be the jsx folder located in the __dirname folder
|
||||
* To execute files stored in the jsx folder located in the __dirname folder use
|
||||
* jsx.evalFile('myFabJsxScript.jsx');
|
||||
 * To execute files stored in a folder myFabScripts located in the __dirname folder use
|
||||
* jsx.evalFile('./myFabScripts/myFabJsxScript.jsx');
|
||||
 * To execute files stored in a folder located at an absolute path use
|
||||
* jsx.evalFile('/Path/to/my/FabJsxScript.jsx'); (mac)
|
||||
 * or jsx.evalFile('C:/Path/to/my/FabJsxScript.jsx'); (windows)
|
||||
*
|
||||
* @param {Function} callback
|
||||
* The callback function you want the jsx script to trigger on completion
|
||||
* The result of the jsx script is passed as the argument to that function
|
||||
* The function can exist in some other file.
|
||||
 * Note that InDesign does not automatically pass the result to the callBack as a string.
|
||||
 * Either write your InDesign script in a way that it returns a string in the form of
|
||||
* return 'this is my result surrounded by quotes'
|
||||
* or use the force eval option
|
||||
* [Optional DEFAULT no callBack]
|
||||
*
|
||||
* @param {Object} replacements
|
||||
* The replacements to make on the jsx script
|
||||
 * given the following script (template)
|
||||
* 'alert("__message__: " + __val__);'
|
||||
* and we want to change the script to
|
||||
* 'alert("I was born in the year: " + 1234);'
|
||||
* we would pass the following object
|
||||
* {"message": 'I was born in the year', "val": 1234}
|
||||
* or if not using reserved words like do we can leave out the key quotes
|
||||
* {message: 'I was born in the year', val: 1234}
|
||||
* By default when possible the forceEvalScript will be set to true
|
||||
* The forceEvalScript option cannot be true when there are replacements
|
||||
* To force the forceEvalScript to be false you can send a blank set of replacements
|
||||
* jsx.evalFile('myFabScript.jsx', {}); Will NOT be executed using the $.evalScript method
|
||||
 * jsx.evalFile('myFabScript.jsx'); WILL be executed using the $.evalScript method
|
||||
* see the forceEvalScript parameter for details on this
|
||||
* [Optional DEFAULT no replacements]
|
||||
*
|
||||
 * @param {Boolean} forceEval
|
||||
* If the script should be wrapped in an eval and try catch
|
||||
* This will 1) provide useful error feedback if heaven forbid it is needed
|
||||
* 2) The result will be a string which is required for callback results in InDesign
|
||||
* [Optional DEFAULT true]
|
||||
*
|
||||
 * If no replacements are needed then the jsx script is executed by using the $.evalFile method
|
||||
 * This exposes the true value of the $.fileName property
|
||||
* In such a case it's best to avoid using the $.__fileName() with no base name as it won't work
|
||||
 * BUT one can still use the $.__fileName('baseName') method which is more accurate than the standard $.fileName property
|
||||
* Let's say you have a Drive called "Graphics" AND YOU HAVE a root folder on your "main" drive called "Graphics"
|
||||
* You call a script jsx.evalFile('/Volumes/Graphics/myFabScript.jsx');
|
||||
* $.fileName will give you '/Graphics/myFabScript.jsx' which is wrong
|
||||
* $.__fileName('myFabScript.jsx') will give you '/Volumes/Graphics/myFabScript.jsx' which is correct
|
||||
* $.__fileName() will not give you a reliable result
|
||||
* Note that if your calling multiple versions of myFabScript.jsx stored in multiple folders then you can get stuffed!
|
||||
* i.e. if the fileName is important to you then don't do that.
|
||||
* It also will force the result of the jsx file as a string which is particularly useful for InDesign callBacks
|
||||
*
|
||||
* Note 1) The order of the parameters is irrelevant
|
||||
* Note 2) One can pass the arguments as an object if desired
|
||||
* jsx.evalScript(myCallBackFunction, 'alert("__myMessage__");', true);
|
||||
* is the same as
|
||||
* jsx.evalScript({
|
||||
* script: 'alert("__myMessage__");',
|
||||
* replacements: {myMessage: 'Hi there'},
|
||||
* callBack: myCallBackFunction,
|
||||
* eval: false,
|
||||
* });
|
||||
 * note that either lower or camelCase key names are valid
|
||||
* i.e. both callback or callBack will work
|
||||
*
|
||||
* The following keys are the same file || jsx || script || jsxScript || jsxscript
|
||||
* The following keys are the same callBack || callback
|
||||
* The following keys are the same replacements || replace
|
||||
* The following keys are the same eval || forceEval || forceeval
|
||||
*
|
||||
* @return {Boolean} if the jsxScript was executed or not
|
||||
*/
|
||||
|
||||
Jsx.prototype.evalFile = function() {
|
||||
var arg, args, callback, fileName, fileNameScript, forceEval, forceEvalScript,
|
||||
i, jsxFolder, jsxScript, newLine, replacements, success;
|
||||
|
||||
success = true; // optimistic
|
||||
args = arguments;
|
||||
|
||||
jsxFolder = path.join(__dirname, 'jsx');
|
||||
//////////////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
// $.fileName does not return its correct path in the jsx engine for files called from the js engine      //
|
||||
// In Illustrator it returns an integer in InDesign it returns an empty string //
|
||||
// This script injection allows for the script to know its path by calling                                //
|
||||
// $.__fileName(); //
|
||||
// on Illustrator this works pretty well //
|
||||
// on InDesign it's best to use with a bit of care //
|
||||
// If a second script has been called then InDesign will "forget" the path to the first script            //
|
||||
// 2 work-arounds for this //
|
||||
// 1) at the beginning of your script add var thePathToMeIs = $.fileName(); //
|
||||
// thePathToMeIs will not be forgotten after running the second script //
|
||||
// 2) $.__fileName('myBaseName.jsx'); //
|
||||
// for example you have file with the following path //
|
||||
// /path/to/me.jsx //
|
||||
// Call $.__fileName('me.jsx') and you will get /path/to/me.jsx even after executing a second script //
|
||||
// Note When the forceEvalScript option is used then you just use the regular $.fileName property //
|
||||
//////////////////////////////////////////////////////////////////////////////////////////////////////////
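// A hedged jsx-side sketch of the workaround above (hypothetical script name myFabScript.jsx):
// var myPath = $.__fileName('myFabScript.jsx'); // full path, still valid after another script has run
// var fragile = $.fileName;                     // may be an integer (Ai) or an empty string (ID) here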
|
||||
fileNameScript = [
|
||||
// The if statement should not normally be executed
|
||||
'if(!$.__fileNames){',
|
||||
' $.__fileNames = {};',
|
||||
' $.__dirname = "__dirname__";'.replace('__dirname__', __dirname),
|
||||
' $.__fileName = function(name){',
|
||||
' name = name || $.fileName;',
|
||||
' return ($.__fileNames && $.__fileNames[name]) || $.fileName;',
|
||||
' };',
|
||||
'}',
|
||||
'$.__fileNames["__basename__"] = $.__fileNames["" + $.fileName] = "__fileName__";'
|
||||
].join('');
|
||||
|
||||
//////////////////////////////////////////////////////////////////////////////////////
|
||||
// sort out order which arguments into jsxScript, callback, replacements, forceEval //
|
||||
//////////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
|
||||
// Detect if the parameters were passed as an object and if so allow for various keys
|
||||
if (args.length === 1 && (arg = args[0]) instanceof Object) {
|
||||
jsxScript = arg.jsxScript || arg.jsx || arg.script || arg.file || arg.jsxscript;
|
||||
callback = arg.callBack || arg.callback;
|
||||
replacements = arg.replacements || arg.replace;
|
||||
forceEval = arg.eval || arg.forceEval || arg.forceeval;
|
||||
} else {
|
||||
for (i = 0; i < 5; i++) {
|
||||
arg = args[i];
|
||||
if (arg === undefined) {
|
||||
continue;
|
||||
}
|
||||
if (arg.constructor.name === 'String') {
|
||||
jsxScript = arg;
|
||||
continue;
|
||||
}
|
||||
if (arg.constructor.name === 'Object') {
|
||||
//////////////////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
// If no replacements are provided then the $.evalScript method will be used //
|
||||
// This will allow directly for the $.fileName property to be used //
|
||||
// If one does not want the $.evalScript method to be used then //
|
||||
// either send a blank object as the replacements {} //
|
||||
// or explicitly set the forceEvalScript option to false //
|
||||
// This can only be done if the parameters are passed as an object //
|
||||
// i.e. jsx.evalFile({file:'myFabScript.jsx', forceEvalScript: false}); //
|
||||
// if the file was called using //
|
||||
// i.e. jsx.evalFile('myFabScript.jsx'); //
|
||||
// then the following jsx code is called $.evalFile(new File('Path/to/myFabScript.jsx', 10000000000)) + ''; //
|
||||
// forceEval is never needed if the forceEvalScript is triggered //
|
||||
//////////////////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
replacements = arg;
|
||||
continue;
|
||||
}
|
||||
if (arg.constructor === Function) {
|
||||
callback = arg;
|
||||
continue;
|
||||
}
|
||||
if (arg === false) {
|
||||
forceEval = false;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// If no script provide then not too much to do!
|
||||
if (!jsxScript) {
|
||||
return false;
|
||||
}
|
||||
|
||||
forceEvalScript = !replacements;
|
||||
|
||||
|
||||
//////////////////////////////////////////////////////
|
||||
// Get path of script //
|
||||
// Check if it's literal, relative or in jsx folder //
|
||||
//////////////////////////////////////////////////////
|
||||
|
||||
if (/^\/|[a-zA-Z]+:/.test(jsxScript)) { // absolute path Mac | Windows
|
||||
jsxScript = path.normalize(jsxScript);
|
||||
} else if (/^\.+\//.test(jsxScript)) {
|
||||
jsxScript = path.join(__dirname, jsxScript); // relative path
|
||||
} else {
|
||||
jsxScript = path.join(jsxFolder, jsxScript); // files in the jsxFolder
|
||||
}
|
||||
|
||||
if (forceEvalScript) {
|
||||
jsxScript = jsxScript.replace(/"/g, '\\"');
|
||||
// Check that the path exists; should change this to asynchronous at some point
|
||||
if (!window.cep.fs.stat(jsxScript).err) {
|
||||
jsxScript = fileNameScript.replace(/__fileName__/, jsxScript).replace(/__basename__/, path.basename(jsxScript)) +
|
||||
'$.evalFile(new File("' + jsxScript.replace(/\\/g, '\\\\') + '")) + "";';
|
||||
return this.evalScript(jsxScript, callback, forceEval);
|
||||
} else {
|
||||
throw new Error(`The file: ${jsxScript} could not be found / read`);
|
||||
}
|
||||
}
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
// Replacements made so we can't use $.evalFile and need to read the jsx script for ourselves //
|
||||
////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
fileName = jsxScript.replace(/\\/g, '\\\\').replace(/"/g, '\\"');
|
||||
try {
|
||||
jsxScript = window.cep.fs.readFile(jsxScript).data;
|
||||
} catch (er) {
|
||||
throw new Error(`The file: ${fileName} could not be read`);
|
||||
}
|
||||
// It is desirable that the injected fileNameScript is on the same line as the 1st line of the script
|
||||
// This is so that the $.line or error.line returns the same value as the actual file
|
||||
// However if the 1st line contains a # directive then we need to insert a new line, which spoils the line-number match described above
|
||||
// When possible i.e. when there's no replacements then $.evalFile will be used and then the whole issue is avoided
|
||||
newLine = /^\s*#/.test(jsxScript) ? '\n' : '';
|
||||
jsxScript = fileNameScript.replace(/__fileName__/, fileName).replace(/__basename__/, path.basename(fileName)) + newLine + jsxScript;
|
||||
|
||||
try {
|
||||
// evalScript(jsxScript, callback);
|
||||
return this.evalScript(jsxScript, callback, replacements, forceEval);
|
||||
} catch (err) {
|
||||
////////////////////////////////////////////////
|
||||
// Do whatever error handling you want here ! //
|
||||
////////////////////////////////////////////////
|
||||
var newErr;
|
||||
newErr = new Error(err);
|
||||
alert('Error Eek: ' + newErr.stack);
|
||||
return false;
|
||||
}
|
||||
|
||||
return success; // success should be an array but for now it's a Boolean
|
||||
};
|
||||
|
||||
|
||||
////////////////////////////////////
|
||||
// Setup alternative method names //
|
||||
////////////////////////////////////
|
||||
Jsx.prototype.eval = Jsx.prototype.script = Jsx.prototype.evalscript = Jsx.prototype.evalScript;
|
||||
Jsx.prototype.file = Jsx.prototype.evalfile = Jsx.prototype.evalFile;
|
||||
|
||||
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
// Examples //
|
||||
// jsx.evalScript('alert("foo");'); //
|
||||
// jsx.evalFile('foo.jsx'); // where foo.jsx is stored in the jsx folder at the base of the extensions directory //
|
||||
// jsx.evalFile('../myFolder/foo.jsx'); // where a relative or absolute file path is given //
|
||||
// //
|
||||
// using conventional methods one would use in the case where the values to swap were supplied by variables                        //
|
||||
// csInterface.evalScript('var q = "' + name + '"; alert("' + myString + '" ' + myOp + ' q);q;', callback); //
|
||||
// Using all the '' + foo + '' is very error prone //
|
||||
// jsx.evalScript('var q = "__name__"; alert(__string__ __opp__ q);q;',{'name':'Fred', 'string':'Hello ', 'opp':'+'}, callBack); //
|
||||
// is much simpler and less error prone //
|
||||
// //
|
||||
// more readable to use object //
|
||||
// jsx.evalFile({ //
|
||||
// file: 'yetAnotherFabScript.jsx', //
|
||||
// replacements: {"this": foo, That: bar, and: "&&", the: foo2, other: bar2}, //
|
||||
// eval: true //
|
||||
// }) //
|
||||
// Enjoy                                                                                                                            //
|
||||
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
|
||||
jsx = new Jsx();
|
||||
})();
|
||||
File diff suppressed because one or more lines are too long
|
|
@ -0,0 +1,530 @@
|
|||
// json2.js
|
||||
// 2017-06-12
|
||||
// Public Domain.
|
||||
// NO WARRANTY EXPRESSED OR IMPLIED. USE AT YOUR OWN RISK.
|
||||
|
||||
// USE YOUR OWN COPY. IT IS EXTREMELY UNWISE TO LOAD CODE FROM SERVERS YOU DO
|
||||
// NOT CONTROL.
|
||||
|
||||
// This file creates a global JSON object containing two methods: stringify
|
||||
// and parse. This file provides the ES5 JSON capability to ES3 systems.
|
||||
// If a project might run on IE8 or earlier, then this file should be included.
|
||||
// This file does nothing on ES5 systems.
|
||||
|
||||
// JSON.stringify(value, replacer, space)
|
||||
// value any JavaScript value, usually an object or array.
|
||||
// replacer an optional parameter that determines how object
|
||||
// values are stringified for objects. It can be a
|
||||
// function or an array of strings.
|
||||
// space an optional parameter that specifies the indentation
|
||||
// of nested structures. If it is omitted, the text will
|
||||
// be packed without extra whitespace. If it is a number,
|
||||
// it will specify the number of spaces to indent at each
|
||||
// level. If it is a string (such as "\t" or " "),
|
||||
// it contains the characters used to indent at each level.
|
||||
// This method produces a JSON text from a JavaScript value.
|
||||
// When an object value is found, if the object contains a toJSON
|
||||
// method, its toJSON method will be called and the result will be
|
||||
// stringified. A toJSON method does not serialize: it returns the
|
||||
// value represented by the name/value pair that should be serialized,
|
||||
// or undefined if nothing should be serialized. The toJSON method
|
||||
// will be passed the key associated with the value, and this will be
|
||||
// bound to the value.
|
||||
|
||||
// For example, this would serialize Dates as ISO strings.
|
||||
|
||||
// Date.prototype.toJSON = function (key) {
|
||||
// function f(n) {
|
||||
// // Format integers to have at least two digits.
|
||||
// return (n < 10)
|
||||
// ? "0" + n
|
||||
// : n;
|
||||
// }
|
||||
// return this.getUTCFullYear() + "-" +
|
||||
// f(this.getUTCMonth() + 1) + "-" +
|
||||
// f(this.getUTCDate()) + "T" +
|
||||
// f(this.getUTCHours()) + ":" +
|
||||
// f(this.getUTCMinutes()) + ":" +
|
||||
// f(this.getUTCSeconds()) + "Z";
|
||||
// };
|
||||
|
||||
// You can provide an optional replacer method. It will be passed the
|
||||
// key and value of each member, with this bound to the containing
|
||||
// object. The value that is returned from your method will be
|
||||
// serialized. If your method returns undefined, then the member will
|
||||
// be excluded from the serialization.
|
||||
|
||||
// If the replacer parameter is an array of strings, then it will be
|
||||
// used to select the members to be serialized. It filters the results
|
||||
// such that only members with keys listed in the replacer array are
|
||||
// stringified.
|
||||
|
||||
// Values that do not have JSON representations, such as undefined or
|
||||
// functions, will not be serialized. Such values in objects will be
|
||||
// dropped; in arrays they will be replaced with null. You can use
|
||||
// a replacer function to replace those with JSON values.
|
||||
|
||||
// JSON.stringify(undefined) returns undefined.
|
||||
|
||||
// The optional space parameter produces a stringification of the
|
||||
// value that is filled with line breaks and indentation to make it
|
||||
// easier to read.
|
||||
|
||||
// If the space parameter is a non-empty string, then that string will
|
||||
// be used for indentation. If the space parameter is a number, then
|
||||
// the indentation will be that many spaces.
|
||||
|
||||
// Example:
|
||||
|
||||
// text = JSON.stringify(["e", {pluribus: "unum"}]);
|
||||
// // text is '["e",{"pluribus":"unum"}]'
|
||||
|
||||
// text = JSON.stringify(["e", {pluribus: "unum"}], null, "\t");
|
||||
// // text is '[\n\t"e",\n\t{\n\t\t"pluribus": "unum"\n\t}\n]'
|
||||
|
||||
// text = JSON.stringify([new Date()], function (key, value) {
|
||||
// return this[key] instanceof Date
|
||||
// ? "Date(" + this[key] + ")"
|
||||
// : value;
|
||||
// });
|
||||
// // text is '["Date(---current time---)"]'
|
||||
|
||||
// JSON.parse(text, reviver)
|
||||
// This method parses a JSON text to produce an object or array.
|
||||
// It can throw a SyntaxError exception.
|
||||
|
||||
// The optional reviver parameter is a function that can filter and
|
||||
// transform the results. It receives each of the keys and values,
|
||||
// and its return value is used instead of the original value.
|
||||
// If it returns what it received, then the structure is not modified.
|
||||
// If it returns undefined then the member is deleted.
|
||||
|
||||
// Example:
|
||||
|
||||
// // Parse the text. Values that look like ISO date strings will
|
||||
// // be converted to Date objects.
|
||||
|
||||
// myData = JSON.parse(text, function (key, value) {
|
||||
// var a;
|
||||
// if (typeof value === "string") {
|
||||
// a =
|
||||
// /^(\d{4})-(\d{2})-(\d{2})T(\d{2}):(\d{2}):(\d{2}(?:\.\d*)?)Z$/.exec(value);
|
||||
// if (a) {
|
||||
// return new Date(Date.UTC(
|
||||
// +a[1], +a[2] - 1, +a[3], +a[4], +a[5], +a[6]
|
||||
// ));
|
||||
// }
|
||||
// return value;
|
||||
// }
|
||||
// });
|
||||
|
||||
// myData = JSON.parse(
|
||||
// "[\"Date(09/09/2001)\"]",
|
||||
// function (key, value) {
|
||||
// var d;
|
||||
// if (
|
||||
// typeof value === "string"
|
||||
// && value.slice(0, 5) === "Date("
|
||||
// && value.slice(-1) === ")"
|
||||
// ) {
|
||||
// d = new Date(value.slice(5, -1));
|
||||
// if (d) {
|
||||
// return d;
|
||||
// }
|
||||
// }
|
||||
// return value;
|
||||
// }
|
||||
// );
|
||||
|
||||
// This is a reference implementation. You are free to copy, modify, or
|
||||
// redistribute.
|
||||
|
||||
/*jslint
|
||||
eval, for, this
|
||||
*/
|
||||
|
||||
/*property
|
||||
JSON, apply, call, charCodeAt, getUTCDate, getUTCFullYear, getUTCHours,
|
||||
getUTCMinutes, getUTCMonth, getUTCSeconds, hasOwnProperty, join,
|
||||
lastIndex, length, parse, prototype, push, replace, slice, stringify,
|
||||
test, toJSON, toString, valueOf
|
||||
*/
|
||||
|
||||
|
||||
// Create a JSON object only if one does not already exist. We create the
|
||||
// methods in a closure to avoid creating global variables.
|
||||
|
||||
if (typeof JSON !== "object") {
|
||||
JSON = {};
|
||||
}
|
||||
|
||||
(function () {
|
||||
"use strict";
|
||||
|
||||
var rx_one = /^[\],:{}\s]*$/;
|
||||
var rx_two = /\\(?:["\\\/bfnrt]|u[0-9a-fA-F]{4})/g;
|
||||
var rx_three = /"[^"\\\n\r]*"|true|false|null|-?\d+(?:\.\d*)?(?:[eE][+\-]?\d+)?/g;
|
||||
var rx_four = /(?:^|:|,)(?:\s*\[)+/g;
|
||||
var rx_escapable = /[\\"\u0000-\u001f\u007f-\u009f\u00ad\u0600-\u0604\u070f\u17b4\u17b5\u200c-\u200f\u2028-\u202f\u2060-\u206f\ufeff\ufff0-\uffff]/g;
|
||||
var rx_dangerous = /[\u0000\u00ad\u0600-\u0604\u070f\u17b4\u17b5\u200c-\u200f\u2028-\u202f\u2060-\u206f\ufeff\ufff0-\uffff]/g;
|
||||
|
||||
function f(n) {
|
||||
// Format integers to have at least two digits.
|
||||
return (n < 10)
|
||||
? "0" + n
|
||||
: n;
|
||||
}
|
||||
|
||||
function this_value() {
|
||||
return this.valueOf();
|
||||
}
|
||||
|
||||
if (typeof Date.prototype.toJSON !== "function") {
|
||||
|
||||
Date.prototype.toJSON = function () {
|
||||
|
||||
return isFinite(this.valueOf())
|
||||
? (
|
||||
this.getUTCFullYear()
|
||||
+ "-"
|
||||
+ f(this.getUTCMonth() + 1)
|
||||
+ "-"
|
||||
+ f(this.getUTCDate())
|
||||
+ "T"
|
||||
+ f(this.getUTCHours())
|
||||
+ ":"
|
||||
+ f(this.getUTCMinutes())
|
||||
+ ":"
|
||||
+ f(this.getUTCSeconds())
|
||||
+ "Z"
|
||||
)
|
||||
: null;
|
||||
};
|
||||
|
||||
Boolean.prototype.toJSON = this_value;
|
||||
Number.prototype.toJSON = this_value;
|
||||
String.prototype.toJSON = this_value;
|
||||
}
|
||||
|
||||
var gap;
|
||||
var indent;
|
||||
var meta;
|
||||
var rep;
|
||||
|
||||
|
||||
function quote(string) {
|
||||
|
||||
// If the string contains no control characters, no quote characters, and no
|
||||
// backslash characters, then we can safely slap some quotes around it.
|
||||
// Otherwise we must also replace the offending characters with safe escape
|
||||
// sequences.
|
||||
|
||||
rx_escapable.lastIndex = 0;
|
||||
return rx_escapable.test(string)
|
||||
? "\"" + string.replace(rx_escapable, function (a) {
|
||||
var c = meta[a];
|
||||
return typeof c === "string"
|
||||
? c
|
||||
: "\\u" + ("0000" + a.charCodeAt(0).toString(16)).slice(-4);
|
||||
}) + "\""
|
||||
: "\"" + string + "\"";
|
||||
}
|
||||
|
||||
|
||||
function str(key, holder) {
|
||||
|
||||
// Produce a string from holder[key].
|
||||
|
||||
var i; // The loop counter.
|
||||
var k; // The member key.
|
||||
var v; // The member value.
|
||||
var length;
|
||||
var mind = gap;
|
||||
var partial;
|
||||
var value = holder[key];
|
||||
|
||||
// If the value has a toJSON method, call it to obtain a replacement value.
|
||||
|
||||
if (
|
||||
value
|
||||
&& typeof value === "object"
|
||||
&& typeof value.toJSON === "function"
|
||||
) {
|
||||
value = value.toJSON(key);
|
||||
}
|
||||
|
||||
// If we were called with a replacer function, then call the replacer to
|
||||
// obtain a replacement value.
|
||||
|
||||
if (typeof rep === "function") {
|
||||
value = rep.call(holder, key, value);
|
||||
}
|
||||
|
||||
// What happens next depends on the value's type.
|
||||
|
||||
switch (typeof value) {
|
||||
case "string":
|
||||
return quote(value);
|
||||
|
||||
case "number":
|
||||
|
||||
// JSON numbers must be finite. Encode non-finite numbers as null.
|
||||
|
||||
return (isFinite(value))
|
||||
? String(value)
|
||||
: "null";
|
||||
|
||||
case "boolean":
|
||||
case "null":
|
||||
|
||||
// If the value is a boolean or null, convert it to a string. Note:
|
||||
// typeof null does not produce "null". The case is included here in
|
||||
// the remote chance that this gets fixed someday.
|
||||
|
||||
return String(value);
|
||||
|
||||
// If the type is "object", we might be dealing with an object or an array or
|
||||
// null.
|
||||
|
||||
case "object":
|
||||
|
||||
// Due to a specification blunder in ECMAScript, typeof null is "object",
|
||||
// so watch out for that case.
|
||||
|
||||
if (!value) {
|
||||
return "null";
|
||||
}
|
||||
|
||||
// Make an array to hold the partial results of stringifying this object value.
|
||||
|
||||
gap += indent;
|
||||
partial = [];
|
||||
|
||||
// Is the value an array?
|
||||
|
||||
if (Object.prototype.toString.apply(value) === "[object Array]") {
|
||||
|
||||
// The value is an array. Stringify every element. Use null as a placeholder
|
||||
// for non-JSON values.
|
||||
|
||||
length = value.length;
|
||||
for (i = 0; i < length; i += 1) {
|
||||
partial[i] = str(i, value) || "null";
|
||||
}
|
||||
|
||||
// Join all of the elements together, separated with commas, and wrap them in
|
||||
// brackets.
|
||||
|
||||
v = partial.length === 0
|
||||
? "[]"
|
||||
: gap
|
||||
? (
|
||||
"[\n"
|
||||
+ gap
|
||||
+ partial.join(",\n" + gap)
|
||||
+ "\n"
|
||||
+ mind
|
||||
+ "]"
|
||||
)
|
||||
: "[" + partial.join(",") + "]";
|
||||
gap = mind;
|
||||
return v;
|
||||
}
|
||||
|
||||
// If the replacer is an array, use it to select the members to be stringified.
|
||||
|
||||
if (rep && typeof rep === "object") {
|
||||
length = rep.length;
|
||||
for (i = 0; i < length; i += 1) {
|
||||
if (typeof rep[i] === "string") {
|
||||
k = rep[i];
|
||||
v = str(k, value);
|
||||
if (v) {
|
||||
partial.push(quote(k) + (
|
||||
(gap)
|
||||
? ": "
|
||||
: ":"
|
||||
) + v);
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
|
||||
// Otherwise, iterate through all of the keys in the object.
|
||||
|
||||
for (k in value) {
|
||||
if (Object.prototype.hasOwnProperty.call(value, k)) {
|
||||
v = str(k, value);
|
||||
if (v) {
|
||||
partial.push(quote(k) + (
|
||||
(gap)
|
||||
? ": "
|
||||
: ":"
|
||||
) + v);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Join all of the member texts together, separated with commas,
|
||||
// and wrap them in braces.
|
||||
|
||||
v = partial.length === 0
|
||||
? "{}"
|
||||
: gap
|
||||
? "{\n" + gap + partial.join(",\n" + gap) + "\n" + mind + "}"
|
||||
: "{" + partial.join(",") + "}";
|
||||
gap = mind;
|
||||
return v;
|
||||
}
|
||||
}
|
||||
|
||||
// If the JSON object does not yet have a stringify method, give it one.
|
||||
|
||||
if (typeof JSON.stringify !== "function") {
|
||||
meta = { // table of character substitutions
|
||||
"\b": "\\b",
|
||||
"\t": "\\t",
|
||||
"\n": "\\n",
|
||||
"\f": "\\f",
|
||||
"\r": "\\r",
|
||||
"\"": "\\\"",
|
||||
"\\": "\\\\"
|
||||
};
|
||||
JSON.stringify = function (value, replacer, space) {
|
||||
|
||||
// The stringify method takes a value and an optional replacer, and an optional
|
||||
// space parameter, and returns a JSON text. The replacer can be a function
|
||||
// that can replace values, or an array of strings that will select the keys.
|
||||
// A default replacer method can be provided. Use of the space parameter can
|
||||
// produce text that is more easily readable.
|
||||
|
||||
var i;
|
||||
gap = "";
|
||||
indent = "";
|
||||
|
||||
// If the space parameter is a number, make an indent string containing that
|
||||
// many spaces.
|
||||
|
||||
if (typeof space === "number") {
|
||||
for (i = 0; i < space; i += 1) {
|
||||
indent += " ";
|
||||
}
|
||||
|
||||
// If the space parameter is a string, it will be used as the indent string.
|
||||
|
||||
} else if (typeof space === "string") {
|
||||
indent = space;
|
||||
}
|
||||
|
||||
// If there is a replacer, it must be a function or an array.
|
||||
// Otherwise, throw an error.
|
||||
|
||||
rep = replacer;
|
||||
if (replacer && typeof replacer !== "function" && (
|
||||
typeof replacer !== "object"
|
||||
|| typeof replacer.length !== "number"
|
||||
)) {
|
||||
throw new Error("JSON.stringify");
|
||||
}
|
||||
|
||||
// Make a fake root object containing our value under the key of "".
|
||||
// Return the result of stringifying the value.
|
||||
|
||||
return str("", {"": value});
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
// If the JSON object does not yet have a parse method, give it one.
|
||||
|
||||
if (typeof JSON.parse !== "function") {
|
||||
JSON.parse = function (text, reviver) {
|
||||
|
||||
// The parse method takes a text and an optional reviver function, and returns
|
||||
// a JavaScript value if the text is a valid JSON text.
|
||||
|
||||
var j;
|
||||
|
||||
function walk(holder, key) {
|
||||
|
||||
// The walk method is used to recursively walk the resulting structure so
|
||||
// that modifications can be made.
|
||||
|
||||
var k;
|
||||
var v;
|
||||
var value = holder[key];
|
||||
if (value && typeof value === "object") {
|
||||
for (k in value) {
|
||||
if (Object.prototype.hasOwnProperty.call(value, k)) {
|
||||
v = walk(value, k);
|
||||
if (v !== undefined) {
|
||||
value[k] = v;
|
||||
} else {
|
||||
delete value[k];
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return reviver.call(holder, key, value);
|
||||
}
|
||||
|
||||
|
||||
// Parsing happens in four stages. In the first stage, we replace certain
|
||||
// Unicode characters with escape sequences. JavaScript handles many characters
|
||||
// incorrectly, either silently deleting them, or treating them as line endings.
|
||||
|
||||
text = String(text);
|
||||
rx_dangerous.lastIndex = 0;
|
||||
if (rx_dangerous.test(text)) {
|
||||
text = text.replace(rx_dangerous, function (a) {
|
||||
return (
|
||||
"\\u"
|
||||
+ ("0000" + a.charCodeAt(0).toString(16)).slice(-4)
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
// In the second stage, we run the text against regular expressions that look
|
||||
// for non-JSON patterns. We are especially concerned with "()" and "new"
|
||||
// because they can cause invocation, and "=" because it can cause mutation.
|
||||
// But just to be safe, we want to reject all unexpected forms.
|
||||
|
||||
// We split the second stage into 4 regexp operations in order to work around
|
||||
// crippling inefficiencies in IE's and Safari's regexp engines. First we
|
||||
// replace the JSON backslash pairs with "@" (a non-JSON character). Second, we
|
||||
// replace all simple value tokens with "]" characters. Third, we delete all
|
||||
// open brackets that follow a colon or comma or that begin the text. Finally,
|
||||
// we look to see that the remaining characters are only whitespace or "]" or
|
||||
// "," or ":" or "{" or "}". If that is so, then the text is safe for eval.
|
||||
|
||||
if (
|
||||
rx_one.test(
|
||||
text
|
||||
.replace(rx_two, "@")
|
||||
.replace(rx_three, "]")
|
||||
.replace(rx_four, "")
|
||||
)
|
||||
) {
|
||||
|
||||
// In the third stage we use the eval function to compile the text into a
|
||||
// JavaScript structure. The "{" operator is subject to a syntactic ambiguity
|
||||
// in JavaScript: it can begin a block or an object literal. We wrap the text
|
||||
// in parens to eliminate the ambiguity.
|
||||
|
||||
j = eval("(" + text + ")");
|
||||
|
||||
// In the optional fourth stage, we recursively walk the new structure, passing
|
||||
// each name/value pair to a reviver function for possible transformation.
|
||||
|
||||
return (typeof reviver === "function")
|
||||
? walk({"": j}, "")
|
||||
: j;
|
||||
}
|
||||
|
||||
// If the text is not JSON parseable, then a SyntaxError is thrown.
|
||||
|
||||
throw new SyntaxError("JSON.parse");
|
||||
};
|
||||
}
|
||||
}());
|
||||
Binary file not shown.
|
After Width: | Height: | Size: 3.5 KiB |
|
|
@ -0,0 +1,95 @@
|
|||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<style type="text/css">
|
||||
html, body, iframe {
|
||||
width: 100%;
|
||||
height: 100%;
|
||||
border: 0px;
|
||||
margin: 0px;
|
||||
overflow: hidden;
|
||||
background-color: #424242;
|
||||
}
|
||||
button {width: 100%;}
|
||||
</style>
|
||||
|
||||
<style>
|
||||
button {width: 100%;}
|
||||
body {margin:0; padding:0; height: 100%;}
|
||||
html {height: 100%;}
|
||||
</style>
|
||||
<script src="http://ajax.googleapis.com/ajax/libs/jquery/1.9.1/jquery.min.js">
|
||||
</script>
|
||||
|
||||
<script type=text/javascript>
|
||||
$(function() {
|
||||
$("a#workfiles-button").bind("click", function() {
|
||||
RPC.call('Photoshop.workfiles_route').then(function (data) {
|
||||
}, function (error) {
|
||||
alert(error);
|
||||
});
|
||||
});
|
||||
});
|
||||
</script>
|
||||
|
||||
<script type=text/javascript>
|
||||
$(function() {
|
||||
$("a#loader-button").bind("click", function() {
|
||||
RPC.call('Photoshop.loader_route').then(function (data) {
|
||||
}, function (error) {
|
||||
alert(error);
|
||||
});
|
||||
});
|
||||
});
|
||||
</script>
|
||||
|
||||
<script type=text/javascript>
|
||||
$(function() {
|
||||
$("a#publish-button").bind("click", function() {
|
||||
RPC.call('Photoshop.publish_route').then(function (data) {
|
||||
}, function (error) {
|
||||
alert(error);
|
||||
});
|
||||
});
|
||||
});
|
||||
</script>
|
||||
|
||||
<script type=text/javascript>
|
||||
$(function() {
|
||||
$("a#sceneinventory-button").bind("click", function() {
|
||||
RPC.call('Photoshop.sceneinventory_route').then(function (data) {
|
||||
}, function (error) {
|
||||
alert(error);
|
||||
});
|
||||
});
|
||||
});
|
||||
</script>
|
||||
|
||||
<script type=text/javascript>
|
||||
$(function() {
|
||||
$("a#experimental-button").bind("click", function() {
|
||||
RPC.call('Photoshop.experimental_tools_route').then(function (data) {
|
||||
}, function (error) {
|
||||
alert(error);
|
||||
});
|
||||
});
|
||||
});
|
||||
</script>
|
||||
</head>
|
||||
<body>
|
||||
<script type="text/javascript" src="./client/wsrpc.js"></script>
|
||||
<script type="text/javascript" src="./client/CSInterface.js"></script>
|
||||
<script type="text/javascript" src="./client/loglevel.min.js"></script>
|
||||
|
||||
<!-- helper library for better debugging of .jsx check its license! -->
|
||||
<script type="text/javascript" src="./host/JSX.js"></script>
|
||||
|
||||
<script type="text/javascript" src="./client/client.js"></script>
|
||||
|
||||
<a href=# id=workfiles-button><button>Workfiles...</button></a>
|
||||
<a href=# id=loader-button><button>Load...</button></a>
|
||||
<a href=# id=publish-button><button>Publish...</button></a>
|
||||
<a href=# id=sceneinventory-button><button>Manage...</button></a>
|
||||
<a href=# id=experimental-button><button>Experimental Tools...</button></a>
|
||||
</body>
|
||||
</html>
|
||||
406
server_addon/photoshop/client/ayon_photoshop/api/launch_logic.py
Normal file
406
server_addon/photoshop/client/ayon_photoshop/api/launch_logic.py
Normal file
|
|
@ -0,0 +1,406 @@
|
|||
import os
|
||||
import subprocess
|
||||
import collections
|
||||
import asyncio
|
||||
|
||||
from wsrpc_aiohttp import (
|
||||
WebSocketRoute,
|
||||
WebSocketAsync
|
||||
)
|
||||
|
||||
import ayon_api
|
||||
from qtpy import QtCore
|
||||
|
||||
from ayon_core.lib import Logger
|
||||
from ayon_core.pipeline import (
|
||||
registered_host,
|
||||
Anatomy,
|
||||
)
|
||||
from ayon_core.pipeline.workfile import (
|
||||
get_workfile_template_key_from_context,
|
||||
get_last_workfile,
|
||||
)
|
||||
from ayon_core.pipeline.template_data import get_template_data_with_names
|
||||
from ayon_core.tools.utils import host_tools
|
||||
from ayon_core.pipeline.context_tools import change_current_context
|
||||
|
||||
from .webserver import WebServerTool
|
||||
from .ws_stub import PhotoshopServerStub
|
||||
|
||||
log = Logger.get_logger(__name__)
|
||||
|
||||
|
||||
class ConnectionNotEstablishedYet(Exception):
|
||||
pass
|
||||
|
||||
|
||||
class MainThreadItem:
|
||||
"""Structure to store information about callback in main thread.
|
||||
|
||||
Item should be used to execute callback in main thread which may be needed
|
||||
for execution of Qt objects.
|
||||
|
||||
Item store callback (callable variable), arguments and keyword arguments
|
||||
for the callback. Item holds information about its processing.
|
||||
"""
|
||||
not_set = object()
|
||||
|
||||
def __init__(self, callback, *args, **kwargs):
|
||||
self._done = False
|
||||
self._exception = self.not_set
|
||||
self._result = self.not_set
|
||||
self._callback = callback
|
||||
self._args = args
|
||||
self._kwargs = kwargs
|
||||
|
||||
@property
|
||||
def done(self):
|
||||
return self._done
|
||||
|
||||
@property
|
||||
def exception(self):
|
||||
return self._exception
|
||||
|
||||
@property
|
||||
def result(self):
|
||||
return self._result
|
||||
|
||||
def execute(self):
|
||||
"""Execute callback and store its result.
|
||||
|
||||
Method must be called from main thread. Item is marked as `done`
|
||||
when callback execution finishes. Stores output of the callback, or exception
|
||||
information when callback raises one.
|
||||
"""
|
||||
log.debug("Executing process in main thread")
|
||||
if self.done:
|
||||
log.warning("- item is already processed")
|
||||
return
|
||||
|
||||
log.info("Running callback: {}".format(str(self._callback)))
|
||||
try:
|
||||
result = self._callback(*self._args, **self._kwargs)
|
||||
self._result = result
|
||||
|
||||
except Exception as exc:
|
||||
self._exception = exc
|
||||
|
||||
finally:
|
||||
self._done = True
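# A small usage sketch (mirrors PhotoshopRoute._tool_route further below): callbacks are
# queued from websocket handlers and executed later by the Qt loop timer, e.g.
#   item = ProcessLauncher.execute_in_main_thread(show_tool_by_name, "workfiles")
#   # once the loop timer has processed it, item.done is True and item.result or
#   # item.exception carries the outcome.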
|
||||
|
||||
|
||||
def stub():
|
||||
"""
|
||||
Convenience function to get server RPC stub to call methods directed
|
||||
for host (Photoshop).
|
||||
It expects an already created connection, started from the client.
|
||||
Currently created when the panel is opened (PS: Window>Extensions>Ayon)
|
||||
:return: <PhotoshopServerStub> where functions could be called from
|
||||
"""
|
||||
ps_stub = PhotoshopServerStub()
|
||||
if not ps_stub.client:
|
||||
raise ConnectionNotEstablishedYet("Connection is not created yet")
|
||||
|
||||
return ps_stub
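# A minimal usage sketch (hypothetical workfile path): callers should guard against the
# panel connection not being ready yet.
#   try:
#       stub().open("/path/to/workfile.psd")
#   except ConnectionNotEstablishedYet:
#       log.warning("Photoshop extension panel is not connected yet")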
|
||||
|
||||
|
||||
def show_tool_by_name(tool_name):
|
||||
kwargs = {}
|
||||
if tool_name == "loader":
|
||||
kwargs["use_context"] = True
|
||||
|
||||
host_tools.show_tool_by_name(tool_name, **kwargs)
|
||||
|
||||
|
||||
class ProcessLauncher(QtCore.QObject):
|
||||
route_name = "Photoshop"
|
||||
_main_thread_callbacks = collections.deque()
|
||||
|
||||
def __init__(self, subprocess_args):
|
||||
self._subprocess_args = subprocess_args
|
||||
self._log = None
|
||||
|
||||
super(ProcessLauncher, self).__init__()
|
||||
|
||||
# Keep track if launcher was already started
|
||||
self._started = False
|
||||
|
||||
self._process = None
|
||||
self._websocket_server = None
|
||||
|
||||
start_process_timer = QtCore.QTimer()
|
||||
start_process_timer.setInterval(100)
|
||||
|
||||
loop_timer = QtCore.QTimer()
|
||||
loop_timer.setInterval(200)
|
||||
|
||||
start_process_timer.timeout.connect(self._on_start_process_timer)
|
||||
loop_timer.timeout.connect(self._on_loop_timer)
|
||||
|
||||
self._start_process_timer = start_process_timer
|
||||
self._loop_timer = loop_timer
|
||||
|
||||
@property
|
||||
def log(self):
|
||||
if self._log is None:
|
||||
self._log = Logger.get_logger(
|
||||
"{}-launcher".format(self.route_name)
|
||||
)
|
||||
return self._log
|
||||
|
||||
@property
|
||||
def websocket_server_is_running(self):
|
||||
if self._websocket_server is not None:
|
||||
return self._websocket_server.is_running
|
||||
return False
|
||||
|
||||
@property
|
||||
def is_process_running(self):
|
||||
if self._process is not None:
|
||||
return self._process.poll() is None
|
||||
return False
|
||||
|
||||
@property
|
||||
def is_host_connected(self):
|
||||
"""Returns True if connected, False if app is not running at all."""
|
||||
if not self.is_process_running:
|
||||
return False
|
||||
|
||||
try:
|
||||
_stub = stub()
|
||||
if _stub:
|
||||
return True
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
return None
|
||||
|
||||
@classmethod
|
||||
def execute_in_main_thread(cls, callback, *args, **kwargs):
|
||||
item = MainThreadItem(callback, *args, **kwargs)
|
||||
cls._main_thread_callbacks.append(item)
|
||||
return item
|
||||
|
||||
def start(self):
|
||||
if self._started:
|
||||
return
|
||||
self.log.info("Started launch logic of Photoshop")
|
||||
self._started = True
|
||||
self._start_process_timer.start()
|
||||
|
||||
def exit(self):
|
||||
""" Exit whole application. """
|
||||
if self._start_process_timer.isActive():
|
||||
self._start_process_timer.stop()
|
||||
if self._loop_timer.isActive():
|
||||
self._loop_timer.stop()
|
||||
|
||||
if self._websocket_server is not None:
|
||||
self._websocket_server.stop()
|
||||
|
||||
if self._process:
|
||||
self._process.kill()
|
||||
self._process.wait()
|
||||
|
||||
QtCore.QCoreApplication.exit()
|
||||
|
||||
def _on_loop_timer(self):
|
||||
# TODO find better way and catch errors
|
||||
# Run only callbacks that are in queue at the moment
|
||||
cls = self.__class__
|
||||
for _ in range(len(cls._main_thread_callbacks)):
|
||||
if cls._main_thread_callbacks:
|
||||
item = cls._main_thread_callbacks.popleft()
|
||||
item.execute()
|
||||
|
||||
if not self.is_process_running:
|
||||
self.log.info("Host process is not running. Closing")
|
||||
self.exit()
|
||||
|
||||
elif not self.websocket_server_is_running:
|
||||
self.log.info("Websocket server is not running. Closing")
|
||||
self.exit()
|
||||
|
||||
def _on_start_process_timer(self):
|
||||
# TODO add try except validations for each part in this method
|
||||
# Start server as first thing
|
||||
if self._websocket_server is None:
|
||||
self._init_server()
|
||||
return
|
||||
|
||||
# TODO add waiting time
|
||||
# Wait for webserver
|
||||
if not self.websocket_server_is_running:
|
||||
return
|
||||
|
||||
# Start application process
|
||||
if self._process is None:
|
||||
self._start_process()
|
||||
self.log.info("Waiting for host to connect")
|
||||
return
|
||||
|
||||
# TODO add waiting time
|
||||
# Wait until host is connected
|
||||
if self.is_host_connected:
|
||||
self._start_process_timer.stop()
|
||||
self._loop_timer.start()
|
||||
elif (
|
||||
not self.is_process_running
|
||||
or not self.websocket_server_is_running
|
||||
):
|
||||
self.exit()
|
||||
|
||||
def _init_server(self):
|
||||
if self._websocket_server is not None:
|
||||
return
|
||||
|
||||
self.log.debug(
|
||||
"Initialization of websocket server for host communication"
|
||||
)
|
||||
|
||||
self._websocket_server = websocket_server = WebServerTool()
|
||||
if websocket_server.port_occupied(
|
||||
websocket_server.host_name,
|
||||
websocket_server.port
|
||||
):
|
||||
self.log.info(
|
||||
"Server already running, sending actual context and exit."
|
||||
)
|
||||
asyncio.run(websocket_server.send_context_change(self.route_name))
|
||||
self.exit()
|
||||
return
|
||||
|
||||
# Add Websocket route
|
||||
websocket_server.add_route("*", "/ws/", WebSocketAsync)
|
||||
# Add Photoshop route to websocket handler
|
||||
|
||||
print("Adding {} route".format(self.route_name))
|
||||
WebSocketAsync.add_route(
|
||||
self.route_name, PhotoshopRoute
|
||||
)
|
||||
self.log.info("Starting websocket server for host communication")
|
||||
websocket_server.start_server()
|
||||
|
||||
def _start_process(self):
|
||||
if self._process is not None:
|
||||
return
|
||||
self.log.info("Starting host process")
|
||||
try:
|
||||
self._process = subprocess.Popen(
|
||||
self._subprocess_args,
|
||||
stdout=subprocess.DEVNULL,
|
||||
stderr=subprocess.DEVNULL
|
||||
)
|
||||
except Exception:
|
||||
self.log.info("exce", exc_info=True)
|
||||
self.exit()
|
||||
|
||||
|
||||
class PhotoshopRoute(WebSocketRoute):
|
||||
"""
|
||||
One route, mimicking external application (like Harmony, etc).
|
||||
All functions could be called from client.
|
||||
'do_notify' function calls function on the client - mimicking
|
||||
notification after long running job on the server or similar
|
||||
"""
|
||||
instance = None
|
||||
|
||||
def init(self, **kwargs):
|
||||
# Python __init__ must return "self".
|
||||
# This method might return anything.
|
||||
log.debug("someone called Photoshop route")
|
||||
self.instance = self
|
||||
return kwargs
|
||||
|
||||
# server functions
|
||||
async def ping(self):
|
||||
log.debug("someone called Photoshop route ping")
|
||||
|
||||
# This method calls function on the client side
|
||||
# client functions
|
||||
async def set_context(self, project, folder, task):
|
||||
"""
|
||||
Sets 'project' and 'folder' to envs, e.g. setting context.
|
||||
|
||||
Opens last workfile from that context if it exists.
|
||||
|
||||
Args:
|
||||
project (str)
|
||||
folder (str)
|
||||
task (str)
|
||||
"""
|
||||
log.info("Setting context change")
|
||||
log.info(f"project {project} folder {folder} task {task}")
|
||||
|
||||
folder_entity = ayon_api.get_folder_by_path(project, folder)
|
||||
task_entity = ayon_api.get_task_by_name(
|
||||
project, folder_entity["id"], task
|
||||
)
|
||||
change_current_context(folder_entity, task_entity)
|
||||
|
||||
last_workfile_path = self._get_last_workfile_path(project,
|
||||
folder,
|
||||
task)
|
||||
if last_workfile_path and os.path.exists(last_workfile_path):
|
||||
ProcessLauncher.execute_in_main_thread(
|
||||
lambda: stub().open(last_workfile_path))
|
||||
|
||||
|
||||
async def read(self):
|
||||
log.debug("photoshop.read client calls server server calls "
|
||||
"photoshop client")
|
||||
return await self.socket.call('photoshop.read')
|
||||
|
||||
# panel routes for tools
|
||||
async def workfiles_route(self):
|
||||
self._tool_route("workfiles")
|
||||
|
||||
async def loader_route(self):
|
||||
self._tool_route("loader")
|
||||
|
||||
async def publish_route(self):
|
||||
self._tool_route("publisher")
|
||||
|
||||
async def sceneinventory_route(self):
|
||||
self._tool_route("sceneinventory")
|
||||
|
||||
async def experimental_tools_route(self):
|
||||
self._tool_route("experimental_tools")
|
||||
|
||||
def _tool_route(self, _tool_name):
|
||||
"""The address accessed when clicking on the buttons."""
|
||||
|
||||
ProcessLauncher.execute_in_main_thread(show_tool_by_name, _tool_name)
|
||||
|
||||
# Required return statement.
|
||||
return "nothing"
|
||||
|
||||
def _get_last_workfile_path(self, project_name, folder_path, task_name):
|
||||
"""Returns last workfile path if exists"""
|
||||
host = registered_host()
|
||||
host_name = "photoshop"
|
||||
template_key = get_workfile_template_key_from_context(
|
||||
project_name,
|
||||
folder_path,
|
||||
task_name,
|
||||
host_name,
|
||||
)
|
||||
anatomy = Anatomy(project_name)
|
||||
|
||||
data = get_template_data_with_names(
|
||||
project_name, folder_path, task_name, host_name
|
||||
)
|
||||
data["root"] = anatomy.roots
|
||||
|
||||
work_template = anatomy.get_template_item("work", template_key)
|
||||
|
||||
# Define saving file extension
|
||||
extensions = host.get_workfile_extensions()
|
||||
|
||||
work_root = work_template["directory"].format_strict(data)
|
||||
file_template = work_template["file"].template
|
||||
last_workfile_path = get_last_workfile(
|
||||
work_root, file_template, data, extensions, True
|
||||
)
|
||||
|
||||
return last_workfile_path
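# Rough flow of the lookup above (illustrative only; exact values depend on the project's
# anatomy templates): template_key resolves the "work" template variant, work_root is the
# task's work directory, and get_last_workfile() returns the absolute path of the newest
# ".psd"/".psb" workfile there (a first-version name may be returned when none exists yet).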
|
||||
|
|
@ -0,0 +1,93 @@
|
|||
"""Script wraps launch mechanism of Photoshop implementations.
|
||||
|
||||
Arguments passed to the script are passed to the launch function in the host
|
||||
implementation. In all cases it requires the host app executable and may contain a
|
||||
workfile or other arguments.
|
||||
"""
|
||||
|
||||
import os
|
||||
import sys
|
||||
|
||||
from ayon_photoshop.api.lib import main as host_main
|
||||
|
||||
# Get current file to locate start point of sys.argv
|
||||
CURRENT_FILE = os.path.abspath(__file__)
|
||||
|
||||
|
||||
def show_error_messagebox(title, message, detail_message=None):
|
||||
"""Function will show message and process ends after closing it."""
|
||||
from qtpy import QtWidgets, QtCore
|
||||
from ayon_core import style
|
||||
|
||||
app = QtWidgets.QApplication([])
|
||||
app.setStyleSheet(style.load_stylesheet())
|
||||
|
||||
msgbox = QtWidgets.QMessageBox()
|
||||
msgbox.setWindowTitle(title)
|
||||
msgbox.setText(message)
|
||||
|
||||
if detail_message:
|
||||
msgbox.setDetailedText(detail_message)
|
||||
|
||||
msgbox.setWindowModality(QtCore.Qt.ApplicationModal)
|
||||
msgbox.show()
|
||||
|
||||
sys.exit(app.exec_())
|
||||
|
||||
|
||||
def on_invalid_args(script_not_found):
|
||||
"""Show to user message box saying that something went wrong.
|
||||
|
||||
Tell user that arguments to launch implementation are invalid with
|
||||
arguments details.
|
||||
|
||||
Args:
|
||||
script_not_found (bool): Use different message based on this value.
|
||||
"""
|
||||
|
||||
title = "Invalid arguments"
|
||||
joined_args = ", ".join("\"{}\"".format(arg) for arg in sys.argv)
|
||||
if script_not_found:
|
||||
submsg = "Where couldn't find script path:\n\"{}\""
|
||||
else:
|
||||
submsg = "Expected Host executable after script path:\n\"{}\""
|
||||
|
||||
message = "BUG: Got invalid arguments so can't launch Host application."
|
||||
detail_message = "Process was launched with arguments:\n{}\n\n{}".format(
|
||||
joined_args,
|
||||
submsg.format(CURRENT_FILE)
|
||||
)
|
||||
|
||||
show_error_messagebox(title, message, detail_message)
|
||||
|
||||
|
||||
def main(argv):
|
||||
# Modify current file path to find match in sys.argv which may be different
|
||||
# on windows (different letter cases and slashes).
|
||||
modified_current_file = CURRENT_FILE.replace("\\", "/").lower()
|
||||
|
||||
# Create a copy of sys argv
|
||||
sys_args = list(argv)
|
||||
after_script_idx = None
|
||||
# Find script path in sys.argv to know index of argv where host
|
||||
# executable should be.
|
||||
for idx, item in enumerate(sys_args):
|
||||
if item.replace("\\", "/").lower() == modified_current_file:
|
||||
after_script_idx = idx + 1
|
||||
break
|
||||
|
||||
# Validate that there is at least one argument after script path
|
||||
launch_args = None
|
||||
if after_script_idx is not None:
|
||||
launch_args = sys_args[after_script_idx:]
|
||||
|
||||
if launch_args:
|
||||
# Launch host implementation
|
||||
host_main(*launch_args)
|
||||
else:
|
||||
# Show message box
|
||||
on_invalid_args(after_script_idx is None)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main(sys.argv)
|
||||
84
server_addon/photoshop/client/ayon_photoshop/api/lib.py
Normal file
84
server_addon/photoshop/client/ayon_photoshop/api/lib.py
Normal file
|
|
@ -0,0 +1,84 @@
|
|||
import os
import sys
import contextlib
import traceback

from ayon_core.lib import env_value_to_bool, Logger, is_in_tests
from ayon_core.addon import AddonsManager
from ayon_core.pipeline import install_host
from ayon_core.tools.utils import host_tools
from ayon_core.tools.utils import get_ayon_qt_app

from .launch_logic import ProcessLauncher, stub


log = Logger.get_logger(__name__)


def safe_excepthook(*args):
    traceback.print_exception(*args)


def main(*subprocess_args):
    from ayon_photoshop.api import PhotoshopHost

    host = PhotoshopHost()
    install_host(host)

    sys.excepthook = safe_excepthook

    # coloring in StdOutBroker
    os.environ["AYON_LOG_NO_COLORS"] = "0"
    app = get_ayon_qt_app()
    app.setQuitOnLastWindowClosed(False)

    launcher = ProcessLauncher(subprocess_args)
    launcher.start()

    if env_value_to_bool("HEADLESS_PUBLISH"):
        manager = AddonsManager()
        webpublisher_addon = manager["webpublisher"]
        launcher.execute_in_main_thread(
            webpublisher_addon.headless_publish,
            log,
            "ClosePS",
            is_in_tests()
        )
    elif env_value_to_bool("AVALON_PHOTOSHOP_WORKFILES_ON_LAUNCH",
                           default=True):
        launcher.execute_in_main_thread(
            host_tools.show_workfiles,
            save=env_value_to_bool("WORKFILES_SAVE_AS")
        )

    sys.exit(app.exec_())


@contextlib.contextmanager
def maintained_selection():
    """Maintain selection during context."""
    selection = stub().get_selected_layers()
    try:
        yield selection
    finally:
        stub().select_layers(selection)


@contextlib.contextmanager
def maintained_visibility(layers=None):
    """Maintain visibility during context.

    Args:
        layers (list) of PSItem (used for caching)
    """
    visibility = {}
    if not layers:
        layers = stub().get_layers()
    for layer in layers:
        visibility[layer.id] = layer.visible
    try:
        yield
    finally:
        for layer in layers:
            stub().set_visible(layer.id, visibility[layer.id])
        pass
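The two context managers above are intended to wrap stub calls so that exporting does not permanently change the artist's selection or layer visibility. A minimal usage sketch, assuming a running integration and an open document (the layer id and output path are placeholders):

```
from ayon_photoshop.api.lib import (
    maintained_selection,
    maintained_visibility,
    stub,
)

ps_stub = stub()
group = ps_stub.get_layer(42)  # 42 stands for an existing group layer id

with maintained_selection(), maintained_visibility():
    # Hide everything outside the group and save a flat copy; selection and
    # visibility are restored when the block exits.
    ps_stub.hide_all_others_layers([group])
    ps_stub.saveAs("C:/temp/preview.jpg", "jpg", True)
```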
BIN
server_addon/photoshop/client/ayon_photoshop/api/panel.png
Normal file
BIN
server_addon/photoshop/client/ayon_photoshop/api/panel.png
Normal file
285
server_addon/photoshop/client/ayon_photoshop/api/pipeline.py
Normal file
285
server_addon/photoshop/client/ayon_photoshop/api/pipeline.py
Normal file
|
|
@ -0,0 +1,285 @@
|
|||
import os
|
||||
|
||||
from qtpy import QtWidgets
|
||||
|
||||
import pyblish.api
|
||||
|
||||
from ayon_core.lib import register_event_callback, Logger
|
||||
from ayon_core.pipeline import (
|
||||
register_loader_plugin_path,
|
||||
register_creator_plugin_path,
|
||||
AVALON_CONTAINER_ID,
|
||||
AYON_INSTANCE_ID,
|
||||
AVALON_INSTANCE_ID,
|
||||
)
|
||||
|
||||
from ayon_core.host import (
|
||||
HostBase,
|
||||
IWorkfileHost,
|
||||
ILoadHost,
|
||||
IPublishHost
|
||||
)
|
||||
|
||||
from ayon_core.pipeline.load import any_outdated_containers
|
||||
from ayon_core.tools.utils import get_ayon_qt_app
|
||||
from ayon_photoshop import PHOTOSHOP_ADDON_ROOT
|
||||
|
||||
from . import lib
|
||||
|
||||
log = Logger.get_logger(__name__)
|
||||
|
||||
PLUGINS_DIR = os.path.join(PHOTOSHOP_ADDON_ROOT, "plugins")
|
||||
PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish")
|
||||
LOAD_PATH = os.path.join(PLUGINS_DIR, "load")
|
||||
CREATE_PATH = os.path.join(PLUGINS_DIR, "create")
|
||||
INVENTORY_PATH = os.path.join(PLUGINS_DIR, "inventory")
|
||||
|
||||
|
||||
class PhotoshopHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost):
|
||||
name = "photoshop"
|
||||
|
||||
def install(self):
|
||||
"""Install Photoshop-specific functionality needed for integration.
|
||||
|
||||
This function is called automatically on calling
|
||||
`api.install(photoshop)`.
|
||||
"""
|
||||
log.info("Installing OpenPype Photoshop...")
|
||||
pyblish.api.register_host("photoshop")
|
||||
|
||||
pyblish.api.register_plugin_path(PUBLISH_PATH)
|
||||
register_loader_plugin_path(LOAD_PATH)
|
||||
register_creator_plugin_path(CREATE_PATH)
|
||||
|
||||
register_event_callback("application.launched", on_application_launch)
|
||||
|
||||
def current_file(self):
|
||||
try:
|
||||
full_name = lib.stub().get_active_document_full_name()
|
||||
if full_name and full_name != "null":
|
||||
return os.path.normpath(full_name).replace("\\", "/")
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
return None
|
||||
|
||||
def work_root(self, session):
|
||||
return os.path.normpath(session["AYON_WORKDIR"]).replace("\\", "/")
|
||||
|
||||
def open_workfile(self, filepath):
|
||||
lib.stub().open(filepath)
|
||||
|
||||
return True
|
||||
|
||||
def save_workfile(self, filepath=None):
|
||||
_, ext = os.path.splitext(filepath)
|
||||
lib.stub().saveAs(filepath, ext[1:], True)
|
||||
|
||||
def get_current_workfile(self):
|
||||
return self.current_file()
|
||||
|
||||
def workfile_has_unsaved_changes(self):
|
||||
if self.current_file():
|
||||
return not lib.stub().is_saved()
|
||||
|
||||
return False
|
||||
|
||||
def get_workfile_extensions(self):
|
||||
return [".psd", ".psb"]
|
||||
|
||||
def get_containers(self):
|
||||
return ls()
|
||||
|
||||
def get_context_data(self):
|
||||
"""Get stored values for context (validation enable/disable etc)"""
|
||||
meta = _get_stub().get_layers_metadata()
|
||||
for item in meta:
|
||||
if item.get("id") == "publish_context":
|
||||
item.pop("id")
|
||||
return item
|
||||
|
||||
return {}
|
||||
|
||||
def update_context_data(self, data, changes):
|
||||
"""Store value needed for context"""
|
||||
item = data
|
||||
item["id"] = "publish_context"
|
||||
_get_stub().imprint(item["id"], item)
|
||||
|
||||
def list_instances(self):
|
||||
"""List all created instances to publish from current workfile.
|
||||
|
||||
Pulls from File > File Info
|
||||
|
||||
Returns:
|
||||
(list) of dictionaries matching instances format
|
||||
"""
|
||||
stub = _get_stub()
|
||||
|
||||
if not stub:
|
||||
return []
|
||||
|
||||
instances = []
|
||||
layers_meta = stub.get_layers_metadata()
|
||||
if layers_meta:
|
||||
for instance in layers_meta:
|
||||
if instance.get("id") in {
|
||||
AYON_INSTANCE_ID, AVALON_INSTANCE_ID
|
||||
}:
|
||||
instances.append(instance)
|
||||
|
||||
return instances
|
||||
|
||||
def remove_instance(self, instance):
|
||||
"""Remove instance from current workfile metadata.
|
||||
|
||||
Updates metadata of current file in File > File Info and removes
|
||||
icon highlight on group layer.
|
||||
|
||||
Args:
|
||||
instance (dict): instance representation from subsetmanager model
|
||||
"""
|
||||
stub = _get_stub()
|
||||
|
||||
if not stub:
|
||||
return
|
||||
|
||||
inst_id = instance.get("instance_id") or instance.get("uuid") # legacy
|
||||
if not inst_id:
|
||||
log.warning("No instance identifier for {}".format(instance))
|
||||
return
|
||||
|
||||
stub.remove_instance(inst_id)
|
||||
|
||||
if instance.get("members"):
|
||||
item = stub.get_layer(instance["members"][0])
|
||||
if item:
|
||||
stub.rename_layer(item.id,
|
||||
item.name.replace(stub.PUBLISH_ICON, ''))
|
||||
|
||||
|
||||
def check_inventory():
|
||||
if not any_outdated_containers():
|
||||
return
|
||||
|
||||
# Warn about outdated containers.
|
||||
_app = get_ayon_qt_app()
|
||||
|
||||
message_box = QtWidgets.QMessageBox()
|
||||
message_box.setIcon(QtWidgets.QMessageBox.Warning)
|
||||
msg = "There are outdated containers in the scene."
|
||||
message_box.setText(msg)
|
||||
message_box.exec_()
|
||||
|
||||
|
||||
def on_application_launch():
|
||||
check_inventory()
|
||||
|
||||
|
||||
def ls():
|
||||
"""Yields containers from active Photoshop document
|
||||
|
||||
This is the host-equivalent of api.ls(), but instead of listing
|
||||
assets on disk, it lists assets already loaded in Photoshop; once loaded
|
||||
they are called 'containers'
|
||||
|
||||
Yields:
|
||||
dict: container
|
||||
|
||||
"""
|
||||
try:
|
||||
stub = lib.stub() # only after Photoshop is up
|
||||
except lib.ConnectionNotEstablishedYet:
|
||||
print("Not connected yet, ignoring")
|
||||
return
|
||||
|
||||
if not stub.get_active_document_name():
|
||||
return
|
||||
|
||||
layers_meta = stub.get_layers_metadata() # minimalize calls to PS
|
||||
for layer in stub.get_layers():
|
||||
data = stub.read(layer, layers_meta)
|
||||
|
||||
# Skip non-tagged layers.
|
||||
if not data:
|
||||
continue
|
||||
|
||||
# Filter to only containers.
|
||||
if "container" not in data["id"]:
|
||||
continue
|
||||
|
||||
# Append transient data
|
||||
data["objectName"] = layer.name.replace(stub.LOADED_ICON, '')
|
||||
data["layer"] = layer
|
||||
|
||||
yield data
|
||||
|
||||
|
||||
def _get_stub():
|
||||
"""Handle pulling stub from PS to run operations on host
|
||||
|
||||
Returns:
|
||||
(PhotoshopServerStub) or None
|
||||
"""
|
||||
try:
|
||||
stub = lib.stub() # only after Photoshop is up
|
||||
except lib.ConnectionNotEstablishedYet:
|
||||
print("Not connected yet, ignoring")
|
||||
return
|
||||
|
||||
if not stub.get_active_document_name():
|
||||
return
|
||||
|
||||
return stub
|
||||
|
||||
|
||||
def containerise(
|
||||
name, namespace, layer, context, loader=None, suffix="_CON"
|
||||
):
|
||||
"""Imprint layer with metadata
|
||||
|
||||
Containerisation enables a tracking of version, author and origin
|
||||
for loaded assets.
|
||||
|
||||
Arguments:
|
||||
name (str): Name of resulting assembly
|
||||
namespace (str): Namespace under which to host container
|
||||
layer (PSItem): Layer to containerise
|
||||
context (dict): Asset information
|
||||
loader (str, optional): Name of loader used to produce this container.
|
||||
suffix (str, optional): Suffix of container, defaults to `_CON`.
|
||||
|
||||
Returns:
|
||||
container (str): Name of container assembly
|
||||
"""
|
||||
layer.name = name + suffix
|
||||
|
||||
data = {
|
||||
"schema": "openpype:container-2.0",
|
||||
"id": AVALON_CONTAINER_ID,
|
||||
"name": name,
|
||||
"namespace": namespace,
|
||||
"loader": str(loader),
|
||||
"representation": context["representation"]["id"],
|
||||
"members": [str(layer.id)]
|
||||
}
|
||||
stub = lib.stub()
|
||||
stub.imprint(layer.id, data)
|
||||
|
||||
return layer
|
||||
|
||||
|
||||
def cache_and_get_instances(creator):
|
||||
"""Cache instances in shared data.
|
||||
|
||||
Storing all instances as a list, as legacy instances might still be present.
|
||||
Args:
|
||||
creator (Creator): Plugin which would like to get instances from host.
|
||||
Returns:
|
||||
List[]: list of all instances stored in metadata
|
||||
"""
|
||||
shared_key = "openpype.photoshop.instances"
|
||||
if shared_key not in creator.collection_shared_data:
|
||||
creator.collection_shared_data[shared_key] = \
|
||||
creator.host.list_instances()
|
||||
return creator.collection_shared_data[shared_key]
|
||||
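Since `ls()` above is a generator over imprinted containers, a quick way to inspect what is loaded in the open document is a loop like this (illustrative only; it requires a running Photoshop session with the extension connected):

```
from ayon_photoshop.api.pipeline import ls

for container in ls():
    # Each yielded dict is the imprinted metadata plus the transient
    # "objectName" and "layer" keys added by ls().
    print(container["objectName"], container["representation"])
```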
37
server_addon/photoshop/client/ayon_photoshop/api/plugin.py
Normal file
37
server_addon/photoshop/client/ayon_photoshop/api/plugin.py
Normal file
|
|
@ -0,0 +1,37 @@
|
|||
import re

from ayon_core.pipeline import LoaderPlugin
from .launch_logic import stub


def get_unique_layer_name(layers, container_name, product_name):
    """Prepare unique layer name.

    Gets all layer names and if '<container_name>_<product_name>' is present,
    it adds suffix '1', or increases the suffix by 1.

    Args:
        layers (list) of dict with layers info (name, id etc.)
        container_name (str):
        product_name (str):

    Returns:
        str: name_00X (without version)
    """
    name = "{}_{}".format(container_name, product_name)
    names = {}
    for layer in layers:
        layer_name = re.sub(r'_\d{3}$', '', layer.name)
        if layer_name in names.keys():
            names[layer_name] = names[layer_name] + 1
        else:
            names[layer_name] = 1
    occurrences = names.get(name, 0)

    return "{}_{:0>3d}".format(name, occurrences + 1)


class PhotoshopLoader(LoaderPlugin):
    @staticmethod
    def get_stub():
        return stub()
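To make the naming rule concrete, a small sketch of what `get_unique_layer_name` returns for a few inputs (the layer objects are stand-ins with only a `name` attribute):

```
from types import SimpleNamespace

layers = [
    SimpleNamespace(name="Hero_imageMain_001"),
    SimpleNamespace(name="Hero_imageMain_002"),
    SimpleNamespace(name="Town_imageBG_001"),
]

get_unique_layer_name(layers, "Hero", "imageMain")  # -> "Hero_imageMain_003"
get_unique_layer_name(layers, "Hero", "imageFG")    # -> "Hero_imageFG_001"
```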
241
server_addon/photoshop/client/ayon_photoshop/api/webserver.py
Normal file
241
server_addon/photoshop/client/ayon_photoshop/api/webserver.py
Normal file
|
|
@ -0,0 +1,241 @@
|
|||
"""Webserver for communication with photoshop.
|
||||
|
||||
Aiohttp (Asyncio) based websocket server used for communication with host
|
||||
application.
|
||||
|
||||
This webserver is started in spawned Python process that opens DCC during
|
||||
its launch, waits for connection from DCC and handles communication going
|
||||
forward. Server is closed before Python process is killed.
|
||||
"""
|
||||
import os
|
||||
import logging
|
||||
import urllib
|
||||
import threading
|
||||
import asyncio
|
||||
import socket
|
||||
|
||||
from aiohttp import web
|
||||
|
||||
from wsrpc_aiohttp import WSRPCClient
|
||||
|
||||
from ayon_core.pipeline import get_global_context
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class WebServerTool:
|
||||
"""
|
||||
Basic POC implementation of an asynchronous websocket RPC server.
|
||||
Uses class in external_app_1.py to mimic implementation for single
|
||||
external application.
|
||||
'test_client' folder contains two test implementations of client
|
||||
"""
|
||||
_instance = None
|
||||
|
||||
def __init__(self):
|
||||
WebServerTool._instance = self
|
||||
|
||||
self.client = None
|
||||
self.handlers = {}
|
||||
self.on_stop_callbacks = []
|
||||
|
||||
port = None
|
||||
host_name = "localhost"
|
||||
websocket_url = os.getenv("WEBSOCKET_URL")
|
||||
if websocket_url:
|
||||
parsed = urllib.parse.urlparse(websocket_url)
|
||||
port = parsed.port
|
||||
host_name = parsed.netloc.split(":")[0]
|
||||
if not port:
|
||||
port = 8098 # fallback
|
||||
|
||||
self.port = port
|
||||
self.host_name = host_name
|
||||
|
||||
self.app = web.Application()
|
||||
|
||||
# add route with multiple methods for single "external app"
|
||||
self.webserver_thread = WebServerThread(self, self.port)
|
||||
|
||||
def add_route(self, *args, **kwargs):
|
||||
self.app.router.add_route(*args, **kwargs)
|
||||
|
||||
def add_static(self, *args, **kwargs):
|
||||
self.app.router.add_static(*args, **kwargs)
|
||||
|
||||
def start_server(self):
|
||||
if self.webserver_thread and not self.webserver_thread.is_alive():
|
||||
self.webserver_thread.start()
|
||||
|
||||
def stop_server(self):
|
||||
self.stop()
|
||||
|
||||
async def send_context_change(self, host):
|
||||
"""
|
||||
Calls running webserver to inform about context change
|
||||
|
||||
Used when a new PS/AE should be triggered but one is already running;
without this, publish would point to the old context.
|
||||
"""
|
||||
client = WSRPCClient(os.getenv("WEBSOCKET_URL"),
|
||||
loop=asyncio.get_event_loop())
|
||||
await client.connect()
|
||||
|
||||
context = get_global_context()
|
||||
project_name = context["project_name"]
|
||||
folder_path = context["folder_path"]
|
||||
task_name = context["task_name"]
|
||||
log.info("Sending context change to {}{}/{}".format(
|
||||
project_name, folder_path, task_name
|
||||
))
|
||||
|
||||
await client.call(
|
||||
'{}.set_context'.format(host),
|
||||
project=project_name,
|
||||
folder=folder_path,
|
||||
task=task_name
|
||||
)
|
||||
await client.close()
|
||||
|
||||
def port_occupied(self, host_name, port):
|
||||
"""
|
||||
Check if 'url' is already occupied.
|
||||
|
||||
This could mean that the app is already running and we are trying to open it
|
||||
again. In that case, use existing running webserver.
|
||||
Check here is easier than capturing exception from thread.
|
||||
"""
|
||||
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as con:
|
||||
result = con.connect_ex((host_name, port)) == 0
|
||||
|
||||
if result:
|
||||
print(f"Port {port} is already in use")
|
||||
return result
|
||||
|
||||
def call(self, func):
|
||||
log.debug("websocket.call {}".format(func))
|
||||
future = asyncio.run_coroutine_threadsafe(
|
||||
func,
|
||||
self.webserver_thread.loop
|
||||
)
|
||||
result = future.result()
|
||||
return result
|
||||
|
||||
@staticmethod
|
||||
def get_instance():
|
||||
if WebServerTool._instance is None:
|
||||
WebServerTool()
|
||||
return WebServerTool._instance
|
||||
|
||||
@property
|
||||
def is_running(self):
|
||||
if not self.webserver_thread:
|
||||
return False
|
||||
return self.webserver_thread.is_running
|
||||
|
||||
def stop(self):
|
||||
if not self.is_running:
|
||||
return
|
||||
try:
|
||||
log.debug("Stopping websocket server")
|
||||
self.webserver_thread.is_running = False
|
||||
self.webserver_thread.stop()
|
||||
except Exception:
|
||||
log.warning(
|
||||
"Error has happened during Killing websocket server",
|
||||
exc_info=True
|
||||
)
|
||||
|
||||
def thread_stopped(self):
|
||||
for callback in self.on_stop_callbacks:
|
||||
callback()
|
||||
|
||||
|
||||
class WebServerThread(threading.Thread):
|
||||
""" Listener for websocket rpc requests.
|
||||
|
||||
It would probably be better to "attach" this to the main thread (as for
|
||||
example Harmony needs to run something on main thread), but currently
|
||||
it creates separate thread and separate asyncio event loop
|
||||
"""
|
||||
def __init__(self, module, port):
|
||||
super(WebServerThread, self).__init__()
|
||||
|
||||
self.is_running = False
|
||||
self.port = port
|
||||
self.module = module
|
||||
self.loop = None
|
||||
self.runner = None
|
||||
self.site = None
|
||||
self.tasks = []
|
||||
|
||||
def run(self):
|
||||
self.is_running = True
|
||||
|
||||
try:
|
||||
log.info("Starting web server")
|
||||
self.loop = asyncio.new_event_loop() # create new loop for thread
|
||||
asyncio.set_event_loop(self.loop)
|
||||
|
||||
self.loop.run_until_complete(self.start_server())
|
||||
|
||||
websocket_url = "ws://localhost:{}/ws".format(self.port)
|
||||
|
||||
log.debug(
|
||||
"Running Websocket server on URL: \"{}\"".format(websocket_url)
|
||||
)
|
||||
|
||||
asyncio.ensure_future(self.check_shutdown(), loop=self.loop)
|
||||
self.loop.run_forever()
|
||||
except Exception:
|
||||
self.is_running = False
|
||||
log.warning(
|
||||
"Websocket Server service has failed", exc_info=True
|
||||
)
|
||||
raise
|
||||
finally:
|
||||
self.loop.close() # optional
|
||||
|
||||
self.is_running = False
|
||||
self.module.thread_stopped()
|
||||
log.info("Websocket server stopped")
|
||||
|
||||
async def start_server(self):
|
||||
""" Starts runner and TCPsite """
|
||||
self.runner = web.AppRunner(self.module.app)
|
||||
await self.runner.setup()
|
||||
self.site = web.TCPSite(self.runner, 'localhost', self.port)
|
||||
await self.site.start()
|
||||
|
||||
def stop(self):
|
||||
"""Sets is_running flag to false, 'check_shutdown' shuts server down"""
|
||||
self.is_running = False
|
||||
|
||||
async def check_shutdown(self):
|
||||
""" Future that is running and checks if server should be running
|
||||
periodically.
|
||||
"""
|
||||
while self.is_running:
|
||||
while self.tasks:
|
||||
task = self.tasks.pop(0)
|
||||
log.debug("waiting for task {}".format(task))
|
||||
await task
|
||||
log.debug("returned value {}".format(task.result))
|
||||
|
||||
await asyncio.sleep(0.5)
|
||||
|
||||
log.debug("Starting shutdown")
|
||||
await self.site.stop()
|
||||
log.debug("Site stopped")
|
||||
await self.runner.cleanup()
|
||||
log.debug("Runner stopped")
|
||||
tasks = [task for task in asyncio.all_tasks() if
|
||||
task is not asyncio.current_task()]
|
||||
list(map(lambda task: task.cancel(), tasks)) # cancel all the tasks
|
||||
results = await asyncio.gather(*tasks, return_exceptions=True)
|
||||
log.debug(f'Finished awaiting cancelled tasks, results: {results}...')
|
||||
await self.loop.shutdown_asyncgens()
|
||||
# to really make sure everything else has time to stop
|
||||
await asyncio.sleep(0.07)
|
||||
self.loop.stop()
|
||||
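The server tool above is driven from the launch logic during startup, but its lifecycle is simple enough to sketch on its own; a minimal outline, assuming routes are registered before the thread starts:

```
from ayon_photoshop.api.webserver import WebServerTool

server = WebServerTool()  # reads port from WEBSOCKET_URL, falls back to 8098

if not server.port_occupied(server.host_name, server.port):
    server.start_server()  # spins up WebServerThread with its own event loop

# ... later, typically when the Photoshop process exits:
server.stop_server()
```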
571
server_addon/photoshop/client/ayon_photoshop/api/ws_stub.py
Normal file
571
server_addon/photoshop/client/ayon_photoshop/api/ws_stub.py
Normal file
|
|
@ -0,0 +1,571 @@
|
|||
"""
|
||||
Stub handling connection from server to client.
|
||||
Used anywhere the solution is calling client methods.
|
||||
"""
|
||||
import json
|
||||
import attr
|
||||
from wsrpc_aiohttp import WebSocketAsync
|
||||
|
||||
from .webserver import WebServerTool
|
||||
|
||||
|
||||
@attr.s
|
||||
class PSItem(object):
|
||||
"""
|
||||
Object denoting layer or group item in PS. Each item is created in
|
||||
PS by any Loader, but contains same fields, which are being used
|
||||
in later processing.
|
||||
"""
|
||||
# metadata
|
||||
id = attr.ib() # id created by AE, could be used for querying
|
||||
name = attr.ib() # name of item
|
||||
group = attr.ib(default=None) # item type (footage, folder, comp)
|
||||
parents = attr.ib(factory=list)
|
||||
visible = attr.ib(default=True)
|
||||
type = attr.ib(default=None)
|
||||
# all imported elements, single for
|
||||
members = attr.ib(factory=list)
|
||||
long_name = attr.ib(default=None)
|
||||
color_code = attr.ib(default=None) # color code of layer
|
||||
instance_id = attr.ib(default=None)
|
||||
|
||||
@property
|
||||
def clean_name(self):
|
||||
"""Returns layer name without publish icon highlight
|
||||
|
||||
Returns:
|
||||
(str)
|
||||
"""
|
||||
return (self.name.replace(PhotoshopServerStub.PUBLISH_ICON, '')
|
||||
.replace(PhotoshopServerStub.LOADED_ICON, ''))
|
||||
|
||||
|
||||
class PhotoshopServerStub:
|
||||
"""
|
||||
Stub for calling function on client (Photoshop js) side.
|
||||
Expects that client is already connected (started when avalon menu
|
||||
is opened).
|
||||
'self.websocketserver.call' is used as async wrapper
|
||||
"""
|
||||
PUBLISH_ICON = '\u2117 '
|
||||
LOADED_ICON = '\u25bc'
|
||||
|
||||
def __init__(self):
|
||||
self.websocketserver = WebServerTool.get_instance()
|
||||
self.client = self.get_client()
|
||||
|
||||
@staticmethod
|
||||
def get_client():
|
||||
"""
|
||||
Return first connected client to WebSocket
|
||||
TODO implement selection by Route
|
||||
:return: <WebSocketAsync> client
|
||||
"""
|
||||
clients = WebSocketAsync.get_clients()
|
||||
client = None
|
||||
if len(clients) > 0:
|
||||
key = list(clients.keys())[0]
|
||||
client = clients.get(key)
|
||||
|
||||
return client
|
||||
|
||||
def open(self, path):
|
||||
"""Open file located at 'path' (local).
|
||||
|
||||
Args:
|
||||
path(string): file path locally
|
||||
Returns: None
|
||||
"""
|
||||
self.websocketserver.call(
|
||||
self.client.call('Photoshop.open', path=path)
|
||||
)
|
||||
|
||||
def read(self, layer, layers_meta=None):
|
||||
"""Parses layer metadata from Headline field of active document.
|
||||
|
||||
Args:
|
||||
layer: (PSItem)
|
||||
layers_meta: full list from Headline (for performance in loops)
|
||||
Returns:
|
||||
(dict) of layer metadata stored in PS file
|
||||
|
||||
Example:
|
||||
{
|
||||
'id': 'pyblish.avalon.container',
|
||||
'loader': 'ImageLoader',
|
||||
'members': ['64'],
|
||||
'name': 'imageMainMiddle',
|
||||
'namespace': 'Hero_imageMainMiddle_001',
|
||||
'representation': '6203dc91e80934d9f6ee7d96',
|
||||
'schema': 'openpype:container-2.0'
|
||||
}
|
||||
"""
|
||||
if layers_meta is None:
|
||||
layers_meta = self.get_layers_metadata()
|
||||
|
||||
for layer_meta in layers_meta:
|
||||
layer_id = layer_meta.get("uuid") # legacy
|
||||
if layer_meta.get("members"):
|
||||
layer_id = layer_meta["members"][0]
|
||||
if str(layer.id) == str(layer_id):
|
||||
return layer_meta
|
||||
print("Unable to find layer metadata for {}".format(layer.id))
|
||||
|
||||
def imprint(self, item_id, data, all_layers=None, items_meta=None):
|
||||
"""Save layer metadata to Headline field of active document
|
||||
|
||||
Stores metadata in format:
|
||||
[{
|
||||
"active":true,
|
||||
"productName":"imageBG",
|
||||
"productType":"image",
|
||||
"id":"ayon.create.instance",
|
||||
"folderPath":"Town",
|
||||
"uuid": "8"
|
||||
}] - for created instances
|
||||
OR
|
||||
[{
|
||||
"schema": "openpype:container-2.0",
|
||||
"id": "ayon.create.instance",
|
||||
"name": "imageMG",
|
||||
"namespace": "Jungle_imageMG_001",
|
||||
"loader": "ImageLoader",
|
||||
"representation": "5fbfc0ee30a946093c6ff18a",
|
||||
"members": [
|
||||
"40"
|
||||
]
|
||||
}] - for loaded instances
|
||||
|
||||
Args:
|
||||
item_id (str):
|
||||
data(string): json representation for single layer
|
||||
all_layers (list of PSItem): for performance, could be
|
||||
injected for usage in loop, if not, single call will be
|
||||
triggered
|
||||
items_meta(string): json representation from Headline
|
||||
(for performance - provide only if imprint is in
|
||||
loop - value should be same)
|
||||
Returns: None
|
||||
"""
|
||||
if not items_meta:
|
||||
items_meta = self.get_layers_metadata()
|
||||
|
||||
# json.dumps writes integer values in a dictionary to string, so
|
||||
# anticipating it here.
|
||||
item_id = str(item_id)
|
||||
is_new = True
|
||||
result_meta = []
|
||||
for item_meta in items_meta:
|
||||
if ((item_meta.get('members') and
|
||||
item_id == str(item_meta.get('members')[0])) or
|
||||
item_meta.get("instance_id") == item_id):
|
||||
is_new = False
|
||||
if data:
|
||||
item_meta.update(data)
|
||||
result_meta.append(item_meta)
|
||||
else:
|
||||
result_meta.append(item_meta)
|
||||
|
||||
if is_new:
|
||||
result_meta.append(data)
|
||||
|
||||
# Ensure only valid ids are stored.
|
||||
if not all_layers:
|
||||
all_layers = self.get_layers()
|
||||
layer_ids = [layer.id for layer in all_layers]
|
||||
cleaned_data = []
|
||||
|
||||
for item in result_meta:
|
||||
if item.get("members"):
|
||||
if int(item["members"][0]) not in layer_ids:
|
||||
continue
|
||||
|
||||
cleaned_data.append(item)
|
||||
|
||||
payload = json.dumps(cleaned_data, indent=4)
|
||||
self.websocketserver.call(
|
||||
self.client.call('Photoshop.imprint', payload=payload)
|
||||
)
|
||||
|
||||
def get_layers(self):
|
||||
"""Returns JSON document with all(?) layers in active document.
|
||||
|
||||
Returns: <list of PSItem>
|
||||
Format of tuple: { 'id':'123',
|
||||
'name': 'My Layer 1',
|
||||
'type': 'GUIDE'|'FG'|'BG'|'OBJ'
|
||||
'visible': 'true'|'false'
|
||||
"""
|
||||
res = self.websocketserver.call(
|
||||
self.client.call('Photoshop.get_layers')
|
||||
)
|
||||
|
||||
return self._to_records(res)
|
||||
|
||||
def get_layer(self, layer_id):
|
||||
"""
|
||||
Returns PSItem for specific 'layer_id' or None if not found
|
||||
Args:
|
||||
layer_id (string): unique layer id, stored in 'uuid' field
|
||||
|
||||
Returns:
|
||||
(PSItem) or None
|
||||
"""
|
||||
layers = self.get_layers()
|
||||
for layer in layers:
|
||||
if str(layer.id) == str(layer_id):
|
||||
return layer
|
||||
|
||||
def get_layers_in_layers(self, layers):
|
||||
"""Return all layers that belong to layers (might be groups).
|
||||
|
||||
Args:
|
||||
layers <list of PSItem>:
|
||||
|
||||
Returns:
|
||||
<list of PSItem>
|
||||
"""
|
||||
parent_ids = set([lay.id for lay in layers])
|
||||
|
||||
return self._get_layers_in_layers(parent_ids)
|
||||
|
||||
def get_layers_in_layers_ids(self, layers_ids, layers=None):
|
||||
"""Return all layers that belong to layers (might be groups).
|
||||
|
||||
Args:
|
||||
layers_ids <list of Int>
|
||||
layers <list of PSItem>:
|
||||
|
||||
Returns:
|
||||
<list of PSItem>
|
||||
"""
|
||||
parent_ids = set(layers_ids)
|
||||
|
||||
return self._get_layers_in_layers(parent_ids, layers)
|
||||
|
||||
def _get_layers_in_layers(self, parent_ids, layers=None):
|
||||
if not layers:
|
||||
layers = self.get_layers()
|
||||
|
||||
all_layers = layers
|
||||
ret = []
|
||||
|
||||
for layer in all_layers:
|
||||
parents = set(layer.parents)
|
||||
if len(parent_ids & parents) > 0:
|
||||
ret.append(layer)
|
||||
if layer.id in parent_ids:
|
||||
ret.append(layer)
|
||||
|
||||
return ret
|
||||
|
||||
def create_group(self, name):
|
||||
"""Create new group (eg. LayerSet)
|
||||
|
||||
Returns:
|
||||
<PSItem>
|
||||
"""
|
||||
enhanced_name = self.PUBLISH_ICON + name
|
||||
ret = self.websocketserver.call(
|
||||
self.client.call('Photoshop.create_group', name=enhanced_name)
|
||||
)
|
||||
# create group on PS is asynchronous, returns only id
|
||||
return PSItem(id=ret, name=name, group=True)
|
||||
|
||||
def group_selected_layers(self, name):
|
||||
"""Group selected layers into new LayerSet (eg. group)
|
||||
|
||||
Returns:
|
||||
(Layer)
|
||||
"""
|
||||
enhanced_name = self.PUBLISH_ICON + name
|
||||
res = self.websocketserver.call(
|
||||
self.client.call(
|
||||
'Photoshop.group_selected_layers', name=enhanced_name
|
||||
)
|
||||
)
|
||||
res = self._to_records(res)
|
||||
if res:
|
||||
rec = res.pop()
|
||||
rec.name = rec.name.replace(self.PUBLISH_ICON, '')
|
||||
return rec
|
||||
raise ValueError("No group record returned")
|
||||
|
||||
def get_selected_layers(self):
|
||||
"""Get a list of actually selected layers.
|
||||
|
||||
Returns: <list of Layer('id':XX, 'name':"YYY")>
|
||||
"""
|
||||
res = self.websocketserver.call(
|
||||
self.client.call('Photoshop.get_selected_layers')
|
||||
)
|
||||
return self._to_records(res)
|
||||
|
||||
def select_layers(self, layers):
|
||||
"""Selects specified layers in Photoshop by its ids.
|
||||
|
||||
Args:
|
||||
layers: <list of Layer('id':XX, 'name':"YYY")>
|
||||
"""
|
||||
layers_id = [str(lay.id) for lay in layers]
|
||||
self.websocketserver.call(
|
||||
self.client.call(
|
||||
'Photoshop.select_layers',
|
||||
layers=json.dumps(layers_id)
|
||||
)
|
||||
)
|
||||
|
||||
def get_active_document_full_name(self):
|
||||
"""Returns full name with path of active document via ws call
|
||||
|
||||
Returns(string):
|
||||
full path with name
|
||||
"""
|
||||
res = self.websocketserver.call(
|
||||
self.client.call('Photoshop.get_active_document_full_name')
|
||||
)
|
||||
|
||||
return res
|
||||
|
||||
def get_active_document_name(self):
|
||||
"""Returns just a name of active document via ws call
|
||||
|
||||
Returns(string):
|
||||
file name
|
||||
"""
|
||||
return self.websocketserver.call(
|
||||
self.client.call('Photoshop.get_active_document_name')
|
||||
)
|
||||
|
||||
def is_saved(self):
|
||||
"""Returns true if no changes in active document
|
||||
|
||||
Returns:
|
||||
<boolean>
|
||||
"""
|
||||
return self.websocketserver.call(
|
||||
self.client.call('Photoshop.is_saved')
|
||||
)
|
||||
|
||||
def save(self):
|
||||
"""Saves active document"""
|
||||
self.websocketserver.call(
|
||||
self.client.call('Photoshop.save')
|
||||
)
|
||||
|
||||
def saveAs(self, image_path, ext, as_copy):
|
||||
"""Saves active document to psd (copy) or png or jpg
|
||||
|
||||
Args:
|
||||
image_path(string): full local path
|
||||
ext: <string psd|jpg|png>
|
||||
as_copy: <boolean>
|
||||
Returns: None
|
||||
"""
|
||||
self.websocketserver.call(
|
||||
self.client.call(
|
||||
'Photoshop.saveAs',
|
||||
image_path=image_path,
|
||||
ext=ext,
|
||||
as_copy=as_copy
|
||||
)
|
||||
)
|
||||
|
||||
def set_visible(self, layer_id, visibility):
|
||||
"""Set layer with 'layer_id' to 'visibility'
|
||||
|
||||
Args:
|
||||
layer_id: <int>
|
||||
visibility: <true - set visible, false - hide>
|
||||
Returns: None
|
||||
"""
|
||||
self.websocketserver.call(
|
||||
self.client.call(
|
||||
'Photoshop.set_visible',
|
||||
layer_id=layer_id,
|
||||
visibility=visibility
|
||||
)
|
||||
)
|
||||
|
||||
def hide_all_others_layers(self, layers):
|
||||
"""hides all layers that are not part of the list or that are not
|
||||
children of this list
|
||||
|
||||
Args:
|
||||
layers (list): list of PSItem - highest hierarchy
|
||||
"""
|
||||
extract_ids = set([ll.id for ll in self.get_layers_in_layers(layers)])
|
||||
|
||||
self.hide_all_others_layers_ids(extract_ids)
|
||||
|
||||
def hide_all_others_layers_ids(self, extract_ids, layers=None):
|
||||
"""hides all layers that are not part of the list or that are not
|
||||
children of this list
|
||||
|
||||
Args:
|
||||
extract_ids (list): list of integer that should be visible
|
||||
layers (list) of PSItem (used for caching)
|
||||
"""
|
||||
if not layers:
|
||||
layers = self.get_layers()
|
||||
for layer in layers:
|
||||
if layer.visible and layer.id not in extract_ids:
|
||||
self.set_visible(layer.id, False)
|
||||
|
||||
def get_layers_metadata(self):
|
||||
"""Reads layers metadata from Headline from active document in PS.
|
||||
(Headline accessible by File > File Info)
|
||||
|
||||
Returns:
|
||||
(list)
|
||||
example:
|
||||
{"8":{"active":true,"productName":"imageBG",
|
||||
"productType":"image","id":"ayon.create.instance",
|
||||
"folderPath":"/Town"}}
|
||||
8 is layer(group) id - used for deletion, update etc.
|
||||
"""
|
||||
res = self.websocketserver.call(self.client.call('Photoshop.read'))
|
||||
layers_data = []
|
||||
try:
|
||||
if res:
|
||||
layers_data = json.loads(res)
|
||||
except json.decoder.JSONDecodeError:
|
||||
raise ValueError("{} cannot be parsed, recreate meta".format(res))
|
||||
# format of metadata changed from {} to [] because of standardization
|
||||
# keep current implementation logic as its working
|
||||
if isinstance(layers_data, dict):
|
||||
for layer_id, layer_meta in layers_data.items():
|
||||
if layer_meta.get("schema") != "openpype:container-2.0":
|
||||
layer_meta["members"] = [str(layer_id)]
|
||||
layers_data = list(layers_data.values())
|
||||
return layers_data
|
||||
|
||||
def import_smart_object(self, path, layer_name, as_reference=False):
|
||||
"""Import the file at `path` as a smart object to active document.
|
||||
|
||||
Args:
|
||||
path (str): File path to import.
|
||||
layer_name (str): Unique layer name to differentiate how many times
|
||||
same smart object was loaded
|
||||
as_reference (bool): pull in content or reference
|
||||
"""
|
||||
enhanced_name = self.LOADED_ICON + layer_name
|
||||
res = self.websocketserver.call(
|
||||
self.client.call(
|
||||
'Photoshop.import_smart_object',
|
||||
path=path,
|
||||
name=enhanced_name,
|
||||
as_reference=as_reference
|
||||
)
|
||||
)
|
||||
rec = self._to_records(res).pop()
|
||||
if rec:
|
||||
rec.name = rec.name.replace(self.LOADED_ICON, '')
|
||||
return rec
|
||||
|
||||
def replace_smart_object(self, layer, path, layer_name):
|
||||
"""Replace the smart object `layer` with file at `path`
|
||||
|
||||
Args:
|
||||
layer (PSItem):
|
||||
path (str): File to import.
|
||||
layer_name (str): Unique layer name to differentiate how many times
|
||||
same smart object was loaded
|
||||
"""
|
||||
enhanced_name = self.LOADED_ICON + layer_name
|
||||
self.websocketserver.call(
|
||||
self.client.call(
|
||||
'Photoshop.replace_smart_object',
|
||||
layer_id=layer.id,
|
||||
path=path,
|
||||
name=enhanced_name
|
||||
)
|
||||
)
|
||||
|
||||
def delete_layer(self, layer_id):
|
||||
"""Deletes specific layer by it's id.
|
||||
|
||||
Args:
|
||||
layer_id (int): id of layer to delete
|
||||
"""
|
||||
self.websocketserver.call(
|
||||
self.client.call('Photoshop.delete_layer', layer_id=layer_id)
|
||||
)
|
||||
|
||||
def rename_layer(self, layer_id, name):
|
||||
"""Renames specific layer by it's id.
|
||||
|
||||
Args:
|
||||
layer_id (int): id of layer to rename
|
||||
name (str): new name
|
||||
"""
|
||||
self.websocketserver.call(
|
||||
self.client.call(
|
||||
'Photoshop.rename_layer',
|
||||
layer_id=layer_id,
|
||||
name=name
|
||||
)
|
||||
)
|
||||
|
||||
def remove_instance(self, instance_id):
|
||||
cleaned_data = []
|
||||
|
||||
for item in self.get_layers_metadata():
|
||||
inst_id = item.get("instance_id") or item.get("uuid")
|
||||
if inst_id != instance_id:
|
||||
cleaned_data.append(item)
|
||||
|
||||
payload = json.dumps(cleaned_data, indent=4)
|
||||
|
||||
self.websocketserver.call(
|
||||
self.client.call('Photoshop.imprint', payload=payload)
|
||||
)
|
||||
|
||||
def get_extension_version(self):
|
||||
"""Returns version number of installed extension."""
|
||||
return self.websocketserver.call(
|
||||
self.client.call('Photoshop.get_extension_version')
|
||||
)
|
||||
|
||||
def close(self):
|
||||
"""Shutting down PS and process too.
|
||||
|
||||
For webpublishing only.
|
||||
"""
|
||||
# TODO change client.call to method with checks for client
|
||||
self.websocketserver.call(self.client.call('Photoshop.close'))
|
||||
|
||||
def _to_records(self, res):
|
||||
"""Converts string json representation into list of PSItem for
|
||||
dot notation access to work.
|
||||
|
||||
Args:
|
||||
res (string): valid json
|
||||
|
||||
Returns:
|
||||
<list of PSItem>
|
||||
"""
|
||||
try:
|
||||
layers_data = json.loads(res)
|
||||
except json.decoder.JSONDecodeError:
|
||||
raise ValueError("Received broken JSON {}".format(res))
|
||||
ret = []
|
||||
|
||||
# convert to PSItem to use dot notation
|
||||
if isinstance(layers_data, dict):
|
||||
layers_data = [layers_data]
|
||||
for d in layers_data:
|
||||
# currently implemented and expected fields
|
||||
ret.append(PSItem(
|
||||
d.get('id'),
|
||||
d.get('name'),
|
||||
d.get('group'),
|
||||
d.get('parents'),
|
||||
d.get('visible'),
|
||||
d.get('type'),
|
||||
d.get('members'),
|
||||
d.get('long_name'),
|
||||
d.get("color_code"),
|
||||
d.get("instance_id")
|
||||
))
|
||||
return ret
|
||||
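Putting the stub methods above together, the metadata round-trip that the rest of the integration relies on looks roughly like this (a sketch, assuming a connected client, an open document, and an existing group layer with id "8"):

```
from ayon_photoshop.api.ws_stub import PhotoshopServerStub

stub = PhotoshopServerStub()

# Read everything currently stored in File > File Info > Headline.
items = stub.get_layers_metadata()

# Imprint (create or update) one entry keyed by its group layer id.
stub.imprint("8", {
    "id": "ayon.create.instance",
    "productName": "imageBG",
    "productType": "image",
    "folderPath": "/Town",
    "members": ["8"],
})
```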
|
|
@ -0,0 +1,88 @@
|
|||
import os
|
||||
import platform
|
||||
import subprocess
|
||||
|
||||
from ayon_core.lib import (
|
||||
get_ayon_launcher_args,
|
||||
is_using_ayon_console,
|
||||
)
|
||||
from ayon_applications import PreLaunchHook, LaunchTypes
|
||||
from ayon_photoshop import get_launch_script_path
|
||||
|
||||
|
||||
def get_launch_kwargs(kwargs):
|
||||
"""Explicit setting of kwargs for Popen for Photoshop.
|
||||
|
||||
Expected behavior
|
||||
- ayon_console opens window with logs
|
||||
- ayon has stdout/stderr available for capturing
|
||||
|
||||
Args:
|
||||
kwargs (Union[dict, None]): Current kwargs or None.
|
||||
|
||||
"""
|
||||
if kwargs is None:
|
||||
kwargs = {}
|
||||
|
||||
if platform.system().lower() != "windows":
|
||||
return kwargs
|
||||
|
||||
if not is_using_ayon_console():
|
||||
kwargs.update({
|
||||
"creationflags": subprocess.CREATE_NEW_CONSOLE
|
||||
})
|
||||
else:
|
||||
kwargs.update({
|
||||
"creationflags": subprocess.CREATE_NO_WINDOW,
|
||||
"stdout": subprocess.DEVNULL,
|
||||
"stderr": subprocess.DEVNULL
|
||||
})
|
||||
return kwargs
|
||||
|
||||
|
||||
class PhotoshopPrelaunchHook(PreLaunchHook):
|
||||
"""Launch arguments preparation.
|
||||
|
||||
Hook adds python executable and script path to the Photoshop implementation
before the Photoshop executable and adds the last workfile path to the launch
arguments.

Existence of the last workfile is checked. If the workfile does not exist, it
tries to copy a templated workfile from a predefined path.
|
||||
"""
|
||||
app_groups = {"photoshop"}
|
||||
|
||||
order = 20
|
||||
launch_types = {LaunchTypes.local}
|
||||
|
||||
def execute(self):
|
||||
# Pop executable
|
||||
executable_path = self.launch_context.launch_args.pop(0)
|
||||
|
||||
# Pop rest of launch arguments - There should not be other arguments!
|
||||
remainders = []
|
||||
while self.launch_context.launch_args:
|
||||
remainders.append(self.launch_context.launch_args.pop(0))
|
||||
|
||||
script_path = get_launch_script_path()
|
||||
|
||||
new_launch_args = get_ayon_launcher_args(
|
||||
"run", script_path, executable_path
|
||||
)
|
||||
# Add workfile path if exists
|
||||
workfile_path = self.data["last_workfile_path"]
|
||||
if (
|
||||
self.data.get("start_last_workfile")
|
||||
and workfile_path
|
||||
and os.path.exists(workfile_path)
|
||||
):
|
||||
new_launch_args.append(workfile_path)
|
||||
|
||||
# Append as whole list as these arguments should not be separated
|
||||
self.launch_context.launch_args.append(new_launch_args)
|
||||
|
||||
if remainders:
|
||||
self.launch_context.launch_args.extend(remainders)
|
||||
|
||||
self.launch_context.kwargs = get_launch_kwargs(
|
||||
self.launch_context.kwargs
|
||||
)
|
||||
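The net effect of the hook is easiest to see on the argument list itself; an illustration with placeholder paths (not real install locations):

```
# Before execute():
#   ["C:/Program Files/Adobe/Adobe Photoshop 2020/Photoshop.exe"]
#
# After execute() (appended as one nested list on purpose so the arguments
# are not split apart later):
#   [[
#       "<ayon launcher executable>", "run",
#       ".../ayon_photoshop/api/launch_script.py",
#       "C:/Program Files/Adobe/Adobe Photoshop 2020/Photoshop.exe",
#       "C:/projects/demo/work/sh010_compositing_v001.psd",  # only if it exists
#   ]]
```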
127
server_addon/photoshop/client/ayon_photoshop/lib.py
Normal file
127
server_addon/photoshop/client/ayon_photoshop/lib.py
Normal file
|
|
@ -0,0 +1,127 @@
|
|||
import re
|
||||
|
||||
import ayon_api
|
||||
|
||||
from ayon_core.lib import prepare_template_data
|
||||
from ayon_core.pipeline import (
|
||||
AutoCreator,
|
||||
CreatedInstance
|
||||
)
|
||||
from ayon_photoshop import api
|
||||
from ayon_photoshop.api.pipeline import cache_and_get_instances
|
||||
|
||||
|
||||
class PSAutoCreator(AutoCreator):
|
||||
"""Generic autocreator to extend."""
|
||||
def get_instance_attr_defs(self):
|
||||
return []
|
||||
|
||||
def collect_instances(self):
|
||||
for instance_data in cache_and_get_instances(self):
|
||||
creator_id = instance_data.get("creator_identifier")
|
||||
|
||||
if creator_id == self.identifier:
|
||||
instance = CreatedInstance.from_existing(
|
||||
instance_data, self
|
||||
)
|
||||
self._add_instance_to_context(instance)
|
||||
|
||||
def update_instances(self, update_list):
|
||||
self.log.debug("update_list:: {}".format(update_list))
|
||||
for created_inst, _changes in update_list:
|
||||
api.stub().imprint(created_inst.get("instance_id"),
|
||||
created_inst.data_to_store())
|
||||
|
||||
def create(self, options=None):
|
||||
existing_instance = None
|
||||
for instance in self.create_context.instances:
|
||||
if instance.product_type == self.product_type:
|
||||
existing_instance = instance
|
||||
break
|
||||
|
||||
context = self.create_context
|
||||
project_name = context.get_current_project_name()
|
||||
folder_path = context.get_current_folder_path()
|
||||
task_name = context.get_current_task_name()
|
||||
host_name = context.host_name
|
||||
|
||||
if existing_instance is None:
|
||||
existing_instance_folder = None
|
||||
else:
|
||||
existing_instance_folder = existing_instance["folderPath"]
|
||||
|
||||
if existing_instance is None:
|
||||
folder_entity = ayon_api.get_folder_by_path(
|
||||
project_name, folder_path
|
||||
)
|
||||
task_entity = ayon_api.get_task_by_name(
|
||||
project_name, folder_entity["id"], task_name
|
||||
)
|
||||
product_name = self.get_product_name(
|
||||
project_name,
|
||||
folder_entity,
|
||||
task_entity,
|
||||
self.default_variant,
|
||||
host_name,
|
||||
)
|
||||
data = {
|
||||
"folderPath": folder_path,
|
||||
"task": task_name,
|
||||
"variant": self.default_variant
|
||||
}
|
||||
data.update(self.get_dynamic_data(
|
||||
project_name,
|
||||
folder_entity,
|
||||
task_entity,
|
||||
self.default_variant,
|
||||
host_name,
|
||||
None
|
||||
))
|
||||
|
||||
if not self.active_on_create:
|
||||
data["active"] = False
|
||||
|
||||
new_instance = CreatedInstance(
|
||||
self.product_type, product_name, data, self
|
||||
)
|
||||
self._add_instance_to_context(new_instance)
|
||||
api.stub().imprint(new_instance.get("instance_id"),
|
||||
new_instance.data_to_store())
|
||||
|
||||
elif (
|
||||
existing_instance_folder != folder_path
|
||||
or existing_instance["task"] != task_name
|
||||
):
|
||||
folder_entity = ayon_api.get_folder_by_path(
|
||||
project_name, folder_path
|
||||
)
|
||||
task_entity = ayon_api.get_task_by_name(
|
||||
project_name, folder_entity["id"], task_name
|
||||
)
|
||||
product_name = self.get_product_name(
|
||||
project_name,
|
||||
folder_entity,
|
||||
task_entity,
|
||||
self.default_variant,
|
||||
host_name,
|
||||
)
|
||||
existing_instance["folderPath"] = folder_path
|
||||
existing_instance["task"] = task_name
|
||||
existing_instance["productName"] = product_name
|
||||
|
||||
|
||||
def clean_product_name(product_name):
|
||||
"""Clean all variants leftover {layer} from product name."""
|
||||
dynamic_data = prepare_template_data({"layer": "{layer}"})
|
||||
for value in dynamic_data.values():
|
||||
if value in product_name:
|
||||
product_name = (
|
||||
product_name
|
||||
.replace(value, "")
|
||||
.replace("__", "_")
|
||||
.replace("..", ".")
|
||||
)
|
||||
# clean trailing separator as Main_
|
||||
pattern = r'[\W_]+$'
|
||||
replacement = ''
|
||||
return re.sub(pattern, replacement, product_name)
|
||||
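A short sketch of what `clean_product_name` does with leftover placeholders, assuming `prepare_template_data` yields the usual case variants (`{layer}`, `{Layer}`, `{LAYER}`):

```
clean_product_name("imageMain{Layer}")       # -> "imageMain"
clean_product_name("image_{LAYER}_Beauty_")  # -> "image_Beauty"
```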
|
|
@ -0,0 +1,156 @@
|
|||
import ayon_api
|
||||
|
||||
from ayon_photoshop import api
|
||||
from ayon_photoshop.lib import PSAutoCreator, clean_product_name
|
||||
from ayon_core.lib import BoolDef, prepare_template_data
|
||||
from ayon_core.pipeline.create import get_product_name, CreatedInstance
|
||||
|
||||
|
||||
class AutoImageCreator(PSAutoCreator):
|
||||
"""Creates flatten image from all visible layers.
|
||||
|
||||
Used in simplified publishing as auto created instance.
|
||||
Must be enabled in Setting and template for product name provided
|
||||
"""
|
||||
identifier = "auto_image"
|
||||
product_type = "image"
|
||||
|
||||
# Settings
|
||||
default_variant = ""
|
||||
# - Mark by default instance for review
|
||||
mark_for_review = True
|
||||
active_on_create = True
|
||||
|
||||
def create(self, options=None):
|
||||
existing_instance = None
|
||||
for instance in self.create_context.instances:
|
||||
if instance.creator_identifier == self.identifier:
|
||||
existing_instance = instance
|
||||
break
|
||||
|
||||
context = self.create_context
|
||||
project_name = context.get_current_project_name()
|
||||
folder_path = context.get_current_folder_path()
|
||||
task_name = context.get_current_task_name()
|
||||
host_name = context.host_name
|
||||
folder_entity = ayon_api.get_folder_by_path(project_name, folder_path)
|
||||
task_entity = ayon_api.get_task_by_name(
|
||||
project_name, folder_entity["id"], task_name
|
||||
)
|
||||
|
||||
existing_folder_path = None
|
||||
if existing_instance is not None:
|
||||
existing_folder_path = existing_instance["folderPath"]
|
||||
|
||||
if existing_instance is None:
|
||||
product_name = self.get_product_name(
|
||||
project_name,
|
||||
folder_entity,
|
||||
task_entity,
|
||||
self.default_variant,
|
||||
host_name,
|
||||
)
|
||||
|
||||
data = {
|
||||
"folderPath": folder_path,
|
||||
"task": task_name,
|
||||
}
|
||||
|
||||
if not self.active_on_create:
|
||||
data["active"] = False
|
||||
|
||||
creator_attributes = {"mark_for_review": self.mark_for_review}
|
||||
data.update({"creator_attributes": creator_attributes})
|
||||
|
||||
new_instance = CreatedInstance(
|
||||
self.product_type, product_name, data, self
|
||||
)
|
||||
self._add_instance_to_context(new_instance)
|
||||
api.stub().imprint(new_instance.get("instance_id"),
|
||||
new_instance.data_to_store())
|
||||
|
||||
elif ( # existing instance from different context
|
||||
existing_folder_path != folder_path
|
||||
or existing_instance["task"] != task_name
|
||||
):
|
||||
product_name = self.get_product_name(
|
||||
project_name,
|
||||
folder_entity,
|
||||
task_entity,
|
||||
self.default_variant,
|
||||
host_name,
|
||||
)
|
||||
existing_instance["folderPath"] = folder_path
|
||||
existing_instance["task"] = task_name
|
||||
existing_instance["productName"] = product_name
|
||||
|
||||
api.stub().imprint(existing_instance.get("instance_id"),
|
||||
existing_instance.data_to_store())
|
||||
|
||||
def get_pre_create_attr_defs(self):
|
||||
return [
|
||||
BoolDef(
|
||||
"mark_for_review",
|
||||
label="Review",
|
||||
default=self.mark_for_review
|
||||
)
|
||||
]
|
||||
|
||||
def get_instance_attr_defs(self):
|
||||
return [
|
||||
BoolDef(
|
||||
"mark_for_review",
|
||||
label="Review"
|
||||
)
|
||||
]
|
||||
|
||||
def apply_settings(self, project_settings):
|
||||
plugin_settings = (
|
||||
project_settings["photoshop"]["create"]["AutoImageCreator"]
|
||||
)
|
||||
|
||||
self.active_on_create = plugin_settings["active_on_create"]
|
||||
self.default_variant = plugin_settings["default_variant"]
|
||||
self.mark_for_review = plugin_settings["mark_for_review"]
|
||||
self.enabled = plugin_settings["enabled"]
|
||||
|
||||
def get_detail_description(self):
|
||||
return """Creator for flatten image.
|
||||
|
||||
Studio might configure a simplified publishing workflow. In that case
an `image` instance is automatically created which will publish a flat
image from all visible layers.
|
||||
|
||||
Artist might disable this instance from publishing or from creating
|
||||
review for it though.
|
||||
"""
|
||||
|
||||
def get_product_name(
|
||||
self,
|
||||
project_name,
|
||||
folder_entity,
|
||||
task_entity,
|
||||
variant,
|
||||
host_name=None,
|
||||
instance=None
|
||||
):
|
||||
if host_name is None:
|
||||
host_name = self.create_context.host_name
|
||||
|
||||
task_name = task_type = None
|
||||
if task_entity:
|
||||
task_name = task_entity["name"]
|
||||
task_type = task_entity["taskType"]
|
||||
|
||||
dynamic_data = prepare_template_data({"layer": "{layer}"})
|
||||
|
||||
product_name = get_product_name(
|
||||
project_name,
|
||||
task_name,
|
||||
task_type,
|
||||
host_name,
|
||||
self.product_type,
|
||||
variant,
|
||||
dynamic_data=dynamic_data
|
||||
)
|
||||
return clean_product_name(product_name)
|
||||
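For reference, `apply_settings` above expects the project settings to provide this sub-dictionary; a sketch with illustrative values, not defaults taken from any real project:

```
project_settings = {
    "photoshop": {
        "create": {
            "AutoImageCreator": {
                "enabled": True,
                "active_on_create": True,
                "default_variant": "",
                "mark_for_review": True,
            }
        }
    }
}
```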
|
|
@ -0,0 +1,265 @@
|
|||
import re
|
||||
|
||||
from ayon_core.lib import BoolDef
|
||||
from ayon_core.pipeline import (
|
||||
Creator,
|
||||
CreatedInstance,
|
||||
CreatorError
|
||||
)
|
||||
from ayon_core.lib import prepare_template_data
|
||||
from ayon_core.pipeline.create import PRODUCT_NAME_ALLOWED_SYMBOLS
|
||||
from ayon_photoshop import api
|
||||
from ayon_photoshop.api.pipeline import cache_and_get_instances
|
||||
from ayon_photoshop.lib import clean_product_name
|
||||
|
||||
|
||||
class ImageCreator(Creator):
|
||||
"""Creates image instance for publishing.
|
||||
|
||||
Result of 'image' instance is image of all visible layers, or image(s) of
|
||||
selected layers.
|
||||
"""
|
||||
identifier = "image"
|
||||
label = "Image"
|
||||
product_type = "image"
|
||||
description = "Image creator"
|
||||
|
||||
# Settings
|
||||
default_variants = ""
|
||||
mark_for_review = False
|
||||
active_on_create = True
|
||||
|
||||
def create(self, product_name_from_ui, data, pre_create_data):
|
||||
groups_to_create = []
|
||||
top_layers_to_wrap = []
|
||||
create_empty_group = False
|
||||
|
||||
stub = api.stub() # only after PS is up
|
||||
if pre_create_data.get("use_selection"):
|
||||
try:
|
||||
top_level_selected_items = stub.get_selected_layers()
|
||||
except ValueError:
|
||||
raise CreatorError("Cannot group locked Background layer!")
|
||||
|
||||
only_single_item_selected = len(top_level_selected_items) == 1
|
||||
if (
|
||||
only_single_item_selected or
|
||||
pre_create_data.get("create_multiple")):
|
||||
for selected_item in top_level_selected_items:
|
||||
if selected_item.group:
|
||||
groups_to_create.append(selected_item)
|
||||
else:
|
||||
top_layers_to_wrap.append(selected_item)
|
||||
else:
|
||||
group = stub.group_selected_layers(product_name_from_ui)
|
||||
groups_to_create.append(group)
|
||||
else:
|
||||
try:
|
||||
stub.select_layers(stub.get_layers())
|
||||
group = stub.group_selected_layers(product_name_from_ui)
|
||||
except ValueError:
|
||||
raise CreatorError("Cannot group locked Background layer!")
|
||||
|
||||
groups_to_create.append(group)
|
||||
|
||||
# create empty group if nothing selected
|
||||
if not groups_to_create and not top_layers_to_wrap:
|
||||
group = stub.create_group(product_name_from_ui)
|
||||
groups_to_create.append(group)
|
||||
|
||||
# wrap each top level layer into separate new group
|
||||
for layer in top_layers_to_wrap:
|
||||
stub.select_layers([layer])
|
||||
group = stub.group_selected_layers(layer.name)
|
||||
groups_to_create.append(group)
|
||||
|
||||
layer_name = ''
|
||||
# use artist chosen option OR force layer if more products are created
|
||||
# to differentiate them
|
||||
use_layer_name = (pre_create_data.get("use_layer_name") or
|
||||
len(groups_to_create) > 1)
|
||||
for group in groups_to_create:
|
||||
product_name = product_name_from_ui # reset to name from creator UI
|
||||
layer_names_in_hierarchy = []
|
||||
created_group_name = self._clean_highlights(stub, group.name)
|
||||
|
||||
if use_layer_name:
|
||||
layer_name = re.sub(
|
||||
"[^{}]+".format(PRODUCT_NAME_ALLOWED_SYMBOLS),
|
||||
"",
|
||||
group.name
|
||||
)
|
||||
if "{layer}" not in product_name.lower():
|
||||
product_name += "{Layer}"
|
||||
|
||||
layer_fill = prepare_template_data({"layer": layer_name})
|
||||
product_name = product_name.format(**layer_fill)
|
||||
product_name = clean_product_name(product_name)
|
||||
|
||||
if group.long_name:
|
||||
for directory in group.long_name[::-1]:
|
||||
name = self._clean_highlights(stub, directory)
|
||||
layer_names_in_hierarchy.append(name)
|
||||
|
||||
data_update = {
|
||||
"productName": product_name,
|
||||
"members": [str(group.id)],
|
||||
"layer_name": layer_name,
|
||||
"long_name": "_".join(layer_names_in_hierarchy)
|
||||
}
|
||||
data.update(data_update)
|
||||
|
||||
mark_for_review = (pre_create_data.get("mark_for_review") or
|
||||
self.mark_for_review)
|
||||
creator_attributes = {"mark_for_review": mark_for_review}
|
||||
data.update({"creator_attributes": creator_attributes})
|
||||
|
||||
if not self.active_on_create:
|
||||
data["active"] = False
|
||||
|
||||
new_instance = CreatedInstance(
|
||||
self.product_type, product_name, data, self
|
||||
)
|
||||
|
||||
stub.imprint(new_instance.get("instance_id"),
|
||||
new_instance.data_to_store())
|
||||
self._add_instance_to_context(new_instance)
|
||||
# reusing existing group, need to rename afterwards
|
||||
if not create_empty_group:
|
||||
stub.rename_layer(group.id,
|
||||
stub.PUBLISH_ICON + created_group_name)
|
||||
|
||||
def collect_instances(self):
|
||||
for instance_data in cache_and_get_instances(self):
|
||||
# legacy instances have family=='image'
|
||||
creator_id = (instance_data.get("creator_identifier") or
|
||||
instance_data.get("family"))
|
||||
|
||||
if creator_id == self.identifier:
|
||||
instance_data = self._handle_legacy(instance_data)
|
||||
instance = CreatedInstance.from_existing(
|
||||
instance_data, self
|
||||
)
|
||||
self._add_instance_to_context(instance)
|
||||
|
||||
def update_instances(self, update_list):
|
||||
self.log.debug("update_list:: {}".format(update_list))
|
||||
for created_inst, _changes in update_list:
|
||||
if created_inst.get("layer"):
|
||||
# not storing PSItem layer to metadata
|
||||
created_inst.pop("layer")
|
||||
api.stub().imprint(created_inst.get("instance_id"),
|
||||
created_inst.data_to_store())
|
||||
|
||||
def remove_instances(self, instances):
|
||||
for instance in instances:
|
||||
self.host.remove_instance(instance)
|
||||
self._remove_instance_from_context(instance)
|
||||
|
||||
def get_pre_create_attr_defs(self):
|
||||
output = [
|
||||
BoolDef("use_selection", default=True,
|
||||
label="Create only for selected"),
|
||||
BoolDef("create_multiple",
|
||||
default=True,
|
||||
label="Create separate instance for each selected"),
|
||||
BoolDef("use_layer_name",
|
||||
default=False,
|
||||
label="Use layer name in product"),
|
||||
BoolDef(
|
||||
"mark_for_review",
|
||||
label="Create separate review",
|
||||
default=False
|
||||
)
|
||||
]
|
||||
return output
|
||||
|
||||
def get_instance_attr_defs(self):
|
||||
return [
|
||||
BoolDef(
|
||||
"mark_for_review",
|
||||
label="Review"
|
||||
)
|
||||
]
|
||||
|
||||
def apply_settings(self, project_settings):
|
||||
plugin_settings = (
|
||||
project_settings["photoshop"]["create"]["ImageCreator"]
|
||||
)
|
||||
|
||||
self.active_on_create = plugin_settings["active_on_create"]
|
||||
self.default_variants = plugin_settings["default_variants"]
|
||||
self.mark_for_review = plugin_settings["mark_for_review"]
|
||||
self.enabled = plugin_settings["enabled"]
|
||||
|
||||
def get_detail_description(self):
|
||||
return """Creator for Image instances
|
||||
|
||||
Main publishable item in Photoshop will be of `image` product type.
|
||||
Result of this item (instance) is picture that could be loaded and
|
||||
used in another DCCs (for example as single layer in composition in
|
||||
AfterEffects, reference in Maya etc).
|
||||
|
||||
There are a couple of options for what to publish:
|
||||
- separate image per selected layer (or group of layers)
|
||||
- one image for all selected layers
|
||||
- all visible layers (groups) flattened into single image
|
||||
|
||||
In most cases you would like to keep `Create only for selected`
|
||||
toggled on and select what you would like to publish.
|
||||
Toggling this option off will allow you to create instance for all
|
||||
visible layers without a need to select them explicitly.
|
||||
|
||||
Use 'Create separate instance for each selected' to create separate
|
||||
images per selected layer (group of layers).
|
||||
|
||||
'Use layer name in product' will explicitly add layer name into
|
||||
product name. Position of this name is configurable in
|
||||
`project_settings/global/tools/creator/product_name_profiles`.
|
||||
If layer placeholder ({layer}) is not used in `product_name_profiles`
|
||||
but layer name should be used (set explicitly in UI or implicitly if
|
||||
multiple images should be created), it is added in capitalized form
|
||||
as a suffix to product name.
|
||||
|
||||
Each image could have its separate review created if necessary via
|
||||
`Create separate review` toggle.
|
||||
A more common use case is to use a separate `review` instance to create
a review from all published items.
|
||||
"""
|
||||
|
||||
def _handle_legacy(self, instance_data):
|
||||
"""Converts old instances to new format."""
|
||||
if not instance_data.get("members"):
|
||||
instance_data["members"] = [instance_data.get("uuid")]
|
||||
|
||||
if instance_data.get("uuid"):
|
||||
# uuid not needed, replaced with unique instance_id
|
||||
api.stub().remove_instance(instance_data.get("uuid"))
|
||||
instance_data.pop("uuid")
|
||||
|
||||
if not instance_data.get("task"):
|
||||
instance_data["task"] = self.create_context.get_current_task_name()
|
||||
|
||||
if not instance_data.get("variant"):
|
||||
instance_data["variant"] = ''
|
||||
|
||||
return instance_data
|
||||
|
||||
def _clean_highlights(self, stub, item):
|
||||
return item.replace(stub.PUBLISH_ICON, '').replace(stub.LOADED_ICON,
|
||||
'')
|
||||
|
||||
def get_dynamic_data(
|
||||
self,
|
||||
project_name,
|
||||
folder_entity,
|
||||
task_entity,
|
||||
variant,
|
||||
host_name,
|
||||
instance
|
||||
):
|
||||
if instance is not None:
|
||||
layer_name = instance.get("layer_name")
|
||||
if layer_name:
|
||||
return {"layer": layer_name}
|
||||
return {"layer": "{layer}"}
|
||||
|
|
@ -0,0 +1,28 @@
|
|||
from ayon_photoshop.lib import PSAutoCreator
|
||||
|
||||
|
||||
class ReviewCreator(PSAutoCreator):
|
||||
"""Creates review instance which might be disabled from publishing."""
|
||||
identifier = "review"
|
||||
product_type = "review"
|
||||
|
||||
default_variant = "Main"
|
||||
|
||||
def get_detail_description(self):
|
||||
return """Auto creator for review.
|
||||
|
||||
Photoshop review is created from all published images or from all
|
||||
visible layers if no `image` instances got created.
|
||||
|
||||
Review might be disabled by an artist (instance shouldn't be deleted as
|
||||
it will get recreated in next publish either way).
|
||||
"""
|
||||
|
||||
def apply_settings(self, project_settings):
|
||||
plugin_settings = (
|
||||
project_settings["photoshop"]["create"]["ReviewCreator"]
|
||||
)
|
||||
|
||||
self.default_variant = plugin_settings["default_variant"]
|
||||
self.active_on_create = plugin_settings["active_on_create"]
|
||||
self.enabled = plugin_settings["enabled"]
|
||||
|
|
@ -0,0 +1,28 @@
|
|||
from ayon_photoshop.lib import PSAutoCreator
|
||||
|
||||
|
||||
class WorkfileCreator(PSAutoCreator):
|
||||
identifier = "workfile"
|
||||
product_type = "workfile"
|
||||
|
||||
default_variant = "Main"
|
||||
|
||||
def get_detail_description(self):
|
||||
return """Auto creator for workfile.
|
||||
|
||||
It is expected that each publish will also publish its source workfile
|
||||
for safekeeping. This creator triggers automatically without need for
|
||||
an artist to remember and trigger it explicitly.
|
||||
|
||||
Workfile instance could be disabled if it is not required to publish
|
||||
workfile. (Instance shouldn't be deleted though as it will be recreated
|
||||
in next publish automatically).
|
||||
"""
|
||||
|
||||
def apply_settings(self, project_settings):
|
||||
plugin_settings = (
|
||||
project_settings["photoshop"]["create"]["WorkfileCreator"]
|
||||
)
|
||||
|
||||
self.active_on_create = plugin_settings["active_on_create"]
|
||||
self.enabled = plugin_settings["enabled"]
|
||||
|
|
@ -0,0 +1,86 @@
|
|||
import re
|
||||
|
||||
from ayon_core.pipeline import get_representation_path
|
||||
from ayon_photoshop import api as photoshop
|
||||
from ayon_photoshop.api import get_unique_layer_name
|
||||
|
||||
|
||||
class ImageLoader(photoshop.PhotoshopLoader):
|
||||
"""Load images
|
||||
|
||||
Stores the imported asset in a container named after the asset.
|
||||
"""
|
||||
|
||||
product_types = {"image", "render"}
|
||||
representations = {"*"}
|
||||
|
||||
def load(self, context, name=None, namespace=None, data=None):
|
||||
stub = self.get_stub()
|
||||
layer_name = get_unique_layer_name(
|
||||
stub.get_layers(),
|
||||
context["folder"]["name"],
|
||||
name
|
||||
)
|
||||
with photoshop.maintained_selection():
|
||||
path = self.filepath_from_context(context)
|
||||
layer = self.import_layer(path, layer_name, stub)
|
||||
|
||||
self[:] = [layer]
|
||||
namespace = namespace or layer_name
|
||||
|
||||
return photoshop.containerise(
|
||||
name,
|
||||
namespace,
|
||||
layer,
|
||||
context,
|
||||
self.__class__.__name__
|
||||
)
|
||||
|
||||
def update(self, container, context):
|
||||
""" Switch asset or change version """
|
||||
stub = self.get_stub()
|
||||
|
||||
layer = container.pop("layer")
|
||||
|
||||
repre_entity = context["representation"]
|
||||
folder_name = context["folder"]["name"]
|
||||
product_name = context["product"]["name"]
|
||||
|
||||
namespace_from_container = re.sub(r'_\d{3}$', '',
|
||||
container["namespace"])
|
||||
layer_name = "{}_{}".format(folder_name, product_name)
|
||||
# switching assets
|
||||
if namespace_from_container != layer_name:
|
||||
layer_name = get_unique_layer_name(
|
||||
stub.get_layers(), folder_name, product_name
|
||||
)
|
||||
else: # switching version - keep same name
|
||||
layer_name = container["namespace"]
|
||||
|
||||
path = get_representation_path(repre_entity)
|
||||
with photoshop.maintained_selection():
|
||||
stub.replace_smart_object(
|
||||
layer, path, layer_name
|
||||
)
|
||||
|
||||
stub.imprint(
|
||||
layer.id, {"representation": repre_entity["id"]}
|
||||
)
|
||||
|
||||
def remove(self, container):
|
||||
"""
|
||||
Removes element from scene: deletes layer + removes from Headline
|
||||
Args:
|
||||
container (dict): container to be removed - used to get layer_id
|
||||
"""
|
||||
stub = self.get_stub()
|
||||
|
||||
layer = container.pop("layer")
|
||||
stub.imprint(layer.id, {})
|
||||
stub.delete_layer(layer.id)
|
||||
|
||||
def switch(self, container, context):
|
||||
self.update(container, context)
|
||||
|
||||
def import_layer(self, file_name, layer_name, stub):
|
||||
return stub.import_smart_object(file_name, layer_name)
|
||||
|
|
@ -0,0 +1,95 @@
|
|||
import os
|
||||
|
||||
import qargparse
|
||||
|
||||
from ayon_photoshop import api as photoshop
|
||||
from ayon_photoshop.api import get_unique_layer_name
|
||||
|
||||
|
||||
class ImageFromSequenceLoader(photoshop.PhotoshopLoader):
|
||||
""" Load specific image from sequence
|
||||
|
||||
Used only as quick load of reference file from a sequence.
|
||||
|
||||
Plain ImageLoader picks first frame from sequence.
|
||||
|
||||
Loads only existing files - currently not possible to limit loaders
|
||||
to single select - multiselect. If user selects multiple repres, list
|
||||
for all of them is provided, but selection is only single file.
|
||||
This loader will be triggered multiple times, but selected name will
|
||||
match only to proper path.
|
||||
|
||||
Loader doesn't do containerization as there is currently no data model
|
||||
of 'frame of rendered files' (only rendered sequence), update would be
|
||||
difficult.
|
||||
"""
|
||||
|
||||
product_types = {"render"}
|
||||
representations = {"*"}
|
||||
options = []
|
||||
|
||||
def load(self, context, name=None, namespace=None, data=None):
|
||||
|
||||
path = self.filepath_from_context(context)
|
||||
if data.get("frame"):
|
||||
path = os.path.join(
|
||||
os.path.dirname(path), data["frame"]
|
||||
)
|
||||
if not os.path.exists(path):
|
||||
return
|
||||
|
||||
stub = self.get_stub()
|
||||
layer_name = get_unique_layer_name(
|
||||
stub.get_layers(), context["folder"]["name"], name
|
||||
)
|
||||
|
||||
with photoshop.maintained_selection():
|
||||
layer = stub.import_smart_object(path, layer_name)
|
||||
|
||||
self[:] = [layer]
|
||||
namespace = namespace or layer_name
|
||||
|
||||
return namespace
|
||||
|
||||
@classmethod
|
||||
def get_options(cls, repre_contexts):
|
||||
"""
|
||||
Returns list of files for selected 'repre_contexts'.
|
||||
|
||||
It returns only files with same extension as in context as it is
|
||||
expected that context points to sequence of frames.
|
||||
|
||||
Returns:
|
||||
(list) of qargparse.Choice
|
||||
"""
|
||||
files = []
|
||||
for context in repre_contexts:
|
||||
fname = cls.filepath_from_context(context)
|
||||
_, file_extension = os.path.splitext(fname)
|
||||
|
||||
for file_name in os.listdir(os.path.dirname(fname)):
|
||||
if not file_name.endswith(file_extension):
|
||||
continue
|
||||
files.append(file_name)
|
||||
|
||||
# return selection only if there is something
|
||||
if not files or len(files) <= 1:
|
||||
return []
|
||||
|
||||
return [
|
||||
qargparse.Choice(
|
||||
"frame",
|
||||
label="Select specific file",
|
||||
items=files,
|
||||
default=0,
|
||||
help="Which frame should be loaded?"
|
||||
)
|
||||
]
|
||||
|
||||
def update(self, container, context):
|
||||
"""No update possible, not containerized."""
|
||||
pass
|
||||
|
||||
def remove(self, container):
|
||||
"""No update possible, not containerized."""
|
||||
pass
|
||||
|
|
@ -0,0 +1,87 @@
|
|||
import re
|
||||
|
||||
from ayon_core.pipeline import get_representation_path
|
||||
from ayon_photoshop import api as photoshop
|
||||
from ayon_photoshop.api import get_unique_layer_name
|
||||
|
||||
|
||||
class ReferenceLoader(photoshop.PhotoshopLoader):
|
||||
"""Load reference images
|
||||
|
||||
Stores the imported asset in a container named after the asset.
|
||||
|
||||
Inheriting from 'load_image' didn't work because of
|
||||
"Cannot write to closing transport", possible refactor.
|
||||
"""
|
||||
|
||||
product_types = {"image", "render"}
|
||||
representations = {"*"}
|
||||
|
||||
def load(self, context, name=None, namespace=None, data=None):
|
||||
stub = self.get_stub()
|
||||
layer_name = get_unique_layer_name(
|
||||
stub.get_layers(), context["folder"]["name"], name
|
||||
)
|
||||
with photoshop.maintained_selection():
|
||||
path = self.filepath_from_context(context)
|
||||
layer = self.import_layer(path, layer_name, stub)
|
||||
|
||||
self[:] = [layer]
|
||||
namespace = namespace or layer_name
|
||||
|
||||
return photoshop.containerise(
|
||||
name,
|
||||
namespace,
|
||||
layer,
|
||||
context,
|
||||
self.__class__.__name__
|
||||
)
|
||||
|
||||
def update(self, container, context):
|
||||
""" Switch asset or change version."""
|
||||
stub = self.get_stub()
|
||||
layer = container.pop("layer")
|
||||
|
||||
folder_name = context["folder"]["name"]
|
||||
product_name = context["product"]["name"]
|
||||
repre_entity = context["representation"]
|
||||
|
||||
namespace_from_container = re.sub(r'_\d{3}$', '',
|
||||
container["namespace"])
|
||||
layer_name = "{}_{}".format(folder_name, product_name)
|
||||
# switching assets
|
||||
if namespace_from_container != layer_name:
|
||||
layer_name = get_unique_layer_name(
|
||||
stub.get_layers(), folder_name, product_name
|
||||
)
|
||||
else: # switching version - keep same name
|
||||
layer_name = container["namespace"]
|
||||
|
||||
path = get_representation_path(repre_entity)
|
||||
with photoshop.maintained_selection():
|
||||
stub.replace_smart_object(
|
||||
layer, path, layer_name
|
||||
)
|
||||
|
||||
stub.imprint(
|
||||
layer.id, {"representation": repre_entity["id"]}
|
||||
)
|
||||
|
||||
def remove(self, container):
|
||||
"""Removes element from scene: deletes layer + removes from Headline
|
||||
|
||||
Args:
|
||||
container (dict): container to be removed - used to get layer_id
|
||||
"""
|
||||
stub = self.get_stub()
|
||||
layer = container.pop("layer")
|
||||
stub.imprint(layer.id, {})
|
||||
stub.delete_layer(layer.id)
|
||||
|
||||
def switch(self, container, context):
|
||||
self.update(container, context)
|
||||
|
||||
def import_layer(self, file_name, layer_name, stub):
|
||||
return stub.import_smart_object(
|
||||
file_name, layer_name, as_reference=True
|
||||
)
|
||||
|
|
@ -0,0 +1,27 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""Close PS after publish. For Webpublishing only."""
|
||||
import pyblish.api
|
||||
|
||||
from ayon_photoshop import api as photoshop
|
||||
|
||||
|
||||
class ClosePS(pyblish.api.ContextPlugin):
|
||||
"""Close PS after publish. For Webpublishing only.
|
||||
"""
|
||||
|
||||
order = pyblish.api.IntegratorOrder + 14
|
||||
label = "Close PS"
|
||||
optional = True
|
||||
active = True
|
||||
|
||||
hosts = ["photoshop"]
|
||||
targets = ["automated"]
|
||||
|
||||
def process(self, context):
|
||||
self.log.info("ClosePS")
|
||||
|
||||
stub = photoshop.stub()
|
||||
self.log.info("Shutting down PS")
|
||||
stub.save()
|
||||
stub.close()
|
||||
self.log.info("PS closed")
|
||||
|
|
@ -0,0 +1,106 @@
|
|||
import pyblish.api
|
||||
|
||||
from ayon_photoshop import api as photoshop
|
||||
from ayon_core.pipeline.create import get_product_name
|
||||
|
||||
|
||||
class CollectAutoImage(pyblish.api.ContextPlugin):
|
||||
"""Creates auto image in non artist based publishes (Webpublisher).
|
||||
"""
|
||||
|
||||
label = "Collect Auto Image"
|
||||
hosts = ["photoshop"]
|
||||
order = pyblish.api.CollectorOrder + 0.2
|
||||
|
||||
targets = ["automated"]
|
||||
|
||||
def process(self, context):
|
||||
for instance in context:
|
||||
creator_identifier = instance.data.get("creator_identifier")
|
||||
if creator_identifier and creator_identifier == "auto_image":
|
||||
self.log.debug("Auto image instance found, won't create new")
|
||||
return
|
||||
|
||||
project_name = context.data["projectName"]
|
||||
proj_settings = context.data["project_settings"]
|
||||
host_name = context.data["hostName"]
|
||||
folder_entity = context.data["folderEntity"]
|
||||
task_entity = context.data["taskEntity"]
|
||||
task_name = task_type = None
|
||||
if task_entity:
|
||||
task_name = task_entity["name"]
|
||||
task_type = task_entity["taskType"]
|
||||
|
||||
auto_creator = proj_settings.get(
|
||||
"photoshop", {}).get(
|
||||
"create", {}).get(
|
||||
"AutoImageCreator", {})
|
||||
|
||||
if not auto_creator or not auto_creator["enabled"]:
|
||||
self.log.debug("Auto image creator disabled, won't create new")
|
||||
return
|
||||
|
||||
stub = photoshop.stub()
|
||||
stored_items = stub.get_layers_metadata()
|
||||
for item in stored_items:
|
||||
if item.get("creator_identifier") == "auto_image":
|
||||
if not item.get("active"):
|
||||
self.log.debug("Auto_image instance disabled")
|
||||
return
|
||||
|
||||
layer_items = stub.get_layers()
|
||||
|
||||
publishable_ids = [layer.id for layer in layer_items
|
||||
if layer.visible]
|
||||
|
||||
# collect stored image instances
|
||||
instance_names = []
|
||||
for layer_item in layer_items:
|
||||
layer_meta_data = stub.read(layer_item, stored_items)
|
||||
|
||||
# Skip layers without metadata.
|
||||
if layer_meta_data is None:
|
||||
continue
|
||||
|
||||
# Skip containers.
|
||||
if "container" in layer_meta_data["id"]:
|
||||
continue
|
||||
|
||||
# active might not be in legacy meta
|
||||
if layer_meta_data.get("active", True) and layer_item.visible:
|
||||
instance_names.append(layer_meta_data["productName"])
|
||||
|
||||
if len(instance_names) == 0:
|
||||
variants = proj_settings.get(
|
||||
"photoshop", {}).get(
|
||||
"create", {}).get(
|
||||
"CreateImage", {}).get(
|
||||
"default_variants", [''])
|
||||
product_type = "image"
|
||||
|
||||
variant = context.data.get("variant") or variants[0]
|
||||
|
||||
product_name = get_product_name(
|
||||
project_name,
|
||||
task_name,
|
||||
task_type,
|
||||
host_name,
|
||||
product_type,
|
||||
variant,
|
||||
)
|
||||
|
||||
instance = context.create_instance(product_name)
|
||||
instance.data["folderPath"] = folder_entity["path"]
|
||||
instance.data["productType"] = product_type
|
||||
instance.data["productName"] = product_name
|
||||
instance.data["ids"] = publishable_ids
|
||||
instance.data["publish"] = True
|
||||
instance.data["creator_identifier"] = "auto_image"
|
||||
instance.data["family"] = product_type
|
||||
instance.data["families"] = [product_type]
|
||||
|
||||
if auto_creator["mark_for_review"]:
|
||||
instance.data["creator_attributes"] = {"mark_for_review": True}
|
||||
instance.data["families"].append("review")
|
||||
|
||||
self.log.info("auto image instance: {} ".format(instance.data))
|
||||
|
|
@ -0,0 +1,23 @@
|
|||
import pyblish.api
|
||||
|
||||
from ayon_photoshop import api as photoshop
|
||||
|
||||
|
||||
class CollectAutoImageRefresh(pyblish.api.ContextPlugin):
|
||||
"""Refreshes auto_image instance with currently visible layers..
|
||||
"""
|
||||
|
||||
label = "Collect Auto Image Refresh"
|
||||
hosts = ["photoshop"]
|
||||
order = pyblish.api.CollectorOrder + 0.2
|
||||
|
||||
def process(self, context):
|
||||
for instance in context:
|
||||
creator_identifier = instance.data.get("creator_identifier")
|
||||
if creator_identifier and creator_identifier == "auto_image":
|
||||
self.log.debug("Auto image instance found, won't create new")
|
||||
# refresh existing auto image instance with current visible
|
||||
publishable_ids = [layer.id for layer in photoshop.stub().get_layers() # noqa
|
||||
if layer.visible]
|
||||
instance.data["ids"] = publishable_ids
|
||||
return
|
||||
|
|
@ -0,0 +1,96 @@
|
|||
"""
|
||||
Requires:
|
||||
None
|
||||
|
||||
Provides:
|
||||
instance -> productType ("review")
|
||||
"""
|
||||
import pyblish.api
|
||||
|
||||
from ayon_photoshop import api as photoshop
|
||||
from ayon_core.pipeline.create import get_product_name
|
||||
|
||||
|
||||
class CollectAutoReview(pyblish.api.ContextPlugin):
|
||||
"""Create review instance in non artist based workflow.
|
||||
|
||||
Called only if PS is triggered in Webpublisher or in tests.
|
||||
"""
|
||||
|
||||
label = "Collect Auto Review"
|
||||
hosts = ["photoshop"]
|
||||
order = pyblish.api.CollectorOrder + 0.2
|
||||
targets = ["automated"]
|
||||
|
||||
publish = True
|
||||
|
||||
def process(self, context):
|
||||
product_type = "review"
|
||||
has_review = False
|
||||
for instance in context:
|
||||
if instance.data["productType"] == product_type:
|
||||
self.log.debug("Review instance found, won't create new")
|
||||
has_review = True
|
||||
|
||||
creator_attributes = instance.data.get("creator_attributes", {})
|
||||
if (creator_attributes.get("mark_for_review") and
|
||||
"review" not in instance.data["families"]):
|
||||
instance.data["families"].append("review")
|
||||
|
||||
if has_review:
|
||||
return
|
||||
|
||||
stub = photoshop.stub()
|
||||
stored_items = stub.get_layers_metadata()
|
||||
for item in stored_items:
|
||||
if item.get("creator_identifier") == product_type:
|
||||
if not item.get("active"):
|
||||
self.log.debug("Review instance disabled")
|
||||
return
|
||||
|
||||
auto_creator = context.data["project_settings"].get(
|
||||
"photoshop", {}).get(
|
||||
"create", {}).get(
|
||||
"ReviewCreator", {})
|
||||
|
||||
if not auto_creator or not auto_creator["enabled"]:
|
||||
self.log.debug("Review creator disabled, won't create new")
|
||||
return
|
||||
|
||||
variant = (context.data.get("variant") or
|
||||
auto_creator["default_variant"])
|
||||
|
||||
project_name = context.data["projectName"]
|
||||
proj_settings = context.data["project_settings"]
|
||||
host_name = context.data["hostName"]
|
||||
folder_entity = context.data["folderEntity"]
|
||||
task_entity = context.data["taskEntity"]
|
||||
task_name = task_type = None
|
||||
if task_entity:
|
||||
task_name = task_entity["name"]
|
||||
task_type = task_entity["taskType"]
|
||||
|
||||
product_name = get_product_name(
|
||||
project_name,
|
||||
task_name,
|
||||
task_type,
|
||||
host_name,
|
||||
product_type,
|
||||
variant,
|
||||
project_settings=proj_settings
|
||||
)
|
||||
|
||||
instance = context.create_instance(product_name)
|
||||
instance.data.update({
|
||||
"label": product_name,
|
||||
"name": product_name,
|
||||
"productName": product_name,
|
||||
"productType": product_type,
|
||||
"family": product_type,
|
||||
"families": [product_type],
|
||||
"representations": [],
|
||||
"folderPath": folder_entity["path"],
|
||||
"publish": self.publish
|
||||
})
|
||||
|
||||
self.log.debug("auto review created::{}".format(instance.data))
|
||||
|
|
@ -0,0 +1,104 @@
|
|||
import os
|
||||
import pyblish.api
|
||||
|
||||
from ayon_photoshop import api as photoshop
|
||||
from ayon_core.pipeline.create import get_product_name
|
||||
|
||||
|
||||
class CollectAutoWorkfile(pyblish.api.ContextPlugin):
|
||||
"""Collect current script for publish."""
|
||||
|
||||
order = pyblish.api.CollectorOrder + 0.2
|
||||
label = "Collect Workfile"
|
||||
hosts = ["photoshop"]
|
||||
|
||||
targets = ["automated"]
|
||||
|
||||
def process(self, context):
|
||||
product_type = "workfile"
|
||||
file_path = context.data["currentFile"]
|
||||
_, ext = os.path.splitext(file_path)
|
||||
staging_dir = os.path.dirname(file_path)
|
||||
base_name = os.path.basename(file_path)
|
||||
workfile_representation = {
|
||||
"name": ext[1:],
|
||||
"ext": ext[1:],
|
||||
"files": base_name,
|
||||
"stagingDir": staging_dir,
|
||||
}
|
||||
|
||||
for instance in context:
|
||||
if instance.data["productType"] == product_type:
|
||||
self.log.debug("Workfile instance found, won't create new")
|
||||
instance.data.update({
|
||||
"label": base_name,
|
||||
"name": base_name,
|
||||
"representations": [],
|
||||
})
|
||||
|
||||
# creating representation
|
||||
_, ext = os.path.splitext(file_path)
|
||||
instance.data["representations"].append(
|
||||
workfile_representation)
|
||||
|
||||
return
|
||||
|
||||
stub = photoshop.stub()
|
||||
stored_items = stub.get_layers_metadata()
|
||||
for item in stored_items:
|
||||
if item.get("creator_identifier") == product_type:
|
||||
if not item.get("active"):
|
||||
self.log.debug("Workfile instance disabled")
|
||||
return
|
||||
|
||||
project_name = context.data["projectName"]
|
||||
proj_settings = context.data["project_settings"]
|
||||
auto_creator = proj_settings.get(
|
||||
"photoshop", {}).get(
|
||||
"create", {}).get(
|
||||
"WorkfileCreator", {})
|
||||
|
||||
if not auto_creator or not auto_creator["enabled"]:
|
||||
self.log.debug("Workfile creator disabled, won't create new")
|
||||
return
|
||||
|
||||
# context.data["variant"] might come only from collect_batch_data
|
||||
variant = (context.data.get("variant") or
|
||||
auto_creator["default_variant"])
|
||||
|
||||
task_name = context.data["task"]
|
||||
host_name = context.data["hostName"]
|
||||
folder_entity = context.data["folderEntity"]
|
||||
task_entity = context.data["taskEntity"]
|
||||
task_name = task_type = None
|
||||
if task_entity:
|
||||
task_name = task_entity["name"]
|
||||
task_type = task_entity["taskType"]
|
||||
|
||||
product_name = get_product_name(
|
||||
project_name,
|
||||
task_name,
|
||||
task_type,
|
||||
host_name,
|
||||
product_type,
|
||||
variant,
|
||||
project_settings=proj_settings
|
||||
)
|
||||
|
||||
# Create instance
|
||||
instance = context.create_instance(product_name)
|
||||
instance.data.update({
|
||||
"label": base_name,
|
||||
"name": base_name,
|
||||
"productName": product_name,
|
||||
"productType": product_type,
|
||||
"family": product_type,
|
||||
"families": [product_type],
|
||||
"representations": [],
|
||||
"folderPath": folder_entity["path"]
|
||||
})
|
||||
|
||||
# creating representation
|
||||
instance.data["representations"].append(workfile_representation)
|
||||
|
||||
self.log.debug("auto workfile review created:{}".format(instance.data))
|
||||
|
|
@ -0,0 +1,78 @@
|
|||
"""Parses batch context from json and continues in publish process.
|
||||
|
||||
Provides:
|
||||
context -> Loaded batch file.
|
||||
- folderPath
|
||||
- task (task name)
|
||||
- taskType
|
||||
- project_name
|
||||
- variant
|
||||
|
||||
Code is practically copy of `openype/hosts/webpublish/collect_batch_data` as
|
||||
webpublisher should be eventually ejected as an addon, eg. mentioned plugin
|
||||
shouldn't be pushed into general publish plugins.
|
||||
"""
|
||||
|
||||
import os
|
||||
|
||||
import pyblish.api
|
||||
|
||||
from openpype_modules.webpublisher.lib import (
|
||||
get_batch_context_info,
|
||||
parse_json
|
||||
)
|
||||
from ayon_core.lib import is_in_tests
|
||||
|
||||
|
||||
class CollectBatchData(pyblish.api.ContextPlugin):
|
||||
"""Collect batch data from json stored in 'AYON_PUBLISH_DATA' env dir.
|
||||
|
||||
The directory must contain 'manifest.json' file where batch data should be
|
||||
stored.
|
||||
"""
|
||||
# must be really early, context values are only in json file
|
||||
order = pyblish.api.CollectorOrder - 0.495
|
||||
label = "Collect batch data"
|
||||
hosts = ["photoshop"]
|
||||
targets = ["webpublish"]
|
||||
|
||||
def process(self, context):
|
||||
self.log.info("CollectBatchData")
|
||||
batch_dir = (
|
||||
os.environ.get("AYON_PUBLISH_DATA")
|
||||
or os.environ.get("OPENPYPE_PUBLISH_DATA")
|
||||
)
|
||||
if is_in_tests():
|
||||
self.log.debug("Automatic testing, no batch data, skipping")
|
||||
return
|
||||
|
||||
assert batch_dir, (
|
||||
"Missing `AYON_PUBLISH_DATA`")
|
||||
|
||||
assert os.path.exists(batch_dir), \
|
||||
"Folder {} doesn't exist".format(batch_dir)
|
||||
|
||||
project_name = os.environ.get("AYON_PROJECT_NAME")
|
||||
if project_name is None:
|
||||
raise AssertionError(
|
||||
"Environment `AYON_PROJECT_NAME` was not found."
|
||||
"Could not set project `root` which may cause issues."
|
||||
)
|
||||
|
||||
batch_data = parse_json(os.path.join(batch_dir, "manifest.json"))
|
||||
|
||||
context.data["batchDir"] = batch_dir
|
||||
context.data["batchData"] = batch_data
|
||||
|
||||
folder_path, task_name, task_type = get_batch_context_info(
|
||||
batch_data["context"]
|
||||
)
|
||||
|
||||
os.environ["AYON_FOLDER_PATH"] = folder_path
|
||||
os.environ["AYON_TASK_NAME"] = task_name
|
||||
|
||||
context.data["folderPath"] = folder_path
|
||||
context.data["task"] = task_name
|
||||
context.data["taskType"] = task_type
|
||||
context.data["project_name"] = project_name
|
||||
context.data["variant"] = batch_data["variant"]
|
||||
|
|
@ -0,0 +1,268 @@
|
|||
import os
|
||||
import re
|
||||
|
||||
import pyblish.api
|
||||
|
||||
from ayon_core.lib import prepare_template_data, is_in_tests
|
||||
from ayon_core.settings import get_project_settings
|
||||
from ayon_photoshop import api as photoshop
|
||||
|
||||
|
||||
class CollectColorCodedInstances(pyblish.api.ContextPlugin):
|
||||
"""Creates instances for layers marked by configurable color.
|
||||
|
||||
Used in remote publishing when artists marks publishable layers by color-
|
||||
coding. Top level layers (group) must be marked by specific color to be
|
||||
published as an instance of 'image' product type.
|
||||
|
||||
Can add group for all publishable layers to allow creation of flattened
|
||||
image. (Cannot contain special background layer as it cannot be grouped!)
|
||||
|
||||
Based on value `create_flatten_image` from Settings:
|
||||
- "yes": create flattened 'image' product of all publishable layers + create
|
||||
'image' product per publishable layer
|
||||
- "only": create ONLY flattened 'image' product of all publishable layers
|
||||
- "no": do not create flattened 'image' product at all,
|
||||
only separate products per marked layer.
|
||||
|
||||
Identifier:
|
||||
id (str): "ayon.create.instance"
|
||||
"""
|
||||
|
||||
label = "Collect Color-coded Instances"
|
||||
order = pyblish.api.CollectorOrder
|
||||
hosts = ["photoshop"]
|
||||
targets = ["automated"]
|
||||
|
||||
# configurable by Settings
|
||||
color_code_mapping = []
|
||||
create_flatten_image = "no"
|
||||
flatten_product_name_template = ""
|
||||
|
||||
def process(self, context):
|
||||
self.log.info("CollectColorCodedInstances")
|
||||
batch_dir = (
|
||||
os.environ.get("AYON_PUBLISH_DATA")
|
||||
or os.environ.get("OPENPYPE_PUBLISH_DATA")
|
||||
)
|
||||
if (
|
||||
is_in_tests()
|
||||
and (
|
||||
not batch_dir or not os.path.exists(batch_dir)
|
||||
)
|
||||
):
|
||||
self.log.debug("Automatic testing, no batch data, skipping")
|
||||
return
|
||||
|
||||
existing_product_names = self._get_existing_product_names(context)
|
||||
|
||||
# from CollectBatchData
|
||||
folder_path = context.data["folderPath"]
|
||||
task_name = context.data["task"]
|
||||
variant = context.data["variant"]
|
||||
project_name = context.data["projectEntity"]["name"]
|
||||
|
||||
naming_conventions = get_project_settings(project_name).get(
|
||||
"photoshop", {}).get(
|
||||
"publish", {}).get(
|
||||
"ValidateNaming", {})
|
||||
|
||||
stub = photoshop.stub()
|
||||
layers = stub.get_layers()
|
||||
|
||||
publishable_layers = []
|
||||
created_instances = []
|
||||
product_type_from_settings = None
|
||||
for layer in layers:
|
||||
self.log.debug("Layer:: {}".format(layer))
|
||||
if layer.parents:
|
||||
self.log.debug("!!! Not a top layer, skip")
|
||||
continue
|
||||
|
||||
if not layer.visible:
|
||||
self.log.debug("Not visible, skip")
|
||||
continue
|
||||
|
||||
resolved_product_type, resolved_product_template = (
|
||||
self._resolve_mapping(layer)
|
||||
)
|
||||
|
||||
if not resolved_product_template or not resolved_product_type:
|
||||
self.log.debug("!!! Not found product type or template, skip")
|
||||
continue
|
||||
|
||||
if not product_type_from_settings:
|
||||
product_type_from_settings = resolved_product_type
|
||||
|
||||
fill_pairs = {
|
||||
"variant": variant,
|
||||
"family": resolved_product_type,
|
||||
"product": {"type": resolved_product_type},
|
||||
"task": task_name,
|
||||
"layer": layer.clean_name
|
||||
}
|
||||
|
||||
product_name = resolved_product_template.format(
|
||||
**prepare_template_data(fill_pairs))
|
||||
|
||||
product_name = self._clean_product_name(
|
||||
stub, naming_conventions, product_name, layer
|
||||
)
|
||||
|
||||
if product_name in existing_product_names:
|
||||
self.log.info((
|
||||
"Product {} already created, skipping."
|
||||
).format(product_name))
|
||||
continue
|
||||
|
||||
if self.create_flatten_image != "flatten_only":
|
||||
instance = self._create_instance(
|
||||
context,
|
||||
layer,
|
||||
resolved_product_type,
|
||||
folder_path,
|
||||
product_name,
|
||||
task_name
|
||||
)
|
||||
created_instances.append(instance)
|
||||
|
||||
existing_product_names.append(product_name)
|
||||
publishable_layers.append(layer)
|
||||
|
||||
if self.create_flatten_image != "no" and publishable_layers:
|
||||
self.log.debug("create_flatten_image")
|
||||
if not self.flatten_product_name_template:
|
||||
self.log.warning("No template for flatten image")
|
||||
return
|
||||
|
||||
fill_pairs.pop("layer")
|
||||
product_name = self.flatten_product_name_template.format(
|
||||
**prepare_template_data(fill_pairs))
|
||||
|
||||
first_layer = publishable_layers[0] # dummy layer
|
||||
first_layer.name = product_name
|
||||
product_type = product_type_from_settings # inherit product type
|
||||
instance = self._create_instance(
|
||||
context,
|
||||
first_layer,
|
||||
product_type,
|
||||
folder_path,
|
||||
product_name,
|
||||
task_name
|
||||
)
|
||||
instance.data["ids"] = [layer.id for layer in publishable_layers]
|
||||
created_instances.append(instance)
|
||||
|
||||
for instance in created_instances:
|
||||
# Produce diagnostic message for any graphical
|
||||
# user interface interested in visualising it.
|
||||
self.log.info("Found: \"%s\" " % instance.data["name"])
|
||||
self.log.info("instance: {} ".format(instance.data))
|
||||
|
||||
def _get_existing_product_names(self, context):
|
||||
"""Collect manually created instances from workfile.
|
||||
|
||||
Shouldn't be any as Webpublisher bypass publishing via Openpype, but
|
||||
might be some if workfile published through OP is reused.
|
||||
"""
|
||||
existing_product_names = []
|
||||
for instance in context:
|
||||
if instance.data.get("publish") is not False:
|
||||
existing_product_names.append(instance.data.get("productName"))
|
||||
|
||||
return existing_product_names
|
||||
|
||||
def _create_instance(
|
||||
self,
|
||||
context,
|
||||
layer,
|
||||
product_type,
|
||||
folder_path,
|
||||
product_name,
|
||||
task_name
|
||||
):
|
||||
instance = context.create_instance(layer.name)
|
||||
instance.data["publish"] = True
|
||||
instance.data["productType"] = product_type
|
||||
instance.data["productName"] = product_name
|
||||
instance.data["folderPath"] = folder_path
|
||||
instance.data["task"] = task_name
|
||||
instance.data["layer"] = layer
|
||||
instance.data["family"] = product_type
|
||||
instance.data["families"] = [product_type]
|
||||
|
||||
return instance
|
||||
|
||||
def _resolve_mapping(self, layer):
|
||||
"""Matches 'layer' color code and name to mapping.
|
||||
|
||||
If both color code AND name regex is configured, BOTH must be valid
|
||||
If layer matches to multiple mappings, only first is used!
|
||||
"""
|
||||
product_type_list = []
|
||||
product_name_list = []
|
||||
for mapping in self.color_code_mapping:
|
||||
if (
|
||||
mapping["color_code"]
|
||||
and layer.color_code not in mapping["color_code"]
|
||||
):
|
||||
continue
|
||||
|
||||
if (
|
||||
mapping["layer_name_regex"]
|
||||
and not any(
|
||||
re.search(pattern, layer.name)
|
||||
for pattern in mapping["layer_name_regex"]
|
||||
)
|
||||
):
|
||||
continue
|
||||
|
||||
product_type_list.append(mapping["product_type"])
|
||||
product_name_list.append(mapping["product_name_template"])
|
||||
|
||||
if len(product_name_list) > 1:
|
||||
self.log.warning(
|
||||
"Multiple mappings found for '{}'".format(layer.name)
|
||||
)
|
||||
self.log.warning("Only first product name template used!")
|
||||
product_name_list[:] = product_name_list[0]
|
||||
|
||||
if len(product_type_list) > 1:
|
||||
self.log.warning(
|
||||
"Multiple mappings found for '{}'".format(layer.name)
|
||||
)
|
||||
self.log.warning("Only first product type used!")
|
||||
product_type_list[:] = product_type_list[0]
|
||||
|
||||
resolved_product_template = None
|
||||
if product_name_list:
|
||||
resolved_product_template = product_name_list.pop()
|
||||
|
||||
product_type = None
|
||||
if product_type_list:
|
||||
product_type = product_type_list.pop()
|
||||
|
||||
self.log.debug("resolved_product_type {}".format(product_type))
|
||||
self.log.debug("resolved_product_template {}".format(
|
||||
resolved_product_template))
|
||||
return product_type, resolved_product_template
|
||||
|
||||
def _clean_product_name(
|
||||
self, stub, naming_conventions, product_name, layer
|
||||
):
|
||||
"""Cleans invalid characters from product name and layer name."""
|
||||
if re.search(naming_conventions["invalid_chars"], product_name):
|
||||
product_name = re.sub(
|
||||
naming_conventions["invalid_chars"],
|
||||
naming_conventions["replace_char"],
|
||||
product_name
|
||||
)
|
||||
layer_name = re.sub(
|
||||
naming_conventions["invalid_chars"],
|
||||
naming_conventions["replace_char"],
|
||||
layer.clean_name
|
||||
)
|
||||
layer.name = layer_name
|
||||
stub.rename_layer(layer.id, layer_name)
|
||||
|
||||
return product_name
|
||||
|
|
@ -0,0 +1,18 @@
|
|||
import os
|
||||
|
||||
import pyblish.api
|
||||
|
||||
from ayon_photoshop import api as photoshop
|
||||
|
||||
|
||||
class CollectCurrentFile(pyblish.api.ContextPlugin):
|
||||
"""Inject the current working file into context"""
|
||||
|
||||
order = pyblish.api.CollectorOrder - 0.49
|
||||
label = "Current File"
|
||||
hosts = ["photoshop"]
|
||||
|
||||
def process(self, context):
|
||||
context.data["currentFile"] = os.path.normpath(
|
||||
photoshop.stub().get_active_document_full_name()
|
||||
).replace("\\", "/")
|
||||
|
|
@ -0,0 +1,57 @@
|
|||
import os
|
||||
import re
|
||||
import pyblish.api
|
||||
|
||||
from ayon_photoshop import api as photoshop
|
||||
|
||||
|
||||
class CollectExtensionVersion(pyblish.api.ContextPlugin):
|
||||
""" Pulls and compares version of installed extension.
|
||||
|
||||
It is recommended to use same extension as in provided Openpype code.
|
||||
|
||||
Please use Anastasiy’s Extension Manager or ZXPInstaller to update
|
||||
extension in case of an error.
|
||||
|
||||
You can locate extension.zxp in your installed Openpype code in
|
||||
`repos/avalon-core/avalon/photoshop`
|
||||
"""
|
||||
# This technically should be a validator, but other collectors might be
|
||||
# impacted with usage of obsolete extension, so collector that runs first
|
||||
# was chosen
|
||||
order = pyblish.api.CollectorOrder - 0.5
|
||||
label = "Collect extension version"
|
||||
hosts = ["photoshop"]
|
||||
|
||||
optional = True
|
||||
active = True
|
||||
|
||||
def process(self, context):
|
||||
installed_version = photoshop.stub().get_extension_version()
|
||||
|
||||
if not installed_version:
|
||||
raise ValueError("Unknown version, probably old extension")
|
||||
|
||||
manifest_url = os.path.join(os.path.dirname(photoshop.__file__),
|
||||
"extension", "CSXS", "manifest.xml")
|
||||
|
||||
if not os.path.exists(manifest_url):
|
||||
self.log.debug("Unable to locate extension manifest, not checking")
|
||||
return
|
||||
|
||||
expected_version = None
|
||||
with open(manifest_url) as fp:
|
||||
content = fp.read()
|
||||
|
||||
found = re.findall(r'(ExtensionBundleVersion=")([0-9\.]+)(")',
|
||||
content)
|
||||
if found:
|
||||
expected_version = found[0][1]
|
||||
|
||||
if expected_version != installed_version:
|
||||
msg = "Expected version '{}' found '{}'\n".format(
|
||||
expected_version, installed_version)
|
||||
msg += "Please update your installed extension, it might not work "
|
||||
msg += "properly."
|
||||
|
||||
raise ValueError(msg)
|
||||
|
|
@ -0,0 +1,20 @@
|
|||
import pyblish.api
|
||||
|
||||
from ayon_photoshop import api
|
||||
|
||||
|
||||
class CollectImage(pyblish.api.InstancePlugin):
|
||||
"""Collect layer metadata into a instance.
|
||||
|
||||
Used later in validation
|
||||
"""
|
||||
order = pyblish.api.CollectorOrder + 0.200
|
||||
label = 'Collect Image'
|
||||
|
||||
hosts = ["photoshop"]
|
||||
families = ["image"]
|
||||
|
||||
def process(self, instance):
|
||||
if instance.data.get("members"):
|
||||
layer = api.stub().get_layer(instance.data["members"][0])
|
||||
instance.data["layer"] = layer
|
||||
|
|
@ -0,0 +1,63 @@
|
|||
"""Collects published version of workfile and increments it.
|
||||
|
||||
For synchronization of published image and workfile version it is required
|
||||
to store workfile version from workfile file name in context.data["version"].
|
||||
In remote publishing this name is unreliable (artist might not follow naming
|
||||
convention etc.), last published workfile version for particular workfile
|
||||
product is used instead.
|
||||
|
||||
This plugin runs only in remote publishing (eg. Webpublisher).
|
||||
|
||||
Requires:
|
||||
context.data["folderEntity"]
|
||||
|
||||
Provides:
|
||||
context["version"] - incremented latest published workfile version
|
||||
"""
|
||||
|
||||
import pyblish.api
|
||||
import ayon_api
|
||||
|
||||
from ayon_core.pipeline.version_start import get_versioning_start
|
||||
|
||||
|
||||
class CollectPublishedVersion(pyblish.api.ContextPlugin):
|
||||
"""Collects published version of workfile and increments it."""
|
||||
|
||||
order = pyblish.api.CollectorOrder + 0.190
|
||||
label = "Collect published version"
|
||||
hosts = ["photoshop"]
|
||||
targets = ["automated"]
|
||||
|
||||
def process(self, context):
|
||||
workfile_product_name = None
|
||||
for instance in context:
|
||||
if instance.data["productType"] == "workfile":
|
||||
workfile_product_name = instance.data["productName"]
|
||||
break
|
||||
|
||||
if not workfile_product_name:
|
||||
self.log.warning("No workfile instance found, "
|
||||
"synchronization of version will not work.")
|
||||
return
|
||||
|
||||
project_name = context.data["projectName"]
|
||||
folder_id = context.data["folderEntity"]["id"]
|
||||
|
||||
version_entity = ayon_api.get_last_version_by_product_name(
|
||||
project_name, workfile_product_name, folder_id
|
||||
)
|
||||
|
||||
if version_entity:
|
||||
version_int = int(version_entity["version"]) + 1
|
||||
else:
|
||||
version_int = get_versioning_start(
|
||||
project_name,
|
||||
"photoshop",
|
||||
task_name=context.data["task"],
|
||||
task_type=context.data["taskType"],
|
||||
project_settings=context.data["project_settings"]
|
||||
)
|
||||
|
||||
self.log.debug(f"Setting {version_int} to context.")
|
||||
context.data["version"] = version_int
|
||||
|
|
@ -0,0 +1,25 @@
|
|||
"""
|
||||
Requires:
|
||||
None
|
||||
|
||||
Provides:
|
||||
instance -> family ("review")
|
||||
"""
|
||||
|
||||
import pyblish.api
|
||||
|
||||
|
||||
class CollectReview(pyblish.api.ContextPlugin):
|
||||
"""Adds review to families for instances marked to be reviewable.
|
||||
"""
|
||||
|
||||
label = "Collect Review"
|
||||
hosts = ["photoshop"]
|
||||
order = pyblish.api.CollectorOrder + 0.1
|
||||
|
||||
def process(self, context):
|
||||
for instance in context:
|
||||
creator_attributes = instance.data["creator_attributes"]
|
||||
if (creator_attributes.get("mark_for_review") and
|
||||
"review" not in instance.data["families"]):
|
||||
instance.data["families"].append("review")
|
||||
|
|
@ -0,0 +1,29 @@
|
|||
import pyblish.api
|
||||
|
||||
|
||||
class CollectVersion(pyblish.api.InstancePlugin):
|
||||
"""Collect version for publishable instances.
|
||||
|
||||
Used to synchronize version from workfile to all publishable instances:
|
||||
- image (manually created or color coded)
|
||||
- review
|
||||
- workfile
|
||||
|
||||
Dev comment:
|
||||
Explicit collector created to control this from single place and not from
|
||||
3 different.
|
||||
|
||||
Workfile set here explicitly as version might to be forced from latest + 1
|
||||
because of Webpublisher.
|
||||
(This plugin must run after CollectPublishedVersion!)
|
||||
"""
|
||||
order = pyblish.api.CollectorOrder + 0.200
|
||||
label = 'Collect Version'
|
||||
|
||||
hosts = ["photoshop"]
|
||||
families = ["image", "review", "workfile"]
|
||||
|
||||
def process(self, instance):
|
||||
workfile_version = instance.context.data["version"]
|
||||
self.log.debug(f"Applying version {workfile_version}")
|
||||
instance.data["version"] = workfile_version
|
||||
|
|
@ -0,0 +1,30 @@
|
|||
import os
|
||||
import pyblish.api
|
||||
|
||||
|
||||
class CollectWorkfile(pyblish.api.ContextPlugin):
|
||||
"""Collect current script for publish."""
|
||||
|
||||
order = pyblish.api.CollectorOrder + 0.1
|
||||
label = "Collect Workfile"
|
||||
hosts = ["photoshop"]
|
||||
|
||||
default_variant = "Main"
|
||||
|
||||
def process(self, context):
|
||||
for instance in context:
|
||||
if instance.data["productType"] == "workfile":
|
||||
file_path = context.data["currentFile"]
|
||||
_, ext = os.path.splitext(file_path)
|
||||
staging_dir = os.path.dirname(file_path)
|
||||
base_name = os.path.basename(file_path)
|
||||
|
||||
# creating representation
|
||||
_, ext = os.path.splitext(file_path)
|
||||
instance.data["representations"].append({
|
||||
"name": ext[1:],
|
||||
"ext": ext[1:],
|
||||
"files": base_name,
|
||||
"stagingDir": staging_dir,
|
||||
})
|
||||
return
|
||||
|
|
@ -0,0 +1,101 @@
|
|||
import os
|
||||
|
||||
import pyblish.api
|
||||
from ayon_core.pipeline import publish
|
||||
from ayon_photoshop import api as photoshop
|
||||
|
||||
|
||||
class ExtractImage(pyblish.api.ContextPlugin):
|
||||
"""Extract all layers (groups) marked for publish.
|
||||
|
||||
Usually publishable instance is created as a wrapper of layer(s). For each
|
||||
publishable instance so many images as there is 'formats' is created.
|
||||
|
||||
Logic tries to hide/unhide layers minimum times.
|
||||
|
||||
Called once for all publishable instances.
|
||||
"""
|
||||
|
||||
order = publish.Extractor.order - 0.48
|
||||
label = "Extract Image"
|
||||
hosts = ["photoshop"]
|
||||
|
||||
families = ["image", "background"]
|
||||
formats = ["png", "jpg"]
|
||||
|
||||
def process(self, context):
|
||||
stub = photoshop.stub()
|
||||
hidden_layer_ids = set()
|
||||
|
||||
all_layers = stub.get_layers()
|
||||
for layer in all_layers:
|
||||
if not layer.visible:
|
||||
hidden_layer_ids.add(layer.id)
|
||||
stub.hide_all_others_layers_ids([], layers=all_layers)
|
||||
|
||||
with photoshop.maintained_selection():
|
||||
with photoshop.maintained_visibility(layers=all_layers):
|
||||
for instance in context:
|
||||
if instance.data["productType"] not in self.families:
|
||||
continue
|
||||
|
||||
staging_dir = self.staging_dir(instance)
|
||||
self.log.info("Outputting image to {}".format(staging_dir))
|
||||
|
||||
# Perform extraction
|
||||
files = {}
|
||||
ids = set()
|
||||
# real layers and groups
|
||||
members = instance.data("members")
|
||||
if members:
|
||||
ids.update(set([int(member) for member in members]))
|
||||
# virtual groups collected by color coding or auto_image
|
||||
add_ids = instance.data.pop("ids", None)
|
||||
if add_ids:
|
||||
ids.update(set(add_ids))
|
||||
extract_ids = set([ll.id for ll in stub.
|
||||
get_layers_in_layers_ids(ids, all_layers)
|
||||
if ll.id not in hidden_layer_ids])
|
||||
|
||||
for extracted_id in extract_ids:
|
||||
stub.set_visible(extracted_id, True)
|
||||
|
||||
file_basename = os.path.splitext(
|
||||
stub.get_active_document_name()
|
||||
)[0]
|
||||
for extension in self.formats:
|
||||
_filename = "{}.{}".format(file_basename,
|
||||
extension)
|
||||
files[extension] = _filename
|
||||
|
||||
full_filename = os.path.join(staging_dir,
|
||||
_filename)
|
||||
stub.saveAs(full_filename, extension, True)
|
||||
self.log.info(f"Extracted: {extension}")
|
||||
|
||||
representations = []
|
||||
for extension, filename in files.items():
|
||||
representations.append({
|
||||
"name": extension,
|
||||
"ext": extension,
|
||||
"files": filename,
|
||||
"stagingDir": staging_dir
|
||||
})
|
||||
instance.data["representations"] = representations
|
||||
instance.data["stagingDir"] = staging_dir
|
||||
|
||||
self.log.info(f"Extracted {instance} to {staging_dir}")
|
||||
|
||||
for extracted_id in extract_ids:
|
||||
stub.set_visible(extracted_id, False)
|
||||
|
||||
def staging_dir(self, instance):
|
||||
"""Provide a temporary directory in which to store extracted files
|
||||
|
||||
Upon calling this method the staging directory is stored inside
|
||||
the instance.data['stagingDir']
|
||||
"""
|
||||
|
||||
from ayon_core.pipeline.publish import get_instance_staging_dir
|
||||
|
||||
return get_instance_staging_dir(instance)
|
||||
|
|
@ -0,0 +1,328 @@
|
|||
import os
|
||||
import shutil
|
||||
from PIL import Image
|
||||
|
||||
from ayon_core.lib import (
|
||||
run_subprocess,
|
||||
get_ffmpeg_tool_args,
|
||||
)
|
||||
from ayon_core.pipeline import publish
|
||||
from ayon_photoshop import api as photoshop
|
||||
|
||||
|
||||
class ExtractReview(publish.Extractor):
|
||||
"""
|
||||
Produce a flattened or sequence image files from all 'image' instances.
|
||||
|
||||
If no 'image' instance is created, it produces flattened image from
|
||||
all visible layers.
|
||||
|
||||
It creates review, thumbnail and mov representations.
|
||||
|
||||
'review' family could be used in other steps as a reference, as it
|
||||
contains flattened image by default. (Eg. artist could load this
|
||||
review as a single item and see full image. In most cases 'image'
|
||||
product type is separated by layers to better usage in animation
|
||||
or comp.)
|
||||
"""
|
||||
|
||||
label = "Extract Review"
|
||||
hosts = ["photoshop"]
|
||||
families = ["review"]
|
||||
|
||||
# Extract Options
|
||||
jpg_options = None
|
||||
mov_options = None
|
||||
make_image_sequence = None
|
||||
max_downscale_size = 8192
|
||||
|
||||
def process(self, instance):
|
||||
staging_dir = self.staging_dir(instance)
|
||||
self.log.info("Outputting image to {}".format(staging_dir))
|
||||
|
||||
fps = instance.data.get("fps", 25)
|
||||
stub = photoshop.stub()
|
||||
self.output_seq_filename = os.path.splitext(
|
||||
stub.get_active_document_name())[0] + ".%04d.jpg"
|
||||
|
||||
layers = self._get_layers_from_image_instances(instance)
|
||||
self.log.info("Layers image instance found: {}".format(layers))
|
||||
|
||||
repre_name = "jpg"
|
||||
repre_skeleton = {
|
||||
"name": repre_name,
|
||||
"ext": "jpg",
|
||||
"stagingDir": staging_dir,
|
||||
"tags": self.jpg_options['tags'],
|
||||
}
|
||||
|
||||
if instance.data["productType"] != "review":
|
||||
self.log.debug(
|
||||
"Existing extracted file from image product type used."
|
||||
)
|
||||
# enable creation of review, without this jpg review would clash
|
||||
# with jpg of the image product type
|
||||
output_name = repre_name
|
||||
repre_name = "{}_{}".format(repre_name, output_name)
|
||||
repre_skeleton.update({"name": repre_name,
|
||||
"outputName": output_name})
|
||||
|
||||
img_file = self.output_seq_filename % 0
|
||||
self._prepare_file_for_image_product_type(
|
||||
img_file, instance, staging_dir
|
||||
)
|
||||
repre_skeleton.update({
|
||||
"files": img_file,
|
||||
})
|
||||
processed_img_names = [img_file]
|
||||
elif self.make_image_sequence and len(layers) > 1:
|
||||
self.log.debug("Extract layers to image sequence.")
|
||||
img_list = self._save_sequence_images(staging_dir, layers)
|
||||
|
||||
repre_skeleton.update({
|
||||
"frameStart": 0,
|
||||
"frameEnd": len(img_list),
|
||||
"fps": fps,
|
||||
"files": img_list,
|
||||
})
|
||||
processed_img_names = img_list
|
||||
else:
|
||||
self.log.debug("Extract layers to flatten image.")
|
||||
img_file = self._save_flatten_image(staging_dir, layers)
|
||||
|
||||
repre_skeleton.update({
|
||||
"files": img_file,
|
||||
})
|
||||
processed_img_names = [img_file]
|
||||
|
||||
instance.data["representations"].append(repre_skeleton)
|
||||
|
||||
ffmpeg_args = get_ffmpeg_tool_args("ffmpeg")
|
||||
|
||||
instance.data["stagingDir"] = staging_dir
|
||||
|
||||
source_files_pattern = os.path.join(staging_dir,
|
||||
self.output_seq_filename)
|
||||
source_files_pattern = self._check_and_resize(processed_img_names,
|
||||
source_files_pattern,
|
||||
staging_dir)
|
||||
self._generate_thumbnail(
|
||||
list(ffmpeg_args),
|
||||
instance,
|
||||
source_files_pattern,
|
||||
staging_dir)
|
||||
|
||||
no_of_frames = len(processed_img_names)
|
||||
if no_of_frames > 1:
|
||||
self._generate_mov(
|
||||
list(ffmpeg_args),
|
||||
instance,
|
||||
fps,
|
||||
no_of_frames,
|
||||
source_files_pattern,
|
||||
staging_dir)
|
||||
|
||||
self.log.info(f"Extracted {instance} to {staging_dir}")
|
||||
|
||||
def _prepare_file_for_image_product_type(
|
||||
self, img_file, instance, staging_dir
|
||||
):
|
||||
"""Converts existing file for image product type to .jpg
|
||||
|
||||
Image instance could have its own separate review (instance per layer
|
||||
for example). This uses extracted file instead of extracting again.
|
||||
Args:
|
||||
img_file (str): name of output file (with 0000 value for ffmpeg
|
||||
later)
|
||||
instance:
|
||||
staging_dir (str): temporary folder where extracted file is located
|
||||
"""
|
||||
repre_file = instance.data["representations"][0]
|
||||
source_file_path = os.path.join(repre_file["stagingDir"],
|
||||
repre_file["files"])
|
||||
if not os.path.exists(source_file_path):
|
||||
raise RuntimeError(f"{source_file_path} doesn't exist for "
|
||||
"review to create from")
|
||||
_, ext = os.path.splitext(repre_file["files"])
|
||||
if ext != ".jpg":
|
||||
im = Image.open(source_file_path)
|
||||
if (im.mode in ('RGBA', 'LA') or (
|
||||
im.mode == 'P' and 'transparency' in im.info)):
|
||||
# without this it produces messy low quality jpg
|
||||
rgb_im = Image.new("RGBA", (im.width, im.height), "#ffffff")
|
||||
rgb_im.alpha_composite(im)
|
||||
rgb_im.convert("RGB").save(os.path.join(staging_dir, img_file))
|
||||
else:
|
||||
im.save(os.path.join(staging_dir, img_file))
|
||||
else:
|
||||
# handles already .jpg
|
||||
shutil.copy(source_file_path,
|
||||
os.path.join(staging_dir, img_file))
|
||||
|
||||
def _generate_mov(self, ffmpeg_path, instance, fps, no_of_frames,
|
||||
source_files_pattern, staging_dir):
|
||||
"""Generates .mov to upload to Ftrack.
|
||||
|
||||
Args:
|
||||
ffmpeg_path (str): path to ffmpeg
|
||||
instance (Pyblish Instance)
|
||||
fps (str)
|
||||
no_of_frames (int):
|
||||
source_files_pattern (str): name of source file
|
||||
staging_dir (str): temporary location to store thumbnail
|
||||
Updates:
|
||||
instance - adds representation portion
|
||||
"""
|
||||
# Generate mov.
|
||||
mov_path = os.path.join(staging_dir, "review.mov")
|
||||
self.log.info(f"Generate mov review: {mov_path}")
|
||||
args = ffmpeg_path + [
|
||||
"-y",
|
||||
"-i", source_files_pattern,
|
||||
"-vf", "pad=ceil(iw/2)*2:ceil(ih/2)*2",
|
||||
"-vframes", str(no_of_frames),
|
||||
mov_path
|
||||
]
|
||||
self.log.debug("mov args:: {}".format(args))
|
||||
_output = run_subprocess(args)
|
||||
instance.data["representations"].append({
|
||||
"name": "mov",
|
||||
"ext": "mov",
|
||||
"files": os.path.basename(mov_path),
|
||||
"stagingDir": staging_dir,
|
||||
"frameStart": 1,
|
||||
"frameEnd": no_of_frames,
|
||||
"fps": fps,
|
||||
"tags": self.mov_options['tags']
|
||||
})
|
||||
|
||||
def _generate_thumbnail(
|
||||
self, ffmpeg_args, instance, source_files_pattern, staging_dir
|
||||
):
|
||||
"""Generates scaled down thumbnail and adds it as representation.
|
||||
|
||||
Args:
|
||||
ffmpeg_path (str): path to ffmpeg
|
||||
instance (Pyblish Instance)
|
||||
source_files_pattern (str): name of source file
|
||||
staging_dir (str): temporary location to store thumbnail
|
||||
Updates:
|
||||
instance - adds representation portion
|
||||
"""
|
||||
# Generate thumbnail
|
||||
thumbnail_path = os.path.join(staging_dir, "thumbnail.jpg")
|
||||
self.log.info(f"Generate thumbnail {thumbnail_path}")
|
||||
args = ffmpeg_args + [
|
||||
"-y",
|
||||
"-i", source_files_pattern,
|
||||
"-vf", "scale=300:-1",
|
||||
"-vframes", "1",
|
||||
thumbnail_path
|
||||
]
|
||||
self.log.debug("thumbnail args:: {}".format(args))
|
||||
_output = run_subprocess(args)
|
||||
instance.data["representations"].append({
|
||||
"name": "thumbnail",
|
||||
"ext": "jpg",
|
||||
"outputName": "thumb",
|
||||
"files": os.path.basename(thumbnail_path),
|
||||
"stagingDir": staging_dir,
|
||||
"tags": ["thumbnail", "delete"]
|
||||
})
|
||||
instance.data["thumbnailPath"] = thumbnail_path
|
||||
|
||||
def _check_and_resize(self, processed_img_names, source_files_pattern,
|
||||
staging_dir):
|
||||
"""Check if saved image could be used in ffmpeg.
|
||||
|
||||
Ffmpeg has max size 16384x16384. Saved image(s) must be resized to be
|
||||
used as a source for thumbnail or review mov.
|
||||
"""
|
||||
Image.MAX_IMAGE_PIXELS = None
|
||||
first_url = os.path.join(staging_dir, processed_img_names[0])
|
||||
with Image.open(first_url) as im:
|
||||
width, height = im.size
|
||||
|
||||
if width > self.max_downscale_size or height > self.max_downscale_size:
|
||||
resized_dir = os.path.join(staging_dir, "resized")
|
||||
os.mkdir(resized_dir)
|
||||
source_files_pattern = os.path.join(resized_dir,
|
||||
self.output_seq_filename)
|
||||
for file_name in processed_img_names:
|
||||
source_url = os.path.join(staging_dir, file_name)
|
||||
with Image.open(source_url) as res_img:
|
||||
# 'thumbnail' automatically keeps aspect ratio
|
||||
res_img.thumbnail((self.max_downscale_size,
|
||||
self.max_downscale_size),
|
||||
Image.ANTIALIAS)
|
||||
res_img.save(os.path.join(resized_dir, file_name))
|
||||
|
||||
return source_files_pattern
|
||||
|
||||
def _get_layers_from_image_instances(self, instance):
|
||||
"""Collect all layers from 'instance'.
|
||||
|
||||
Returns:
|
||||
(list) of PSItem
|
||||
"""
|
||||
layers = []
|
||||
# creating review for existing 'image' instance
|
||||
if (
|
||||
instance.data["productType"] == "image"
|
||||
and instance.data.get("layer")
|
||||
):
|
||||
layers.append(instance.data["layer"])
|
||||
return layers
|
||||
|
||||
for image_instance in instance.context:
|
||||
if image_instance.data["productType"] != "image":
|
||||
continue
|
||||
if not image_instance.data.get("layer"):
|
||||
# dummy instance for flatten image
|
||||
continue
|
||||
layers.append(image_instance.data.get("layer"))
|
||||
|
||||
return sorted(layers)
|
||||
|
||||
def _save_flatten_image(self, staging_dir, layers):
|
||||
"""Creates flat image from 'layers' into 'staging_dir'.
|
||||
|
||||
Returns:
|
||||
(str): path to new image
|
||||
"""
|
||||
img_filename = self.output_seq_filename % 0
|
||||
output_image_path = os.path.join(staging_dir, img_filename)
|
||||
stub = photoshop.stub()
|
||||
|
||||
with photoshop.maintained_visibility():
|
||||
self.log.info("Extracting {}".format(layers))
|
||||
if layers:
|
||||
stub.hide_all_others_layers(layers)
|
||||
|
||||
stub.saveAs(output_image_path, 'jpg', True)
|
||||
|
||||
return img_filename
|
||||
|
||||
def _save_sequence_images(self, staging_dir, layers):
|
||||
"""Creates separate flat images from 'layers' into 'staging_dir'.
|
||||
|
||||
Used as source for multi frames .mov to review at once.
|
||||
Returns:
|
||||
(list): paths to new images
|
||||
"""
|
||||
stub = photoshop.stub()
|
||||
|
||||
list_img_filename = []
|
||||
with photoshop.maintained_visibility():
|
||||
for i, layer in enumerate(layers):
|
||||
self.log.info("Extracting {}".format(layer))
|
||||
|
||||
img_filename = self.output_seq_filename % i
|
||||
output_image_path = os.path.join(staging_dir, img_filename)
|
||||
list_img_filename.append(img_filename)
|
||||
|
||||
with photoshop.maintained_visibility():
|
||||
stub.hide_all_others_layers([layer])
|
||||
stub.saveAs(output_image_path, 'jpg', True)
|
||||
|
||||
return list_img_filename
|
||||
|
|
@@ -0,0 +1,14 @@
from ayon_core.pipeline import publish
from ayon_photoshop import api as photoshop


class ExtractSaveScene(publish.Extractor):
    """Save scene before extraction."""

    order = publish.Extractor.order - 0.49
    label = "Extract Save Scene"
    hosts = ["photoshop"]
    families = ["workfile"]

    def process(self, instance):
        photoshop.stub().save()
@@ -0,0 +1,20 @@
<?xml version="1.0" encoding="UTF-8"?>
<root>
    <error id="main">
        <title>Folder does not match</title>
        <description>
## Collected folder path is not the same as in context

{msg}
### How to repair?
{repair_msg}
Refresh Publish afterwards (circle arrow at the bottom right).

If that's not the correct value, close the workfile and reopen it via Workfiles
to get the proper context folder path, OR disable this validator and publish
again if you are deliberately publishing to a different context.

(Context means the combination of project, folder path and task name.)
        </description>
    </error>
</root>
@@ -0,0 +1,21 @@
<?xml version="1.0" encoding="UTF-8"?>
<root>
    <error id="main">
        <title>Product name</title>
        <description>
## Invalid product or layer name

Product or layer name cannot contain specific characters (spaces etc.) which
could cause issues when the product name is used in a published file name.
{msg}

### How to repair?

You can fix this with the "Repair" button on the right, then press the Refresh
publishing button at the bottom right.
        </description>
        <detail>
### __Detailed Info__ (optional)

Not all characters are allowed in file names on every OS. The disallowed
characters can be configured in Settings.
        </detail>
    </error>
</root>
@@ -0,0 +1,32 @@
import os
import pyblish.api
from ayon_core.pipeline.publish import get_errored_plugins_from_context
from ayon_core.lib import version_up

from ayon_photoshop import api as photoshop


class IncrementWorkfile(pyblish.api.InstancePlugin):
    """Increment the current workfile.

    Saves the current scene with an increased version number.
    """

    label = "Increment Workfile"
    order = pyblish.api.IntegratorOrder + 9.0
    hosts = ["photoshop"]
    families = ["workfile"]
    optional = True

    def process(self, instance):
        errored_plugins = get_errored_plugins_from_context(instance.context)
        if errored_plugins:
            raise RuntimeError(
                "Skipping incrementing current file because publishing failed."
            )

        scene_path = version_up(instance.context.data["currentFile"])
        _, ext = os.path.splitext(scene_path)
        photoshop.stub().saveAs(scene_path, ext[1:], True)

        self.log.info("Incremented workfile to: {}".format(scene_path))
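
`version_up` above comes from `ayon_core.lib` and is expected to return the current workfile path with its version token bumped. The snippet below is only an illustrative stand-in for that idea under an assumed `_v###` naming pattern, not the real implementation:

```
# Illustrative stand-in for the version-bump idea only; the real
# ayon_core.lib.version_up has its own rules and should be used instead.
import os
import re


def bump_version(path):
    """Return e.g. 'scene_v001.psd' -> 'scene_v002.psd' (assumed pattern)."""
    head, tail = os.path.split(path)
    match = re.search(r"_v(\d+)", tail)
    if not match:
        raise ValueError("No version token found in {}".format(tail))
    padding = len(match.group(1))
    new_version = int(match.group(1)) + 1
    new_tail = re.sub(
        r"_v\d+", "_v{:0{}d}".format(new_version, padding), tail, count=1
    )
    return os.path.join(head, new_tail)
```
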
@@ -0,0 +1,76 @@
import pyblish.api

from ayon_core.pipeline import get_current_folder_path
from ayon_core.pipeline.publish import (
    ValidateContentsOrder,
    PublishXmlValidationError,
    OptionalPyblishPluginMixin
)
from ayon_photoshop import api as photoshop


class ValidateInstanceFolderRepair(pyblish.api.Action):
    """Repair the instance folder."""

    label = "Repair"
    icon = "wrench"
    on = "failed"

    def process(self, context, plugin):

        # Get the errored instances
        failed = []
        for result in context.data["results"]:
            if (
                result["error"] is not None
                and result["instance"] is not None
                and result["instance"] not in failed
            ):
                failed.append(result["instance"])

        # Apply pyblish.logic to get the instances for the plug-in
        instances = pyblish.api.instances_by_plugin(failed, plugin)
        stub = photoshop.stub()
        current_folder_path = get_current_folder_path()
        for instance in instances:
            data = stub.read(instance[0])
            data["folderPath"] = current_folder_path
            stub.imprint(instance[0], data)


class ValidateInstanceAsset(OptionalPyblishPluginMixin,
                            pyblish.api.InstancePlugin):
    """Validate the instance folder is the currently selected context folder.

    As multiple workfiles might be opened, switching between them would mess
    with the selected context. In that case outputs might be published under
    the wrong folder!

    Repair action will use the Context folder value (from Workfiles or
    Launcher). Closing and reopening via Workfiles will refresh the Context
    value.
    """

    label = "Validate Instance Folder"
    hosts = ["photoshop"]
    optional = True
    actions = [ValidateInstanceFolderRepair]
    order = ValidateContentsOrder

    def process(self, instance):
        instance_folder_path = instance.data["folderPath"]
        current_folder_path = get_current_folder_path()

        if instance_folder_path != current_folder_path:
            msg = (
                f"Instance folder {instance_folder_path} is not the same"
                f" as current context {current_folder_path}."
            )
            repair_msg = (
                "Repair with 'Repair' button"
                f" to use '{current_folder_path}'.\n"
            )
            formatting_data = {"msg": msg,
                               "repair_msg": repair_msg}
            raise PublishXmlValidationError(self, msg,
                                            formatting_data=formatting_data)
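
The `formatting_data` dictionary above is what fills the `{msg}` and `{repair_msg}` placeholders in the folder-mismatch help XML shown earlier. A rough sketch of that substitution, assuming plain `str.format()` semantics; the actual rendering done by `PublishXmlValidationError` may differ:

```
# Rough sketch of how formatting_data could fill the help XML shown earlier.
# Assumes plain str.format() semantics; the real PublishXmlValidationError
# may resolve and render the XML description differently.
description_template = (
    "## Collected folder path is not the same as in context\n"
    "{msg}\n"
    "### How to repair?\n"
    "{repair_msg}"
)

formatting_data = {
    "msg": "Instance folder /shots/sh010 is not the same"
           " as current context /shots/sh020.",
    "repair_msg": "Repair with 'Repair' button to use '/shots/sh020'.\n",
}

print(description_template.format(**formatting_data))
```
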
@@ -0,0 +1,116 @@
import re

import pyblish.api

from ayon_photoshop import api as photoshop
from ayon_core.pipeline.create import PRODUCT_NAME_ALLOWED_SYMBOLS
from ayon_core.pipeline.publish import (
    ValidateContentsOrder,
    PublishXmlValidationError,
)


class ValidateNamingRepair(pyblish.api.Action):
    """Repair invalid product and layer names."""

    label = "Repair"
    icon = "wrench"
    on = "failed"

    def process(self, context, plugin):

        # Get the errored instances
        failed = []
        for result in context.data["results"]:
            if (
                result["error"] is not None
                and result["instance"] is not None
                and result["instance"] not in failed
            ):
                failed.append(result["instance"])

        invalid_chars, replace_char = plugin.get_replace_chars()
        self.log.debug("{} --- {}".format(invalid_chars, replace_char))

        # Apply pyblish.logic to get the instances for the plug-in
        instances = pyblish.api.instances_by_plugin(failed, plugin)
        stub = photoshop.stub()
        for instance in instances:
            self.log.debug("validate_naming instance {}".format(instance))
            current_layer_state = stub.get_layer(instance.data["layer"].id)
            self.log.debug("current_layer {}".format(current_layer_state))

            layer_meta = stub.read(current_layer_state)
            instance_id = (layer_meta.get("instance_id") or
                           layer_meta.get("uuid"))
            if not instance_id:
                self.log.warning("Unable to repair, cannot find layer")
                continue

            layer_name = re.sub(invalid_chars,
                                replace_char,
                                current_layer_state.clean_name)
            layer_name = stub.PUBLISH_ICON + layer_name

            stub.rename_layer(current_layer_state.id, layer_name)

            product_name = re.sub(invalid_chars, replace_char,
                                  instance.data["productName"])

            # format from Tool Creator
            product_name = re.sub(
                "[^{}]+".format(PRODUCT_NAME_ALLOWED_SYMBOLS),
                "",
                product_name
            )

            layer_meta["productName"] = product_name
            stub.imprint(instance_id, layer_meta)

        return True


class ValidateNaming(pyblish.api.InstancePlugin):
    """Validate the instance name.

    Spaces in names are not allowed. They will be replaced with underscores.
    """

    label = "Validate Naming"
    hosts = ["photoshop"]
    order = ValidateContentsOrder
    families = ["image"]
    actions = [ValidateNamingRepair]

    # configured by Settings
    invalid_chars = ''
    replace_char = ''

    def process(self, instance):
        help_msg = ' Use Repair button to fix it and then refresh publish.'

        layer = instance.data.get("layer")
        if layer:
            msg = "Name \"{}\" is not allowed.{}".format(
                layer.clean_name, help_msg
            )
            formatting_data = {"msg": msg}
            if re.search(self.invalid_chars, layer.clean_name):
                raise PublishXmlValidationError(
                    self, msg, formatting_data=formatting_data
                )

        product_name = instance.data["productName"]
        msg = "Product \"{}\" is not allowed.{}".format(
            product_name, help_msg
        )
        formatting_data = {"msg": msg}
        if re.search(self.invalid_chars, product_name):
            raise PublishXmlValidationError(
                self, msg, formatting_data=formatting_data
            )

    @classmethod
    def get_replace_chars(cls):
        """Pass values configured in Settings for Repair."""
        return cls.invalid_chars, cls.replace_char
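
The repair action above sanitizes names in two passes: the Settings-driven `invalid_chars` pattern is replaced with `replace_char`, then anything outside `PRODUCT_NAME_ALLOWED_SYMBOLS` is dropped, mirroring the Publisher creator's product-name rule. A minimal sketch with assumed example values; the real patterns come from Settings and `ayon_core`:

```
# Minimal sketch of the two-pass sanitization. The pattern values below are
# assumptions for illustration; the real ones come from Settings and
# ayon_core.pipeline.create.PRODUCT_NAME_ALLOWED_SYMBOLS.
import re

INVALID_CHARS = r"[ \.]"          # assumed Settings value
REPLACE_CHAR = "_"                # assumed Settings value
ALLOWED_SYMBOLS = "a-zA-Z0-9_"    # assumed allowed-symbols set


def sanitize_product_name(name):
    name = re.sub(INVALID_CHARS, REPLACE_CHAR, name)
    # Drop anything that is still not an allowed symbol.
    return re.sub("[^{}]+".format(ALLOWED_SYMBOLS), "", name)


print(sanitize_product_name("image Main.final"))  # -> image_Main_final
```
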
Binary file not shown.
3
server_addon/photoshop/client/ayon_photoshop/version.py
Normal file
@@ -0,0 +1,3 @@
# -*- coding: utf-8 -*-
"""Package declaring AYON addon 'photoshop' version."""
__version__ = "0.2.0"
6
server_addon/photoshop/client/pyproject.toml
Normal file
@@ -0,0 +1,6 @@
[project]
name="photoshop"
description="AYON Photoshop addon."

[ayon.runtimeDependencies]
wsrpc_aiohttp = "^3.1.1" # websocket server
@@ -1,3 +1,10 @@
name = "photoshop"
title = "Photoshop"
version = "0.1.3"
version = "0.2.0"

client_dir = "ayon_photoshop"

ayon_required_addons = {
    "core": ">0.3.2",
}
ayon_compatible_addons = {}