mirror of https://github.com/ynput/ayon-core.git
synced 2025-12-25 05:14:40 +01:00

feat(ppro): wip publishing clips

This commit is contained in:
parent 8a484e830c
commit b04805a451

38 changed files with 254 additions and 7770 deletions
@@ -24,6 +24,7 @@ class CollectContextDataFromAport(pyblish.api.ContextPlugin):
     order = pyblish.api.CollectorOrder - 0.49

     def process(self, context):
         self.log.info("registered_hosts: `{}`".format(pyblish.api.registered_hosts()))
         io.install()

         # get json paths from data
         input_json_path = os.environ.get("AC_PUBLISH_INPATH")
@@ -32,6 +33,8 @@ class CollectContextDataFromAport(pyblish.api.ContextPlugin):
         rqst_json_data_path = Path(input_json_path)
         post_json_data_path = Path(output_json_path)

         context.data['post_json_data_path'] = str(post_json_data_path)

         # get avalon session data and convert \ to /
         _S = avalon.session
@@ -61,10 +64,6 @@ class CollectContextDataFromAport(pyblish.api.ContextPlugin):
         context.data["hostVersion"] = \
             _S["AVALON_APP_VERSION"] = host_version

         # register pyblish for filtering of hosts in plugins
         pyblish.api.deregister_all_hosts()
         pyblish.api.register_host(host)

         # get current file
         current_file = json_data.get("currentFile", None)
         assert current_file, "No `currentFile` data in json file"
@@ -37,11 +37,6 @@ class CollectInstancesFromJson(pyblish.api.ContextPlugin):
         presets = context.data["presets"][host]

         rules_tasks = presets["rules_tasks"]
         ftrack_types = rules_tasks["ftrackTypes"]
         assert ftrack_types, ("No `ftrack_types` data in"
                               "`/presets/[host]/rules_tasks.json` file")

         context.data["ftrackTypes"] = ftrack_types

         asset_default = presets["asset_default"]
         assert asset_default, ("No `asset_default` data in"
@@ -61,14 +56,15 @@ class CollectInstancesFromJson(pyblish.api.ContextPlugin):
         # get handles > first try from asset data
         handle_start = context.data["assetData"].get("handleStart", None)
         handle_end = context.data["assetData"].get("handleEnd", None)
-        if not all([handle_start, handle_end]):
+        if (handle_start is None) or (handle_end is None):
             # get frame start > second try from parent data
-            handle_start = asset_default["handleStart"]
-            handle_end = asset_default["handleEnd"]
+            handle_start = asset_default.get("handleStart", None)
+            handle_end = asset_default.get("handleEnd", None)

-        assert all([
-            handle_start,
-            handle_end]), ("No `handle_start, handle_end` data found")
+        assert (
+            (handle_start is not None) or (
+                handle_end is not None)), (
+            "No `handle_start, handle_end` data found")

         instances = []
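The `is None` rewrite above matters because a handle of `0` frames is valid but falsy, so the old `all([...])` test would wrongly fall back to the defaults. A minimal standalone sketch of the two-tier lookup (the dicts are hypothetical stand-ins for the collected asset data and presets; note the committed assert keeps `or`, which still passes when only one handle is present):

```python
# Hypothetical stand-ins for context.data["assetData"] and the preset defaults.
asset_data = {"handleStart": 0, "handleEnd": 12}      # 0 is a legitimate handle
asset_default = {"handleStart": 10, "handleEnd": 10}

handle_start = asset_data.get("handleStart", None)
handle_end = asset_data.get("handleEnd", None)

# Truthiness (`not all([...])`) would discard the valid 0 above;
# explicit `is None` checks keep it and only then consult the defaults.
if (handle_start is None) or (handle_end is None):
    handle_start = asset_default.get("handleStart", None)
    handle_end = asset_default.get("handleEnd", None)

print(handle_start, handle_end)  # -> 0 12, not the 10/10 fallback
```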
@@ -76,46 +72,52 @@ class CollectInstancesFromJson(pyblish.api.ContextPlugin):
         name, ext = os.path.splitext(current_file)

         # get current file host
-        family = "projectfile"
-        families = "filesave"
+        family = "workfile"
         subset_name = "{0}{1}".format(task, 'Default')
         instance_name = "{0}_{1}_{2}".format(name,
                                              family,
                                              subset_name)
         # Set label
-        label = "{0} - {1} > {2}".format(name, task, families)
+        label = "{0} - {1}".format(name, task)

-        # get project file instance Data
-        pf_instance = [inst for inst in instances_data
-                       if inst.get("family", None) in 'projectfile']
-        self.log.debug('pf_instance: {}'.format(pf_instance))
-        # get working file into instance for publishing
-        instance = context.create_instance(instance_name)
-        if pf_instance:
-            instance.data.update(pf_instance[0])
-        instance.data.update({
-            "subset": subset_name,
-            "stagingDir": staging_dir,
-            "task": task,
-            "representation": ext[1:],
-            "host": host,
-            "asset": asset,
-            "label": label,
-            "name": name,
-            # "hierarchy": hierarchy,
-            # "parents": parents,
-            "family": family,
-            "families": [families, 'ftrack'],
-            "publish": True,
-            # "files": files_list
-        })
-        instances.append(instance)
+        wf_instance = next((inst for inst in instances_data
+                            if inst.get("family", None) in 'workfile'), None)
+
+        if wf_instance:
+            self.log.debug('wf_instance: {}'.format(wf_instance))
+
+            version = int(wf_instance.get("version", None))
+            # get working file into instance for publishing
+            instance = context.create_instance(instance_name)
+            instance.data.update(wf_instance)
+
+            instance.data.update({
+                "subset": subset_name,
+                "stagingDir": staging_dir,
+                "task": task,
+                "representations": [{
+                    "files": current_file,
+                    'stagingDir': staging_dir,
+                    'name': "projectfile",
+                    'ext': ext[1:]
+                }],
+                "host": host,
+                "asset": asset,
+                "label": label,
+                "name": name,
+                "family": family,
+                "families": ["ftrack"],
+                "publish": True,
+                "version": version
+            })
+            instances.append(instance)

         for inst in instances_data:
             # for key, value in inst.items():
             #     self.log.debug('instance[key]: {}'.format(key))
             #
-            version = inst.get("version", None)
+            version = int(inst.get("version", None))
             assert version, "No `version` string in json file"

             name = asset = inst.get("name", None)
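Note that `inst.get("family", None) in 'workfile'` is substring membership on a string, not an equality test, so any family whose text is contained in `"workfile"` also matches. A short sketch with hypothetical family names:

```python
# Substring membership vs. equality; the family names are hypothetical.
for family in ("workfile", "work", "file", "clip"):
    print(family, family in "workfile", family == "workfile")
# workfile True  True
# work     True  False   <- substring test matches too
# file     True  False   <- substring test matches too
# clip     False False
```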
@@ -125,7 +127,7 @@ class CollectInstancesFromJson(pyblish.api.ContextPlugin):
             assert family, "No `family` key in json_data.instance: {}".format(
                 inst)

-            if family in 'projectfile':
+            if family in 'workfile':
                 continue

             files_list = inst.get("files", None)
@@ -151,14 +153,10 @@ class CollectInstancesFromJson(pyblish.api.ContextPlugin):
             # create list of tasks for creation
             if not inst.get('tasks', None):
                 inst['tasks'] = list()
             if not inst.get('tasksTypes', None):
                 inst['tasksTypes'] = {}

             # append task into list for later hierarchy creation
             ftrack_task_type = ftrack_types[task]
             if task not in inst['tasks']:
                 inst['tasks'].append(task)
                 inst['tasksTypes'][task] = ftrack_task_type

             host = rules_tasks["taskHost"][task]
             subsets = rules_tasks["taskSubsets"][task]
@@ -187,7 +185,7 @@ class CollectInstancesFromJson(pyblish.api.ContextPlugin):
                 family = subset
                 subset_name = "{0}{1}".format(subset, "Main")
             elif "reference" in subset:
-                family = "render"
+                family = "review"
                 subset_name = "{0}{1}".format(family, "Reference")
             else:
                 subset_name = "{0}{1}".format(subset, 'Default')
@@ -199,15 +197,13 @@ class CollectInstancesFromJson(pyblish.api.ContextPlugin):

             instance = context.create_instance(name)
             files = [f for f in files_list
-                     if subset in f or "thumbnail" in f
-                     ]
+                     if subset in f or "thumbnail" in f]

             instance.data.update({
                 "subset": subset_name,
                 "stagingDir": staging_dir,
                 "tasks": subset_dict[subset],
                 "taskTypes": inst['tasksTypes'],
-                "fstart": frame_start,
+                "frameStart": frame_start,
                 "handleStart": handle_start,
                 "handleEnd": handle_end,
                 "host": host,
@@ -221,6 +217,8 @@ class CollectInstancesFromJson(pyblish.api.ContextPlugin):
                 "family": family,
                 "families": [subset, inst["family"], 'ftrack'],
                 "jsonData": inst,
                 "jsonReprSubset": subset,
                 "jsonReprExt": ext,
                 "publish": True,
                 "version": version})
             self.log.info(
@@ -229,9 +227,6 @@ class CollectInstancesFromJson(pyblish.api.ContextPlugin):

         context.data["instances"] = instances

         # Sort/grouped by family (preserving local index)
         # context[:] = sorted(context, key=self.sort_by_task)

         self.log.debug("context: {}".format(context))

     def sort_by_task(self, instance):
@@ -2,7 +2,7 @@
 import json
 import clique
 import pyblish.api

 from pypeapp import Anatomy


 class ExtractJSON(pyblish.api.ContextPlugin):
     """ Extract all instances to a serialized json file. """
@@ -14,28 +14,27 @@ class ExtractJSON(pyblish.api.ContextPlugin):
         json_path = context.data['post_json_data_path']

         data = dict(self.serialize(context.data()))
         # self.log.info(data)

-        instances_data = []
-        for instance in context:
-
-            iData = {}
-            for key, value in instance.data.items():
-                if isinstance(value, clique.Collection):
-                    value = value.format()
-
-                try:
-                    json.dumps(value)
-                    iData[key] = value
-                except KeyError:
-                    msg = "\"{0}\"".format(value)
-                    msg += " in instance.data[\"{0}\"]".format(key)
-                    msg += " could not be serialized."
-                    self.log.debug(msg)
-
-            instances_data.append(iData)
-
-        data["instances"] = instances_data
+        # instances_data = []
+        # for instance in context:
+        #
+        #     iData = {}
+        #     for key, value in instance.data.items():
+        #         if isinstance(value, clique.Collection):
+        #             value = value.format()
+        #
+        #         try:
+        #             json.dumps(value)
+        #             iData[key] = value
+        #         except KeyError:
+        #             msg = "\"{0}\"".format(value)
+        #             msg += " in instance.data[\"{0}\"]".format(key)
+        #             msg += " could not be serialized."
+        #             self.log.debug(msg)
+        #
+        #     instances_data.append(iData)
+        #
+        # data["instances"] = instances_data

         with open(json_path, "w") as outfile:
             outfile.write(json.dumps(data, indent=4, sort_keys=True))
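The removed loop probes each value with `json.dumps` and keeps only what serializes; note that `json.dumps` signals failure with `TypeError` (and `ValueError` for circular references), not `KeyError`, so the `except KeyError` above would let unserializable values slip through. A standalone sketch of the probe pattern with the broader catch (sample data is hypothetical):

```python
import json

# Hypothetical instance data: one value is not JSON-serializable.
data = {"frameStart": 1001, "handleEnd": 10, "session": object()}

serializable = {}
for key, value in data.items():
    try:
        json.dumps(value)            # probe: can this value be serialized?
        serializable[key] = value
    except (TypeError, ValueError):  # json.dumps raises these, not KeyError
        print('"{0}" in data["{1}"] could not be serialized.'.format(value, key))

print(json.dumps(serializable, indent=4, sort_keys=True))
```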
@@ -60,6 +59,9 @@ class ExtractJSON(pyblish.api.ContextPlugin):

         # self.log.info("1: {}".format(data))

+        if isinstance(data, Anatomy):
+            return
+
         if not isinstance(data, dict):
             # self.log.info("2: {}".format(data))
             return data
@@ -88,6 +90,9 @@ class ExtractJSON(pyblish.api.ContextPlugin):
                 # loops if dictionary
                 data[key] = self.serialize(value)

+            if isinstance(value, Anatomy):
+                continue
+
             if isinstance(value, (list or tuple)):
                 # loops if list or tuple
                 for i, item in enumerate(value):
@@ -89,7 +89,7 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):
             # CUSTOM ATTRIBUTES
             custom_attributes = entity_data.get('custom_attributes', [])
             instances = [
-                i for i in self.context[:] if i.data['asset'] in entity['name']
+                i for i in self.context if i.data['asset'] in entity['name']
             ]
             for key in custom_attributes:
                 assert (key in entity['custom_attributes']), (
@@ -3,9 +3,10 @@ import pyblish.api
 import clique
 import pype.api
 import pype.lib
+import pype.plugin


-class ExtractReview(pyblish.api.InstancePlugin):
+class ExtractReview(pype.plugin.InstancePlugin):
     """Extracting Review mov file for Ftrack

     Compulsory attribute of representation is tags list with "review",
@@ -20,7 +21,7 @@ class ExtractReview(pyblish.api.InstancePlugin):
     label = "Extract Review"
     order = pyblish.api.ExtractorOrder + 0.02
     families = ["review"]
-    hosts = ["nuke", "maya", "shell"]
+    hosts = ["nuke", "maya", "shell", "premiere"]

     outputs = {}
     ext_filter = []
@@ -54,14 +55,21 @@ class ExtractReview(pyblish.api.InstancePlugin):

         ffmpeg_path = pype.lib.get_ffmpeg_tool_path("ffmpeg")

+        config = instance.context.data["presets"]
+        ext_filters = config["plugins"]["global"]["publish"]["ExtractReview"]["ext_filter"]
+        self.log.info("ext_filters: {}".format(ext_filters))
+
         # filter out mov and img sequences
         representations_new = representations[:]
         for repre in representations:
             self.log.info("Repre ext: {}".format(repre['ext']))
-            self.log.info("self.ext_filter: {}".format(self.ext_filter))

-            if repre['ext'] not in self.ext_filter:
+            if repre['ext'] not in ext_filters:
                 continue

             tags = repre.get("tags", [])
+            self.log.info("Repre tags: {}".format(tags))

             if "multipartExr" in tags:
                 # ffmpeg doesn't support multipart exrs
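The new preset lookup indexes four levels deep, so one missing key raises `KeyError` and fails the whole plugin. A defensive variant with chained `.get()` calls (the nesting mirrors the hunk above; the sample values and fallback are hypothetical):

```python
# `presets` mirrors the layout read by ExtractReview; values are samples.
presets = {
    "plugins": {
        "global": {
            "publish": {"ExtractReview": {"ext_filter": ["mov", "mp4"]}}
        }
    }
}

# Direct indexing raises KeyError on the first missing level;
# chained .get() degrades to the fallback list instead.
ext_filters = (
    presets.get("plugins", {})
           .get("global", {})
           .get("publish", {})
           .get("ExtractReview", {})
           .get("ext_filter", []))
print(ext_filters)  # -> ['mov', 'mp4']
```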
@@ -664,12 +664,14 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
         families += current_families

         self.log.debug("Registered root: {}".format(api.registered_root()))
+        self.log.debug("PYPE_STUDIO_PROJECTS_MOUNT: {}".format(os.getenv("PYPE_STUDIO_PROJECTS_MOUNT")))
         # create relative source path for DB
         try:
             source = instance.data['source']
         except KeyError:
             source = context.data["currentFile"]
-        source = source.replace(os.getenv("PYPE_STUDIO_PROJECTS_MOUNT"),
-                                api.registered_root())
+        self.log.debug("source: {}".format(source))
+        source = str(source).replace(os.getenv("PYPE_STUDIO_PROJECTS_MOUNT"),
+                                     api.registered_root())
         relative_path = os.path.relpath(source, api.registered_root())
         source = os.path.join("{root}", relative_path).replace("\\", "/")
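The hunk rewrites an absolute path on the studio mount into a `{root}`-relative one for the database. A worked sketch of the same three steps with hypothetical mount, root, and file values:

```python
import os

# Hypothetical values; the plugin reads these from the environment
# (PYPE_STUDIO_PROJECTS_MOUNT) and from api.registered_root().
mount = "P:/projects"
registered_root = "/studio/projects"
source = "P:/projects/J01/editorial/work/conforming/edit_v001.prproj"

source = str(source).replace(mount, registered_root)
relative_path = os.path.relpath(source, registered_root)
source = os.path.join("{root}", relative_path).replace("\\", "/")
print(source)  # -> {root}/J01/editorial/work/conforming/edit_v001.prproj
```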
@@ -13,5 +13,5 @@ class CollectAudioVersion(pyblish.api.InstancePlugin):

     def process(self, instance):
         self.log.info('Audio: {}'.format(instance.data['name']))
-        instance.data['version'] = '001'
+        instance.data['version'] = 1
         self.log.info('Audio version to: {}'.format(instance.data['version']))
@@ -1,12 +0,0 @@
-import pyblish.api
-
-
-class CollectContextDataPremiera(pyblish.api.ContextPlugin):
-    """Collecting data from temp json sent from premiera context"""
-
-    label = "Collect Premiera Context"
-    order = pyblish.api.CollectorOrder + 0.1
-
-    def process(self, context):
-        data_path = context.data['rqst_json_data_path']
-        self.log.info("Context is: {}".format(data_path))
@@ -19,16 +19,18 @@ class CollectFrameranges(pyblish.api.InstancePlugin):
         metadata = instance.data.get('jsonData').get('metadata')

         # getting important metadata time calculation
-        fps = metadata['ppro.timeline.fps']
+        fps = float(metadata['ppro.timeline.fps'])
         sec_start = metadata['ppro.clip.start']
         sec_end = metadata['ppro.clip.end']
-        fstart = instance.data.get('fstart')
+        fstart = instance.data.get('frameStart')
         fend = fstart + (sec_end * fps) - (sec_start * fps) - 1

         self.log.debug("instance: {}, fps: {}\nsec_start: {}\nsec_end: {}\nfstart: {}\nfend: {}\n".format(
             instance.data['name'],
             fps, sec_start, sec_end, fstart, fend))

-        instance.data['startFrame'] = fstart
-        instance.data['endFrame'] = fend
+        instance.data['frameStart'] = fstart
+        instance.data['frameEnd'] = fend
+        instance.data['handleStart'] = instance.context.data['handleStart']
+        instance.data['handleEnd'] = instance.context.data['handleEnd']
         instance.data['fps'] = metadata['ppro.timeline.fps']
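The frame-range arithmetic converts the clip's in/out points in seconds to an end-inclusive frame range, which is why `float()` on the fps string matters. A worked example with hypothetical numbers:

```python
# Hypothetical metadata values mirroring the hunk above.
fps = float("25.0000")   # float(metadata['ppro.timeline.fps'])
sec_start = 10.0         # metadata['ppro.clip.start']
sec_end = 12.0           # metadata['ppro.clip.end']
fstart = 1001            # instance.data.get('frameStart')

# 2 seconds at 25 fps = 50 frames; the -1 makes the range end-inclusive,
# so frames 1001..1050 are exactly 50 frames.
fend = fstart + (sec_end * fps) - (sec_start * fps) - 1
print(fstart, fend)  # -> 1001 1050.0
```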
@@ -26,7 +26,7 @@ class CollectHierarchyContext(pyblish.api.ContextPlugin):
         json_data = context.data.get("jsonData", None)
         temp_context = {}
         for instance in json_data['instances']:
-            if instance['family'] in 'projectfile':
+            if instance['family'] in 'workfile':
                 continue

             in_info = {}
@@ -35,10 +35,13 @@ class CollectHierarchyContext(pyblish.api.ContextPlugin):
             in_info['entity_type'] = 'Shot'

             instance_pyblish = [
-                i for i in context.data["instances"] if i.data['asset'] in name][0]
+                i for i in context.data["instances"]
+                if i.data['asset'] in name][0]
             in_info['custom_attributes'] = {
-                'fend': instance_pyblish.data['endFrame'],
-                'fstart': instance_pyblish.data['startFrame'],
+                'frameStart': instance_pyblish.data['frameStart'],
+                'frameEnd': instance_pyblish.data['frameEnd'],
+                'handleStart': instance_pyblish.data['handleStart'],
+                'handleEnd': instance_pyblish.data['handleEnd'],
                 'fps': instance_pyblish.data['fps']
             }
@@ -0,0 +1,82 @@
+import pyblish.api
+
+
+class CollectClipRepresentations(pyblish.api.InstancePlugin):
+    """
+    Collecting frameranges needed for ftrack integration
+
+    Args:
+        context (obj): pyblish context session
+
+    """
+
+    label = "Collect Clip Representations"
+    order = pyblish.api.CollectorOrder
+    families = ['clip']
+
+    def process(self, instance):
+        # add to representations
+        if not instance.data.get("representations"):
+            instance.data["representations"] = list()
+
+        ins_d = instance.data
+        staging_dir = ins_d["stagingDir"]
+        frame_start = ins_d["frameStart"]
+        frame_end = ins_d["frameEnd"]
+        handle_start = ins_d["handleStart"]
+        handle_end = ins_d["handleEnd"]
+        fps = ins_d["fps"]
+        files_list = ins_d.get("files")
+
+        if not files_list:
+            return
+
+        json_repr_ext = ins_d["jsonReprExt"]
+        json_repr_subset = ins_d["jsonReprSubset"]
+
+        if files_list:
+            file = next((f for f in files_list
+                         if json_repr_subset in f), None)
+        else:
+            return
+
+        if json_repr_ext in ["mov", "mp4"]:
+            representation = {
+                "files": file,
+                "stagingDir": staging_dir,
+                "frameStart": frame_start,
+                "frameEnd": frame_end,
+                "frameStartFtrack": frame_start - handle_start,
+                "frameEndFtrack": frame_end - handle_end,
+                "step": 1,
+                "fps": fps,
+                "name": json_repr_subset,
+                "ext": json_repr_ext,
+                "tags": ["preview", "review", "burnins", "reformat"]
+            }
+        else:
+            representation = {
+                "files": file,
+                "stagingDir": staging_dir,
+                "step": 1,
+                "fps": fps,
+                "name": json_repr_subset,
+                "ext": json_repr_ext,
+                "tags": ["review"]
+            }
+        self.log.debug("representation: {}".format(representation))
+        instance.data["representations"].append(representation)
+
+        thumb = next((f for f in files_list
+                      if "thumbnail" in f), None)
+        if thumb:
+            thumb_representation = {
+                'files': thumb,
+                'stagingDir': staging_dir,
+                'name': "thumbnail",
+                'thumbnail': True,
+                'ext': "png"
+            }
+            self.log.debug("representation: {}".format(thumb_representation))
+            instance.data["representations"].append(
+                thumb_representation)
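A cut-down sketch of how the collector above picks the clip file and the thumbnail out of the instance's file list with `next()` (file names are hypothetical):

```python
# Hypothetical files list as the collector would receive it.
files_list = ["sh010_review.mp4", "sh010_audio.wav", "sh010_thumbnail.png"]
json_repr_subset = "review"

# First file containing the subset name, else None; same for the thumbnail.
file = next((f for f in files_list if json_repr_subset in f), None)
thumb = next((f for f in files_list if "thumbnail" in f), None)
print(file, thumb)  # -> sh010_review.mp4 sh010_thumbnail.png
```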
pype/plugins/premiere/publish/collect_resolution.py (new file, 31 lines)
@@ -0,0 +1,31 @@
+import pyblish.api
+
+
+class CollectResolution(pyblish.api.InstancePlugin):
+    """
+    Collecting frameranges needed for ftrack integration
+
+    Args:
+        context (obj): pyblish context session
+
+    """
+
+    label = "Collect Clip Resolution"
+    order = pyblish.api.CollectorOrder
+    families = ['clip']
+
+    def process(self, instance):
+        # getting metadata from jsonData key
+        metadata = instance.data.get('jsonData').get('metadata')
+
+        # getting important metadata time calculation
+        pixel_aspect = float(metadata['ppro.format.pixelaspect'])
+        res_width = metadata['ppro.format.width']
+        res_height = metadata['ppro.format.height']
+
+        instance.data['pixelAspect'] = pixel_aspect
+        instance.data['resolutionWidth'] = res_width
+        instance.data['resolutionHeight'] = res_height
+
+        self.log.info(f"Resolution was set to: `{res_width}x{res_height}`,"
+                      f" and pixel aspect ratio to: `{pixel_aspect}`")
@@ -1,144 +0,0 @@
-import pyblish.api
-import os
-
-from avalon import io, api
-
-
-class IntegrateAssumedDestination(pyblish.api.InstancePlugin):
-    """Generate the assumed destination path where the file will be stored"""
-
-    label = "Integrate Assumed Destination"
-    order = pyblish.api.IntegratorOrder - 0.05
-    families = ["clip", "projectfile"]
-
-    def process(self, instance):
-
-        self.create_destination_template(instance)
-
-        template_data = instance.data["assumedTemplateData"]
-        # template = instance.data["template"]
-
-        anatomy = instance.context.data['anatomy']
-        # template = anatomy.publish.path
-        anatomy_filled = anatomy.format(template_data)
-        mock_template = anatomy_filled.publish.path
-
-        # For now assume resources end up in a "resources" folder in the
-        # published folder
-        mock_destination = os.path.join(os.path.dirname(mock_template),
-                                        "resources")
-
-        # Clean the path
-        mock_destination = os.path.abspath(os.path.normpath(mock_destination))
-
-        # Define resource destination and transfers
-        resources = instance.data.get("resources", list())
-        transfers = instance.data.get("transfers", list())
-        for resource in resources:
-
-            # Add destination to the resource
-            source_filename = os.path.basename(resource["source"])
-            destination = os.path.join(mock_destination, source_filename)
-
-            # Force forward slashes to fix issue with software unable
-            # to work correctly with backslashes in specific scenarios
-            # (e.g. escape characters in PLN-151 V-Ray UDIM)
-            destination = destination.replace("\\", "/")
-
-            resource['destination'] = destination
-
-            # Collect transfers for the individual files of the resource
-            # e.g. all individual files of a cache or UDIM textures.
-            files = resource['files']
-            for fsrc in files:
-                fname = os.path.basename(fsrc)
-                fdest = os.path.join(mock_destination, fname)
-                transfers.append([fsrc, fdest])
-
-        instance.data["resources"] = resources
-        instance.data["transfers"] = transfers
-
-    def create_destination_template(self, instance):
-        """Create a filepath based on the current data available
-
-        Example template:
-            {root}/{project}/{silo}/{asset}/publish/{subset}/v{version:0>3}/
-            {subset}.{representation}
-        Args:
-            instance: the instance to publish
-
-        Returns:
-            file path (str)
-        """
-
-        # get all the stuff from the database
-        subset_name = instance.data["subset"]
-        self.log.info(subset_name)
-        asset_name = instance.data["asset"]
-        project_name = api.Session["AVALON_PROJECT"]
-
-        project = io.find_one(
-            {
-                "type": "project",
-                "name": project_name
-            },
-            projection={"config": True, "data": True}
-        )
-
-        template = project["config"]["template"]["publish"]
-        # anatomy = instance.context.data['anatomy']
-
-        asset = io.find_one({
-            "type": "asset",
-            "name": asset_name,
-            "parent": project["_id"]
-        })
-
-        assert asset, ("No asset found by the name '{}' "
-                       "in project '{}'".format(asset_name, project_name))
-        silo = asset.get('silo')
-
-        subset = io.find_one({
-            "type": "subset",
-            "name": subset_name,
-            "parent": asset["_id"]
-        })
-
-        # assume there is no version yet, we start at `1`
-        version = None
-        version_number = 1
-        if subset is not None:
-            version = io.find_one(
-                {
-                    "type": "version",
-                    "parent": subset["_id"]
-                },
-                sort=[("name", -1)]
-            )
-
-        # if there is a subset there ought to be version
-        if version is not None:
-            version_number += version["name"]
-
-        if instance.data.get('version'):
-            version_number = int(instance.data.get('version'))
-
-        hierarchy = asset['data']['parents']
-        if hierarchy:
-            # hierarchy = os.path.sep.join(hierarchy)
-            hierarchy = os.path.join(*hierarchy)
-
-        template_data = {"root": api.Session["AVALON_PROJECTS"],
-                         "project": {"name": project_name,
-                                     "code": project['data']['code']},
-                         "silo": silo,
-                         "family": instance.data['family'],
-                         "asset": asset_name,
-                         "subset": subset_name,
-                         "version": version_number,
-                         "hierarchy": hierarchy,
-                         "representation": "TEMP"}
-
-        instance.data["assumedTemplateData"] = template_data
-        self.log.info(template_data)
-        instance.data["template"] = template
@@ -1,140 +0,0 @@
-import pyblish.api
-from avalon import io
-
-
-class IntegrateHierarchyToAvalon(pyblish.api.ContextPlugin):
-    """
-    Create entities in ftrack based on collected data from premiere
-
-    """
-
-    order = pyblish.api.IntegratorOrder - 0.1
-    label = 'Integrate Hierarchy To Avalon'
-    families = ['clip']
-
-    def process(self, context):
-        if "hierarchyContext" not in context.data:
-            return
-
-        self.db = io
-        if not self.db.Session:
-            self.db.install()
-
-        input_data = context.data["hierarchyContext"]
-        self.import_to_avalon(input_data)
-
-    def import_to_avalon(self, input_data, parent=None):
-
-        for name in input_data:
-            self.log.info('input_data[name]: {}'.format(input_data[name]))
-            entity_data = input_data[name]
-            entity_type = entity_data['entity_type']
-
-            data = {}
-            # Process project
-            if entity_type.lower() == 'project':
-                entity = self.db.find_one({'type': 'project'})
-                # TODO: should be in validator?
-                assert (entity is not None), "Didn't find project in DB"
-
-                # get data from already existing project
-                for key, value in entity.get('data', {}).items():
-                    data[key] = value
-
-                self.av_project = entity
-            # Raise error if project or parent are not set
-            elif self.av_project is None or parent is None:
-                raise AssertionError(
-                    "Collected items are not in right order!"
-                )
-            # Else process assset
-            else:
-                entity = self.db.find_one({'type': 'asset', 'name': name})
-                # Create entity if doesn't exist
-                if entity is None:
-                    if self.av_project['_id'] == parent['_id']:
-                        silo = None
-                    elif parent['silo'] is None:
-                        silo = parent['name']
-                    else:
-                        silo = parent['silo']
-                    entity = self.create_avalon_asset(name, silo)
-                    self.log.info('entity: {}'.format(entity))
-                    self.log.info('data: {}'.format(entity.get('data', {})))
-                    self.log.info('____1____')
-                    data['entityType'] = entity_type
-                    # TASKS
-                    tasks = entity_data.get('tasks', [])
-                    if tasks is not None or len(tasks) > 0:
-                        data['tasks'] = tasks
-                    parents = []
-                    visualParent = None
-                    data = input_data[name]
-                    if self.av_project['_id'] != parent['_id']:
-                        visualParent = parent['_id']
-                        parents.extend(parent.get('data', {}).get('parents', []))
-                        parents.append(parent['name'])
-                    data['visualParent'] = visualParent
-                    data['parents'] = parents
-
-                    self.db.update_many(
-                        {'_id': entity['_id']},
-                        {'$set': {
-                            'data': data,
-                        }})
-
-                    entity = self.db.find_one({'type': 'asset', 'name': name})
-                    self.log.info('entity: {}'.format(entity))
-                    self.log.info('data: {}'.format(entity.get('data', {})))
-                    self.log.info('____2____')
-
-                # Else get data from already existing
-                else:
-                    self.log.info('entity: {}'.format(entity))
-                    self.log.info('data: {}'.format(entity.get('data', {})))
-                    self.log.info('________')
-                    for key, value in entity.get('data', {}).items():
-                        data[key] = value
-
-            data['entityType'] = entity_type
-            # TASKS
-            tasks = entity_data.get('tasks', [])
-            if tasks is not None or len(tasks) > 0:
-                data['tasks'] = tasks
-            parents = []
-            visualParent = None
-            # do not store project's id as visualParent (silo asset)
-
-            if self.av_project['_id'] != parent['_id']:
-                visualParent = parent['_id']
-                parents.extend(parent.get('data', {}).get('parents', []))
-                parents.append(parent['name'])
-            data['visualParent'] = visualParent
-            data['parents'] = parents
-
-            # CUSTOM ATTRIBUTES
-            for k, val in entity_data.get('custom_attributes', {}).items():
-                data[k] = val
-
-            # Update entity data with input data
-            self.db.update_many(
-                {'_id': entity['_id']},
-                {'$set': {
-                    'data': data,
-                }})
-
-            if 'childs' in entity_data:
-                self.import_to_avalon(entity_data['childs'], entity)
-
-    def create_avalon_asset(self, name, silo):
-        item = {
-            'schema': 'avalon-core:asset-2.0',
-            'name': name,
-            'silo': silo,
-            'parent': self.av_project['_id'],
-            'type': 'asset',
-            'data': {}
-        }
-        entity_id = self.db.insert_one(item).inserted_id
-
-        return self.db.find_one({'_id': entity_id})
@@ -1,171 +0,0 @@
-import sys
-import pyblish.api
-import six
-
-
-class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):
-    """
-    Create entities in ftrack based on collected data from premiere
-    Example of entry data:
-    {
-        "ProjectXS": {
-            "entity_type": "Project",
-            "custom_attributes": {
-                "fps": 24,...
-            },
-            "tasks": [
-                "Compositing",
-                "Lighting",... *task must exist as task type in project schema*
-            ],
-            "childs": {
-                "sq01": {
-                    "entity_type": "Sequence",
-                    ...
-                }
-            }
-        }
-    }
-    """
-
-    order = pyblish.api.IntegratorOrder
-    label = 'Integrate Hierarchy To Ftrack'
-    families = ["clip"]
-    optional = False
-
-    def process(self, context):
-        self.context = context
-        if "hierarchyContext" not in context.data:
-            return
-
-        self.ft_project = None
-        self.session = context.data["ftrackSession"]
-
-        input_data = context.data["hierarchyContext"]
-
-        # adding ftrack types from presets
-        ftrack_types = context.data['ftrackTypes']
-
-        self.import_to_ftrack(input_data, ftrack_types)
-
-    def import_to_ftrack(self, input_data, ftrack_types, parent=None):
-        for entity_name in input_data:
-            entity_data = input_data[entity_name]
-            entity_type = entity_data['entity_type'].capitalize()
-
-            if entity_type.lower() == 'project':
-                query = 'Project where full_name is "{}"'.format(entity_name)
-                entity = self.session.query(query).one()
-                self.ft_project = entity
-                self.task_types = self.get_all_task_types(entity)
-
-            elif self.ft_project is None or parent is None:
-                raise AssertionError(
-                    "Collected items are not in right order!"
-                )
-
-            # try to find if entity already exists
-            else:
-                query = '{} where name is "{}" and parent_id is "{}"'.format(
-                    entity_type, entity_name, parent['id']
-                )
-                try:
-                    entity = self.session.query(query).one()
-                except Exception:
-                    entity = None
-
-            # Create entity if not exists
-            if entity is None:
-                entity = self.create_entity(
-                    name=entity_name,
-                    type=entity_type,
-                    parent=parent
-                )
-            # self.log.info('entity: {}'.format(dict(entity)))
-            # CUSTOM ATTRIBUTES
-            custom_attributes = entity_data.get('custom_attributes', [])
-            instances = [
-                i for i in self.context.data["instances"] if i.data['asset'] in entity['name']]
-            for key in custom_attributes:
-                assert (key in entity['custom_attributes']), (
-                    'Missing custom attribute')
-
-                entity['custom_attributes'][key] = custom_attributes[key]
-            for instance in instances:
-                instance.data['ftrackShotId'] = entity['id']
-
-            try:
-                self.session.commit()
-            except Exception:
-                tp, value, tb = sys.exc_info()
-                self.session.rollback()
-                six.reraise(tp, value, tb)
-
-            # TASKS
-            tasks = entity_data.get('tasks', [])
-            existing_tasks = []
-            tasks_to_create = []
-            for child in entity['children']:
-                if child.entity_type.lower() == 'task':
-                    existing_tasks.append(child['name'])
-                    # existing_tasks.append(child['type']['name'])
-
-            for task in tasks:
-                if task in existing_tasks:
-                    print("Task {} already exists".format(task))
-                    continue
-                tasks_to_create.append(task)
-
-            for task in tasks_to_create:
-                self.create_task(
-                    name=task,
-                    task_type=ftrack_types[task],
-                    parent=entity
-                )
-
-            if 'childs' in entity_data:
-                self.import_to_ftrack(
-                    entity_data['childs'], ftrack_types, entity)
-
-    def get_all_task_types(self, project):
-        tasks = {}
-        proj_template = project['project_schema']
-        temp_task_types = proj_template['_task_type_schema']['types']
-
-        for type in temp_task_types:
-            if type['name'] not in tasks:
-                tasks[type['name']] = type
-
-        return tasks
-
-    def create_task(self, name, task_type, parent):
-        task = self.session.create('Task', {
-            'name': name,
-            'parent': parent
-        })
-        # TODO not secured!!! - check if task_type exists
-        self.log.info(task_type)
-        self.log.info(self.task_types)
-        task['type'] = self.task_types[task_type]
-
-        try:
-            self.session.commit()
-        except Exception:
-            tp, value, tb = sys.exc_info()
-            self.session.rollback()
-            six.reraise(tp, value, tb)
-
-        return task
-
-    def create_entity(self, name, type, parent):
-        entity = self.session.create(type, {
-            'name': name,
-            'parent': parent
-        })
-        try:
-            self.session.commit()
-        except Exception:
-            tp, value, tb = sys.exc_info()
-            self.session.rollback()
-            six.reraise(tp, value, tb)
-
-        return entity
@@ -88,13 +88,13 @@ $.batchrenamer = {
       selected[c].clip.name = newName;

       parents.push({
-        'entityType': 'episode',
+        'entityType': 'Episode',
         'entityName': episode + '_' + episodeSuf
       });
       hierarchy.push(episode + '_' + episodeSuf);

       parents.push({
-        'entityType': 'sequence',
+        'entityType': 'Sequence',
         'entityName': episode + sequenceName
       });
       hierarchy.push(episode + sequenceName);
@@ -1,4 +1,4 @@
-/* global app, qe, alert, File, $, JSON, ProjectItemType, XMPMeta */
+/* global app, qe, alert, File, $, JSON, ProjectItemType, XMPMeta, parseFloat */
 /*
 .____ _ ___ .____.______
 --- - - -- / . \// // . \ ___/ --- ---- - -
@@ -502,7 +502,7 @@ $.pype = {
     // process instances
     // check if asset in metadata
     // add it to sequence metadata
-    if (instances[i].family !== 'projectfile') {
+    if (instances[i].family !== 'workfile') {
       var data = {};
       data.family = instances[i].family;
       data.ftrackShotId = instances[i].ftrackShotId;
@@ -676,12 +676,12 @@ $.pype = {
     var sequenceSize = $.pype.getImageSize();
     metadata['ppro.videoTrack.name'] = videoTrack.name;
     metadata['ppro.sequence.name'] = sequence.name;
-    metadata['ppro.source.fps'] = (1 / interpretation.frameRate);
-    metadata['ppro.timeline.fps'] = (1 / settings.videoFrameRate.seconds);
+    metadata['ppro.source.fps'] = parseFloat(1 / interpretation.frameRate).toFixed(4);
+    metadata['ppro.timeline.fps'] = parseFloat(1 / settings.videoFrameRate.seconds).toFixed(4);
     metadata['ppro.source.path'] = $.pype.convertPathString(clip.projectItem.getMediaPath());
     metadata['ppro.format.width'] = sequenceSize.h;
     metadata['ppro.format.height'] = sequenceSize.v;
-    metadata['ppro.format.pixelaspect'] = interpretation.pixelAspectRatio;
+    metadata['ppro.format.pixelaspect'] = parseFloat(interpretation.pixelAspectRatio).toFixed(4);
     metadata['ppro.source.start'] = clip.inPoint.seconds;
     metadata['ppro.source.end'] = clip.outPoint.seconds;
     metadata['ppro.source.duration'] = clip.duration.seconds;
@@ -124,7 +124,7 @@ var ENV;
           <button type="button" class="btn btn-info btn-sm btn-block" id="btn-get-frame">Get screen grab</button>
         </li>

-        <li class="list-group-item" id="load">
+        <!-- <li class="list-group-item" id="load">
           <h5>Load/Update assets to timeline</h5>
           <pre><code class="js"></code></pre>
           <div class="input-group-prepend">
@@ -141,7 +141,7 @@ var ENV;
             <button type="button" class="btn btn-info btn-sm btn-block" id="btn-getRernderAnimation">DO IT!</button>
           </div>
         </div>
-        </li>
+        </li> -->
       </ul>

       <hr />
@@ -162,7 +162,6 @@ function _publish () {
   var audioOnly = $.querySelector('input[name=audio-only]').checked;
   var jsonSendPath = $.querySelector('input[name=send-path]').value;
   var jsonGetPath = $.querySelector('input[name=get-path]').value;
-  var publishPath = _pype.ENV.PUBLISH_PATH;

   if (jsonSendPath === '') {
     // create temp staging directory on local
@@ -218,12 +217,14 @@ function _publish () {
       "adobePublishJsonPathSend": jsonSendPath,
       "adobePublishJsonPathGet": jsonGetPath,
       "gui": gui,
-      "publishPath": publishPath,
+      "publishPath": convertPathString(_pype.ENV.PUBLISH_PATH),
       "project": _pype.ENV.AVALON_PROJECT,
       "asset": _pype.ENV.AVALON_ASSET,
       "task": _pype.ENV.AVALON_TASK,
-      "workdir": _pype.ENV.AVALON_WORKDIR
+      "workdir": convertPathString(_pype.ENV.AVALON_WORKDIR),
+      "host": _pype.ENV.AVALON_APP
     }
     displayResult('dataToPublish: ' + JSON.stringify(dataToPublish));
     pras.publish(dataToPublish).then(function (result) {
       displayResult(
         'pype.js:publish < pras.publish: ' + JSON.stringify(result));
@@ -232,9 +233,9 @@ function _publish () {
       // read json data from resulted path
       displayResult('Updating metadata of clips after publishing');

-      jsonfile.readFile(result.return_data_path, function (json) {
-        _pype.csi.evalScript('$.pype.dumpPublishedInstancesToMetadata(' + JSON.stringify(json) + ');');
-      });
+      // jsonfile.readFile(result.return_data_path, function (json) {
+      //   _pype.csi.evalScript('$.pype.dumpPublishedInstancesToMetadata(' + JSON.stringify(json) + ');');
+      // });

       // version up project
       if (versionUp) {
@@ -257,8 +258,6 @@ function _publish () {
     });
   });
   } else {
     // register publish path
     pras.register_plugin_path(publishPath).then(displayResult);
     // send json to pyblish
     pras.publish(jsonSendPath, jsonGetPath, gui).then(function (result) {
       const jsonfile = require('jsonfile');
@@ -406,14 +405,19 @@ $('#btn-newWorkfileVersion').click(function () {
 });

 $('#btn-testing').click(function () {
-  var data = {
-    "adobePublishJsonPathSend": "C:/Users/jezsc/_PYPE_testing/testing_data/premiere/95478408-91ee-4522-81f6-f1689060664f_send.json",
-    "adobePublishJsonPathGet": "C:/Users/jezsc/_PYPE_testing/testing_data/premiere/95478408-91ee-4522-81f6-f1689060664f_get.json",
-    "gui": true,
-    "project": "J01_jakub_test",
-    "asset": "editorial",
-    "task": "conforming"
-  }
+  // var data = {
+  //   "adobePublishJsonPathSend": "C:/Users/jezsc/_PYPE_testing/testing_data/premiere/95478408-91ee-4522-81f6-f1689060664f_send.json",
+  //   "adobePublishJsonPathGet": "C:/Users/jezsc/_PYPE_testing/testing_data/premiere/95478408-91ee-4522-81f6-f1689060664f_get.json",
+  //   "gui": true,
+  //   "project": "J01_jakub_test",
+  //   "asset": "editorial",
+  //   "task": "conforming",
+  //   "workdir": "C:/Users/jezsc/_PYPE_testing/projects/J01_jakub_test/editorial/work/conforming",
+  //   "publishPath": "C:/Users/jezsc/CODE/pype-setup/repos/pype/pype/plugins/premiere/publish",
+  //   "host": "premiere"
+  // }
+  var data = {"adobePublishJsonPathSend":"C:/Users/jezsc/AppData/Local/Temp/887ed0c3-d772-4105-b285-847ef53083cd_send.json","adobePublishJsonPathGet":"C:/Users/jezsc/AppData/Local/Temp/887ed0c3-d772-4105-b285-847ef53083cd_get.json","gui":true,"publishPath":"C:/Users/jezsc/CODE/pype-setup/repos/pype/pype/plugins/premiere/publish","project":"J01_jakub_test","asset":"editorial","task":"conforming","workdir":"C:/Users/jezsc/_PYPE_testing/projects/J01_jakub_test/editorial/work/conforming","host":"premiere"}

   pras.publish(data);
 });
@@ -13,12 +13,11 @@ var pras = {
   },
   getRequestFromRestApiServer: function (url, options, callback) {
     _pype.displayResult('url: ' + url);
-    _pype.displayResult('options: ' + JSON.stringify(options));

     // define options in case there is null coming
     if (options === null) {
       options = {
-        method: 'get',
+        method: 'GET',
         headers: {
           'Content-Type': 'application/json'
         }
pype/premiere/ppro_test/css/avalon.min.css (vendored, 3 lines)
@@ -1,3 +0,0 @@
-body{background-color:#323238;color:#eeeeee}#output{background:#121212;color:#eeeeee;padding:2em;font-family:monospace;font-weight:bold;min-height:8em}.dark>.list-group-item{background:#454747}
-
-/*# sourceMappingURL=avalon.min.css.map */
@@ -1,9 +0,0 @@
-{
-    "version": 3,
-    "file": "avalon.min.css",
-    "sources": [
-        "avalon.scss"
-    ],
-    "names": [],
-    "mappings": "AAAA,AAAA,IAAI,AAAC,CACH,gBAAgB,CAAE,OAAO,CACzB,KAAK,CAAE,OAAO,CACf,AAED,AAAA,OAAO,AAAC,CACN,UAAU,CAAE,OAAO,CACnB,KAAK,CAAE,OAAO,CACd,OAAO,CAAE,GAAG,CACZ,WAAW,CAAE,SAAS,CACtB,WAAW,CAAE,IAAI,CACjB,UAAU,CAAE,GAAG,CAChB,AAED,AAAA,KAAK,CAAG,gBAAgB,AAAC,CACvB,UAAU,CAAE,OAAO,CACpB"
-}
@@ -1,17 +0,0 @@
-body {
-  background-color: #323238;
-  color: #eeeeee;
-}
-
-#output {
-  background: #121212;
-  color: #eeeeee;
-  padding: 2em;
-  font-family: monospace;
-  font-weight: bold;
-  min-height: 8em;
-}
-
-.dark > .list-group-item {
-  background: #454747;
-}
File diff suppressed because one or more lines are too long
@@ -1,48 +0,0 @@
-[0403/172908.369:ERROR:crash_report_database_win.cc(469)] failed to stat report
-[0403/172908.370:ERROR:crash_report_database_win.cc(469)] failed to stat report
-[0403/172908.370:ERROR:crash_report_database_win.cc(469)] failed to stat report
-[0403/172908.370:ERROR:crash_report_database_win.cc(469)] failed to stat report
-[0403/172908.370:ERROR:crash_report_database_win.cc(469)] failed to stat report
-[0403/172908.370:ERROR:crash_report_database_win.cc(469)] failed to stat report
-[0403/172908.370:ERROR:crash_report_database_win.cc(469)] failed to stat report
-[0403/172908.370:ERROR:crash_report_database_win.cc(469)] failed to stat report
-[0403/172908.370:ERROR:crash_report_database_win.cc(469)] failed to stat report
-[0403/172908.370:ERROR:crash_report_database_win.cc(469)] failed to stat report
-[0403/172908.370:ERROR:crash_report_database_win.cc(469)] failed to stat report
-[0403/172908.370:ERROR:crash_report_database_win.cc(469)] failed to stat report
-[0403/172908.370:ERROR:crash_report_database_win.cc(469)] failed to stat report
-[0403/172908.370:ERROR:crash_report_database_win.cc(469)] failed to stat report
-[0403/172908.370:ERROR:crash_report_database_win.cc(469)] failed to stat report
-[0403/172908.370:ERROR:crash_report_database_win.cc(469)] failed to stat report
-[0403/172908.370:ERROR:crash_report_database_win.cc(469)] failed to stat report
-[0403/172908.370:ERROR:crash_report_database_win.cc(469)] failed to stat report
-[0403/172908.370:ERROR:crash_report_database_win.cc(469)] failed to stat report
-[0403/172908.370:ERROR:crash_report_database_win.cc(469)] failed to stat report
-[0403/172908.370:ERROR:crash_report_database_win.cc(469)] failed to stat report
-[0403/172908.371:ERROR:crash_report_database_win.cc(469)] failed to stat report
-[0403/172908.371:ERROR:crash_report_database_win.cc(469)] failed to stat report
-[0403/172908.371:ERROR:crash_report_database_win.cc(469)] failed to stat report
-[0403/173803.977:ERROR:crash_report_database_win.cc(469)] failed to stat report
-[0403/173803.977:ERROR:crash_report_database_win.cc(469)] failed to stat report
-[0403/173803.977:ERROR:crash_report_database_win.cc(469)] failed to stat report
-[0403/173803.977:ERROR:crash_report_database_win.cc(469)] failed to stat report
-[0403/173803.977:ERROR:crash_report_database_win.cc(469)] failed to stat report
-[0403/173803.977:ERROR:crash_report_database_win.cc(469)] failed to stat report
-[0403/173803.977:ERROR:crash_report_database_win.cc(469)] failed to stat report
-[0403/173803.977:ERROR:crash_report_database_win.cc(469)] failed to stat report
-[0403/173803.977:ERROR:crash_report_database_win.cc(469)] failed to stat report
-[0403/173803.977:ERROR:crash_report_database_win.cc(469)] failed to stat report
-[0403/173803.977:ERROR:crash_report_database_win.cc(469)] failed to stat report
-[0403/173803.977:ERROR:crash_report_database_win.cc(469)] failed to stat report
-[0403/173803.977:ERROR:crash_report_database_win.cc(469)] failed to stat report
-[0403/173803.977:ERROR:crash_report_database_win.cc(469)] failed to stat report
-[0403/173803.977:ERROR:crash_report_database_win.cc(469)] failed to stat report
-[0403/173803.977:ERROR:crash_report_database_win.cc(469)] failed to stat report
-[0403/173803.977:ERROR:crash_report_database_win.cc(469)] failed to stat report
-[0403/173803.977:ERROR:crash_report_database_win.cc(469)] failed to stat report
-[0403/173803.977:ERROR:crash_report_database_win.cc(469)] failed to stat report
-[0403/173803.977:ERROR:crash_report_database_win.cc(469)] failed to stat report
-[0403/173803.977:ERROR:crash_report_database_win.cc(469)] failed to stat report
-[0403/173803.977:ERROR:crash_report_database_win.cc(469)] failed to stat report
-[0403/173803.977:ERROR:crash_report_database_win.cc(469)] failed to stat report
-[0403/173803.977:ERROR:crash_report_database_win.cc(469)] failed to stat report
@@ -1,57 +0,0 @@
-<!DOCTYPE html>
-<html>
-
-<html lang="en">
-<meta charset="utf-8">
-<meta name="viewport" content="width=device-width, initial-scale=1, shrink-to-fit=no">
-<title>Pype extention</title>
-
-<link href="./css/bootstrap.min.css" type="text/css" rel="stylesheet">
-<link href="./css/avalon.min.css" type="text/css" rel="stylesheet">
-
-<script>
-  if (typeof module === 'object') {
-    window.module = module;
-    module = undefined;
-  }
-</script>
-
-<script src="./js/vendor/popper.min.js"></script>
-<script src="./js/vendor/bootstrap.min.js"></script>
-<script src="./js/vendor/CSInterface-8.js"></script>
-<script src="./js/vendor/jquery-3.3.1.min.js"></script>
-<script src="./js/vendor/json2.js"></script>
-
-<script>
-  if (window.module)
-    module = window.module;
-  var ENV;
-</script>
-</head>
-
-<body>
-
-  <div id="section">
-    <a href="javascript:history.go(0)">Refresh panel</a>
-    <ul class="list-group list-group-flush dark">
-      <li class="list-group-item" id="rename">
-        <div class="input-group input-group-sm mb-1">
-          <div class="input-group-append">
-            <button id="btn-get-env" type="button" class="btn btn-info btn-sm btn-block">get Env</button>
-          </div>
-        </div>
-      </li>
-    </ul>
-
-    <hr />
-
-    <div class="col-md-6" id="source">
-      <!-- <pre> <code class="python"></code> </pre> -->
-    </div>
-    <h5>Output</h5>
-    <div class="row" id="output"></div>
-
-    <script src="./js/avalon.js"></script>
-</body>
-
-</html>
@ -1,368 +0,0 @@
|
|||
/* global CSInterface, $, querySelector, api, displayResult */
|
||||
|
||||
var csi = new CSInterface();
|
||||
var output = document.getElementById('output');
|
||||
|
||||
var rootFolderPath = csi.getSystemPath(SystemPath.EXTENSION);
|
||||
var timecodes = cep_node.require('node-timecodes');
|
||||
var process = cep_node.require('process');
|
||||
|
||||
|
||||
function getEnv() {
|
||||
csi.evalScript('pype.getProjectFileData();', function (result) {
|
||||
process.env.EXTENSION_PATH = rootFolderPath
|
||||
window.ENV = process.env;
|
||||
var resultData = JSON.parse(result);
|
||||
for (key in resultData) {
|
||||
window.ENV[key] = resultData[key];
|
||||
};
|
||||
csi.evalScript('pype.setEnvs(' + JSON.stringify(window.ENV) + ')');
|
||||
});
|
||||
}
|
||||
|
||||
function renderClips() {
|
||||
csi.evalScript('pype.transcodeExternal(' + rootFolderPath + ');', function (result) {
|
||||
displayResult(result);
|
||||
});
|
||||
}
|
||||
|
||||
function displayResult(r) {
|
||||
console.log(r);
|
||||
csi.evalScript('$.writeln( ' + JSON.stringify(r) + ' )');
|
||||
output.classList.remove("error");
|
||||
output.innerText = r;
|
||||
}
|
||||
|
||||
function displayError(e) {
|
||||
output.classList.add("error");
|
||||
output.innerText = e.message;
|
||||
}
|
||||
|
||||
function loadJSX() {
|
||||
// get the appName of the currently used app. For Premiere Pro it's "PPRO"
|
||||
var appName = csi.hostEnvironment.appName;
|
||||
var extensionPath = csi.getSystemPath(SystemPath.EXTENSION);
|
||||
|
||||
// load general JSX script independent of appName
|
||||
var extensionRootGeneral = extensionPath + '/jsx/';
|
||||
csi.evalScript('$._ext.evalFiles("' + extensionRootGeneral + '")');
|
||||
|
||||
// load JSX scripts based on appName
|
||||
var extensionRootApp = extensionPath + '/jsx/' + appName + '/';
|
||||
csi.evalScript('$._ext.evalFiles("' + extensionRootApp + '")');
|
||||
// csi.evalScript('$._PPP_.logConsoleOutput()');
|
||||
getEnv();
|
||||
|
||||
csi.evalScript('$._PPP_.updateEventPanel( "' + "all plugins are loaded" + '" )');
|
||||
csi.evalScript('$._PPP_.updateEventPanel( "' + "testing function done" + '" )');
|
||||
|
||||
}
|
||||
|
||||
// run all at loading
|
||||
loadJSX()
|
||||
|
||||
|
||||
function loadAnimationRendersToTimeline() {
|
||||
// it will get type of asset and extension from input
|
||||
// and start loading script from jsx
|
||||
var $ = querySelector('#load');
|
||||
var data = {};
|
||||
data.subset = $('input[name=type]').value;
|
||||
data.subsetExt = $('input[name=ext]').value;
|
||||
var requestList = [];
|
||||
// get all selected clips
|
||||
csi.evalScript('pype.getClipsForLoadingSubsets( "' + data.subset + '" )', function (result) {
|
||||
// TODO: need to check if the clips are already created and this is just updating to last versions
|
||||
var resultObj = JSON.parse(result);
|
||||
var instances = resultObj[0];
|
||||
var numTracks = resultObj[1];
|
||||
|
||||
var key = '';
|
||||
// creating requesting list of dictionaries
|
||||
for (key in instances) {
|
||||
var clipData = {};
|
||||
clipData.parentClip = instances[key];
|
||||
clipData.asset = key;
|
||||
clipData.subset = data.subset;
|
||||
clipData.representation = data.subsetExt;
|
||||
requestList.push(clipData);
|
||||
}
|
||||
// gets data from mongodb
|
||||
api.load_representations(window.ENV['AVALON_PROJECT'], requestList).then(
|
||||
function (avalonData) {
|
||||
// creates or updates data on timeline
|
||||
var makeData = {};
|
||||
makeData.binHierarchy = data.subset + '/' + data.subsetExt;
|
||||
makeData.clips = avalonData;
|
||||
makeData.numTracks = numTracks;
|
||||
csi.evalScript('pype.importFiles( ' + JSON.stringify(makeData) + ' )');
|
||||
}
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
function evalScript(script) {
|
||||
var callback = function (result) {
|
||||
displayResult(result);
|
||||
};
|
||||
csi.evalScript(script, callback);
|
||||
}
|
||||
|
||||
function deregister() {
|
||||
api.deregister_plugin_path().then(displayResult);
|
||||
}
|
||||
|
||||
function register() {
|
||||
var $ = querySelector('#register');
|
||||
var path = $('input[name=path]').value;
|
||||
api.register_plugin_path(path).then(displayResult);
|
||||
}
|
||||
|
||||
function getStagingDir() {
|
||||
// create stagingDir
|
||||
const fs = require('fs-extra');
|
||||
const os = require('os');
|
||||
const path = require('path');
|
||||
const UUID = require('pure-uuid');
|
||||
const id = new UUID(4).format();
|
||||
const stagingDir = path.join(os.tmpdir(), id);
|
||||
|
||||
fs.mkdirs(stagingDir);
|
||||
return stagingDir;
|
||||
|
||||
}
|
||||
|
||||
function convertPathString(path) {
|
||||
return path.replace(
|
||||
new RegExp('\\\\', 'g'), '/').replace(new RegExp('//\\?/', 'g'), '');
|
||||
}

function publish() {
    var $ = querySelector('#publish');
    // requires are hoisted here so both branches below can use them
    var fs = require('fs-extra');
    var path = require('path');
    var jsonfile = require('jsonfile');

    // var gui = $('input[name=gui]').checked;
    var gui = true;
    var versionUp = $('input[name=version-up]').checked;
    var audioOnly = $('input[name=audio-only]').checked;
    var jsonSendPath = $('input[name=send-path]').value;
    var jsonGetPath = $('input[name=get-path]').value;
    var publish_path = window.ENV['PUBLISH_PATH'];

    if (jsonSendPath == '') {
        // create temp staging directory on local disk
        var stagingDir = convertPathString(getStagingDir());

        // copy project file to stagingDir
        csi.evalScript('pype.getProjectFileData();', function (result) {
            displayResult(result);
            var data = JSON.parse(result);
            var destination = convertPathString(path.join(stagingDir, data.projectfile));
            displayResult('copying project file to: ' + destination);
            fs.copyFile(data.projectpath, destination, function (err) {
                if (err) {
                    displayResult('copying project file failed: ' + err);
                    return;
                }
                displayResult('project file copied!');
            });
        });

        // build the publishing request and render representations
        csi.evalScript('pype.getPyblishRequest("' + stagingDir + '", ' + audioOnly + ');', function (r) {
            var request = JSON.parse(r);
            displayResult(JSON.stringify(request));

            csi.evalScript('pype.encodeRepresentation(' + JSON.stringify(request) + ');', function (result) {
                // create json files for pyblish
                var jsonSendPath = stagingDir + '_send.json';
                var jsonGetPath = stagingDir + '_get.json';
                $('input[name=send-path]').value = jsonSendPath;
                $('input[name=get-path]').value = jsonGetPath;
                var jsonContent = JSON.parse(result);
                jsonfile.writeFile(jsonSendPath, jsonContent);

                // poll until the rendered file appears, then publish
                var checkingFile = function (filePath) {
                    var timeout = 1000;
                    setTimeout(function () {
                        if (fs.existsSync(filePath)) {
                            // register publish plugin path
                            api.register_plugin_path(publish_path).then(displayResult);
                            // send json to pyblish
                            api.publish(jsonSendPath, jsonGetPath, gui).then(function (result) {
                                // check whether the resulting json file exists
                                if (fs.existsSync(result.get_json_path)) {
                                    // read json data from the resulting path
                                    displayResult('Updating metadata of clips after publishing');
                                    jsonfile.readFile(result.get_json_path, function (err, json) {
                                        csi.evalScript('pype.dumpPublishedInstancesToMetadata(' + JSON.stringify(json) + ');');
                                    });

                                    // version up project
                                    if (versionUp) {
                                        displayResult('Saving new version of the project file');
                                        csi.evalScript('pype.versionUpWorkFile();');
                                    }
                                } else {
                                    // the resulting json file does not exist
                                    displayResult('Publish did not finish correctly. Hit Publish again to publish from already rendered data, or Reset to render everything again.');
                                }
                            });
                        } else {
                            displayResult('waiting');
                            checkingFile(filePath);
                        }
                    }, timeout);
                };

                checkingFile(jsonContent.waitingFor);
            });
        });
    } else {
        // json files already exist: register publish plugin path
        api.register_plugin_path(publish_path).then(displayResult);
        // send json to pyblish
        api.publish(jsonSendPath, jsonGetPath, gui).then(function (result) {
            // check whether the resulting json file exists
            if (fs.existsSync(result.get_json_path)) {
                // read json data from the resulting path
                displayResult('Updating metadata of clips after publishing');
                jsonfile.readFile(result.get_json_path, function (err, json) {
                    csi.evalScript('pype.dumpPublishedInstancesToMetadata(' + JSON.stringify(json) + ');');
                });

                // version up project
                if (versionUp) {
                    displayResult('Saving new version of the project file');
                    csi.evalScript('pype.versionUpWorkFile();');
                }
            } else {
                // the resulting json file does not exist
                displayResult('Publish did not finish correctly. Hit Publish again to publish from already rendered data, or Reset to render everything again.');
            }
        });
    }
    // $('input[name=send-path]').value = '';
    // $('input[name=get-path]').value = '';
}
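
// Flow sketch (descriptive comment added for clarity, not original code):
// publish() takes one of two paths. With an empty send-path it builds the
// whole request from scratch:
//
//     getStagingDir -> pype.getProjectFileData (copy project file)
//     -> pype.getPyblishRequest -> pype.encodeRepresentation
//     -> write <stagingDir>_send.json -> poll until the rendered file exists
//     -> api.publish(send, get, gui) -> dump metadata / version up
//
// With a send-path already filled in (e.g. after a failed attempt) it skips
// rendering and re-runs api.publish on the existing json pair.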

function context() {
    var $ = querySelector('#context');
    var project = $('input[name=project]').value;
    var asset = $('input[name=asset]').value;
    var task = $('input[name=task]').value;
    var app = $('input[name=app]').value;
    api.context(project, asset, task, app).then(displayResult);
}

function tc(timecode) {
    // round-trip a timecode through seconds and back
    var seconds = timecodes.toSeconds(timecode);
    var timec = timecodes.fromSeconds(seconds);
    displayResult(seconds);
    displayResult(timec);
}
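
// Illustrative note: tc('00:23:47:10') round-trips the timecode through
// seconds and back; the exact seconds value depends on the frame rate the
// `timecodes` helper is configured with.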

function rename() {
    var $ = querySelector('#rename');
    var data = {};
    data.ep = $('input[name=episode]').value;
    data.epSuffix = $('input[name=ep_suffix]').value;

    if (!data.ep) {
        csi.evalScript('pype.alert_message("Need to fill episode code")');
        return;
    }

    if (!data.epSuffix) {
        csi.evalScript('pype.alert_message("Need to fill the longer episode suffix")');
        return;
    }

    csi.evalScript('br.renameTargetedTextLayer(' + JSON.stringify(data) + ');', function (result) {
        displayResult(result);
    });
}

// bind buttons
$('#btn-getRernderAnimation').click(function () {
    loadAnimationRendersToTimeline();
});

$('#btn-rename').click(function () {
    rename();
});

$('#btn-set-context').click(function () {
    context();
});

$('#btn-register').click(function () {
    register();
});

$('#btn-deregister').click(function () {
    deregister();
});

$('#btn-publish').click(function () {
    publish();
});

$('#btn-send-reset').click(function () {
    var $ = querySelector('#publish');
    $('input[name=send-path]').value = '';
});

$('#btn-get-reset').click(function () {
    var $ = querySelector('#publish');
    $('input[name=get-path]').value = '';
});

$('#btn-get-active-sequence').click(function () {
    evalScript('pype.getActiveSequence();');
});

$('#btn-get-selected').click(function () {
    $('#output').html('getting selected clips info ...');
    evalScript('pype.getSelectedItems();');
});

$('#btn-get-env').click(function () {
    console.log('print this');
});

$('#btn-get-projectitems').click(function () {
    evalScript('pype.getProjectItems();');
});

$('#btn-metadata').click(function () {
    var $ = querySelector('#publish');
    var path = $('input[name=get-path]').value;
    var jsonfile = require('jsonfile');
    displayResult(path);
    jsonfile.readFile(path, function (err, json) {
        csi.evalScript('pype.dumpPublishedInstancesToMetadata(' + JSON.stringify(json) + ');');
        displayResult('Clip metadata was updated after publishing');
    });
});

$('#btn-get-frame').click(function () {
    evalScript('$._PPP_.exportCurrentFrameAsPNG();');
});

$('#btn-tc').click(function () {
    tc('00:23:47:10');
});

$('#btn-generateRequest').click(function () {
    evalScript('pype.getPyblishRequest();');
});

$('#btn-newWorkfileVersion').click(function () {
    evalScript('pype.versionUpWorkFile();');
});
File diff suppressed because it is too large

@ -1,109 +0,0 @@
// connects the panel to the pype rest api server (pras)

var csi = new CSInterface();

var pras = {
    csi: csi,
    getPresets: function () {
        var url = pras.getApiServerUrl();
        var projectName = 'J01_jakub_test';
        var urlType = 'adobe/presets';
        var restApiGetUrl = [url, urlType, projectName].join('/');
        return restApiGetUrl;
    },
    /**
     * Return url for pype rest api server service
     * @return {string} url
     */
    getApiServerUrl: function () {
        var url = window.ENV.PYPE_REST_API_URL;
        return url;
    },
    getEnv: function () {
        pras.csi.evalScript('pype.getProjectFileData();', function (result) {
            var process = cep_node.require('process');
            process.env.EXTENSION_PATH = rootFolderPath;
            window.ENV = process.env;
            var resultData = JSON.parse(result);
            for (var key in resultData) {
                window.ENV[key] = resultData[key];
            }
            csi.evalScript('pype.setEnvs(' + JSON.stringify(window.ENV) + ')');
        });
    }
};

pras.getEnv();
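
// Illustrative note (not in the original file): getPresets() composes a url
// of the form <PYPE_REST_API_URL>/adobe/presets/J01_jakub_test, with the
// server part taken from window.ENV and the project name hardcoded here (wip).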

function querySelector (parent) {
    return function (child) {
        return document.querySelector(parent).querySelector(child);
    };
}
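
// Example (illustrative): querySelector returns a lookup helper scoped to one
// panel section, so
//     var $ = querySelector('#publish');
//     $('input[name=send-path]').value
// reads the send-path field inside #publish only.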

var defs = {};

function jumpTo (name) {
    // highlight the element registered under `name`, clearing any previous one
    var e = defs[name];
    document.querySelectorAll('.highlight').forEach(function (el) {
        el.classList.remove('highlight');
    });
    e.classList.add('highlight');
    return false;
}

function unindent (code) {
    // strip the common leading indentation from a code snippet
    var lines = code.split('\n');
    var margin = -1;
    for (var j = 0; j < lines.length; j++) {
        var l = lines[j];
        for (var i = 0; i < l.length; i++) {
            if (l[i] != ' ') {
                margin = i;
                break;
            }
        }
        if (margin > -1) {
            break;
        }
    }
    lines = lines.slice(j);
    return lines.map(function (s) {
        return s.substr(margin);
    }).join('\n');
}
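
// Example (illustrative): unindent finds the first non-space column and
// shifts every line left by that amount, so
//     unindent('    if (x) {\n        go();\n    }')
// returns 'if (x) {\n    go();\n}'.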

function ready () {
    // // set the <code> element of each example to the corresponding functions source
    // document.querySelectorAll('li pre code.js').forEach(function(e){
    //     var id = e.parentElement.parentElement.id;
    //     var f = window[id];
    //     var code = f.toString().split('\n').slice(2, -1).join('\n');
    //     e.innerText = unindent(code);
    // })

    document.querySelectorAll('li pre code.html').forEach(function (e) {
        var html = e.parentElement.parentElement.querySelector('div.example').innerHTML;
        e.innerText = unindent(html);
    });

    hljs.initHighlighting();

    // // find all the elements representing the function definitions in the python source
    // document.querySelectorAll('.python .hljs-function .hljs-title').forEach(function(e){
    //     var a = document.createElement('a');
    //     a.name = e.innerText;
    //     e.parentElement.insertBefore(a, e)
    //     return defs[e.innerText] = e.parentElement;
    // });

    // convert all 'api.X' strings to hyperlinks to jump to python source
    document.querySelectorAll('.js').forEach(function (e) {
        var code = e.innerHTML;
        Object.keys(defs).forEach(function (k) {
            code = code.replace('api.' + k + '(', '<a href="#' + k + '" onclick="jumpTo(\'' + k + '\')">api.' + k + '</a>(');
        });
        e.innerHTML = code;
    });
}
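
// Example (illustrative): after ready() runs, any 'api.publish(' text inside
// a .js code block is rewritten to
//     <a href="#publish" onclick="jumpTo('publish')">api.publish</a>(
// so the sample code links back to the matching definition registered in `defs`.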
pype/premiere/ppro_test/js/vendor/CSInterface-8.js (vendored, 1193 lines)
File diff suppressed because it is too large

File diff suppressed because one or more lines are too long

pype/premiere/ppro_test/js/vendor/json2.js (vendored, 489 lines)

@ -1,489 +0,0 @@
/*
    json2.js
    2014-02-04

    Public Domain.

    NO WARRANTY EXPRESSED OR IMPLIED. USE AT YOUR OWN RISK.

    See http://www.JSON.org/js.html

    This code should be minified before deployment.
    See http://javascript.crockford.com/jsmin.html

    USE YOUR OWN COPY. IT IS EXTREMELY UNWISE TO LOAD CODE FROM SERVERS YOU DO
    NOT CONTROL.

    This file creates a global JSON object containing two methods: stringify
    and parse.

        JSON.stringify(value, replacer, space)
            value       any JavaScript value, usually an object or array.

            replacer    an optional parameter that determines how object
                        values are stringified for objects. It can be a
                        function or an array of strings.

            space       an optional parameter that specifies the indentation
                        of nested structures. If it is omitted, the text will
                        be packed without extra whitespace. If it is a number,
                        it will specify the number of spaces to indent at each
                        level. If it is a string (such as '\t' or ' '),
                        it contains the characters used to indent at each level.

            This method produces a JSON text from a JavaScript value.

            When an object value is found, if the object contains a toJSON
            method, its toJSON method will be called and the result will be
            stringified. A toJSON method does not serialize: it returns the
            value represented by the name/value pair that should be serialized,
            or undefined if nothing should be serialized. The toJSON method
            will be passed the key associated with the value, and this will be
            bound to the value.

            For example, this would serialize Dates as ISO strings.

                Date.prototype.toJSON = function (key) {
                    function f(n) {
                        // Format integers to have at least two digits.
                        return n < 10 ? '0' + n : n;
                    }

                    return this.getUTCFullYear()   + '-' +
                        f(this.getUTCMonth() + 1)  + '-' +
                        f(this.getUTCDate())       + 'T' +
                        f(this.getUTCHours())      + ':' +
                        f(this.getUTCMinutes())    + ':' +
                        f(this.getUTCSeconds())    + 'Z';
                };

            You can provide an optional replacer method. It will be passed the
            key and value of each member, with this bound to the containing
            object. The value that is returned from your method will be
            serialized. If your method returns undefined, then the member will
            be excluded from the serialization.

            If the replacer parameter is an array of strings, then it will be
            used to select the members to be serialized. It filters the results
            such that only members with keys listed in the replacer array are
            stringified.

            Values that do not have JSON representations, such as undefined or
            functions, will not be serialized. Such values in objects will be
            dropped; in arrays they will be replaced with null. You can use
            a replacer function to replace those with JSON values.
            JSON.stringify(undefined) returns undefined.

            The optional space parameter produces a stringification of the
            value that is filled with line breaks and indentation to make it
            easier to read.

            If the space parameter is a non-empty string, then that string will
            be used for indentation. If the space parameter is a number, then
            the indentation will be that many spaces.

            Example:

            text = JSON.stringify(['e', {pluribus: 'unum'}]);
            // text is '["e",{"pluribus":"unum"}]'

            text = JSON.stringify(['e', {pluribus: 'unum'}], null, '\t');
            // text is '[\n\t"e",\n\t{\n\t\t"pluribus": "unum"\n\t}\n]'

            text = JSON.stringify([new Date()], function (key, value) {
                return this[key] instanceof Date ?
                    'Date(' + this[key] + ')' : value;
            });
            // text is '["Date(---current time---)"]'

        JSON.parse(text, reviver)
            This method parses a JSON text to produce an object or array.
            It can throw a SyntaxError exception.

            The optional reviver parameter is a function that can filter and
            transform the results. It receives each of the keys and values,
            and its return value is used instead of the original value.
            If it returns what it received, then the structure is not modified.
            If it returns undefined then the member is deleted.

            Example:

            // Parse the text. Values that look like ISO date strings will
            // be converted to Date objects.

            myData = JSON.parse(text, function (key, value) {
                var a;
                if (typeof value === 'string') {
                    a =
/^(\d{4})-(\d{2})-(\d{2})T(\d{2}):(\d{2}):(\d{2}(?:\.\d*)?)Z$/.exec(value);
                    if (a) {
                        return new Date(Date.UTC(+a[1], +a[2] - 1, +a[3], +a[4],
                            +a[5], +a[6]));
                    }
                }
                return value;
            });

            myData = JSON.parse('["Date(09/09/2001)"]', function (key, value) {
                var d;
                if (typeof value === 'string' &&
                        value.slice(0, 5) === 'Date(' &&
                        value.slice(-1) === ')') {
                    d = new Date(value.slice(5, -1));
                    if (d) {
                        return d;
                    }
                }
                return value;
            });

    This is a reference implementation. You are free to copy, modify, or
    redistribute.
*/

/*jslint evil: true, regexp: true */

/*members "", "\b", "\t", "\n", "\f", "\r", "\"", JSON, "\\", apply,
    call, charCodeAt, getUTCDate, getUTCFullYear, getUTCHours,
    getUTCMinutes, getUTCMonth, getUTCSeconds, hasOwnProperty, join,
    lastIndex, length, parse, prototype, push, replace, slice, stringify,
    test, toJSON, toString, valueOf
*/

// Create a JSON object only if one does not already exist. We create the
// methods in a closure to avoid creating global variables.

if (typeof JSON !== 'object') {
    JSON = {};
}

(function () {
    'use strict';

    function f(n) {
        // Format integers to have at least two digits.
        return n < 10 ? '0' + n : n;
    }

    if (typeof Date.prototype.toJSON !== 'function') {

        Date.prototype.toJSON = function () {

            return isFinite(this.valueOf())
                ? this.getUTCFullYear()        + '-' +
                    f(this.getUTCMonth() + 1)  + '-' +
                    f(this.getUTCDate())       + 'T' +
                    f(this.getUTCHours())      + ':' +
                    f(this.getUTCMinutes())    + ':' +
                    f(this.getUTCSeconds())    + 'Z'
                : null;
        };

        String.prototype.toJSON =
            Number.prototype.toJSON =
            Boolean.prototype.toJSON = function () {
                return this.valueOf();
            };
    }

    var cx,
        escapable,
        gap,
        indent,
        meta,
        rep;


    function quote(string) {

// If the string contains no control characters, no quote characters, and no
// backslash characters, then we can safely slap some quotes around it.
// Otherwise we must also replace the offending characters with safe escape
// sequences.

        escapable.lastIndex = 0;
        return escapable.test(string) ? '"' + string.replace(escapable, function (a) {
            var c = meta[a];
            return typeof c === 'string'
                ? c
                : '\\u' + ('0000' + a.charCodeAt(0).toString(16)).slice(-4);
        }) + '"' : '"' + string + '"';
    }


    function str(key, holder) {

// Produce a string from holder[key].

        var i,          // The loop counter.
            k,          // The member key.
            v,          // The member value.
            length,
            mind = gap,
            partial,
            value = holder[key];

// If the value has a toJSON method, call it to obtain a replacement value.

        if (value && typeof value === 'object' &&
                typeof value.toJSON === 'function') {
            value = value.toJSON(key);
        }

// If we were called with a replacer function, then call the replacer to
// obtain a replacement value.

        if (typeof rep === 'function') {
            value = rep.call(holder, key, value);
        }

// What happens next depends on the value's type.

        switch (typeof value) {
        case 'string':
            return quote(value);

        case 'number':

// JSON numbers must be finite. Encode non-finite numbers as null.

            return isFinite(value) ? String(value) : 'null';

        case 'boolean':
        case 'null':

// If the value is a boolean or null, convert it to a string. Note:
// typeof null does not produce 'null'. The case is included here in
// the remote chance that this gets fixed someday.

            return String(value);

// If the type is 'object', we might be dealing with an object or an array or
// null.

        case 'object':

// Due to a specification blunder in ECMAScript, typeof null is 'object',
// so watch out for that case.

            if (!value) {
                return 'null';
            }

// Make an array to hold the partial results of stringifying this object value.

            gap += indent;
            partial = [];

// Is the value an array?

            if (Object.prototype.toString.apply(value) === '[object Array]') {

// The value is an array. Stringify every element. Use null as a placeholder
// for non-JSON values.

                length = value.length;
                for (i = 0; i < length; i += 1) {
                    partial[i] = str(i, value) || 'null';
                }

// Join all of the elements together, separated with commas, and wrap them in
// brackets.

                v = partial.length === 0
                    ? '[]'
                    : gap
                    ? '[\n' + gap + partial.join(',\n' + gap) + '\n' + mind + ']'
                    : '[' + partial.join(',') + ']';
                gap = mind;
                return v;
            }

// If the replacer is an array, use it to select the members to be stringified.

            if (rep && typeof rep === 'object') {
                length = rep.length;
                for (i = 0; i < length; i += 1) {
                    if (typeof rep[i] === 'string') {
                        k = rep[i];
                        v = str(k, value);
                        if (v) {
                            partial.push(quote(k) + (gap ? ': ' : ':') + v);
                        }
                    }
                }
            } else {

// Otherwise, iterate through all of the keys in the object.

                for (k in value) {
                    if (Object.prototype.hasOwnProperty.call(value, k)) {
                        v = str(k, value);
                        if (v) {
                            partial.push(quote(k) + (gap ? ': ' : ':') + v);
                        }
                    }
                }
            }

// Join all of the member texts together, separated with commas,
// and wrap them in braces.

            v = partial.length === 0
                ? '{}'
                : gap
                ? '{\n' + gap + partial.join(',\n' + gap) + '\n' + mind + '}'
                : '{' + partial.join(',') + '}';
            gap = mind;
            return v;
        }
    }

// If the JSON object does not yet have a stringify method, give it one.

    if (typeof JSON.stringify !== 'function') {
        escapable = /[\\\"\x00-\x1f\x7f-\x9f\u00ad\u0600-\u0604\u070f\u17b4\u17b5\u200c-\u200f\u2028-\u202f\u2060-\u206f\ufeff\ufff0-\uffff]/g;
        meta = {    // table of character substitutions
            '\b': '\\b',
            '\t': '\\t',
            '\n': '\\n',
            '\f': '\\f',
            '\r': '\\r',
            '"' : '\\"',
            '\\': '\\\\'
        };
        JSON.stringify = function (value, replacer, space) {

// The stringify method takes a value and an optional replacer, and an optional
// space parameter, and returns a JSON text. The replacer can be a function
// that can replace values, or an array of strings that will select the keys.
// A default replacer method can be provided. Use of the space parameter can
// produce text that is more easily readable.

            var i;
            gap = '';
            indent = '';

// If the space parameter is a number, make an indent string containing that
// many spaces.

            if (typeof space === 'number') {
                for (i = 0; i < space; i += 1) {
                    indent += ' ';
                }

// If the space parameter is a string, it will be used as the indent string.

            } else if (typeof space === 'string') {
                indent = space;
            }

// If there is a replacer, it must be a function or an array.
// Otherwise, throw an error.

            rep = replacer;
            if (replacer && typeof replacer !== 'function' &&
                    (typeof replacer !== 'object' ||
                    typeof replacer.length !== 'number')) {
                throw new Error('JSON.stringify');
            }

// Make a fake root object containing our value under the key of ''.
// Return the result of stringifying the value.

            return str('', {'': value});
        };
    }


// If the JSON object does not yet have a parse method, give it one.

    if (typeof JSON.parse !== 'function') {
        cx = /[\u0000\u00ad\u0600-\u0604\u070f\u17b4\u17b5\u200c-\u200f\u2028-\u202f\u2060-\u206f\ufeff\ufff0-\uffff]/g;
        JSON.parse = function (text, reviver) {

// The parse method takes a text and an optional reviver function, and returns
// a JavaScript value if the text is a valid JSON text.

            var j;

            function walk(holder, key) {

// The walk method is used to recursively walk the resulting structure so
// that modifications can be made.

                var k, v, value = holder[key];
                if (value && typeof value === 'object') {
                    for (k in value) {
                        if (Object.prototype.hasOwnProperty.call(value, k)) {
                            v = walk(value, k);
                            if (v !== undefined) {
                                value[k] = v;
                            } else {
                                delete value[k];
                            }
                        }
                    }
                }
                return reviver.call(holder, key, value);
            }


// Parsing happens in four stages. In the first stage, we replace certain
// Unicode characters with escape sequences. JavaScript handles many characters
// incorrectly, either silently deleting them, or treating them as line endings.

            text = String(text);
            cx.lastIndex = 0;
            if (cx.test(text)) {
                text = text.replace(cx, function (a) {
                    return '\\u' +
                        ('0000' + a.charCodeAt(0).toString(16)).slice(-4);
                });
            }

// In the second stage, we run the text against regular expressions that look
// for non-JSON patterns. We are especially concerned with '()' and 'new'
// because they can cause invocation, and '=' because it can cause mutation.
// But just to be safe, we want to reject all unexpected forms.

// We split the second stage into 4 regexp operations in order to work around
// crippling inefficiencies in IE's and Safari's regexp engines. First we
// replace the JSON backslash pairs with '@' (a non-JSON character). Second, we
// replace all simple value tokens with ']' characters. Third, we delete all
// open brackets that follow a colon or comma or that begin the text. Finally,
// we look to see that the remaining characters are only whitespace or ']' or
// ',' or ':' or '{' or '}'. If that is so, then the text is safe for eval.

            if (/^[\],:{}\s]*$/
                    .test(text.replace(/\\(?:["\\\/bfnrt]|u[0-9a-fA-F]{4})/g, '@')
                        .replace(/"[^"\\\n\r]*"|true|false|null|-?\d+(?:\.\d*)?(?:[eE][+\-]?\d+)?/g, ']')
                        .replace(/(?:^|:|,)(?:\s*\[)+/g, ''))) {

// In the third stage we use the eval function to compile the text into a
// JavaScript structure. The '{' operator is subject to a syntactic ambiguity
// in JavaScript: it can begin a block or an object literal. We wrap the text
// in parens to eliminate the ambiguity.

                j = eval('(' + text + ')');

// In the optional fourth stage, we recursively walk the new structure, passing
// each name/value pair to a reviver function for possible transformation.

                return typeof reviver === 'function'
                    ? walk({'': j}, '')
                    : j;
            }

// If the text is not JSON parseable, then a SyntaxError is thrown.

            throw new SyntaxError('JSON.parse');
        };
    }
}());
File diff suppressed because one or more lines are too long

@ -111,8 +111,7 @@ class AdobeRestApi(RestApi):
        try:
            set_context(
                self.dbcon,
                data,
                'adobecommunicator'
                data
            )
            result = run_publish(data)

@ -9,7 +9,7 @@ import pype
from pypeapp import execute
import pyblish.api
from pypeapp import Logger
from pprint import pformat

log = Logger().get_logger(__name__)

PUBLISH_PATHS = []

@ -18,7 +18,7 @@ self = sys.modules[__name__]
self.dbcon = False


def set_context(dbcon_in, data, app):
def set_context(dbcon_in, data):
    ''' Sets context for pyblish (must be done before pyblish is launched)
    :param project: Name of `Project` where instance should be published
    :type project: str

@ -52,7 +52,7 @@ def set_context(dbcon_in, data, app):
    os.environ["AVALON_WORKDIR"] = data["workdir"]
    os.environ["AVALON_HIERARCHY"] = hierarchy
    os.environ["AVALON_PROJECTCODE"] = av_project['data'].get('code', '')
    os.environ["AVALON_APP"] = app
    os.environ["AVALON_APP"] = data["host"]

    self.dbcon.install()
    S["current_dir"] = os.path.normpath(os.getcwd())

@ -123,7 +123,7 @@ def cli_publish(data, gui=True):
    args += ["gui"]

    envcopy = os.environ.copy()
    envcopy["PYBLISH_HOSTS"] = "adobecommunicator"
    envcopy["PYBLISH_HOSTS"] = data.get("host", "adobecommunicator")
    envcopy["AC_PUBLISH_INPATH"] = json_data_path
    envcopy["AC_PUBLISH_OUTPATH"] = return_data_path
    envcopy["PYBLISH_GUI"] = "pyblish_lite"