Merge branch 'sync_server_fix_local_drive' into feature/sites_local_settings

This commit is contained in:
iLLiCiTiT 2021-03-11 18:32:52 +01:00
commit 2bde26c5fa
58 changed files with 1365 additions and 314 deletions

View file

@ -2,6 +2,10 @@
"""Open install dialog."""
import sys
import os
os.chdir(os.path.dirname(__file__)) # for override sys.path in Deadline
from Qt import QtWidgets # noqa
from Qt.QtCore import Signal # noqa

View file

@ -50,6 +50,8 @@ def get_asset_settings():
fps = asset_data.get("fps")
frame_start = asset_data.get("frameStart")
frame_end = asset_data.get("frameEnd")
handle_start = asset_data.get("handleStart")
handle_end = asset_data.get("handleEnd")
resolution_width = asset_data.get("resolutionWidth")
resolution_height = asset_data.get("resolutionHeight")
entity_type = asset_data.get("entityType")
@ -58,6 +60,8 @@ def get_asset_settings():
"fps": fps,
"frameStart": frame_start,
"frameEnd": frame_end,
"handleStart": handle_start,
"handleEnd": handle_end,
"resolutionWidth": resolution_width,
"resolutionHeight": resolution_height
}
@ -150,13 +154,14 @@ def application_launch():
# It is now moved so it is manually called.
# ensure_scene_settings()
# check_inventory()
pype_harmony_path = Path(__file__).parent / "js" / "PypeHarmony.js"
# fills PYPE_HARMONY_JS
pype_harmony_path = Path(__file__).parent.parent / "js" / "PypeHarmony.js"
pype_harmony_js = pype_harmony_path.read_text()
# go through js/creators, loaders and publish folders and load all scripts
script = ""
for item in ["creators", "loaders", "publish"]:
dir_to_scan = Path(__file__).parent / "js" / item
dir_to_scan = Path(__file__).parent.parent / "js" / item
for child in dir_to_scan.iterdir():
script += child.read_text()
@ -210,12 +215,14 @@ def uninstall():
def on_pyblish_instance_toggled(instance, old_value, new_value):
"""Toggle node enabling on instance toggles."""
try:
node = None
if instance.data.get("setMembers"):
node = instance.data["setMembers"][0]
if node:
harmony.send(
{
"function": "PypeHarmony.toggleInstance",
"args": [instance[0], new_value]
"args": [node, new_value]
}
)
except IndexError:
print(f"Instance '{instance}' is missing node")

View file

@ -4,7 +4,8 @@
// ***************************************************************************
var LD_OPENHARMONY_PATH = System.getenv('LIB_OPENHARMONY_PATH');
include(LD_OPENHARMONY_PATH + '/openHarmony.js');
LD_OPENHARMONY_PATH = LD_OPENHARMONY_PATH + '/openHarmony.js';
LD_OPENHARMONY_PATH = LD_OPENHARMONY_PATH.replace(/\\/g, "/");

View file

@ -5,9 +5,9 @@
// check if PypeHarmony is defined and if not, load it.
if (typeof PypeHarmony !== 'undefined') {
var PYPE_HARMONY_JS = System.getenv('PYPE_HARMONY_JS');
include(PYPE_HARMONY_JS + '/pype_harmony.js');
if (typeof PypeHarmony === 'undefined') {
var PYPE_HARMONY_JS = System.getenv('PYPE_HARMONY_JS') + '/PypeHarmony.js';
include(PYPE_HARMONY_JS.replace(/\\/g, "/"));
}

View file

@ -3,13 +3,15 @@
// * ImageSequenceLoader *
// ***************************************************************************
// check if PypeHarmony is defined and if not, load it.
if (typeof PypeHarmony !== 'undefined') {
var PYPE_HARMONY_JS = System.getenv('PYPE_HARMONY_JS');
include(PYPE_HARMONY_JS + '/pype_harmony.js');
if (typeof PypeHarmony === 'undefined') {
var PYPE_HARMONY_JS = System.getenv('PYPE_HARMONY_JS') + '/PypeHarmony.js';
include(PYPE_HARMONY_JS.replace(/\\/g, "/"));
}
if (typeof $ === 'undefined'){
$ = this.__proto__['$'];
}
/**
* @namespace
@ -92,6 +94,9 @@ ImageSequenceLoader.getUniqueColumnName = function(columnPrefix) {
* ];
*/
ImageSequenceLoader.prototype.importFiles = function(args) {
MessageLog.trace("ImageSequence:: " + typeof PypeHarmony);
MessageLog.trace("ImageSequence $:: " + typeof $);
MessageLog.trace("ImageSequence OH:: " + typeof PypeHarmony.OpenHarmony);
var PNGTransparencyMode = 0; // Premultiplied with Black
var TGATransparencyMode = 0; // Premultiplied with Black
var SGITransparencyMode = 0; // Premultiplied with Black

View file

@ -5,12 +5,14 @@
// check if PypeHarmony is defined and if not, load it.
if (typeof PypeHarmony !== 'undefined') {
var PYPE_HARMONY_JS = System.getenv('PYPE_HARMONY_JS');
include(PYPE_HARMONY_JS + '/pype_harmony.js');
if (typeof PypeHarmony === 'undefined') {
var PYPE_HARMONY_JS = System.getenv('PYPE_HARMONY_JS') + '/PypeHarmony.js';
include(PYPE_HARMONY_JS.replace(/\\/g, "/"));
}
if (typeof $ === 'undefined'){
$ = this.__proto__['$'];
}
/**
* @namespace
* @classdesc Image Sequence loader JS code.

View file

@ -5,9 +5,9 @@
// check if PypeHarmony is defined and if not, load it.
if (typeof PypeHarmony !== 'undefined') {
var PYPE_HARMONY_JS = System.getenv('PYPE_HARMONY_JS');
include(PYPE_HARMONY_JS + '/pype_harmony.js');
if (typeof PypeHarmony === 'undefined') {
var PYPE_HARMONY_JS = System.getenv('PYPE_HARMONY_JS') + '/PypeHarmony.js';
include(PYPE_HARMONY_JS.replace(/\\/g, "/"));
}

View file

@ -5,9 +5,9 @@
// check if PypeHarmony is defined and if not, load it.
if (typeof PypeHarmony !== 'undefined') {
var PYPE_HARMONY_JS = System.getenv('PYPE_HARMONY_JS');
include(PYPE_HARMONY_JS + '/pype_harmony.js');
if (typeof PypeHarmony === 'undefined') {
var PYPE_HARMONY_JS = System.getenv('PYPE_HARMONY_JS') + '/PypeHarmony.js';
include(PYPE_HARMONY_JS.replace(/\\/g, "/"));
}
@ -42,7 +42,8 @@ CollectFarmRender.prototype.getRenderNodeSettings = function(n) {
n, frame.current(), 'DRAWING_TYPE'),
node.getTextAttr(
n, frame.current(), 'LEADING_ZEROS'),
node.getTextAttr(n, frame.current(), 'START')
node.getTextAttr(n, frame.current(), 'START'),
node.getEnable(n)
];
return output;

View file

@ -5,9 +5,9 @@
// check if PypeHarmony is defined and if not, load it.
if (typeof PypeHarmony !== 'undefined') {
var PYPE_HARMONY_JS = System.getenv('PYPE_HARMONY_JS');
include(PYPE_HARMONY_JS + '/pype_harmony.js');
if (typeof PypeHarmony === 'undefined') {
var PYPE_HARMONY_JS = System.getenv('PYPE_HARMONY_JS') + '/PypeHarmony.js';
include(PYPE_HARMONY_JS.replace(/\\/g, "/"));
}

View file

@ -5,12 +5,11 @@
// check if PypeHarmony is defined and if not, load it.
if (typeof PypeHarmony !== 'undefined') {
var PYPE_HARMONY_JS = System.getenv('PYPE_HARMONY_JS');
include(PYPE_HARMONY_JS + '/pype_harmony.js');
if (typeof PypeHarmony === 'undefined') {
var PYPE_HARMONY_JS = System.getenv('PYPE_HARMONY_JS') + '/PypeHarmony.js';
include(PYPE_HARMONY_JS.replace(/\\/g, "/"));
}
/**
* @namespace
* @classdesc Code for extracting palettes.

View file

@ -5,9 +5,9 @@
// check if PypeHarmony is defined and if not, load it.
if (typeof PypeHarmony !== 'undefined') {
var PYPE_HARMONY_JS = System.getenv('PYPE_HARMONY_JS');
include(PYPE_HARMONY_JS + '/pype_harmony.js');
if (typeof PypeHarmony === 'undefined') {
var PYPE_HARMONY_JS = System.getenv('PYPE_HARMONY_JS') + '/PypeHarmony.js';
include(PYPE_HARMONY_JS.replace(/\\/g, "/"));
}

View file

@ -9,7 +9,7 @@ class CreateRender(plugin.Creator):
name = "renderDefault"
label = "Render"
family = "renderLocal"
family = "render"
node_type = "WRITE"
def __init__(self, *args, **kwargs):

View file

@ -76,7 +76,7 @@ class ImageSequenceLoader(api.Loader):
"""
self_name = self.__class__.__name__
node = harmony.find_node_by_name(container["name"], "READ")
node = container.get("nodes").pop()
path = api.get_representation_path(representation)
collections, remainder = clique.assemble(
@ -129,7 +129,7 @@ class ImageSequenceLoader(api.Loader):
container (dict): Container data.
"""
node = harmony.find_node_by_name(container["name"], "READ")
node = container.get("nodes").pop()
harmony.send(
{"function": "PypeHarmony.deleteNode", "args": [node]}
)

View file

@ -1,6 +1,7 @@
import os
import pyblish.api
import pyblish.api
class CollectAudio(pyblish.api.InstancePlugin):
"""
@ -15,9 +16,10 @@ class CollectAudio(pyblish.api.InstancePlugin):
order = pyblish.api.CollectorOrder + 0.499
label = "Collect Audio"
hosts = ["harmony"]
families = ["renderlayer"]
families = ["render.farm"]
def process(self, instance):
full_file_name = None
audio_dir = os.path.join(
os.path.dirname(instance.context.data.get("currentFile")), 'audio')
if os.path.isdir(audio_dir):
@ -27,7 +29,9 @@ class CollectAudio(pyblish.api.InstancePlugin):
if file_ext not in ['.wav', '.mp3', '.aiff']:
self.log.error("Unsupported file {}.{}".format(file_name,
file_ext))
full_file_name = None
audio_file_path = os.path.join('audio', full_file_name)
self.log.debug("audio_file_path {}".format(audio_file_path))
instance.data["audioFile"] = audio_file_path
if full_file_name:
audio_file_path = os.path.join('audio', full_file_name)
self.log.debug("audio_file_path {}".format(audio_file_path))
instance.data["audioFile"] = audio_file_path

View file

@ -7,6 +7,7 @@ from avalon import harmony, api
import pype.lib.abstract_collect_render
from pype.lib.abstract_collect_render import RenderInstance
import pype.lib
@attr.s
@ -51,8 +52,8 @@ class CollectFarmRender(pype.lib.abstract_collect_render.
This returns full path with file name determined by Write node
settings.
"""
start = render_instance.frameStart
end = render_instance.frameEnd
start = render_instance.frameStart - render_instance.handleStart
end = render_instance.frameEnd + render_instance.handleEnd
node = render_instance.setMembers[0]
self_name = self.__class__.__name__
# 0 - filename / 1 - type / 2 - zeros / 3 - start
@ -73,23 +74,19 @@ class CollectFarmRender(pype.lib.abstract_collect_render.
f"Cannot determine file extension for {info[1]}")
path = Path(render_instance.source).parent
# is sequence start node on write node offsetting whole sequence?
expected_files = []
# Harmony 17 needs at least one '.' in file_prefix, but not at end
file_prefix = info[0]
file_prefix += '.temp'
# '-' in name is important for Harmony17
for frame in range(start, end + 1):
expected_files.append(
path / "{}{}.{}".format(
file_prefix,
path / "{}-{}.{}".format(
render_instance.subset,
str(frame).rjust(int(info[2]) + 1, "0"),
ext
)
)
self.log.debug("expected_files::{}".format(expected_files))
return expected_files
def get_instances(self, context):
@ -116,7 +113,7 @@ class CollectFarmRender(pype.lib.abstract_collect_render.
if data["family"] != "renderFarm":
continue
# 0 - filename / 1 - type / 2 - zeros / 3 - start
# 0 - filename / 1 - type / 2 - zeros / 3 - start / 4 - enabled
info = harmony.send(
{
"function": f"PypeHarmony.Publish.{self_name}."
@ -126,24 +123,28 @@ class CollectFarmRender(pype.lib.abstract_collect_render.
# TODO: handle pixel aspect and frame step
# TODO: set Deadline stuff (pools, priority, etc. by presets)
subset_name = node.split("/")[1].replace('Farm', '')
# because of using 'renderFarm' as a family, replace 'Farm' with
# capitalized task name
subset_name = node.split("/")[1].replace(
'Farm',
context.data["anatomyData"]["task"].capitalize())
render_instance = HarmonyRenderInstance(
version=version,
time=api.time(),
source=context.data["currentFile"],
label=subset_name,
label=node.split("/")[1],
subset=subset_name,
asset=api.Session["AVALON_ASSET"],
attachTo=False,
setMembers=[node],
publish=True,
publish=info[4],
review=False,
renderer=None,
priority=50,
name=node.split("/")[1],
family="renderlayer",
families=["renderlayer"],
family="render.farm",
families=["render.farm"],
resolutionWidth=context.data["resolutionWidth"],
resolutionHeight=context.data["resolutionHeight"],
@ -157,12 +158,15 @@ class CollectFarmRender(pype.lib.abstract_collect_render.
# time settings
frameStart=context.data["frameStart"],
frameEnd=context.data["frameEnd"],
handleStart=context.data["handleStart"], # from DB
handleEnd=context.data["handleEnd"], # from DB
frameStep=1,
outputType="Image",
outputFormat=info[1],
outputStartFrame=info[3],
leadingZeros=info[2],
toBeRenderedOn='deadline'
toBeRenderedOn='deadline',
ignoreFrameHandleCheck=True
)
self.log.debug(render_instance)

View file

@ -20,7 +20,7 @@ class CollectInstances(pyblish.api.ContextPlugin):
order = pyblish.api.CollectorOrder
hosts = ["harmony"]
families_mapping = {
"render": ["imagesequence", "review", "ftrack"],
"render": ["review", "ftrack"],
"harmony.template": [],
"palette": ["palette", "ftrack"]
}
@ -54,8 +54,8 @@ class CollectInstances(pyblish.api.ContextPlugin):
continue
instance = context.create_instance(node.split("/")[-1])
instance.append(node)
instance.data.update(data)
instance.data["setMembers"] = [node]
instance.data["publish"] = harmony.send(
{"function": "node.getEnable", "args": [node]}
)["result"]

View file

@ -14,6 +14,7 @@ class CollectPalettes(pyblish.api.ContextPlugin):
label = "Palettes"
order = pyblish.api.CollectorOrder + 0.003
hosts = ["harmony"]
# list of regexes for task names where collecting should happen
allowed_tasks = []

View file

@ -25,13 +25,28 @@ class CollectScene(pyblish.api.ContextPlugin):
context.data["scenePath"] = os.path.join(
result[1], result[2] + ".xstage")
context.data["frameRate"] = result[3]
context.data["frameStart"] = result[4]
context.data["frameEnd"] = result[5]
context.data["frameStartHandle"] = result[4]
context.data["frameEndHandle"] = result[5]
context.data["audioPath"] = result[6]
context.data["resolutionWidth"] = result[7]
context.data["resolutionHeight"] = result[8]
context.data["FOV"] = result[9]
# Harmony always starts from frame 1
# 1001 - 10010 >> 1 - 10
# frameStart, frameEnd already collected by global plugin
offset = context.data["frameStart"] - 1
frame_start = context.data["frameStart"] - offset
frames_count = context.data["frameEnd"] - \
context.data["frameStart"] + 1
# increase by handleStart - real frame range
# frameStart != frameStartHandle with handle presence
context.data["frameStart"] = int(frame_start) + \
context.data["handleStart"]
context.data["frameEnd"] = int(frames_count) + \
context.data["frameStart"] - 1
all_nodes = harmony.send(
{"function": "node.subNodes", "args": ["Top"]}
)["result"]

View file

@ -7,7 +7,6 @@ from PIL import Image, ImageDraw, ImageFont
from avalon import harmony
import pype.api
import pype.hosts.harmony
class ExtractPalette(pype.api.Extractor):

View file

@ -17,7 +17,7 @@ class ExtractRender(pyblish.api.InstancePlugin):
label = "Extract Render"
order = pyblish.api.ExtractorOrder
hosts = ["harmony"]
families = ["renderLocal"]
families = ["render"]
def process(self, instance):
# Collect scene data.
@ -47,7 +47,8 @@ class ExtractRender(pyblish.api.InstancePlugin):
harmony.send(
{
"function": func,
"args": [instance[0], path + "/" + instance.data["name"]]
"args": [instance.data["setMembers"][0],
path + "/" + instance.data["name"]]
}
)
harmony.save_scene()
@ -75,7 +76,7 @@ class ExtractRender(pyblish.api.InstancePlugin):
collections, remainder = clique.assemble(files, minimum_items=1)
assert not remainder, (
"There should not be a remainder for {0}: {1}".format(
instance[0], remainder
instance.data["setMembers"][0], remainder
)
)
self.log.debug(collections)

View file

@ -23,7 +23,7 @@ class ExtractTemplate(pype.api.Extractor):
self.log.info(f"Outputting template to {staging_dir}")
dependencies = []
self.get_dependencies(instance[0], dependencies)
self.get_dependencies(instance.data["setMembers"][0], dependencies)
# Get backdrops.
backdrops = {}
@ -46,11 +46,11 @@ class ExtractTemplate(pype.api.Extractor):
dependencies.append(node)
# Make sure we dont export the instance node.
if instance[0] in dependencies:
dependencies.remove(instance[0])
if instance.data["setMembers"][0] in dependencies:
dependencies.remove(instance.data["setMembers"][0])
# Export template.
pype.hosts.harmony.export_template(
pype.hosts.harmony.api.export_template(
unique_backdrops, dependencies, filepath
)

View file

@ -5,8 +5,6 @@ import shutil
from zipfile import ZipFile
import pype.api
from avalon import harmony
import pype.hosts.harmony
class ExtractWorkfile(pype.api.Extractor):

View file

@ -19,6 +19,12 @@ class ValidateAudio(pyblish.api.InstancePlugin):
optional = True
def process(self, instance):
node = None
if instance.data.get("setMembers"):
node = instance.data["setMembers"][0]
if not node:
return
# Collect scene data.
func = """function func(write_node)
{
@ -29,7 +35,7 @@ class ValidateAudio(pyblish.api.InstancePlugin):
func
"""
result = harmony.send(
{"function": func, "args": [instance[0]]}
{"function": func, "args": [node]}
)["result"]
audio_path = result[0]

View file

@ -25,9 +25,9 @@ class ValidateInstanceRepair(pyblish.api.Action):
instances = pyblish.api.instances_by_plugin(failed, plugin)
for instance in instances:
data = harmony.read(instance[0])
data = harmony.read(instance.data["setMembers"][0])
data["asset"] = os.environ["AVALON_ASSET"]
harmony.imprint(instance[0], data)
harmony.imprint(instance.data["setMembers"][0], data)
class ValidateInstance(pyblish.api.InstancePlugin):

View file

@ -18,9 +18,12 @@ class ValidateSceneSettingsRepair(pyblish.api.Action):
def process(self, context, plugin):
"""Repair action entry point."""
pype.hosts.harmony.set_scene_settings(
pype.hosts.harmony.get_asset_settings()
)
expected = pype.hosts.harmony.api.get_asset_settings()
asset_settings = _update_frames(dict.copy(expected))
asset_settings["frameStart"] = 1
asset_settings["frameEnd"] = asset_settings["frameEnd"] + \
asset_settings["handleEnd"]
pype.hosts.harmony.api.set_scene_settings(asset_settings)
if not os.path.exists(context.data["scenePath"]):
self.log.info("correcting scene name")
scene_dir = os.path.dirname(context.data["currentFile"])
@ -45,16 +48,12 @@ class ValidateSceneSettings(pyblish.api.InstancePlugin):
def process(self, instance):
"""Plugin entry point."""
expected_settings = pype.hosts.harmony.get_asset_settings()
expected_settings = pype.hosts.harmony.api.get_asset_settings()
self.log.info(expected_settings)
# Harmony is expected to start at 1.
frame_start = expected_settings["frameStart"]
frame_end = expected_settings["frameEnd"]
expected_settings["frameEnd"] = frame_end - frame_start + 1
expected_settings["frameStart"] = 1
self.log.info(instance.context.data['anatomyData']['asset'])
expected_settings = _update_frames(dict.copy(expected_settings))
expected_settings["frameEndHandle"] = expected_settings["frameEnd"] +\
expected_settings["handleEnd"]
if any(string in instance.context.data['anatomyData']['asset']
for string in self.frame_check_filter):
@ -73,13 +72,19 @@ class ValidateSceneSettings(pyblish.api.InstancePlugin):
expected_settings.pop("resolutionWidth")
expected_settings.pop("resolutionHeight")
self.log.debug(expected_settings)
current_settings = {
"fps": fps,
"frameStart": instance.context.data.get("frameStart"),
"frameEnd": instance.context.data.get("frameEnd"),
"frameStart": instance.context.data["frameStart"],
"frameEnd": instance.context.data["frameEnd"],
"handleStart": instance.context.data.get("handleStart"),
"handleEnd": instance.context.data.get("handleEnd"),
"frameEndHandle": instance.context.data.get("frameEndHandle"),
"resolutionWidth": instance.context.data.get("resolutionWidth"),
"resolutionHeight": instance.context.data.get("resolutionHeight"),
}
self.log.debug("curr:: {}".format(current_settings))
invalid_settings = []
for key, value in expected_settings.items():
@ -90,6 +95,13 @@ class ValidateSceneSettings(pyblish.api.InstancePlugin):
"current": current_settings[key]
})
if ((expected_settings["handleStart"]
or expected_settings["handleEnd"])
and invalid_settings):
msg = "Handles included in calculation. Remove handles in DB " +\
"or extend frame range in timeline."
invalid_settings[-1]["reason"] = msg
msg = "Found invalid settings:\n{}".format(
json.dumps(invalid_settings, sort_keys=True, indent=4)
)
@ -97,3 +109,24 @@ class ValidateSceneSettings(pyblish.api.InstancePlugin):
assert os.path.exists(instance.context.data.get("scenePath")), (
"Scene file not found (saved under wrong name)"
)
def _update_frames(expected_settings):
"""
Calculate proper frame range including handles set in DB.
Harmony requires rendering from 1, so frame range is always moved
to 1.
Args:
expected_settings (dict): pulled from DB
Returns:
modified expected_setting (dict)
"""
frames_count = expected_settings["frameEnd"] - \
expected_settings["frameStart"] + 1
expected_settings["frameStart"] = 1.0 + expected_settings["handleStart"]
expected_settings["frameEnd"] = \
expected_settings["frameStart"] + frames_count - 1
return expected_settings

View file

@ -46,6 +46,13 @@ class RenderInstance(object):
frameEnd = attr.ib()  # end frame
frameStep = attr.ib()  # frame step
handleStart = attr.ib(default=None)  # start frame handle
handleEnd = attr.ib(default=None)  # end frame handle
# for software (like Harmony) where frame range cannot be set by DB
# handles need to be propagated if they exist
ignoreFrameHandleCheck = attr.ib(default=False)
# --------------------
# With default values
# metadata
@ -154,8 +161,8 @@ class AbstractCollectRender(pyblish.api.ContextPlugin):
frame_start_render = int(render_instance.frameStart)
frame_end_render = int(render_instance.frameEnd)
if (int(context.data['frameStartHandle']) == frame_start_render
if (render_instance.ignoreFrameHandleCheck or
int(context.data['frameStartHandle']) == frame_start_render
and int(context.data['frameEndHandle']) == frame_end_render): # noqa: W503, E501
handle_start = context.data['handleStart']

View file

@ -80,6 +80,7 @@ def any_outdated():
"database".format(**container))
checked.add(representation)
return False

View file

@ -63,6 +63,7 @@ class AfterEffectsSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline
"AVALON_PROJECT",
"AVALON_ASSET",
"AVALON_TASK",
"AVALON_APP_NAME",
"PYPE_USERNAME",
"PYPE_DEV",
"PYPE_LOG_NO_COLORS"
@ -76,6 +77,8 @@ class AfterEffectsSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline
dln_job_info.EnvironmentKeyValue = "{key}={value}".format(
key=key,
value=val)
# to recognize job from PYPE for turning Event On/Off
dln_job_info.EnvironmentKeyValue = "PYPE_RENDER_JOB=1"
return dln_job_info

View file

@ -236,7 +236,7 @@ class HarmonySubmitDeadline(
label = "Submit to Deadline"
order = pyblish.api.IntegratorOrder + 0.1
hosts = ["harmony"]
families = ["renderlayer"]
families = ["render.farm"]
if not os.environ.get("DEADLINE_REST_URL"):
optional = False
active = False
@ -254,8 +254,8 @@ class HarmonySubmitDeadline(
job_info.Name = self._instance.data["name"]
job_info.Plugin = "HarmonyPype"
job_info.Frames = "{}-{}".format(
self._instance.data["frameStart"],
self._instance.data["frameEnd"]
self._instance.data["frameStartHandle"],
self._instance.data["frameEndHandle"]
)
# for now, get those from presets. Later on it should be
# configurable in Harmony UI directly.
@ -272,6 +272,7 @@ class HarmonySubmitDeadline(
"AVALON_PROJECT",
"AVALON_ASSET",
"AVALON_TASK",
"AVALON_APP_NAME",
"PYPE_USERNAME",
"PYPE_DEV",
"PYPE_LOG_NO_COLORS"
@ -286,6 +287,9 @@ class HarmonySubmitDeadline(
key=key,
value=val)
# to recognize job from PYPE for turning Event On/Off
job_info.EnvironmentKeyValue = "PYPE_RENDER_JOB=1"
return job_info
def _unzip_scene_file(self, published_scene: Path) -> Path:

View file

@ -431,6 +431,7 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin):
"AVALON_PROJECT",
"AVALON_ASSET",
"AVALON_TASK",
"AVALON_APP_NAME",
"PYPE_USERNAME",
"PYPE_DEV",
"PYPE_LOG_NO_COLORS"
@ -440,6 +441,8 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin):
if key in os.environ}, **api.Session)
environment["PYPE_LOG_NO_COLORS"] = "1"
environment["PYPE_MAYA_VERSION"] = cmds.about(v=True)
# to recognize job from PYPE for turning Event On/Off
environment["PYPE_RENDER_JOB"] = "1"
self.payload_skeleton["JobInfo"].update({
"EnvironmentKeyValue%d" % index: "{key}={value}".format(
key=key,

View file

@ -218,6 +218,10 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin):
"PYTHONPATH",
"PATH",
"AVALON_SCHEMA",
"AVALON_PROJECT",
"AVALON_ASSET",
"AVALON_TASK",
"AVALON_APP_NAME",
"FTRACK_API_KEY",
"FTRACK_API_USER",
"FTRACK_SERVER",
@ -265,7 +269,8 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin):
clean_environment[key] = clean_path
environment = clean_environment
# to recognize job from PYPE for turning Event On/Off
environment["PYPE_RENDER_JOB"] = "1"
payload["JobInfo"].update({
"EnvironmentKeyValue%d" % index: "{key}={value}".format(
key=key,

View file

@ -5,6 +5,7 @@ import os
import json
import re
from copy import copy, deepcopy
import sys
import pype.api
from avalon import api, io
@ -13,36 +14,6 @@ from avalon.vendor import requests, clique
import pyblish.api
def _get_script(path):
# pass input path if exists
if path:
if os.path.exists(path):
return str(path)
else:
raise
"""Get path to the image sequence script."""
try:
from pathlib import Path
except ImportError:
from pathlib2 import Path
try:
from pype.scripts import publish_filesequence
except Exception:
assert False, "Expected module 'publish_deadline'to be available"
module_path = publish_filesequence.__file__
if module_path.endswith(".pyc"):
module_path = module_path[: -len(".pyc")] + ".py"
path = Path(os.path.normpath(module_path)).resolve(strict=True)
assert path is not None, ("Cannot determine path")
return str(path)
def get_resources(version, extension=None):
"""Get the files from the specific version."""
query = {"type": "representation", "parent": version["_id"]}
@ -127,6 +98,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
label = "Submit image sequence jobs to Deadline or Muster"
order = pyblish.api.IntegratorOrder + 0.2
icon = "tractor"
deadline_plugin = "Pype"
hosts = ["fusion", "maya", "nuke", "celaction", "aftereffects", "harmony"]
@ -144,8 +116,14 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
"FTRACK_SERVER",
"PYPE_METADATA_FILE",
"AVALON_PROJECT",
"AVALON_ASSET",
"AVALON_TASK",
"AVALON_APP_NAME",
"PYPE_PUBLISH_JOB"
"PYPE_LOG_NO_COLORS",
"PYPE_USERNAME"
"PYPE_USERNAME",
"PYPE_RENDER_JOB",
"PYPE_PUBLISH_JOB"
]
# custom deadline atributes
@ -171,7 +149,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
# list of family names to transfer to new family if present
families_transfer = ["render3d", "render2d", "ftrack", "slate"]
plugin_python_version = "3.7"
plugin_pype_version = "3.0"
# script path for publish_filesequence.py
publishing_script = None
@ -207,7 +185,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
).format(output_dir))
roothless_mtdt_p = metadata_path
return (metadata_path, roothless_mtdt_p)
return metadata_path, roothless_mtdt_p
def _submit_deadline_post_job(self, instance, job, instances):
"""Submit publish job to Deadline.
@ -235,10 +213,30 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
'render',
override_version)
# Transfer the environment from the original job to this dependent
# job so they use the same environment
metadata_path, roothless_metadata_path = \
self._create_metadata_path(instance)
environment = job["Props"].get("Env", {})
environment["AVALON_PROJECT"] = io.Session["AVALON_PROJECT"]
environment["AVALON_ASSET"] = io.Session["AVALON_ASSET"]
environment["AVALON_TASK"] = io.Session["AVALON_TASK"]
environment["AVALON_APP_NAME"] = os.environ.get("AVALON_APP_NAME")
environment["PYPE_LOG_NO_COLORS"] = "1"
environment["PYPE_USERNAME"] = instance.context.data["user"]
environment["PYPE_PUBLISH_JOB"] = "1"
environment["PYPE_RENDER_JOB"] = "0"
args = [
'publish',
roothless_metadata_path
]
# Generate the payload for Deadline submission
payload = {
"JobInfo": {
"Plugin": "Python",
"Plugin": self.deadline_plugin,
"BatchName": job["Props"]["Batch"],
"Name": job_name,
"UserName": job["Props"]["User"],
@ -255,9 +253,8 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
"OutputDirectory0": output_dir
},
"PluginInfo": {
"Version": self.plugin_python_version,
"ScriptFile": _get_script(self.publishing_script),
"Arguments": "",
"Version": self.plugin_pype_version,
"Arguments": " ".join(args),
"SingleFrameOnly": "True",
},
# Mandatory for Deadline, may be empty
@ -274,20 +271,6 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
else:
payload["JobInfo"]["JobDependency0"] = job["_id"]
# Transfer the environment from the original job to this dependent
# job so they use the same environment
metadata_path, roothless_metadata_path = self._create_metadata_path(
instance)
environment = job["Props"].get("Env", {})
environment["PYPE_METADATA_FILE"] = roothless_metadata_path
environment["AVALON_PROJECT"] = io.Session["AVALON_PROJECT"]
environment["PYPE_LOG_NO_COLORS"] = "1"
environment["PYPE_USERNAME"] = instance.context.data["user"]
try:
environment["PYPE_PYTHON_EXE"] = os.environ["PYPE_PYTHON_EXE"]
except KeyError:
# PYPE_PYTHON_EXE not set
pass
i = 0
for index, key in enumerate(environment):
if key.upper() in self.enviro_filter:
@ -1065,4 +1048,4 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
# Directory
publish_folder = os.path.dirname(file_path)
return publish_folder
return publish_folder

View file

@ -93,3 +93,17 @@ class AbstractProvider(metaclass=ABCMeta):
only parents and their parents)
"""
pass
@abstractmethod
def resolve_path(self, path, root_config, anatomy=None):
    """
    Replace root placeholders with the appropriate real value from
    'root_config' (from Settings or Local Settings) or Anatomy
    (mainly for the 'studio' site).

    Args:
        path (string): path with '{root[work]}/...' placeholders
        root_config (dict): from Settings or Local Settings
        anatomy (Anatomy): prepared anatomy object for the project
    """
    pass

View file

@ -678,6 +678,16 @@ class GDriveHandler(AbstractProvider):
return
return provider_presets
def resolve_path(self, path, root_config, anatomy=None):
    """
    Replace '{root[...]}' placeholders in 'path' from 'root_config'.

    Args:
        path (string): path with '{root[work]}/...' placeholders
        root_config (dict): root values from Settings or Local Settings
        anatomy (Anatomy): unused here, kept for interface parity

    Returns:
        (string) resolved path, or None when a root key is unknown
    """
    # make sure the values are nested under a single 'root' key,
    # matching the '{root[...]}' placeholder shape
    mapping = root_config if root_config.get("root") else {"root": root_config}

    try:
        return path.format(**mapping)
    except KeyError:
        msg = "Error in resolving remote root, unknown key"
        log.error(msg)
def _handle_q(self, q, trashed=False):
""" API list call contain trashed and hidden files/folder by default.
Usually we dont want those, must be included in query explicitly.

View file

@ -85,6 +85,25 @@ class LocalDriveHandler(AbstractProvider):
def get_tree(self):
return
def resolve_path(self, path, root_config, anatomy=None):
    """
    Resolve '{root[...]}' placeholders in 'path'.

    Values from 'root_config' are tried first; when the config is
    missing or lacks a needed key, 'anatomy' is used as a fallback.

    Args:
        path (string): path with '{root[work]}/...' placeholders
        root_config (dict): root values from Settings or Local Settings
        anatomy (Anatomy): fallback resolver (mainly for 'studio' site)

    Returns:
        (string) resolved path

    Raises:
        ValueError: when neither source can resolve the roots
    """
    if root_config and not root_config.get("root"):
        root_config = {"root": root_config}

    try:
        if not root_config:
            raise KeyError
        return path.format(**root_config)
    except KeyError:
        # config-based resolving failed, fall back to anatomy
        try:
            return anatomy.fill_root(path)
        except KeyError:
            msg = "Error in resolving local root from anatomy"
            log.error(msg)
            raise ValueError(msg)
def _copy(self, source_path, target_path):
    """Copy a file from 'source_path' to 'target_path' on local drive."""
    print("copying {}->{}".format(source_path, target_path))
    shutil.copy(source_path, target_path)

View file

@ -111,8 +111,7 @@ class SyncServer(PypeModule, ITrayModule):
Sets 'enabled' according to global settings for the module.
Shouldn't be doing any initialization, that's a job for 'tray_init'
"""
sync_server_settings = module_settings[self.name]
self.enabled = sync_server_settings["enabled"]
self.enabled = module_settings[self.name]["enabled"]
if asyncio is None:
raise AssertionError(
"SyncServer module requires Python 3.5 or higher."
@ -363,8 +362,8 @@ class SyncServer(PypeModule, ITrayModule):
Returns:
(string)
"""
active_site = self.get_sync_project_setting(project_name)['config']\
['active_site']
active_site = self.get_sync_project_setting(
project_name)['config']['active_site']
if active_site == self.LOCAL_SITE:
return get_local_site_id()
return active_site
@ -399,11 +398,19 @@ class SyncServer(PypeModule, ITrayModule):
"""
Returns remote (theirs) site for 'project_name' from settings
"""
return self.get_sync_project_setting(project_name)['config']\
['remote_site']
return self.get_sync_project_setting(
project_name)['config']['remote_site']
""" End of Public API """
def get_local_file_path(self, collection, file_path):
    """
    Return the local variant of 'file_path' (first of the resolved pair).

    Externalized for app usage.

    Args:
        collection (string): project/collection name
        file_path (string): path with root placeholders

    Returns:
        resolved local file path
    """
    resolved = self._resolve_paths(file_path, collection)
    return resolved[0]
def _get_remote_sites_from_settings(self, sync_settings):
if not self.enabled or not sync_settings['enabled']:
return []
@ -453,7 +460,7 @@ class SyncServer(PypeModule, ITrayModule):
"There are not set presets for SyncServer OR "
"Credentials provided are invalid, "
"no syncing possible").
format(str(self.sync_project_settings)), exc_info=True)
format(str(self.sync_project_settings)), exc_info=True)
self.enabled = False
def tray_start(self):
@ -529,20 +536,23 @@ class SyncServer(PypeModule, ITrayModule):
For performance
"""
sync_project_presets = {}
sync_project_settings = {}
if not self.connection:
self.connection = AvalonMongoDB()
self.connection.install()
for collection in self.connection.database.collection_names(False):
sync_settings = self.get_sync_project_setting(collection)
sync_settings = self._parse_sync_settings_from_settings(
get_project_settings(collection))
if sync_settings:
sync_project_presets[collection] = sync_settings
default_sites = self._get_default_site_configs()
sync_settings['sites'].update(default_sites)
sync_project_settings[collection] = sync_settings
if not sync_project_presets:
if not sync_project_settings:
log.info("No enabled and configured projects for sync.")
self.sync_project_settings = sync_project_presets
self.sync_project_settings = sync_project_settings
def get_sync_project_settings(self, refresh=False):
"""
@ -625,9 +635,7 @@ class SyncServer(PypeModule, ITrayModule):
initiated_handlers = {}
configured_sites = {}
default_config = {'provider': 'local_drive'}
all_sites = {self.DEFAULT_SITE: default_config,
self.LOCAL_SITE: default_config}
all_sites = self._get_default_site_configs()
all_sites.update(project_setting.get("sites"))
for site_name, config in all_sites.items():
handler = initiated_handlers. \
@ -644,11 +652,18 @@ class SyncServer(PypeModule, ITrayModule):
return configured_sites
def _get_default_site_configs(self):
default_config = {'provider': 'local_drive'}
all_sites = {self.DEFAULT_SITE: default_config,
self.LOCAL_SITE: default_config}
return all_sites
def get_provider_for_site(self, project_name, site):
"""
Return provider name for site.
"""
site_preset = self.get_sync_project_setting(project_name)["sites"].get(site)
site_preset = self.get_sync_project_setting(project_name)["sites"].\
get(site)
if site_preset:
return site_preset["provider"]
@ -767,7 +782,7 @@ class SyncServer(PypeModule, ITrayModule):
return SyncStatus.DO_NOTHING
async def upload(self, collection, file, representation, provider_name,
site_name, tree=None, preset=None):
remote_site_name, tree=None, preset=None):
"""
Upload single 'file' of a 'representation' to 'provider'.
Source url is taken from 'file' portion, where {root} placeholder
@ -797,42 +812,40 @@ class SyncServer(PypeModule, ITrayModule):
# this part modifies structure on 'remote_site', only single
# thread can do that at a time, upload/download to prepared
# structure should be run in parallel
handler = lib.factory.get_provider(provider_name, site_name,
tree=tree, presets=preset)
remote_handler = lib.factory.get_provider(provider_name,
remote_site_name,
tree=tree,
presets=preset)
root_configs = self._get_roots_config(self.sync_project_settings,
collection,
site_name)
remote_file = self._get_remote_file_path(file, root_configs)
file_path = file.get("path", "")
local_file_path, remote_file_path = self._resolve_paths(
file_path, collection, remote_site_name, remote_handler
)
local_file = self.get_local_file_path(collection,
file.get("path", ""))
target_folder = os.path.dirname(remote_file)
folder_id = handler.create_folder(target_folder)
target_folder = os.path.dirname(remote_file_path)
folder_id = remote_handler.create_folder(target_folder)
if not folder_id:
err = "Folder {} wasn't created. Check permissions.".\
format(target_folder)
raise NotADirectoryError(err)
remote_site = self.get_remote_site(collection)
loop = asyncio.get_running_loop()
file_id = await loop.run_in_executor(None,
handler.upload_file,
local_file,
remote_file,
remote_handler.upload_file,
local_file_path,
remote_file_path,
self,
collection,
file,
representation,
remote_site,
remote_site_name,
True
)
return file_id
async def download(self, collection, file, representation, provider_name,
site_name, tree=None, preset=None):
remote_site_name, tree=None, preset=None):
"""
Downloads file to local folder denoted in representation.Context.
@ -850,16 +863,16 @@ class SyncServer(PypeModule, ITrayModule):
(string) - 'name' of local file
"""
with self.lock:
handler = lib.factory.get_provider(provider_name, site_name,
tree=tree, presets=preset)
remote_handler = lib.factory.get_provider(provider_name,
remote_site_name,
tree=tree,
presets=preset)
root_configs = self._get_roots_config(self.sync_project_settings,
collection,
site_name)
remote_file_path = self._get_remote_file_path(file, root_configs)
file_path = file.get("path", "")
local_file_path, remote_file_path = self._resolve_paths(
file_path, collection, remote_site_name, remote_handler
)
local_file_path = self.get_local_file_path(collection,
file.get("path", ""))
local_folder = os.path.dirname(local_file_path)
os.makedirs(local_folder, exist_ok=True)
@ -867,7 +880,7 @@ class SyncServer(PypeModule, ITrayModule):
loop = asyncio.get_running_loop()
file_id = await loop.run_in_executor(None,
handler.download_file,
remote_handler.download_file,
remote_file_path,
local_file_path,
self,
@ -1184,7 +1197,7 @@ class SyncServer(PypeModule, ITrayModule):
Returns:
only logs, catches IndexError and OSError
"""
my_local_site = self.get_my_local_site()
my_local_site = get_local_site_id()
if my_local_site != site_name:
self.log.warning("Cannot remove non local file for {}".
format(site_name))
@ -1206,12 +1219,14 @@ class SyncServer(PypeModule, ITrayModule):
return
representation = representation.pop()
local_file_path = ''
for file in representation.get("files"):
local_file_path, _ = self._resolve_paths(file.get("path", ""),
collection
)
try:
self.log.debug("Removing {}".format(file["path"]))
local_file = self.get_local_file_path(collection,
file.get("path", ""))
os.remove(local_file)
self.log.debug("Removing {}".format(local_file_path))
os.remove(local_file_path)
except IndexError:
msg = "No file set for {}".format(representation_id)
self.log.debug(msg)
@ -1222,22 +1237,13 @@ class SyncServer(PypeModule, ITrayModule):
raise ValueError(msg)
try:
folder = os.path.dirname(local_file)
folder = os.path.dirname(local_file_path)
os.rmdir(folder)
except OSError:
msg = "folder {} cannot be removed".format(folder)
self.log.warning(msg)
raise ValueError(msg)
def get_my_local_site(self):
""" TODO remove
Returns name of current user local_site, its Pype wide.
Returns:
(string)
"""
return get_local_site_id()
def get_loop_delay(self, project_name):
"""
Return count of seconds before next synchronization loop starts
@ -1320,59 +1326,35 @@ class SyncServer(PypeModule, ITrayModule):
val = {"files.$[f].sites.$[s].progress": progress}
return val
def get_local_file_path(self, collection, path):
def _resolve_paths(self, file_path, collection,
remote_site_name=None, remote_handler=None):
"""
Auxiliary function for replacing rootless path with real path
Returns tuple of local and remote file paths with {root}
placeholders replaced with proper values from Settings or Anatomy
Works with multi roots.
If root definition is not found in Settings, anatomy is used
Args:
collection (string): project name
path (dictionary): 'path' to file with {root}
Returns:
(string) - absolute path on local system
Args:
file_path(string): path with {root}
collection(string): project name
remote_site_name(string): remote site
remote_handler(AbstractProvider): implementation
Returns:
(string, string) - proper absolute paths
"""
local_active_site = self.get_active_site(collection)
sites = self.get_sync_project_setting(collection)["sites"]
root_config = sites[local_active_site]["root"]
remote_file_path = ''
if remote_handler:
root_configs = self._get_roots_config(self.sync_project_settings,
collection,
remote_site_name)
if not root_config.get("root"):
root_config = {"root": root_config}
remote_file_path = remote_handler.resolve_path(file_path,
root_configs)
try:
path = path.format(**root_config)
except KeyError:
try:
anatomy = self.get_anatomy(collection)
path = anatomy.fill_root(path)
except KeyError:
msg = "Error in resolving local root from anatomy"
self.log.error(msg)
raise ValueError(msg)
local_handler = lib.factory.get_provider(
'local_drive', self.get_active_site(collection))
local_file_path = local_handler.resolve_path(
file_path, None, self.get_anatomy(collection))
return path
def _get_remote_file_path(self, file, root_config):
"""
Auxiliary function for replacing rootless path with real path
Args:
file (dictionary): file info, get 'path' to file with {root}
root_config (dict): value of {root} for remote location
Returns:
(string) - absolute path on remote location
"""
path = file.get("path", "")
if not root_config.get("root"):
root_config = {"root": root_config}
try:
return path.format(**root_config)
except KeyError:
msg = "Error in resolving remote root, unknown key"
self.log.error(msg)
return local_file_path, remote_file_path
def _get_retries_arr(self, project_name):
"""

View file

@ -159,7 +159,8 @@ class SyncProjectListWidget(ProjectListWidget):
model.clear()
project_name = None
for project_name in self.sync_server.get_sync_project_settings().keys():
for project_name in self.sync_server.get_sync_project_settings().\
keys():
if self.sync_server.is_paused() or \
self.sync_server.is_project_paused(project_name):
icon = self._get_icon("paused")
@ -203,7 +204,6 @@ class SyncProjectListWidget(ProjectListWidget):
menu = QtWidgets.QMenu()
actions_mapping = {}
action = None
if self.sync_server.is_project_paused(self.project_name):
action = QtWidgets.QAction("Unpause")
actions_mapping[action] = self._unpause
@ -212,7 +212,7 @@ class SyncProjectListWidget(ProjectListWidget):
actions_mapping[action] = self._pause
menu.addAction(action)
if self.local_site == self.sync_server.get_my_local_site():
if self.local_site == get_local_site_id():
action = QtWidgets.QAction("Clear local project")
actions_mapping[action] = self._clear_project
menu.addAction(action)
@ -241,6 +241,7 @@ class SyncProjectListWidget(ProjectListWidget):
self.project_name = None
self.refresh()
class ProjectModel(QtCore.QAbstractListModel):
def __init__(self, *args, projects=None, **kwargs):
super(ProjectModel, self).__init__(*args, **kwargs)
@ -256,6 +257,7 @@ class ProjectModel(QtCore.QAbstractListModel):
def rowCount(self, index):
return len(self.todos)
class SyncRepresentationWidget(QtWidgets.QWidget):
"""
Summary dialog with list of representations that matches current
@ -478,7 +480,7 @@ class SyncRepresentationWidget(QtWidgets.QWidget):
local_site_name = self.sync_server.get_my_local_site()
try:
self.sync_server.add_site(
self.table_view.model()._project,
project_name,
self.representation_id,
local_site_name
)
@ -538,6 +540,9 @@ class SyncRepresentationWidget(QtWidgets.QWidget):
return
fpath = self.item.path
project = self.table_view.model()._project
fpath = self.sync_server.get_local_file_path(project, fpath)
fpath = os.path.normpath(os.path.dirname(fpath))
if os.path.isdir(fpath):
if 'win' in sys.platform: # windows
@ -795,14 +800,12 @@ class SyncRepresentationModel(QtCore.QAbstractTableModel):
repre.get("files_size", 0),
1,
STATUS[repre.get("status", -1)],
self.sync_server.get_local_file_path(self._project,
files[0].get('path'))
files[0].get('path')
)
self._data.append(item)
self._rec_loaded += 1
def canFetchMore(self, index):
"""
Check if there are more records than currently loaded
@ -854,6 +857,9 @@ class SyncRepresentationModel(QtCore.QAbstractTableModel):
self.sort = {self.SORT_BY_COLUMN[index]: order, '_id': 1}
self.query = self.get_default_query()
# import json
# log.debug(json.dumps(self.query, indent=4).replace('False', 'false').\
# replace('True', 'true').replace('None', 'null'))
representations = self.dbcon.aggregate(self.query)
self.refresh(representations)
@ -876,6 +882,7 @@ class SyncRepresentationModel(QtCore.QAbstractTableModel):
project (str): name of project
"""
self._project = project
self.sync_server.set_sync_project_settings()
self.local_site = self.sync_server.get_active_site(self._project)
self.remote_site = self.sync_server.get_remote_site(self._project)
self.refresh()
@ -891,7 +898,6 @@ class SyncRepresentationModel(QtCore.QAbstractTableModel):
Returns:
(QModelIndex)
"""
index = None
for i in range(self.rowCount(None)):
index = self.index(i, 0)
value = self.data(index, Qt.UserRole)
@ -1000,7 +1006,7 @@ class SyncRepresentationModel(QtCore.QAbstractTableModel):
0]},
'failed_remote_tries': {
'$cond': [{'$size': '$order_remote.tries'},
{'$first': '$order_local.tries'},
{'$first': '$order_remote.tries'},
0]},
'paused_remote': {
'$cond': [{'$size': "$order_remote.paused"},
@ -1027,9 +1033,9 @@ class SyncRepresentationModel(QtCore.QAbstractTableModel):
# select last touch of file
'updated_dt_remote': {'$max': "$updated_dt_remote"},
'failed_remote': {'$sum': '$failed_remote'},
'failed_local': {'$sum': '$paused_remote'},
'failed_local_tries': {'$sum': '$failed_local_tries'},
'failed_local': {'$sum': '$failed_local'},
'failed_remote_tries': {'$sum': '$failed_remote_tries'},
'failed_local_tries': {'$sum': '$failed_local_tries'},
'paused_remote': {'$sum': '$paused_remote'},
'paused_local': {'$sum': '$paused_local'},
'updated_dt_local': {'$max': "$updated_dt_local"}
@ -1386,8 +1392,10 @@ class SyncRepresentationDetailWidget(QtWidgets.QWidget):
return
fpath = self.item.path
fpath = os.path.normpath(os.path.dirname(fpath))
project = self.table_view.model()._project
fpath = self.sync_server.get_local_file_path(project, fpath)
fpath = os.path.normpath(os.path.dirname(fpath))
if os.path.isdir(fpath):
if 'win' in sys.platform: # windows
subprocess.Popen('explorer "%s"' % fpath)
@ -1600,8 +1608,7 @@ class SyncRepresentationDetailModel(QtCore.QAbstractTableModel):
STATUS[repre.get("status", -1)],
repre.get("tries"),
'\n'.join(errors),
self.sync_server.get_local_file_path(self._project,
file.get('path'))
file.get('path')
)
self._data.append(item)
@ -1669,7 +1676,6 @@ class SyncRepresentationDetailModel(QtCore.QAbstractTableModel):
Returns:
(QModelIndex)
"""
index = None
for i in range(self.rowCount(None)):
index = self.index(i, 0)
value = self.data(index, Qt.UserRole)
@ -1777,14 +1783,15 @@ class SyncRepresentationDetailModel(QtCore.QAbstractTableModel):
"$order_local.error",
[""]]}},
'tries': {'$first': {
'$cond': [{'$size': "$order_local.tries"},
"$order_local.tries",
{'$cond': [
{'$size': "$order_remote.tries"},
"$order_remote.tries",
[]
]}
]}}
'$cond': [
{'$size': "$order_local.tries"},
"$order_local.tries",
{'$cond': [
{'$size': "$order_remote.tries"},
"$order_remote.tries",
[]
]}
]}}
}},
{"$project": self.projection},
{"$sort": self.sort},
@ -2015,6 +2022,7 @@ class SizeDelegate(QtWidgets.QStyledItemDelegate):
value /= 1024.0
return "%.1f%s%s" % (value, 'Yi', suffix)
def _convert_progress(value):
try:
progress = float(value)

View file

@ -6,6 +6,7 @@ import json
from pathlib import Path
from pype.lib import PypeLogger
from pype.api import get_app_environments_for_context
class PypeCommands:
@ -63,6 +64,14 @@ class PypeCommands:
import pyblish.api
import pyblish.util
env = get_app_environments_for_context(
os.environ["AVALON_PROJECT"],
os.environ["AVALON_ASSET"],
os.environ["AVALON_TASK"],
os.environ["AVALON_APP_NAME"]
)
os.environ.update(env)
log = Logger.get_logger()
install()

View file

@ -12,7 +12,7 @@
"optional": false,
"use_published": true,
"priority": 50,
"Chunk Size": 10,
"chunk_size": 10,
"primary_pool": "",
"secondary_pool": "",
"group": "",
@ -23,7 +23,7 @@
"optional": false,
"use_published": true,
"priority": 50,
"Chunk Size": 10000,
"chunk_size": 10000,
"primary_pool": "",
"secondary_pool": "",
"group": "",
@ -34,7 +34,7 @@
"optional": false,
"use_published": true,
"priority": 50,
"Chunk Size": 10000,
"chunk_size": 10000,
"primary_pool": "",
"secondary_pool": "",
"group": "",

View file

@ -1015,12 +1015,10 @@
"host_name": "harmony",
"environment": {
"AVALON_HARMONY_WORKFILES_ON_LAUNCH": "1",
"PYBLISH_GUI_ALWAYS_EXEC": "1",
"LIB_OPENHARMONY_PATH": "{PYPE_ROOT}/pype/vendor/OpenHarmony",
"__environment_keys__": {
"harmony": [
"AVALON_HARMONY_WORKFILES_ON_LAUNCH",
"PYBLISH_GUI_ALWAYS_EXEC",
"LIB_OPENHARMONY_PATH"
]
}

View file

@ -84,7 +84,7 @@
},
{
"type": "number",
"key": "Chunk Size",
"key": "chunk_size",
"label": "Chunk Size"
},
{
@ -138,7 +138,7 @@
},
{
"type": "number",
"key": "Chunk Size",
"key": "chunk_size",
"label": "Chunk Size"
},
{
@ -192,7 +192,7 @@
},
{
"type": "number",
"key": "Chunk Size",
"key": "chunk_size",
"label": "Chunk Size"
},
{

View file

@ -529,7 +529,7 @@ def get_default_anatomy_settings(clear_metadata=True):
return result
def get_applied_anatomy_settings(project_name):
def get_anatomy_settings(project_name, site_name=None, exclude_locals=False):
"""Project anatomy data with applied studio and project overrides."""
if not project_name:
raise ValueError(
@ -546,20 +546,15 @@ def get_applied_anatomy_settings(project_name):
clear_metadata_from_settings(result)
if not exclude_locals:
local_settings = get_local_settings()
apply_local_settings_on_anatomy_settings(
result, local_settings, project_name, site_name
)
return result
def get_anatomy_settings(project_name, site_name=None):
result = get_applied_anatomy_settings(project_name)
local_settings = get_local_settings()
apply_local_settings_on_anatomy_settings(
result, local_settings, project_name, site_name
)
return result
def get_applied_project_settings(project_name):
def get_project_settings(project_name, exclude_locals=False):
"""Project settings with applied studio and project overrides."""
if not project_name:
raise ValueError(
@ -574,16 +569,12 @@ def get_applied_project_settings(project_name):
result = apply_overrides(studio_overrides, project_overrides)
clear_metadata_from_settings(result)
return result
def get_project_settings(project_name):
result = get_applied_project_settings(project_name)
local_settings = get_local_settings()
apply_local_settings_on_project_settings(
result, local_settings, project_name
)
if not exclude_locals:
local_settings = get_local_settings()
apply_local_settings_on_project_settings(
result, local_settings, project_name
)
return result

View file

@ -106,7 +106,7 @@ class IntentModel(QtGui.QStandardItemModel):
intents_preset = (
get_system_settings()
.get("modules", {})
.get("Ftrack", {})
.get("ftrack", {})
.get("intent", {})
)

@ -1 +1 @@
Subproject commit 9e6b0d02e5a147cbafdcaeee7d786d4767e14c94
Subproject commit 8d3364dc8ae73a33726ba3279ff75adff73c6239

@ -1 +0,0 @@
Subproject commit 7adabe8f0e6858bfe5b6bf0b39bd428ed72d0452

View file

@ -116,7 +116,8 @@ from igniter.tools import get_pype_path_from_db # noqa
from igniter.bootstrap_repos import PypeVersion # noqa: E402
bootstrap = BootstrapRepos()
silent_commands = ["run", "igniter", "standalonepublisher"]
silent_commands = ["run", "igniter", "standalonepublisher",
"extractenvironments"]
def set_pype_global_environments() -> None:
@ -129,7 +130,10 @@ def set_pype_global_environments() -> None:
# TODO Global environments will be stored in "general" settings so loading
# will be modified and can be done in igniter.
env = acre.merge(all_env["global"], dict(os.environ))
env = acre.merge(
acre.parse(all_env["global"]),
dict(os.environ)
)
os.environ.clear()
os.environ.update(env)
@ -528,8 +532,8 @@ def boot():
from igniter.terminal_splash import play_animation
# don't play for silenced commands
if all(item not in sys.argv for item in silent_commands):
play_animation()
# if all(item not in sys.argv for item in silent_commands):
# play_animation()
# ------------------------------------------------------------------------
# Process arguments
@ -607,9 +611,6 @@ def boot():
except KeyError:
pass
from pype import cli
from pype.lib import terminal as t
from pype.version import __version__
print(">>> loading environments ...")
# Avalon environments must be set before avalon module is imported
print(" - for Avalon ...")
@ -619,6 +620,10 @@ def boot():
print(" - for modules ...")
set_modules_environments()
from pype import cli
from pype.lib import terminal as t
from pype.version import __version__
assert version_path, "Version path not defined."
info = get_info()
info.insert(0, f">>> Using Pype from [ {version_path} ]")

View file

@ -0,0 +1,37 @@
[State]
Type=Enum
Items=Global Enabled;Opt-In;Disabled
Category=Options
CategoryOrder=0
CategoryIndex=0
Label=State
Default=Global Enabled
Description=How this event plug-in should respond to events. If Global, all jobs and slaves will trigger the events for this plugin. If Opt-In, jobs and slaves can choose to trigger the events for this plugin. If Disabled, no events are triggered for this plugin.
[PythonSearchPaths]
Type=MultiLineMultiFolder
Label=Additional Python Search Paths
Category=Options
CategoryOrder=0
CategoryIndex=1
Default=
Description=The list of paths to append to the PYTHONPATH environment variable. This allows the Python job to find custom modules in non-standard locations.
[LoggingLevel]
Type=Enum
Label=Logging Level
Category=Options
CategoryOrder=0
CategoryIndex=2
Items=DEBUG;INFO;WARNING;ERROR
Default=DEBUG
Description=Logging level where printing will start.
[PypeExecutable]
Type=MultiLineMultiFolder
Label=Path to Pype executable dir
Category=Job Plugins
CategoryOrder=1
CategoryIndex=1
Default=
Description=

View file

@ -0,0 +1,190 @@
import Deadline.Events
import Deadline.Scripting
def GetDeadlineEventListener():
    """Deadline entry point: return an instance of this event listener."""
    return PypeEventListener()
def CleanupDeadlineEventListener(eventListener):
    """Deadline entry point: release callbacks when listener is unloaded."""
    eventListener.Cleanup()
class PypeEventListener(Deadline.Events.DeadlineEventListener):
    """
    Called on every Deadline plugin event, used for injecting Pype
    environment variables into rendering process.

    Expects that job already contains env vars:
        AVALON_PROJECT
        AVALON_ASSET
        AVALON_TASK
        AVALON_APP_NAME
    Without these only global environment would be pulled from Pype

    Configure 'Path to Pype executable dir' in Deadline's
    'Tools > Configure Events > pype '
    Only directory path is needed.
    """

    def __init__(self):
        # Subscribe to all Deadline events; each handler below is a thin
        # wrapper that mostly re-stamps the Pype executable path and/or
        # mirrors the job state to ftrack (currently stubbed).
        self.OnJobSubmittedCallback += self.OnJobSubmitted
        self.OnJobStartedCallback += self.OnJobStarted
        self.OnJobFinishedCallback += self.OnJobFinished
        self.OnJobRequeuedCallback += self.OnJobRequeued
        self.OnJobFailedCallback += self.OnJobFailed
        self.OnJobSuspendedCallback += self.OnJobSuspended
        self.OnJobResumedCallback += self.OnJobResumed
        self.OnJobPendedCallback += self.OnJobPended
        self.OnJobReleasedCallback += self.OnJobReleased
        self.OnJobDeletedCallback += self.OnJobDeleted
        self.OnJobErrorCallback += self.OnJobError
        self.OnJobPurgedCallback += self.OnJobPurged

        self.OnHouseCleaningCallback += self.OnHouseCleaning
        self.OnRepositoryRepairCallback += self.OnRepositoryRepair

        self.OnSlaveStartedCallback += self.OnSlaveStarted
        self.OnSlaveStoppedCallback += self.OnSlaveStopped
        self.OnSlaveIdleCallback += self.OnSlaveIdle
        self.OnSlaveRenderingCallback += self.OnSlaveRendering
        self.OnSlaveStartingJobCallback += self.OnSlaveStartingJob
        self.OnSlaveStalledCallback += self.OnSlaveStalled

        self.OnIdleShutdownCallback += self.OnIdleShutdown
        self.OnMachineStartupCallback += self.OnMachineStartup
        self.OnThermalShutdownCallback += self.OnThermalShutdown
        self.OnMachineRestartCallback += self.OnMachineRestart

    def Cleanup(self):
        # Deadline requires explicit deletion of every callback reference
        # to break the .NET event/handler cycle on unload.
        del self.OnJobSubmittedCallback
        del self.OnJobStartedCallback
        del self.OnJobFinishedCallback
        del self.OnJobRequeuedCallback
        del self.OnJobFailedCallback
        del self.OnJobSuspendedCallback
        del self.OnJobResumedCallback
        del self.OnJobPendedCallback
        del self.OnJobReleasedCallback
        del self.OnJobDeletedCallback
        del self.OnJobErrorCallback
        del self.OnJobPurgedCallback

        del self.OnHouseCleaningCallback
        del self.OnRepositoryRepairCallback

        del self.OnSlaveStartedCallback
        del self.OnSlaveStoppedCallback
        del self.OnSlaveIdleCallback
        del self.OnSlaveRenderingCallback
        del self.OnSlaveStartingJobCallback
        del self.OnSlaveStalledCallback

        del self.OnIdleShutdownCallback
        del self.OnMachineStartupCallback
        del self.OnThermalShutdownCallback
        del self.OnMachineRestartCallback

    def set_pype_executable_path(self, job):
        """
        Sets configurable PypeExecutable value to job extra infos.

        GlobalJobPreLoad takes this value, pulls env vars for each task
        from specific worker itself. GlobalJobPreLoad is not easily
        configured, so we are configuring Event itself.
        """
        pype_execs = self.GetConfigEntryWithDefault("PypeExecutable", "")
        job.SetJobExtraInfoKeyValue("pype_executables", pype_execs)
        # Persist the extra-info change back into the repository.
        Deadline.Scripting.RepositoryUtils.SaveJob(job)

    def updateFtrackStatus(self, job, statusName, createIfMissing=False):
        """Updates version status on ftrack"""
        # NOTE(review): stub — no ftrack update implemented yet.
        pass

    def OnJobSubmitted(self, job):
        # self.LogInfo("OnJobSubmitted LOGGING")
        # for 1st time submit
        self.set_pype_executable_path(job)
        self.updateFtrackStatus(job, "Render Queued")

    def OnJobStarted(self, job):
        # self.LogInfo("OnJobStarted")
        self.set_pype_executable_path(job)
        self.updateFtrackStatus(job, "Rendering")

    def OnJobFinished(self, job):
        # self.LogInfo("OnJobFinished")
        self.updateFtrackStatus(job, "Artist Review")

    def OnJobRequeued(self, job):
        # self.LogInfo("OnJobRequeued LOGGING")
        self.set_pype_executable_path(job)

    def OnJobFailed(self, job):
        pass

    def OnJobSuspended(self, job):
        # self.LogInfo("OnJobSuspended LOGGING")
        self.updateFtrackStatus(job, "Render Queued")

    def OnJobResumed(self, job):
        # self.LogInfo("OnJobResumed LOGGING")
        self.set_pype_executable_path(job)
        self.updateFtrackStatus(job, "Rendering")

    def OnJobPended(self, job):
        # self.LogInfo("OnJobPended LOGGING")
        pass

    def OnJobReleased(self, job):
        pass

    def OnJobDeleted(self, job):
        pass

    def OnJobError(self, job, task, report):
        # self.LogInfo("OnJobError LOGGING")
        pass

    def OnJobPurged(self, job):
        pass

    def OnHouseCleaning(self):
        pass

    def OnRepositoryRepair(self, job, *args):
        pass

    def OnSlaveStarted(self, job):
        # self.LogInfo("OnSlaveStarted LOGGING")
        pass

    def OnSlaveStopped(self, job):
        pass

    def OnSlaveIdle(self, job):
        pass

    def OnSlaveRendering(self, host_name, job):
        # self.LogInfo("OnSlaveRendering LOGGING")
        pass

    def OnSlaveStartingJob(self, host_name, job):
        # self.LogInfo("OnSlaveStartingJob LOGGING")
        self.set_pype_executable_path(job)

    def OnSlaveStalled(self, job):
        pass

    def OnIdleShutdown(self, job):
        pass

    def OnMachineStartup(self, job):
        pass

    def OnThermalShutdown(self, job):
        pass

    def OnMachineRestart(self, job):
        pass

View file

@ -0,0 +1,92 @@
# -*- coding: utf-8 -*-
import os
import tempfile
import time
import subprocess
import json
from Deadline.Scripting import RepositoryUtils, FileUtils
def inject_pype_environment(deadlinePlugin):
    """
    Inject Pype context environment into the rendering process.

    Runs the Pype executable (taken from job extra info) with the
    'extractenvironments' command, reads the produced JSON file and sets
    every key/value on the Deadline render process environment.

    Args:
        deadlinePlugin: Deadline plugin instance for the current task.

    Raises:
        RuntimeError: when the job is misconfigured, the Pype executable
            cannot be found, AVALON_* context vars are missing, or the
            extraction subprocess fails. The job is also failed in
            Deadline before re-raising.
    """
    job = deadlinePlugin.GetJob()
    job = RepositoryUtils.GetJob(job.JobId, True)  # invalidates cache

    pype_render_job = job.GetJobEnvironmentKeyValue('PYPE_RENDER_JOB') \
        or '0'
    pype_publish_job = job.GetJobEnvironmentKeyValue('PYPE_PUBLISH_JOB') \
        or '0'

    if pype_publish_job == '1' and pype_render_job == '1':
        raise RuntimeError("Misconfiguration. Job couldn't be both " +
                           "render and publish.")

    if pype_publish_job == '1':
        print("Publish job, skipping inject.")
        return
    elif pype_render_job == '0':
        # not a Pype triggered job, nothing to inject
        return

    print("inject_pype_environment start")
    try:
        exe_list = job.GetJobExtraInfoKeyValue("pype_executables")
        pype_app = FileUtils.SearchFileList(exe_list)
        if pype_app == "":
            raise RuntimeError(
                "Pype executable was not found " +
                "in the semicolon separated list \"" + exe_list + "\". " +
                "The path to the render executable can be configured " +
                "from the Plugin Configuration in the Deadline Monitor.")

        # tempfile.TemporaryFile cannot be used because of locking;
        # use a timestamped folder instead and remove it when done.
        export_dir = os.path.join(tempfile.gettempdir(),
                                  time.strftime('%Y%m%d%H%M%S'))
        # ensure the folder exists, otherwise the subprocess cannot
        # write the export file (py2-compatible, no exist_ok)
        if not os.path.exists(export_dir):
            os.makedirs(export_dir)
        export_url = os.path.join(export_dir, 'env.json')
        print("export_url {}".format(export_url))

        args = [
            pype_app,
            'extractenvironments',
            export_url
        ]

        add_args = {}
        add_args['project'] = \
            job.GetJobEnvironmentKeyValue('AVALON_PROJECT')
        add_args['asset'] = job.GetJobEnvironmentKeyValue('AVALON_ASSET')
        add_args['task'] = job.GetJobEnvironmentKeyValue('AVALON_TASK')
        add_args['app'] = job.GetJobEnvironmentKeyValue('AVALON_APP_NAME')

        if all(add_args.values()):
            for key, value in add_args.items():
                args.append("--{}".format(key))
                args.append(value)
        else:
            msg = "Required env vars: AVALON_PROJECT, AVALON_ASSET, " + \
                  "AVALON_TASK, AVALON_APP_NAME"
            raise RuntimeError(msg)

        print("args::{}".format(args))

        # shell=False: with a list argument, shell=True executes only
        # args[0] on POSIX workers and silently drops the arguments.
        exit_code = subprocess.call(args, shell=False)
        if exit_code != 0:
            raise RuntimeError(
                "Environment extraction failed, check worker's log")

        with open(export_url) as fp:
            contents = json.load(fp)

        for key, value in contents.items():
            deadlinePlugin.SetEnvironmentVariable(key, value)

        # clean up the export file and its timestamped folder
        os.remove(export_url)
        os.rmdir(export_dir)
        print("inject_pype_environment end")
    except Exception:
        import traceback

        print(traceback.format_exc())
        print("inject_pype_environment failed")
        RepositoryUtils.FailJob(job)
        raise
def __main__(deadlinePlugin):
    """Deadline GlobalJobPreLoad entry point, called by Deadline itself."""
    inject_pype_environment(deadlinePlugin)

Binary file not shown.

After

Width:  |  Height:  |  Size: 107 KiB

View file

@ -0,0 +1,41 @@
[ScriptFile]
Type=filename
Label=Script File
Category=Python Options
CategoryOrder=0
Index=0
Description=The script file to be executed.
Required=false
DisableIfBlank=true
[Arguments]
Type=string
Label=Arguments
Category=Python Options
CategoryOrder=0
Index=1
Description=The arguments to pass to the script. If no arguments are required, leave this blank.
Required=false
DisableIfBlank=true
[Version]
Type=enum
Values=3.0
Label=Version
Category=Python Options
CategoryOrder=0
Index=2
Description=The version of Python to use.
Required=false
DisableIfBlank=true
[SingleFramesOnly]
Type=boolean
Label=Single Frames Only
Category=Job Options
CategoryOrder=1
Index=0
Description=If enabled, the plugin will only render one frame at a time even if a single task contains a chunk of frames.
Required=true
DisableIfBlank=true
Default=false

View file

@ -0,0 +1,27 @@
[About]
Type=label
Label=About
Category=About Plugin
CategoryOrder=-1
Index=0
Default=Pype Plugin for Deadline
Description=Not configurable
[ConcurrentTasks]
Type=label
Label=ConcurrentTasks
Category=About Plugin
CategoryOrder=-1
Index=0
Default=True
Description=Not configurable
[Pype_Executable_3_0]
Type=multilinemultifilename
Label=Pype 3.0 Executable
Category=Pype Executables
CategoryOrder=0
Index=0
Default=
Description=The path to the Pype executable. Enter alternative paths on separate lines.

View file

@ -0,0 +1,116 @@
from System.IO import Path
from System.Text.RegularExpressions import Regex
from Deadline.Plugins import PluginType, DeadlinePlugin
from Deadline.Scripting import StringUtils, FileUtils, RepositoryUtils
import re
######################################################################
# This is the function that Deadline calls to get an instance of the
# main DeadlinePlugin class.
######################################################################
def GetDeadlinePlugin():
    """Deadline entry point: return an instance of the main plugin class."""
    return PypeDeadlinePlugin()
def CleanupDeadlinePlugin(deadlinePlugin):
    """Deadline entry point: release callbacks when the plugin is unloaded."""
    deadlinePlugin.Cleanup()
class PypeDeadlinePlugin(DeadlinePlugin):
    """
    Standalone plugin for publishing from Pype.

    Calls Pype executable 'pype_console' from first correctly found
    file based on plugin configuration. Uses 'publish' command and passes
    path to metadata json file, which contains all needed information
    for publish process.
    """

    def __init__(self):
        # Wire Deadline's plugin lifecycle callbacks to this instance.
        self.InitializeProcessCallback += self.InitializeProcess
        self.RenderExecutableCallback += self.RenderExecutable
        self.RenderArgumentCallback += self.RenderArgument

    def Cleanup(self):
        # Break the .NET callback reference cycles on unload.
        for stdoutHandler in self.StdoutHandlers:
            del stdoutHandler.HandleCallback

        del self.InitializeProcessCallback
        del self.RenderExecutableCallback
        del self.RenderArgumentCallback

    def InitializeProcess(self):
        """Configure process type, stdout handling and progress parsing."""
        self.PluginType = PluginType.Simple
        self.StdoutHandling = True
        self.SingleFramesOnly = self.GetBooleanPluginInfoEntryWithDefault(
            "SingleFramesOnly", False)
        self.LogInfo("Single Frames Only: %s" % self.SingleFramesOnly)

        # raw string: plain "\d" is an invalid escape sequence
        # (DeprecationWarning today, SyntaxError in future Python)
        self.AddStdoutHandlerCallback(
            r".*Progress: (\d+)%.*").HandleCallback += self.HandleProgress

    def RenderExecutable(self):
        """Return the Pype executable path for the configured version.

        Fails the render when no existing file is found in the
        semicolon-separated candidate list from plugin configuration.
        """
        version = self.GetPluginInfoEntry("Version")

        exeList = self.GetConfigEntry(
            "Pype_Executable_" + version.replace(".", "_"))
        exe = FileUtils.SearchFileList(exeList)

        if exe == "":
            self.FailRender(
                "Pype " + version + " executable was not found " +
                "in the semicolon separated list \"" + exeList + "\". " +
                "The path to the render executable can be configured " +
                "from the Plugin Configuration in the Deadline Monitor.")

        return exe

    def RenderArgument(self):
        """Expand placeholder tokens in the configured argument string.

        Supported tokens: <STARTFRAME>, <ENDFRAME> (plus padded variants
        <STARTFRAME%N>, <ENDFRAME%N>), <QUOTE> and <AUXFILEi> for the
        job's auxiliary files.
        """
        arguments = str(self.GetPluginInfoEntryWithDefault("Arguments", ""))
        arguments = RepositoryUtils.CheckPathMapping(arguments)

        arguments = re.sub(r"<(?i)STARTFRAME>", str(self.GetStartFrame()),
                           arguments)
        arguments = re.sub(r"<(?i)ENDFRAME>", str(self.GetEndFrame()),
                           arguments)
        arguments = re.sub(r"<(?i)QUOTE>", "\"", arguments)

        arguments = self.ReplacePaddedFrame(arguments,
                                            "<(?i)STARTFRAME%([0-9]+)>",
                                            self.GetStartFrame())
        arguments = self.ReplacePaddedFrame(arguments,
                                            "<(?i)ENDFRAME%([0-9]+)>",
                                            self.GetEndFrame())

        count = 0
        for filename in self.GetAuxiliaryFilenames():
            localAuxFile = Path.Combine(self.GetJobsDataDirectory(), filename)
            arguments = re.sub(r"<(?i)AUXFILE" + str(count) + r">",
                               localAuxFile.replace("\\", "/"), arguments)
            count += 1

        return arguments

    def ReplacePaddedFrame(self, arguments, pattern, frame):
        """Replace every <...%N> token with 'frame' zero-padded to N digits.

        Uses the .NET Regex class so the (?i) inline flag position matches
        Deadline's other plugins.
        """
        frameRegex = Regex(pattern)
        while True:
            frameMatch = frameRegex.Match(arguments)
            if frameMatch.Success:
                paddingSize = int(frameMatch.Groups[1].Value)
                if paddingSize > 0:
                    padding = StringUtils.ToZeroPaddedString(frame,
                                                            paddingSize,
                                                            False)
                else:
                    padding = str(frame)
                arguments = arguments.replace(frameMatch.Groups[0].Value,
                                              padding)
            else:
                break

        return arguments

    def HandleProgress(self):
        """Push the percentage captured from stdout to Deadline progress."""
        progress = float(self.GetRegexMatch(1))
        self.SetProgress(progress)

Binary file not shown.

After

Width:  |  Height:  |  Size: 108 KiB

View file

@ -0,0 +1,35 @@
[OIIOToolPath]
Type=filename
Label=OIIO Tool location
Category=OIIO
Index=0
Description=OIIO Tool executable to use.
Required=false
DisableIfBlank=true
[OutputFile]
Type=filenamesave
Label=Output File
Category=Output
Index=0
Description=The assembled output image file as it exists on the network
Required=false
DisableIfBlank=true
[CleanupTiles]
Type=boolean
Category=Options
Index=0
Label=Cleanup Tiles
Required=false
DisableIfBlank=true
Description=If enabled, the Pype Tile Assembler will cleanup all tiles after assembly.
[Renderer]
Type=string
Label=Renderer
Category=Renderer Info
Index=0
Description=Renderer name
Required=false
DisableIfBlank=true

View file

@ -0,0 +1,17 @@
[About]
Type=label
Label=About
Category=About Plugin
CategoryOrder=-1
Index=0
Default=Pype Tile Assembler Plugin for Deadline
Description=Not configurable
[OIIOTool_RenderExecutable]
Type=multilinemultifilename
Label=OIIO Tool Executable
Category=Render Executables
CategoryOrder=0
Default=C:\Program Files\OIIO\bin\oiiotool.exe;/usr/bin/oiiotool
Description=The path to the Open Image IO Tool executable file used for rendering. Enter alternative paths on separate lines.
W

View file

@ -0,0 +1,372 @@
# -*- coding: utf-8 -*-
"""Tile Assembler Plugin using Open Image IO tool.
Todo:
Currently we support only EXRs with their data window set.
"""
import os
import subprocess
from xml.dom import minidom
from System.IO import Path
from Deadline.Plugins import DeadlinePlugin
from Deadline.Scripting import (
FileUtils, RepositoryUtils, SystemUtils)
# ImageSpec XML elements (from "oiiotool --info:format=xml" output) whose
# text value is converted to int in 'info_about_input'.
INT_KEYS = {
    "x", "y", "height", "width", "full_x", "full_y",
    "full_width", "full_height", "full_depth", "full_z",
    "tile_width", "tile_height", "tile_depth", "deep", "depth",
    "nchannels", "z_channel", "alpha_channel", "subimages"
}

# ImageSpec XML elements whose child nodes are collected into a list.
LIST_KEYS = {
    "channelnames"
}
def GetDeadlinePlugin():  # noqa: N802
    """Deadline entry point: return an instance of the main plugin class."""
    return PypeTileAssembler()
def CleanupDeadlinePlugin(deadlinePlugin):  # noqa: N802, N803
    """Deadline entry point: release callbacks held by the plugin instance."""
    deadlinePlugin.cleanup()
class PypeTileAssembler(DeadlinePlugin):
    """Deadline plugin for assembling tiles using OIIO."""

    def __init__(self):
        """Register Deadline lifecycle callbacks."""
        self.InitializeProcessCallback += self.initialize_process
        self.RenderExecutableCallback += self.render_executable
        self.RenderArgumentCallback += self.render_argument
        self.PreRenderTasksCallback += self.pre_render_tasks
        self.PostRenderTasksCallback += self.post_render_tasks

    def cleanup(self):
        """Detach all callbacks so the plugin instance can be released."""
        for stdoutHandler in self.StdoutHandlers:
            del stdoutHandler.HandleCallback

        del self.InitializeProcessCallback
        del self.RenderExecutableCallback
        del self.RenderArgumentCallback
        del self.PreRenderTasksCallback
        del self.PostRenderTasksCallback

    def initialize_process(self):
        """Initialization."""
        self.SingleFramesOnly = True
        self.StdoutHandling = True
        # Renderer name decides tile Y-coordinate orientation in
        # 'tile_oiio_args' (V-Ray coordinates are used as-is).
        self.renderer = self.GetPluginInfoEntryWithDefault(
            "Renderer", "undefined")
        self.AddStdoutHandlerCallback(
            ".*Error.*").HandleCallback += self.handle_stdout_error

    def render_executable(self):
        """Get render executable name.

        Get paths from plugin configuration, find executable and return it.

        Returns:
            (str): Render executable.

        Raises:
            FailRender when no executable from the configured list exists.
        """
        oiiotool_exe_list = self.GetConfigEntry("OIIOTool_RenderExecutable")
        oiiotool_exe = FileUtils.SearchFileList(oiiotool_exe_list)
        if oiiotool_exe == "":
            self.FailRender(("No file found in the semicolon separated "
                             "list \"{}\". The path to the render executable "
                             "can be configured from the Plugin Configuration "
                             "in the Deadline Monitor.").format(
                                 oiiotool_exe_list))
        return oiiotool_exe

    def render_argument(self):
        """Generate command line arguments for render executable.

        Returns:
            (str): arguments to add to render executable.
        """
        # Read tile config file. This file is in compatible format with
        # Draft Tile Assembler
        data = {}
        # "rU" mode was removed in Python 3.11; plain text mode already
        # uses universal newline handling.
        with open(self.config_file, "r") as f:
            for text in f:
                # Parsing key-value pair and removing white-space
                # around the entries
                info = [x.strip() for x in text.split("=", 1)]

                if len(info) > 1:
                    try:
                        data[str(info[0])] = info[1]
                    except Exception as e:
                        # should never be called
                        self.FailRender(
                            "Cannot parse config file: {}".format(e))

        # Get output file. We support only EXRs now.
        output_file = data["ImageFileName"]
        output_file = RepositoryUtils.CheckPathMapping(output_file)
        output_file = self.process_path(output_file)
        """
        _, ext = os.path.splitext(output_file)
        if "exr" not in ext:
            self.FailRender(
                "[{}] Only EXR format is supported for now.".format(ext))
        """
        tile_info = []
        for tile in range(int(data["TileCount"])):
            tile_info.append({
                "filepath": data["Tile{}".format(tile)],
                "pos_x": int(data["Tile{}X".format(tile)]),
                "pos_y": int(data["Tile{}Y".format(tile)]),
                "height": int(data["Tile{}Height".format(tile)]),
                "width": int(data["Tile{}Width".format(tile)])
            })

        # FFMpeg doesn't support tile coordinates at the moment.
        # arguments = self.tile_completer_ffmpeg_args(
        #     int(data["ImageWidth"]), int(data["ImageHeight"]),
        #     tile_info, output_file)

        arguments = self.tile_oiio_args(
            int(data["ImageWidth"]), int(data["ImageHeight"]),
            tile_info, output_file)
        self.LogInfo(
            "Using arguments: {}".format(" ".join(arguments)))
        # Remember tiles for optional cleanup in 'post_render_tasks'.
        self.tiles = tile_info
        return " ".join(arguments)

    def process_path(self, filepath):
        """Handle slashes in file paths."""
        if SystemUtils.IsRunningOnWindows():
            filepath = filepath.replace("/", "\\")
            # Restore the doubled leading backslash of an UNC path.
            if filepath.startswith("\\") and not filepath.startswith("\\\\"):
                filepath = "\\" + filepath
        else:
            filepath = filepath.replace("\\", "/")
        return filepath

    def pre_render_tasks(self):
        """Load config file and do remapping."""
        self.LogInfo("Pype Tile Assembler starting...")
        scene_filename = self.GetDataFilename()

        temp_scene_directory = self.CreateTempDirectory(
            "thread" + str(self.GetThreadNumber()))
        temp_scene_filename = Path.GetFileName(scene_filename)
        self.config_file = Path.Combine(
            temp_scene_directory, temp_scene_filename)

        # Remap paths in the config to this platform's separator style.
        if SystemUtils.IsRunningOnWindows():
            RepositoryUtils.CheckPathMappingInFileAndReplaceSeparator(
                scene_filename, self.config_file, "/", "\\")
        else:
            RepositoryUtils.CheckPathMappingInFileAndReplaceSeparator(
                scene_filename, self.config_file, "\\", "/")
            # NOTE(review): re-applies the file's current mode — looks
            # like a no-op; confirm whether a permission change was meant.
            os.chmod(self.config_file, os.stat(self.config_file).st_mode)

    def post_render_tasks(self):
        """Cleanup tiles if required."""
        if self.GetBooleanPluginInfoEntryWithDefault("CleanupTiles", False):
            self.LogInfo("Cleaning up Tiles...")
            for tile in self.tiles:
                try:
                    self.LogInfo("Deleting: {}".format(tile["filepath"]))
                    os.remove(tile["filepath"])
                # By this time we would have errored out
                # if error on missing was enabled
                except KeyError:
                    pass
                except OSError:
                    self.LogInfo("Failed to delete: {}".format(
                        tile["filepath"]))

        self.LogInfo("Pype Tile Assembler Job finished.")

    def handle_stdout_error(self):
        """Handle errors in stdout."""
        self.FailRender(self.GetRegexMatch(0))

    def tile_oiio_args(
            self, output_width, output_height, tile_info, output_path):
        """Generate oiio tool arguments for tile assembly.

        Args:
            output_width (int): Width of output image.
            output_height (int): Height of output image.
            tile_info (list): List of tile items, each item must be
                dictionary with `filepath`, `pos_x` and `pos_y` keys
                representing path to file and x, y coordinates on output
                image where top-left point of tile item should start.
            output_path (str): Path to file where should be output stored.

        Returns:
            (list): oiio tools arguments.
        """
        args = []

        # Create new image with output resolution, and with same type and
        # channels as input
        first_tile_path = tile_info[0]["filepath"]
        first_tile_info = self.info_about_input(first_tile_path)
        create_arg_template = "--create{} {}x{} {}"

        image_type = ""
        image_format = first_tile_info.get("format")
        if image_format:
            image_type = ":type={}".format(image_format)

        create_arg = create_arg_template.format(
            image_type, output_width,
            output_height, first_tile_info["nchannels"]
        )
        args.append(create_arg)

        for tile in tile_info:
            path = tile["filepath"]
            pos_x = tile["pos_x"]
            tile_height = self.info_about_input(path)["height"]
            # V-Ray tiles are top-down already; other renderers report
            # bottom-up coordinates, so flip the Y position.
            if self.renderer == "vray":
                pos_y = tile["pos_y"]
            else:
                pos_y = output_height - tile["pos_y"] - tile_height

            # Add input path and make sure inputs origin is 0, 0
            args.append(path)
            args.append("--origin +0+0")
            # Swap to have input as foreground
            args.append("--swap")
            # Paste foreground to background
            args.append("--paste +{}+{}".format(pos_x, pos_y))

        args.append("-o")
        args.append(output_path)

        return args

    def tile_completer_ffmpeg_args(
            self, output_width, output_height, tiles_info, output_path):
        """Generate ffmpeg arguments for tile assembly.

        Expected inputs are tiled images.

        Args:
            output_width (int): Width of output image.
            output_height (int): Height of output image.
            tiles_info (list): List of tile items, each item must be
                dictionary with `filepath`, `pos_x` and `pos_y` keys
                representing path to file and x, y coordinates on output
                image where top-left point of tile item should start.
            output_path (str): Path to file where should be output stored.

        Returns:
            (list): ffmpeg arguments.
        """
        previous_name = "base"
        ffmpeg_args = []
        filter_complex_strs = []

        filter_complex_strs.append("nullsrc=size={}x{}[{}]".format(
            output_width, output_height, previous_name
        ))

        new_tiles_info = {}
        for idx, tile_info in enumerate(tiles_info):
            # Add input and store input index
            filepath = tile_info["filepath"]
            ffmpeg_args.append("-i \"{}\"".format(filepath.replace("\\", "/")))

            # Prepare initial filter complex arguments
            index_name = "input{}".format(idx)
            filter_complex_strs.append(
                "[{}]setpts=PTS-STARTPTS[{}]".format(idx, index_name)
            )
            tile_info["index"] = idx
            new_tiles_info[index_name] = tile_info

        # Set frames to 1
        ffmpeg_args.append("-frames 1")

        # Concatenation filter complex arguments
        global_index = 1
        total_index = len(new_tiles_info)
        for index_name, tile_info in new_tiles_info.items():
            item_str = (
                "[{previous_name}][{index_name}]overlay={pos_x}:{pos_y}"
            ).format(
                previous_name=previous_name,
                index_name=index_name,
                pos_x=tile_info["pos_x"],
                pos_y=tile_info["pos_y"]
            )
            new_previous = "tmp{}".format(global_index)
            # The very last overlay has no output label.
            if global_index != total_index:
                item_str += "[{}]".format(new_previous)
            filter_complex_strs.append(item_str)
            previous_name = new_previous
            global_index += 1

        joined_parts = ";".join(filter_complex_strs)
        filter_complex_str = "-filter_complex \"{}\"".format(joined_parts)

        ffmpeg_args.append(filter_complex_str)
        ffmpeg_args.append("-y")
        ffmpeg_args.append("\"{}\"".format(output_path))

        return ffmpeg_args

    def info_about_input(self, input_path):
        """Query oiiotool for image information parsed from its XML output.

        Args:
            input_path (str): path to image file.

        Returns:
            (dict): ImageSpec attributes; empty when no ImageSpec found.
        """
        # Pass an argument list with shell=False so that executable or
        # file paths containing spaces are not split apart (the previous
        # " ".join(...) + shell=True invocation broke on such paths).
        args = [self.render_executable(), "--info:format=xml", input_path]
        popen = subprocess.Popen(
            args,
            stdout=subprocess.PIPE
        )
        popen_output = popen.communicate()[0].replace(b"\r\n", b"")
        xmldoc = minidom.parseString(popen_output)
        image_spec = None
        for main_child in xmldoc.childNodes:
            if main_child.nodeName.lower() == "imagespec":
                image_spec = main_child
                break

        info = {}
        if not image_spec:
            return info

        def child_check(node):
            # Every leaf node is expected to hold exactly one text child.
            if len(node.childNodes) != 1:
                self.FailRender((
                    "Implementation BUG. Node {} has more children than 1"
                ).format(node.nodeName))

        for child in image_spec.childNodes:
            if child.nodeName in LIST_KEYS:
                values = []
                for node in child.childNodes:
                    child_check(node)
                    values.append(node.childNodes[0].nodeValue)
                info[child.nodeName] = values

            elif child.nodeName in INT_KEYS:
                child_check(child)
                info[child.nodeName] = int(child.childNodes[0].nodeValue)

            else:
                child_check(child)
                info[child.nodeName] = child.childNodes[0].nodeValue
        return info

3
vendor/deadline/readme.md vendored Normal file
View file

@ -0,0 +1,3 @@
## Pype Deadline repository overlay
This directory is an overlay for a Deadline repository. That means you can copy the whole hierarchy into a Deadline repository and it should work.