mirror of https://github.com/ynput/ayon-core.git
synced 2025-12-24 21:04:40 +01:00

removed deadline addon

This commit is contained in:
parent ea3c559143
commit 2beac025f7

49 changed files with 0 additions and 9466 deletions
@@ -1,8 +0,0 @@
from .addon import DeadlineAddon
from .version import __version__


__all__ = (
    "DeadlineAddon",
    "__version__"
)
@@ -1,617 +0,0 @@
# -*- coding: utf-8 -*-
"""Abstract package for submitting jobs to Deadline.

It provides the Deadline JobInfo data class.

"""
import json.decoder
import os
from abc import abstractmethod
import platform
import getpass
from functools import partial
from collections import OrderedDict

import six
import attr
import requests

import pyblish.api
from ayon_core.pipeline.publish import (
    AbstractMetaInstancePlugin,
    KnownPublishError,
    AYONPyblishPluginMixin
)
from ayon_core.pipeline.publish.lib import (
    replace_with_published_scene_path
)

JSONDecodeError = getattr(json.decoder, "JSONDecodeError", ValueError)


def requests_post(*args, **kwargs):
    """Wrap the requests post method.

    Disables SSL certificate validation if the ``verify`` kwarg is set
    to False. This is useful when the Deadline server is running with
    self-signed certificates and its certificate is not added to trusted
    certificates on client machines.

    Warning:
        Disabling SSL certificate validation defeats one line of defense
        SSL provides, and it is not recommended.

    """
    auth = kwargs.get("auth")
    if auth:
        kwargs["auth"] = tuple(auth)  # explicit cast to tuple
    # add 10sec timeout before bailing out
    kwargs['timeout'] = 10
    return requests.post(*args, **kwargs)


def requests_get(*args, **kwargs):
    """Wrap the requests get method.

    Disables SSL certificate validation if the ``verify`` kwarg is set
    to False. This is useful when the Deadline server is running with
    self-signed certificates and its certificate is not added to trusted
    certificates on client machines.

    Warning:
        Disabling SSL certificate validation defeats one line of defense
        SSL provides, and it is not recommended.

    """
    auth = kwargs.get("auth")
    if auth:
        kwargs["auth"] = tuple(auth)
    # add 10sec timeout before bailing out
    kwargs['timeout'] = 10
    return requests.get(*args, **kwargs)


class DeadlineKeyValueVar(dict):
    """Serializes dictionary key values as "{key}={value}" like Deadline
    uses for EnvironmentKeyValue.

    As an example:
        EnvironmentKeyValue0="A_KEY=VALUE_A"
        EnvironmentKeyValue1="OTHER_KEY=VALUE_B"

    The keys are serialized in alphabetical order (sorted).

    Example:
        >>> var = DeadlineKeyValueVar("EnvironmentKeyValue")
        >>> var["my_var"] = "hello"
        >>> var["my_other_var"] = "hello2"
        >>> var.serialize()
        {'EnvironmentKeyValue0': 'my_other_var=hello2', 'EnvironmentKeyValue1': 'my_var=hello'}

    """
    def __init__(self, key):
        super(DeadlineKeyValueVar, self).__init__()
        self.__key = key

    def serialize(self):
        key = self.__key

        # Allow custom location for index in serialized string
        if "{}" not in key:
            key = key + "{}"

        return {
            key.format(index): "{}={}".format(var_key, var_value)
            for index, (var_key, var_value) in enumerate(sorted(self.items()))
        }


class DeadlineIndexedVar(dict):
    """Allows setting and querying values by integer indices:
        Query: var[1] or var.get(1)
        Set: var[1] = "my_value"
        Append: var += "value"

    Note: Iterating the instance is not guaranteed to follow the order of
        the indices. To do so, iterate with `sorted()`.

    """
    def __init__(self, key):
        super(DeadlineIndexedVar, self).__init__()
        self.__key = key

    def serialize(self):
        key = self.__key

        # Allow custom location for index in serialized string
        if "{}" not in key:
            key = key + "{}"

        return {
            key.format(index): value for index, value in sorted(self.items())
        }

    def next_available_index(self):
        # Add as first unused entry
        i = 0
        while i in self.keys():
            i += 1
        return i

    def update(self, data):
        # Force the integer key check
        for key, value in data.items():
            self.__setitem__(key, value)

    def __iadd__(self, other):
        index = self.next_available_index()
        self[index] = other
        return self

    def __setitem__(self, key, value):
        if not isinstance(key, int):
            raise TypeError("Key must be an integer: {}".format(key))

        if key < 0:
            raise ValueError("Negative index can't be set: {}".format(key))
        dict.__setitem__(self, key, value)
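

# Illustrative sketch only (not part of the original module): how the two
# containers above flatten into Deadline's indexed key/value form.
def _demo_serialize_vars():
    env = DeadlineKeyValueVar("EnvironmentKeyValue")
    env["AYON_RENDER_JOB"] = "1"
    env["A_KEY"] = "VALUE_A"
    # Keys are sorted alphabetically before indexing:
    # {'EnvironmentKeyValue0': 'A_KEY=VALUE_A',
    #  'EnvironmentKeyValue1': 'AYON_RENDER_JOB=1'}
    print(env.serialize())

    out = DeadlineIndexedVar("OutputFilename")
    out += "beauty.####.exr"  # '+=' appends at the next free integer index
    out += "depth.####.exr"
    # {'OutputFilename0': 'beauty.####.exr',
    #  'OutputFilename1': 'depth.####.exr'}
    print(out.serialize())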


@attr.s
class DeadlineJobInfo(object):
    """Mapping of all Deadline *JobInfo* attributes.

    This contains all JobInfo attributes plus their default values.
    Attributes set to `None` shouldn't be posted to Deadline, as the only
    required one is `Plugin`. The default values used by Deadline are
    stated in the comments.

    .. seealso::
        https://docs.thinkboxsoftware.com/products/deadline/10.1/1_User%20Manual/manual/manual-submission.html

    """

    # Required
    # ----------------------------------------------
    Plugin = attr.ib()

    # General
    Frames = attr.ib(default=None)  # default: 0
    Name = attr.ib(default="Untitled")
    Comment = attr.ib(default=None)  # default: empty
    Department = attr.ib(default=None)  # default: empty
    BatchName = attr.ib(default=None)  # default: empty
    UserName = attr.ib(default=getpass.getuser())
    MachineName = attr.ib(default=platform.node())
    Pool = attr.ib(default=None)  # default: "none"
    SecondaryPool = attr.ib(default=None)
    Group = attr.ib(default=None)  # default: "none"
    Priority = attr.ib(default=50)
    ChunkSize = attr.ib(default=1)
    ConcurrentTasks = attr.ib(default=1)
    LimitConcurrentTasksToNumberOfCpus = attr.ib(
        default=None)  # default: "true"
    OnJobComplete = attr.ib(default="Nothing")
    SynchronizeAllAuxiliaryFiles = attr.ib(default=None)  # default: false
    ForceReloadPlugin = attr.ib(default=None)  # default: false
    Sequential = attr.ib(default=None)  # default: false
    SuppressEvents = attr.ib(default=None)  # default: false
    Protected = attr.ib(default=None)  # default: false
    InitialStatus = attr.ib(default="Active")
    NetworkRoot = attr.ib(default=None)

    # Timeouts
    # ----------------------------------------------
    MinRenderTimeSeconds = attr.ib(default=None)  # Default: 0
    MinRenderTimeMinutes = attr.ib(default=None)  # Default: 0
    TaskTimeoutSeconds = attr.ib(default=None)  # Default: 0
    TaskTimeoutMinutes = attr.ib(default=None)  # Default: 0
    StartJobTimeoutSeconds = attr.ib(default=None)  # Default: 0
    StartJobTimeoutMinutes = attr.ib(default=None)  # Default: 0
    InitializePluginTimeoutSeconds = attr.ib(default=None)  # Default: 0
    # can be one of <Error/Notify/ErrorAndNotify/Complete>
    OnTaskTimeout = attr.ib(default=None)  # Default: Error
    EnableTimeoutsForScriptTasks = attr.ib(default=None)  # Default: false
    EnableFrameTimeouts = attr.ib(default=None)  # Default: false
    EnableAutoTimeout = attr.ib(default=None)  # Default: false

    # Interruptible
    # ----------------------------------------------
    Interruptible = attr.ib(default=None)  # Default: false
    InterruptiblePercentage = attr.ib(default=None)
    RemTimeThreshold = attr.ib(default=None)

    # Notifications
    # ----------------------------------------------
    # can be comma separated list of users
    NotificationTargets = attr.ib(default=None)  # Default: blank
    ClearNotificationTargets = attr.ib(default=None)  # Default: false
    # A comma separated list of additional email addresses
    NotificationEmails = attr.ib(default=None)  # Default: blank
    OverrideNotificationMethod = attr.ib(default=None)  # Default: false
    EmailNotification = attr.ib(default=None)  # Default: false
    PopupNotification = attr.ib(default=None)  # Default: false
    # String with `[EOL]` used for end of line
    NotificationNote = attr.ib(default=None)  # Default: blank

    # Machine Limit
    # ----------------------------------------------
    MachineLimit = attr.ib(default=None)  # Default: 0
    MachineLimitProgress = attr.ib(default=None)  # Default: -1.0
    Whitelist = attr.ib(default=None)  # Default: blank
    Blacklist = attr.ib(default=None)  # Default: blank

    # Limits
    # ----------------------------------------------
    # comma separated list of limit groups
    LimitGroups = attr.ib(default=None)  # Default: blank

    # Dependencies
    # ----------------------------------------------
    # comma separated list of job IDs
    JobDependencies = attr.ib(default=None)  # Default: blank
    JobDependencyPercentage = attr.ib(default=None)  # Default: -1
    IsFrameDependent = attr.ib(default=None)  # Default: false
    FrameDependencyOffsetStart = attr.ib(default=None)  # Default: 0
    FrameDependencyOffsetEnd = attr.ib(default=None)  # Default: 0
    ResumeOnCompleteDependencies = attr.ib(default=None)  # Default: true
    ResumeOnDeletedDependencies = attr.ib(default=None)  # Default: false
    ResumeOnFailedDependencies = attr.ib(default=None)  # Default: false
    # comma separated list of asset paths
    RequiredAssets = attr.ib(default=None)  # Default: blank
    # comma separated list of script paths
    ScriptDependencies = attr.ib(default=None)  # Default: blank

    # Failure Detection
    # ----------------------------------------------
    OverrideJobFailureDetection = attr.ib(default=None)  # Default: false
    FailureDetectionJobErrors = attr.ib(default=None)  # 0..x
    OverrideTaskFailureDetection = attr.ib(default=None)  # Default: false
    FailureDetectionTaskErrors = attr.ib(default=None)  # 0..x
    IgnoreBadJobDetection = attr.ib(default=None)  # Default: false
    SendJobErrorWarning = attr.ib(default=None)  # Default: false

    # Cleanup
    # ----------------------------------------------
    DeleteOnComplete = attr.ib(default=None)  # Default: false
    ArchiveOnComplete = attr.ib(default=None)  # Default: false
    OverrideAutoJobCleanup = attr.ib(default=None)  # Default: false
    OverrideJobCleanup = attr.ib(default=None)
    JobCleanupDays = attr.ib(default=None)  # Default: false
    # <ArchiveJobs/DeleteJobs>
    OverrideJobCleanupType = attr.ib(default=None)

    # Scheduling
    # ----------------------------------------------
    # <None/Once/Daily/Custom>
    ScheduledType = attr.ib(default=None)  # Default: None
    # <dd/MM/yyyy HH:mm>
    ScheduledStartDateTime = attr.ib(default=None)
    ScheduledDays = attr.ib(default=None)  # Default: 1
    # <dd:hh:mm:ss>
    JobDelay = attr.ib(default=None)
    # <Day of the Week><Start/Stop>Time=<HH:mm:ss>
    Scheduled = attr.ib(default=None)

    # Scripts
    # ----------------------------------------------
    # all accept path to script
    PreJobScript = attr.ib(default=None)  # Default: blank
    PostJobScript = attr.ib(default=None)  # Default: blank
    PreTaskScript = attr.ib(default=None)  # Default: blank
    PostTaskScript = attr.ib(default=None)  # Default: blank

    # Event Opt-Ins
    # ----------------------------------------------
    # comma separated list of plugins
    EventOptIns = attr.ib(default=None)  # Default: blank

    # Environment
    # ----------------------------------------------
    EnvironmentKeyValue = attr.ib(factory=partial(DeadlineKeyValueVar,
                                                  "EnvironmentKeyValue"))

    IncludeEnvironment = attr.ib(default=None)  # Default: false
    UseJobEnvironmentOnly = attr.ib(default=None)  # Default: false
    CustomPluginDirectory = attr.ib(default=None)  # Default: blank

    # Job Extra Info
    # ----------------------------------------------
    ExtraInfo = attr.ib(factory=partial(DeadlineIndexedVar, "ExtraInfo"))
    ExtraInfoKeyValue = attr.ib(factory=partial(DeadlineKeyValueVar,
                                                "ExtraInfoKeyValue"))

    # Task Extra Info Names
    # ----------------------------------------------
    OverrideTaskExtraInfoNames = attr.ib(default=None)  # Default: false
    TaskExtraInfoName = attr.ib(factory=partial(DeadlineIndexedVar,
                                                "TaskExtraInfoName"))

    # Output
    # ----------------------------------------------
    OutputFilename = attr.ib(factory=partial(DeadlineIndexedVar,
                                             "OutputFilename"))
    OutputFilenameTile = attr.ib(factory=partial(DeadlineIndexedVar,
                                                 "OutputFilename{}Tile"))
    OutputDirectory = attr.ib(factory=partial(DeadlineIndexedVar,
                                              "OutputDirectory"))

    # Asset Dependency
    # ----------------------------------------------
    AssetDependency = attr.ib(factory=partial(DeadlineIndexedVar,
                                              "AssetDependency"))

    # Tile Job
    # ----------------------------------------------
    TileJob = attr.ib(default=None)  # Default: false
    TileJobFrame = attr.ib(default=None)  # Default: 0
    TileJobTilesInX = attr.ib(default=None)  # Default: 0
    TileJobTilesInY = attr.ib(default=None)  # Default: 0
    TileJobTileCount = attr.ib(default=None)  # Default: 0

    # Maintenance Job
    # ----------------------------------------------
    MaintenanceJob = attr.ib(default=None)  # Default: false
    MaintenanceJobStartFrame = attr.ib(default=None)  # Default: 0
    MaintenanceJobEndFrame = attr.ib(default=None)  # Default: 0

    def serialize(self):
        """Return all data serialized as dictionary.

        Returns:
            OrderedDict: all serialized data.

        """
        def filter_data(a, v):
            if isinstance(v, (DeadlineIndexedVar, DeadlineKeyValueVar)):
                return False
            if v is None:
                return False
            return True

        serialized = attr.asdict(
            self, dict_factory=OrderedDict, filter=filter_data)

        # Custom serialize these attributes
        for attribute in [
            self.EnvironmentKeyValue,
            self.ExtraInfo,
            self.ExtraInfoKeyValue,
            self.TaskExtraInfoName,
            self.OutputFilename,
            self.OutputFilenameTile,
            self.OutputDirectory,
            self.AssetDependency
        ]:
            serialized.update(attribute.serialize())

        return serialized

    def update(self, data):
        """Update instance with data dict."""
        for key, value in data.items():
            setattr(self, key, value)

    def add_render_job_env_var(self):
        """Set the environment variables that mark AYON render jobs."""
        self.EnvironmentKeyValue["AYON_RENDER_JOB"] = "1"
        self.EnvironmentKeyValue["AYON_BUNDLE_NAME"] = (
            os.environ["AYON_BUNDLE_NAME"])
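

# Illustrative sketch only: building a minimal JobInfo. `serialize()` filters
# out attributes left at None, so only explicitly set values plus the
# indexed/key-value containers end up in the submission payload.
def _demo_job_info():
    job = DeadlineJobInfo(Plugin="CommandLine")
    job.Name = "example_job"
    job.Frames = "1001-1100"
    job.OutputFilename += "beauty.####.exr"
    # Contains e.g. 'Plugin', 'Name', 'Frames', 'Priority', ... plus
    # 'OutputFilename0': 'beauty.####.exr'; None-valued attributes omitted.
    return job.serialize()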


@six.add_metaclass(AbstractMetaInstancePlugin)
class AbstractSubmitDeadline(pyblish.api.InstancePlugin,
                             AYONPyblishPluginMixin):
    """Class abstracting access to Deadline."""

    label = "Submit to Deadline"
    order = pyblish.api.IntegratorOrder + 0.1

    import_reference = False
    use_published = True
    asset_dependencies = False
    default_priority = 50

    def __init__(self, *args, **kwargs):
        super(AbstractSubmitDeadline, self).__init__(*args, **kwargs)
        self._instance = None
        self._deadline_url = None
        self.scene_path = None
        self.job_info = None
        self.plugin_info = None
        self.aux_files = None

    def process(self, instance):
        """Plugin entry point."""
        self._instance = instance
        context = instance.context
        self._deadline_url = instance.data["deadline"]["url"]

        assert self._deadline_url, "Requires Deadline Webservice URL"

        file_path = None
        if self.use_published:
            if not self.import_reference:
                file_path = self.from_published_scene()
            else:
                self.log.info(
                    "Using the scene with imported reference for rendering")
                file_path = context.data["currentFile"]

        # fallback if nothing was set
        if not file_path:
            self.log.warning("Falling back to workfile")
            file_path = context.data["currentFile"]

        self.scene_path = file_path
        self.log.info("Using {} for render/export.".format(file_path))

        self.job_info = self.get_job_info()
        self.plugin_info = self.get_plugin_info()
        self.aux_files = self.get_aux_files()

        job_id = self.process_submission()
        self.log.info("Submitted job to Deadline: {}.".format(job_id))

        # TODO: Find a way that's more generic and not render type specific
        if instance.data.get("splitRender"):
            self.log.info("Splitting export and render into two jobs")
            self.log.info("Export job id: %s", job_id)
            render_job_info = self.get_job_info(dependency_job_ids=[job_id])
            render_plugin_info = self.get_plugin_info(job_type="render")
            payload = self.assemble_payload(
                job_info=render_job_info,
                plugin_info=render_plugin_info
            )
            auth = instance.data["deadline"]["auth"]
            verify = instance.data["deadline"]["verify"]
            render_job_id = self.submit(payload, auth, verify)
            self.log.info("Render job id: %s", render_job_id)

    def process_submission(self):
        """Process data for submission.

        This takes the Deadline JobInfo, PluginInfo and AuxFiles, creates
        a payload from them and submits it to Deadline.

        Returns:
            str: Deadline job ID.

        """
        payload = self.assemble_payload()
        auth = self._instance.data["deadline"]["auth"]
        verify = self._instance.data["deadline"]["verify"]
        return self.submit(payload, auth, verify)

    @abstractmethod
    def get_job_info(self):
        """Return filled Deadline JobInfo.

        This is the host/plugin specific implementation of how to fill
        the data in.

        See:
            :class:`DeadlineJobInfo`

        Returns:
            :class:`DeadlineJobInfo`: Filled Deadline JobInfo.

        """
        pass

    @abstractmethod
    def get_plugin_info(self):
        """Return filled Deadline PluginInfo.

        This is the host/plugin specific implementation of how to fill
        the data in.

        See:
            :class:`DeadlineJobInfo`

        Returns:
            dict: Filled Deadline PluginInfo.

        """
        pass

    def get_aux_files(self):
        """Return list of auxiliary files for the Deadline job.

        Override this if needed; otherwise an empty list is returned, as
        the field must be present in the Deadline submission even when
        empty.

        Returns:
            list: List of files.

        """
        return []

    def from_published_scene(self, replace_in_path=True):
        """Switch work scene for published scene.

        If rendering/exporting from published scenes is enabled, this will
        replace paths from the working scene with the published scene.

        Args:
            replace_in_path (bool): if True, it will try to find the old
                scene name in the paths of expected files and replace it
                with the name of the published scene.

        Returns:
            str: Published scene path.
            None: if no published scene is found.

        Note:
            The published scene path is actually determined from the
            project Anatomy, because at the time this plugin runs the
            scene may not be published yet.

        """
        return replace_with_published_scene_path(
            self._instance, replace_in_path=replace_in_path)

    def assemble_payload(
            self, job_info=None, plugin_info=None, aux_files=None):
        """Assemble payload data from its various parts.

        Args:
            job_info (DeadlineJobInfo): Deadline JobInfo. You can use
                :class:`DeadlineJobInfo` for it.
            plugin_info (dict): Deadline PluginInfo. Plugin specific options.
            aux_files (list, optional): List of auxiliary files to submit
                with the job.

        Returns:
            dict: Deadline Payload.

        """
        job = job_info or self.job_info
        return {
            "JobInfo": job.serialize(),
            "PluginInfo": plugin_info or self.plugin_info,
            "AuxFiles": aux_files or self.aux_files
        }

    def submit(self, payload, auth, verify):
        """Submit payload to the Deadline API end-point.

        This takes the payload dict, serializes it to JSON and POSTs it
        to the Deadline jobs end-point.

        Args:
            payload (dict): dict to become JSON in the Deadline submission.
            auth (tuple): (username, password)
            verify (bool): verify SSL certificate if present

        Returns:
            str: resulting Deadline job id.

        Raises:
            KnownPublishError: if submission fails.

        """
        url = "{}/api/jobs".format(self._deadline_url)
        response = requests_post(
            url, json=payload, auth=auth, verify=verify)
        if not response.ok:
            self.log.error("Submission failed!")
            self.log.error(response.status_code)
            self.log.error(response.content)
            self.log.debug(payload)
            raise KnownPublishError(response.text)

        try:
            result = response.json()
        except JSONDecodeError:
            msg = "Broken response {}. ".format(response)
            msg += "Try restarting the Deadline Webservice."
            self.log.warning(msg, exc_info=True)
            raise KnownPublishError("Broken response from DL")

        # for submit publish job
        self._instance.data["deadlineSubmissionJob"] = result

        return result["_id"]
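

# Hypothetical minimal subclass (not part of the addon), sketched to show the
# contract above: implement get_job_info()/get_plugin_info() and the base
# class handles scene switching, payload assembly and submission.
class ExampleSubmitDeadline(AbstractSubmitDeadline):
    label = "Submit Example to Deadline"
    hosts = ["example_host"]      # hypothetical host name
    families = ["render.farm"]

    def get_job_info(self):
        job_info = DeadlineJobInfo(Plugin="CommandLine")
        job_info.Name = self._instance.data["name"]
        job_info.Frames = "1001-1100"
        job_info.add_render_job_env_var()  # mark it as an AYON render job
        return job_info

    def get_plugin_info(self):
        # PluginInfo is plugin specific; a plain dict is enough here.
        return {"Executable": "example", "Arguments": ""}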
@@ -1,81 +0,0 @@
import os
import sys

import requests
import six

from ayon_core.lib import Logger
from ayon_core.addon import AYONAddon, IPluginPaths

from .version import __version__


class DeadlineWebserviceError(Exception):
    """Raised when the connection to the Deadline server fails."""


class DeadlineAddon(AYONAddon, IPluginPaths):
    name = "deadline"
    version = __version__

    def initialize(self, studio_settings):
        deadline_settings = studio_settings[self.name]
        deadline_servers_info = {
            url_item["name"]: url_item
            for url_item in deadline_settings["deadline_urls"]
        }

        if not deadline_servers_info:
            self.enabled = False
            self.log.warning(
                "Deadline Webservice URLs are not specified. Disabling addon."
            )

        self.deadline_servers_info = deadline_servers_info

    def get_plugin_paths(self):
        """Deadline plugin paths."""
        current_dir = os.path.dirname(os.path.abspath(__file__))
        return {
            "publish": [os.path.join(current_dir, "plugins", "publish")]
        }

    @staticmethod
    def get_deadline_pools(webservice, auth=None, log=None):
        """Get pools from Deadline.

        Args:
            webservice (str): Server url.
            auth (Optional[Tuple[str, str]]): Tuple containing username,
                password.
            log (Optional[Logger]): Logger to log errors to, if provided.

        Returns:
            List[str]: Pools.

        Raises:
            DeadlineWebserviceError: If the Deadline webservice is
                unreachable.

        """
        from .abstract_submit_deadline import requests_get

        if not log:
            log = Logger.get_logger(__name__)

        argument = "{}/api/pools?NamesOnly=true".format(webservice)
        try:
            kwargs = {}
            if auth:
                kwargs["auth"] = auth
            response = requests_get(argument, **kwargs)
        except requests.exceptions.ConnectionError as exc:
            msg = 'Cannot connect to DL web service {}'.format(webservice)
            log.error(msg)
            six.reraise(
                DeadlineWebserviceError,
                DeadlineWebserviceError('{} - {}'.format(msg, exc)),
                sys.exc_info()[2])
        if not response.ok:
            log.warning("No pools retrieved")
            return []

        return response.json()
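

# Usage sketch with hypothetical values: resolving pool names through the
# static helper above, e.g. to validate a configured pool.
if __name__ == "__main__":
    pools = DeadlineAddon.get_deadline_pools(
        "http://localhost:8082",    # hypothetical webservice URL
        auth=("artist", "secret"),  # only needed when auth is required
    )
    print(pools)  # e.g. ['none', 'renderfarm'], depending on the repository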
@@ -1,10 +0,0 @@
# List of product types used for plugin filtering for farm publishing
FARM_FAMILIES = [
    "render", "render.farm", "render.frames_farm",
    "prerender", "prerender.farm", "prerender.frames_farm",
    "renderlayer", "imagesequence", "image",
    "vrayscene", "maxrender",
    "arnold_rop", "mantra_rop",
    "karma_rop", "vray_rop", "redshift_rop",
    "renderFarm", "usdrender", "publish.hou"
]
@@ -1,115 +0,0 @@
# -*- coding: utf-8 -*-
"""Collect Deadline servers from instance.

This resolves the index into the server list stored in the
`deadlineServers` instance attribute, or uses the default server if that
attribute doesn't exist.

"""
import pyblish.api
from ayon_core.pipeline.publish import KnownPublishError

from ayon_deadline.lib import FARM_FAMILIES


class CollectDeadlineServerFromInstance(pyblish.api.InstancePlugin):
    """Collect Deadline Webservice URL from instance."""

    # Run before collect_render.
    order = pyblish.api.CollectorOrder + 0.225
    label = "Deadline Webservice from the Instance"
    targets = ["local"]

    families = FARM_FAMILIES

    def process(self, instance):
        if not instance.data.get("farm"):
            self.log.debug("Should not be processed on farm, skipping.")
            return

        if not instance.data.get("deadline"):
            instance.data["deadline"] = {}

        # todo: separate logic should be removed, all hosts should be same
        host_name = instance.context.data["hostName"]
        if host_name == "maya":
            deadline_url = self._collect_deadline_url(instance)
        else:
            deadline_url = (instance.data.get("deadlineUrl") or  # backwards
                            instance.data.get("deadline", {}).get("url"))
        if deadline_url:
            instance.data["deadline"]["url"] = deadline_url.strip().rstrip("/")
        else:
            instance.data["deadline"]["url"] = (
                instance.context.data["deadline"]["defaultUrl"])
        self.log.debug(
            "Using {} for submission".format(instance.data["deadline"]["url"]))

    def _collect_deadline_url(self, render_instance):
        # type: (pyblish.api.Instance) -> str
        """Get Deadline Webservice URL from render instance.

        This will get all configured Deadline Webservice URLs and create a
        subset of them based upon the project configuration. It will then
        take `deadlineServers` from the render instance, which is
        effectively an `int` index into that list.

        Args:
            render_instance (pyblish.api.Instance): Render instance created
                by Creator in Maya.

        Returns:
            str: Selected Deadline Webservice URL.

        """
        # Not all hosts can import this module.
        from maya import cmds
        deadline_settings = (
            render_instance.context.data
            ["project_settings"]
            ["deadline"]
        )
        default_server_url = (render_instance.context.data["deadline"]
                                                          ["defaultUrl"])
        # QUESTION How and where is this set? Should it be removed?
        instance_server = render_instance.data.get("deadlineServers")
        if not instance_server:
            self.log.debug("Using default server.")
            return default_server_url

        # Get instance server as string.
        if isinstance(instance_server, int):
            instance_server = cmds.getAttr(
                "{}.deadlineServers".format(render_instance.data["objset"]),
                asString=True
            )

        default_servers = {
            url_item["name"]: url_item["value"]
            for url_item in deadline_settings["deadline_servers_info"]
        }
        project_servers = (
            render_instance.context.data
            ["project_settings"]
            ["deadline"]
            ["deadline_servers"]
        )
        if not project_servers:
            self.log.debug("No project servers found. Using default servers.")
            return default_servers[instance_server]

        project_enabled_servers = {
            k: default_servers[k]
            for k in project_servers
            if k in default_servers
        }

        if instance_server not in project_enabled_servers:
            msg = (
                "\"{}\" server on instance is not enabled in project settings."
                " Enabled project servers:\n{}".format(
                    instance_server, project_enabled_servers
                )
            )
            raise KnownPublishError(msg)

        self.log.debug("Using project approved server.")
        return project_enabled_servers[instance_server]
@@ -1,48 +0,0 @@
# -*- coding: utf-8 -*-
"""Collect default Deadline server."""
import pyblish.api


class CollectDefaultDeadlineServer(pyblish.api.ContextPlugin):
    """Collect default Deadline Webservice URL.

    DL webservice addresses must be configured first in System Settings
    for the project settings enum to work.

    The default webservice can be overridden by
    `project_settings/deadline/deadline_servers`. Currently only a single
    URL is expected.

    This URL can be overridden by some hosts directly on instances with
    `CollectDeadlineServerFromInstance`.
    """

    # Run before collect_deadline_server_instance.
    order = pyblish.api.CollectorOrder + 0.200
    label = "Default Deadline Webservice"
    targets = ["local"]

    def process(self, context):
        try:
            deadline_addon = context.data["ayonAddonsManager"]["deadline"]
        except (AttributeError, KeyError):
            self.log.error("Cannot get AYON Deadline addon.")
            raise AssertionError("AYON Deadline addon not found.")

        deadline_settings = context.data["project_settings"]["deadline"]
        deadline_server_name = deadline_settings["deadline_server"]

        dl_server_info = None
        if deadline_server_name:
            dl_server_info = deadline_addon.deadline_servers_info.get(
                deadline_server_name)

        if dl_server_info:
            deadline_url = dl_server_info["value"]
        else:
            # 'deadline_servers_info' is keyed by server name; fall back
            # to the first configured server.
            default_dl_server_info = next(
                iter(deadline_addon.deadline_servers_info.values()))
            deadline_url = default_dl_server_info["value"]

        context.data["deadline"] = {}
        context.data["deadline"]["defaultUrl"] = (
            deadline_url.strip().rstrip("/"))
@@ -1,91 +0,0 @@
# -*- coding: utf-8 -*-
import pyblish.api
from ayon_core.lib import TextDef
from ayon_core.pipeline.publish import AYONPyblishPluginMixin

from ayon_deadline.lib import FARM_FAMILIES


class CollectDeadlinePools(pyblish.api.InstancePlugin,
                           AYONPyblishPluginMixin):
    """Collect pools from instance or Publisher attributes, and from
    Settings otherwise.

    Pools are used to control which DL workers can render the job.

    Pools might be set:
        - directly on the instance (set directly in DCC)
        - from Publisher attributes
        - from defaults in Settings.

    Publisher attributes could be shown even for instances that should be
    rendered locally, as visibility is driven by the product type of the
    instance (which will most likely be `render`).
    (Might be resolved in the future; the class attribute 'families'
    should then be cleaned up.)

    """

    order = pyblish.api.CollectorOrder + 0.420
    label = "Collect Deadline Pools"
    hosts = [
        "aftereffects",
        "fusion",
        "harmony",
        "maya",
        "max",
        "houdini",
        "nuke",
    ]

    families = FARM_FAMILIES

    primary_pool = None
    secondary_pool = None

    @classmethod
    def apply_settings(cls, project_settings):
        # deadline.publish.CollectDeadlinePools
        settings = project_settings["deadline"]["publish"]["CollectDeadlinePools"]  # noqa
        cls.primary_pool = settings.get("primary_pool", None)
        cls.secondary_pool = settings.get("secondary_pool", None)

    def process(self, instance):
        attr_values = self.get_attr_values_from_data(instance.data)
        if not instance.data.get("primaryPool"):
            instance.data["primaryPool"] = (
                attr_values.get("primaryPool") or self.primary_pool or "none"
            )
        if instance.data["primaryPool"] == "-":
            instance.data["primaryPool"] = None

        if not instance.data.get("secondaryPool"):
            instance.data["secondaryPool"] = (
                attr_values.get("secondaryPool")
                or self.secondary_pool
                or "none"
            )

        if instance.data["secondaryPool"] == "-":
            instance.data["secondaryPool"] = None

    @classmethod
    def get_attribute_defs(cls):
        # TODO: Preferably this would be an enum for the user
        # but the Deadline server URL can be dynamic and
        # can be set per render instance. Since get_attribute_defs
        # can't be dynamic, unfortunately EnumDef isn't possible (yet?)
        # pool_names = self.deadline_addon.get_deadline_pools(deadline_url,
        #                                                     self.log)
        # secondary_pool_names = ["-"] + pool_names

        return [
            TextDef("primaryPool",
                    label="Primary Pool",
                    default=cls.primary_pool,
                    tooltip="Deadline primary pool, "
                            "applicable for farm rendering"),
            TextDef("secondaryPool",
                    label="Secondary Pool",
                    default=cls.secondary_pool,
                    tooltip="Deadline secondary pool, "
                            "applicable for farm rendering")
        ]
@@ -1,98 +0,0 @@
# -*- coding: utf-8 -*-
"""Collect user credentials.

Requires:
    context -> project_settings
    instance.data["deadline"]["url"]

Provides:
    instance.data["deadline"] -> require_authentication (bool)
    instance.data["deadline"] -> auth (tuple (str, str)) -
        (username, password) or None
"""
import pyblish.api

from ayon_api import get_server_api_connection

from ayon_deadline.lib import FARM_FAMILIES


class CollectDeadlineUserCredentials(pyblish.api.InstancePlugin):
    """Collect the artist's user name and password if DL requires
    authentication.

    If the Deadline server is marked as requiring authentication, it looks
    first for default values in 'Studio Settings', which can be overridden
    by artist dependent values from 'Site Settings'.
    """
    order = pyblish.api.CollectorOrder + 0.250
    label = "Collect Deadline User Credentials"

    targets = ["local"]
    hosts = ["aftereffects",
             "blender",
             "fusion",
             "harmony",
             "nuke",
             "maya",
             "max",
             "houdini"]

    families = FARM_FAMILIES

    def process(self, instance):
        if not instance.data.get("farm"):
            self.log.debug("Should not be processed on farm, skipping.")
            return

        collected_deadline_url = instance.data["deadline"]["url"]
        if not collected_deadline_url:
            raise ValueError("Instance doesn't have '[deadline][url]'.")
        context_data = instance.context.data
        deadline_settings = context_data["project_settings"]["deadline"]

        deadline_server_name = None
        # deadline url might be set directly on the instance, we need to
        # find the metadata for it
        for deadline_info in deadline_settings["deadline_urls"]:
            dl_settings_url = deadline_info["value"].strip().rstrip("/")
            if dl_settings_url == collected_deadline_url:
                deadline_server_name = deadline_info["name"]
                break

        if not deadline_server_name:
            raise ValueError(f"Collected {collected_deadline_url} doesn't "
                             "match any site configured in Studio Settings")

        instance.data["deadline"]["require_authentication"] = (
            deadline_info["require_authentication"]
        )
        instance.data["deadline"]["auth"] = None

        instance.data["deadline"]["verify"] = (
            not deadline_info["not_verify_ssl"])

        if not deadline_info["require_authentication"]:
            return

        addons_manager = instance.context.data["ayonAddonsManager"]
        deadline_addon = addons_manager["deadline"]

        default_username = deadline_info["default_username"]
        default_password = deadline_info["default_password"]
        if default_username and default_password:
            self.log.debug("Setting credentials from defaults")
            instance.data["deadline"]["auth"] = (default_username,
                                                 default_password)

        # TODO import 'get_addon_site_settings' when available
        #   in public 'ayon_api'
        local_settings = get_server_api_connection().get_addon_site_settings(
            deadline_addon.name, deadline_addon.version)
        local_settings = local_settings["local_settings"]
        for server_info in local_settings:
            if deadline_server_name == server_info["server_name"]:
                if server_info["username"] and server_info["password"]:
                    self.log.debug("Setting credentials from Site Settings")
                    instance.data["deadline"]["auth"] = (
                        server_info["username"], server_info["password"])
                break
@@ -1,17 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<root>
    <error id="main">
        <title>Deadline Authentication</title>
        <description>
## Deadline authentication is required

This project's Settings state that Deadline requires authentication.

### How to repair?

Please go to AYON Server > Site Settings and provide your Deadline username and password.
In some cases the password may be empty if Deadline is configured to allow that. Ask your administrator.

        </description>
    </error>
</root>
@@ -1,31 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<root>
    <error id="main">
        <title>Deadline Pools</title>
        <description>
## Invalid Deadline pools found

The configured pools don't match the available pools in Deadline.

### How to repair?

If your instance had Deadline pools set on creation, remove or
change them.

In other cases inform an admin to change them in Settings.

Available Deadline pools:

{pools_str}

        </description>
        <detail>
### __Detailed Info__

This error is shown when a configured pool is not available on Deadline. It
can happen when publishing old workfiles which were created with previous
Deadline pools, or when someone changed the available pools in Deadline
but didn't modify AYON Settings to match the changes.
        </detail>
    </error>
</root>
@@ -1,143 +0,0 @@
import os
import attr
import getpass
import pyblish.api
from datetime import datetime

from ayon_core.lib import (
    env_value_to_bool,
    collect_frames,
    is_in_tests,
)
from ayon_deadline import abstract_submit_deadline
from ayon_deadline.abstract_submit_deadline import DeadlineJobInfo


@attr.s
class DeadlinePluginInfo:
    Comp = attr.ib(default=None)
    SceneFile = attr.ib(default=None)
    OutputFilePath = attr.ib(default=None)
    Output = attr.ib(default=None)
    StartupDirectory = attr.ib(default=None)
    Arguments = attr.ib(default=None)
    ProjectPath = attr.ib(default=None)
    AWSAssetFile0 = attr.ib(default=None)
    Version = attr.ib(default=None)
    MultiProcess = attr.ib(default=None)


class AfterEffectsSubmitDeadline(
    abstract_submit_deadline.AbstractSubmitDeadline
):

    label = "Submit AE to Deadline"
    order = pyblish.api.IntegratorOrder + 0.1
    hosts = ["aftereffects"]
    families = ["render.farm"]  # cannot be "render" as that is integrated
    use_published = True
    targets = ["local"]

    priority = 50
    chunk_size = 1000000
    group = None
    department = None
    multiprocess = True

    def get_job_info(self):
        dln_job_info = DeadlineJobInfo(Plugin="AfterEffects")

        context = self._instance.context

        batch_name = os.path.basename(self._instance.data["source"])
        if is_in_tests():
            batch_name += datetime.now().strftime("%d%m%Y%H%M%S")
        dln_job_info.Name = self._instance.data["name"]
        dln_job_info.BatchName = batch_name
        dln_job_info.Plugin = "AfterEffects"
        dln_job_info.UserName = context.data.get(
            "deadlineUser", getpass.getuser())
        # Deadline requires integers in frame range
        frame_range = "{}-{}".format(
            int(round(self._instance.data["frameStart"])),
            int(round(self._instance.data["frameEnd"])))
        dln_job_info.Frames = frame_range

        dln_job_info.Priority = self.priority
        dln_job_info.Pool = self._instance.data.get("primaryPool")
        dln_job_info.SecondaryPool = self._instance.data.get("secondaryPool")
        dln_job_info.Group = self.group
        dln_job_info.Department = self.department
        dln_job_info.ChunkSize = self.chunk_size
        dln_job_info.OutputFilename += \
            os.path.basename(self._instance.data["expectedFiles"][0])
        dln_job_info.OutputDirectory += \
            os.path.dirname(self._instance.data["expectedFiles"][0])
        dln_job_info.JobDelay = "00:00:00"

        keys = [
            "FTRACK_API_KEY",
            "FTRACK_API_USER",
            "FTRACK_SERVER",
            "AYON_BUNDLE_NAME",
            "AYON_DEFAULT_SETTINGS_VARIANT",
            "AYON_PROJECT_NAME",
            "AYON_FOLDER_PATH",
            "AYON_TASK_NAME",
            "AYON_WORKDIR",
            "AYON_APP_NAME",
            "AYON_LOG_NO_COLORS",
            "AYON_IN_TESTS"
        ]

        environment = {
            key: os.environ[key]
            for key in keys
            if key in os.environ
        }
        for key in keys:
            value = environment.get(key)
            if value:
                dln_job_info.EnvironmentKeyValue[key] = value

        # to recognize render jobs
        dln_job_info.add_render_job_env_var()

        return dln_job_info

    def get_plugin_info(self):
        deadline_plugin_info = DeadlinePluginInfo()

        render_path = self._instance.data["expectedFiles"][0]

        file_name, frame = list(collect_frames([render_path]).items())[0]
        if frame:
            # replace the frame ('000001') with Deadline's required
            # '[#######]' padding; expects the filename in the format
            # project_folder_product_version.FRAME.ext
            render_dir = os.path.dirname(render_path)
            file_name = os.path.basename(render_path)
            hashed = '[{}]'.format(len(frame) * "#")
            file_name = file_name.replace(frame, hashed)
            render_path = os.path.join(render_dir, file_name)

        deadline_plugin_info.Comp = self._instance.data["comp_name"]
        deadline_plugin_info.Version = self._instance.data["app_version"]
        # must be here because of the DL AE plugin
        # multiprocess can be overridden by an env var; if it shouldn't be
        # used for some app variant, use MULTIPROCESS:false in Settings.
        # Default is True.
        env_multi = env_value_to_bool("MULTIPROCESS", default=True)
        deadline_plugin_info.MultiProcess = env_multi and self.multiprocess
        deadline_plugin_info.SceneFile = self.scene_path
        deadline_plugin_info.Output = render_path.replace("\\", "/")

        return attr.asdict(deadline_plugin_info)

    def from_published_scene(self):
        """Do not overwrite expected files.

        `use_published` is set to True, so rendering will be triggered
        from the published scene (in the 'publish' folder). The default
        implementation of the abstract class renames expected (i.e.
        rendered) files accordingly, which is not needed here.
        """
        return super().from_published_scene(False)
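

# Worked example (illustrative, hypothetical file name) of the frame-token
# replacement done in get_plugin_info() above: the collected frame string
# is swapped for Deadline's hash padding.
def _to_deadline_padding(render_path):
    import os
    from ayon_core.lib import collect_frames

    _file_name, frame = list(collect_frames([render_path]).items())[0]
    if frame:
        hashed = "[{}]".format(len(frame) * "#")
        base = os.path.basename(render_path).replace(frame, hashed)
        render_path = os.path.join(os.path.dirname(render_path), base)
    return render_path

# _to_deadline_padding("sh010_render_v001.0001.exr")
# -> "sh010_render_v001.[####].exr"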
@@ -1,225 +0,0 @@
# -*- coding: utf-8 -*-
"""Submitting render job to Deadline."""

import os
import getpass
import attr
from datetime import datetime

from ayon_core.lib import (
    BoolDef,
    NumberDef,
    TextDef,
    is_in_tests,
)
from ayon_core.pipeline.publish import AYONPyblishPluginMixin
from ayon_core.pipeline.farm.tools import iter_expected_files

from ayon_deadline import abstract_submit_deadline
from ayon_deadline.abstract_submit_deadline import DeadlineJobInfo


@attr.s
class BlenderPluginInfo:
    SceneFile = attr.ib(default=None)  # Input
    Version = attr.ib(default=None)  # Mandatory for Deadline
    SaveFile = attr.ib(default=True)


class BlenderSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline,
                            AYONPyblishPluginMixin):
    label = "Submit Render to Deadline"
    hosts = ["blender"]
    families = ["render"]
    settings_category = "deadline"

    use_published = True
    priority = 50
    chunk_size = 1
    jobInfo = {}
    pluginInfo = {}
    group = None
    job_delay = "00:00:00:00"

    def get_job_info(self):
        job_info = DeadlineJobInfo(Plugin="Blender")

        job_info.update(self.jobInfo)

        instance = self._instance
        context = instance.context

        # Always use the original work file name for the Job name even when
        # rendering is done from the published Work File. The original work
        # file name is clearer because it can also have subversion strings,
        # etc. which are stripped for the published file.
        src_filepath = context.data["currentFile"]
        src_filename = os.path.basename(src_filepath)

        if is_in_tests():
            src_filename += datetime.now().strftime("%d%m%Y%H%M%S")

        job_info.Name = f"{src_filename} - {instance.name}"
        job_info.BatchName = src_filename
        job_info.Plugin = instance.data.get("blenderRenderPlugin", "Blender")
        job_info.UserName = context.data.get("deadlineUser", getpass.getuser())

        # Deadline requires integers in frame range
        frames = "{start}-{end}x{step}".format(
            start=int(instance.data["frameStartHandle"]),
            end=int(instance.data["frameEndHandle"]),
            step=int(instance.data["byFrameStep"]),
        )
        job_info.Frames = frames

        job_info.Pool = instance.data.get("primaryPool")
        job_info.SecondaryPool = instance.data.get("secondaryPool")
        job_info.Comment = instance.data.get("comment")

        if self.group != "none" and self.group:
            job_info.Group = self.group

        attr_values = self.get_attr_values_from_data(instance.data)
        render_globals = instance.data.setdefault("renderGlobals", {})
        machine_list = attr_values.get("machineList", "")
        if machine_list:
            if attr_values.get("whitelist", True):
                machine_list_key = "Whitelist"
            else:
                machine_list_key = "Blacklist"
            render_globals[machine_list_key] = machine_list

        job_info.ChunkSize = attr_values.get("chunkSize", self.chunk_size)
        job_info.Priority = attr_values.get("priority", self.priority)
        job_info.ScheduledType = "Once"
        job_info.JobDelay = attr_values.get("job_delay", self.job_delay)

        # Add options from RenderGlobals
        render_globals = instance.data.get("renderGlobals", {})
        job_info.update(render_globals)

        keys = [
            "FTRACK_API_KEY",
            "FTRACK_API_USER",
            "FTRACK_SERVER",
            "OPENPYPE_SG_USER",
            "AYON_BUNDLE_NAME",
            "AYON_DEFAULT_SETTINGS_VARIANT",
            "AYON_PROJECT_NAME",
            "AYON_FOLDER_PATH",
            "AYON_TASK_NAME",
            "AYON_WORKDIR",
            "AYON_APP_NAME",
            "AYON_IN_TESTS"
        ]

        environment = {
            key: os.environ[key]
            for key in keys
            if key in os.environ
        }

        for key in keys:
            value = environment.get(key)
            if not value:
                continue
            job_info.EnvironmentKeyValue[key] = value

        # to recognize render jobs (e.g. for turning events on/off)
        job_info.add_render_job_env_var()
        job_info.EnvironmentKeyValue["AYON_LOG_NO_COLORS"] = "1"

        # Adding file dependencies.
        if self.asset_dependencies:
            dependencies = instance.context.data["fileDependencies"]
            for dependency in dependencies:
                job_info.AssetDependency += dependency

        # Add list of expected files to job
        # ---------------------------------
        exp = instance.data.get("expectedFiles")
        for filepath in iter_expected_files(exp):
            job_info.OutputDirectory += os.path.dirname(filepath)
            job_info.OutputFilename += os.path.basename(filepath)

        return job_info

    def get_plugin_info(self):
        # Not all hosts can import this module.
        import bpy

        plugin_info = BlenderPluginInfo(
            SceneFile=self.scene_path,
            Version=bpy.app.version_string,
            SaveFile=True,
        )

        plugin_payload = attr.asdict(plugin_info)

        # Patching with pluginInfo from settings
        for key, value in self.pluginInfo.items():
            plugin_payload[key] = value

        return plugin_payload

    def process_submission(self, auth=None):
        instance = self._instance

        expected_files = instance.data["expectedFiles"]
        if not expected_files:
            raise RuntimeError("No Render Elements found!")

        first_file = next(iter_expected_files(expected_files))
        output_dir = os.path.dirname(first_file)
        instance.data["outputDir"] = output_dir
        instance.data["toBeRenderedOn"] = "deadline"

        payload = self.assemble_payload()
        auth = self._instance.data["deadline"]["auth"]
        verify = self._instance.data["deadline"]["verify"]
        return self.submit(payload, auth=auth, verify=verify)

    def from_published_scene(self):
        """Set the correct path for the json metadata.

        Because the rendering path is set in the blend file during the
        collection, and the path is adjusted to use the published scene,
        this ensures that the metadata and the rendered files are in the
        same location.
        """
        return super().from_published_scene(False)

    @classmethod
    def get_attribute_defs(cls):
        defs = super(BlenderSubmitDeadline, cls).get_attribute_defs()
        defs.extend([
            BoolDef("use_published",
                    default=cls.use_published,
                    label="Use Published Scene"),

            NumberDef("priority",
                      minimum=1,
                      maximum=250,
                      decimals=0,
                      default=cls.priority,
                      label="Priority"),

            NumberDef("chunkSize",
                      minimum=1,
                      maximum=50,
                      decimals=0,
                      default=cls.chunk_size,
                      label="Frames Per Task"),

            TextDef("group",
                    default=cls.group,
                    label="Group Name"),

            TextDef("job_delay",
                    default=cls.job_delay,
                    label="Job Delay",
                    placeholder="dd:hh:mm:ss",
                    tooltip="Delay the job by the specified amount of time. "
                            "Timecode: dd:hh:mm:ss."),
        ])

        return defs
@@ -1,271 +0,0 @@
import os
import re
import json
import getpass
import pyblish.api

from ayon_deadline.abstract_submit_deadline import requests_post


class CelactionSubmitDeadline(pyblish.api.InstancePlugin):
    """Submit CelAction2D scene to Deadline.

    Renders are submitted to a Deadline Web Service.

    """

    label = "Submit CelAction to Deadline"
    order = pyblish.api.IntegratorOrder + 0.1
    hosts = ["celaction"]
    families = ["render.farm"]
    settings_category = "deadline"

    deadline_department = ""
    deadline_priority = 50
    deadline_pool = ""
    deadline_pool_secondary = ""
    deadline_group = ""
    deadline_chunk_size = 1
    deadline_job_delay = "00:00:08:00"

    def process(self, instance):

        context = instance.context

        deadline_url = instance.data["deadline"]["url"]
        assert deadline_url, "Requires Deadline Webservice URL"

        self.deadline_url = "{}/api/jobs".format(deadline_url)
        self._comment = instance.data["comment"]
        self._deadline_user = context.data.get(
            "deadlineUser", getpass.getuser())
        self._frame_start = int(instance.data["frameStart"])
        self._frame_end = int(instance.data["frameEnd"])

        # get output path
        render_path = instance.data['path']
        script_path = context.data["currentFile"]

        response = self.payload_submit(instance,
                                       script_path,
                                       render_path
                                       )
        # Store output dir for unified publisher (filesequence)
        instance.data["deadlineSubmissionJob"] = response.json()

        instance.data["outputDir"] = os.path.dirname(
            render_path).replace("\\", "/")

        instance.data["publishJobState"] = "Suspended"

        # adding 2d render specific family for version identification in Loader
        instance.data["families"] = ["render2d"]

    def payload_submit(self,
                       instance,
                       script_path,
                       render_path
                       ):
        resolution_width = instance.data["resolutionWidth"]
        resolution_height = instance.data["resolutionHeight"]
        render_dir = os.path.normpath(os.path.dirname(render_path))
        render_path = os.path.normpath(render_path)
        script_name = os.path.basename(script_path)

        anatomy = instance.context.data["anatomy"]
        publish_template = anatomy.get_template_item(
            "publish", "default", "path"
        )
        for item in instance.context:
            if "workfile" in item.data["productType"]:
                msg = "Workfile (scene) must be published along"
                assert item.data["publish"] is True, msg

                template_data = item.data.get("anatomyData")
                rep = item.data.get("representations")[0].get("name")
                template_data["representation"] = rep
                template_data["ext"] = rep
                template_data["comment"] = None
                template_filled = publish_template.format_strict(
                    template_data
                )
                script_path = os.path.normpath(template_filled)

                self.log.info(
                    "Using published scene for render {}".format(script_path)
                )

        jobname = "%s - %s" % (script_name, instance.name)

        output_filename_0 = self.preview_fname(render_path)

        try:
            # Ensure render folder exists
            os.makedirs(render_dir)
        except OSError:
            pass

        # define chunk and priority
        chunk_size = instance.context.data.get("chunk")
        if not chunk_size:
            chunk_size = self.deadline_chunk_size

        # search for %02d pattern in name, and padding number
        search_results = re.search(r"(%0)(\d)(d)[._]", render_path).groups()
        split_pattern = "".join(search_results)
        padding_number = int(search_results[1])

        args = [
            f"<QUOTE>{script_path}<QUOTE>",
            "-a",
            "-16",
            "-s <STARTFRAME>",
            "-e <ENDFRAME>",
            f"-d <QUOTE>{render_dir}<QUOTE>",
            f"-x {resolution_width}",
            f"-y {resolution_height}",
            f"-r <QUOTE>{render_path.replace(split_pattern, '')}<QUOTE>",
            f"-= AbsoluteFrameNumber=on -= PadDigits={padding_number}",
            "-= ClearAttachment=on",
        ]

        payload = {
            "JobInfo": {
                # Job name, as seen in Monitor
                "Name": jobname,

                # plugin definition
                "Plugin": "CelAction",

                # Top-level group name
                "BatchName": script_name,

                # Arbitrary username, for visualisation in Monitor
                "UserName": self._deadline_user,

                "Department": self.deadline_department,
                "Priority": self.deadline_priority,

                "Group": self.deadline_group,
                "Pool": self.deadline_pool,
                "SecondaryPool": self.deadline_pool_secondary,
                "ChunkSize": chunk_size,

                "Frames": f"{self._frame_start}-{self._frame_end}",
                "Comment": self._comment,

                # Optional, enable double-click to preview rendered
                # frames from Deadline Monitor
                "OutputFilename0": output_filename_0.replace("\\", "/"),

                # # Asset dependency to wait for at least
                # # the scene file to sync.
                # "AssetDependency0": script_path
                "ScheduledType": "Once",
                "JobDelay": self.deadline_job_delay
            },
            "PluginInfo": {
                # Input
                "SceneFile": script_path,

                # Output directory
                "OutputFilePath": render_dir.replace("\\", "/"),

                # Plugin attributes
                "StartupDirectory": "",
                "Arguments": " ".join(args),

                # Resolve relative references
                "ProjectPath": script_path,
                "AWSAssetFile0": render_path,
            },

            # Mandatory for Deadline, may be empty
            "AuxFiles": []
        }

        plugin = payload["JobInfo"]["Plugin"]
        self.log.debug("using render plugin : {}".format(plugin))

        self.log.debug("Submitting..")
        self.log.debug(json.dumps(payload, indent=4, sort_keys=True))

        # adding expected files to instance.data
        self.expected_files(instance, render_path)
        self.log.debug("__ expectedFiles: `{}`".format(
            instance.data["expectedFiles"]))
        auth = instance.data["deadline"]["auth"]
        verify = instance.data["deadline"]["verify"]
        response = requests_post(self.deadline_url, json=payload,
                                 auth=auth,
                                 verify=verify)

        if not response.ok:
            self.log.error(
                "Submission failed! [{}] {}".format(
                    response.status_code, response.content))
            self.log.debug(payload)
            raise SystemExit(response.text)

        return response

    def preflight_check(self, instance):
        """Ensure the startFrame, endFrame and byFrameStep are integers."""

        for key in ("frameStart", "frameEnd"):
            value = instance.data[key]

            if int(value) == value:
                continue

            self.log.warning(
                "%f=%d was rounded off to nearest integer"
                % (value, int(value))
            )

    def preview_fname(self, path):
        """Return output file path with #### for padding.

        Deadline requires the path to be formatted with # in place of numbers.
        For example `/path/to/render.####.png`

        Args:
            path (str): path to rendered images

        Returns:
            str

        """
        self.log.debug("_ path: `{}`".format(path))
        if "%" in path:
            search_results = re.search(r"[._](%0)(\d)(d)[._]", path).groups()
|
||||
split_patern = "".join(search_results)
|
||||
split_path = path.split(split_patern)
|
||||
hashes = "#" * int(search_results[1])
|
||||
return "".join([split_path[0], hashes, split_path[-1]])
|
||||
|
||||
self.log.debug("_ path: `{}`".format(path))
|
||||
return path
|
||||
|
||||
def expected_files(self, instance, filepath):
|
||||
""" Create expected files in instance data
|
||||
"""
|
||||
if not instance.data.get("expectedFiles"):
|
||||
instance.data["expectedFiles"] = []
|
||||
|
||||
dirpath = os.path.dirname(filepath)
|
||||
filename = os.path.basename(filepath)
|
||||
|
||||
if "#" in filename:
|
||||
pparts = filename.split("#")
|
||||
padding = "%0{}d".format(len(pparts) - 1)
|
||||
filename = pparts[0] + padding + pparts[-1]
|
||||
|
||||
if "%" not in filename:
|
||||
instance.data["expectedFiles"].append(filepath)
|
||||
return
|
||||
|
||||
for i in range(self._frame_start, (self._frame_end + 1)):
|
||||
instance.data["expectedFiles"].append(
|
||||
os.path.join(dirpath, (filename % i)).replace("\\", "/")
|
||||
)
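
# Illustrative sketch (not part of the original plugin): how a "%04d"
# padded render path expands into the expected file list built above.
def _example_expand_expected_files(filepath, frame_start, frame_end):
    # e.g. "/renders/shot.%04d.png" with frames 1001-1003 yields
    # ["/renders/shot.1001.png", ..., "/renders/shot.1003.png"]
    dirpath = os.path.dirname(filepath)
    filename = os.path.basename(filepath)
    return [
        os.path.join(dirpath, filename % frame).replace("\\", "/")
        for frame in range(frame_start, frame_end + 1)
    ]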
@ -1,253 +0,0 @@
import os
import json
import getpass

import pyblish.api

from ayon_deadline.abstract_submit_deadline import requests_post
from ayon_core.pipeline.publish import (
    AYONPyblishPluginMixin
)
from ayon_core.lib import NumberDef


class FusionSubmitDeadline(
    pyblish.api.InstancePlugin,
    AYONPyblishPluginMixin
):
    """Submit current Comp to Deadline

    Renders are submitted to a Deadline Web Service as
    supplied via settings key "DEADLINE_REST_URL".

    """

    label = "Submit Fusion to Deadline"
    order = pyblish.api.IntegratorOrder
    hosts = ["fusion"]
    families = ["render"]
    targets = ["local"]
    settings_category = "deadline"

    # presets
    plugin = None

    priority = 50
    chunk_size = 1
    concurrent_tasks = 1
    group = ""

    @classmethod
    def get_attribute_defs(cls):
        return [
            NumberDef(
                "priority",
                label="Priority",
                default=cls.priority,
                decimals=0
            ),
            NumberDef(
                "chunk",
                label="Frames Per Task",
                default=cls.chunk_size,
                decimals=0,
                minimum=1,
                maximum=1000
            ),
            NumberDef(
                "concurrency",
                label="Concurrency",
                default=cls.concurrent_tasks,
                decimals=0,
                minimum=1,
                maximum=10
            )
        ]

    def process(self, instance):
        if not instance.data.get("farm"):
            self.log.debug("Skipping local instance.")
            return

        attribute_values = self.get_attr_values_from_data(
            instance.data)

        context = instance.context

        key = "__hasRun{}".format(self.__class__.__name__)
        if context.data.get(key, False):
            return
        else:
            context.data[key] = True

        from ayon_fusion.api.lib import get_frame_path

        deadline_url = instance.data["deadline"]["url"]
        assert deadline_url, "Requires Deadline Webservice URL"

        # Collect all saver instances in context that are to be rendered
        saver_instances = []
        for inst in context:
            if inst.data["productType"] != "render":
                # Allow only saver family instances
                continue

            if not inst.data.get("publish", True):
                # Skip inactive instances
                continue

            self.log.debug(inst.data["name"])
            saver_instances.append(inst)

        if not saver_instances:
            raise RuntimeError("No instances found for Deadline submission")

        comment = instance.data.get("comment", "")
        deadline_user = context.data.get("deadlineUser", getpass.getuser())

        script_path = context.data["currentFile"]

        anatomy = instance.context.data["anatomy"]
        publish_template = anatomy.get_template_item(
            "publish", "default", "path"
        )
        for item in context:
            if "workfile" in item.data["families"]:
                msg = ("Workfile (scene) must be published "
                       "along with the render")
                assert item.data["publish"] is True, msg

                template_data = item.data.get("anatomyData")
                rep = item.data.get("representations")[0].get("name")
                template_data["representation"] = rep
                template_data["ext"] = rep
                template_data["comment"] = None
                template_filled = publish_template.format_strict(
                    template_data
                )
                script_path = os.path.normpath(template_filled)

                self.log.info(
                    "Using published scene for render {}".format(script_path)
                )

        filename = os.path.basename(script_path)

        # Documentation for keys available at:
        # https://docs.thinkboxsoftware.com
        #    /products/deadline/8.0/1_User%20Manual/manual
        #    /manual-submission.html#job-info-file-options
        payload = {
            "JobInfo": {
                # Top-level group name
                "BatchName": filename,

                # Asset dependency to wait for at least the scene file to sync.
                "AssetDependency0": script_path,

                # Job name, as seen in Monitor
                "Name": filename,

                "Priority": attribute_values.get(
                    "priority", self.priority),
                "ChunkSize": attribute_values.get(
                    "chunk", self.chunk_size),
                "ConcurrentTasks": attribute_values.get(
                    "concurrency",
                    self.concurrent_tasks
                ),

                # User, as seen in Monitor
                "UserName": deadline_user,

                "Pool": instance.data.get("primaryPool"),
                "SecondaryPool": instance.data.get("secondaryPool"),
                "Group": self.group,

                "Plugin": self.plugin,
                "Frames": "{start}-{end}".format(
                    start=int(instance.data["frameStartHandle"]),
                    end=int(instance.data["frameEndHandle"])
                ),

                "Comment": comment,
            },
            "PluginInfo": {
                # Input
                "FlowFile": script_path,

                # Mandatory for Deadline
                "Version": str(instance.data["app_version"]),

                # Render in high quality
                "HighQuality": True,

                # Whether saver output should be checked after rendering
                # is complete
                "CheckOutput": True,

                # Proxy: higher numbers smaller images for faster test renders
                # 1 = no proxy quality
                "Proxy": 1
            },

            # Mandatory for Deadline, may be empty
            "AuxFiles": []
        }

        # Enable going to rendered frames from Deadline Monitor
        for index, instance in enumerate(saver_instances):
            head, padding, tail = get_frame_path(
                instance.data["expectedFiles"][0]
            )
            path = "{}{}{}".format(head, "#" * padding, tail)
            folder, filename = os.path.split(path)
            payload["JobInfo"]["OutputDirectory%d" % index] = folder
            payload["JobInfo"]["OutputFilename%d" % index] = filename

        # Include critical variables with submission
        keys = [
            "FTRACK_API_KEY",
            "FTRACK_API_USER",
            "FTRACK_SERVER",
            "AYON_BUNDLE_NAME",
            "AYON_DEFAULT_SETTINGS_VARIANT",
            "AYON_PROJECT_NAME",
            "AYON_FOLDER_PATH",
            "AYON_TASK_NAME",
            "AYON_WORKDIR",
            "AYON_APP_NAME",
            "AYON_LOG_NO_COLORS",
            "AYON_IN_TESTS",
        ]

        environment = {
            key: os.environ[key]
            for key in keys
            if key in os.environ
        }

        # to recognize render jobs
        environment["AYON_RENDER_JOB"] = "1"

        payload["JobInfo"].update({
            "EnvironmentKeyValue%d" % index: "{key}={value}".format(
                key=key,
                value=environment[key]
            ) for index, key in enumerate(environment)
        })

        self.log.debug("Submitting..")
        self.log.debug(json.dumps(payload, indent=4, sort_keys=True))

        # E.g. http://192.168.0.1:8082/api/jobs
        url = "{}/api/jobs".format(deadline_url)
        auth = instance.data["deadline"]["auth"]
        verify = instance.data["deadline"]["verify"]
        response = requests_post(url, json=payload, auth=auth, verify=verify)
        if not response.ok:
            raise Exception(response.text)

        # Store the response for dependent job submission plug-ins
        for instance in saver_instances:
            instance.data["deadlineSubmissionJob"] = response.json()
@ -1,420 +0,0 @@
# -*- coding: utf-8 -*-
"""Submitting render job to Deadline."""
import os
from pathlib import Path
from collections import OrderedDict
from zipfile import ZipFile, is_zipfile
import re
from datetime import datetime

import attr
import pyblish.api

from ayon_deadline import abstract_submit_deadline
from ayon_deadline.abstract_submit_deadline import DeadlineJobInfo
from ayon_core.lib import is_in_tests


class _ZipFile(ZipFile):
    """Extended check for windows invalid characters."""

    # this is extending default zipfile table for few invalid characters
    # that can come from Mac
    _windows_illegal_characters = ":<>|\"?*\r\n\x00"
    _windows_illegal_name_trans_table = str.maketrans(
        _windows_illegal_characters,
        "_" * len(_windows_illegal_characters)
    )


@attr.s
class PluginInfo(object):
    """Plugin info structure for Harmony Deadline plugin."""

    SceneFile = attr.ib()
    # Harmony version
    Version = attr.ib()

    Camera = attr.ib(default="")
    FieldOfView = attr.ib(default=41.11)
    IsDatabase = attr.ib(default=False)
    ResolutionX = attr.ib(default=1920)
    ResolutionY = attr.ib(default=1080)

    # Resolution name preset, default
    UsingResPreset = attr.ib(default=False)
    ResolutionName = attr.ib(default="HDTV_1080p24")

    PreRenderInlineScript = attr.ib(default=None)

    # --------------------------------------------------
    _outputNode = attr.ib(factory=list)

    @property
    def OutputNode(self):  # noqa: N802
        """Return all output nodes formatted for Deadline.

        Returns:
            dict: as `{'Output0Node': 'Top/renderFarmDefault'}`

        """
        out = {}
        for index, v in enumerate(self._outputNode):
            out["Output{}Node".format(index)] = v
        return out

    @OutputNode.setter
    def OutputNode(self, val):  # noqa: N802
        self._outputNode.append(val)

    # --------------------------------------------------
    _outputType = attr.ib(factory=list)

    @property
    def OutputType(self):  # noqa: N802
        """Return output nodes type formatted for Deadline.

        Returns:
            dict: as `{'Output0Type': 'Image'}`

        """
        out = {}
        for index, v in enumerate(self._outputType):
            out["Output{}Type".format(index)] = v
        return out

    @OutputType.setter
    def OutputType(self, val):  # noqa: N802
        self._outputType.append(val)

    # --------------------------------------------------
    _outputLeadingZero = attr.ib(factory=list)

    @property
    def OutputLeadingZero(self):  # noqa: N802
        """Return output nodes leading zeros formatted for Deadline.

        Returns:
            dict: as `{'Output0LeadingZero': '3'}`

        """
        out = {}
        for index, v in enumerate(self._outputLeadingZero):
            out["Output{}LeadingZero".format(index)] = v
        return out

    @OutputLeadingZero.setter
    def OutputLeadingZero(self, val):  # noqa: N802
        self._outputLeadingZero.append(val)

    # --------------------------------------------------
    _outputFormat = attr.ib(factory=list)

    @property
    def OutputFormat(self):  # noqa: N802
        """Return output nodes format formatted for Deadline.

        Returns:
            dict: as `{'Output0Format': 'PNG4'}`

        """
        out = {}
        for index, v in enumerate(self._outputFormat):
            out["Output{}Format".format(index)] = v
        return out

    @OutputFormat.setter
    def OutputFormat(self, val):  # noqa: N802
        self._outputFormat.append(val)

    # --------------------------------------------------
    _outputStartFrame = attr.ib(factory=list)

    @property
    def OutputStartFrame(self):  # noqa: N802
        """Return start frame for output nodes formatted for Deadline.

        Returns:
            dict: as `{'Output0StartFrame': '1'}`

        """
        out = {}
        for index, v in enumerate(self._outputStartFrame):
            out["Output{}StartFrame".format(index)] = v
        return out

    @OutputStartFrame.setter
    def OutputStartFrame(self, val):  # noqa: N802
        self._outputStartFrame.append(val)

    # --------------------------------------------------
    _outputPath = attr.ib(factory=list)

    @property
    def OutputPath(self):  # noqa: N802
        """Return output paths for nodes formatted for Deadline.

        Returns:
            dict: as `{'Output0Path': '/output/path'}`

        """
        out = {}
        for index, v in enumerate(self._outputPath):
            out["Output{}Path".format(index)] = v
        return out

    @OutputPath.setter
    def OutputPath(self, val):  # noqa: N802
        self._outputPath.append(val)

    def set_output(self, node, image_format, output,
                   output_type="Image", zeros=3, start_frame=1):
        """Helper to set output.

        This should be used instead of setting properties individually
        so the indexes remain consistent.

        Args:
            node (str): harmony write node name
            image_format (str): format of output (PNG4, TIF, ...)
            output (str): output path
            output_type (str, optional): "Image" or "Movie" (not supported).
            zeros (int, optional): Leading zeros (for 0001 = 3)
            start_frame (int, optional): Sequence offset.

        """

        self.OutputNode = node
        self.OutputFormat = image_format
        self.OutputPath = output
        self.OutputType = output_type
        self.OutputLeadingZero = zeros
        self.OutputStartFrame = start_frame

    def serialize(self):
        """Return all data serialized as dictionary.

        Returns:
            OrderedDict: all serialized data.

        """
        def filter_data(a, v):
            if a.name.startswith("_"):
                return False
            if v is None:
                return False
            return True

        serialized = attr.asdict(
            self, dict_factory=OrderedDict, filter=filter_data)
        serialized.update(self.OutputNode)
        serialized.update(self.OutputFormat)
        serialized.update(self.OutputPath)
        serialized.update(self.OutputType)
        serialized.update(self.OutputLeadingZero)
        serialized.update(self.OutputStartFrame)

        return serialized
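
    # Usage sketch (illustrative only, hypothetical values):
    #   info = PluginInfo(SceneFile="scene.xstage", Version="21")
    #   info.set_output("Top/Write", "PNG4", "/renders/shot_01-")
    #   info.serialize()  # then includes, besides the attr fields:
    #   #   {"Output0Node": "Top/Write", "Output0Format": "PNG4",
    #   #    "Output0Path": "/renders/shot_01-", "Output0Type": "Image",
    #   #    "Output0LeadingZero": 3, "Output0StartFrame": 1}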


class HarmonySubmitDeadline(
    abstract_submit_deadline.AbstractSubmitDeadline
):
    """Submit render write of Harmony scene to Deadline.

    Renders are submitted to a Deadline Web Service as
    supplied via the environment variable ``DEADLINE_REST_URL``.

    Note:
        If Deadline configuration is not detected, this plugin will
        be disabled.

    Attributes:
        use_published (bool): Use published scene to render instead of the
            one in work area.

    """

    label = "Submit to Deadline"
    order = pyblish.api.IntegratorOrder + 0.1
    hosts = ["harmony"]
    families = ["render.farm"]
    targets = ["local"]
    settings_category = "deadline"

    optional = True
    use_published = False
    priority = 50
    chunk_size = 1000000
    group = "none"
    department = ""

    def get_job_info(self):
        job_info = DeadlineJobInfo("Harmony")
        job_info.Name = self._instance.data["name"]
        job_info.Plugin = "HarmonyAYON"
        job_info.Frames = "{}-{}".format(
            self._instance.data["frameStartHandle"],
            self._instance.data["frameEndHandle"]
        )
        # for now, get those from presets. Later on it should be
        # configurable in Harmony UI directly.
        job_info.Priority = self.priority
        job_info.Pool = self._instance.data.get("primaryPool")
        job_info.SecondaryPool = self._instance.data.get("secondaryPool")
        job_info.ChunkSize = self.chunk_size
        batch_name = os.path.basename(self._instance.data["source"])
        if is_in_tests():
            batch_name += datetime.now().strftime("%d%m%Y%H%M%S")
        job_info.BatchName = batch_name
        job_info.Department = self.department
        job_info.Group = self.group

        keys = [
            "FTRACK_API_KEY",
            "FTRACK_API_USER",
            "FTRACK_SERVER",
            "AYON_BUNDLE_NAME",
            "AYON_DEFAULT_SETTINGS_VARIANT",
            "AYON_PROJECT_NAME",
            "AYON_FOLDER_PATH",
            "AYON_TASK_NAME",
            "AYON_WORKDIR",
            "AYON_APP_NAME",
            "AYON_LOG_NO_COLORS",
            "AYON_IN_TESTS"
        ]

        environment = {
            key: os.environ[key]
            for key in keys
            if key in os.environ
        }
        for key in keys:
            value = environment.get(key)
            if value:
                job_info.EnvironmentKeyValue[key] = value

        # to recognize render jobs
        job_info.add_render_job_env_var()

        return job_info

    def _unzip_scene_file(self, published_scene: Path) -> Path:
        """Unzip scene zip file to its directory.

        Unzip scene file (if it is zip file) to its current directory and
        return path to xstage file there. Xstage file is determined by its
        name.

        Args:
            published_scene (Path): path to zip file.

        Returns:
            Path: The path to unzipped xstage.
        """
        # if not zip, bail out.
        if "zip" not in published_scene.suffix or not is_zipfile(
                published_scene.as_posix()
        ):
            self.log.error("Published scene is not in zip.")
            self.log.error(published_scene)
            raise AssertionError("invalid scene format")

        xstage_path = (
            published_scene.parent
            / published_scene.stem
            / f"{published_scene.stem}.xstage"
        )
        unzip_dir = (published_scene.parent / published_scene.stem)
        with _ZipFile(published_scene, "r") as zip_ref:
            # long path prefix (//?/) added to minimize the risk of
            # failures when extracting to long file paths
            zip_ref.extractall("//?/" + str(unzip_dir.as_posix()))

        # find any xstage files in directory, prefer the one with the same
        # name as the directory (plus extension)
        xstage_files = []
        for scene in unzip_dir.iterdir():
            if scene.suffix == ".xstage":
                xstage_files.append(scene)

        # there must be at least one xstage file
        if not xstage_files:
            self.log.error("No xstage files found in zip")
            raise AssertionError("Invalid scene archive")

        ideal_scene = False
        # find the one with the same name as the zip, in case there is
        # more than one xstage file.
        for scene in xstage_files:
            # if /foo/bar/baz.zip == /foo/bar/baz/baz.xstage
            #              ^^^                    ^^^
            if scene.stem == published_scene.stem:
                xstage_path = scene
                ideal_scene = True

        # but sometimes the xstage file has a different name than the zip -
        # in that case use that one.
        if not ideal_scene:
            xstage_path = xstage_files[0]
        return xstage_path

    def get_plugin_info(self):
        # this is path to published scene workfile _ZIP_. Before
        # rendering, we need to unzip it.
        published_scene = Path(
            self.from_published_scene(False))
        self.log.debug(f"Processing {published_scene.as_posix()}")
        xstage_path = self._unzip_scene_file(published_scene)
        render_path = xstage_path.parent / "renders"

        # output dir for submit_publish_job to create metadata .json file in
        self._instance.data["outputDir"] = render_path
        new_expected_files = []
        render_path_str = str(render_path.as_posix())
        for file in self._instance.data["expectedFiles"]:
            _file = str(Path(file).as_posix())
            expected_dir_str = os.path.dirname(_file)
            new_expected_files.append(
                _file.replace(expected_dir_str, render_path_str)
            )
        audio_file = self._instance.data.get("audioFile")
        if audio_file:
            abs_path = xstage_path.parent / audio_file
            self._instance.context.data["audioFile"] = str(abs_path)

        self._instance.data["source"] = str(published_scene.as_posix())
        self._instance.data["expectedFiles"] = new_expected_files
        harmony_plugin_info = PluginInfo(
            SceneFile=xstage_path.as_posix(),
            Version=(
                self._instance.context.data["harmonyVersion"].split(".")[0]),
            FieldOfView=self._instance.context.data["FOV"],
            ResolutionX=self._instance.data["resolutionWidth"],
            ResolutionY=self._instance.data["resolutionHeight"]
        )

        pattern = ('[0]{' + str(self._instance.data["leadingZeros"])
                   + '}1\\.[a-zA-Z]{3}')
        render_prefix = re.sub(pattern, '',
                               self._instance.data["expectedFiles"][0])
        harmony_plugin_info.set_output(
            self._instance.data["setMembers"][0],
            self._instance.data["outputFormat"],
            render_prefix,
            self._instance.data["outputType"],
            self._instance.data["leadingZeros"],
            self._instance.data["outputStartFrame"]
        )

        all_write_nodes = self._instance.context.data["all_write_nodes"]
        disable_nodes = []
        for node in all_write_nodes:
            # disable all other write nodes
            if node != self._instance.data["setMembers"][0]:
                disable_nodes.append("node.setEnable('{}', false)"
                                     .format(node))
        harmony_plugin_info.PreRenderInlineScript = ';'.join(disable_nodes)

        return harmony_plugin_info.serialize()
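
    # Illustrative sketch (hypothetical node names): with write nodes
    # ["Top/WriteA", "Top/WriteB"] and "Top/WriteA" being rendered, the
    # generated PreRenderInlineScript is
    #   "node.setEnable('Top/WriteB', false)"
    # Multiple disabled nodes are joined with ";".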
@ -1,181 +0,0 @@
import os
import getpass
from datetime import datetime

import attr
import pyblish.api
from ayon_core.lib import (
    TextDef,
    NumberDef,
    is_in_tests,
)
from ayon_core.pipeline import (
    AYONPyblishPluginMixin
)
from ayon_deadline import abstract_submit_deadline
from ayon_deadline.abstract_submit_deadline import DeadlineJobInfo


@attr.s
class HoudiniPluginInfo(object):
    Build = attr.ib(default=None)
    IgnoreInputs = attr.ib(default=True)
    ScriptJob = attr.ib(default=True)
    SceneFile = attr.ib(default=None)   # Input
    SaveFile = attr.ib(default=True)
    ScriptFilename = attr.ib(default=None)
    OutputDriver = attr.ib(default=None)
    Version = attr.ib(default=None)  # Mandatory for Deadline
    ProjectPath = attr.ib(default=None)


class HoudiniCacheSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline,  # noqa
                                 AYONPyblishPluginMixin):
    """Submit Houdini scene to perform a local publish in Deadline.

    Publishing in Deadline can be helpful for scenes that publish very
    slowly. This way it can process in the background on another machine
    without the Artist having to wait for the publish to finish on their
    local machine.
    """

    label = "Submit Scene to Deadline"
    order = pyblish.api.IntegratorOrder
    hosts = ["houdini"]
    families = ["publish.hou"]
    targets = ["local"]
    settings_category = "deadline"

    priority = 50
    chunk_size = 999999
    group = None
    jobInfo = {}
    pluginInfo = {}

    def get_job_info(self):
        job_info = DeadlineJobInfo(Plugin="Houdini")

        job_info.update(self.jobInfo)
        instance = self._instance
        context = instance.context
        assert all(
            result["success"] for result in context.data["results"]
        ), "Errors found, aborting integration.."

        project_name = instance.context.data["projectName"]
        filepath = context.data["currentFile"]
        scenename = os.path.basename(filepath)
        job_name = "{scene} - {instance} [PUBLISH]".format(
            scene=scenename, instance=instance.name)
        batch_name = "{code} - {scene}".format(code=project_name,
                                               scene=scenename)
        if is_in_tests():
            batch_name += datetime.now().strftime("%d%m%Y%H%M%S")

        job_info.Name = job_name
        job_info.BatchName = batch_name
        job_info.Plugin = instance.data["plugin"]
        job_info.UserName = context.data.get("deadlineUser", getpass.getuser())
        rop_node = self.get_rop_node(instance)
        if rop_node.type().name() != "alembic":
            frames = "{start}-{end}x{step}".format(
                start=int(instance.data["frameStart"]),
                end=int(instance.data["frameEnd"]),
                step=int(instance.data["byFrameStep"]),
            )

            job_info.Frames = frames

        job_info.Pool = instance.data.get("primaryPool")
        job_info.SecondaryPool = instance.data.get("secondaryPool")

        attr_values = self.get_attr_values_from_data(instance.data)

        job_info.ChunkSize = instance.data.get("chunk_size", self.chunk_size)
        job_info.Comment = context.data.get("comment")
        job_info.Priority = attr_values.get("priority", self.priority)
        job_info.Group = attr_values.get("group", self.group)

        keys = [
            "FTRACK_API_KEY",
            "FTRACK_API_USER",
            "FTRACK_SERVER",
            "OPENPYPE_SG_USER",
            "AYON_PROJECT_NAME",
            "AYON_FOLDER_PATH",
            "AYON_TASK_NAME",
            "AYON_WORKDIR",
            "AYON_APP_NAME",
            "AYON_LOG_NO_COLORS",
        ]

        environment = {
            key: os.environ[key]
            for key in keys
            if key in os.environ
        }

        for key in keys:
            value = environment.get(key)
            if not value:
                continue
            job_info.EnvironmentKeyValue[key] = value
        # to recognize render jobs
        job_info.add_render_job_env_var()

        return job_info

    def get_plugin_info(self):
        # Not all hosts can import this module.
        import hou

        instance = self._instance
        version = hou.applicationVersionString()
        version = ".".join(version.split(".")[:2])
        rop = self.get_rop_node(instance)
        plugin_info = HoudiniPluginInfo(
            Build=None,
            IgnoreInputs=True,
            ScriptJob=True,
            SceneFile=self.scene_path,
            SaveFile=True,
            OutputDriver=rop.path(),
            Version=version,
            ProjectPath=os.path.dirname(self.scene_path)
        )

        plugin_payload = attr.asdict(plugin_info)

        return plugin_payload

    def process(self, instance):
        super(HoudiniCacheSubmitDeadline, self).process(instance)
        output_dir = os.path.dirname(instance.data["files"][0])
        instance.data["outputDir"] = output_dir
        instance.data["toBeRenderedOn"] = "deadline"

    def get_rop_node(self, instance):
        # Not all hosts can import this module.
        import hou

        rop = instance.data.get("instance_node")
        rop_node = hou.node(rop)

        return rop_node

    @classmethod
    def get_attribute_defs(cls):
        defs = super(HoudiniCacheSubmitDeadline, cls).get_attribute_defs()
        defs.extend([
            NumberDef("priority",
                      minimum=1,
                      maximum=250,
                      decimals=0,
                      default=cls.priority,
                      label="Priority"),
            TextDef("group",
                    default=cls.group,
                    label="Group Name"),
        ])

        return defs
@ -1,403 +0,0 @@
import os
import attr
import getpass
from datetime import datetime

import pyblish.api

from ayon_core.pipeline import AYONPyblishPluginMixin
from ayon_core.lib import (
    is_in_tests,
    TextDef,
    NumberDef
)
from ayon_deadline import abstract_submit_deadline
from ayon_deadline.abstract_submit_deadline import DeadlineJobInfo


@attr.s
class DeadlinePluginInfo():
    SceneFile = attr.ib(default=None)
    OutputDriver = attr.ib(default=None)
    Version = attr.ib(default=None)
    IgnoreInputs = attr.ib(default=True)


@attr.s
class ArnoldRenderDeadlinePluginInfo():
    InputFile = attr.ib(default=None)
    Verbose = attr.ib(default=4)


@attr.s
class MantraRenderDeadlinePluginInfo():
    SceneFile = attr.ib(default=None)
    Version = attr.ib(default=None)


@attr.s
class VrayRenderPluginInfo():
    InputFilename = attr.ib(default=None)
    SeparateFilesPerFrame = attr.ib(default=True)


@attr.s
class RedshiftRenderPluginInfo():
    SceneFile = attr.ib(default=None)
    # Use "1" as the default Redshift version just because it is the
    # default fallback version in Deadline's Redshift plugin
    # if no version was specified
    Version = attr.ib(default="1")


@attr.s
class HuskStandalonePluginInfo():
    """Requires Deadline Husk Standalone Plugin.
    See Deadline Plug-in:
        https://github.com/BigRoy/HuskStandaloneSubmitter
    Also see Husk options here:
        https://www.sidefx.com/docs/houdini/ref/utils/husk.html
    """
    SceneFile = attr.ib()
    # TODO: Below parameters are only supported by a custom version
    #   of the plugin
    Renderer = attr.ib(default=None)
    RenderSettings = attr.ib(default="/Render/rendersettings")
    Purpose = attr.ib(default="geometry,render")
    Complexity = attr.ib(default="veryhigh")
    Snapshot = attr.ib(default=-1)
    LogLevel = attr.ib(default="2")
    PreRender = attr.ib(default="")
    PreFrame = attr.ib(default="")
    PostFrame = attr.ib(default="")
    PostRender = attr.ib(default="")
    RestartDelegate = attr.ib(default="")
    Version = attr.ib(default="")


class HoudiniSubmitDeadline(
    abstract_submit_deadline.AbstractSubmitDeadline,
    AYONPyblishPluginMixin
):
    """Submit Render ROPs to Deadline.

    Renders are submitted to a Deadline Web Service as
    supplied via the environment variable AVALON_DEADLINE.

    Target "local":
        Even though this does *not* render locally this is seen as
        a 'local' submission as it is the regular way of submitting
        a Houdini render locally.

    """

    label = "Submit Render to Deadline"
    order = pyblish.api.IntegratorOrder
    hosts = ["houdini"]
    families = ["redshift_rop",
                "arnold_rop",
                "mantra_rop",
                "karma_rop",
                "vray_rop"]
    targets = ["local"]
    settings_category = "deadline"
    use_published = True

    # presets
    export_priority = 50
    export_chunk_size = 10
    export_group = ""
    priority = 50
    chunk_size = 1
    group = ""

    @classmethod
    def get_attribute_defs(cls):
        return [
            NumberDef(
                "priority",
                label="Priority",
                default=cls.priority,
                decimals=0
            ),
            NumberDef(
                "chunk",
                label="Frames Per Task",
                default=cls.chunk_size,
                decimals=0,
                minimum=1,
                maximum=1000
            ),
            TextDef(
                "group",
                default=cls.group,
                label="Group Name"
            ),
            NumberDef(
                "export_priority",
                label="Export Priority",
                default=cls.export_priority,
                decimals=0
            ),
            NumberDef(
                "export_chunk",
                label="Export Frames Per Task",
                default=cls.export_chunk_size,
                decimals=0,
                minimum=1,
                maximum=1000
            ),
            TextDef(
                "export_group",
                default=cls.export_group,
                label="Export Group Name"
            ),
        ]

    def get_job_info(self, dependency_job_ids=None):

        instance = self._instance
        context = instance.context

        attribute_values = self.get_attr_values_from_data(instance.data)

        # Whether Deadline render submission is being split in two
        # (extract + render)
        split_render_job = instance.data.get("splitRender")

        # If there are dependency job ids we can assume this is a render job
        # and not an export job
        is_export_job = True
        if dependency_job_ids:
            is_export_job = False

        job_type = "[RENDER]"
        if split_render_job and not is_export_job:
            product_type = instance.data["productType"]
            plugin = {
                "usdrender": "HuskStandalone",
            }.get(product_type)
            if not plugin:
                # Convert from product type to Deadline plugin name
                # i.e., arnold_rop -> Arnold
                plugin = product_type.replace("_rop", "").capitalize()
        else:
            plugin = "Houdini"
            if split_render_job:
                job_type = "[EXPORT IFD]"

        job_info = DeadlineJobInfo(Plugin=plugin)

        filepath = context.data["currentFile"]
        filename = os.path.basename(filepath)
        job_info.Name = "{} - {} {}".format(filename, instance.name, job_type)
        job_info.BatchName = filename

        job_info.UserName = context.data.get(
            "deadlineUser", getpass.getuser())

        if is_in_tests():
            job_info.BatchName += datetime.now().strftime("%d%m%Y%H%M%S")

        # Deadline requires integers in frame range
        start = instance.data["frameStartHandle"]
        end = instance.data["frameEndHandle"]
        frames = "{start}-{end}x{step}".format(
            start=int(start),
            end=int(end),
            step=int(instance.data["byFrameStep"]),
        )
        job_info.Frames = frames

        # Make sure we make the job frame dependent so render tasks pick up
        # as soon as export tasks are done
        if split_render_job and not is_export_job:
            job_info.IsFrameDependent = bool(instance.data.get(
                "splitRenderFrameDependent", True))

        job_info.Pool = instance.data.get("primaryPool")
        job_info.SecondaryPool = instance.data.get("secondaryPool")

        if split_render_job and is_export_job:
            job_info.Priority = attribute_values.get(
                "export_priority", self.export_priority
            )
            job_info.ChunkSize = attribute_values.get(
                "export_chunk", self.export_chunk_size
            )
            job_info.Group = self.export_group
        else:
            job_info.Priority = attribute_values.get(
                "priority", self.priority
            )
            job_info.ChunkSize = attribute_values.get(
                "chunk", self.chunk_size
            )
            job_info.Group = self.group

        # Apply render globals, like e.g. data from collect machine list
        render_globals = instance.data.get("renderGlobals", {})
        if render_globals:
            self.log.debug("Applying 'renderGlobals' to job info: %s",
                           render_globals)
            job_info.update(render_globals)

        job_info.Comment = context.data.get("comment")

        keys = [
            "FTRACK_API_KEY",
            "FTRACK_API_USER",
            "FTRACK_SERVER",
            "OPENPYPE_SG_USER",
            "AYON_PROJECT_NAME",
            "AYON_FOLDER_PATH",
            "AYON_TASK_NAME",
            "AYON_WORKDIR",
            "AYON_APP_NAME",
            "AYON_LOG_NO_COLORS",
        ]

        environment = {
            key: os.environ[key]
            for key in keys
            if key in os.environ
        }

        for key in keys:
            value = environment.get(key)
            if value:
                job_info.EnvironmentKeyValue[key] = value

        # to recognize render jobs
        job_info.add_render_job_env_var()

        for filepath in instance.data["files"]:
            dirname = os.path.dirname(filepath)
            fname = os.path.basename(filepath)
            job_info.OutputDirectory += dirname.replace("\\", "/")
            job_info.OutputFilename += fname

        # Add dependencies if given
        if dependency_job_ids:
            job_info.JobDependencies = ",".join(dependency_job_ids)

        return job_info
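
    # Illustrative only: with a split render job, "usdrender" maps
    # explicitly to the "HuskStandalone" plugin; other ROP product types
    # are derived mechanically, e.g.
    #   "arnold_rop".replace("_rop", "").capitalize() -> "Arnold"
    #   "mantra_rop".replace("_rop", "").capitalize() -> "Mantra"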

    def get_plugin_info(self, job_type=None):
        # Not all hosts can import this module.
        import hou

        instance = self._instance
        context = instance.context

        hou_major_minor = hou.applicationVersionString().rsplit(".", 1)[0]

        # Output driver to render
        if job_type == "render":
            product_type = instance.data.get("productType")
            if product_type == "arnold_rop":
                plugin_info = ArnoldRenderDeadlinePluginInfo(
                    InputFile=instance.data["ifdFile"]
                )
            elif product_type == "mantra_rop":
                plugin_info = MantraRenderDeadlinePluginInfo(
                    SceneFile=instance.data["ifdFile"],
                    Version=hou_major_minor,
                )
            elif product_type == "vray_rop":
                plugin_info = VrayRenderPluginInfo(
                    InputFilename=instance.data["ifdFile"],
                )
            elif product_type == "redshift_rop":
                plugin_info = RedshiftRenderPluginInfo(
                    SceneFile=instance.data["ifdFile"]
                )
                # Note: To use different versions of Redshift on Deadline
                #       set the `REDSHIFT_VERSION` env variable in the Tools
                #       settings in the AYON Application plugin. You will also
                #       need to set that version in `Redshift.param` file
                #       of the Redshift Deadline plugin:
                #           [Redshift_Executable_*]
                #       where * is the version number.
                if os.getenv("REDSHIFT_VERSION"):
                    plugin_info.Version = os.getenv("REDSHIFT_VERSION")
                else:
                    self.log.warning((
                        "REDSHIFT_VERSION env variable is not set"
                        " - using version configured in Deadline"
                    ))

            elif product_type == "usdrender":
                plugin_info = self._get_husk_standalone_plugin_info(
                    instance, hou_major_minor)

            else:
                self.log.error(
                    "Product type '%s' not supported yet to split render job",
                    product_type
                )
                return
        else:
            driver = hou.node(instance.data["instance_node"])
            plugin_info = DeadlinePluginInfo(
                SceneFile=context.data["currentFile"],
                OutputDriver=driver.path(),
                Version=hou_major_minor,
                IgnoreInputs=True
            )

        return attr.asdict(plugin_info)

    def process(self, instance):
        if not instance.data["farm"]:
            self.log.debug("Render on farm is disabled. "
                           "Skipping deadline submission.")
            return

        super(HoudiniSubmitDeadline, self).process(instance)

        # TODO: Avoid the need for this logic here, needed for submit publish
        # Store output dir for unified publisher (filesequence)
        output_dir = os.path.dirname(instance.data["files"][0])
        instance.data["outputDir"] = output_dir

    def _get_husk_standalone_plugin_info(self, instance, hou_major_minor):
        # Not all hosts can import this module.
        import hou

        # Supply additional parameters from the USD Render ROP
        # to the Husk Standalone Render Plug-in
        rop_node = hou.node(instance.data["instance_node"])
        snapshot_interval = -1
        if rop_node.evalParm("dosnapshot"):
            snapshot_interval = rop_node.evalParm("snapshotinterval")

        restart_delegate = 0
        if rop_node.evalParm("husk_restartdelegate"):
            restart_delegate = rop_node.evalParm("husk_restartdelegateframes")

        rendersettings = (
            rop_node.evalParm("rendersettings")
            or "/Render/rendersettings"
        )
        return HuskStandalonePluginInfo(
            SceneFile=instance.data["ifdFile"],
            Renderer=rop_node.evalParm("renderer"),
            RenderSettings=rendersettings,
            Purpose=rop_node.evalParm("husk_purpose"),
            Complexity=rop_node.evalParm("husk_complexity"),
            Snapshot=snapshot_interval,
            PreRender=rop_node.evalParm("husk_prerender"),
            PreFrame=rop_node.evalParm("husk_preframe"),
            PostFrame=rop_node.evalParm("husk_postframe"),
            PostRender=rop_node.evalParm("husk_postrender"),
            RestartDelegate=restart_delegate,
            Version=hou_major_minor
        )


class HoudiniSubmitDeadlineUsdRender(HoudiniSubmitDeadline):
    # Do not use published workfile paths for the USD Render ROP because the
    # Export Job doesn't seem to occur using the published path either, so
    # output paths then do not match the actual rendered paths
    use_published = False
    families = ["usdrender"]
@ -1,431 +0,0 @@
|
|||
import os
|
||||
import getpass
|
||||
import copy
|
||||
import attr
|
||||
|
||||
from ayon_core.lib import (
|
||||
TextDef,
|
||||
BoolDef,
|
||||
NumberDef,
|
||||
)
|
||||
from ayon_core.pipeline import (
|
||||
AYONPyblishPluginMixin
|
||||
)
|
||||
from ayon_core.pipeline.publish.lib import (
|
||||
replace_with_published_scene_path
|
||||
)
|
||||
from ayon_core.pipeline.publish import KnownPublishError
|
||||
from ayon_max.api.lib import (
|
||||
get_current_renderer,
|
||||
get_multipass_setting
|
||||
)
|
||||
from ayon_max.api.lib_rendersettings import RenderSettings
|
||||
from ayon_deadline import abstract_submit_deadline
|
||||
from ayon_deadline.abstract_submit_deadline import DeadlineJobInfo
|
||||
|
||||
|
||||
@attr.s
|
||||
class MaxPluginInfo(object):
|
||||
SceneFile = attr.ib(default=None) # Input
|
||||
Version = attr.ib(default=None) # Mandatory for Deadline
|
||||
SaveFile = attr.ib(default=True)
|
||||
IgnoreInputs = attr.ib(default=True)
|
||||
|
||||
|
||||
class MaxSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline,
|
||||
AYONPyblishPluginMixin):
|
||||
|
||||
label = "Submit Render to Deadline"
|
||||
hosts = ["max"]
|
||||
families = ["maxrender"]
|
||||
targets = ["local"]
|
||||
settings_category = "deadline"
|
||||
|
||||
use_published = True
|
||||
priority = 50
|
||||
chunk_size = 1
|
||||
jobInfo = {}
|
||||
pluginInfo = {}
|
||||
group = None
|
||||
|
||||
@classmethod
|
||||
def apply_settings(cls, project_settings):
|
||||
settings = project_settings["deadline"]["publish"]["MaxSubmitDeadline"] # noqa
|
||||
|
||||
# Take some defaults from settings
|
||||
cls.use_published = settings.get("use_published",
|
||||
cls.use_published)
|
||||
cls.priority = settings.get("priority",
|
||||
cls.priority)
|
||||
cls.chuck_size = settings.get("chunk_size", cls.chunk_size)
|
||||
cls.group = settings.get("group", cls.group)
|
||||
# TODO: multiple camera instance, separate job infos
|
||||
def get_job_info(self):
|
||||
job_info = DeadlineJobInfo(Plugin="3dsmax")
|
||||
|
||||
# todo: test whether this works for existing production cases
|
||||
# where custom jobInfo was stored in the project settings
|
||||
job_info.update(self.jobInfo)
|
||||
|
||||
instance = self._instance
|
||||
context = instance.context
|
||||
# Always use the original work file name for the Job name even when
|
||||
# rendering is done from the published Work File. The original work
|
||||
# file name is clearer because it can also have subversion strings,
|
||||
# etc. which are stripped for the published file.
|
||||
|
||||
src_filepath = context.data["currentFile"]
|
||||
src_filename = os.path.basename(src_filepath)
|
||||
job_info.Name = "%s - %s" % (src_filename, instance.name)
|
||||
job_info.BatchName = src_filename
|
||||
job_info.Plugin = instance.data["plugin"]
|
||||
job_info.UserName = context.data.get("deadlineUser", getpass.getuser())
|
||||
job_info.EnableAutoTimeout = True
|
||||
# Deadline requires integers in frame range
|
||||
frames = "{start}-{end}".format(
|
||||
start=int(instance.data["frameStart"]),
|
||||
end=int(instance.data["frameEnd"])
|
||||
)
|
||||
job_info.Frames = frames
|
||||
|
||||
job_info.Pool = instance.data.get("primaryPool")
|
||||
job_info.SecondaryPool = instance.data.get("secondaryPool")
|
||||
|
||||
attr_values = self.get_attr_values_from_data(instance.data)
|
||||
|
||||
job_info.ChunkSize = attr_values.get("chunkSize", 1)
|
||||
job_info.Comment = context.data.get("comment")
|
||||
job_info.Priority = attr_values.get("priority", self.priority)
|
||||
job_info.Group = attr_values.get("group", self.group)
|
||||
|
||||
# Add options from RenderGlobals
|
||||
render_globals = instance.data.get("renderGlobals", {})
|
||||
job_info.update(render_globals)
|
||||
|
||||
keys = [
|
||||
"FTRACK_API_KEY",
|
||||
"FTRACK_API_USER",
|
||||
"FTRACK_SERVER",
|
||||
"OPENPYPE_SG_USER",
|
||||
"AYON_BUNDLE_NAME",
|
||||
"AYON_DEFAULT_SETTINGS_VARIANT",
|
||||
"AYON_PROJECT_NAME",
|
||||
"AYON_FOLDER_PATH",
|
||||
"AYON_TASK_NAME",
|
||||
"AYON_WORKDIR",
|
||||
"AYON_APP_NAME",
|
||||
"AYON_IN_TESTS",
|
||||
]
|
||||
|
||||
environment = {
|
||||
key: os.environ[key]
|
||||
for key in keys
|
||||
if key in os.environ
|
||||
}
|
||||
|
||||
for key in keys:
|
||||
value = environment.get(key)
|
||||
if not value:
|
||||
continue
|
||||
job_info.EnvironmentKeyValue[key] = value
|
||||
|
||||
# to recognize render jobs
|
||||
job_info.add_render_job_env_var()
|
||||
job_info.EnvironmentKeyValue["AYON_LOG_NO_COLORS"] = "1"
|
||||
|
||||
# Add list of expected files to job
|
||||
# ---------------------------------
|
||||
if not instance.data.get("multiCamera"):
|
||||
exp = instance.data.get("expectedFiles")
|
||||
for filepath in self._iter_expected_files(exp):
|
||||
job_info.OutputDirectory += os.path.dirname(filepath)
|
||||
job_info.OutputFilename += os.path.basename(filepath)
|
||||
|
||||
return job_info
|
||||
|
||||
def get_plugin_info(self):
|
||||
instance = self._instance
|
||||
|
||||
plugin_info = MaxPluginInfo(
|
||||
SceneFile=self.scene_path,
|
||||
Version=instance.data["maxversion"],
|
||||
SaveFile=True,
|
||||
IgnoreInputs=True
|
||||
)
|
||||
|
||||
plugin_payload = attr.asdict(plugin_info)
|
||||
|
||||
# Patching with pluginInfo from settings
|
||||
for key, value in self.pluginInfo.items():
|
||||
plugin_payload[key] = value
|
||||
|
||||
return plugin_payload
|
||||
|
||||
def process_submission(self):
|
||||
|
||||
instance = self._instance
|
||||
filepath = instance.context.data["currentFile"]
|
||||
|
||||
files = instance.data["expectedFiles"]
|
||||
if not files:
|
||||
raise KnownPublishError("No Render Elements found!")
|
||||
first_file = next(self._iter_expected_files(files))
|
||||
output_dir = os.path.dirname(first_file)
|
||||
instance.data["outputDir"] = output_dir
|
||||
|
||||
filename = os.path.basename(filepath)
|
||||
|
||||
payload_data = {
|
||||
"filename": filename,
|
||||
"dirname": output_dir
|
||||
}
|
||||
|
||||
self.log.debug("Submitting 3dsMax render..")
|
||||
project_settings = instance.context.data["project_settings"]
|
||||
auth = self._instance.data["deadline"]["auth"]
|
||||
verify = self._instance.data["deadline"]["verify"]
|
||||
if instance.data.get("multiCamera"):
|
||||
self.log.debug("Submitting jobs for multiple cameras..")
|
||||
payload = self._use_published_name_for_multiples(
|
||||
payload_data, project_settings)
|
||||
job_infos, plugin_infos = payload
|
||||
for job_info, plugin_info in zip(job_infos, plugin_infos):
|
||||
self.submit(
|
||||
self.assemble_payload(job_info, plugin_info),
|
||||
auth=auth,
|
||||
verify=verify
|
||||
)
|
||||
else:
|
||||
payload = self._use_published_name(payload_data, project_settings)
|
||||
job_info, plugin_info = payload
|
||||
self.submit(
|
||||
self.assemble_payload(job_info, plugin_info),
|
||||
auth=auth,
|
||||
verify=verify
|
||||
)
|
||||
|
||||
def _use_published_name(self, data, project_settings):
|
||||
# Not all hosts can import these modules.
|
||||
from ayon_max.api.lib import (
|
||||
get_current_renderer,
|
||||
get_multipass_setting
|
||||
)
|
||||
from ayon_max.api.lib_rendersettings import RenderSettings
|
||||
|
||||
instance = self._instance
|
||||
job_info = copy.deepcopy(self.job_info)
|
||||
plugin_info = copy.deepcopy(self.plugin_info)
|
||||
plugin_data = {}
|
||||
|
||||
multipass = get_multipass_setting(project_settings)
|
||||
if multipass:
|
||||
plugin_data["DisableMultipass"] = 0
|
||||
else:
|
||||
plugin_data["DisableMultipass"] = 1
|
||||
|
||||
files = instance.data.get("expectedFiles")
|
||||
if not files:
|
||||
raise KnownPublishError("No render elements found")
|
||||
first_file = next(self._iter_expected_files(files))
|
||||
old_output_dir = os.path.dirname(first_file)
|
||||
output_beauty = RenderSettings().get_render_output(instance.name,
|
||||
old_output_dir)
|
||||
rgb_bname = os.path.basename(output_beauty)
|
||||
dir = os.path.dirname(first_file)
|
||||
beauty_name = f"{dir}/{rgb_bname}"
|
||||
beauty_name = beauty_name.replace("\\", "/")
|
||||
plugin_data["RenderOutput"] = beauty_name
|
||||
# as 3dsmax has version with different languages
|
||||
plugin_data["Language"] = "ENU"
|
||||
|
||||
renderer_class = get_current_renderer()
|
||||
|
||||
renderer = str(renderer_class).split(":")[0]
|
||||
if renderer in [
|
||||
"ART_Renderer",
|
||||
"Redshift_Renderer",
|
||||
"V_Ray_6_Hotfix_3",
|
||||
"V_Ray_GPU_6_Hotfix_3",
|
||||
"Default_Scanline_Renderer",
|
||||
"Quicksilver_Hardware_Renderer",
|
||||
]:
|
||||
render_elem_list = RenderSettings().get_render_element()
|
||||
for i, element in enumerate(render_elem_list):
|
||||
elem_bname = os.path.basename(element)
|
||||
new_elem = f"{dir}/{elem_bname}"
|
||||
new_elem = new_elem.replace("/", "\\")
|
||||
plugin_data["RenderElementOutputFilename%d" % i] = new_elem # noqa
|
||||
|
||||
if renderer == "Redshift_Renderer":
|
||||
plugin_data["redshift_SeparateAovFiles"] = instance.data.get(
|
||||
"separateAovFiles")
|
||||
if instance.data["cameras"]:
|
||||
camera = instance.data["cameras"][0]
|
||||
plugin_info["Camera0"] = camera
|
||||
plugin_info["Camera"] = camera
|
||||
plugin_info["Camera1"] = camera
|
||||
self.log.debug("plugin data:{}".format(plugin_data))
|
||||
plugin_info.update(plugin_data)
|
||||
|
||||
return job_info, plugin_info
|
||||
|
||||
def get_job_info_through_camera(self, camera):
|
||||
"""Get the job parameters for deadline submission when
|
||||
multi-camera is enabled.
|
||||
Args:
|
||||
infos(dict): a dictionary with job info.
|
||||
"""
|
||||
instance = self._instance
|
||||
context = instance.context
|
||||
job_info = copy.deepcopy(self.job_info)
|
||||
exp = instance.data.get("expectedFiles")
|
||||
|
||||
src_filepath = context.data["currentFile"]
|
||||
src_filename = os.path.basename(src_filepath)
|
||||
job_info.Name = "%s - %s - %s" % (
|
||||
src_filename, instance.name, camera)
|
||||
for filepath in self._iter_expected_files(exp):
|
||||
if camera not in filepath:
|
||||
continue
|
||||
job_info.OutputDirectory += os.path.dirname(filepath)
|
||||
job_info.OutputFilename += os.path.basename(filepath)
|
||||
|
||||
return job_info
|
||||
# set the output filepath with the relative camera
|
||||
|
||||
    def get_plugin_info_through_camera(self, camera):
        """Get the plugin parameters for Deadline submission when
        multi-camera is enabled.

        Args:
            camera (str): Name of the camera to submit the job for.

        Returns:
            dict: Plugin info with the camera-specific outputs.
        """
        instance = self._instance
        # set the target camera
        plugin_info = copy.deepcopy(self.plugin_info)

        plugin_data = {}
        # set the output filepath with the relative camera
        if instance.data.get("multiCamera"):
            scene_filepath = instance.context.data["currentFile"]
            scene_filename = os.path.basename(scene_filepath)
            scene_directory = os.path.dirname(scene_filepath)
            current_filename, ext = os.path.splitext(scene_filename)
            camera_scene_name = f"{current_filename}_{camera}{ext}"
            camera_scene_filepath = os.path.join(
                scene_directory, f"_{current_filename}", camera_scene_name)
            plugin_data["SceneFile"] = camera_scene_filepath

            files = instance.data.get("expectedFiles")
            if not files:
                raise KnownPublishError("No render elements found")
            first_file = next(self._iter_expected_files(files))
            old_output_dir = os.path.dirname(first_file)
            rgb_output = RenderSettings().get_batch_render_output(camera)  # noqa
            rgb_bname = os.path.basename(rgb_output)
            dir = os.path.dirname(first_file)
            beauty_name = f"{dir}/{rgb_bname}"
            beauty_name = beauty_name.replace("\\", "/")
            plugin_info["RenderOutput"] = beauty_name
            renderer_class = get_current_renderer()

            renderer = str(renderer_class).split(":")[0]
            if renderer in [
                "ART_Renderer",
                "Redshift_Renderer",
                "V_Ray_6_Hotfix_3",
                "V_Ray_GPU_6_Hotfix_3",
                "Default_Scanline_Renderer",
                "Quicksilver_Hardware_Renderer",
            ]:
                render_elem_list = RenderSettings().get_batch_render_elements(
                    instance.name, old_output_dir, camera
                )
                for i, element in enumerate(render_elem_list):
                    if camera in element:
                        elem_bname = os.path.basename(element)
                        new_elem = f"{dir}/{elem_bname}"
                        new_elem = new_elem.replace("/", "\\")
                        plugin_info["RenderElementOutputFilename%d" % i] = new_elem  # noqa

        if camera:
            # set the default camera and target camera
            # (weird parameters from max)
            plugin_data["Camera"] = camera
            plugin_data["Camera1"] = camera
            plugin_data["Camera0"] = None

        plugin_info.update(plugin_data)
        return plugin_info

    def _use_published_name_for_multiples(self, data, project_settings):
        """Process the parameters for Deadline submission when the
        user enables the multi-camera option.

        Returns:
            tuple: (list of job infos, list of plugin infos),
                one pair per camera.
        """
        job_info_list = []
        plugin_info_list = []
        instance = self._instance
        cameras = instance.data.get("cameras", [])
        plugin_data = {}
        multipass = get_multipass_setting(project_settings)
        if multipass:
            plugin_data["DisableMultipass"] = 0
        else:
            plugin_data["DisableMultipass"] = 1
        for cam in cameras:
            job_info = self.get_job_info_through_camera(cam)
            plugin_info = self.get_plugin_info_through_camera(cam)
            plugin_info.update(plugin_data)
            job_info_list.append(job_info)
            plugin_info_list.append(plugin_info)

        return job_info_list, plugin_info_list

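    # Illustrative example (added annotation, not in the original file).
    # Assuming a hypothetical instance with two cameras, the method above
    # returns one job/plugin pair per camera, with job names following
    # the "<workfile> - <instance> - <camera>" pattern:
    #
    #     >>> cameras = ["camA", "camB"]
    #     >>> jobs, plugins = self._use_published_name_for_multiples(
    #     ...     data, project_settings)
    #     >>> [j.Name for j in jobs]
    #     ['scene.max - instance - camA', 'scene.max - instance - camB']
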
    def from_published_scene(self, replace_in_path=True):
        instance = self._instance
        if instance.data["renderer"] == "Redshift_Renderer":
            self.log.debug(
                "Using Redshift... the published scene won't be used.")
            replace_in_path = False
        return replace_with_published_scene_path(
            instance, replace_in_path)

    @staticmethod
    def _iter_expected_files(exp):
        if isinstance(exp[0], dict):
            for _aov, files in exp[0].items():
                for file in files:
                    yield file
        else:
            for file in exp:
                yield file

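    # Illustrative example (added annotation, not in the original file).
    # "expectedFiles" comes in two shapes, both handled above:
    #
    #     >>> exp = ["/out/beauty.1001.exr", "/out/beauty.1002.exr"]
    #     >>> list(MaxSubmitDeadline._iter_expected_files(exp))
    #     ['/out/beauty.1001.exr', '/out/beauty.1002.exr']
    #
    #     >>> exp = [{"beauty": ["/out/beauty.1001.exr"],
    #     ...         "crypto": ["/out/crypto.1001.exr"]}]
    #     >>> list(MaxSubmitDeadline._iter_expected_files(exp))
    #     ['/out/beauty.1001.exr', '/out/crypto.1001.exr']
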
    @classmethod
    def get_attribute_defs(cls):
        defs = super(MaxSubmitDeadline, cls).get_attribute_defs()
        defs.extend([
            BoolDef("use_published",
                    default=cls.use_published,
                    label="Use Published Scene"),

            NumberDef("priority",
                      minimum=1,
                      maximum=250,
                      decimals=0,
                      default=cls.priority,
                      label="Priority"),

            NumberDef("chunkSize",
                      minimum=1,
                      maximum=50,
                      decimals=0,
                      default=cls.chunk_size,
                      label="Frames Per Task"),

            TextDef("group",
                    default=cls.group,
                    label="Group Name"),
        ])

        return defs

@ -1,935 +0,0 @@
# -*- coding: utf-8 -*-
"""Submitting render job to Deadline.

This module takes care of submitting a job from Maya to Deadline. It
creates the job and sets the correct environment. Its behavior is
controlled by the ``DEADLINE_REST_URL`` environment variable - pointing
to the Deadline Web Service - and the
:data:`MayaSubmitDeadline.use_published` property telling Deadline to
use the published scene workfile or not.

If ``vrayscene`` or ``assscene`` are detected in families, it will first
submit a job to export these files and then a dependent job to render them.

Attributes:
    payload_skeleton (dict): Skeleton payload data sent as job to Deadline.
        Default values are for the ``MayaBatch`` plugin.

"""

from __future__ import print_function
import os
import json
import getpass
import copy
import re
import hashlib
from datetime import datetime
import itertools
from collections import OrderedDict

import attr

from ayon_core.pipeline import (
    AYONPyblishPluginMixin
)
from ayon_core.lib import (
    BoolDef,
    NumberDef,
    TextDef,
    EnumDef,
    is_in_tests,
)
from ayon_maya.api.lib_rendersettings import RenderSettings
from ayon_maya.api.lib import get_attr_in_layer

from ayon_core.pipeline.farm.tools import iter_expected_files

from ayon_deadline import abstract_submit_deadline
from ayon_deadline.abstract_submit_deadline import DeadlineJobInfo


def _validate_deadline_bool_value(instance, attribute, value):
    if not isinstance(value, (str, bool)):
        raise TypeError(
            "Attribute {} must be str or bool.".format(attribute))
    if value not in {"1", "0", True, False}:
        raise ValueError(
            ("Value of {} must be one of "
             "'0', '1', True, False").format(attribute)
        )


@attr.s
class MayaPluginInfo(object):
    SceneFile = attr.ib(default=None)  # Input
    OutputFilePath = attr.ib(default=None)  # Output directory and filename
    OutputFilePrefix = attr.ib(default=None)
    Version = attr.ib(default=None)  # Mandatory for Deadline
    UsingRenderLayers = attr.ib(default=True)
    RenderLayer = attr.ib(default=None)  # Render only this layer
    Renderer = attr.ib(default=None)
    ProjectPath = attr.ib(default=None)  # Resolve relative references
    # Include all lights flag
    RenderSetupIncludeLights = attr.ib(
        default="1", validator=_validate_deadline_bool_value)
    StrictErrorChecking = attr.ib(default=True)


@attr.s
class PythonPluginInfo(object):
    ScriptFile = attr.ib()
    Version = attr.ib(default="3.6")
    Arguments = attr.ib(default=None)
    SingleFrameOnly = attr.ib(default=None)


@attr.s
class VRayPluginInfo(object):
    InputFilename = attr.ib(default=None)  # Input
    SeparateFilesPerFrame = attr.ib(default=None)
    VRayEngine = attr.ib(default="V-Ray")
    Width = attr.ib(default=None)
    Height = attr.ib(default=None)
    OutputFilePath = attr.ib(default=True)  # Output directory
    OutputFileName = attr.ib(default=None)  # Output filename


@attr.s
class ArnoldPluginInfo(object):
    ArnoldFile = attr.ib(default=None)


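# Illustrative example (added annotation, not in the original file).
# These attrs classes serialize to the plain plugin-info dicts that
# Deadline expects:
#
#     >>> attr.asdict(ArnoldPluginInfo(ArnoldFile="/path/scene.ass"))
#     {'ArnoldFile': '/path/scene.ass'}

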
class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline,
                         AYONPyblishPluginMixin):

    label = "Submit Render to Deadline"
    hosts = ["maya"]
    families = ["renderlayer"]
    targets = ["local"]
    settings_category = "deadline"

    tile_assembler_plugin = "OpenPypeTileAssembler"
    priority = 50
    tile_priority = 50
    limit = []  # limit groups
    jobInfo = {}
    pluginInfo = {}
    group = "none"
    strict_error_checking = True

    @classmethod
    def apply_settings(cls, project_settings):
        settings = project_settings["deadline"]["publish"]["MayaSubmitDeadline"]  # noqa

        # Take some defaults from settings
        cls.asset_dependencies = settings.get("asset_dependencies",
                                              cls.asset_dependencies)
        cls.import_reference = settings.get("import_reference",
                                            cls.import_reference)
        cls.use_published = settings.get("use_published", cls.use_published)
        cls.priority = settings.get("priority", cls.priority)
        cls.tile_priority = settings.get("tile_priority", cls.tile_priority)
        cls.limit = settings.get("limit", cls.limit)
        cls.group = settings.get("group", cls.group)
        cls.strict_error_checking = settings.get("strict_error_checking",
                                                 cls.strict_error_checking)
        job_info = settings.get("jobInfo")
        if job_info:
            job_info = json.loads(job_info)
        plugin_info = settings.get("pluginInfo")
        if plugin_info:
            plugin_info = json.loads(plugin_info)

        cls.jobInfo = job_info or cls.jobInfo
        cls.pluginInfo = plugin_info or cls.pluginInfo

    def get_job_info(self):
        job_info = DeadlineJobInfo(Plugin="MayaBatch")

        # todo: test whether this works for existing production cases
        #   where custom jobInfo was stored in the project settings
        job_info.update(self.jobInfo)

        instance = self._instance
        context = instance.context

        # Always use the original work file name for the Job name even when
        # rendering is done from the published Work File. The original work
        # file name is clearer because it can also have subversion strings,
        # etc. which are stripped for the published file.
        src_filepath = context.data["currentFile"]
        src_filename = os.path.basename(src_filepath)

        if is_in_tests():
            src_filename += datetime.now().strftime("%d%m%Y%H%M%S")

        job_info.Name = "%s - %s" % (src_filename, instance.name)
        job_info.BatchName = src_filename
        job_info.Plugin = instance.data.get("mayaRenderPlugin", "MayaBatch")
        job_info.UserName = context.data.get("deadlineUser", getpass.getuser())

        # Deadline requires integers in frame range
        frames = "{start}-{end}x{step}".format(
            start=int(instance.data["frameStartHandle"]),
            end=int(instance.data["frameEndHandle"]),
            step=int(instance.data["byFrameStep"]),
        )
        job_info.Frames = frames

        job_info.Pool = instance.data.get("primaryPool")
        job_info.SecondaryPool = instance.data.get("secondaryPool")
        job_info.Comment = context.data.get("comment")
        job_info.Priority = instance.data.get("priority", self.priority)

        if self.group != "none" and self.group:
            job_info.Group = self.group

        if self.limit:
            job_info.LimitGroups = ",".join(self.limit)

        attr_values = self.get_attr_values_from_data(instance.data)
        render_globals = instance.data.setdefault("renderGlobals", dict())
        machine_list = attr_values.get("machineList", "")
        if machine_list:
            if attr_values.get("whitelist", True):
                machine_list_key = "Whitelist"
            else:
                machine_list_key = "Blacklist"
            render_globals[machine_list_key] = machine_list

        job_info.Priority = attr_values.get("priority")
        job_info.ChunkSize = attr_values.get("chunkSize")

        # Add options from RenderGlobals
        render_globals = instance.data.get("renderGlobals", {})
        job_info.update(render_globals)

        keys = [
            "FTRACK_API_KEY",
            "FTRACK_API_USER",
            "FTRACK_SERVER",
            "OPENPYPE_SG_USER",
            "AYON_BUNDLE_NAME",
            "AYON_DEFAULT_SETTINGS_VARIANT",
            "AYON_PROJECT_NAME",
            "AYON_FOLDER_PATH",
            "AYON_TASK_NAME",
            "AYON_WORKDIR",
            "AYON_APP_NAME",
            "AYON_IN_TESTS"
        ]

        environment = {
            key: os.environ[key]
            for key in keys
            if key in os.environ
        }

        for key in keys:
            value = environment.get(key)
            if not value:
                continue
            job_info.EnvironmentKeyValue[key] = value

        # to recognize render jobs
        job_info.add_render_job_env_var()
        job_info.EnvironmentKeyValue["AYON_LOG_NO_COLORS"] = "1"

        # Adding file dependencies.
        if not is_in_tests() and self.asset_dependencies:
            dependencies = instance.context.data["fileDependencies"]
            for dependency in dependencies:
                job_info.AssetDependency += dependency

        # Add list of expected files to job
        # ---------------------------------
        exp = instance.data.get("expectedFiles")
        for filepath in iter_expected_files(exp):
            job_info.OutputDirectory += os.path.dirname(filepath)
            job_info.OutputFilename += os.path.basename(filepath)

        return job_info

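    # Illustrative example (added annotation, not in the original file).
    # With hypothetical handles 1001-1100 and a step of 1, the frame
    # range above serializes the way Deadline expects it:
    #
    #     >>> "{start}-{end}x{step}".format(start=1001, end=1100, step=1)
    #     '1001-1100x1'
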
    def get_plugin_info(self):
        # Not all hosts can import this module.
        from maya import cmds

        instance = self._instance
        context = instance.context

        # Set it to default Maya behaviour if it cannot be determined
        # from instance (but it should be, by the Collector).
        default_rs_include_lights = (
            instance.context.data['project_settings']
            ['maya']
            ['render_settings']
            ['enable_all_lights']
        )

        rs_include_lights = instance.data.get(
            "renderSetupIncludeLights", default_rs_include_lights)
        if rs_include_lights not in {"1", "0", True, False}:
            rs_include_lights = default_rs_include_lights

        attr_values = self.get_attr_values_from_data(instance.data)
        strict_error_checking = attr_values.get("strict_error_checking",
                                                self.strict_error_checking)
        plugin_info = MayaPluginInfo(
            SceneFile=self.scene_path,
            Version=cmds.about(version=True),
            RenderLayer=instance.data['setMembers'],
            Renderer=instance.data["renderer"],
            RenderSetupIncludeLights=rs_include_lights,  # noqa
            ProjectPath=context.data["workspaceDir"],
            UsingRenderLayers=True,
            StrictErrorChecking=strict_error_checking
        )

        plugin_payload = attr.asdict(plugin_info)

        # Patching with pluginInfo from settings
        for key, value in self.pluginInfo.items():
            plugin_payload[key] = value

        return plugin_payload

    def process_submission(self):
        from maya import cmds
        instance = self._instance

        filepath = self.scene_path  # publish if `use_published` else workfile

        # TODO: Avoid the need for this logic here, needed for submit publish
        # Store output dir for unified publisher (filesequence)
        expected_files = instance.data["expectedFiles"]
        first_file = next(iter_expected_files(expected_files))
        output_dir = os.path.dirname(first_file)
        instance.data["outputDir"] = output_dir

        # Patch workfile (only when use_published is enabled)
        if self.use_published:
            self._patch_workfile()

        # Gather needed data ------------------------------------------------
        filename = os.path.basename(filepath)
        dirname = os.path.join(
            cmds.workspace(query=True, rootDirectory=True),
            cmds.workspace(fileRuleEntry="images")
        )

        # Fill in common data to payload ------------------------------------
        # TODO: Replace these with collected data from CollectRender
        payload_data = {
            "filename": filename,
            "dirname": dirname,
        }

        # Submit preceding export jobs --------------------------------------
        export_job = None
        assert not all(x in instance.data["families"]
                       for x in ['vrayscene', 'assscene']), (
            "Vray Scene and Ass Scene options are mutually exclusive")

        auth = self._instance.data["deadline"]["auth"]
        verify = self._instance.data["deadline"]["verify"]
        if "vrayscene" in instance.data["families"]:
            self.log.debug("Submitting V-Ray scene render..")
            vray_export_payload = self._get_vray_export_payload(payload_data)
            export_job = self.submit(vray_export_payload,
                                     auth=auth,
                                     verify=verify)

            payload = self._get_vray_render_payload(payload_data)

        else:
            self.log.debug("Submitting MayaBatch render..")
            payload = self._get_maya_payload(payload_data)

        # Add export job as dependency --------------------------------------
        if export_job:
            job_info, _ = payload
            job_info.JobDependencies = export_job

        if instance.data.get("tileRendering"):
            # Prepare tiles data
            self._tile_render(payload)
        else:
            # Submit main render job
            job_info, plugin_info = payload
            self.submit(self.assemble_payload(job_info, plugin_info),
                        auth=auth,
                        verify=verify)

    def _tile_render(self, payload):
        """Submit as tile render per frame with dependent assembly jobs."""

        # As collected by super process()
        instance = self._instance

        payload_job_info, payload_plugin_info = payload
        job_info = copy.deepcopy(payload_job_info)
        plugin_info = copy.deepcopy(payload_plugin_info)

        # Force plugin reload for vray because the region does not get
        # flushed between tile renders.
        if plugin_info["Renderer"] == "vray":
            job_info.ForceReloadPlugin = True

        # if we have a sequence of files, we need to create a tile job for
        # every frame
        job_info.TileJob = True
        job_info.TileJobTilesInX = instance.data.get("tilesX")
        job_info.TileJobTilesInY = instance.data.get("tilesY")

        tiles_count = job_info.TileJobTilesInX * job_info.TileJobTilesInY

        plugin_info["ImageHeight"] = instance.data.get("resolutionHeight")
        plugin_info["ImageWidth"] = instance.data.get("resolutionWidth")
        plugin_info["RegionRendering"] = True

        R_FRAME_NUMBER = re.compile(
            r".+\.(?P<frame>[0-9]+)\..+")  # noqa: N806, E501
        REPL_FRAME_NUMBER = re.compile(
            r"(.+\.)([0-9]+)(\..+)")  # noqa: N806, E501

        exp = instance.data["expectedFiles"]
        if isinstance(exp[0], dict):
            # we have aovs and we need to iterate over them
            # get files from `beauty`
            files = exp[0].get("beauty")
            # assembly files are used for assembly jobs as we need to put
            # together all AOVs
            assembly_files = list(
                itertools.chain.from_iterable(
                    [f for _, f in exp[0].items()]))
            if not files:
                # if beauty doesn't exist, use first aov we found
                files = exp[0].get(list(exp[0].keys())[0])
        else:
            files = exp
            assembly_files = files

        auth = instance.data["deadline"]["auth"]
        verify = instance.data["deadline"]["verify"]

        # Define frame tile jobs
        frame_file_hash = {}
        frame_payloads = {}
        file_index = 1
        for file in files:
            frame = re.search(R_FRAME_NUMBER, file).group("frame")

            new_job_info = copy.deepcopy(job_info)
            new_job_info.Name += " (Frame {} - {} tiles)".format(frame,
                                                                 tiles_count)
            new_job_info.TileJobFrame = frame

            new_plugin_info = copy.deepcopy(plugin_info)

            # Add tile data into job info and plugin info
            tiles_data = _format_tiles(
                file, 0,
                instance.data.get("tilesX"),
                instance.data.get("tilesY"),
                instance.data.get("resolutionWidth"),
                instance.data.get("resolutionHeight"),
                payload_plugin_info["OutputFilePrefix"]
            )[0]

            new_job_info.update(tiles_data["JobInfo"])
            new_plugin_info.update(tiles_data["PluginInfo"])

            self.log.debug("hashing {} - {}".format(file_index, file))
            job_hash = hashlib.sha256(
                ("{}_{}".format(file_index, file)).encode("utf-8"))

            file_hash = job_hash.hexdigest()
            frame_file_hash[frame] = file_hash

            new_job_info.ExtraInfo[0] = file_hash
            new_job_info.ExtraInfo[1] = file

            frame_payloads[frame] = self.assemble_payload(
                job_info=new_job_info,
                plugin_info=new_plugin_info
            )
            file_index += 1

        self.log.debug(
            "Submitting tile job(s) [{}] ...".format(len(frame_payloads)))

        # Submit frame tile jobs
        frame_tile_job_id = {}
        for frame, tile_job_payload in frame_payloads.items():
            job_id = self.submit(
                tile_job_payload, auth, verify)
            frame_tile_job_id[frame] = job_id

        # Define assembly payloads
        assembly_job_info = copy.deepcopy(job_info)
        assembly_job_info.Plugin = self.tile_assembler_plugin
        assembly_job_info.Name += " - Tile Assembly Job"
        assembly_job_info.Frames = 1
        assembly_job_info.MachineLimit = 1

        attr_values = self.get_attr_values_from_data(instance.data)
        assembly_job_info.Priority = attr_values.get("tile_priority",
                                                     self.tile_priority)
        assembly_job_info.TileJob = False

        # TODO: This should be a new publisher attribute definition
        pool = instance.context.data["project_settings"]["deadline"]
        pool = pool["publish"]["ProcessSubmittedJobOnFarm"]["deadline_pool"]
        assembly_job_info.Pool = pool or instance.data.get("primaryPool", "")

        assembly_plugin_info = {
            "CleanupTiles": 1,
            "ErrorOnMissing": True,
            "Renderer": self._instance.data["renderer"]
        }

        assembly_payloads = []
        output_dir = self.job_info.OutputDirectory[0]
        config_files = []
        for file in assembly_files:
            frame = re.search(R_FRAME_NUMBER, file).group("frame")

            frame_assembly_job_info = copy.deepcopy(assembly_job_info)
            frame_assembly_job_info.Name += " (Frame {})".format(frame)
            frame_assembly_job_info.OutputFilename[0] = re.sub(
                REPL_FRAME_NUMBER,
                "\\1{}\\3".format("#" * len(frame)), file)

            file_hash = frame_file_hash[frame]
            tile_job_id = frame_tile_job_id[frame]

            frame_assembly_job_info.ExtraInfo[0] = file_hash
            frame_assembly_job_info.ExtraInfo[1] = file
            frame_assembly_job_info.JobDependencies = tile_job_id
            frame_assembly_job_info.Frames = frame

            # write assembly job config files
            config_file = os.path.join(
                output_dir,
                "{}_config_{}.txt".format(
                    os.path.splitext(file)[0],
                    datetime.now().strftime("%Y_%m_%d_%H_%M_%S")
                )
            )
            config_files.append(config_file)
            try:
                if not os.path.isdir(output_dir):
                    os.makedirs(output_dir)
            except OSError:
                # directory is not available
                self.log.warning("Path is unreachable: "
                                 "`{}`".format(output_dir))

            with open(config_file, "w") as cf:
                print("TileCount={}".format(tiles_count), file=cf)
                print("ImageFileName={}".format(file), file=cf)
                print("ImageWidth={}".format(
                    instance.data.get("resolutionWidth")), file=cf)
                print("ImageHeight={}".format(
                    instance.data.get("resolutionHeight")), file=cf)

            reversed_y = False
            if plugin_info["Renderer"] == "arnold":
                reversed_y = True

            with open(config_file, "a") as cf:
                # Need to reverse the order of the y tiles, because image
                # coordinates are calculated from bottom left corner.
                tiles = _format_tiles(
                    file, 0,
                    instance.data.get("tilesX"),
                    instance.data.get("tilesY"),
                    instance.data.get("resolutionWidth"),
                    instance.data.get("resolutionHeight"),
                    payload_plugin_info["OutputFilePrefix"],
                    reversed_y=reversed_y
                )[1]
                for k, v in sorted(tiles.items()):
                    print("{}={}".format(k, v), file=cf)

            assembly_payloads.append(
                self.assemble_payload(
                    job_info=frame_assembly_job_info,
                    plugin_info=assembly_plugin_info.copy(),
                    # This would fail if the client machine and webservice are
                    # using different storage paths.
                    aux_files=[config_file]
                )
            )

        # Submit assembly jobs
        assembly_job_ids = []
        num_assemblies = len(assembly_payloads)
        for i, payload in enumerate(assembly_payloads):
            self.log.debug(
                "submitting assembly job {} of {}".format(i + 1,
                                                          num_assemblies)
            )
            assembly_job_id = self.submit(
                payload,
                auth=auth,
                verify=verify
            )
            assembly_job_ids.append(assembly_job_id)

        instance.data["assemblySubmissionJobs"] = assembly_job_ids

        # Remove config files to avoid confusion about where data is coming
        # from in Deadline.
        for config_file in config_files:
            os.remove(config_file)

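    # Illustrative example (added annotation, not in the original file).
    # For a hypothetical 2x2 tile render of "/out/beauty.1001.exr" at
    # 1920x1080, the assembly config written above would look roughly
    # like (tile entries come sorted from _format_tiles):
    #
    #     TileCount=4
    #     ImageFileName=/out/beauty.1001.exr
    #     ImageWidth=1920
    #     ImageHeight=1080
    #     Tile0FileName=/out/_tile_1x1_2x2_beauty.1001.exr
    #     Tile0Height=540
    #     Tile0Width=960
    #     Tile0X=0
    #     Tile0Y=540
    #     ...
    #     TilesCropped=False
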
    def _get_maya_payload(self, data):

        job_info = copy.deepcopy(self.job_info)

        if not is_in_tests() and self.asset_dependencies:
            # Asset dependency to wait for at least the scene file to sync.
            job_info.AssetDependency += self.scene_path

        # Get layer prefix
        renderlayer = self._instance.data["setMembers"]
        renderer = self._instance.data["renderer"]
        layer_prefix_attr = RenderSettings.get_image_prefix_attr(renderer)
        layer_prefix = get_attr_in_layer(layer_prefix_attr, layer=renderlayer)

        plugin_info = copy.deepcopy(self.plugin_info)
        plugin_info.update({
            # Output directory and filename
            "OutputFilePath": data["dirname"].replace("\\", "/"),
            "OutputFilePrefix": layer_prefix,
        })

        # This hack is here because of how Deadline handles Renderman
        # versions: it considers everything with `renderman` set as a
        # version older than Renderman 22, so for newer Renderman
        # versions we need to set the renderer string on the job to
        # `renderman22`. We will have to change this when Deadline
        # releases a new version handling this.
        renderer = self._instance.data["renderer"]
        if renderer == "renderman":
            try:
                from rfm2.config import cfg  # noqa
            except ImportError:
                raise Exception("Cannot determine renderman version")

            rman_version = cfg().build_info.version()  # type: str
            if int(rman_version.split(".")[0]) > 22:
                renderer = "renderman22"

            plugin_info["Renderer"] = renderer

            # this is needed because the renderman plugin in Deadline
            # handles directory and file prefixes separately
            plugin_info["OutputFilePath"] = job_info.OutputDirectory[0]

        return job_info, plugin_info

    def _get_vray_export_payload(self, data):

        job_info = copy.deepcopy(self.job_info)
        job_info.Name = self._job_info_label("Export")

        # Get V-Ray settings info to compute output path
        vray_scene = self.format_vray_output_filename()

        plugin_info = {
            "Renderer": "vray",
            "SkipExistingFrames": True,
            "UseLegacyRenderLayers": True,
            "OutputFilePath": os.path.dirname(vray_scene)
        }

        # `plugin_info` is already a plain dict, so it is passed through
        # as-is (attr.asdict() would raise for a non-attrs class).
        return job_info, plugin_info

    def _get_vray_render_payload(self, data):

        # Job Info
        job_info = copy.deepcopy(self.job_info)
        job_info.Name = self._job_info_label("Render")
        job_info.Plugin = "Vray"
        job_info.OverrideTaskExtraInfoNames = False

        # Plugin Info
        plugin_info = VRayPluginInfo(
            InputFilename=self.format_vray_output_filename(),
            SeparateFilesPerFrame=False,
            VRayEngine="V-Ray",
            Width=self._instance.data["resolutionWidth"],
            Height=self._instance.data["resolutionHeight"],
            OutputFilePath=job_info.OutputDirectory[0],
            OutputFileName=job_info.OutputFilename[0]
        )

        return job_info, attr.asdict(plugin_info)

    def _get_arnold_render_payload(self, data):
        # Job Info
        job_info = copy.deepcopy(self.job_info)
        job_info.Name = self._job_info_label("Render")
        job_info.Plugin = "Arnold"
        job_info.OverrideTaskExtraInfoNames = False

        # Plugin Info
        ass_file, _ = os.path.splitext(data["output_filename_0"])
        ass_filepath = ass_file + ".ass"

        plugin_info = ArnoldPluginInfo(
            ArnoldFile=ass_filepath
        )

        return job_info, attr.asdict(plugin_info)

    def format_vray_output_filename(self):
        """Format the expected output file of the Export job.

        Example:
            <Scene>/<Scene>_<Layer>/<Layer>
            "shot010_v006/shot010_v006_CHARS/CHARS_0001.vrscene"

        Returns:
            str

        """
        from maya import cmds

        # "vrayscene/<Scene>/<Scene>_<Layer>/<Layer>"
        vray_settings = cmds.ls(type="VRaySettingsNode")
        node = vray_settings[0]
        template = cmds.getAttr("{}.vrscene_filename".format(node))
        scene, _ = os.path.splitext(self.scene_path)

        def smart_replace(string, key_values):
            new_string = string
            for key, value in key_values.items():
                new_string = new_string.replace(key, value)
            return new_string

        # Get workfile scene path without extension to format vrscene_filename
        scene_filename = os.path.basename(self.scene_path)
        scene_filename_no_ext, _ = os.path.splitext(scene_filename)

        layer = self._instance.data['setMembers']

        # Reformat without tokens
        output_path = smart_replace(
            template,
            {"<Scene>": scene_filename_no_ext,
             "<Layer>": layer})

        start_frame = int(self._instance.data["frameStartHandle"])
        workspace = self._instance.context.data["workspace"]
        filename_zero = "{}_{:04d}.vrscene".format(output_path, start_frame)
        filepath_zero = os.path.join(workspace, filename_zero)

        return filepath_zero.replace("\\", "/")

    def _patch_workfile(self):
        """Patch Maya scene.

        This will take a list of patches (lines to add) and apply them to
        the *published* Maya scene file (that is used later for rendering).

        Patches are dicts with the following structure::
            {
                "name": "Name of patch",
                "regex": "regex of line before patch",
                "line": "line to insert"
            }

        """
        project_settings = self._instance.context.data["project_settings"]
        patches = (
            project_settings.get(
                "deadline", {}).get(
                "publish", {}).get(
                "MayaSubmitDeadline", {}).get(
                "scene_patches", {})
        )
        if not patches:
            return

        # Only .ma (ASCII) scenes can be patched as plain text.
        if os.path.splitext(self.scene_path)[1].lower() != ".ma":
            self.log.debug("Skipping workfile patch since workfile is not "
                           ".ma file")
            return

        compiled_regex = [re.compile(p["regex"]) for p in patches]
        with open(self.scene_path, "r+") as pf:
            scene_data = pf.readlines()
            for ln, line in enumerate(scene_data):
                for i, r in enumerate(compiled_regex):
                    if re.match(r, line):
                        scene_data.insert(ln + 1, patches[i]["line"])
                        pf.seek(0)
                        pf.writelines(scene_data)
                        pf.truncate()
                        self.log.info("Applied {} patch to scene.".format(
                            patches[i]["name"]
                        ))

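    # Illustrative example (added annotation, not in the original file).
    # A hypothetical "scene_patches" entry and its effect:
    #
    #     {"name": "load mtoa",
    #      "regex": "^requires maya .*",
    #      "line": "requires \"mtoa\" \"5.0\";\n"}
    #
    # The patch line is inserted right after the first scene line matching
    # the regex. Note that "line" must carry its own trailing newline,
    # because it is inserted into the readlines() output verbatim.
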
    def _job_info_label(self, label):
        return "{label} {job.Name} [{start}-{end}]".format(
            label=label,
            job=self.job_info,
            start=int(self._instance.data["frameStartHandle"]),
            end=int(self._instance.data["frameEndHandle"]),
        )

    @classmethod
    def get_attribute_defs(cls):
        defs = super(MayaSubmitDeadline, cls).get_attribute_defs()

        defs.extend([
            NumberDef("priority",
                      label="Priority",
                      default=cls.default_priority,
                      decimals=0),
            NumberDef("chunkSize",
                      label="Frames Per Task",
                      default=1,
                      decimals=0,
                      minimum=1,
                      maximum=1000),
            TextDef("machineList",
                    label="Machine List",
                    default="",
                    placeholder="machine1,machine2"),
            EnumDef("whitelist",
                    label="Machine List (Allow/Deny)",
                    items={
                        True: "Allow List",
                        False: "Deny List",
                    },
                    default=False),
            NumberDef("tile_priority",
                      label="Tile Assembler Priority",
                      decimals=0,
                      default=cls.tile_priority),
            BoolDef("strict_error_checking",
                    label="Strict Error Checking",
                    default=cls.strict_error_checking),
        ])

        return defs


def _format_tiles(
        filename,
        index,
        tiles_x,
        tiles_y,
        width,
        height,
        prefix,
        reversed_y=False
):
    """Generate tile entries for Deadline tile job.

    Returns two dictionaries: one that can be used directly in a Deadline
    job, and a second one that can be used for the Deadline Assembly job
    configuration file.

    This will format tile names:

    Example::
        {
            "OutputFilename0Tile0": "_tile_1x1_4x4_Main_beauty.1001.exr",
            "OutputFilename0Tile1": "_tile_2x1_4x4_Main_beauty.1001.exr"
        }

    And add tile prefixes like:

    Example::
        Image prefix is:
        `<Scene>/<RenderLayer>/<RenderLayer>_<RenderPass>`

        Result for tile 0 for 4x4 will be:
        `<Scene>/<RenderLayer>/_tile_1x1_4x4_<RenderLayer>_<RenderPass>`

    Calculating coordinates is tricky, as in the Job they are defined as
    top, left, bottom, right with zero being in the top-left corner, but
    the Assembler configuration file takes tile coordinates as X, Y,
    Width and Height with zero in the bottom-left corner.

    Args:
        filename (str): Filename to process as tiles.
        index (int): Index of that file if it is a sequence.
        tiles_x (int): Number of tiles in X.
        tiles_y (int): Number of tiles in Y.
        width (int): Width resolution of final image.
        height (int): Height resolution of final image.
        prefix (str): Image prefix.
        reversed_y (bool): Reverses the order of the y tiles.

    Returns:
        (dict, dict): Tuple of two dictionaries - first can be used to
            extend JobInfo, second has tiles x, y, width and height
            used for assembler configuration.

    """
    # Math used requires integers for correct output - as such
    # we ensure our inputs are correct.
    assert isinstance(tiles_x, int), "tiles_x must be an integer"
    assert isinstance(tiles_y, int), "tiles_y must be an integer"
    assert isinstance(width, int), "width must be an integer"
    assert isinstance(height, int), "height must be an integer"

    out = {"JobInfo": {}, "PluginInfo": {}}
    cfg = OrderedDict()
    w_space = width // tiles_x
    h_space = height // tiles_y

    cfg["TilesCropped"] = "False"

    tile = 0
    range_y = range(1, tiles_y + 1)
    reversed_y_range = list(reversed(range_y))
    for tile_x in range(1, tiles_x + 1):
        for i, tile_y in enumerate(range_y):
            tile_y_index = tile_y
            if reversed_y:
                tile_y_index = reversed_y_range[i]

            tile_prefix = "_tile_{}x{}_{}x{}_".format(
                tile_x, tile_y_index, tiles_x, tiles_y
            )

            new_filename = "{}/{}{}".format(
                os.path.dirname(filename),
                tile_prefix,
                os.path.basename(filename)
            )

            top = height - (tile_y * h_space)
            bottom = height - ((tile_y - 1) * h_space) - 1
            left = (tile_x - 1) * w_space
            right = (tile_x * w_space) - 1

            # Job info
            key = "OutputFilename{}".format(index)
            out["JobInfo"][key] = new_filename

            # Plugin Info
            key = "RegionPrefix{}".format(str(tile))
            out["PluginInfo"][key] = "/{}".format(
                tile_prefix
            ).join(prefix.rsplit("/", 1))
            out["PluginInfo"]["RegionTop{}".format(tile)] = top
            out["PluginInfo"]["RegionBottom{}".format(tile)] = bottom
            out["PluginInfo"]["RegionLeft{}".format(tile)] = left
            out["PluginInfo"]["RegionRight{}".format(tile)] = right

            # Tile config
            cfg["Tile{}FileName".format(tile)] = new_filename
            cfg["Tile{}X".format(tile)] = left
            cfg["Tile{}Y".format(tile)] = top
            cfg["Tile{}Width".format(tile)] = w_space
            cfg["Tile{}Height".format(tile)] = h_space

            tile += 1

    return out, cfg

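# Illustrative example (added annotation, not in the original file).
# For a hypothetical 2x2 split of a 1920x1080 frame, tile 0 (tile_x=1,
# tile_y=1) gets w_space=960 and h_space=540, and therefore:
#
#     >>> out, cfg = _format_tiles(
#     ...     "/out/beauty.1001.exr", 0, 2, 2, 1920, 1080, "maya/beauty")
#     >>> out["PluginInfo"]["RegionTop0"], out["PluginInfo"]["RegionBottom0"]
#     (540, 1079)
#     >>> out["PluginInfo"]["RegionLeft0"], out["PluginInfo"]["RegionRight0"]
#     (0, 959)
#     >>> cfg["Tile0X"], cfg["Tile0Y"], cfg["Tile0Width"], cfg["Tile0Height"]
#     (0, 540, 960, 540)
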
@ -1,558 +0,0 @@
import os
import re
import json
import getpass
from datetime import datetime

import pyblish.api

from ayon_core.pipeline.publish import (
    AYONPyblishPluginMixin
)
from ayon_core.lib import (
    is_in_tests,
    BoolDef,
    NumberDef
)
from ayon_deadline.abstract_submit_deadline import requests_post


class NukeSubmitDeadline(pyblish.api.InstancePlugin,
                         AYONPyblishPluginMixin):
    """Submit write to Deadline.

    Renders are submitted to the Deadline Web Service
    supplied via the settings key "DEADLINE_REST_URL".

    """

    label = "Submit Nuke to Deadline"
    order = pyblish.api.IntegratorOrder + 0.1
    hosts = ["nuke"]
    families = ["render", "prerender"]
    optional = True
    targets = ["local"]
    settings_category = "deadline"

    # presets
    priority = 50
    chunk_size = 1
    concurrent_tasks = 1
    group = ""
    department = ""
    limit_groups = []
    use_gpu = False
    env_allowed_keys = []
    env_search_replace_values = []
    workfile_dependency = True
    use_published_workfile = True

    @classmethod
    def get_attribute_defs(cls):
        return [
            NumberDef(
                "priority",
                label="Priority",
                default=cls.priority,
                decimals=0
            ),
            NumberDef(
                "chunk",
                label="Frames Per Task",
                default=cls.chunk_size,
                decimals=0,
                minimum=1,
                maximum=1000
            ),
            NumberDef(
                "concurrency",
                label="Concurrency",
                default=cls.concurrent_tasks,
                decimals=0,
                minimum=1,
                maximum=10
            ),
            BoolDef(
                "use_gpu",
                default=cls.use_gpu,
                label="Use GPU"
            ),
            BoolDef(
                "workfile_dependency",
                default=cls.workfile_dependency,
                label="Workfile Dependency"
            ),
            BoolDef(
                "use_published_workfile",
                default=cls.use_published_workfile,
                label="Use Published Workfile"
            )
        ]

    def process(self, instance):
        if not instance.data.get("farm"):
            self.log.debug("Skipping local instance.")
            return
        instance.data["attributeValues"] = self.get_attr_values_from_data(
            instance.data)

        families = instance.data["families"]

        node = instance.data["transientData"]["node"]
        context = instance.context

        deadline_url = instance.data["deadline"]["url"]
        assert deadline_url, "Requires Deadline Webservice URL"

        self.deadline_url = "{}/api/jobs".format(deadline_url)
        self._comment = context.data.get("comment", "")
        self._ver = re.search(r"\d+\.\d+", context.data.get("hostVersion"))
        self._deadline_user = context.data.get(
            "deadlineUser", getpass.getuser())
        submit_frame_start = int(instance.data["frameStartHandle"])
        submit_frame_end = int(instance.data["frameEndHandle"])

        # get output path
        render_path = instance.data['path']
        script_path = context.data["currentFile"]

        use_published_workfile = instance.data["attributeValues"].get(
            "use_published_workfile", self.use_published_workfile
        )
        if use_published_workfile:
            script_path = self._get_published_workfile_path(context)

        # only add main rendering job if target is not frames_farm
        r_job_response_json = None
        if instance.data["render_target"] != "frames_farm":
            r_job_response = self.payload_submit(
                instance,
                script_path,
                render_path,
                node.name(),
                submit_frame_start,
                submit_frame_end
            )
            r_job_response_json = r_job_response.json()
            instance.data["deadlineSubmissionJob"] = r_job_response_json

            # Store output dir for unified publisher (filesequence)
            instance.data["outputDir"] = os.path.dirname(
                render_path).replace("\\", "/")
            instance.data["publishJobState"] = "Suspended"

        if instance.data.get("bakingNukeScripts"):
            for baking_script in instance.data["bakingNukeScripts"]:
                render_path = baking_script["bakeRenderPath"]
                script_path = baking_script["bakeScriptPath"]
                exe_node_name = baking_script["bakeWriteNodeName"]

                b_job_response = self.payload_submit(
                    instance,
                    script_path,
                    render_path,
                    exe_node_name,
                    submit_frame_start,
                    submit_frame_end,
                    r_job_response_json,
                    baking_submission=True
                )

                # Store output dir for unified publisher (filesequence)
                instance.data["deadlineSubmissionJob"] = b_job_response.json()

                instance.data["publishJobState"] = "Suspended"

                # add to list of job Id
                if not instance.data.get("bakingSubmissionJobs"):
                    instance.data["bakingSubmissionJobs"] = []

                instance.data["bakingSubmissionJobs"].append(
                    b_job_response.json()["_id"])

        # redefinition of families
        if "render" in instance.data["productType"]:
            instance.data["family"] = "write"
            instance.data["productType"] = "write"
            families.insert(0, "render2d")
        elif "prerender" in instance.data["productType"]:
            instance.data["family"] = "write"
            instance.data["productType"] = "write"
            families.insert(0, "prerender")
        instance.data["families"] = families

    def _get_published_workfile_path(self, context):
        """This method is temporary while the class is not inherited from
        AbstractSubmitDeadline."""
        anatomy = context.data["anatomy"]
        # WARNING Hardcoded template name 'default' > may not be used
        publish_template = anatomy.get_template_item(
            "publish", "default", "path"
        )
        for instance in context:
            if (
                instance.data["productType"] != "workfile"
                # Disabled instances won't be integrated
                or instance.data.get("publish") is False
            ):
                continue
            template_data = instance.data["anatomyData"]
            # Expect workfile instance has only one representation
            representation = instance.data["representations"][0]
            # Get workfile extension
            repre_file = representation["files"]
            self.log.info(repre_file)
            ext = os.path.splitext(repre_file)[1].lstrip(".")

            # Fill template data
            template_data["representation"] = representation["name"]
            template_data["ext"] = ext
            template_data["comment"] = None

            template_filled = publish_template.format(template_data)
            script_path = os.path.normpath(template_filled)
            self.log.info(
                "Using published scene for render {}".format(
                    script_path
                )
            )
            return script_path

        return None

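    # Illustrative example (added annotation, not in the original file).
    # With a hypothetical publish template such as
    # "{root}/{project}/publish/{product}/v{version}/{product}.{ext}",
    # the filled path could resolve to something like
    # "/proj/demo/publish/workfileCompositing/v012/workfileCompositing.nk",
    # which is then used as the Deadline "SceneFile" instead of the
    # local workfile.
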
    def payload_submit(
        self,
        instance,
        script_path,
        render_path,
        exe_node_name,
        start_frame,
        end_frame,
        response_data=None,
        baking_submission=False,
    ):
        """Submit payload to Deadline.

        Args:
            instance (pyblish.api.Instance): pyblish instance
            script_path (str): path to nuke script
            render_path (str): path to rendered images
            exe_node_name (str): name of the node to render
            start_frame (int): start frame
            end_frame (int): end frame
            response_data (Optional[dict]): response data from
                previous submission
            baking_submission (Optional[bool]): if it's a baking submission

        Returns:
            requests.Response
        """
        render_dir = os.path.normpath(os.path.dirname(render_path))

        # batch name
        src_filepath = instance.context.data["currentFile"]
        batch_name = os.path.basename(src_filepath)
        job_name = os.path.basename(render_path)

        if is_in_tests():
            batch_name += datetime.now().strftime("%d%m%Y%H%M%S")

        output_filename_0 = self.preview_fname(render_path)

        if not response_data:
            response_data = {}

        try:
            # Ensure render folder exists
            os.makedirs(render_dir)
        except OSError:
            pass

        # resolve any limit groups
        limit_groups = self.get_limit_groups()
        self.log.debug("Limit groups: `{}`".format(limit_groups))

        payload = {
            "JobInfo": {
                # Top-level group name
                "BatchName": batch_name,

                # Job name, as seen in Monitor
                "Name": job_name,

                # Arbitrary username, for visualisation in Monitor
                "UserName": self._deadline_user,

                "Priority": instance.data["attributeValues"].get(
                    "priority", self.priority),
                "ChunkSize": instance.data["attributeValues"].get(
                    "chunk", self.chunk_size),
                "ConcurrentTasks": instance.data["attributeValues"].get(
                    "concurrency",
                    self.concurrent_tasks
                ),

                "Department": self.department,

                "Pool": instance.data.get("primaryPool"),
                "SecondaryPool": instance.data.get("secondaryPool"),
                "Group": self.group,

                "Plugin": "Nuke",
                "Frames": "{start}-{end}".format(
                    start=start_frame,
                    end=end_frame
                ),
                "Comment": self._comment,

                # Optional, enable double-click to preview rendered
                # frames from Deadline Monitor
                "OutputFilename0": output_filename_0.replace("\\", "/"),

                # limiting groups
                "LimitGroups": ",".join(limit_groups)

            },
            "PluginInfo": {
                # Input
                "SceneFile": script_path,

                # Output directory and filename
                "OutputFilePath": render_dir.replace("\\", "/"),
                # "OutputFilePrefix": render_variables["filename_prefix"],

                # Mandatory for Deadline
                "Version": self._ver.group(),

                # Resolve relative references
                "ProjectPath": script_path,
                "AWSAssetFile0": render_path,

                # using GPU by default
                "UseGpu": instance.data["attributeValues"].get(
                    "use_gpu", self.use_gpu),

                # Only the specific write node is rendered.
                "WriteNode": exe_node_name
            },

            # Mandatory for Deadline, may be empty
            "AuxFiles": []
        }

        # Add workfile dependency.
        workfile_dependency = instance.data["attributeValues"].get(
            "workfile_dependency", self.workfile_dependency
        )
        if workfile_dependency:
            payload["JobInfo"].update({"AssetDependency0": script_path})

        # TODO: rewrite for baking with sequences
        if baking_submission:
            payload["JobInfo"].update({
                "JobType": "Normal",
                "ChunkSize": 99999999
            })

        if response_data.get("_id"):
            payload["JobInfo"].update({
                "BatchName": response_data["Props"]["Batch"],
                "JobDependency0": response_data["_id"],
            })

        # Include critical environment variables with submission
        keys = [
            "PYTHONPATH",
            "PATH",
            "AYON_BUNDLE_NAME",
            "AYON_DEFAULT_SETTINGS_VARIANT",
            "AYON_PROJECT_NAME",
            "AYON_FOLDER_PATH",
            "AYON_TASK_NAME",
            "AYON_APP_NAME",
            "FTRACK_API_KEY",
            "FTRACK_API_USER",
            "FTRACK_SERVER",
            "PYBLISHPLUGINPATH",
            "NUKE_PATH",
            "TOOL_ENV",
            "FOUNDRY_LICENSE",
            "OPENPYPE_SG_USER",
        ]

        # add allowed keys from preset if any
        if self.env_allowed_keys:
            keys += self.env_allowed_keys

        environment = {
            key: os.environ[key]
            for key in keys
            if key in os.environ
        }

        # to recognize render jobs
        environment["AYON_RENDER_JOB"] = "1"

        # finally search replace in values of any key
        if self.env_search_replace_values:
            for key, value in environment.items():
                for item in self.env_search_replace_values:
                    environment[key] = value.replace(
                        item["name"], item["value"]
                    )

        payload["JobInfo"].update({
            "EnvironmentKeyValue%d" % index: "{key}={value}".format(
                key=key,
                value=environment[key]
            ) for index, key in enumerate(environment)
        })

        plugin = payload["JobInfo"]["Plugin"]
        self.log.debug("using render plugin : {}".format(plugin))

        self.log.debug("Submitting..")
        self.log.debug(json.dumps(payload, indent=4, sort_keys=True))

        # adding expected files to instance.data
        self.expected_files(
            instance,
            render_path,
            start_frame,
            end_frame
        )

        self.log.debug("__ expectedFiles: `{}`".format(
            instance.data["expectedFiles"]))
        auth = instance.data["deadline"]["auth"]
        verify = instance.data["deadline"]["verify"]
        response = requests_post(self.deadline_url,
                                 json=payload,
                                 timeout=10,
                                 auth=auth,
                                 verify=verify)

        if not response.ok:
            raise Exception(response.text)

        return response

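    # Illustrative example (added annotation, not in the original file).
    # The dict comprehension above flattens the environment into the
    # numbered keys Deadline expects in JobInfo, e.g. (hypothetical
    # values):
    #
    #     "EnvironmentKeyValue0": "PYTHONPATH=/pipeline/site-packages"
    #     "EnvironmentKeyValue1": "AYON_PROJECT_NAME=demo"
    #     "EnvironmentKeyValue2": "AYON_RENDER_JOB=1"
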
    def preflight_check(self, instance):
        """Ensure the frameStart and frameEnd values are integers."""

        for key in ("frameStart", "frameEnd"):
            value = instance.data[key]

            if int(value) == value:
                continue

            self.log.warning(
                "%f=%d was rounded off to nearest integer"
                % (value, int(value))
            )

    def preview_fname(self, path):
        """Return output file path with #### for padding.

        Deadline requires the path to be formatted with # in place of numbers.
        For example `/path/to/render.####.png`

        Args:
            path (str): path to rendered images

        Returns:
            str

        """
        self.log.debug("_ path: `{}`".format(path))
        if "%" in path:
            # Convert printf-style padding (e.g. "%04d") to the hash
            # padding Deadline expects (e.g. "####").
            match = re.search(r"%0(\d+)d", path)
            if match:
                return path.replace(match.group(0), "#" * int(match.group(1)))
        if "#" in path:
            self.log.debug("_ path: `{}`".format(path))
        return path

    def expected_files(
        self,
        instance,
        filepath,
        start_frame,
        end_frame
    ):
        """Create expected files in instance data."""
        if not instance.data.get("expectedFiles"):
            instance.data["expectedFiles"] = []

        dirname = os.path.dirname(filepath)
        file = os.path.basename(filepath)

        # since some files might be already tagged as publish_on_farm
        # we need to avoid adding them to expected files since those would be
        # duplicated into the metadata.json file
        representations = instance.data.get("representations", [])
        # check if file is not in representations with publish_on_farm tag
        for repre in representations:
            # Skip if 'publish_on_farm' not available
            if "publish_on_farm" not in repre.get("tags", []):
                continue

            # in case where single file (video, image) is already in
            # representation file. Will be added to expected files via
            # submit_publish_job.py
            if file in repre.get("files", []):
                self.log.debug(
                    "Skipping expected file: {}".format(filepath))
                return

        # in case path is hashed sequence expression
        # (e.g. /path/to/file.####.png)
        if "#" in file:
            pparts = file.split("#")
            padding = "%0{}d".format(len(pparts) - 1)
            file = pparts[0] + padding + pparts[-1]

        # in case input path was single file (video or image)
        if "%" not in file:
            instance.data["expectedFiles"].append(filepath)
            return

        # shift start frame by 1 if slate is present
        if instance.data.get("slate"):
            start_frame -= 1

        # add sequence files to expected files
        for i in range(start_frame, (end_frame + 1)):
            instance.data["expectedFiles"].append(
                os.path.join(dirname, (file % i)).replace("\\", "/"))

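    # Illustrative example (added annotation, not in the original file).
    # For a hypothetical "render.####.exr" from frames 1001-1003, the
    # padding is rebuilt and then expanded per frame:
    #
    #     >>> file = "render.####.exr"
    #     >>> pparts = file.split("#")
    #     >>> "%0{}d".format(len(pparts) - 1)
    #     '%04d'
    #     >>> [("render.%04d.exr" % i) for i in range(1001, 1004)]
    #     ['render.1001.exr', 'render.1002.exr', 'render.1003.exr']
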
    def get_limit_groups(self):
        """Search for limit group nodes and return group names.

        Limit groups are defined as pairs in the Nuke Deadline submitter
        presets, where the key is the name of the limit group and the
        value is a list of plugin node class names. Thus, when a plugin
        uses more than one node, these will be captured and the triggered
        process will add the appropriate limit group to the payload
        jobinfo attributes.

        Returns:
            list: captured groups list
        """
        # Not all hosts can import this module.
        import nuke

        captured_groups = []
        for limit_group in self.limit_groups:
            lg_name = limit_group["name"]

            for node_class in limit_group["value"]:
                for node in nuke.allNodes(recurseGroups=True):
                    # ignore all nodes not member of defined class
                    if node.Class() not in node_class:
                        continue
                    # ignore all disabled nodes
                    if node["disable"].value():
                        continue
                    # add group name if not already added
                    if lg_name not in captured_groups:
                        captured_groups.append(lg_name)
        return captured_groups

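    # Illustrative example (added annotation, not in the original file).
    # A hypothetical limit_groups preset entry pairing a Deadline limit
    # group with the Nuke node classes that require it:
    #
    #     limit_groups = [
    #         {"name": "ofx_licenses", "value": ["OFXWriter", "OFXReader"]},
    #     ]
    #
    # Any enabled node of a listed class in the script adds
    # "ofx_licenses" to the job's LimitGroups.
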
@ -1,463 +0,0 @@
# -*- coding: utf-8 -*-
"""Submit publishing job to farm."""
import os
import json
import re
from copy import deepcopy

import ayon_api
import pyblish.api

from ayon_core.pipeline import publish
from ayon_core.lib import EnumDef, is_in_tests
from ayon_core.pipeline.version_start import get_versioning_start
from ayon_core.pipeline.farm.pyblish_functions import (
    create_skeleton_instance_cache,
    create_instances_for_cache,
    attach_instances_to_product,
    prepare_cache_representations,
    create_metadata_path
)
from ayon_deadline.abstract_submit_deadline import requests_post


class ProcessSubmittedCacheJobOnFarm(pyblish.api.InstancePlugin,
                                     publish.AYONPyblishPluginMixin,
                                     publish.ColormanagedPyblishPluginMixin):
    """Process cache jobs submitted on the farm.

    This is a replicated version of the submit publish job plugin,
    specifically for cache(s).

    These jobs are dependent on a Deadline job
    submission prior to this plug-in.

    - In case of Deadline, it creates a dependent job on the farm,
      publishing the rendered image sequence.

    Options in instance.data:
        - deadlineSubmissionJob (dict, Required): The returned .json
          data from the job submission to Deadline.

        - outputDir (str, Required): The output directory where the metadata
          file should be generated. It's assumed that this will also be
          the final folder containing the output files.

        - ext (str, Optional): The extension (including `.`) that is required
          in the output filename to be picked up for image sequence
          publishing.

        - expectedFiles (list or dict): explained below

    """

    label = "Submit cache jobs to Deadline"
    order = pyblish.api.IntegratorOrder + 0.2
    icon = "tractor"
    settings_category = "deadline"

    targets = ["local"]

    hosts = ["houdini"]

    families = ["publish.hou"]

    environ_keys = [
        "FTRACK_API_USER",
        "FTRACK_API_KEY",
        "FTRACK_SERVER",
        "AYON_APP_NAME",
        "AYON_USERNAME",
        "AYON_SG_USERNAME",
        "KITSU_LOGIN",
        "KITSU_PWD"
    ]

    # custom deadline attributes
    deadline_department = ""
    deadline_pool = ""
    deadline_pool_secondary = ""
    deadline_group = ""
    deadline_chunk_size = 1
    deadline_priority = None

    # regex for finding frame number in string
    R_FRAME_NUMBER = re.compile(r'.+\.(?P<frame>[0-9]+)\..+')

    plugin_pype_version = "3.0"

    # script path for publish_filesequence.py
    publishing_script = None

    def _submit_deadline_post_job(self, instance, job):
        """Submit publish job to Deadline.

        Returns:
            (str): deadline_publish_job_id
        """
        data = instance.data.copy()
        product_name = data["productName"]
        job_name = "Publish - {}".format(product_name)

        anatomy = instance.context.data['anatomy']

        # instance.data.get("productName") != instances[0]["productName"]
        # 'Main' vs 'renderMain'
        override_version = None
        instance_version = instance.data.get("version")  # take this if exists
        if instance_version != 1:
            override_version = instance_version

        output_dir = self._get_publish_folder(
            anatomy,
            deepcopy(instance.data["anatomyData"]),
            instance.data.get("folderEntity"),
            instance.data["productName"],
            instance.context,
            instance.data["productType"],
            override_version
        )

        # Transfer the environment from the original job to this dependent
        # job so they use the same environment
        metadata_path, rootless_metadata_path = \
            create_metadata_path(instance, anatomy)

        environment = {
            "AYON_PROJECT_NAME": instance.context.data["projectName"],
            "AYON_FOLDER_PATH": instance.context.data["folderPath"],
            "AYON_TASK_NAME": instance.context.data["task"],
            "AYON_USERNAME": instance.context.data["user"],
            "AYON_LOG_NO_COLORS": "1",
            "AYON_IN_TESTS": str(int(is_in_tests())),
            "AYON_PUBLISH_JOB": "1",
            "AYON_RENDER_JOB": "0",
            "AYON_REMOTE_PUBLISH": "0",
            "AYON_BUNDLE_NAME": os.environ["AYON_BUNDLE_NAME"],
            "AYON_DEFAULT_SETTINGS_VARIANT": (
                os.environ["AYON_DEFAULT_SETTINGS_VARIANT"]
            ),
        }

        # add environments from self.environ_keys
        for env_key in self.environ_keys:
            if os.getenv(env_key):
                environment[env_key] = os.environ[env_key]

        priority = self.deadline_priority or instance.data.get("priority", 50)

        instance_settings = self.get_attr_values_from_data(instance.data)
        initial_status = instance_settings.get("publishJobState", "Active")

        args = [
            "--headless",
            'publish',
            '"{}"'.format(rootless_metadata_path),
            "--targets", "deadline",
            "--targets", "farm"
        ]

        # Generate the payload for Deadline submission
        secondary_pool = (
            self.deadline_pool_secondary or instance.data.get("secondaryPool")
        )
        payload = {
            "JobInfo": {
                "Plugin": "Ayon",
                "BatchName": job["Props"]["Batch"],
                "Name": job_name,
                "UserName": job["Props"]["User"],
                "Comment": instance.context.data.get("comment", ""),

                "Department": self.deadline_department,
                "ChunkSize": self.deadline_chunk_size,
                "Priority": priority,
                "InitialStatus": initial_status,

                "Group": self.deadline_group,
                "Pool": self.deadline_pool or instance.data.get("primaryPool"),
                "SecondaryPool": secondary_pool,
                # ensure the output directory with correct slashes
                "OutputDirectory0": output_dir.replace("\\", "/")
            },
            "PluginInfo": {
                "Version": self.plugin_pype_version,
                "Arguments": " ".join(args),
                "SingleFrameOnly": "True",
            },
            # Mandatory for Deadline, may be empty
            "AuxFiles": [],
        }

        if job.get("_id"):
            payload["JobInfo"]["JobDependency0"] = job["_id"]

        for index, (key_, value_) in enumerate(environment.items()):
            payload["JobInfo"].update(
                {
                    "EnvironmentKeyValue%d"
                    % index: "{key}={value}".format(
                        key=key_, value=value_
                    )
                }
            )
        # remove secondary pool
        payload["JobInfo"].pop("SecondaryPool", None)

        self.log.debug("Submitting Deadline publish job ...")

        url = "{}/api/jobs".format(self.deadline_url)
        auth = instance.data["deadline"]["auth"]
        verify = instance.data["deadline"]["verify"]
        response = requests_post(
            url, json=payload, timeout=10, auth=auth, verify=verify)
|
||||
if not response.ok:
|
||||
raise Exception(response.text)
|
||||
|
||||
deadline_publish_job_id = response.json()["_id"]
|
||||
|
||||
return deadline_publish_job_id
|
||||
|
||||
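    # --- Illustrative sketch (not part of the original file) ---
    # The loop above flattens the ``environment`` mapping into Deadline's
    # ``EnvironmentKeyValue<N>`` JobInfo entries. A minimal standalone
    # equivalent, assuming a plain dict input:
    #
    #     def serialize_environment(environment):
    #         return {
    #             "EnvironmentKeyValue{}".format(index): "{}={}".format(k, v)
    #             for index, (k, v) in enumerate(environment.items())
    #         }
    #
    #     serialize_environment({"AYON_PUBLISH_JOB": "1"})
    #     # -> {"EnvironmentKeyValue0": "AYON_PUBLISH_JOB=1"}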
    def process(self, instance):
        # type: (pyblish.api.Instance) -> None
        """Process plugin.

        Detect the type of render farm submission and, in case of
        Deadline, create and post a dependent job. It creates a json file
        with metadata needed for publishing in the render directory.

        Args:
            instance (pyblish.api.Instance): Instance data.

        """
        if not instance.data.get("farm"):
            self.log.debug("Skipping local instance.")
            return

        anatomy = instance.context.data["anatomy"]

        instance_skeleton_data = create_skeleton_instance_cache(instance)
        """
        If the content of the `expectedFiles` list are dictionaries, we
        will handle it as a list of AOVs, creating an instance for every
        one of them.

        Example:
        --------

        expectedFiles = [
            {
                "beauty": [
                    "foo_v01.0001.exr",
                    "foo_v01.0002.exr"
                ],

                "Z": [
                    "boo_v01.0001.exr",
                    "boo_v01.0002.exr"
                ]
            }
        ]

        This will create instances for the `beauty` and `Z` products,
        adding those files to their respective representations.

        If we have only a list of files, we collect all file sequences.
        More than one probably doesn't make sense, but we'll handle it
        by creating one instance with multiple representations.

        Example:
        --------

        expectedFiles = [
            "foo_v01.0001.exr",
            "foo_v01.0002.exr",
            "xxx_v01.0001.exr",
            "xxx_v01.0002.exr"
        ]

        This will result in one instance with two representations:
        `foo` and `xxx`
        """

        if isinstance(instance.data.get("expectedFiles")[0], dict):
            instances = create_instances_for_cache(
                instance, instance_skeleton_data)
        else:
            representations = prepare_cache_representations(
                instance_skeleton_data,
                instance.data.get("expectedFiles"),
                anatomy
            )

            if "representations" not in instance_skeleton_data.keys():
                instance_skeleton_data["representations"] = []

            # add representation
            instance_skeleton_data["representations"] += representations
            instances = [instance_skeleton_data]

        # attach instances to product
        if instance.data.get("attachTo"):
            instances = attach_instances_to_product(
                instance.data.get("attachTo"), instances
            )

        r''' SUBMiT PUBLiSH JOB 2 D34DLiN3
          ____
        '     '            .---.  .---. .--. .---. .--..--..--..--. .---.
        |     |   --= \   |  .  \/   _|/    \|  .  \ ||..|| \\ |/   _|
        | JOB |   --= /   |  |  ||  __|  ..  |  |  | |;_ || \\ ||  __|
        |     |           |____./ \.__|._||_.|___./|_____|||__|\__|\.___|
        ._____.

        '''

        render_job = None
        submission_type = ""
        if instance.data.get("toBeRenderedOn") == "deadline":
            render_job = instance.data.pop("deadlineSubmissionJob", None)
            submission_type = "deadline"

        if not render_job:
            import getpass

            render_job = {}
            self.log.debug("Faking job data ...")
            render_job["Props"] = {}
            # Render job doesn't exist because we do not have a prior
            # submission. We still use data from it, so let's fake it.
            #
            # Batch name reflects the original scene name

            if instance.data.get("assemblySubmissionJobs"):
                render_job["Props"]["Batch"] = instance.data.get(
                    "jobBatchName")
            else:
                batch = os.path.splitext(os.path.basename(
                    instance.context.data.get("currentFile")))[0]
                render_job["Props"]["Batch"] = batch
            # User is the Deadline user
            render_job["Props"]["User"] = instance.context.data.get(
                "deadlineUser", getpass.getuser())

        deadline_publish_job_id = None
        if submission_type == "deadline":
            self.deadline_url = instance.data["deadline"]["url"]
            assert self.deadline_url, "Requires Deadline Webservice URL"

            deadline_publish_job_id = \
                self._submit_deadline_post_job(instance, render_job)

            # Inject Deadline url to instances.
            for inst in instances:
                inst["deadline"] = instance.data["deadline"]

        # publish job file
        publish_job = {
            "folderPath": instance_skeleton_data["folderPath"],
            "frameStart": instance_skeleton_data["frameStart"],
            "frameEnd": instance_skeleton_data["frameEnd"],
            "fps": instance_skeleton_data["fps"],
            "source": instance_skeleton_data["source"],
            "user": instance.context.data["user"],
            "version": instance.context.data["version"],  # workfile version
            "intent": instance.context.data.get("intent"),
            "comment": instance.context.data.get("comment"),
            "job": render_job or None,
            "instances": instances
        }

        if deadline_publish_job_id:
            publish_job["deadline_publish_job_id"] = deadline_publish_job_id

        metadata_path, rootless_metadata_path = \
            create_metadata_path(instance, anatomy)

        with open(metadata_path, "w") as f:
            json.dump(publish_job, f, indent=4, sort_keys=True)

    def _get_publish_folder(self, anatomy, template_data,
                            folder_entity, product_name, context,
                            product_type, version=None):
        """Calculate the publish folder for the Deadline process.

        Extracted logic to pre-calculate the real publish folder, which
        is calculated in IntegrateNew inside the Deadline process.
        This should match the logic in:
            'collect_anatomy_instance_data' - to get correct anatomy,
            family and version for the product, and
            'collect_resources_path' - to get the publish_path.

        Args:
            anatomy (ayon_core.pipeline.anatomy.Anatomy): Project anatomy.
            template_data (dict): Pre-calculated collected data for the
                process.
            folder_entity (dict[str, Any]): Folder entity.
            product_name (str): Product name (actually group name of
                product).
            product_type (str): For the current Deadline process it is
                always 'render'.
                TODO - for generic use, family needs to be dynamically
                    calculated like IntegrateNew does.
            version (int): Override version from instance if it exists.

        Returns:
            str: Publish folder where rendered and published files will
                be stored, based on the 'publish' template.
        """

        project_name = context.data["projectName"]
        host_name = context.data["hostName"]
        if not version:
            version_entity = None
            if folder_entity:
                version_entity = ayon_api.get_last_version_by_product_name(
                    project_name,
                    product_name,
                    folder_entity["id"]
                )

            if version_entity:
                version = int(version_entity["version"]) + 1
            else:
                version = get_versioning_start(
                    project_name,
                    host_name,
                    task_name=template_data["task"]["name"],
                    task_type=template_data["task"]["type"],
                    product_type="render",
                    product_name=product_name,
                    project_settings=context.data["project_settings"]
                )

        task_info = template_data.get("task") or {}

        template_name = publish.get_publish_template_name(
            project_name,
            host_name,
            product_type,
            task_info.get("name"),
            task_info.get("type"),
        )

        template_data["subset"] = product_name
        template_data["family"] = product_type
        template_data["version"] = version
        template_data["product"] = {
            "name": product_name,
            "type": product_type,
        }

        render_dir_template = anatomy.get_template_item(
            "publish", template_name, "directory"
        )
        return render_dir_template.format_strict(template_data)

    @classmethod
    def get_attribute_defs(cls):
        return [
            EnumDef("publishJobState",
                    label="Publish Job State",
                    items=["Active", "Suspended"],
                    default="Active")
        ]
@ -1,585 +0,0 @@
# -*- coding: utf-8 -*-
"""Submit publishing job to farm."""
import os
import json
import re
from copy import deepcopy

import clique
import ayon_api
import pyblish.api

from ayon_core.pipeline import publish
from ayon_core.lib import EnumDef, is_in_tests
from ayon_core.pipeline.version_start import get_versioning_start

from ayon_core.pipeline.farm.pyblish_functions import (
    create_skeleton_instance,
    create_instances_for_aov,
    attach_instances_to_product,
    prepare_representations,
    create_metadata_path
)
from ayon_deadline.abstract_submit_deadline import requests_post


def get_resource_files(resources, frame_range=None):
    """Get resource files at given path.

    If `frame_range` is specified, frames listed in it are removed from
    the collection.

    Arguments:
        resources (list): List of resources.
        frame_range (list): Frames to remove from the collection.

    Returns:
        list of str: List of collected resources.

    """
    res_collections, _ = clique.assemble(resources)
    assert len(res_collections) == 1, "Multiple collections found"
    res_collection = res_collections[0]

    # Remove any frames present in frame_range
    if frame_range is not None:
        for frame in frame_range:
            if frame not in res_collection.indexes:
                continue
            res_collection.indexes.remove(frame)

    return list(res_collection)
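# --- Illustrative usage (not part of the original file) ---
# ``get_resource_files`` expects ``clique`` to assemble exactly one
# collection from the input; frames listed in ``frame_range`` are the
# ones removed from it. For example:
#
#     get_resource_files(
#         ["bake.1001.exr", "bake.1002.exr", "bake.1003.exr"],
#         frame_range=[1002],
#     )
#     # -> ["bake.1001.exr", "bake.1003.exr"]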

class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin,
                                publish.AYONPyblishPluginMixin,
                                publish.ColormanagedPyblishPluginMixin):
    """Process jobs submitted to the farm.

    These jobs depend on a Deadline job submitted prior to this plug-in.

    It creates a dependent job on the farm that publishes the rendered
    image sequence.

    Options in instance.data:
    - deadlineSubmissionJob (dict, Required): The returned .json
      data from the job submission to Deadline.

    - outputDir (str, Required): The output directory where the metadata
      file should be generated. It's assumed that this will also be the
      final folder containing the output files.

    - ext (str, Optional): The extension (including `.`) that is required
      in the output filename to be picked up for image sequence
      publishing.

    - publishJobState (str, Optional): "Active" or "Suspended".
      This defaults to "Active".

    - expectedFiles (list or dict): explained below

    """

    label = "Submit Image Publishing job to Deadline"
    order = pyblish.api.IntegratorOrder + 0.2
    icon = "tractor"

    targets = ["local"]

    hosts = ["fusion", "max", "maya", "nuke", "houdini",
             "celaction", "aftereffects", "harmony", "blender"]

    families = ["render", "render.farm", "render.frames_farm",
                "prerender", "prerender.farm", "prerender.frames_farm",
                "renderlayer", "imagesequence", "image",
                "vrayscene", "maxrender",
                "arnold_rop", "mantra_rop",
                "karma_rop", "vray_rop",
                "redshift_rop", "usdrender"]
    settings_category = "deadline"

    aov_filter = [
        {
            "name": "maya",
            "value": [r".*([Bb]eauty).*"]
        },
        {
            "name": "blender",
            "value": [r".*([Bb]eauty).*"]
        },
        {
            # for everything from AE
            "name": "aftereffects",
            "value": [r".*"]
        },
        {
            "name": "harmony",
            "value": [r".*"]
        },
        {
            "name": "celaction",
            "value": [r".*"]
        },
        {
            "name": "max",
            "value": [r".*"]
        },
    ]

    environ_keys = [
        "FTRACK_API_USER",
        "FTRACK_API_KEY",
        "FTRACK_SERVER",
        "AYON_APP_NAME",
        "AYON_USERNAME",
        "AYON_SG_USERNAME",
        "KITSU_LOGIN",
        "KITSU_PWD"
    ]

    # custom Deadline attributes
    deadline_department = ""
    deadline_pool = ""
    deadline_pool_secondary = ""
    deadline_group = ""
    deadline_chunk_size = 1
    deadline_priority = None

    # regex for finding frame number in string
    R_FRAME_NUMBER = re.compile(r'.+\.(?P<frame>[0-9]+)\..+')

    # mapping of instance properties to be transferred to new instance
    # for every specified family
    instance_transfer = {
        "slate": ["slateFrames", "slate"],
        "review": ["lutPath"],
        "render2d": ["bakingNukeScripts", "version"],
        "renderlayer": ["convertToScanline"]
    }

    # list of family names to transfer to new family if present
    families_transfer = ["render3d", "render2d", "ftrack", "slate"]
    plugin_pype_version = "3.0"

    # script path for publish_filesequence.py
    publishing_script = None

    # poor man's exclusion
    skip_integration_repre_list = []
    def _submit_deadline_post_job(self, instance, job, instances):
        """Submit publish job to Deadline.

        Returns:
            str: deadline_publish_job_id
        """
        data = instance.data.copy()
        product_name = data["productName"]
        job_name = "Publish - {}".format(product_name)

        anatomy = instance.context.data['anatomy']

        # instance.data.get("productName") != instances[0]["productName"]
        # 'Main' vs 'renderMain'
        override_version = None
        instance_version = instance.data.get("version")  # take this if exists
        if instance_version != 1:
            override_version = instance_version

        output_dir = self._get_publish_folder(
            anatomy,
            deepcopy(instance.data["anatomyData"]),
            instance.data.get("folderEntity"),
            instances[0]["productName"],
            instance.context,
            instances[0]["productType"],
            override_version
        )

        # Transfer the environment from the original job to this dependent
        # job so they use the same environment
        metadata_path, rootless_metadata_path = \
            create_metadata_path(instance, anatomy)

        environment = {
            "AYON_PROJECT_NAME": instance.context.data["projectName"],
            "AYON_FOLDER_PATH": instance.context.data["folderPath"],
            "AYON_TASK_NAME": instance.context.data["task"],
            "AYON_USERNAME": instance.context.data["user"],
            "AYON_LOG_NO_COLORS": "1",
            "AYON_IN_TESTS": str(int(is_in_tests())),
            "AYON_PUBLISH_JOB": "1",
            "AYON_RENDER_JOB": "0",
            "AYON_REMOTE_PUBLISH": "0",
            "AYON_BUNDLE_NAME": os.environ["AYON_BUNDLE_NAME"],
            "AYON_DEFAULT_SETTINGS_VARIANT": (
                os.environ["AYON_DEFAULT_SETTINGS_VARIANT"]
            ),
        }

        # add environments from self.environ_keys
        for env_key in self.environ_keys:
            if os.getenv(env_key):
                environment[env_key] = os.environ[env_key]

        priority = self.deadline_priority or instance.data.get("priority", 50)

        instance_settings = self.get_attr_values_from_data(instance.data)
        initial_status = instance_settings.get("publishJobState", "Active")

        args = [
            "--headless",
            'publish',
            '"{}"'.format(rootless_metadata_path),
            "--targets", "deadline",
            "--targets", "farm"
        ]

        # Generate the payload for Deadline submission
        secondary_pool = (
            self.deadline_pool_secondary or instance.data.get("secondaryPool")
        )
        payload = {
            "JobInfo": {
                "Plugin": "Ayon",
                "BatchName": job["Props"]["Batch"],
                "Name": job_name,
                "UserName": job["Props"]["User"],
                "Comment": instance.context.data.get("comment", ""),

                "Department": self.deadline_department,
                "ChunkSize": self.deadline_chunk_size,
                "Priority": priority,
                "InitialStatus": initial_status,

                "Group": self.deadline_group,
                "Pool": self.deadline_pool or instance.data.get("primaryPool"),
                "SecondaryPool": secondary_pool,
                # ensure the output directory uses forward slashes
                "OutputDirectory0": output_dir.replace("\\", "/")
            },
            "PluginInfo": {
                "Version": self.plugin_pype_version,
                "Arguments": " ".join(args),
                "SingleFrameOnly": "True",
            },
            # Mandatory for Deadline, may be empty
            "AuxFiles": [],
        }

        # add assembly jobs as dependencies
        if instance.data.get("tileRendering"):
            self.log.info("Adding tile assembly jobs as dependencies...")
            job_index = 0
            for assembly_id in instance.data.get("assemblySubmissionJobs"):
                payload["JobInfo"]["JobDependency{}".format(
                    job_index)] = assembly_id  # noqa: E501
                job_index += 1
        elif instance.data.get("bakingSubmissionJobs"):
            self.log.info(
                "Adding baking submission jobs as dependencies..."
            )
            job_index = 0
            for assembly_id in instance.data["bakingSubmissionJobs"]:
                payload["JobInfo"]["JobDependency{}".format(
                    job_index)] = assembly_id  # noqa: E501
                job_index += 1
        elif job.get("_id"):
            payload["JobInfo"]["JobDependency0"] = job["_id"]

        for index, (key_, value_) in enumerate(environment.items()):
            payload["JobInfo"].update(
                {
                    "EnvironmentKeyValue%d"
                    % index: "{key}={value}".format(
                        key=key_, value=value_
                    )
                }
            )
        # remove secondary pool
        payload["JobInfo"].pop("SecondaryPool", None)

        self.log.debug("Submitting Deadline publish job ...")

        url = "{}/api/jobs".format(self.deadline_url)
        auth = instance.data["deadline"]["auth"]
        verify = instance.data["deadline"]["verify"]
        response = requests_post(
            url, json=payload, timeout=10, auth=auth, verify=verify)
        if not response.ok:
            raise Exception(response.text)

        deadline_publish_job_id = response.json()["_id"]

        return deadline_publish_job_id
    def process(self, instance):
        # type: (pyblish.api.Instance) -> None
        """Process plugin.

        Detect the type of render farm submission and, in case of
        Deadline, create and post a dependent job. It creates a json file
        with metadata needed for publishing in the render directory.

        Args:
            instance (pyblish.api.Instance): Instance data.

        """
        if not instance.data.get("farm"):
            self.log.debug("Skipping local instance.")
            return

        anatomy = instance.context.data["anatomy"]

        instance_skeleton_data = create_skeleton_instance(
            instance, families_transfer=self.families_transfer,
            instance_transfer=self.instance_transfer)
        """
        If the content of the `expectedFiles` list are dictionaries, we
        will handle it as a list of AOVs, creating an instance for every
        one of them.

        Example:
        --------

        expectedFiles = [
            {
                "beauty": [
                    "foo_v01.0001.exr",
                    "foo_v01.0002.exr"
                ],

                "Z": [
                    "boo_v01.0001.exr",
                    "boo_v01.0002.exr"
                ]
            }
        ]

        This will create instances for the `beauty` and `Z` products,
        adding those files to their respective representations.

        If we have only a list of files, we collect all file sequences.
        More than one probably doesn't make sense, but we'll handle it
        by creating one instance with multiple representations.

        Example:
        --------

        expectedFiles = [
            "foo_v01.0001.exr",
            "foo_v01.0002.exr",
            "xxx_v01.0001.exr",
            "xxx_v01.0002.exr"
        ]

        This will result in one instance with two representations:
        `foo` and `xxx`
        """
        do_not_add_review = False
        if instance.data.get("review") is False:
            self.log.debug("Instance has review explicitly disabled.")
            do_not_add_review = True

        aov_filter = {
            item["name"]: item["value"]
            for item in self.aov_filter
        }
        if isinstance(instance.data.get("expectedFiles")[0], dict):
            instances = create_instances_for_aov(
                instance, instance_skeleton_data,
                aov_filter,
                self.skip_integration_repre_list,
                do_not_add_review
            )
        else:
            representations = prepare_representations(
                instance_skeleton_data,
                instance.data.get("expectedFiles"),
                anatomy,
                aov_filter,
                self.skip_integration_repre_list,
                do_not_add_review,
                instance.context,
                self
            )

            if "representations" not in instance_skeleton_data.keys():
                instance_skeleton_data["representations"] = []

            # add representation
            instance_skeleton_data["representations"] += representations
            instances = [instance_skeleton_data]

        # attach instances to product
        if instance.data.get("attachTo"):
            instances = attach_instances_to_product(
                instance.data.get("attachTo"), instances
            )

        r''' SUBMiT PUBLiSH JOB 2 D34DLiN3
          ____
        '     '            .---.  .---. .--. .---. .--..--..--..--. .---.
        |     |   --= \   |  .  \/   _|/    \|  .  \ ||..|| \\ |/   _|
        | JOB |   --= /   |  |  ||  __|  ..  |  |  | |;_ || \\ ||  __|
        |     |           |____./ \.__|._||_.|___./|_____|||__|\__|\.___|
        ._____.

        '''

        render_job = instance.data.pop("deadlineSubmissionJob", None)
        if not render_job and instance.data.get("tileRendering") is False:
            raise AssertionError(("Cannot continue without valid "
                                  "Deadline submission."))
        if not render_job:
            import getpass

            render_job = {}
            self.log.debug("Faking job data ...")
            render_job["Props"] = {}
            # Render job doesn't exist because we do not have a prior
            # submission. We still use data from it, so let's fake it.
            #
            # Batch name reflects the original scene name

            if instance.data.get("assemblySubmissionJobs"):
                render_job["Props"]["Batch"] = instance.data.get(
                    "jobBatchName")
            else:
                batch = os.path.splitext(os.path.basename(
                    instance.context.data.get("currentFile")))[0]
                render_job["Props"]["Batch"] = batch
            # User is the Deadline user
            render_job["Props"]["User"] = instance.context.data.get(
                "deadlineUser", getpass.getuser())

            render_job["Props"]["Env"] = {
                "FTRACK_API_USER": os.environ.get("FTRACK_API_USER"),
                "FTRACK_API_KEY": os.environ.get("FTRACK_API_KEY"),
                "FTRACK_SERVER": os.environ.get("FTRACK_SERVER"),
            }

        # get default Deadline webservice url from the deadline module
        self.deadline_url = instance.data["deadline"]["url"]
        assert self.deadline_url, "Requires Deadline Webservice URL"

        deadline_publish_job_id = \
            self._submit_deadline_post_job(instance, render_job, instances)

        # Inject Deadline url to instances to query DL for job id for
        # overrides
        for inst in instances:
            inst["deadline"] = instance.data["deadline"]

        # publish job file
        publish_job = {
            "folderPath": instance_skeleton_data["folderPath"],
            "frameStart": instance_skeleton_data["frameStart"],
            "frameEnd": instance_skeleton_data["frameEnd"],
            "fps": instance_skeleton_data["fps"],
            "source": instance_skeleton_data["source"],
            "user": instance.context.data["user"],
            "version": instance.context.data["version"],  # workfile version
            "intent": instance.context.data.get("intent"),
            "comment": instance.context.data.get("comment"),
            "job": render_job or None,
            "instances": instances
        }

        if deadline_publish_job_id:
            publish_job["deadline_publish_job_id"] = deadline_publish_job_id

        # add audio to metadata file if available
        audio_file = instance.context.data.get("audioFile")
        if audio_file and os.path.isfile(audio_file):
            publish_job.update({"audio": audio_file})

        metadata_path, rootless_metadata_path = \
            create_metadata_path(instance, anatomy)

        with open(metadata_path, "w") as f:
            json.dump(publish_job, f, indent=4, sort_keys=True)

    def _get_publish_folder(self, anatomy, template_data,
                            folder_entity, product_name, context,
                            product_type, version=None):
        """Calculate the publish folder for the Deadline process.

        Extracted logic to pre-calculate the real publish folder, which
        is calculated in IntegrateNew inside the Deadline process.
        This should match the logic in:
            'collect_anatomy_instance_data' - to get correct anatomy,
            family and version for the product name, and
            'collect_resources_path' - to get the publish_path.

        Args:
            anatomy (ayon_core.pipeline.anatomy.Anatomy): Project anatomy.
            template_data (dict): Pre-calculated collected data for the
                process.
            folder_entity (dict[str, Any]): Folder entity.
            product_name (str): Product name (actually group name of
                product).
            product_type (str): For the current Deadline process it is
                always 'render'.
                TODO - for generic use, family needs to be dynamically
                    calculated like IntegrateNew does.
            version (int): Override version from instance if it exists.

        Returns:
            str: Publish folder where rendered and published files will
                be stored, based on the 'publish' template.
        """

        project_name = context.data["projectName"]
        host_name = context.data["hostName"]
        if not version:
            version_entity = None
            if folder_entity:
                version_entity = ayon_api.get_last_version_by_product_name(
                    project_name,
                    product_name,
                    folder_entity["id"]
                )

            if version_entity:
                version = int(version_entity["version"]) + 1
            else:
                version = get_versioning_start(
                    project_name,
                    host_name,
                    task_name=template_data["task"]["name"],
                    task_type=template_data["task"]["type"],
                    product_type="render",
                    product_name=product_name,
                    project_settings=context.data["project_settings"]
                )

        task_info = template_data.get("task") or {}

        template_name = publish.get_publish_template_name(
            project_name,
            host_name,
            product_type,
            task_info.get("name"),
            task_info.get("type"),
        )

        template_data["version"] = version
        template_data["subset"] = product_name
        template_data["family"] = product_type
        template_data["product"] = {
            "name": product_name,
            "type": product_type,
        }

        render_dir_template = anatomy.get_template_item(
            "publish", template_name, "directory"
        )
        return render_dir_template.format_strict(template_data)

    @classmethod
    def get_attribute_defs(cls):
        return [
            EnumDef("publishJobState",
                    label="Publish Job State",
                    items=["Active", "Suspended"],
                    default="Active")
        ]
@ -1,52 +0,0 @@
import pyblish.api

from ayon_core.pipeline import PublishXmlValidationError

from ayon_deadline.abstract_submit_deadline import requests_get


class ValidateDeadlineConnection(pyblish.api.InstancePlugin):
    """Validate that the Deadline Web Service is running."""

    label = "Validate Deadline Web Service"
    order = pyblish.api.ValidatorOrder
    hosts = ["maya", "nuke", "aftereffects", "harmony", "fusion"]
    families = ["renderlayer", "render", "render.farm"]

    # cache
    responses = {}

    def process(self, instance):
        if not instance.data.get("farm"):
            self.log.debug("Should not be processed on farm, skipping.")
            return

        deadline_url = instance.data["deadline"]["url"]
        assert deadline_url, "Requires Deadline Webservice URL"

        kwargs = {}
        if instance.data["deadline"]["require_authentication"]:
            auth = instance.data["deadline"]["auth"]
            kwargs["auth"] = auth

            if not auth[0]:
                raise PublishXmlValidationError(
                    self,
                    "Deadline requires authentication. "
                    "At least username is required to be set in "
                    "Site Settings.")

        if deadline_url not in self.responses:
            self.responses[deadline_url] = requests_get(deadline_url, **kwargs)

        response = self.responses[deadline_url]
        if response.status_code == 401:
            raise PublishXmlValidationError(
                self,
                "Deadline requires authentication. "
                "Provided credentials are not working. "
                "Please change them in Site Settings")
        assert response.ok, "Response must be ok"
        assert response.text.startswith("Deadline Web Service "), (
            "Web service did not respond with 'Deadline Web Service'"
        )
@ -1,84 +0,0 @@
import pyblish.api

from ayon_core.pipeline import (
    PublishXmlValidationError,
    OptionalPyblishPluginMixin
)


class ValidateDeadlinePools(OptionalPyblishPluginMixin,
                            pyblish.api.InstancePlugin):
    """Validate primaryPool and secondaryPool on instance.

    Values are set on the instance when the instance is created, or by
    settings in CollectDeadlinePools.
    """

    label = "Validate Deadline Pools"
    order = pyblish.api.ValidatorOrder
    families = ["rendering",
                "render.farm",
                "render.frames_farm",
                "renderFarm",
                "renderlayer",
                "maxrender",
                "publish.hou"]
    optional = True

    # cache
    pools_per_url = {}

    def process(self, instance):
        if not self.is_active(instance.data):
            return

        if not instance.data.get("farm"):
            self.log.debug("Skipping local instance.")
            return

        deadline_url = instance.data["deadline"]["url"]
        addons_manager = instance.context.data["ayonAddonsManager"]
        deadline_addon = addons_manager["deadline"]
        pools = self.get_pools(
            deadline_addon,
            deadline_url,
            instance.data["deadline"].get("auth")
        )

        invalid_pools = {}
        primary_pool = instance.data.get("primaryPool")
        if primary_pool and primary_pool not in pools:
            invalid_pools["primary"] = primary_pool

        secondary_pool = instance.data.get("secondaryPool")
        if secondary_pool and secondary_pool not in pools:
            invalid_pools["secondary"] = secondary_pool

        if invalid_pools:
            message = "\n".join(
                "{} pool '{}' not available on Deadline".format(
                    key.title(), pool)
                for key, pool in invalid_pools.items()
            )
            raise PublishXmlValidationError(
                plugin=self,
                message=message,
                formatting_data={"pools_str": ", ".join(pools)}
            )

    def get_pools(self, deadline_addon, deadline_url, auth):
        if deadline_url not in self.pools_per_url:
            self.log.debug(
                "Querying available pools for Deadline url: {}".format(
                    deadline_url)
            )
            pools = deadline_addon.get_deadline_pools(
                deadline_url, auth=auth, log=self.log
            )
            # some Deadline setups return "none" as a pool name
            if "none" not in pools:
                pools.append("none")
            self.log.info("Available pools: {}".format(pools))
            self.pools_per_url[deadline_url] = pools

        return self.pools_per_url[deadline_url]
@ -1,256 +0,0 @@
import os
import requests

import pyblish.api

from ayon_core.lib import collect_frames
from ayon_deadline.abstract_submit_deadline import requests_get


class ValidateExpectedFiles(pyblish.api.InstancePlugin):
    """Compare rendered and expected files."""

    label = "Validate rendered files from Deadline"
    order = pyblish.api.ValidatorOrder
    families = ["render"]
    targets = ["deadline"]

    # check if the actual frame range on the render job wasn't different
    # - the case when an artist wants to render only a subset of frames
    allow_user_override = True

    def process(self, instance):
        """Process all the nodes in the instance."""

        # get dependency job ids for retrieving the frame list
        dependent_job_ids = self._get_dependent_job_ids(instance)

        if not dependent_job_ids:
            self.log.warning("No dependent jobs found for instance: {}"
                             "".format(instance))
            return

        # get list of frames from dependent jobs
        frame_list = self._get_dependent_jobs_frames(
            instance, dependent_job_ids)

        for repre in instance.data["representations"]:
            expected_files = self._get_expected_files(repre)

            staging_dir = repre["stagingDir"]
            existing_files = self._get_existing_files(staging_dir)

            if self.allow_user_override:
                # We always check for user override because the user might
                # have also overridden the Job frame list to be longer than
                # the originally submitted frame range
                # todo: We should first check if the Job frame range was
                #   overridden at all, so we don't unnecessarily override
                #   anything
                file_name_template, frame_placeholder = \
                    self._get_file_name_template_and_placeholder(
                        expected_files)

                if not file_name_template:
                    raise RuntimeError(
                        "Unable to retrieve file_name template "
                        "from files: {}".format(expected_files))

                job_expected_files = self._get_job_expected_files(
                    file_name_template,
                    frame_placeholder,
                    frame_list)

                job_files_diff = job_expected_files.difference(expected_files)
                if job_files_diff:
                    self.log.debug(
                        "Detected difference in expected output files from "
                        "Deadline job. Assuming an updated frame list by the "
                        "user. Difference: {}".format(sorted(job_files_diff))
                    )

                    # Update the representation expected files
                    self.log.info("Update range from actual job range "
                                  "to frame list: {}".format(frame_list))
                    # single item files must be a string, not a list
                    repre["files"] = (sorted(job_expected_files)
                                      if len(job_expected_files) > 1 else
                                      list(job_expected_files)[0])

                    # Update the expected files
                    expected_files = job_expected_files

            # We don't use set.difference because we do allow other existing
            # files to be in the folder that we might not want to use.
            missing = expected_files - existing_files
            if missing:
                raise RuntimeError(
                    "Missing expected files: {}\n"
                    "Expected files: {}\n"
                    "Existing files: {}".format(
                        sorted(missing),
                        sorted(expected_files),
                        sorted(existing_files)
                    )
                )
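    # --- Illustrative note (not part of the original file) ---
    # With allow_user_override enabled, an artist who manually shrinks the
    # job's frame list on the farm (e.g. from 1001-1100 to 1001-1010) gets
    # the representation's expected files rewritten to the ten rendered
    # frames instead of failing the "missing expected files" check below.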
    def _get_dependent_job_ids(self, instance):
        """Return the list of dependent job ids from instance metadata.json.

        Args:
            instance (pyblish.api.Instance): pyblish instance

        Returns:
            list: List of dependent job ids.

        """
        dependent_job_ids = []

        # job_id collected from metadata.json
        original_job_id = instance.data["render_job_id"]

        dependent_job_ids_env = os.environ.get("RENDER_JOB_IDS")
        if dependent_job_ids_env:
            dependent_job_ids = dependent_job_ids_env.split(',')
        elif original_job_id:
            dependent_job_ids = [original_job_id]

        return dependent_job_ids

    def _get_dependent_jobs_frames(self, instance, dependent_job_ids):
        """Return the list of frame ranges from all render jobs.

        The render job might be re-submitted, so the job_id in
        metadata.json could be invalid. GlobalJobPreload injects the
        current job id into RENDER_JOB_IDS.

        Args:
            instance (pyblish.api.Instance): pyblish instance
            dependent_job_ids (list): list of dependent job ids

        Returns:
            list: Frame range tokens collected from the jobs.
        """
        all_frame_lists = []

        for job_id in dependent_job_ids:
            job_info = self._get_job_info(instance, job_id)
            frame_list = job_info["Props"].get("Frames")
            if frame_list:
                all_frame_lists.extend(frame_list.split(','))

        return all_frame_lists
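    # --- Illustrative note (not part of the original file) ---
    # RENDER_JOB_IDS is a comma separated string of job ids injected by
    # GlobalJobPreload, e.g. (hypothetical ids):
    #
    #     RENDER_JOB_IDS="5f3e0a...01,5f3e0a...02"
    #
    # When present it takes precedence over the job id collected from
    # metadata.json, since the render job may have been re-submitted.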
    def _get_job_expected_files(self,
                                file_name_template,
                                frame_placeholder,
                                frame_list):
        """Calculate the names of expected rendered files.

        Might be different from the expected files from submission if the
        user explicitly and manually changed the frame list on the
        Deadline job.

        """
        # no frames in the file name at all,
        # e.g. 'renderCompositingMain.withLut.mov'
        if not frame_placeholder:
            return {file_name_template}

        real_expected_rendered = set()
        src_padding_exp = "%0{}d".format(len(frame_placeholder))
        for frames in frame_list:
            if '-' not in frames:  # single frame
                frames = "{}-{}".format(frames, frames)

            start, end = frames.split('-')
            for frame in range(int(start), int(end) + 1):
                ren_name = file_name_template.replace(
                    frame_placeholder, src_padding_exp % frame)
                real_expected_rendered.add(ren_name)

        return real_expected_rendered
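    # --- Illustrative sketch (not part of the original file) ---
    # The expansion above is equivalent to this standalone helper,
    # assuming "#"-style placeholders and "start-end" frame tokens:
    #
    #     def expand_frame_list(template, placeholder, frame_list):
    #         expanded = set()
    #         padding = "%0{}d".format(len(placeholder))
    #         for token in frame_list:
    #             start, _, end = token.partition("-")
    #             end = end or start
    #             for frame in range(int(start), int(end) + 1):
    #                 expanded.add(
    #                     template.replace(placeholder, padding % frame))
    #         return expanded
    #
    #     expand_frame_list("render.####.exr", "####", ["1001-1002"])
    #     # -> {"render.1001.exr", "render.1002.exr"}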
    def _get_file_name_template_and_placeholder(self, files):
        """Return the file name with its frame replaced by a "#"
        placeholder, together with that placeholder."""
        sources_and_frames = collect_frames(files)

        file_name_template = frame_placeholder = None
        for file_name, frame in sources_and_frames.items():

            # There might be cases where clique was unable to collect
            # collections in `collect_frames` - thus we capture that case
            if frame is not None:
                frame_placeholder = "#" * len(frame)

                file_name_template = os.path.basename(
                    file_name.replace(frame, frame_placeholder))
            else:
                file_name_template = file_name
            break

        return file_name_template, frame_placeholder

    def _get_job_info(self, instance, job_id):
        """Call Deadline for the actual job info of 'job_id'.

        Might be different from the job info saved in metadata.json if
        the user manually changes the job before or during rendering.

        Args:
            instance (pyblish.api.Instance): pyblish instance
            job_id (str): Deadline job id

        Returns:
            dict: Job info from Deadline.

        """
        deadline_url = instance.data["deadline"]["url"]
        assert deadline_url, "Requires Deadline Webservice URL"

        url = "{}/api/jobs?JobID={}".format(deadline_url, job_id)
        try:
            kwargs = {}
            auth = instance.data["deadline"]["auth"]
            if auth:
                kwargs["auth"] = auth
            response = requests_get(url, **kwargs)
        except requests.exceptions.ConnectionError:
            self.log.error("Deadline is not accessible at "
                           "{}".format(deadline_url))
            return {}

        if not response.ok:
            self.log.error("Submission failed!")
            self.log.error(response.status_code)
            self.log.error(response.content)
            raise RuntimeError(response.text)

        json_content = response.json()
        if json_content:
            return json_content.pop()
        return {}

    def _get_existing_files(self, staging_dir):
        """Return the set of existing file names in 'staging_dir'."""
        existing_files = set()
        for file_name in os.listdir(staging_dir):
            existing_files.add(file_name)
        return existing_files

    def _get_expected_files(self, repre):
        """Return the set of file names in representation['files'].

        The representations are collected from `CollectRenderedFiles`
        using the metadata.json file submitted along with the render job.

        Args:
            repre (dict): The representation containing 'files'

        Returns:
            set: Set of expected file names in the staging directory.

        """
        expected_files = set()

        files = repre["files"]
        if not isinstance(files, list):
            files = [files]

        for file_name in files:
            expected_files.add(file_name)
        return expected_files
Binary file not shown.
@ -1,9 +0,0 @@
[Arguments]
Type=string
Label=Arguments
Category=Python Options
CategoryOrder=0
Index=1
Description=The arguments to pass to the script. If no arguments are required, leave this blank.
Required=false
DisableIfBlank=true
@ -1,35 +0,0 @@
[About]
Type=label
Label=About
Category=About Plugin
CategoryOrder=-1
Index=0
Default=Ayon Plugin for Deadline
Description=Not configurable

[AyonExecutable]
Type=multilinemultifilename
Label=Ayon Executable
Category=Ayon Executables
CategoryOrder=1
Index=0
Default=
Description=The path to the Ayon executable. Enter alternative paths on separate lines.

[AyonServerUrl]
Type=string
Label=Ayon Server Url
Category=Ayon Credentials
CategoryOrder=2
Index=0
Default=
Description=URL of the AYON server

[AyonApiKey]
Type=password
Label=Ayon API key
Category=Ayon Credentials
CategoryOrder=2
Index=0
Default=
Description=API key for a service account on the AYON server
@ -1,159 +0,0 @@
#!/usr/bin/env python3

from System.IO import Path
from System.Text.RegularExpressions import Regex

from Deadline.Plugins import PluginType, DeadlinePlugin
from Deadline.Scripting import (
    StringUtils,
    FileUtils,
    RepositoryUtils
)

import re
import os
import platform

__version__ = "1.0.0"

######################################################################
# This is the function that Deadline calls to get an instance of the
# main DeadlinePlugin class.
######################################################################
def GetDeadlinePlugin():
    return AyonDeadlinePlugin()


def CleanupDeadlinePlugin(deadlinePlugin):
    deadlinePlugin.Cleanup()


class AyonDeadlinePlugin(DeadlinePlugin):
    """Standalone plugin for publishing from AYON.

    Calls the AYON executable 'ayon_console' from the first correctly
    found file based on plugin configuration. Uses the 'publish' command
    and passes the path to the metadata json file, which contains all
    information needed for the publish process.
    """
    def __init__(self):
        super().__init__()
        self.InitializeProcessCallback += self.InitializeProcess
        self.RenderExecutableCallback += self.RenderExecutable
        self.RenderArgumentCallback += self.RenderArgument

    def Cleanup(self):
        for stdoutHandler in self.StdoutHandlers:
            del stdoutHandler.HandleCallback

        del self.InitializeProcessCallback
        del self.RenderExecutableCallback
        del self.RenderArgumentCallback

    def InitializeProcess(self):
        self.LogInfo(
            "Initializing process with AYON plugin {}".format(__version__)
        )
        self.PluginType = PluginType.Simple
        self.StdoutHandling = True

        self.SingleFramesOnly = self.GetBooleanPluginInfoEntryWithDefault(
            "SingleFramesOnly", False)
        self.LogInfo("Single Frames Only: %s" % self.SingleFramesOnly)

        self.AddStdoutHandlerCallback(
            r".*Progress: (\d+)%.*").HandleCallback += self.HandleProgress
    def RenderExecutable(self):
        job = self.GetJob()

        # set required env vars for AYON
        # cannot be in InitializeProcess as it is too soon
        config = RepositoryUtils.GetPluginConfig("Ayon")
        ayon_server_url = (
            job.GetJobEnvironmentKeyValue("AYON_SERVER_URL") or
            config.GetConfigEntryWithDefault("AyonServerUrl", "")
        )
        ayon_api_key = (
            job.GetJobEnvironmentKeyValue("AYON_API_KEY") or
            config.GetConfigEntryWithDefault("AyonApiKey", "")
        )
        ayon_bundle_name = job.GetJobEnvironmentKeyValue("AYON_BUNDLE_NAME")

        environment = {
            "AYON_SERVER_URL": ayon_server_url,
            "AYON_API_KEY": ayon_api_key,
            "AYON_BUNDLE_NAME": ayon_bundle_name,
        }

        for env, val in environment.items():
            self.SetEnvironmentVariable(env, val)

        exe_list = self.GetConfigEntry("AyonExecutable")
        # clean '\ ' for MacOS pasting
        if platform.system().lower() == "darwin":
            exe_list = exe_list.replace("\\ ", " ")

        expanded_paths = []
        for path in exe_list.split(";"):
            if path.startswith("~"):
                path = os.path.expanduser(path)
            expanded_paths.append(path)
        exe = FileUtils.SearchFileList(";".join(expanded_paths))

        if exe == "":
            self.FailRender(
                "AYON executable was not found in the semicolon separated "
                "list: \"{}\". The path to the render executable can be "
                "configured from the Plugin Configuration in the Deadline "
                "Monitor.".format(exe_list)
            )
        return exe
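    # --- Illustrative note (not part of the original file) ---
    # "AyonExecutable" is a semicolon separated candidate list; the first
    # existing file wins. A hypothetical configured value:
    #
    #     ~/ayon/ayon_console;C:/Program Files/Ynput/AYON/ayon_console.exe
    #
    # "~" prefixes are expanded per user above before the list is searched.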
    def RenderArgument(self):
        arguments = str(self.GetPluginInfoEntryWithDefault("Arguments", ""))
        arguments = RepositoryUtils.CheckPathMapping(arguments)

        # Note: the inline "(?i)" flags are moved to the start of each
        # pattern; Python 3.11+ rejects global flags that are not at the
        # beginning of the expression.
        arguments = re.sub(r"(?i)<STARTFRAME>", str(self.GetStartFrame()),
                           arguments)
        arguments = re.sub(r"(?i)<ENDFRAME>", str(self.GetEndFrame()),
                           arguments)
        arguments = re.sub(r"(?i)<QUOTE>", "\"", arguments)

        arguments = self.ReplacePaddedFrame(arguments,
                                            r"(?i)<STARTFRAME%([0-9]+)>",
                                            self.GetStartFrame())
        arguments = self.ReplacePaddedFrame(arguments,
                                            r"(?i)<ENDFRAME%([0-9]+)>",
                                            self.GetEndFrame())

        count = 0
        for filename in self.GetAuxiliaryFilenames():
            localAuxFile = Path.Combine(self.GetJobsDataDirectory(), filename)
            arguments = re.sub(r"(?i)<AUXFILE" + str(count) + r">",
                               localAuxFile.replace("\\", "/"), arguments)
            count += 1

        return arguments
    def ReplacePaddedFrame(self, arguments, pattern, frame):
        frameRegex = Regex(pattern)
        while True:
            frameMatch = frameRegex.Match(arguments)
            if not frameMatch.Success:
                break
            paddingSize = int(frameMatch.Groups[1].Value)
            if paddingSize > 0:
                padding = StringUtils.ToZeroPaddedString(
                    frame, paddingSize, False)
            else:
                padding = str(frame)
            arguments = arguments.replace(
                frameMatch.Groups[0].Value, padding)

        return arguments
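    # --- Illustrative note (not part of the original file) ---
    # An assumed example of how ReplacePaddedFrame expands padded tokens,
    # given GetStartFrame() returning 7:
    #   "<STARTFRAME%4>" -> "0007"   (zero padded to 4 digits)
    #   "<STARTFRAME%0>" -> "7"      (no padding requested)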
    def HandleProgress(self):
        progress = float(self.GetRegexMatch(1))
        self.SetProgress(progress)
Binary file not shown.
@ -1,38 +0,0 @@
[About]
Type=label
Label=About
Category=About Plugin
CategoryOrder=-1
Index=0
Default=Celaction Plugin for Deadline
Description=Not configurable

[ConcurrentTasks]
Type=label
Label=ConcurrentTasks
Category=About Plugin
CategoryOrder=-1
Index=0
Default=True
Description=Not configurable

[Executable]
Type=filename
Label=Executable
Category=Config
CategoryOrder=0
CategoryIndex=0
Description=The command executable to run
Required=false
DisableIfBlank=true

[RenderNameSeparator]
Type=string
Label=RenderNameSeparator
Category=Config
CategoryOrder=0
CategoryIndex=1
Description=The separator to use for naming
Required=false
DisableIfBlank=true
Default=.
@ -1,122 +0,0 @@
|
|||
from System.Text.RegularExpressions import *
|
||||
|
||||
from Deadline.Plugins import *
|
||||
from Deadline.Scripting import *
|
||||
|
||||
import _winreg
|
||||
|
||||
######################################################################
|
||||
# This is the function that Deadline calls to get an instance of the
|
||||
# main DeadlinePlugin class.
|
||||
######################################################################
|
||||
|
||||
|
||||
def GetDeadlinePlugin():
|
||||
return CelActionPlugin()
|
||||
|
||||
|
||||
def CleanupDeadlinePlugin(deadlinePlugin):
|
||||
deadlinePlugin.Cleanup()
|
||||
|
||||
######################################################################
|
||||
# This is the main DeadlinePlugin class for the CelAction plugin.
|
||||
######################################################################
|
||||
|
class CelActionPlugin(DeadlinePlugin):

    def __init__(self):
        self.InitializeProcessCallback += self.InitializeProcess
        self.RenderExecutableCallback += self.RenderExecutable
        self.RenderArgumentCallback += self.RenderArgument
        self.StartupDirectoryCallback += self.StartupDirectory

    def Cleanup(self):
        for stdoutHandler in self.StdoutHandlers:
            del stdoutHandler.HandleCallback

        del self.InitializeProcessCallback
        del self.RenderExecutableCallback
        del self.RenderArgumentCallback
        del self.StartupDirectoryCallback

    def GetCelActionRegistryKey(self):
        # Modify registry for frame separation
        path = r'Software\CelAction\CelAction2D\User Settings'
        _winreg.CreateKey(_winreg.HKEY_CURRENT_USER, path)
        regKey = _winreg.OpenKey(_winreg.HKEY_CURRENT_USER, path, 0,
                                 _winreg.KEY_ALL_ACCESS)
        return regKey

    def GetSeparatorValue(self, regKey):
        useSeparator, _ = _winreg.QueryValueEx(
            regKey, 'RenderNameUseSeparator')
        separator, _ = _winreg.QueryValueEx(regKey, 'RenderNameSeparator')

        return useSeparator, separator

    def SetSeparatorValue(self, regKey, useSeparator, separator):
        _winreg.SetValueEx(regKey, 'RenderNameUseSeparator',
                           0, _winreg.REG_DWORD, useSeparator)
        _winreg.SetValueEx(regKey, 'RenderNameSeparator',
                           0, _winreg.REG_SZ, separator)

    def InitializeProcess(self):
        # Set the plugin specific settings.
        self.SingleFramesOnly = False

        # Set the process specific settings.
        self.StdoutHandling = True
        self.PopupHandling = True

        # Ignore 'celaction' pop-up dialogs
        self.AddPopupIgnorer(".*Rendering.*")
        self.AddPopupIgnorer(".*AutoRender.*")
        self.AddPopupIgnorer(".*Wait.*")
        self.AddPopupIgnorer(".*Timeline Scrub.*")

        celActionRegKey = self.GetCelActionRegistryKey()

        self.SetSeparatorValue(celActionRegKey, 1, self.GetConfigEntryWithDefault(
            "RenderNameSeparator", ".").strip())

    def RenderExecutable(self):
        return RepositoryUtils.CheckPathMapping(self.GetConfigEntry("Executable").strip())

    def RenderArgument(self):
        arguments = RepositoryUtils.CheckPathMapping(
            self.GetPluginInfoEntry("Arguments").strip())
        arguments = arguments.replace(
            "<STARTFRAME>", str(self.GetStartFrame()))
        arguments = arguments.replace("<ENDFRAME>", str(self.GetEndFrame()))
        arguments = self.ReplacePaddedFrame(
            arguments, "<STARTFRAME%([0-9]+)>", self.GetStartFrame())
        arguments = self.ReplacePaddedFrame(
            arguments, "<ENDFRAME%([0-9]+)>", self.GetEndFrame())
        arguments = arguments.replace("<QUOTE>", "\"")
        return arguments

    def StartupDirectory(self):
        return self.GetPluginInfoEntryWithDefault("StartupDirectory", "").strip()

    def ReplacePaddedFrame(self, arguments, pattern, frame):
        frameRegex = Regex(pattern)
        while True:
            frameMatch = frameRegex.Match(arguments)
            if frameMatch.Success:
                paddingSize = int(frameMatch.Groups[1].Value)
                if paddingSize > 0:
                    padding = StringUtils.ToZeroPaddedString(
                        frame, paddingSize, False)
                else:
                    padding = str(frame)
                arguments = arguments.replace(
                    frameMatch.Groups[0].Value, padding)
            else:
                break

        return arguments
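
# --- Illustrative sketch (not part of the original plugin) ----------------
# A plain-Python equivalent of ReplacePaddedFrame above, to show what the
# "<STARTFRAME%N>" style tokens expand to. The Deadline build uses .NET
# Regex/StringUtils instead of the 're'/str.zfill calls assumed here.
# import re
#
# def replace_padded_frame(arguments, pattern, frame):
#     def _sub(match):
#         padding_size = int(match.group(1))
#         if padding_size > 0:
#             return str(frame).zfill(padding_size)
#         return str(frame)
#     return re.sub(pattern, _sub, arguments)
#
# replace_padded_frame("-range <STARTFRAME%4>", "<STARTFRAME%([0-9]+)>", 10)
# -> "-range 0010"
# ---------------------------------------------------------------------------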

@ -1,662 +0,0 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import os
import tempfile
from datetime import datetime
import subprocess
import json
import platform
import uuid
import re
from Deadline.Scripting import (
    RepositoryUtils,
    FileUtils,
    DirectoryUtils,
)
__version__ = "1.1.1"
VERSION_REGEX = re.compile(
    r"(?P<major>0|[1-9]\d*)"
    r"\.(?P<minor>0|[1-9]\d*)"
    r"\.(?P<patch>0|[1-9]\d*)"
    r"(?:-(?P<prerelease>[a-zA-Z\d\-.]*))?"
    r"(?:\+(?P<buildmetadata>[a-zA-Z\d\-.]*))?"
)


class OpenPypeVersion:
    """Fake semver version class for OpenPype version purposes.

    The version is parsed from a string and supports only the comparisons
    needed to pick an exactly matching or latest compatible build.
    """
    def __init__(self, major, minor, patch, prerelease, origin=None):
        self.major = major
        self.minor = minor
        self.patch = patch
        self.prerelease = prerelease

        is_valid = True
        if major is None or minor is None or patch is None:
            is_valid = False
        self.is_valid = is_valid

        if origin is None:
            base = "{}.{}.{}".format(str(major), str(minor), str(patch))
            if not prerelease:
                origin = base
            else:
                origin = "{}-{}".format(base, str(prerelease))

        self.origin = origin

    @classmethod
    def from_string(cls, version):
        """Create an object of version from string.

        Args:
            version (str): Version as a string.

        Returns:
            Union[OpenPypeVersion, None]: Version object if input is a
                nonempty string, otherwise None.
        """

        if not version:
            return None
        valid_parts = VERSION_REGEX.findall(version)
        if len(valid_parts) != 1:
            # Return invalid version with filled 'origin' attribute
            return cls(None, None, None, None, origin=str(version))

        # Unpack found version
        major, minor, patch, pre, post = valid_parts[0]
        prerelease = pre
        # Post release is not important anymore and should be considered as
        # part of prerelease
        # - comparison is implemented to find suitable build and builds should
        #   never contain prerelease part so "not proper" parsing is
        #   acceptable for this use case.
        if post:
            prerelease = "{}+{}".format(pre, post)

        return cls(
            int(major), int(minor), int(patch), prerelease, origin=version
        )

    def has_compatible_release(self, other):
        """Version has compatible release as other version.

        Both major and minor versions must be exactly the same. In that case
        a build can be considered as release compatible with any version.

        Args:
            other (OpenPypeVersion): Other version.

        Returns:
            bool: Version is release compatible with other version.
        """

        if self.is_valid and other.is_valid:
            return self.major == other.major and self.minor == other.minor
        return False

    def __bool__(self):
        return self.is_valid

    def __repr__(self):
        return "<{} {}>".format(self.__class__.__name__, self.origin)

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return self.origin == other
        return self.origin == other.origin

    def __lt__(self, other):
        if not isinstance(other, self.__class__):
            return None

        if not self.is_valid:
            return True

        if not other.is_valid:
            return False

        if self.origin == other.origin:
            return None

        same_major = self.major == other.major
        if not same_major:
            return self.major < other.major

        same_minor = self.minor == other.minor
        if not same_minor:
            return self.minor < other.minor

        same_patch = self.patch == other.patch
        if not same_patch:
            return self.patch < other.patch

        # A release always sorts above any of its prereleases
        if not self.prerelease:
            return False

        if not other.prerelease:
            return True

        pres = [self.prerelease, other.prerelease]
        pres.sort()
        return pres[0] == self.prerelease
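
# --- Illustrative sketch (not part of the original file) ------------------
# How the comparison semantics above play out; version strings are made up:
#
#   >>> OpenPypeVersion.from_string("3.15.2") < OpenPypeVersion.from_string("3.15.10")
#   True
#   >>> OpenPypeVersion.from_string("3.15.2-nightly.1") < OpenPypeVersion.from_string("3.15.2")
#   True   # a prerelease sorts below its release
#   >>> bool(OpenPypeVersion.from_string("not-a-version"))
#   False  # invalid input yields an object with is_valid == False
# ---------------------------------------------------------------------------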


def get_openpype_version_from_path(path, build=True):
    """Get OpenPype version from provided path.

    Args:
        path (str): Path to scan.
        build (bool, optional): Get only builds, not sources.

    Returns:
        Union[OpenPypeVersion, None]: Version of OpenPype if found.
    """

    # fix path for application bundle on macos
    if platform.system().lower() == "darwin":
        path = os.path.join(path, "MacOS")

    version_file = os.path.join(path, "openpype", "version.py")
    if not os.path.isfile(version_file):
        return None

    # skip if the version is not a build
    exe = os.path.join(path, "openpype_console.exe")
    if platform.system().lower() in ["linux", "darwin"]:
        exe = os.path.join(path, "openpype_console")

    # if only builds are requested
    if build and not os.path.isfile(exe):
        print(" ! path is not a build: {}".format(path))
        return None

    version = {}
    with open(version_file, "r") as vf:
        exec(vf.read(), version)

    version_str = version.get("__version__")
    if version_str:
        return OpenPypeVersion.from_string(version_str)
    return None


def get_openpype_executable():
    """Return OpenPype executable from Event Plug-in settings."""
    config = RepositoryUtils.GetPluginConfig("OpenPype")
    exe_list = config.GetConfigEntryWithDefault("OpenPypeExecutable", "")
    dir_list = config.GetConfigEntryWithDefault(
        "OpenPypeInstallationDirs", "")

    # clean '\ ' for MacOS pasting
    if platform.system().lower() == "darwin":
        exe_list = exe_list.replace("\\ ", " ")
        dir_list = dir_list.replace("\\ ", " ")
    return exe_list, dir_list


def get_openpype_versions(dir_list):
    print(">>> Getting OpenPype executable ...")
    openpype_versions = []

    # special case of multiple install dirs
    for single_dir_list in dir_list.split(","):
        install_dir = DirectoryUtils.SearchDirectoryList(single_dir_list)
        if install_dir:
            print("--- Looking for OpenPype at: {}".format(install_dir))
            sub_dirs = [
                f.path for f in os.scandir(install_dir)
                if f.is_dir()
            ]
            for subdir in sub_dirs:
                version = get_openpype_version_from_path(subdir)
                if not version:
                    continue
                print(" - found: {} - {}".format(version, subdir))
                openpype_versions.append((version, subdir))
    return openpype_versions


def get_requested_openpype_executable(
    exe, dir_list, requested_version
):
    requested_version_obj = OpenPypeVersion.from_string(requested_version)
    if not requested_version_obj:
        print((
            ">>> Requested version '{}' does not match version regex '{}'"
        ).format(requested_version, VERSION_REGEX))
        return None

    print((
        ">>> Scanning for compatible requested version {}"
    ).format(requested_version))
    openpype_versions = get_openpype_versions(dir_list)
    if not openpype_versions:
        return None

    # if looking for requested compatible version,
    # add the implicitly specified to the list too.
    if exe:
        exe_dir = os.path.dirname(exe)
        print("Looking for OpenPype at: {}".format(exe_dir))
        version = get_openpype_version_from_path(exe_dir)
        if version:
            print(" - found: {} - {}".format(version, exe_dir))
            openpype_versions.append((version, exe_dir))

    matching_item = None
    compatible_versions = []
    for version_item in openpype_versions:
        version, version_dir = version_item
        if requested_version_obj.has_compatible_release(version):
            compatible_versions.append(version_item)
            if version == requested_version_obj:
                # Store version item if version matches exactly
                # - break once the matching version is found
                matching_item = version_item
                break

    if not compatible_versions:
        return None

    compatible_versions.sort(key=lambda item: item[0])
    if matching_item:
        version, version_dir = matching_item
        print((
            "*** Found exact match build version {} in {}"
        ).format(version, version_dir))

    else:
        version, version_dir = compatible_versions[-1]

        print((
            "*** Latest compatible version found is {} in {}"
        ).format(version, version_dir))

    # create list of executables for different platforms and let
    # Deadline decide.
    exe_list = [
        os.path.join(version_dir, "openpype_console.exe"),
        os.path.join(version_dir, "openpype_console"),
        os.path.join(version_dir, "MacOS", "openpype_console")
    ]
    return FileUtils.SearchFileList(";".join(exe_list))
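
# --- Illustrative sketch (not part of the original file) ------------------
# Selection rule shown with made-up versions: for a job requesting "3.15.2"
# with installed builds 3.15.1, 3.15.4 and 3.16.0, only the 3.15.x builds
# are release compatible (same major.minor); 3.15.2 itself would win as an
# exact match, otherwise the highest compatible build (3.15.4) is used and
# 3.16.0 is never considered.
# ---------------------------------------------------------------------------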


def inject_openpype_environment(deadlinePlugin):
    """Pull env vars from OpenPype and push them to rendering process.

    Used for correct paths, configuration from OpenPype etc.
    """
    job = deadlinePlugin.GetJob()

    print(">>> Injecting OpenPype environments ...")
    try:
        exe_list, dir_list = get_openpype_executable()
        exe = FileUtils.SearchFileList(exe_list)

        requested_version = job.GetJobEnvironmentKeyValue("OPENPYPE_VERSION")
        if requested_version:
            exe = get_requested_openpype_executable(
                exe, dir_list, requested_version
            )
            if exe is None:
                raise RuntimeError((
                    "Cannot find compatible version available for version {}"
                    " requested by the job. Please add it through plugin"
                    " configuration in Deadline or install it to a configured"
                    " directory."
                ).format(requested_version))

        if not exe:
            raise RuntimeError((
                "OpenPype executable was not found in the semicolon"
                " separated list \"{}\"."
                " The path to the render executable can be configured"
                " from the Plugin Configuration in the Deadline Monitor."
            ).format(exe_list))

        print("--- OpenPype executable: {}".format(exe))

        # tempfile.TemporaryFile cannot be used because of locking
        temp_file_name = "{}_{}.json".format(
            datetime.utcnow().strftime("%Y%m%d%H%M%S%f"),
            str(uuid.uuid1())
        )
        export_url = os.path.join(tempfile.gettempdir(), temp_file_name)
        print(">>> Temporary path: {}".format(export_url))

        args = [
            "--headless",
            "extractenvironments",
            export_url
        ]

        add_kwargs = {
            "project": job.GetJobEnvironmentKeyValue("AVALON_PROJECT"),
            "asset": job.GetJobEnvironmentKeyValue("AVALON_ASSET"),
            "task": job.GetJobEnvironmentKeyValue("AVALON_TASK"),
            "app": job.GetJobEnvironmentKeyValue("AVALON_APP_NAME"),
            "envgroup": "farm"
        }

        # use legacy IS_TEST env var to mark automatic tests for OP
        if job.GetJobEnvironmentKeyValue("IS_TEST"):
            args.append("--automatic-tests")

        if all(add_kwargs.values()):
            for key, value in add_kwargs.items():
                args.extend(["--{}".format(key), value])
        else:
            raise RuntimeError((
                "Missing required env vars: AVALON_PROJECT, AVALON_ASSET,"
                " AVALON_TASK, AVALON_APP_NAME"
            ))

        openpype_mongo = job.GetJobEnvironmentKeyValue("OPENPYPE_MONGO")
        if openpype_mongo:
            # inject env var for OP extractenvironments
            # SetEnvironmentVariable is important, not SetProcessEnv...
            deadlinePlugin.SetEnvironmentVariable("OPENPYPE_MONGO",
                                                  openpype_mongo)

        if not os.environ.get("OPENPYPE_MONGO"):
            print(">>> Missing OPENPYPE_MONGO env var, process won't work")

        os.environ["AVALON_TIMEOUT"] = "5000"

        args_str = subprocess.list2cmdline(args)
        print(">>> Executing: {} {}".format(exe, args_str))
        process_exitcode = deadlinePlugin.RunProcess(
            exe, args_str, os.path.dirname(exe), -1
        )

        if process_exitcode != 0:
            raise RuntimeError(
                "Failed to run OpenPype process to extract environments."
            )

        print(">>> Loading file ...")
        with open(export_url) as fp:
            contents = json.load(fp)

        for key, value in contents.items():
            deadlinePlugin.SetProcessEnvironmentVariable(key, value)

        if "PATH" in contents:
            # Set os.environ["PATH"] so studio settings' path entries
            # can be used to define the search path for executables.
            print(f">>> Setting 'PATH' Environment to: {contents['PATH']}")
            os.environ["PATH"] = contents["PATH"]

        script_url = job.GetJobPluginInfoKeyValue("ScriptFilename")
        if script_url:
            script_url = script_url.format(**contents).replace("\\", "/")
            print(">>> Setting script path {}".format(script_url))
            job.SetJobPluginInfoKeyValue("ScriptFilename", script_url)

        print(">>> Removing temporary file")
        os.remove(export_url)

        print(">> Injection end.")
    except Exception as e:
        if hasattr(e, "output"):
            print(">>> Exception {}".format(e.output))
        import traceback
        print(traceback.format_exc())
        print("!!! Injection failed.")
        RepositoryUtils.FailJob(job)
        raise
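
# --- Illustrative sketch (not part of the original file) ------------------
# With made-up job values, the command line built above looks like:
#
#   openpype_console --headless extractenvironments <temp>/<timestamp>_<uuid>.json
#       --project MyProject --asset sh010 --task compositing
#       --app nuke/13-2 --envgroup farm
#
# The JSON written by that call is a flat {env var: value} mapping, which is
# then applied to the render process through SetProcessEnvironmentVariable.
# ---------------------------------------------------------------------------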


def inject_ayon_environment(deadlinePlugin):
    """Pull env vars from AYON and push them to rendering process.

    Used for correct paths, configuration from AYON etc.
    """
    job = deadlinePlugin.GetJob()

    print(">>> Injecting AYON environments ...")
    try:
        exe_list = get_ayon_executable()
        exe = FileUtils.SearchFileList(exe_list)

        if not exe:
            raise RuntimeError((
                "AYON executable was not found in the semicolon"
                " separated list \"{}\"."
                " The path to the render executable can be configured"
                " from the Plugin Configuration in the Deadline Monitor."
            ).format(exe_list))

        print("--- AYON executable: {}".format(exe))

        ayon_bundle_name = job.GetJobEnvironmentKeyValue("AYON_BUNDLE_NAME")
        if not ayon_bundle_name:
            raise RuntimeError(
                "Missing env var in job properties AYON_BUNDLE_NAME"
            )

        config = RepositoryUtils.GetPluginConfig("Ayon")
        ayon_server_url = (
            job.GetJobEnvironmentKeyValue("AYON_SERVER_URL") or
            config.GetConfigEntryWithDefault("AyonServerUrl", "")
        )
        ayon_api_key = (
            job.GetJobEnvironmentKeyValue("AYON_API_KEY") or
            config.GetConfigEntryWithDefault("AyonApiKey", "")
        )

        if not all([ayon_server_url, ayon_api_key]):
            raise RuntimeError((
                "Missing required values for server url and api key."
                " Please fill them in the Ayon Deadline plugin or provide"
                " them via AYON_SERVER_URL and AYON_API_KEY."
            ))

        # tempfile.TemporaryFile cannot be used because of locking
        temp_file_name = "{}_{}.json".format(
            datetime.utcnow().strftime("%Y%m%d%H%M%S%f"),
            str(uuid.uuid1())
        )
        export_url = os.path.join(tempfile.gettempdir(), temp_file_name)
        print(">>> Temporary path: {}".format(export_url))

        add_kwargs = {
            "envgroup": "farm",
        }
        # Support backwards compatible keys
        for key, env_keys in (
            ("project", ["AYON_PROJECT_NAME", "AVALON_PROJECT"]),
            ("folder", ["AYON_FOLDER_PATH", "AVALON_ASSET"]),
            ("task", ["AYON_TASK_NAME", "AVALON_TASK"]),
            ("app", ["AYON_APP_NAME", "AVALON_APP_NAME"]),
        ):
            value = ""
            for env_key in env_keys:
                value = job.GetJobEnvironmentKeyValue(env_key)
                if value:
                    break
            add_kwargs[key] = value

        if not all(add_kwargs.values()):
            raise RuntimeError((
                "Missing required env vars: AYON_PROJECT_NAME,"
                " AYON_FOLDER_PATH, AYON_TASK_NAME, AYON_APP_NAME"
            ))

        # Use applications addon arguments
        # TODO validate if applications addon should be used
        args = [
            "--headless",
            "addon",
            "applications",
            "extractenvironments",
            export_url
        ]
        # Backwards compatibility for older versions
        legacy_args = [
            "--headless",
            "extractenvironments",
            export_url
        ]

        for key, value in add_kwargs.items():
            args.extend(["--{}".format(key), value])
            # Legacy arguments expect '--asset' instead of '--folder'
            if key == "folder":
                key = "asset"
            legacy_args.extend(["--{}".format(key), value])

        environment = {
            "AYON_SERVER_URL": ayon_server_url,
            "AYON_API_KEY": ayon_api_key,
            "AYON_BUNDLE_NAME": ayon_bundle_name,
        }

        automatic_tests = job.GetJobEnvironmentKeyValue("AYON_IN_TESTS")
        if automatic_tests:
            environment["AYON_IN_TESTS"] = automatic_tests
        for env, val in environment.items():
            # Add the env var for the Render Plugin that is about to render
            deadlinePlugin.SetEnvironmentVariable(env, val)
            # Add the env var for current calls to `DeadlinePlugin.RunProcess`
            deadlinePlugin.SetProcessEnvironmentVariable(env, val)

        args_str = subprocess.list2cmdline(args)
        print(">>> Executing: {} {}".format(exe, args_str))
        process_exitcode = deadlinePlugin.RunProcess(
            exe, args_str, os.path.dirname(exe), -1
        )

        if process_exitcode != 0:
            print(
                "Failed to run AYON process to extract environments. Trying"
                " to use legacy arguments."
            )
            legacy_args_str = subprocess.list2cmdline(legacy_args)
            process_exitcode = deadlinePlugin.RunProcess(
                exe, legacy_args_str, os.path.dirname(exe), -1
            )
            if process_exitcode != 0:
                raise RuntimeError(
                    "Failed to run AYON process to extract environments."
                )

        print(">>> Loading file ...")
        with open(export_url) as fp:
            contents = json.load(fp)

        for key, value in contents.items():
            deadlinePlugin.SetProcessEnvironmentVariable(key, value)

        if "PATH" in contents:
            # Set os.environ["PATH"] so studio settings' path entries
            # can be used to define the search path for executables.
            print(f">>> Setting 'PATH' Environment to: {contents['PATH']}")
            os.environ["PATH"] = contents["PATH"]

        script_url = job.GetJobPluginInfoKeyValue("ScriptFilename")
        if script_url:
            script_url = script_url.format(**contents).replace("\\", "/")
            print(">>> Setting script path {}".format(script_url))
            job.SetJobPluginInfoKeyValue("ScriptFilename", script_url)

        print(">>> Removing temporary file")
        os.remove(export_url)

        print(">> Injection end.")
    except Exception as e:
        if hasattr(e, "output"):
            print(">>> Exception {}".format(e.output))
        import traceback
        print(traceback.format_exc())
        print("!!! Injection failed.")
        RepositoryUtils.FailJob(job)
        raise
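
# --- Illustrative sketch (not part of the original file) ------------------
# For a job with made-up values project=MyProject, folder=/shots/sh010,
# task=comp, app=nuke/13-2, the two argument sets built above differ only
# in the subcommand prefix and the folder/asset flag name:
#
#   new:    --headless addon applications extractenvironments <json>
#               --envgroup farm --project MyProject --folder /shots/sh010
#               --task comp --app nuke/13-2
#   legacy: --headless extractenvironments <json>
#               --envgroup farm --project MyProject --asset /shots/sh010
#               --task comp --app nuke/13-2
# ---------------------------------------------------------------------------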


def get_ayon_executable():
    """Return AYON executable from Event Plug-in settings.

    Returns:
        str: Semicolon separated list of AYON executable paths.

    Raises:
        RuntimeError: When no path is configured at all.

    """
    config = RepositoryUtils.GetPluginConfig("Ayon")
    exe_list = config.GetConfigEntryWithDefault("AyonExecutable", "")

    if not exe_list:
        raise RuntimeError(
            "Path to AYON executable not configured."
            " Please set it in the Ayon Deadline Plugin."
        )

    # clean '\ ' for MacOS pasting
    if platform.system().lower() == "darwin":
        exe_list = exe_list.replace("\\ ", " ")

    # Expand user paths
    expanded_paths = []
    for path in exe_list.split(";"):
        if path.startswith("~"):
            path = os.path.expanduser(path)
        expanded_paths.append(path)
    return ";".join(expanded_paths)


def inject_render_job_id(deadlinePlugin):
    """Inject dependency ids to publish process as env var for validation."""
    print(">>> Injecting render job id ...")
    job = deadlinePlugin.GetJob()

    dependency_ids = job.JobDependencyIDs
    print(">>> Dependency IDs: {}".format(dependency_ids))
    render_job_ids = ",".join(dependency_ids)

    deadlinePlugin.SetProcessEnvironmentVariable(
        "RENDER_JOB_IDS", render_job_ids
    )
    print(">>> Injection end.")


def __main__(deadlinePlugin):
    print("*** GlobalJobPreload {} start ...".format(__version__))
    print(">>> Getting job ...")
    job = deadlinePlugin.GetJob()

    openpype_render_job = job.GetJobEnvironmentKeyValue(
        "OPENPYPE_RENDER_JOB")
    openpype_publish_job = job.GetJobEnvironmentKeyValue(
        "OPENPYPE_PUBLISH_JOB")
    openpype_remote_job = job.GetJobEnvironmentKeyValue(
        "OPENPYPE_REMOTE_PUBLISH")

    if openpype_publish_job == "1" and openpype_render_job == "1":
        raise RuntimeError(
            "Misconfiguration. Job couldn't be both render and publish."
        )

    if openpype_publish_job == "1":
        inject_render_job_id(deadlinePlugin)
    if openpype_render_job == "1" or openpype_remote_job == "1":
        inject_openpype_environment(deadlinePlugin)

    ayon_render_job = job.GetJobEnvironmentKeyValue("AYON_RENDER_JOB")
    ayon_publish_job = job.GetJobEnvironmentKeyValue("AYON_PUBLISH_JOB")
    ayon_remote_job = job.GetJobEnvironmentKeyValue("AYON_REMOTE_PUBLISH")

    if ayon_publish_job == "1" and ayon_render_job == "1":
        raise RuntimeError(
            "Misconfiguration. Job couldn't be both render and publish."
        )

    if ayon_publish_job == "1":
        inject_render_job_id(deadlinePlugin)
    if ayon_render_job == "1" or ayon_remote_job == "1":
        inject_ayon_environment(deadlinePlugin)
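
# --- Illustrative sketch (not part of the original file) ------------------
# Dispatch summary: a job carrying AYON_RENDER_JOB=1 (or AYON_REMOTE_PUBLISH=1)
# gets the full AYON environment injected before rendering, while a job
# carrying AYON_PUBLISH_JOB=1 only receives RENDER_JOB_IDS (the ids of the
# render jobs it depends on) so the publish process can validate their output.
# The OPENPYPE_* keys drive the same logic for legacy OpenPype jobs.
# ---------------------------------------------------------------------------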
Binary file not shown.
Before Width: | Height: | Size: 1.1 KiB

@ -1,532 +0,0 @@
[SceneFile]
Type=filename
Label=Scene Filename
Category=Global Settings
CategoryOrder=0
Index=0
Description=The scene filename as it exists on the network.
Required=false
DisableIfBlank=true

[Environment]
Type=filename
Label=Scene Environment
Category=Global Settings
CategoryOrder=0
Index=1
Description=The Environment for the scene.
Required=false
DisableIfBlank=true

[Job]
Type=filename
Label=Scene Job
Category=Global Settings
CategoryOrder=0
Index=2
Description=The Job that the scene belongs to.
Required=false
DisableIfBlank=true

[SceneName]
Type=filename
Label=Scene Name
Category=Global Settings
CategoryOrder=0
Index=3
Description=The name of the scene to render.
Required=false
DisableIfBlank=true

[SceneVersion]
Type=filename
Label=Scene Version
Category=Global Settings
CategoryOrder=0
Index=4
Description=The version of the scene to render.
Required=false
DisableIfBlank=true

[Version]
Type=enum
Values=10;11;12
Label=Harmony Version
Category=Global Settings
CategoryOrder=0
Index=5
Description=The version of Harmony to use.
Required=false
DisableIfBlank=true

[IsDatabase]
Type=Boolean
Label=Is Database Scene
Category=Global Settings
CategoryOrder=0
Index=6
Description=Whether or not the scene is in the database.
Required=false
DisableIfBlank=true

[Camera]
Type=string
Label=Camera
Category=Render Settings
CategoryOrder=1
Index=0
Description=Specifies the camera to use for rendering images. If blank, the scene will be rendered with the current camera.
Required=false
DisableIfBlank=true

[UsingResPreset]
Type=Boolean
Label=Use Resolution Preset
Category=Render Settings
CategoryOrder=1
Index=1
Description=Whether or not you are using a resolution preset.
Required=false
DisableIfBlank=true

[ResolutionName]
Type=enum
Values=HDTV_1080p24;HDTV_1080p25;HDTV_720p24;4K_UHD;8K_UHD;DCI_2K;DCI_4K;film-2K;film-4K;film-1.33_H;film-1.66_H;film-1.66_V;Cineon;NTSC;PAL;2160p;1440p;1080p;720p;480p;360p;240p;low;Web_Video;Game_512;Game_512_Ortho;WebCC_Preview;Custom
Label=Resolution Preset
Category=Render Settings
CategoryOrder=1
Index=2
Description=The resolution preset to use.
Required=true
Default=HDTV_1080p24

[PresetName]
Type=string
Label=Preset Name
Category=Render Settings
CategoryOrder=1
Index=3
Description=Specify the custom resolution name.
Required=true
Default=

[ResolutionX]
Type=integer
Label=Resolution X
Minimum=0
Maximum=1000000
Category=Render Settings
CategoryOrder=1
Index=4
Description=Specifies the width of the rendered images. If 0, then the current resolution and field of view will be used.
Required=true
Default=1920

[ResolutionY]
Type=integer
Label=Resolution Y
Minimum=0
Maximum=1000000
Category=Render Settings
CategoryOrder=1
Index=5
Description=Specifies the height of the rendered images. If 0, then the current resolution and field of view will be used.
Required=true
Default=1080

[FieldOfView]
Type=float
Label=Field Of View
Minimum=0
Maximum=89
DecimalPlaces=2
Category=Render Settings
CategoryOrder=1
Index=6
Description=Specifies the field of view of the rendered images. If 0, then the current resolution and field of view will be used.
Required=true
Default=41.11

[Output0Node]
Type=string
Label=Render Node 0 Name
Category=Output Settings
CategoryOrder=2
Index=0
Description=The name of the render node.
Required=false
DisableIfBlank=true

[Output0Type]
Type=enum
Values=Image;Movie
Label=Render Node 0 Type
Category=Output Settings
CategoryOrder=2
Index=1
Description=The type of output that the render node is producing.
Required=false
DisableIfBlank=true

[Output0Path]
Type=string
Label=Render Node 0 Path
Category=Output Settings
CategoryOrder=2
Index=2
Description=The output path and file name of the output files.
Required=false
DisableIfBlank=true

[Output0LeadingZero]
Type=integer
Label=Render Node 0 Leading Zeroes
Category=Output Settings
CategoryOrder=2
Minimum=0
Maximum=5
Index=3
Description=The number of leading zeroes for a 1 digit frame number. (1 less than the full padded length)
Required=false
DisableIfBlank=true

[Output0Format]
Type=string
Label=Render Node 0 Format
Category=Output Settings
CategoryOrder=2
Index=4
Description=The format for the rendered output images.
Required=false
DisableIfBlank=true

[Output0StartFrame]
Type=integer
Label=Render Node 0 Start Frame
Category=Output Settings
CategoryOrder=2
Minimum=1
Index=5
Description=The frame that will correspond to frame one when numbering. If this value is not 1 then the monitor's job output features will not work properly.
Required=false
DisableIfBlank=true

[Output1Node]
Type=string
Label=Render Node 1 Name
Category=Output Settings
CategoryOrder=2
Index=6
Description=The name of the render node.
Required=false
DisableIfBlank=true

[Output1Type]
Type=enum
Values=Image;Movie
Label=Render Node 1 Type
Category=Output Settings
CategoryOrder=2
Index=7
Description=The type of output that the render node is producing.
Required=false
DisableIfBlank=true

[Output1Path]
Type=string
Label=Render Node 1 Path
Category=Output Settings
CategoryOrder=2
Index=8
Description=The output path and file name of the output files.
Required=false
DisableIfBlank=true

[Output1LeadingZero]
Type=integer
Label=Render Node 1 Leading Zeroes
Category=Output Settings
CategoryOrder=2
Minimum=0
Maximum=5
Index=9
Description=The number of leading zeroes for a 1 digit frame number. (1 less than the full padded length)
Required=false
DisableIfBlank=true

[Output1Format]
Type=string
Label=Render Node 1 Format
Category=Output Settings
CategoryOrder=2
Index=10
Description=The format for the rendered output images.
Required=false
DisableIfBlank=true

[Output1StartFrame]
Type=integer
Label=Render Node 1 Start Frame
Category=Output Settings
CategoryOrder=2
Minimum=1
Index=11
Description=The frame that will correspond to frame one when numbering. If this value is not 1 then the monitor's job output features will not work properly.
Required=false
DisableIfBlank=true

[Output2Node]
Type=string
Label=Render Node 2 Name
Category=Output Settings
CategoryOrder=2
Index=12
Description=The name of the render node.
Required=false
DisableIfBlank=true

[Output2Type]
Type=enum
Values=Image;Movie
Label=Render Node 2 Type
Category=Output Settings
CategoryOrder=2
Index=13
Description=The type of output that the render node is producing.
Required=false
DisableIfBlank=true

[Output2Path]
Type=string
Label=Render Node 2 Path
Category=Output Settings
CategoryOrder=2
Index=14
Description=The output path and file name of the output files.
Required=false
DisableIfBlank=true

[Output2LeadingZero]
Type=integer
Label=Render Node 2 Leading Zeroes
Category=Output Settings
CategoryOrder=2
Minimum=0
Maximum=5
Index=15
Description=The number of leading zeroes for a 1 digit frame number. (1 less than the full padded length)
Required=false
DisableIfBlank=true

[Output2Format]
Type=string
Label=Render Node 2 Format
Category=Output Settings
CategoryOrder=2
Index=16
Description=The format for the rendered output images.
Required=false
DisableIfBlank=true

[Output2StartFrame]
Type=integer
Label=Render Node 2 Start Frame
Category=Output Settings
CategoryOrder=2
Minimum=1
Index=17
Description=The frame that will correspond to frame one when numbering. If this value is not 1 then the monitor's job output features will not work properly.
Required=false
DisableIfBlank=true

[Output3Node]
Type=string
Label=Render Node 3 Name
Category=Output Settings
CategoryOrder=2
Index=18
Description=The name of the render node.
Required=false
DisableIfBlank=true

[Output3Type]
Type=enum
Values=Image;Movie
Label=Render Node 3 Type
Category=Output Settings
CategoryOrder=2
Index=19
Description=The type of output that the render node is producing.
Required=false
DisableIfBlank=true

[Output3Path]
Type=string
Label=Render Node 3 Path
Category=Output Settings
CategoryOrder=2
Index=20
Description=The output path and file name of the output files.
Required=false
DisableIfBlank=true

[Output3LeadingZero]
Type=integer
Label=Render Node 3 Leading Zeroes
Category=Output Settings
CategoryOrder=2
Minimum=0
Maximum=5
Index=21
Description=The number of leading zeroes for a 1 digit frame number. (1 less than the full padded length)
Required=false
DisableIfBlank=true

[Output3Format]
Type=string
Label=Render Node 3 Format
Category=Output Settings
CategoryOrder=2
Index=22
Description=The format for the rendered output images.
Required=false
DisableIfBlank=true

[Output3StartFrame]
Type=integer
Label=Render Node 3 Start Frame
Category=Output Settings
CategoryOrder=2
Minimum=1
Index=23
Description=The frame that will correspond to frame one when numbering. If this value is not 1 then the monitor's job output features will not work properly.
Required=false
DisableIfBlank=true

[Output4Node]
Type=string
Label=Render Node 4 Name
Category=Output Settings
CategoryOrder=2
Index=24
Description=The name of the render node.
Required=false
DisableIfBlank=true

[Output4Type]
Type=enum
Values=Image;Movie
Label=Render Node 4 Type
Category=Output Settings
CategoryOrder=2
Index=25
Description=The type of output that the render node is producing.
Required=false
DisableIfBlank=true

[Output4Path]
Type=string
Label=Render Node 4 Path
Category=Output Settings
CategoryOrder=2
Index=26
Description=The output path and file name of the output files.
Required=false
DisableIfBlank=true

[Output4LeadingZero]
Type=integer
Label=Render Node 4 Leading Zeroes
Category=Output Settings
CategoryOrder=2
Minimum=0
Maximum=5
Index=27
Description=The number of leading zeroes for a 1 digit frame number. (1 less than the full padded length)
Required=false
DisableIfBlank=true

[Output4Format]
Type=string
Label=Render Node 4 Format
Category=Output Settings
CategoryOrder=2
Index=28
Description=The format for the rendered output images.
Required=false
DisableIfBlank=true

[Output4StartFrame]
Type=integer
Label=Render Node 4 Start Frame
Category=Output Settings
CategoryOrder=2
Minimum=1
Index=29
Description=The frame that will correspond to frame one when numbering. If this value is not 1 then the monitor's job output features will not work properly.
Required=false
DisableIfBlank=true

[Output5Node]
Type=string
Label=Render Node 5 Name
Category=Output Settings
CategoryOrder=2
Index=30
Description=The name of the render node.
Required=false
DisableIfBlank=true

[Output5Type]
Type=enum
Values=Image;Movie
Label=Render Node 5 Type
Category=Output Settings
CategoryOrder=2
Index=31
Description=The type of output that the render node is producing.
Required=false
DisableIfBlank=true

[Output5Path]
Type=string
Label=Render Node 5 Path
Category=Output Settings
CategoryOrder=2
Index=32
Description=The output path and file name of the output files.
Required=false
DisableIfBlank=true

[Output5LeadingZero]
Type=integer
Label=Render Node 5 Leading Zeroes
Category=Output Settings
CategoryOrder=2
Minimum=0
Maximum=5
Index=33
Description=The number of leading zeroes for a 1 digit frame number. (1 less than the full padded length)
Required=false
DisableIfBlank=true

[Output5Format]
Type=string
Label=Render Node 5 Format
Category=Output Settings
CategoryOrder=2
Index=34
Description=The format for the rendered output images.
Required=false
DisableIfBlank=true

[Output5StartFrame]
Type=integer
Label=Render Node 5 Start Frame
Category=Output Settings
CategoryOrder=2
Minimum=1
Index=35
Description=The frame that will correspond to frame one when numbering. If this value is not 1 then the monitor's job output features will not work properly.
Required=false
DisableIfBlank=true

@ -1,98 +0,0 @@
[About]
Type=label
Label=About
Category=About Plugin
CategoryOrder=-1
Index=0
Default=Harmony Render Plugin for Deadline
Description=Not configurable

[ConcurrentTasks]
Type=label
Label=ConcurrentTasks
Category=About Plugin
CategoryOrder=-1
Index=1
Default=True
Description=Not configurable

[Harmony_RenderExecutable_10]
Type=multilinemultifilename
Category=Render Executables
CategoryOrder=0
Index=0
Label=Harmony 10 Render Executable
Description=The path to the Harmony Render executable file used for rendering. Enter alternative paths on separate lines.
Default=C:\Program Files (x86)\Toon Boom Animation\Toon Boom Harmony 10.0\win64\bin\Stage.exe

[Harmony_RenderExecutable_11]
Type=multilinemultifilename
Category=Render Executables
CategoryOrder=0
Index=1
Label=Harmony 11 Render Executable
Description=The path to the Harmony Render executable file used for rendering. Enter alternative paths on separate lines.
Default=C:\Program Files (x86)\Toon Boom Animation\Toon Boom Harmony 11.0\win64\bin\Stage.exe

[Harmony_RenderExecutable_12]
Type=multilinemultifilename
Category=Render Executables
CategoryOrder=0
Index=2
Label=Harmony 12 Render Executable
Description=The path to the Harmony Render executable file used for rendering. Enter alternative paths on separate lines.
Default=C:\Program Files (x86)\Toon Boom Animation\Toon Boom Harmony 12.0 Premium\win64\bin\HarmonyPremium.exe;/Applications/Toon Boom Harmony 12.0 Premium/Harmony Premium.app/Contents/MacOS/Harmony Premium;/usr/local/ToonBoomAnimation/harmonyPremium_12/lnx86_64/bin/HarmonyPremium

[Harmony_RenderExecutable_14]
Type=multilinemultifilename
Category=Render Executables
CategoryOrder=0
Index=3
Label=Harmony 14 Render Executable
Description=The path to the Harmony Render executable file used for rendering. Enter alternative paths on separate lines.
Default=C:\Program Files (x86)\Toon Boom Animation\Toon Boom Harmony 14.0 Premium\win64\bin\HarmonyPremium.exe;/Applications/Toon Boom Harmony 14.0 Premium/Harmony Premium.app/Contents/MacOS/Harmony Premium;/usr/local/ToonBoomAnimation/harmonyPremium_14/lnx86_64/bin/HarmonyPremium

[Harmony_RenderExecutable_15]
Type=multilinemultifilename
Category=Render Executables
CategoryOrder=0
Index=4
Label=Harmony 15 Render Executable
Description=The path to the Harmony Render executable file used for rendering. Enter alternative paths on separate lines.
Default=C:\Program Files (x86)\Toon Boom Animation\Toon Boom Harmony 15.0 Premium\win64\bin\HarmonyPremium.exe;/Applications/Toon Boom Harmony 15.0 Premium/Harmony Premium.app/Contents/MacOS/Harmony Premium;/usr/local/ToonBoomAnimation/harmonyPremium_15.0/lnx86_64/bin/HarmonyPremium

[Harmony_RenderExecutable_17]
Type=multilinemultifilename
Category=Render Executables
CategoryOrder=0
Index=5
Label=Harmony 17 Render Executable
Description=The path to the Harmony Render executable file used for rendering. Enter alternative paths on separate lines.
Default=C:\Program Files (x86)\Toon Boom Animation\Toon Boom Harmony 17 Premium\win64\bin\HarmonyPremium.exe;/Applications/Toon Boom Harmony 17 Premium/Harmony Premium.app/Contents/MacOS/Harmony Premium;/usr/local/ToonBoomAnimation/harmonyPremium_17/lnx86_64/bin/HarmonyPremium

[Harmony_RenderExecutable_20]
Type=multilinemultifilename
Category=Render Executables
CategoryOrder=0
Index=6
Label=Harmony 20 Render Executable
Description=The path to the Harmony Render executable file used for rendering. Enter alternative paths on separate lines.
Default=C:\Program Files (x86)\Toon Boom Animation\Toon Boom Harmony 20 Premium\win64\bin\HarmonyPremium.exe;/Applications/Toon Boom Harmony 20 Premium/Harmony Premium.app/Contents/MacOS/Harmony Premium;/usr/local/ToonBoomAnimation/harmonyPremium_20/lnx86_64/bin/HarmonyPremium

[Harmony_RenderExecutable_21]
Type=multilinemultifilename
Category=Render Executables
CategoryOrder=0
Index=7
Label=Harmony 21 Render Executable
Description=The path to the Harmony Render executable file used for rendering. Enter alternative paths on separate lines.
Default=C:\Program Files (x86)\Toon Boom Animation\Toon Boom Harmony 21 Premium\win64\bin\HarmonyPremium.exe;/Applications/Toon Boom Harmony 21 Premium/Harmony Premium.app/Contents/MacOS/Harmony Premium;/usr/local/ToonBoomAnimation/harmonyPremium_21/lnx86_64/bin/HarmonyPremium

[Harmony_RenderExecutable_22]
Type=multilinemultifilename
Category=Render Executables
CategoryOrder=0
Index=8
Label=Harmony 22 Render Executable
Description=The path to the Harmony Render executable file used for rendering. Enter alternative paths on separate lines.
Default=C:\Program Files (x86)\Toon Boom Animation\Toon Boom Harmony 22 Premium\win64\bin\HarmonyPremium.exe;/Applications/Toon Boom Harmony 22 Premium/Harmony Premium.app/Contents/MacOS/Harmony Premium;/usr/local/ToonBoomAnimation/harmonyPremium_22/lnx86_64/bin/HarmonyPremium

@ -1,151 +0,0 @@
#!/usr/bin/env python3
from System import *
from System.Diagnostics import *
from System.IO import *
from System.Text import *

from Deadline.Plugins import *
from Deadline.Scripting import *


def GetDeadlinePlugin():
    return HarmonyAYONPlugin()


def CleanupDeadlinePlugin(deadlinePlugin):
    deadlinePlugin.Cleanup()


class HarmonyAYONPlugin(DeadlinePlugin):

    def __init__(self):
        super().__init__()
        self.InitializeProcessCallback += self.InitializeProcess
        self.RenderExecutableCallback += self.RenderExecutable
        self.RenderArgumentCallback += self.RenderArgument
        self.CheckExitCodeCallback += self.CheckExitCode

    def Cleanup(self):
        print("Cleanup")
        for stdoutHandler in self.StdoutHandlers:
            del stdoutHandler.HandleCallback

        del self.InitializeProcessCallback
        del self.RenderExecutableCallback
        del self.RenderArgumentCallback

    def CheckExitCode(self, exitCode):
        print("check code")
        if exitCode != 0:
            if exitCode == 100:
                self.LogInfo("Renderer reported an error with error code 100. This will be ignored, since the option to ignore it is specified in the Job Properties.")
            else:
                self.FailRender("Renderer returned non-zero error code %d. Check the renderer's output." % exitCode)

    def InitializeProcess(self):
        self.PluginType = PluginType.Simple
        self.StdoutHandling = True
        self.PopupHandling = True

        self.AddStdoutHandlerCallback("Rendered frame ([0-9]+)").HandleCallback += self.HandleStdoutProgress

    def HandleStdoutProgress(self):
        startFrame = self.GetStartFrame()
        endFrame = self.GetEndFrame()
        if endFrame - startFrame + 1 != 0:
            self.SetProgress(100 * (int(self.GetRegexMatch(1)) - startFrame + 1) / (endFrame - startFrame + 1))
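
    # Illustrative note (not part of the original plugin): for a task that
    # spans frames 10-19, a stdout line "Rendered frame 14" yields
    # 100 * (14 - 10 + 1) / 10 = 50.0, i.e. 50% progress.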

    def RenderExecutable(self):
        version = int(self.GetPluginInfoEntry("Version"))
        exe = ""
        exeList = self.GetConfigEntry("Harmony_RenderExecutable_" + str(version))
        exe = FileUtils.SearchFileList(exeList)
        if exe == "":
            self.FailRender("Harmony render executable was not found in the configured semicolon separated list \"" + exeList + "\". The path to the render executable can be configured from the Plugin Configuration in the Deadline Monitor.")
        return exe

    def RenderArgument(self):
        renderArguments = "-batch"

        if self.GetBooleanPluginInfoEntryWithDefault("UsingResPreset", False):
            resName = self.GetPluginInfoEntryWithDefault("ResolutionName", "HDTV_1080p24")
            if resName == "Custom":
                renderArguments += " -res " + self.GetPluginInfoEntryWithDefault("PresetName", "HDTV_1080p24")
            else:
                renderArguments += " -res " + resName
        else:
            resolutionX = self.GetIntegerPluginInfoEntryWithDefault("ResolutionX", -1)
            resolutionY = self.GetIntegerPluginInfoEntryWithDefault("ResolutionY", -1)
            fov = self.GetFloatPluginInfoEntryWithDefault("FieldOfView", -1)

            if resolutionX > 0 and resolutionY > 0 and fov > 0:
                renderArguments += " -res " + str(resolutionX) + " " + str(resolutionY) + " " + str(fov)

        camera = self.GetPluginInfoEntryWithDefault("Camera", "")

        if camera != "":
            renderArguments += " -camera " + camera

        startFrame = str(self.GetStartFrame())
        endFrame = str(self.GetEndFrame())

        renderArguments += " -frames " + startFrame + " " + endFrame

        if not self.GetBooleanPluginInfoEntryWithDefault("IsDatabase", False):
            sceneFilename = self.GetPluginInfoEntryWithDefault("SceneFile", self.GetDataFilename())
            sceneFilename = RepositoryUtils.CheckPathMapping(sceneFilename)
            renderArguments += " \"" + sceneFilename + "\""
        else:
            environment = self.GetPluginInfoEntryWithDefault("Environment", "")
            renderArguments += " -env " + environment
            job = self.GetPluginInfoEntryWithDefault("Job", "")
            renderArguments += " -job " + job
            scene = self.GetPluginInfoEntryWithDefault("SceneName", "")
            renderArguments += " -scene " + scene
            version = self.GetPluginInfoEntryWithDefault("SceneVersion", "")
            renderArguments += " -version " + version

        # Build a pre-render script that redirects the write nodes to the
        # output settings passed in the plugin info.
        rendernodeNum = 0
        scriptBuilder = StringBuilder()

        while True:
            nodeName = self.GetPluginInfoEntryWithDefault("Output" + str(rendernodeNum) + "Node", "")
            if nodeName == "":
                break
            nodeType = self.GetPluginInfoEntryWithDefault("Output" + str(rendernodeNum) + "Type", "Image")
            if nodeType == "Image":
                nodePath = self.GetPluginInfoEntryWithDefault("Output" + str(rendernodeNum) + "Path", "")
                nodeLeadingZero = self.GetPluginInfoEntryWithDefault("Output" + str(rendernodeNum) + "LeadingZero", "")
                nodeFormat = self.GetPluginInfoEntryWithDefault("Output" + str(rendernodeNum) + "Format", "")
                nodeStartFrame = self.GetPluginInfoEntryWithDefault("Output" + str(rendernodeNum) + "StartFrame", "")

                if nodePath != "":
                    scriptBuilder.AppendLine("node.setTextAttr( \"" + nodeName + "\", \"drawingName\", 1, \"" + nodePath + "\" );")

                if nodeLeadingZero != "":
                    scriptBuilder.AppendLine("node.setTextAttr( \"" + nodeName + "\", \"leadingZeros\", 1, \"" + nodeLeadingZero + "\" );")

                if nodeFormat != "":
                    scriptBuilder.AppendLine("node.setTextAttr( \"" + nodeName + "\", \"drawingType\", 1, \"" + nodeFormat + "\" );")

                if nodeStartFrame != "":
                    scriptBuilder.AppendLine("node.setTextAttr( \"" + nodeName + "\", \"start\", 1, \"" + nodeStartFrame + "\" );")

            if nodeType == "Movie":
                nodePath = self.GetPluginInfoEntryWithDefault("Output" + str(rendernodeNum) + "Path", "")
                if nodePath != "":
                    scriptBuilder.AppendLine("node.setTextAttr( \"" + nodeName + "\", \"moviePath\", 1, \"" + nodePath + "\" );")

            rendernodeNum += 1

        tempDirectory = self.CreateTempDirectory("thread" + str(self.GetThreadNumber()))
        preRenderScriptName = Path.Combine(tempDirectory, "preRenderScript.txt")

        File.WriteAllText(preRenderScriptName, scriptBuilder.ToString())

        preRenderInlineScript = self.GetPluginInfoEntryWithDefault("PreRenderInlineScript", "")
        if preRenderInlineScript:
            renderArguments += " -preRenderInlineScript \"" + preRenderInlineScript + "\""

        renderArguments += " -preRenderScript \"" + preRenderScriptName + "\""

        return renderArguments
Binary file not shown.
Before Width: | Height: | Size: 124 KiB

@ -1,35 +0,0 @@
[OIIOToolPath]
Type=filename
Label=OIIO Tool location
Category=OIIO
Index=0
Description=OIIO Tool executable to use.
Required=false
DisableIfBlank=true

[OutputFile]
Type=filenamesave
Label=Output File
Category=Output
Index=0
Description=The output filename for the assembled image as it exists on the network.
Required=false
DisableIfBlank=true

[CleanupTiles]
Type=boolean
Category=Options
Index=0
Label=Cleanup Tiles
Required=false
DisableIfBlank=true
Description=If enabled, the OpenPype Tile Assembler will cleanup all tiles after assembly.

[Renderer]
Type=string
Label=Renderer
Category=Quicktime Info
Index=0
Description=Renderer name.
Required=false
DisableIfBlank=true

@ -1,17 +0,0 @@
[About]
Type=label
Label=About
Category=About Plugin
CategoryOrder=-1
Index=0
Default=OpenPype Tile Assembler Plugin for Deadline
Description=Not configurable

[OIIOTool_RenderExecutable]
Type=multilinemultifilename
Label=OIIO Tool Executable
Category=Render Executables
CategoryOrder=0
Default=C:\Program Files\OIIO\bin\oiiotool.exe;/usr/bin/oiiotool
Description=The path to the Open Image IO Tool executable file used for rendering. Enter alternative paths on separate lines.
@ -1,457 +0,0 @@
# -*- coding: utf-8 -*-
"""Tile Assembler Plugin using Open Image IO tool.

Todo:
    Currently we support only EXRs with their data window set.
"""
import os
import re
import subprocess
import xml.etree.ElementTree

from System.IO import Path

from Deadline.Plugins import DeadlinePlugin
from Deadline.Scripting import (
    FileUtils, RepositoryUtils, SystemUtils)


version_major = 1
version_minor = 0
version_patch = 0
version_string = "{}.{}.{}".format(version_major, version_minor, version_patch)
STRING_TAGS = {
    "format"
}
INT_TAGS = {
    "x", "y", "z",
    "width", "height", "depth",
    "full_x", "full_y", "full_z",
    "full_width", "full_height", "full_depth",
    "tile_width", "tile_height", "tile_depth",
    "nchannels",
    "alpha_channel",
    "z_channel",
    "deep",
    "subimages",
}


XML_CHAR_REF_REGEX_HEX = re.compile(r"&#x?[0-9a-fA-F]+;")

# Regex to parse array attributes
ARRAY_TYPE_REGEX = re.compile(r"^(int|float|string)\[\d+\]$")


def convert_value_by_type_name(value_type, value):
    """Convert value to proper type based on type name.

    In some cases value types have custom python class.
    """

    # Simple types
    if value_type == "string":
        return value

    if value_type == "int":
        return int(value)

    if value_type == "float":
        return float(value)

    # Vectors will probably have more types
    if value_type in ("vec2f", "float2"):
        return [float(item) for item in value.split(",")]

    # Matrices should always be square (3x3, 4x4)
    # - they are returned as list of lists
    if value_type == "matrix":
        output = []
        current_index = -1
        parts = value.split(",")
        parts_len = len(parts)
        if parts_len == 1:
            divisor = 1
        elif parts_len == 4:
            divisor = 2
        elif parts_len == 9:
            divisor = 3
        elif parts_len == 16:
            divisor = 4
        else:
            print("Unknown matrix resolution {}. Value: \"{}\"".format(
                parts_len, value
            ))
            # Fall back to a flat list of floats
            for part in parts:
                output.append(float(part))
            return output

        for idx, item in enumerate(parts):
            list_index = idx % divisor
            if list_index > current_index:
                current_index = list_index
                output.append([])
            output[list_index].append(float(item))
        return output

    if value_type == "rational2i":
        parts = value.split("/")
        top = float(parts[0])
        bottom = 1.0
        if len(parts) != 1:
            bottom = float(parts[1])
        return float(top) / float(bottom)

    if value_type == "vector":
        parts = [part.strip() for part in value.split(",")]
        output = []
        for part in parts:
            if part == "-nan":
                output.append(None)
                continue
            try:
                part = float(part)
            except ValueError:
                pass
            output.append(part)
        return output

    if value_type == "timecode":
        return value

    # Array of other types is converted to list
    re_result = ARRAY_TYPE_REGEX.findall(value_type)
    if re_result:
        array_type = re_result[0]
        output = []
        for item in value.split(","):
            output.append(
                convert_value_by_type_name(array_type, item)
            )
        return output

    print((
        "Dev note (missing implementation):"
        " Unknown attrib type \"{}\". Value: {}"
    ).format(value_type, value))
    return value
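
# --- Illustrative sketch (not part of the original file) ------------------
# A few conversions the function above performs, with made-up inputs:
#
#   convert_value_by_type_name("int", "42")            -> 42
#   convert_value_by_type_name("vec2f", "1.5,2.0")     -> [1.5, 2.0]
#   convert_value_by_type_name("rational2i", "24/1")   -> 24.0
#   convert_value_by_type_name("int[3]", "1,2,3")      -> [1, 2, 3]
# ---------------------------------------------------------------------------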


def parse_oiio_xml_output(xml_string):
    """Parse xml output from OIIO info command."""
    output = {}
    if not xml_string:
        return output

    # Fix values with ampersand (lazy fix)
    # - oiiotool exports invalid xml which ElementTree can't handle
    #   e.g. "&#01;"
    # WARNING: this will affect even valid character entities. If you need
    # those values correctly, this must take care of valid character ranges.
    # See https://github.com/pypeclub/OpenPype/pull/2729
    matches = XML_CHAR_REF_REGEX_HEX.findall(xml_string)
    for match in matches:
        new_value = match.replace("&", "&amp;")
        xml_string = xml_string.replace(match, new_value)

    tree = xml.etree.ElementTree.fromstring(xml_string)
    attribs = {}
    output["attribs"] = attribs
    for child in tree:
        tag_name = child.tag
        if tag_name == "attrib":
            attrib_def = child.attrib
            value = convert_value_by_type_name(
                attrib_def["type"], child.text
            )

            attribs[attrib_def["name"]] = value
            continue

        # Channels are stored as text on each child
        if tag_name == "channelnames":
            value = []
            for channel in child:
                value.append(channel.text)

        # Convert known integer type tags to int
        elif tag_name in INT_TAGS:
            value = int(child.text)

        # Keep value of known string tags
        elif tag_name in STRING_TAGS:
            value = child.text

        # Keep value as text for unknown tags
        # - feel free to add more tags
        else:
            value = child.text
            print((
                "Dev note (missing implementation):"
                " Unknown tag \"{}\". Value \"{}\""
            ).format(tag_name, value))

        output[child.tag] = value

    return output
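

# A minimal input/output sketch. It assumes the module-level
# XML_CHAR_REF_REGEX_HEX, INT_TAGS and STRING_TAGS constants defined earlier
# in this file; the XML is a hypothetical, trimmed oiiotool report:
#
#   xml_text = (
#       '<ImageSpec>'
#       '<width>1920</width>'
#       '<nchannels>4</nchannels>'
#       '<attrib name="PixelAspectRatio" type="float">1</attrib>'
#       '</ImageSpec>'
#   )
#   info = parse_oiio_xml_output(xml_text)
#   # info["attribs"]["PixelAspectRatio"] == 1.0
#   # info["width"] == 1920  (assuming "width" is listed in INT_TAGS)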


def info_about_input(oiiotool_path, filepath):
    args = [
        oiiotool_path,
        "--info",
        "-v",
        "-i:infoformat=xml",
        filepath
    ]
    # Capture stderr too, so its content is included in the report below
    # instead of being silently lost.
    popen = subprocess.Popen(
        args, stdout=subprocess.PIPE, stderr=subprocess.PIPE
    )
    _stdout, _stderr = popen.communicate()
    output = ""
    if _stdout:
        output += _stdout.decode("utf-8", errors="backslashreplace")

    if _stderr:
        output += _stderr.decode("utf-8", errors="backslashreplace")

    output = output.replace("\r\n", "\n")
    xml_started = False
    lines = []
    for line in output.split("\n"):
        if not xml_started:
            if not line.startswith("<"):
                continue
            xml_started = True
        if xml_started:
            lines.append(line)

    if not xml_started:
        raise ValueError(
            "Failed to read input file \"{}\".\nOutput:\n{}".format(
                filepath, output
            )
        )
    xml_text = "\n".join(lines)
    return parse_oiio_xml_output(xml_text)
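

# Typical call (a sketch; the paths are hypothetical):
#
#   info = info_about_input("/usr/bin/oiiotool", "/tmp/tile_0.exr")
#   width, height = info["width"], info["height"]
#   channels = info["nchannels"]
#
# The keys mirror oiiotool's XML tags, so exact availability depends on the
# oiiotool version that produced the report.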


def GetDeadlinePlugin():  # noqa: N802
    """Helper."""
    return OpenPypeTileAssembler()


def CleanupDeadlinePlugin(deadlinePlugin):  # noqa: N802, N803
    """Helper."""
    deadlinePlugin.cleanup()


class OpenPypeTileAssembler(DeadlinePlugin):
    """Deadline plugin for assembling tiles using OIIO."""

    def __init__(self):
        """Init."""
        super().__init__()
        self.InitializeProcessCallback += self.initialize_process
        self.RenderExecutableCallback += self.render_executable
        self.RenderArgumentCallback += self.render_argument
        self.PreRenderTasksCallback += self.pre_render_tasks
        self.PostRenderTasksCallback += self.post_render_tasks

    def cleanup(self):
        """Cleanup function."""
        for stdoutHandler in self.StdoutHandlers:
            del stdoutHandler.HandleCallback

        del self.InitializeProcessCallback
        del self.RenderExecutableCallback
        del self.RenderArgumentCallback
        del self.PreRenderTasksCallback
        del self.PostRenderTasksCallback

    def initialize_process(self):
        """Initialization."""
        self.LogInfo("Plugin version: {}".format(version_string))
        self.SingleFramesOnly = True
        self.StdoutHandling = True
        self.renderer = self.GetPluginInfoEntryWithDefault(
            "Renderer", "undefined")
        self.AddStdoutHandlerCallback(
            ".*Error.*").HandleCallback += self.handle_stdout_error

    def render_executable(self):
        """Get render executable name.

        Get paths from plugin configuration, find executable and return it.

        Returns:
            (str): Render executable.

        """
        oiiotool_exe_list = self.GetConfigEntry("OIIOTool_RenderExecutable")
        oiiotool_exe = FileUtils.SearchFileList(oiiotool_exe_list)

        if oiiotool_exe == "":
            self.FailRender(("No file found in the semicolon separated "
                             "list \"{}\". The path to the render executable "
                             "can be configured from the Plugin Configuration "
                             "in the Deadline Monitor.").format(
                                oiiotool_exe_list))

        return oiiotool_exe

    def render_argument(self):
        """Generate command line arguments for render executable.

        Returns:
            (str): arguments to add to render executable.

        """
        # Read the tile config file. The file format is compatible with
        # Draft Tile Assembler (see the sample after this method).
        data = {}
        with open(self.config_file, "r") as f:
            for text in f:
                # Parse key-value pair and remove white-space
                # around the entries
                info = [x.strip() for x in text.split("=", 1)]

                if len(info) > 1:
                    try:
                        data[str(info[0])] = info[1]
                    except Exception as e:
                        # should never be called
                        self.FailRender(
                            "Cannot parse config file: {}".format(e))

        # Get output file. We support only EXRs now.
        output_file = data["ImageFileName"]
        output_file = RepositoryUtils.CheckPathMapping(output_file)
        output_file = self.process_path(output_file)

        tile_info = []
        for tile in range(int(data["TileCount"])):
            tile_info.append({
                "filepath": data["Tile{}".format(tile)],
                "pos_x": int(data["Tile{}X".format(tile)]),
                "pos_y": int(data["Tile{}Y".format(tile)]),
                "height": int(data["Tile{}Height".format(tile)]),
                "width": int(data["Tile{}Width".format(tile)])
            })

        arguments = self.tile_oiio_args(
            int(data["ImageWidth"]), int(data["ImageHeight"]),
            tile_info, output_file)
        self.LogInfo(
            "Using arguments: {}".format(" ".join(arguments)))
        self.tiles = tile_info
        return " ".join(arguments)

    def process_path(self, filepath):
        """Handle slashes in file paths."""
        if SystemUtils.IsRunningOnWindows():
            filepath = filepath.replace("/", "\\")
            if filepath.startswith("\\") and not filepath.startswith("\\\\"):
                filepath = "\\" + filepath
        else:
            filepath = filepath.replace("\\", "/")
        return filepath

    def pre_render_tasks(self):
        """Load config file and do remapping."""
        self.LogInfo("OpenPype Tile Assembler starting...")
        config_file = self.GetPluginInfoEntry("ConfigFile")

        temp_scene_directory = self.CreateTempDirectory(
            "thread" + str(self.GetThreadNumber()))
        temp_scene_filename = Path.GetFileName(config_file)
        self.config_file = Path.Combine(
            temp_scene_directory, temp_scene_filename)

        if SystemUtils.IsRunningOnWindows():
            RepositoryUtils.CheckPathMappingInFileAndReplaceSeparator(
                config_file, self.config_file, "/", "\\")
        else:
            RepositoryUtils.CheckPathMappingInFileAndReplaceSeparator(
                config_file, self.config_file, "\\", "/")
        os.chmod(self.config_file, os.stat(self.config_file).st_mode)

    def post_render_tasks(self):
        """Cleanup tiles if required."""
        if self.GetBooleanPluginInfoEntryWithDefault("CleanupTiles", False):
            self.LogInfo("Cleaning up Tiles...")
            for tile in self.tiles:
                try:
                    self.LogInfo("Deleting: {}".format(tile["filepath"]))
                    os.remove(tile["filepath"])
                    # By this time we would have errored out
                    # if error on missing was enabled
                except KeyError:
                    pass
                except OSError:
                    self.LogInfo("Failed to delete: {}".format(
                        tile["filepath"]))

        self.LogInfo("OpenPype Tile Assembler Job finished.")

    def handle_stdout_error(self):
        """Handle errors in stdout."""
        self.FailRender(self.GetRegexMatch(0))

    def tile_oiio_args(
            self, output_width, output_height, tile_info, output_path):
        """Generate oiio tool arguments for tile assembly.

        Args:
            output_width (int): Width of output image.
            output_height (int): Height of output image.
            tile_info (list): List of tile items, each item must be
                a dictionary with `filepath`, `pos_x` and `pos_y` keys
                representing the path to the file and the x, y coordinates
                on the output image where the top-left point of the tile
                should start.
            output_path (str): Path to file where output should be stored.

        Returns:
            (list): oiio tools arguments.

        """
        args = []

        # Create new image with output resolution, and with same type and
        # channels as input
        oiiotool_path = self.render_executable()
        first_tile_path = tile_info[0]["filepath"]
        first_tile_info = info_about_input(oiiotool_path, first_tile_path)
        create_arg_template = "--create{} {}x{} {}"

        image_type = ""
        image_format = first_tile_info.get("format")
        if image_format:
            image_type = ":type={}".format(image_format)

        create_arg = create_arg_template.format(
            image_type, output_width,
            output_height, first_tile_info["nchannels"]
        )
        args.append(create_arg)

        for tile in tile_info:
            path = tile["filepath"]
            pos_x = tile["pos_x"]
            tile_height = info_about_input(oiiotool_path, path)["height"]
            if self.renderer == "vray":
                pos_y = tile["pos_y"]
            else:
                pos_y = output_height - tile["pos_y"] - tile_height

            # Add input path and make sure input's origin is 0, 0
            args.append(path)
            args.append("--origin +0+0")
            # Swap to have input as foreground
            args.append("--swap")
            # Paste foreground to background
            args.append("--paste {x:+d}{y:+d}".format(x=pos_x, y=pos_y))

        args.append("-o")
        args.append(output_path)

        return args
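

# For orientation, a two-tile assembly (hypothetical paths and sizes, a
# non-V-Ray renderer, so Y is flipped from top-left tile coordinates) would
# produce an oiiotool command along these lines:
#
#   oiiotool --create:type=half 1920x1080 4
#       /tmp/tile_0.exr --origin +0+0 --swap --paste +0+0
#       /tmp/tile_1.exr --origin +0+0 --swap --paste +960+0
#       -o /path/to/output.exr
#
# The "--create" canvas comes first, each tile is pasted over it and the
# result is written with "-o".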


@@ -1,29 +0,0 @@

## OpenPype Deadline repository overlay

This directory is an overlay for the Deadline repository: copy the whole
hierarchy into your Deadline repository and it should work as-is.

Logic:
------

GlobalJobPreLoad
----------------

`GlobalJobPreLoad` retrieves the OpenPype executable path from the
`OpenPype` Deadline Plug-in's settings, then calls that executable to
collect the environment variables needed for the Deadline Job.
These environment variables are injected into the rendering process.

Deadline triggers `GlobalJobPreLoad.py` for each Worker as it starts the
Job.

*Note*: It also contains backward-compatible logic to preserve functionality
for old Pype2 and non-OpenPype triggered jobs.

Plugin
------
For each render and publishing job, the `OpenPype` Deadline Plug-in is
checked for the configured location of the OpenPype executable (set in
`Deadline's Configure Plugins > OpenPype`) through `GlobalJobPreLoad`.
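
A minimal sketch of what the injection step in a `GlobalJobPreLoad.py` can
look like. `GetJob()`, `GetConfigEntry()` and `SetJobEnvironmentKeyValue()`
are standard Deadline scripting API; the `OpenPypeExecutable` config key, the
temporary JSON hand-off and the `extractenvironments` argument are
illustrative assumptions, not necessarily the exact calls this overlay used:

```python
import json
import subprocess
import tempfile


def inject_openpype_environment(deadlinePlugin):
    """Sketch: ask the configured executable for env vars, put them on the job."""
    job = deadlinePlugin.GetJob()

    # Executable path configured in the OpenPype plugin settings (assumption).
    executable = deadlinePlugin.GetConfigEntry("OpenPypeExecutable")

    # Hand-off file the executable fills with {"KEY": "value", ...}.
    export_path = tempfile.NamedTemporaryFile(suffix=".json", delete=False).name
    subprocess.check_call([executable, "extractenvironments", export_path])

    with open(export_path) as stream:
        env = json.load(stream)

    # Make the variables visible to this job's rendering process.
    for key, value in env.items():
        job.SetJobEnvironmentKeyValue(key, str(value))
```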

@@ -1,3 +0,0 @@

# -*- coding: utf-8 -*-
"""Package declaring AYON addon 'deadline' version."""
__version__ = "0.2.3"

@@ -1,10 +0,0 @@

name = "deadline"
title = "Deadline"
version = "0.2.3"

client_dir = "ayon_deadline"

ayon_required_addons = {
    "core": ">0.3.2",
}
ayon_compatible_addons = {}

@@ -1,15 +0,0 @@

from typing import Type

from ayon_server.addons import BaseServerAddon

from .settings import DeadlineSettings, DEFAULT_VALUES, DeadlineSiteSettings


class Deadline(BaseServerAddon):
    settings_model: Type[DeadlineSettings] = DeadlineSettings
    site_settings_model: Type[DeadlineSiteSettings] = DeadlineSiteSettings

    async def get_default_settings(self):
        settings_model_cls = self.get_settings_model()
        return settings_model_cls(**DEFAULT_VALUES)

@@ -1,12 +0,0 @@

from .main import (
    DeadlineSettings,
    DEFAULT_VALUES,
)
from .site_settings import DeadlineSiteSettings


__all__ = (
    "DeadlineSettings",
    "DeadlineSiteSettings",
    "DEFAULT_VALUES",
)

@@ -1,100 +0,0 @@

from typing import TYPE_CHECKING
from pydantic import validator

from ayon_server.settings import (
    BaseSettingsModel,
    SettingsField,
    ensure_unique_names,
)
if TYPE_CHECKING:
    from ayon_server.addons import BaseServerAddon

from .publish_plugins import (
    PublishPluginsModel,
    DEFAULT_DEADLINE_PLUGINS_SETTINGS
)


async def defined_deadline_ws_name_enum_resolver(
    addon: "BaseServerAddon",
    settings_variant: str = "production",
    project_name: str | None = None,
) -> list[str]:
    """Provide a list of names of configured Deadline webservice urls."""
    if addon is None:
        return []

    settings = await addon.get_studio_settings(variant=settings_variant)

    ws_server_name = []
    for deadline_url_item in settings.deadline_urls:
        ws_server_name.append(deadline_url_item.name)

    return ws_server_name


class ServerItemSubmodel(BaseSettingsModel):
    """Connection info about configured DL servers."""
    _layout = "expanded"
    name: str = SettingsField(title="Name")
    value: str = SettingsField(title="Url")
    require_authentication: bool = SettingsField(
        False, title="Require authentication")
    not_verify_ssl: bool = SettingsField(
        False, title="Don't verify SSL")
    default_username: str = SettingsField(
        "",
        title="Default user name",
        description="Webservice username, 'Require authentication' must be "
                    "enabled."
    )
    default_password: str = SettingsField(
        "",
        title="Default password",
        description="Webservice password, 'Require authentication' must be "
                    "enabled."
    )


class DeadlineSettings(BaseSettingsModel):
    # configured DL servers
    deadline_urls: list[ServerItemSubmodel] = SettingsField(
        default_factory=list,
        title="System Deadline Webservice Info",
        scope=["studio"],
    )

    # name (key) of the server selected for the project
    deadline_server: str = SettingsField(
        title="Project Deadline server name",
        section="---",
        scope=["project"],
        enum_resolver=defined_deadline_ws_name_enum_resolver
    )

    publish: PublishPluginsModel = SettingsField(
        default_factory=PublishPluginsModel,
        title="Publish Plugins",
    )

    @validator("deadline_urls")
    def validate_unique_names(cls, value):
        ensure_unique_names(value)
        return value


DEFAULT_VALUES = {
    "deadline_urls": [
        {
            "name": "default",
            "value": "http://127.0.0.1:8082",
            "require_authentication": False,
            "not_verify_ssl": False,
            "default_username": "",
            "default_password": ""
        }
    ],
    "deadline_server": "default",
    "publish": DEFAULT_DEADLINE_PLUGINS_SETTINGS
}
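

# Quick self-check sketch (hypothetical; needs the ayon_server imports above
# to be resolvable): instantiating the model with the defaults validates them
# against the schema.
#
#   settings = DeadlineSettings(**DEFAULT_VALUES)
#   assert settings.deadline_server == "default"
#   # With these defaults, defined_deadline_ws_name_enum_resolver would offer
#   # ["default"] in the server-name dropdown.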

@@ -1,578 +0,0 @@

from pydantic import validator

from ayon_server.settings import (
    BaseSettingsModel,
    SettingsField,
    ensure_unique_names,
)


class CollectDeadlinePoolsModel(BaseSettingsModel):
    """Settings Deadline default pools."""

    primary_pool: str = SettingsField(title="Primary Pool")

    secondary_pool: str = SettingsField(title="Secondary Pool")


class ValidateExpectedFilesModel(BaseSettingsModel):
    enabled: bool = SettingsField(True, title="Enabled")
    active: bool = SettingsField(True, title="Active")
    allow_user_override: bool = SettingsField(
        True, title="Allow user to change frame range"
    )
    families: list[str] = SettingsField(
        default_factory=list, title="Trigger on families"
    )
    targets: list[str] = SettingsField(
        default_factory=list, title="Trigger for plugins"
    )


def tile_assembler_enum():
    """Return a list of value/label dicts for the enumerator.

    Returning a list of dicts is used to allow for a custom label to be
    displayed in the UI.
    """
    return [
        {
            "value": "DraftTileAssembler",
            "label": "Draft Tile Assembler"
        },
        {
            "value": "OpenPypeTileAssembler",
            "label": "Open Image IO"
        }
    ]


class ScenePatchesSubmodel(BaseSettingsModel):
    _layout = "expanded"
    name: str = SettingsField(title="Patch name")
    regex: str = SettingsField(title="Patch regex")
    line: str = SettingsField(title="Patch line")


class MayaSubmitDeadlineModel(BaseSettingsModel):
    """Maya deadline submitter settings."""

    enabled: bool = SettingsField(title="Enabled")
    optional: bool = SettingsField(title="Optional")
    active: bool = SettingsField(title="Active")
    use_published: bool = SettingsField(title="Use Published scene")
    import_reference: bool = SettingsField(
        title="Use Scene with Imported Reference"
    )
    asset_dependencies: bool = SettingsField(title="Use Asset dependencies")
    priority: int = SettingsField(title="Priority")
    tile_priority: int = SettingsField(title="Tile Priority")
    group: str = SettingsField(title="Group")
    limit: list[str] = SettingsField(
        default_factory=list,
        title="Limit Groups"
    )
    tile_assembler_plugin: str = SettingsField(
        title="Tile Assembler Plugin",
        enum_resolver=tile_assembler_enum,
    )
    jobInfo: str = SettingsField(
        title="Additional JobInfo data",
        widget="textarea",
    )
    pluginInfo: str = SettingsField(
        title="Additional PluginInfo data",
        widget="textarea",
    )

    scene_patches: list[ScenePatchesSubmodel] = SettingsField(
        default_factory=list,
        title="Scene patches",
    )
    strict_error_checking: bool = SettingsField(
        title="Disable Strict Error Check profiles"
    )

    @validator("scene_patches")
    def validate_unique_names(cls, value):
        ensure_unique_names(value)
        return value
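

# A hypothetical scene_patches entry for illustration. Each patch is applied
# to the published workfile before submission; the intent (an assumption, the
# actual patching lives in the client-side submit plugin) is to insert "line"
# after the first line matching "regex":
#
#   {
#       "name": "fix_render_globals",
#       "regex": "^setAttr \"defaultRenderGlobals",
#       "line": "// patched by studio pipeline"
#   }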


class MaxSubmitDeadlineModel(BaseSettingsModel):
    enabled: bool = SettingsField(True)
    optional: bool = SettingsField(title="Optional")
    active: bool = SettingsField(title="Active")
    use_published: bool = SettingsField(title="Use Published scene")
    priority: int = SettingsField(title="Priority")
    chunk_size: int = SettingsField(title="Frame per Task")
    group: str = SettingsField("", title="Group Name")


class EnvSearchReplaceSubmodel(BaseSettingsModel):
    _layout = "compact"
    name: str = SettingsField(title="Name")
    value: str = SettingsField(title="Value")


class LimitGroupsSubmodel(BaseSettingsModel):
    _layout = "expanded"
    name: str = SettingsField(title="Name")
    value: list[str] = SettingsField(
        default_factory=list,
        title="Limit Groups"
    )


def fusion_deadline_plugin_enum():
    """Return a list of value/label dicts for the enumerator.

    Returning a list of dicts is used to allow for a custom label to be
    displayed in the UI.
    """
    return [
        {
            "value": "Fusion",
            "label": "Fusion"
        },
        {
            "value": "FusionCmd",
            "label": "FusionCmd"
        }
    ]


class FusionSubmitDeadlineModel(BaseSettingsModel):
    enabled: bool = SettingsField(True, title="Enabled")
    optional: bool = SettingsField(False, title="Optional")
    active: bool = SettingsField(True, title="Active")
    priority: int = SettingsField(50, title="Priority")
    chunk_size: int = SettingsField(10, title="Frame per Task")
    concurrent_tasks: int = SettingsField(
        1, title="Number of concurrent tasks"
    )
    group: str = SettingsField("", title="Group Name")
    plugin: str = SettingsField(
        "Fusion",
        enum_resolver=fusion_deadline_plugin_enum,
        title="Deadline Plugin"
    )


class NukeSubmitDeadlineModel(BaseSettingsModel):
    """Nuke deadline submitter settings."""

    enabled: bool = SettingsField(title="Enabled")
    optional: bool = SettingsField(title="Optional")
    active: bool = SettingsField(title="Active")
    priority: int = SettingsField(title="Priority")
    chunk_size: int = SettingsField(title="Chunk Size")
    concurrent_tasks: int = SettingsField(title="Number of concurrent tasks")
    group: str = SettingsField(title="Group")
    department: str = SettingsField(title="Department")
    use_gpu: bool = SettingsField(title="Use GPU")
    workfile_dependency: bool = SettingsField(title="Workfile Dependency")
    use_published_workfile: bool = SettingsField(
        title="Use Published Workfile"
    )

    env_allowed_keys: list[str] = SettingsField(
        default_factory=list,
        title="Allowed environment keys"
    )

    env_search_replace_values: list[EnvSearchReplaceSubmodel] = SettingsField(
        default_factory=list,
        title="Search & replace in environment values",
    )

    limit_groups: list[LimitGroupsSubmodel] = SettingsField(
        default_factory=list,
        title="Limit Groups",
    )

    @validator(
        "limit_groups",
        "env_search_replace_values")
    def validate_unique_names(cls, value):
        ensure_unique_names(value)
        return value


class HarmonySubmitDeadlineModel(BaseSettingsModel):
    """Harmony deadline submitter settings."""

    enabled: bool = SettingsField(title="Enabled")
    optional: bool = SettingsField(title="Optional")
    active: bool = SettingsField(title="Active")
    use_published: bool = SettingsField(title="Use Published scene")
    priority: int = SettingsField(title="Priority")
    chunk_size: int = SettingsField(title="Chunk Size")
    group: str = SettingsField(title="Group")
    department: str = SettingsField(title="Department")


class HoudiniSubmitDeadlineModel(BaseSettingsModel):
    """Houdini deadline render submitter settings."""
    enabled: bool = SettingsField(title="Enabled")
    optional: bool = SettingsField(title="Optional")
    active: bool = SettingsField(title="Active")

    priority: int = SettingsField(title="Priority")
    chunk_size: int = SettingsField(title="Chunk Size")
    group: str = SettingsField(title="Group")

    export_priority: int = SettingsField(title="Export Priority")
    export_chunk_size: int = SettingsField(title="Export Chunk Size")
    export_group: str = SettingsField(title="Export Group")


class HoudiniCacheSubmitDeadlineModel(BaseSettingsModel):
    """Houdini deadline cache submitter settings."""
    enabled: bool = SettingsField(title="Enabled")
    optional: bool = SettingsField(title="Optional")
    active: bool = SettingsField(title="Active")

    priority: int = SettingsField(title="Priority")
    chunk_size: int = SettingsField(title="Chunk Size")
    group: str = SettingsField(title="Group")


class AfterEffectsSubmitDeadlineModel(BaseSettingsModel):
    """After Effects deadline submitter settings."""

    enabled: bool = SettingsField(title="Enabled")
    optional: bool = SettingsField(title="Optional")
    active: bool = SettingsField(title="Active")
    use_published: bool = SettingsField(title="Use Published scene")
    priority: int = SettingsField(title="Priority")
    chunk_size: int = SettingsField(title="Chunk Size")
    group: str = SettingsField(title="Group")
    department: str = SettingsField(title="Department")
    multiprocess: bool = SettingsField(title="Multiprocess")


class CelactionSubmitDeadlineModel(BaseSettingsModel):
    enabled: bool = SettingsField(True, title="Enabled")
    deadline_department: str = SettingsField("", title="Deadline department")
    deadline_priority: int = SettingsField(50, title="Deadline priority")
    deadline_pool: str = SettingsField("", title="Deadline pool")
    deadline_pool_secondary: str = SettingsField(
        "", title="Deadline pool (secondary)"
    )
    deadline_group: str = SettingsField("", title="Deadline Group")
    deadline_chunk_size: int = SettingsField(10, title="Deadline Chunk size")
    deadline_job_delay: str = SettingsField(
        "", title="Delay job (timecode dd:hh:mm:ss)"
    )


class BlenderSubmitDeadlineModel(BaseSettingsModel):
    enabled: bool = SettingsField(True)
    optional: bool = SettingsField(title="Optional")
    active: bool = SettingsField(title="Active")
    use_published: bool = SettingsField(title="Use Published scene")
    asset_dependencies: bool = SettingsField(title="Use Asset dependencies")
    priority: int = SettingsField(title="Priority")
    chunk_size: int = SettingsField(title="Frame per Task")
    group: str = SettingsField("", title="Group Name")
    job_delay: str = SettingsField(
        "", title="Delay job (timecode dd:hh:mm:ss)"
    )


class AOVFilterSubmodel(BaseSettingsModel):
    _layout = "expanded"
    name: str = SettingsField(title="Host")
    value: list[str] = SettingsField(
        default_factory=list,
        title="AOV regex"
    )


class ProcessCacheJobFarmModel(BaseSettingsModel):
    """Process submitted cache job on farm."""

    enabled: bool = SettingsField(title="Enabled")
    deadline_department: str = SettingsField(title="Department")
    deadline_pool: str = SettingsField(title="Pool")
    deadline_group: str = SettingsField(title="Group")
    deadline_chunk_size: int = SettingsField(title="Chunk Size")
    deadline_priority: int = SettingsField(title="Priority")


class ProcessSubmittedJobOnFarmModel(BaseSettingsModel):
    """Process submitted job on farm."""

    enabled: bool = SettingsField(title="Enabled")
    deadline_department: str = SettingsField(title="Department")
    deadline_pool: str = SettingsField(title="Pool")
    deadline_group: str = SettingsField(title="Group")
    deadline_chunk_size: int = SettingsField(title="Chunk Size")
    deadline_priority: int = SettingsField(title="Priority")
    publishing_script: str = SettingsField(title="Publishing script path")
    skip_integration_repre_list: list[str] = SettingsField(
        default_factory=list,
        title="Skip integration of representation with ext"
    )
    families_transfer: list[str] = SettingsField(
        default_factory=list,
        title=(
            "List of family names to transfer\n"
            "to generated instances (AOVs for example)."
        )
    )
    aov_filter: list[AOVFilterSubmodel] = SettingsField(
        default_factory=list,
        title="Reviewable products filter",
    )

    @validator("aov_filter")
    def validate_unique_names(cls, value):
        ensure_unique_names(value)
        return value
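

# For orientation: an aov_filter entry such as {"name": "maya",
# "value": [".*([Bb]eauty).*"]} makes only AOVs whose names match one of the
# regexes reviewable for that host. A quick stdlib check:
#
#   import re
#   assert re.match(".*([Bb]eauty).*", "masterBeauty")
#   assert not re.match(".*([Bb]eauty).*", "cryptomatte")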


class PublishPluginsModel(BaseSettingsModel):
    CollectDeadlinePools: CollectDeadlinePoolsModel = SettingsField(
        default_factory=CollectDeadlinePoolsModel,
        title="Default Pools")
    ValidateExpectedFiles: ValidateExpectedFilesModel = SettingsField(
        default_factory=ValidateExpectedFilesModel,
        title="Validate Expected Files"
    )
    AfterEffectsSubmitDeadline: AfterEffectsSubmitDeadlineModel = (
        SettingsField(
            default_factory=AfterEffectsSubmitDeadlineModel,
            title="After Effects to deadline",
            section="Hosts"
        )
    )
    BlenderSubmitDeadline: BlenderSubmitDeadlineModel = SettingsField(
        default_factory=BlenderSubmitDeadlineModel,
        title="Blender Submit Deadline")
    CelactionSubmitDeadline: CelactionSubmitDeadlineModel = SettingsField(
        default_factory=CelactionSubmitDeadlineModel,
        title="Celaction Submit Deadline")
    FusionSubmitDeadline: FusionSubmitDeadlineModel = SettingsField(
        default_factory=FusionSubmitDeadlineModel,
        title="Fusion submit to Deadline")
    HarmonySubmitDeadline: HarmonySubmitDeadlineModel = SettingsField(
        default_factory=HarmonySubmitDeadlineModel,
        title="Harmony Submit to deadline")
    HoudiniCacheSubmitDeadline: HoudiniCacheSubmitDeadlineModel = (
        SettingsField(
            default_factory=HoudiniCacheSubmitDeadlineModel,
            title="Houdini Submit cache to deadline")
    )
    HoudiniSubmitDeadline: HoudiniSubmitDeadlineModel = SettingsField(
        default_factory=HoudiniSubmitDeadlineModel,
        title="Houdini Submit render to deadline")
    MaxSubmitDeadline: MaxSubmitDeadlineModel = SettingsField(
        default_factory=MaxSubmitDeadlineModel,
        title="Max Submit to deadline")
    MayaSubmitDeadline: MayaSubmitDeadlineModel = SettingsField(
        default_factory=MayaSubmitDeadlineModel,
        title="Maya Submit to deadline")
    NukeSubmitDeadline: NukeSubmitDeadlineModel = SettingsField(
        default_factory=NukeSubmitDeadlineModel,
        title="Nuke Submit to deadline")
    ProcessSubmittedCacheJobOnFarm: ProcessCacheJobFarmModel = SettingsField(
        default_factory=ProcessCacheJobFarmModel,
        title="Process submitted cache Job on farm",
        section="Publish Jobs")
    ProcessSubmittedJobOnFarm: ProcessSubmittedJobOnFarmModel = SettingsField(
        default_factory=ProcessSubmittedJobOnFarmModel,
        title="Process submitted job on farm")


DEFAULT_DEADLINE_PLUGINS_SETTINGS = {
    "CollectDeadlinePools": {
        "primary_pool": "",
        "secondary_pool": ""
    },
    "ValidateExpectedFiles": {
        "enabled": True,
        "active": True,
        "allow_user_override": True,
        "families": [
            "render"
        ],
        "targets": [
            "deadline"
        ]
    },
    "AfterEffectsSubmitDeadline": {
        "enabled": True,
        "optional": False,
        "active": True,
        "use_published": True,
        "priority": 50,
        "chunk_size": 10000,
        "group": "",
        "department": "",
        "multiprocess": True
    },
    "BlenderSubmitDeadline": {
        "enabled": True,
        "optional": False,
        "active": True,
        "use_published": True,
        "asset_dependencies": True,
        "priority": 50,
        "chunk_size": 10,
        "group": "none",
        "job_delay": "00:00:00:00"
    },
    "CelactionSubmitDeadline": {
        "enabled": True,
        "deadline_department": "",
        "deadline_priority": 50,
        "deadline_pool": "",
        "deadline_pool_secondary": "",
        "deadline_group": "",
        "deadline_chunk_size": 10,
        "deadline_job_delay": "00:00:00:00"
    },
    "FusionSubmitDeadline": {
        "enabled": True,
        "optional": False,
        "active": True,
        "priority": 50,
        "chunk_size": 10,
        "concurrent_tasks": 1,
        "group": ""
    },
    "HarmonySubmitDeadline": {
        "enabled": True,
        "optional": False,
        "active": True,
        "use_published": True,
        "priority": 50,
        "chunk_size": 10000,
        "group": "",
        "department": ""
    },
    "HoudiniCacheSubmitDeadline": {
        "enabled": True,
        "optional": False,
        "active": True,
        "priority": 50,
        "chunk_size": 999999,
        "group": ""
    },
    "HoudiniSubmitDeadline": {
        "enabled": True,
        "optional": False,
        "active": True,
        "priority": 50,
        "chunk_size": 1,
        "group": "",
        "export_priority": 50,
        "export_chunk_size": 10,
        "export_group": ""
    },
    "MaxSubmitDeadline": {
        "enabled": True,
        "optional": False,
        "active": True,
        "use_published": True,
        "priority": 50,
        "chunk_size": 10,
        "group": "none"
    },
    "MayaSubmitDeadline": {
        "enabled": True,
        "optional": False,
        "active": True,
        "tile_assembler_plugin": "DraftTileAssembler",
        "use_published": True,
        "import_reference": False,
        "asset_dependencies": True,
        "strict_error_checking": True,
        "priority": 50,
        "tile_priority": 50,
        "group": "none",
        "limit": [],
        # this used to be empty dict
        "jobInfo": "",
        # this used to be empty dict
        "pluginInfo": "",
        "scene_patches": []
    },
    "NukeSubmitDeadline": {
        "enabled": True,
        "optional": False,
        "active": True,
        "priority": 50,
        "chunk_size": 10,
        "concurrent_tasks": 1,
        "group": "",
        "department": "",
        "use_gpu": True,
        "workfile_dependency": True,
        "use_published_workfile": True,
        "env_allowed_keys": [],
        "env_search_replace_values": [],
        "limit_groups": []
    },
    "ProcessSubmittedCacheJobOnFarm": {
        "enabled": True,
        "deadline_department": "",
        "deadline_pool": "",
        "deadline_group": "",
        "deadline_chunk_size": 1,
        "deadline_priority": 50
    },
    "ProcessSubmittedJobOnFarm": {
        "enabled": True,
        "deadline_department": "",
        "deadline_pool": "",
        "deadline_group": "",
        "deadline_chunk_size": 1,
        "deadline_priority": 50,
        "publishing_script": "",
        "skip_integration_repre_list": [],
        "families_transfer": ["render3d", "render2d", "ftrack", "slate"],
        "aov_filter": [
            {
                "name": "maya",
                "value": [
                    ".*([Bb]eauty).*"
                ]
            },
            {
                "name": "blender",
                "value": [
                    ".*([Bb]eauty).*"
                ]
            },
            {
                "name": "aftereffects",
                "value": [
                    ".*"
                ]
            },
            {
                "name": "celaction",
                "value": [
                    ".*"
                ]
            },
            {
                "name": "harmony",
                "value": [
                    ".*"
                ]
            },
            {
                "name": "max",
                "value": [
                    ".*"
                ]
            },
            {
                "name": "fusion",
                "value": [
                    ".*"
                ]
            }
        ]
    }
}

@@ -1,28 +0,0 @@

from ayon_server.settings import (
    BaseSettingsModel,
    SettingsField,
)

from .main import defined_deadline_ws_name_enum_resolver


class CredentialPerServerModel(BaseSettingsModel):
    """Provide credentials for configured DL servers."""
    _layout = "expanded"
    server_name: str = SettingsField(
        "",
        title="DL server name",
        enum_resolver=defined_deadline_ws_name_enum_resolver
    )
    username: str = SettingsField("", title="Username")
    password: str = SettingsField("", title="Password")


class DeadlineSiteSettings(BaseSettingsModel):
    local_settings: list[CredentialPerServerModel] = SettingsField(
        default_factory=list,
        title="Local settings",
        description=(
            "Please provide credentials for configured Deadline servers"
        ),
    )