rename folder

Milan Kolar 2021-04-01 18:54:46 +02:00
parent 483c930a68
commit 8432e94615
1511 changed files with 0 additions and 0 deletions

View file

@@ -0,0 +1,6 @@
from .deadline_module import DeadlineModule
__all__ = (
"DeadlineModule",
)

View file

@@ -0,0 +1,29 @@
import os
from openpype.modules import (
PypeModule, IPluginPaths)
class DeadlineModule(PypeModule, IPluginPaths):
name = "deadline"
def initialize(self, modules_settings):
        # enabled state and the Deadline URL come from the module settings
deadline_settings = modules_settings[self.name]
self.enabled = deadline_settings["enabled"]
self.deadline_url = deadline_settings["DEADLINE_REST_URL"]
def get_global_environments(self):
"""Deadline global environments for pype implementation."""
return {
"DEADLINE_REST_URL": self.deadline_url
}
def connect_with_modules(self, *_a, **_kw):
return
def get_plugin_paths(self):
"""Deadline plugin paths."""
current_dir = os.path.dirname(os.path.abspath(__file__))
return {
"publish": [os.path.join(current_dir, "plugins", "publish")]
}
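For reference, `initialize()` above reads only two keys from the module settings. A minimal sketch of the settings fragment it expects (the URL is an example value, not a real endpoint):

# hypothetical fragment of the system settings handed to initialize()
modules_settings = {
    "deadline": {
        "enabled": True,
        "DEADLINE_REST_URL": "http://localhost:8082",
    }
}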

View file

@@ -0,0 +1,122 @@
from openpype.lib import abstract_submit_deadline
from openpype.lib.abstract_submit_deadline import DeadlineJobInfo
import pyblish.api
import os
import attr
import getpass
from avalon import api
@attr.s
class DeadlinePluginInfo:
Comp = attr.ib(default=None)
SceneFile = attr.ib(default=None)
OutputFilePath = attr.ib(default=None)
Output = attr.ib(default=None)
StartupDirectory = attr.ib(default=None)
Arguments = attr.ib(default=None)
ProjectPath = attr.ib(default=None)
AWSAssetFile0 = attr.ib(default=None)
Version = attr.ib(default=None)
MultiProcess = attr.ib(default=None)
class AfterEffectsSubmitDeadline(
        abstract_submit_deadline.AbstractSubmitDeadline):
label = "Submit AE to Deadline"
order = pyblish.api.IntegratorOrder + 0.1
hosts = ["aftereffects"]
families = ["render.farm"] # cannot be "render' as that is integrated
use_published = True
chunk_size = 1000000
def get_job_info(self):
dln_job_info = DeadlineJobInfo(Plugin="AfterEffects")
context = self._instance.context
dln_job_info.Name = self._instance.data["name"]
        dln_job_info.BatchName = os.path.basename(
            self._instance.data["source"])
dln_job_info.Plugin = "AfterEffects"
dln_job_info.UserName = context.data.get(
"deadlineUser", getpass.getuser())
if self._instance.data["frameEnd"] > self._instance.data["frameStart"]:
# Deadline requires integers in frame range
frame_range = "{}-{}".format(
int(round(self._instance.data["frameStart"])),
int(round(self._instance.data["frameEnd"])))
dln_job_info.Frames = frame_range
dln_job_info.ChunkSize = self.chunk_size
dln_job_info.OutputFilename = \
os.path.basename(self._instance.data["expectedFiles"][0])
dln_job_info.OutputDirectory = \
os.path.dirname(self._instance.data["expectedFiles"][0])
dln_job_info.JobDelay = "00:00:00"
keys = [
"FTRACK_API_KEY",
"FTRACK_API_USER",
"FTRACK_SERVER",
"AVALON_PROJECT",
"AVALON_ASSET",
"AVALON_TASK",
"AVALON_APP_NAME",
"OPENPYPE_USERNAME",
"OPENPYPE_DEV",
"OPENPYPE_LOG_NO_COLORS"
]
environment = dict({key: os.environ[key] for key in keys
if key in os.environ}, **api.Session)
for key in keys:
val = environment.get(key)
if val:
dln_job_info.EnvironmentKeyValue = "{key}={value}".format(
key=key,
value=val)
# to recognize job from PYPE for turning Event On/Off
dln_job_info.EnvironmentKeyValue = "OPENPYPE_RENDER_JOB=1"
return dln_job_info
def get_plugin_info(self):
deadline_plugin_info = DeadlinePluginInfo()
context = self._instance.context
script_path = context.data["currentFile"]
render_path = self._instance.data["expectedFiles"][0]
if len(self._instance.data["expectedFiles"]) > 1:
# replace frame ('000001') with Deadline's required '[#######]'
# expects filename in format project_asset_subset_version.FRAME.ext
render_dir = os.path.dirname(render_path)
file_name = os.path.basename(render_path)
arr = file_name.split('.')
assert len(arr) == 3, \
"Unable to parse frames from {}".format(file_name)
hashed = '[{}]'.format(len(arr[1]) * "#")
render_path = os.path.join(render_dir,
'{}.{}.{}'.format(arr[0], hashed,
arr[2]))
deadline_plugin_info.MultiProcess = True
deadline_plugin_info.Comp = self._instance.data["comp_name"]
deadline_plugin_info.Version = "17.5"
deadline_plugin_info.SceneFile = self.scene_path
deadline_plugin_info.Output = render_path.replace("\\", "/")
return attr.asdict(deadline_plugin_info)
    def from_published_scene(self):
        """Do not overwrite expected files.
        ``use_published`` is set to True, so rendering is triggered from
        the published scene (in the 'publish' folder). The default
        implementation of the abstract class also renames expected
        (i.e. rendered) files accordingly, which is not needed here.
        """
        return super().from_published_scene(False)
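The frame-token replacement done in `get_plugin_info()` can be exercised standalone. A minimal sketch, assuming the `project_asset_subset_version.FRAME.ext` naming noted in the comment above (`to_deadline_frame_token` is a hypothetical helper name):

import os

def to_deadline_frame_token(render_path):
    # replace the frame part ('000001') with Deadline's '[######]' token,
    # mirroring the split/join done in get_plugin_info() above
    render_dir = os.path.dirname(render_path)
    name, frame, ext = os.path.basename(render_path).split('.')
    hashed = '[{}]'.format(len(frame) * "#")
    return os.path.join(render_dir, '.'.join((name, hashed, ext)))

# to_deadline_frame_token("/out/shot_v001.000001.png")
# -> "/out/shot_v001.[######].png"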

View file

@@ -0,0 +1,415 @@
# -*- coding: utf-8 -*-
"""Submitting render job to Deadline."""
import os
from pathlib import Path
from collections import OrderedDict
from zipfile import ZipFile, is_zipfile
import re
import attr
import pyblish.api
import openpype.lib.abstract_submit_deadline
from openpype.lib.abstract_submit_deadline import DeadlineJobInfo
from avalon import api
class _ZipFile(ZipFile):
"""Extended check for windows invalid characters."""
    # this is extending the default zipfile table for a few invalid
    # characters that can come from Mac
_windows_illegal_characters = ":<>|\"?*\r\n\x00"
_windows_illegal_name_trans_table = str.maketrans(
_windows_illegal_characters,
"_" * len(_windows_illegal_characters)
)
@attr.s
class PluginInfo(object):
"""Plugin info structure for Harmony Deadline plugin."""
SceneFile = attr.ib()
# Harmony version
Version = attr.ib()
Camera = attr.ib(default="")
FieldOfView = attr.ib(default=41.11)
IsDatabase = attr.ib(default=False)
ResolutionX = attr.ib(default=1920)
ResolutionY = attr.ib(default=1080)
# Resolution name preset, default
UsingResPreset = attr.ib(default=False)
ResolutionName = attr.ib(default="HDTV_1080p24")
PreRenderInlineScript = attr.ib(default=None)
# --------------------------------------------------
_outputNode = attr.ib(factory=list)
@property
def OutputNode(self): # noqa: N802
"""Return all output nodes formatted for Deadline.
Returns:
            dict: as `{'Output0Node': 'Top/renderFarmDefault'}`
"""
out = {}
for index, v in enumerate(self._outputNode):
out["Output{}Node".format(index)] = v
return out
@OutputNode.setter
def OutputNode(self, val): # noqa: N802
self._outputNode.append(val)
# --------------------------------------------------
_outputType = attr.ib(factory=list)
@property
def OutputType(self): # noqa: N802
"""Return output nodes type formatted for Deadline.
Returns:
            dict: as `{'Output0Type': 'Image'}`
"""
out = {}
for index, v in enumerate(self._outputType):
out["Output{}Type".format(index)] = v
return out
@OutputType.setter
def OutputType(self, val): # noqa: N802
self._outputType.append(val)
# --------------------------------------------------
_outputLeadingZero = attr.ib(factory=list)
@property
def OutputLeadingZero(self): # noqa: N802
"""Return output nodes type formatted for Deadline.
Returns:
dict: as `{'Output0LeadingZero', '3'}`
"""
out = {}
for index, v in enumerate(self._outputLeadingZero):
out["Output{}LeadingZero".format(index)] = v
return out
@OutputLeadingZero.setter
def OutputLeadingZero(self, val): # noqa: N802
self._outputLeadingZero.append(val)
# --------------------------------------------------
_outputFormat = attr.ib(factory=list)
@property
def OutputFormat(self): # noqa: N802
"""Return output nodes format formatted for Deadline.
Returns:
            dict: as `{'Output0Format': 'PNG4'}`
"""
out = {}
for index, v in enumerate(self._outputFormat):
out["Output{}Format".format(index)] = v
return out
@OutputFormat.setter
def OutputFormat(self, val): # noqa: N802
self._outputFormat.append(val)
# --------------------------------------------------
_outputStartFrame = attr.ib(factory=list)
@property
def OutputStartFrame(self): # noqa: N802
"""Return start frame for output nodes formatted for Deadline.
Returns:
            dict: as `{'Output0StartFrame': '1'}`
"""
out = {}
for index, v in enumerate(self._outputStartFrame):
out["Output{}StartFrame".format(index)] = v
return out
@OutputStartFrame.setter
def OutputStartFrame(self, val): # noqa: N802
self._outputStartFrame.append(val)
# --------------------------------------------------
_outputPath = attr.ib(factory=list)
@property
def OutputPath(self): # noqa: N802
"""Return output paths for nodes formatted for Deadline.
Returns:
            dict: as `{'Output0Path': '/output/path'}`
"""
out = {}
for index, v in enumerate(self._outputPath):
out["Output{}Path".format(index)] = v
return out
@OutputPath.setter
def OutputPath(self, val): # noqa: N802
self._outputPath.append(val)
def set_output(self, node, image_format, output,
output_type="Image", zeros=3, start_frame=1):
"""Helper to set output.
        This should be used instead of setting the properties individually,
        so that the indexes remain consistent.
Args:
node (str): harmony write node name
image_format (str): format of output (PNG4, TIF, ...)
output (str): output path
output_type (str, optional): "Image" or "Movie" (not supported).
zeros (int, optional): Leading zeros (for 0001 = 3)
start_frame (int, optional): Sequence offset.
"""
self.OutputNode = node
self.OutputFormat = image_format
self.OutputPath = output
self.OutputType = output_type
self.OutputLeadingZero = zeros
self.OutputStartFrame = start_frame
def serialize(self):
"""Return all data serialized as dictionary.
Returns:
OrderedDict: all serialized data.
"""
def filter_data(a, v):
if a.name.startswith("_"):
return False
if v is None:
return False
return True
serialized = attr.asdict(
self, dict_factory=OrderedDict, filter=filter_data)
serialized.update(self.OutputNode)
serialized.update(self.OutputFormat)
serialized.update(self.OutputPath)
serialized.update(self.OutputType)
serialized.update(self.OutputLeadingZero)
serialized.update(self.OutputStartFrame)
return serialized
class HarmonySubmitDeadline(
openpype.lib.abstract_submit_deadline.AbstractSubmitDeadline):
"""Submit render write of Harmony scene to Deadline.
Renders are submitted to a Deadline Web Service as
supplied via the environment variable ``DEADLINE_REST_URL``.
Note:
If Deadline configuration is not detected, this plugin will
be disabled.
Attributes:
use_published (bool): Use published scene to render instead of the
one in work area.
"""
label = "Submit to Deadline"
order = pyblish.api.IntegratorOrder + 0.1
hosts = ["harmony"]
families = ["render.farm"]
if not os.environ.get("DEADLINE_REST_URL"):
optional = False
active = False
else:
optional = True
use_published = False
primary_pool = ""
secondary_pool = ""
priority = 50
chunk_size = 1000000
def get_job_info(self):
job_info = DeadlineJobInfo("Harmony")
job_info.Name = self._instance.data["name"]
job_info.Plugin = "HarmonyPype"
job_info.Frames = "{}-{}".format(
self._instance.data["frameStartHandle"],
self._instance.data["frameEndHandle"]
)
# for now, get those from presets. Later on it should be
# configurable in Harmony UI directly.
job_info.Priority = self.priority
job_info.Pool = self.primary_pool
job_info.SecondaryPool = self.secondary_pool
job_info.ChunkSize = self.chunk_size
job_info.BatchName = os.path.basename(self._instance.data["source"])
keys = [
"FTRACK_API_KEY",
"FTRACK_API_USER",
"FTRACK_SERVER",
"AVALON_PROJECT",
"AVALON_ASSET",
"AVALON_TASK",
"AVALON_APP_NAME",
"OPENPYPE_USERNAME",
"OPENPYPE_DEV",
"OPENPYPE_LOG_NO_COLORS"
]
environment = dict({key: os.environ[key] for key in keys
if key in os.environ}, **api.Session)
for key in keys:
val = environment.get(key)
if val:
job_info.EnvironmentKeyValue = "{key}={value}".format(
key=key,
value=val)
# to recognize job from PYPE for turning Event On/Off
job_info.EnvironmentKeyValue = "OPENPYPE_RENDER_JOB=1"
return job_info
def _unzip_scene_file(self, published_scene: Path) -> Path:
"""Unzip scene zip file to its directory.
Unzip scene file (if it is zip file) to its current directory and
return path to xstage file there. Xstage file is determined by its
name.
Args:
published_scene (Path): path to zip file.
Returns:
Path: The path to unzipped xstage.
"""
# if not zip, bail out.
if "zip" not in published_scene.suffix or not is_zipfile(
published_scene.as_posix()
):
self.log.error("Published scene is not in zip.")
self.log.error(published_scene)
raise AssertionError("invalid scene format")
xstage_path = (
published_scene.parent
/ published_scene.stem
/ f"{published_scene.stem}.xstage"
)
unzip_dir = (published_scene.parent / published_scene.stem)
with _ZipFile(published_scene, "r") as zip_ref:
zip_ref.extractall(unzip_dir.as_posix())
# find any xstage files in directory, prefer the one with the same name
# as directory (plus extension)
xstage_files = []
for scene in unzip_dir.iterdir():
if scene.suffix == ".xstage":
xstage_files.append(scene)
# there must be at least one (but maybe not more?) xstage file
if not xstage_files:
self.log.error("No xstage files found in zip")
raise AssertionError("Invalid scene archive")
ideal_scene = False
        # find the one with the same name as the zip, in case there is more
        # than one xstage file.
for scene in xstage_files:
# if /foo/bar/baz.zip == /foo/bar/baz/baz.xstage
# ^^^ ^^^
if scene.stem == published_scene.stem:
xstage_path = scene
ideal_scene = True
        # but sometimes the xstage file has a different name than the zip -
        # in that case use the first one found.
if not ideal_scene:
xstage_path = xstage_files[0]
return xstage_path
def get_plugin_info(self):
work_scene = Path(self._instance.data["source"])
# this is path to published scene workfile _ZIP_. Before
# rendering, we need to unzip it.
published_scene = Path(
self.from_published_scene(False))
self.log.info(f"Processing {published_scene.as_posix()}")
xstage_path = self._unzip_scene_file(published_scene)
render_path = xstage_path.parent / "renders"
# for submit_publish job to create .json file in
self._instance.data["outputDir"] = render_path
new_expected_files = []
work_path_str = str(work_scene.parent.as_posix())
render_path_str = str(render_path.as_posix())
for file in self._instance.data["expectedFiles"]:
_file = str(Path(file).as_posix())
new_expected_files.append(
_file.replace(work_path_str, render_path_str)
)
audio_file = self._instance.data.get("audioFile")
if audio_file:
abs_path = xstage_path.parent / audio_file
self._instance.context.data["audioFile"] = str(abs_path)
self._instance.data["source"] = str(published_scene.as_posix())
self._instance.data["expectedFiles"] = new_expected_files
harmony_plugin_info = PluginInfo(
SceneFile=xstage_path.as_posix(),
Version=(
self._instance.context.data["harmonyVersion"].split(".")[0]),
FieldOfView=self._instance.context.data["FOV"],
ResolutionX=self._instance.data["resolutionWidth"],
ResolutionY=self._instance.data["resolutionHeight"]
)
        pattern = '[0]{' + str(self._instance.data["leadingZeros"]) + \
            r'}1\.[a-zA-Z]{3}'
render_prefix = re.sub(pattern, '',
self._instance.data["expectedFiles"][0])
harmony_plugin_info.set_output(
self._instance.data["setMembers"][0],
self._instance.data["outputFormat"],
render_prefix,
self._instance.data["outputType"],
self._instance.data["leadingZeros"],
self._instance.data["outputStartFrame"]
)
all_write_nodes = self._instance.context.data["all_write_nodes"]
disable_nodes = []
for node in all_write_nodes:
# disable all other write nodes
if node != self._instance.data["setMembers"][0]:
disable_nodes.append("node.setEnable('{}', false)"
.format(node))
harmony_plugin_info.PreRenderInlineScript = ';'.join(disable_nodes)
return harmony_plugin_info.serialize()
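End-to-end use of the `PluginInfo` container above, as a minimal sketch with made-up node and path values (assumes the class definition above is importable):

# made-up write node, format and output prefix for illustration
info = PluginInfo(SceneFile="/work/scene/scene.xstage", Version="17")
info.set_output("Top/Write", "PNG4", "/work/renders/render-", zeros=3)
# serialize() flattens the indexed Output* keys into one dict for the
# Deadline PluginInfo payload, e.g. {'SceneFile': ..., 'Version': '17',
# 'Output0Node': 'Top/Write', 'Output0Format': 'PNG4', ...}
plugin_payload = info.serialize()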

File diff suppressed because it is too large

View file

@@ -0,0 +1,390 @@
import os
import json
import getpass
from avalon import api
from avalon.vendor import requests
import re
import pyblish.api
import nuke
class NukeSubmitDeadline(pyblish.api.InstancePlugin):
"""Submit write to Deadline
Renders are submitted to a Deadline Web Service as
supplied via settings key "DEADLINE_REST_URL".
"""
label = "Submit to Deadline"
order = pyblish.api.IntegratorOrder + 0.1
hosts = ["nuke", "nukestudio"]
families = ["render.farm", "prerender.farm"]
optional = True
# presets
priority = 50
chunk_size = 1
primary_pool = ""
secondary_pool = ""
group = ""
department = ""
limit_groups = {}
def process(self, instance):
instance.data["toBeRenderedOn"] = "deadline"
families = instance.data["families"]
node = instance[0]
context = instance.context
deadline_url = (
context.data["system_settings"]
["modules"]
["deadline"]
["DEADLINE_REST_URL"]
)
assert deadline_url, "Requires DEADLINE_REST_URL"
self.deadline_url = "{}/api/jobs".format(deadline_url)
self._comment = context.data.get("comment", "")
self._ver = re.search(r"\d+\.\d+", context.data.get("hostVersion"))
self._deadline_user = context.data.get(
"deadlineUser", getpass.getuser())
self._frame_start = int(instance.data["frameStartHandle"])
self._frame_end = int(instance.data["frameEndHandle"])
# get output path
render_path = instance.data['path']
script_path = context.data["currentFile"]
for item in context:
if "workfile" in item.data["families"]:
msg = "Workfile (scene) must be published along"
assert item.data["publish"] is True, msg
template_data = item.data.get("anatomyData")
rep = item.data.get("representations")[0].get("name")
template_data["representation"] = rep
template_data["ext"] = rep
template_data["comment"] = None
anatomy_filled = context.data["anatomy"].format(template_data)
template_filled = anatomy_filled["publish"]["path"]
script_path = os.path.normpath(template_filled)
self.log.info(
"Using published scene for render {}".format(script_path)
)
# exception for slate workflow
if "slate" in instance.data["families"]:
self._frame_start -= 1
response = self.payload_submit(instance,
script_path,
render_path,
node.name()
)
# Store output dir for unified publisher (filesequence)
instance.data["deadlineSubmissionJob"] = response.json()
instance.data["outputDir"] = os.path.dirname(
render_path).replace("\\", "/")
instance.data["publishJobState"] = "Suspended"
if instance.data.get("bakeScriptPath"):
render_path = instance.data.get("bakeRenderPath")
script_path = instance.data.get("bakeScriptPath")
exe_node_name = instance.data.get("bakeWriteNodeName")
# exception for slate workflow
if "slate" in instance.data["families"]:
self._frame_start += 1
resp = self.payload_submit(instance,
script_path,
render_path,
exe_node_name,
response.json()
)
# Store output dir for unified publisher (filesequence)
instance.data["deadlineSubmissionJob"] = resp.json()
instance.data["publishJobState"] = "Suspended"
# redefinition of families
if "render.farm" in families:
instance.data['family'] = 'write'
families.insert(0, "render2d")
elif "prerender.farm" in families:
instance.data['family'] = 'write'
families.insert(0, "prerender")
instance.data["families"] = families
    def payload_submit(self,
                       instance,
                       script_path,
                       render_path,
                       exe_node_name,
                       response_data=None
                       ):
render_dir = os.path.normpath(os.path.dirname(render_path))
script_name = os.path.basename(script_path)
jobname = "%s - %s" % (script_name, instance.name)
output_filename_0 = self.preview_fname(render_path)
        if not response_data:
            response_data = {}
try:
# Ensure render folder exists
os.makedirs(render_dir)
except OSError:
pass
# define chunk and priority
chunk_size = instance.data.get("deadlineChunkSize")
if chunk_size == 0 and self.chunk_size:
chunk_size = self.chunk_size
priority = instance.data.get("deadlinePriority")
if not priority:
priority = self.priority
# resolve any limit groups
limit_groups = self.get_limit_groups()
self.log.info("Limit groups: `{}`".format(limit_groups))
payload = {
"JobInfo": {
# Top-level group name
"BatchName": script_name,
# Asset dependency to wait for at least the scene file to sync.
# "AssetDependency0": script_path,
# Job name, as seen in Monitor
"Name": jobname,
# Arbitrary username, for visualisation in Monitor
"UserName": self._deadline_user,
"Priority": priority,
"ChunkSize": chunk_size,
"Department": self.department,
"Pool": self.primary_pool,
"SecondaryPool": self.secondary_pool,
"Group": self.group,
"Plugin": "Nuke",
"Frames": "{start}-{end}".format(
start=self._frame_start,
end=self._frame_end
),
"Comment": self._comment,
# Optional, enable double-click to preview rendered
# frames from Deadline Monitor
"OutputFilename0": output_filename_0.replace("\\", "/"),
# limiting groups
"LimitGroups": ",".join(limit_groups)
},
"PluginInfo": {
# Input
"SceneFile": script_path,
# Output directory and filename
"OutputFilePath": render_dir.replace("\\", "/"),
# "OutputFilePrefix": render_variables["filename_prefix"],
# Mandatory for Deadline
"Version": self._ver.group(),
# Resolve relative references
"ProjectPath": script_path,
"AWSAssetFile0": render_path,
# Only the specific write node is rendered.
"WriteNode": exe_node_name
},
# Mandatory for Deadline, may be empty
"AuxFiles": []
}
if responce_data.get("_id"):
payload["JobInfo"].update({
"JobType": "Normal",
"BatchName": responce_data["Props"]["Batch"],
"JobDependency0": responce_data["_id"],
"ChunkSize": 99999999
})
# Include critical environment variables with submission
keys = [
"PYTHONPATH",
"PATH",
"AVALON_SCHEMA",
"AVALON_PROJECT",
"AVALON_ASSET",
"AVALON_TASK",
"AVALON_APP_NAME",
"FTRACK_API_KEY",
"FTRACK_API_USER",
"FTRACK_SERVER",
"PYBLISHPLUGINPATH",
"NUKE_PATH",
"TOOL_ENV",
"OPENPYPE_DEV",
"FOUNDRY_LICENSE"
]
environment = dict({key: os.environ[key] for key in keys
if key in os.environ}, **api.Session)
# self.log.debug("enviro: {}".format(pprint(environment)))
for path in os.environ:
if path.lower().startswith('pype_'):
environment[path] = os.environ[path]
if path.lower().startswith('nuke_'):
environment[path] = os.environ[path]
if 'license' in path.lower():
environment[path] = os.environ[path]
clean_environment = {}
for key, value in environment.items():
clean_path = ""
self.log.debug("key: {}".format(key))
if "://" in value:
clean_path = value
else:
valid_paths = []
for path in value.split(os.pathsep):
if not path:
continue
                    try:
                        # guard against non UTF-8 characters in the path;
                        # str has no .decode() in Python 3
                        path.encode('utf-8', 'strict')
                        valid_paths.append(os.path.normpath(path))
                    except UnicodeEncodeError:
                        self.log.warning(
                            'path contains non UTF-8 characters')
if valid_paths:
clean_path = os.pathsep.join(valid_paths)
if key == "PYTHONPATH":
clean_path = clean_path.replace('python2', 'python3')
self.log.debug("clean path: {}".format(clean_path))
clean_environment[key] = clean_path
environment = clean_environment
# to recognize job from PYPE for turning Event On/Off
environment["OPENPYPE_RENDER_JOB"] = "1"
payload["JobInfo"].update({
"EnvironmentKeyValue%d" % index: "{key}={value}".format(
key=key,
value=environment[key]
) for index, key in enumerate(environment)
})
plugin = payload["JobInfo"]["Plugin"]
self.log.info("using render plugin : {}".format(plugin))
self.log.info("Submitting..")
self.log.info(json.dumps(payload, indent=4, sort_keys=True))
# adding expectied files to instance.data
self.expected_files(instance, render_path)
self.log.debug("__ expectedFiles: `{}`".format(
instance.data["expectedFiles"]))
response = requests.post(self.deadline_url, json=payload, timeout=10)
if not response.ok:
raise Exception(response.text)
return response
def preflight_check(self, instance):
"""Ensure the startFrame, endFrame and byFrameStep are integers"""
for key in ("frameStart", "frameEnd"):
value = instance.data[key]
if int(value) == value:
continue
self.log.warning(
"%f=%d was rounded off to nearest integer"
% (value, int(value))
)
    def preview_fname(self, path):
        """Return output file path with #### for padding.
        Deadline requires the path to be formatted with # in place of numbers.
        For example `/path/to/render.####.png`
        Args:
            path (str): path to rendered images
        Returns:
            str
        """
        self.log.debug("_ path: `{}`".format(path))
        if "%" in path:
            search_results = re.search(r"(%0)(\d)(d.)", path).groups()
            self.log.debug("_ search_results: `{}`".format(search_results))
            # rewrite the %0Nd token with N hashes as Deadline expects
            padding = "#" * int(search_results[1])
            return re.sub(r"%0\dd", padding, path)
        if "#" in path:
            self.log.debug("_ path: `{}`".format(path))
        return path
def expected_files(self,
instance,
path):
""" Create expected files in instance data
"""
if not instance.data.get("expectedFiles"):
instance.data["expectedFiles"] = []
dir = os.path.dirname(path)
file = os.path.basename(path)
if "#" in file:
pparts = file.split("#")
padding = "%0{}d".format(len(pparts) - 1)
file = pparts[0] + padding + pparts[-1]
if "%" not in file:
instance.data["expectedFiles"].append(path)
return
for i in range(self._frame_start, (self._frame_end + 1)):
instance.data["expectedFiles"].append(
os.path.join(dir, (file % i)).replace("\\", "/"))
def get_limit_groups(self):
"""Search for limit group nodes and return group name.
Limit groups will be defined as pairs in Nuke deadline submitter
presents where the key will be name of limit group and value will be
a list of plugin's node class names. Thus, when a plugin uses more
than one node, these will be captured and the triggered process
will add the appropriate limit group to the payload jobinfo attributes.
Returning:
list: captured groups list
"""
captured_groups = []
        for lg_name, list_node_class in self.limit_groups.items():
for node_class in list_node_class:
for node in nuke.allNodes(recurseGroups=True):
# ignore all nodes not member of defined class
if node.Class() not in node_class:
continue
# ignore all disabled nodes
if node["disable"].value():
continue
# add group name if not already added
if lg_name not in captured_groups:
captured_groups.append(lg_name)
return captured_groups
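The padding expansion in `expected_files()` above is easy to check in isolation; a minimal sketch with a made-up file name:

# "render.####.exr" -> "render.%04d.exr" -> concrete frame names
filename = "render.####.exr"
pparts = filename.split("#")
padding = "%0{}d".format(len(pparts) - 1)
filename = pparts[0] + padding + pparts[-1]
print([filename % i for i in range(1, 4)])
# ['render.0001.exr', 'render.0002.exr', 'render.0003.exr']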

File diff suppressed because it is too large

View file

@@ -0,0 +1,48 @@
import pyblish.api
from avalon.vendor import requests
from openpype.plugin import contextplugin_should_run
import os
class ValidateDeadlineConnection(pyblish.api.ContextPlugin):
"""Validate Deadline Web Service is running"""
label = "Validate Deadline Web Service"
order = pyblish.api.ValidatorOrder
hosts = ["maya", "nuke"]
families = ["renderlayer"]
def process(self, context):
# Workaround bug pyblish-base#250
if not contextplugin_should_run(self, context):
return
deadline_url = (
context.data["system_settings"]
["modules"]
["deadline"]
["DEADLINE_REST_URL"]
)
# Check response
response = self._requests_get(deadline_url)
assert response.ok, "Response must be ok"
assert response.text.startswith("Deadline Web Service "), (
"Web service did not respond with 'Deadline Web Service'"
)
def _requests_get(self, *args, **kwargs):
""" Wrapper for requests, disabling SSL certificate validation if
DONT_VERIFY_SSL environment variable is found. This is useful when
Deadline or Muster server are running with self-signed certificates
and their certificate is not added to trusted certificates on
client machines.
WARNING: disabling SSL certificate validation is defeating one line
of defense SSL is providing and it is not recommended.
"""
        if 'verify' not in kwargs:
            # verify unless OPENPYPE_DONT_VERIFY_SSL is set in the environment
            kwargs['verify'] = os.getenv("OPENPYPE_DONT_VERIFY_SSL") is None
return requests.get(*args, **kwargs)
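The verify toggle above reduces to one expression; a minimal sketch of the behaviour it encodes (assuming the fixed expression used in `_requests_get()` above):

import os

# SSL verification stays on while the variable is unset...
os.environ.pop("OPENPYPE_DONT_VERIFY_SSL", None)
print(os.getenv("OPENPYPE_DONT_VERIFY_SSL") is None)  # True -> verify
# ...and is switched off once it is set to any value
os.environ["OPENPYPE_DONT_VERIFY_SSL"] = "1"
print(os.getenv("OPENPYPE_DONT_VERIFY_SSL") is None)  # False -> no verify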