Merge branch 'develop' into feature/1698-nuke-prerender-frame-range-by-default

Jakub Jezek 2021-06-15 14:08:40 +02:00
commit 07c62d7e49
22 changed files with 645 additions and 144 deletions

3
.gitignore vendored
View file

@ -97,4 +97,5 @@ website/.docusaurus
# Poetry
########
.poetry/
.poetry/
.python-version

View file

@ -115,7 +115,9 @@ def extractenvironments(output_json_path, project, asset, task, app):
@main.command()
@click.argument("paths", nargs=-1)
@click.option("-d", "--debug", is_flag=True, help="Print debug messages")
def publish(debug, paths):
@click.option("-t", "--targets", help="Targets module", default=None,
multiple=True)
def publish(debug, paths, targets):
"""Start CLI publishing.
Publish collects json from paths provided as an argument.
@ -123,7 +125,7 @@ def publish(debug, paths):
"""
if debug:
os.environ['OPENPYPE_DEBUG'] = '3'
PypeCommands.publish(list(paths))
PypeCommands.publish(list(paths), targets)
@main.command()
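Because --targets is declared with multiple=True, click collects every occurrence of the flag into a tuple before calling publish(), so the flag can be repeated. A minimal sketch of that behaviour, using a standalone command and hypothetical paths (not the real main group):

    import click

    @click.command()
    @click.argument("paths", nargs=-1)
    @click.option("-t", "--targets", multiple=True, default=None)
    def publish(paths, targets):
        # `publish a.json -t deadline -t local` -> targets == ("deadline", "local")
        # when the flag is omitted, targets is an empty tuple
        click.echo("paths={} targets={}".format(list(paths), list(targets)))

    if __name__ == "__main__":
        publish()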

View file

@ -41,10 +41,10 @@ class PrecollectInstances(pyblish.api.ContextPlugin):
# process all selected timeline track items
for track_item in selected_timeline_items:
data = {}
clip_name = track_item.name()
source_clip = track_item.source()
self.log.debug("clip_name: {}".format(clip_name))
# get clip's subtracks and annotations
annotations = self.clip_annotations(source_clip)
@ -128,7 +128,7 @@ class PrecollectInstances(pyblish.api.ContextPlugin):
"_ instance.data: {}".format(pformat(instance.data)))
if not with_audio:
return
continue
# create audio subset instance
self.create_audio_instance(context, **data)

View file

@ -81,17 +81,18 @@ class PreCollectNukeInstances(pyblish.api.ContextPlugin):
if target == "Use existing frames":
# Local rendering
self.log.info("flagged for no render")
families.append(family)
elif target == "Local":
# Local rendering
self.log.info("flagged for local render")
families.append("{}.local".format(family))
family = families_ak.lower()
elif target == "On farm":
# Farm rendering
self.log.info("flagged for farm render")
instance.data["transfer"] = False
families.append("{}.farm".format(family))
family = families_ak.lower()
family = families_ak.lower()
node.begin()
for i in nuke.allNodes():

View file

@ -1,6 +1,5 @@
import os
import tempfile
import subprocess
import pyblish.api
import openpype.api
import openpype.lib
@ -77,30 +76,37 @@ class ExtractThumbnailSP(pyblish.api.InstancePlugin):
ffmpeg_args = self.ffmpeg_args or {}
jpeg_items = []
jpeg_items.append("\"{}\"".format(ffmpeg_path))
# override file if already exists
jpeg_items.append("-y")
jpeg_items = [
"\"{}\"".format(ffmpeg_path),
# override file if already exists
"-y"
]
# add input filters from presets
jpeg_items.extend(ffmpeg_args.get("input") or [])
# input file
jpeg_items.append("-i {}".format(full_input_path))
jpeg_items.append("-i \"{}\"".format(full_input_path))
# extract only single file
jpeg_items.append("-vframes 1")
jpeg_items.append("-frames:v 1")
# Add black background for transparent images
jpeg_items.append((
"-filter_complex"
" \"color=black,format=rgb24[c]"
";[c][0]scale2ref[c][i]"
";[c][i]overlay=format=auto:shortest=1,setsar=1\""
))
jpeg_items.extend(ffmpeg_args.get("output") or [])
# output file
jpeg_items.append(full_thumbnail_path)
jpeg_items.append("\"{}\"".format(full_thumbnail_path))
subprocess_jpeg = " ".join(jpeg_items)
# run subprocess
self.log.debug("Executing: {}".format(subprocess_jpeg))
subprocess.Popen(
subprocess_jpeg,
stdout=subprocess.PIPE,
shell=True
openpype.api.run_subprocess(
subprocess_jpeg, shell=True, logger=self.log
)
# remove thumbnail key from origin repre
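For orientation, the arguments assembled above produce a single ffmpeg call roughly like the sketch below; the paths and the -apply_trc input filter are hypothetical stand-ins for what the project settings provide:

    # Rough sketch of the command ExtractThumbnailSP assembles (paths hypothetical).
    ffmpeg_path = "/usr/bin/ffmpeg"
    full_input_path = "/tmp/review_input.mov"
    full_thumbnail_path = "/tmp/thumbnail.jpg"

    jpeg_items = [
        '"{}"'.format(ffmpeg_path),
        "-y",                         # overwrite the output if it already exists
        "-apply_trc gamma22",         # example input filter taken from settings
        '-i "{}"'.format(full_input_path),
        "-frames:v 1",                # extract a single frame only
        # composite transparent sources over a black background
        '-filter_complex "color=black,format=rgb24[c]'
        ";[c][0]scale2ref[c][i]"
        ';[c][i]overlay=format=auto:shortest=1,setsar=1"',
        '"{}"'.format(full_thumbnail_path),
    ]
    print(" ".join(jpeg_items))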

View file

@ -18,6 +18,48 @@ import pyblish.api
from .abstract_metaplugins import AbstractMetaInstancePlugin
def requests_post(*args, **kwargs):
"""Wrap request post method.
Disabling SSL certificate validation if ``DONT_VERIFY_SSL`` environment
variable is found. This is useful when Deadline or Muster server are
running with self-signed certificates and their certificate is not
added to trusted certificates on client machines.
Warning:
Disabling SSL certificate validation is defeating one line
of defense SSL is providing and it is not recommended.
"""
if 'verify' not in kwargs:
kwargs['verify'] = False if os.getenv("OPENPYPE_DONT_VERIFY_SSL",
True) else True # noqa
# add 10sec timeout before bailing out
kwargs['timeout'] = 10
return requests.post(*args, **kwargs)
def requests_get(*args, **kwargs):
"""Wrap request get method.
Disabling SSL certificate validation if ``DONT_VERIFY_SSL`` environment
variable is found. This is useful when Deadline or Muster server are
running with self-signed certificates and their certificate is not
added to trusted certificates on client machines.
Warning:
Disabling SSL certificate validation is defeating one line
of defense SSL is providing and it is not recommended.
"""
if 'verify' not in kwargs:
kwargs['verify'] = False if os.getenv("OPENPYPE_DONT_VERIFY_SSL",
True) else True # noqa
# add 10sec timeout before bailing out
kwargs['timeout'] = 10
return requests.get(*args, **kwargs)
@attr.s
class DeadlineJobInfo(object):
"""Mapping of all Deadline *JobInfo* attributes.
@ -579,7 +621,7 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin):
"""
url = "{}/api/jobs".format(self._deadline_url)
response = self._requests_post(url, json=payload)
response = requests_post(url, json=payload)
if not response.ok:
self.log.error("Submission failed!")
self.log.error(response.status_code)
@ -592,41 +634,3 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin):
self._instance.data["deadlineSubmissionJob"] = result
return result["_id"]
def _requests_post(self, *args, **kwargs):
"""Wrap request post method.
Disabling SSL certificate validation if ``DONT_VERIFY_SSL`` environment
variable is found. This is useful when Deadline or Muster server are
running with self-signed certificates and their certificate is not
added to trusted certificates on client machines.
Warning:
Disabling SSL certificate validation is defeating one line
of defense SSL is providing and it is not recommended.
"""
if 'verify' not in kwargs:
kwargs['verify'] = False if os.getenv("OPENPYPE_DONT_VERIFY_SSL", True) else True # noqa
# add 10sec timeout before bailing out
kwargs['timeout'] = 10
return requests.post(*args, **kwargs)
def _requests_get(self, *args, **kwargs):
"""Wrap request get method.
Disabling SSL certificate validation if ``DONT_VERIFY_SSL`` environment
variable is found. This is useful when Deadline or Muster server are
running with self-signed certificates and their certificate is not
added to trusted certificates on client machines.
Warning:
Disabling SSL certificate validation is defeating one line
of defense SSL is providing and it is not recommended.
"""
if 'verify' not in kwargs:
kwargs['verify'] = False if os.getenv("OPENPYPE_DONT_VERIFY_SSL", True) else True # noqa
# add 10sec timeout before bailing out
kwargs['timeout'] = 10
return requests.get(*args, **kwargs)
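The private _requests_post/_requests_get methods removed here are superseded by the module-level requests_post/requests_get wrappers added at the top of the file, so other Deadline plugins can share the same SSL and timeout handling. A minimal sketch of the intended reuse, with a hypothetical Deadline URL (the real import is visible in the validator below):

    # Sketch only: reusing the module-level wrapper from another plugin.
    from openpype.lib.abstract_submit_deadline import requests_get

    deadline_url = "https://deadline.example.com:4433"   # hypothetical
    response = requests_get("{}/api/jobs?JobID={}".format(deadline_url, "abc123"))
    if response.ok:
        jobs = response.json()   # list of job dicts returned by Deadline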

View file

@ -231,7 +231,8 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
args = [
'publish',
roothless_metadata_path
roothless_metadata_path,
"--targets {}".format("deadline")
]
# Generate the payload for Deadline submission
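With the extra argument, the publish job that runs on the farm invokes the headless CLI roughly as sketched below (metadata path hypothetical), which in turn registers the deadline target via PypeCommands.publish:

    # Hypothetical example of the argument list built for the farm publish job.
    args = [
        "publish",
        "/path/to/roothless_metadata.json",    # hypothetical metadata path
        "--targets {}".format("deadline"),
    ]
    # roughly equivalent CLI call:
    #   openpype_console publish /path/to/roothless_metadata.json --targets deadline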

View file

@ -0,0 +1,186 @@
import os
import json
import pyblish.api
from avalon.vendor import requests
from openpype.api import get_system_settings
from openpype.lib.abstract_submit_deadline import requests_get
from openpype.lib.delivery import collect_frames
class ValidateExpectedFiles(pyblish.api.InstancePlugin):
"""Compare rendered and expected files"""
label = "Validate rendered files from Deadline"
order = pyblish.api.ValidatorOrder
families = ["render"]
targets = ["deadline"]
# check if the actual frame range on the render job wasn't different
# (case when an artist wants to render only a subset of frames)
allow_user_override = True
def process(self, instance):
frame_list = self._get_frame_list(instance.data["render_job_id"])
for repre in instance.data["representations"]:
expected_files = self._get_expected_files(repre)
staging_dir = repre["stagingDir"]
existing_files = self._get_existing_files(staging_dir)
expected_non_existent = expected_files.difference(
existing_files)
if len(expected_non_existent) != 0:
self.log.info("Some expected files missing {}".format(
expected_non_existent))
if self.allow_user_override:
file_name_template, frame_placeholder = \
self._get_file_name_template_and_placeholder(
expected_files)
if not file_name_template:
return
real_expected_rendered = self._get_real_render_expected(
file_name_template,
frame_placeholder,
frame_list)
real_expected_non_existent = \
real_expected_rendered.difference(existing_files)
if len(real_expected_non_existent) != 0:
raise RuntimeError("Still missing some files {}".
format(real_expected_non_existent))
self.log.info("Update range from actual job range")
repre["files"] = sorted(list(real_expected_rendered))
else:
raise RuntimeError("Some expected files missing {}".format(
expected_non_existent))
def _get_frame_list(self, original_job_id):
"""
Returns list of frame ranges from all render jobs.
Render job might have been requeued, so the job_id in metadata.json can be invalid;
GlobalJobPreload injects the current ids into RENDER_JOB_IDS.
Args:
original_job_id (str)
Returns:
(list)
"""
all_frame_lists = []
render_job_ids = os.environ.get("RENDER_JOB_IDS")
if render_job_ids:
render_job_ids = render_job_ids.split(',')
else: # fallback
render_job_ids = [original_job_id]
for job_id in render_job_ids:
job_info = self._get_job_info(job_id)
frame_list = job_info["Props"]["Frames"]
if frame_list:
all_frame_lists.extend(frame_list.split(','))
return all_frame_lists
def _get_real_render_expected(self, file_name_template, frame_placeholder,
frame_list):
"""
Calculates list of names of expected rendered files.
Might be different from the job's expected files if the user explicitly and
manually changes the frame list on the Deadline job.
"""
real_expected_rendered = set()
src_padding_exp = "%0{}d".format(len(frame_placeholder))
for frames in frame_list:
if '-' not in frames: # single frame
frames = "{}-{}".format(frames, frames)
start, end = frames.split('-')
for frame in range(int(start), int(end) + 1):
ren_name = file_name_template.replace(
frame_placeholder, src_padding_exp % frame)
real_expected_rendered.add(ren_name)
return real_expected_rendered
def _get_file_name_template_and_placeholder(self, files):
"""Return the file name template with frame digits replaced by '#', plus that placeholder"""
sources_and_frames = collect_frames(files)
file_name_template = frame_placeholder = None
for file_name, frame in sources_and_frames.items():
frame_placeholder = "#" * len(frame)
file_name_template = os.path.basename(
file_name.replace(frame, frame_placeholder))
break
return file_name_template, frame_placeholder
def _get_job_info(self, job_id):
"""
Calls Deadline for the actual job info of 'job_id'.
Might be different from the job info saved in metadata.json if the user
manually changes the job before/during rendering.
"""
deadline_url = (
get_system_settings()
["modules"]
["deadline"]
["DEADLINE_REST_URL"]
)
assert deadline_url, "Requires DEADLINE_REST_URL"
url = "{}/api/jobs?JobID={}".format(deadline_url, job_id)
try:
response = requests_get(url)
except requests.exceptions.ConnectionError:
print("Deadline is not accessible at {}".format(deadline_url))
# self.log("Deadline is not accessible at {}".format(deadline_url))
return {}
if not response.ok:
self.log.error("Submission failed!")
self.log.error(response.status_code)
self.log.error(response.content)
raise RuntimeError(response.text)
json_content = response.json()
if json_content:
return json_content.pop()
return {}
def _parse_metadata_json(self, json_path):
if not os.path.exists(json_path):
msg = "Metadata file {} doesn't exist".format(json_path)
raise RuntimeError(msg)
with open(json_path) as fp:
try:
return json.load(fp)
except Exception as exc:
self.log.error(
"Error loading json: "
"{} - Exception: {}".format(json_path, exc)
)
def _get_existing_files(self, out_dir):
"""Returns set of existing file names from 'out_dir'"""
existing_files = set()
for file_name in os.listdir(out_dir):
existing_files.add(file_name)
return existing_files
def _get_expected_files(self, repre):
"""Returns set of file names from metadata.json"""
expected_files = set()
for file_name in repre["files"]:
expected_files.add(file_name)
return expected_files
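The heart of the validator is the expansion in _get_real_render_expected(): the template produced by collect_frames() (frame digits replaced by '#') is combined with the frame ranges Deadline reports for the job. A self-contained sketch of that expansion with hypothetical names:

    # Hypothetical walk-through of _get_real_render_expected().
    file_name_template = "shot010_beauty.####.exr"     # from collect_frames()
    frame_placeholder = "####"
    frame_list = ["1001-1003", "1010"]                  # from _get_frame_list()

    expected = set()
    padding = "%0{}d".format(len(frame_placeholder))    # -> "%04d"
    for frames in frame_list:
        if "-" not in frames:                            # single frame entry
            frames = "{0}-{0}".format(frames)
        start, end = frames.split("-")
        for frame in range(int(start), int(end) + 1):
            expected.add(
                file_name_template.replace(frame_placeholder, padding % frame))

    # expected == {"shot010_beauty.1001.exr", "shot010_beauty.1002.exr",
    #              "shot010_beauty.1003.exr", "shot010_beauty.1010.exr"}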

View file

@ -1,6 +1,6 @@
from Qt import QtWidgets, QtCore
from .widgets import LogsWidget, OutputWidget
from avalon import style
from openpype import style
class LogsWindow(QtWidgets.QWidget):
@ -14,7 +14,7 @@ class LogsWindow(QtWidgets.QWidget):
main_layout = QtWidgets.QHBoxLayout()
log_splitter = QtWidgets.QSplitter()
log_splitter = QtWidgets.QSplitter(self)
log_splitter.setOrientation(QtCore.Qt.Horizontal)
log_splitter.addWidget(logs_widget)
log_splitter.addWidget(log_detail)

View file

@ -83,7 +83,6 @@ class CustomCombo(QtWidgets.QWidget):
self.setLayout(layout)
# toolmenu.selection_changed.connect(self.on_selection_changed)
toolmenu.selection_changed.connect(self.selection_changed)
self.toolbutton = toolbutton
@ -119,7 +118,6 @@ class LogsWidget(QtWidgets.QWidget):
filter_layout = QtWidgets.QHBoxLayout()
# user_filter = SearchComboBox(self, "Users")
user_filter = CustomCombo("Users", self)
users = model.dbcon.distinct("username")
user_filter.populate(users)
@ -128,21 +126,18 @@ class LogsWidget(QtWidgets.QWidget):
proxy_model.update_users_filter(users)
level_filter = CustomCombo("Levels", self)
# levels = [(level, True) for level in model.dbcon.distinct("level")]
levels = model.dbcon.distinct("level")
level_filter.addItems(levels)
level_filter.selection_changed.connect(self._level_changed)
detail_widget.update_level_filter(levels)
spacer = QtWidgets.QWidget()
icon = qtawesome.icon("fa.refresh", color="white")
refresh_btn = QtWidgets.QPushButton(icon, "")
filter_layout.addWidget(user_filter)
filter_layout.addWidget(level_filter)
filter_layout.addWidget(spacer, 1)
filter_layout.addStretch(1)
filter_layout.addWidget(refresh_btn)
view = QtWidgets.QTreeView(self)

View file

@ -40,6 +40,7 @@ class CollectOcioSubsetResources(pyblish.api.InstancePlugin):
otio_clip = instance.data["otioClip"]
otio_avalable_range = otio_clip.available_range()
media_fps = otio_avalable_range.start_time.rate
available_duration = otio_avalable_range.duration.value
# get available range trimmed with processed retimes
retimed_attributes = editorial.get_media_range_with_retimes(
@ -68,6 +69,8 @@ class CollectOcioSubsetResources(pyblish.api.InstancePlugin):
a_frame_start_h, (a_frame_end_h - a_frame_start_h + 1),
media_fps
)
trimmed_duration = trimmed_media_range_h.duration.value
self.log.debug("trimmed_media_range_h: {}".format(
trimmed_media_range_h))
self.log.debug("a_frame_start_h: {}".format(
@ -150,12 +153,18 @@ class CollectOcioSubsetResources(pyblish.api.InstancePlugin):
repre = self._create_representation(
frame_start, frame_end, collection=collection)
else:
_trim = False
dirname, filename = os.path.split(media_ref.target_url)
self.staging_dir = dirname
if trimmed_duration < available_duration:
self.log.debug("Ready for Trimming")
instance.data["families"].append("trim")
instance.data["otioTrimmingRange"] = trimmed_media_range_h
_trim = True
self.log.debug(filename)
repre = self._create_representation(
frame_start, frame_end, file=filename)
frame_start, frame_end, file=filename, trim=_trim)
if repre:
# add representation to instance data
@ -196,7 +205,7 @@ class CollectOcioSubsetResources(pyblish.api.InstancePlugin):
"frameStart": start,
"frameEnd": end,
})
return representation_data
if kwargs.get("file"):
file = kwargs.get("file")
ext = os.path.splitext(file)[-1]
@ -207,4 +216,9 @@ class CollectOcioSubsetResources(pyblish.api.InstancePlugin):
"frameStart": start,
"frameEnd": end,
})
return representation_data
if kwargs.get("trim") is True:
representation_data.update({
"tags": ["trim"]
})
return representation_data

View file

@ -87,11 +87,14 @@ class CollectRenderedFiles(pyblish.api.ContextPlugin):
instance = self._context.create_instance(
instance_data.get("subset")
)
self.log.info("Filling stagignDir...")
self.log.info("Filling stagingDir...")
self._fill_staging_dir(instance_data, anatomy)
instance.data.update(instance_data)
# stash render job id for later validation
instance.data["render_job_id"] = data.get("job").get("_id")
representations = []
for repre_data in instance_data.get("representations") or []:
self._fill_staging_dir(repre_data, anatomy)

View file

@ -0,0 +1,125 @@
"""
Requires:
instance -> otioTrimmingRange
instance -> representations
"""
import os
from pyblish import api
import openpype
from copy import deepcopy
class ExtractOTIOTrimmingVideo(openpype.api.Extractor):
"""
Trim a video file that is longer than the required length
"""
order = api.ExtractorOrder
label = "Extract OTIO trim longer video"
families = ["trim"]
hosts = ["resolve", "hiero"]
def process(self, instance):
self.staging_dir = self.staging_dir(instance)
otio_trim_range = instance.data["otioTrimmingRange"]
representations = instance.data["representations"]
self.log.debug("otio_trim_range: {}".format(otio_trim_range))
self.log.debug("self.staging_dir: {}".format(self.staging_dir))
# get corresponding representation
for _repre in representations:
if "trim" not in _repre.get("tags", []):
continue
input_file = _repre["files"]
input_file_path = os.path.normpath(os.path.join(
_repre["stagingDir"], input_file
))
self.log.debug("input_file_path: {}".format(input_file_path))
# trim via ffmpeg
new_file = self._ffmpeg_trim_seqment(
input_file_path, otio_trim_range)
# prepare new representation data
repre_data = deepcopy(_repre)
# remove tags as we don't need them
repre_data.pop("tags")
repre_data["stagingDir"] = self.staging_dir
repre_data["files"] = new_file
# remove `trim` tagged representation
representations.remove(_repre)
representations.append(repre_data)
self.log.debug(repre_data)
self.log.debug("representations: {}".format(representations))
def _ffmpeg_trim_seqment(self, input_file_path, otio_range):
"""
Trim a segment of a video file.
Uses ffmpeg to trim the video to the desired length.
Args:
input_file_path (str): path string
otio_range (opentime.TimeRange): range to trim to
"""
# get rendering app path
ffmpeg_path = openpype.lib.get_ffmpeg_tool_path("ffmpeg")
# create path to destination
output_path = self._get_ffmpeg_output(input_file_path)
# start command list
command = ['"{}"'.format(ffmpeg_path)]
video_path = input_file_path
frame_start = otio_range.start_time.value
input_fps = otio_range.start_time.rate
frame_duration = (otio_range.duration.value + 1)
sec_start = openpype.lib.frames_to_secons(frame_start, input_fps)
sec_duration = openpype.lib.frames_to_secons(frame_duration, input_fps)
# form the command for trimming the video
command.extend([
"-ss {}".format(sec_start),
"-t {}".format(sec_duration),
"-i \"{}\"".format(video_path),
"-c copy",
output_path
])
# execute
self.log.debug("Executing: {}".format(" ".join(command)))
output = openpype.api.run_subprocess(
" ".join(command), logger=self.log
)
self.log.debug("Output: {}".format(output))
return os.path.basename(output_path)
def _get_ffmpeg_output(self, file_path):
"""
Return the ffmpeg output file path.
Args:
    file_path (str): input file path
Returns:
    str: path to the output file
"""
basename = os.path.basename(file_path)
name, ext = os.path.splitext(basename)
output_file = "{}_{}{}".format(
name,
"trimmed",
ext
)
# create path to destination
return os.path.join(self.staging_dir, output_file)
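Put together, _ffmpeg_trim_seqment() converts the OTIO trim range to seconds and stream-copies the segment; the resulting call looks roughly like the sketch below (the 25 fps clip and paths are hypothetical):

    # Hypothetical values: trim 100 frames starting at frame 50 of a 25 fps clip.
    ffmpeg_path = "/usr/bin/ffmpeg"               # via get_ffmpeg_tool_path("ffmpeg")
    sec_start = 50 / 25.0                         # frames_to_secons(frame_start, fps)
    sec_duration = (100 + 1) / 25.0               # frames_to_secons(duration + 1, fps)

    command = [
        '"{}"'.format(ffmpeg_path),
        "-ss {}".format(sec_start),               # seek to the trim start (seconds)
        "-t {}".format(sec_duration),             # keep only the trimmed duration
        '-i "{}"'.format("/stage/plate.mov"),     # hypothetical input
        "-c copy",                                # stream copy, no re-encode
        "/stage/plate_trimmed.mov",               # output written to the staging dir
    ]
    print(" ".join(command))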

View file

@ -46,16 +46,18 @@ class PypeCommands:
standalonepublish.main()
@staticmethod
def publish(paths):
def publish(paths, targets=None):
"""Start headless publishing.
Publish uses json from the passed paths argument.
Args:
paths (list): Paths to jsons.
targets (list): Pyblish targets to register
(to choose a validator, for example)
Raises:
RuntimeError: When there is no pathto process.
RuntimeError: When there is no path to process.
"""
if not any(paths):
raise RuntimeError("No publish paths specified")
@ -82,6 +84,10 @@ class PypeCommands:
pyblish.api.register_target("filesequence")
pyblish.api.register_host("shell")
if targets:
for target in targets:
pyblish.api.register_target(target)
os.environ["OPENPYPE_PUBLISH_DATA"] = os.pathsep.join(paths)
log.info("Running publish ...")

View file

@ -1,5 +1,12 @@
{
"publish": {
"ValidateExpectedFiles": {
"enabled": true,
"active": true,
"families": ["render"],
"targets": ["deadline"],
"allow_user_override": true
},
"MayaSubmitDeadline": {
"enabled": true,
"optional": false,

View file

@ -154,7 +154,7 @@
"ExtractThumbnailSP": {
"ffmpeg_args": {
"input": [
"-gamma 2.2"
"-apply_trc gamma22"
],
"output": []
}

View file

@ -11,6 +11,47 @@
"key": "publish",
"label": "Publish plugins",
"children": [
{
"type": "dict",
"collapsible": true,
"key": "ValidateExpectedFiles",
"label": "Validate Expected Files",
"checkbox_key": "enabled",
"children": [
{
"type": "boolean",
"key": "enabled",
"label": "Enabled"
},
{
"type": "boolean",
"key": "active",
"label": "Active"
},
{
"type": "label",
"label": "Validate if all expected files were rendered"
},
{
"type": "boolean",
"key": "allow_user_override",
"object_type": "text",
"label": "Allow user change frame range"
},
{
"type": "list",
"key": "families",
"object_type": "text",
"label": "Trigger on families"
},
{
"type": "list",
"key": "targets",
"object_type": "text",
"label": "Trigger for plugins"
}
]
},
{
"type": "dict",
"collapsible": true,

View file

@ -97,6 +97,29 @@ QToolButton:disabled {
background: {color:bg-buttons-disabled};
}
QToolButton[popupMode="1"] {
/* make way for the popup button */
padding-right: 20px;
border: 1px solid {color:bg-buttons};
}
QToolButton::menu-button {
width: 16px;
/* Set border only of left side. */
border: 1px solid transparent;
border-left: 1px solid {color:bg-buttons};
}
QToolButton::menu-arrow {
/* Offset arrow a little bit to center. */
left: 1px; top: 1px;
}
QToolButton::menu-arrow:open {
/* Don't offset arrow on open. */
left: 0px; top: 0px;
}
/* QMenu */
QMenu {
border: 1px solid #555555;

View file

@ -28,6 +28,8 @@ class EnvironmentsView(QtWidgets.QTreeView):
def __init__(self, parent=None):
super(EnvironmentsView, self).__init__(parent)
self._scroll_enabled = False
model = QtGui.QStandardItemModel()
env = os.environ.copy()
@ -112,8 +114,11 @@ class EnvironmentsView(QtWidgets.QTreeView):
else:
return super(EnvironmentsView, self).keyPressEvent(event)
def set_scroll_enabled(self, value):
self._scroll_enabled = value
def wheelEvent(self, event):
if not self.hasFocus():
if not self._scroll_enabled:
event.ignore()
return
return super(EnvironmentsView, self).wheelEvent(event)
@ -200,9 +205,13 @@ class CollapsibleWidget(QtWidgets.QWidget):
class PypeInfoWidget(QtWidgets.QWidget):
_resized = QtCore.Signal()
def __init__(self, parent=None):
super(PypeInfoWidget, self).__init__(parent)
self._scroll_at_bottom = False
self.setStyleSheet(style.load_stylesheet())
icon = QtGui.QIcon(resources.pype_icon_filepath())
@ -219,11 +228,39 @@ class PypeInfoWidget(QtWidgets.QWidget):
main_layout.addWidget(scroll_area, 1)
main_layout.addWidget(self._create_btns_section(), 0)
scroll_area.verticalScrollBar().valueChanged.connect(
self._on_area_scroll
)
self._resized.connect(self._on_resize)
self.resize(740, 540)
self.scroll_area = scroll_area
self.info_widget = info_widget
def _on_area_scroll(self, value):
vertical_bar = self.scroll_area.verticalScrollBar()
self._scroll_at_bottom = vertical_bar.maximum() == vertical_bar.value()
self.info_widget.set_scroll_enabled(self._scroll_at_bottom)
def _on_resize(self):
if not self._scroll_at_bottom:
return
vertical_bar = self.scroll_area.verticalScrollBar()
vertical_bar.setValue(vertical_bar.maximum())
def resizeEvent(self, event):
super(PypeInfoWidget, self).resizeEvent(event)
self._resized.emit()
self.info_widget.set_content_height(
self.scroll_area.height()
)
def showEvent(self, event):
super(PypeInfoWidget, self).showEvent(event)
self.info_widget.set_content_height(
self.scroll_area.height()
)
def _create_btns_section(self):
btns_widget = QtWidgets.QWidget(self)
btns_layout = QtWidgets.QHBoxLayout(btns_widget)
@ -282,6 +319,8 @@ class PypeInfoSubWidget(QtWidgets.QWidget):
def __init__(self, parent=None):
super(PypeInfoSubWidget, self).__init__(parent)
self.env_view = None
main_layout = QtWidgets.QVBoxLayout(self)
main_layout.setContentsMargins(0, 0, 0, 0)
main_layout.setAlignment(QtCore.Qt.AlignTop)
@ -293,6 +332,14 @@ class PypeInfoSubWidget(QtWidgets.QWidget):
main_layout.addWidget(self._create_separator(), 0)
main_layout.addWidget(self._create_environ_widget(), 1)
def set_content_height(self, height):
if self.env_view:
self.env_view.setMinimumHeight(height)
def set_scroll_enabled(self, value):
if self.env_view:
self.env_view.set_scroll_enabled(value)
def _create_separator(self):
separator_widget = QtWidgets.QWidget(self)
separator_widget.setObjectName("Separator")
@ -369,9 +416,10 @@ class PypeInfoSubWidget(QtWidgets.QWidget):
env_view = EnvironmentsView(env_widget)
env_view.setMinimumHeight(300)
env_widget.set_content_widget(env_view)
self.env_view = env_view
return env_widget
def _create_openpype_info_widget(self):

150
poetry.lock generated
View file

@ -11,7 +11,7 @@ develop = false
type = "git"
url = "https://github.com/pypeclub/acre.git"
reference = "master"
resolved_reference = "9bf19573acb9328a3e5f5de96c619060177795cf"
resolved_reference = "efc1b8faa8f84568538b936688ae6f7604dd194c"
[[package]]
name = "aiohttp"
@ -307,7 +307,7 @@ trio = ["trio (>=0.14.0)", "sniffio (>=1.1)"]
[[package]]
name = "docutils"
version = "0.17.1"
version = "0.16"
description = "Docutils -- Python Documentation Utilities"
category = "dev"
optional = false
@ -398,7 +398,7 @@ typing-extensions = {version = ">=3.7.4.0", markers = "python_version < \"3.8\""
[[package]]
name = "google-api-core"
version = "1.29.0"
version = "1.30.0"
description = "Google API client core library"
category = "main"
optional = false
@ -436,7 +436,7 @@ uritemplate = ">=3.0.0,<4dev"
[[package]]
name = "google-auth"
version = "1.30.1"
version = "1.31.0"
description = "Google Authentication Library"
category = "main"
optional = false
@ -449,7 +449,7 @@ rsa = {version = ">=3.1.4,<5", markers = "python_version >= \"3.6\""}
six = ">=1.9.0"
[package.extras]
aiohttp = ["aiohttp (>=3.6.2,<4.0.0dev)"]
aiohttp = ["requests (>=2.20.0,<3.0.0dev)", "aiohttp (>=3.6.2,<4.0.0dev)"]
pyopenssl = ["pyopenssl (>=20.0.0)"]
reauth = ["pyu2f (>=0.1.5)"]
@ -509,7 +509,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
[[package]]
name = "importlib-metadata"
version = "4.4.0"
version = "4.5.0"
description = "Read metadata from Python packages"
category = "main"
optional = false
@ -759,7 +759,7 @@ python-versions = "*"
[[package]]
name = "protobuf"
version = "3.17.2"
version = "3.17.3"
description = "Protocol Buffers"
category = "main"
optional = false
@ -904,7 +904,7 @@ six = "*"
[[package]]
name = "pyobjc-core"
version = "7.2"
version = "7.3"
description = "Python<->ObjC Interoperability Module"
category = "main"
optional = false
@ -912,26 +912,26 @@ python-versions = ">=3.6"
[[package]]
name = "pyobjc-framework-cocoa"
version = "7.2"
version = "7.3"
description = "Wrappers for the Cocoa frameworks on macOS"
category = "main"
optional = false
python-versions = ">=3.6"
[package.dependencies]
pyobjc-core = ">=7.2"
pyobjc-core = ">=7.3"
[[package]]
name = "pyobjc-framework-quartz"
version = "7.2"
version = "7.3"
description = "Wrappers for the Quartz frameworks on macOS"
category = "main"
optional = false
python-versions = ">=3.6"
[package.dependencies]
pyobjc-core = ">=7.2"
pyobjc-framework-Cocoa = ">=7.2"
pyobjc-core = ">=7.3"
pyobjc-framework-Cocoa = ">=7.3"
[[package]]
name = "pyparsing"
@ -1161,6 +1161,18 @@ category = "main"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*"
[[package]]
name = "slack-sdk"
version = "3.6.0"
description = "The Slack API Platform SDK for Python"
category = "main"
optional = false
python-versions = ">=3.6.0"
[package.extras]
optional = ["aiodns (>1.0)", "aiohttp (>=3.7.3,<4)", "boto3 (<=2)", "SQLAlchemy (>=1,<2)", "websockets (>=9.1,<10)", "websocket-client (>=0.57,<1)"]
testing = ["pytest (>=5.4,<6)", "pytest-asyncio (<1)", "Flask-Sockets (>=0.2,<1)", "pytest-cov (>=2,<3)", "codecov (>=2,<3)", "flake8 (>=3,<4)", "black (==21.5b1)", "psutil (>=5,<6)", "databases (>=0.3)"]
[[package]]
name = "smmap"
version = "4.0.0"
@ -1230,13 +1242,14 @@ sphinx = "*"
[[package]]
name = "sphinx-rtd-theme"
version = "0.5.1"
version = "0.5.2"
description = "Read the Docs theme for Sphinx"
category = "dev"
optional = false
python-versions = "*"
[package.dependencies]
docutils = "<0.17"
sphinx = "*"
[package.extras]
@ -1453,7 +1466,7 @@ testing = ["pytest (>=4.6)", "pytest-checkdocs (>=1.2.3)", "pytest-flake8", "pyt
[metadata]
lock-version = "1.1"
python-versions = "3.7.*"
content-hash = "abfb3fda5422d9119fb942e66d82fad5c7fae46d0ed240876c3a61333e15febe"
content-hash = "8875d530ae66f9763b5b0cb84d9d35edc184ef5c141b63d38bf1ff5a1226e556"
[metadata.files]
acre = []
@ -1714,8 +1727,8 @@ dnspython = [
{file = "dnspython-2.1.0.zip", hash = "sha256:e4a87f0b573201a0f3727fa18a516b055fd1107e0e5477cded4a2de497df1dd4"},
]
docutils = [
{file = "docutils-0.17.1-py2.py3-none-any.whl", hash = "sha256:cf316c8370a737a022b72b56874f6602acf974a37a9fba42ec2876387549fc61"},
{file = "docutils-0.17.1.tar.gz", hash = "sha256:686577d2e4c32380bb50cbb22f575ed742d58168cee37e99117a854bcd88f125"},
{file = "docutils-0.16-py2.py3-none-any.whl", hash = "sha256:0c5b78adfbf7762415433f5515cd5c9e762339e23369dbe8000d84a4bf4ab3af"},
{file = "docutils-0.16.tar.gz", hash = "sha256:c2de3a60e9e7d07be26b7f2b00ca0309c207e06c100f9cc2a94931fc75a478fc"},
]
enlighten = [
{file = "enlighten-1.10.1-py2.py3-none-any.whl", hash = "sha256:3d6c3eec8cf3eb626ee7b65eddc1b3e904d01f4547a2b9fe7f1da8892a0297e8"},
@ -1744,16 +1757,16 @@ gitpython = [
{file = "GitPython-3.1.17.tar.gz", hash = "sha256:ee24bdc93dce357630764db659edaf6b8d664d4ff5447ccfeedd2dc5c253f41e"},
]
google-api-core = [
{file = "google-api-core-1.29.0.tar.gz", hash = "sha256:dbe885011111a9afd9ea9a347db6a9c608e82e5c7648c1f7fabf2b4079a579b5"},
{file = "google_api_core-1.29.0-py2.py3-none-any.whl", hash = "sha256:f83118319d2a28806ec3399671cbe4af5351bd0dac54c40a0395da6162ebe324"},
{file = "google-api-core-1.30.0.tar.gz", hash = "sha256:0724d354d394b3d763bc10dfee05807813c5210f0bd9b8e2ddf6b6925603411c"},
{file = "google_api_core-1.30.0-py2.py3-none-any.whl", hash = "sha256:92cd9e9f366e84bfcf2524e34d2dc244906c645e731962617ba620da1620a1e0"},
]
google-api-python-client = [
{file = "google-api-python-client-1.12.8.tar.gz", hash = "sha256:f3b9684442eec2cfe9f9bb48e796ef919456b82142c7528c5fd527e5224f08bb"},
{file = "google_api_python_client-1.12.8-py2.py3-none-any.whl", hash = "sha256:3c4c4ca46b5c21196bec7ee93453443e477d82cbfa79234d1ce0645f81170eaf"},
]
google-auth = [
{file = "google-auth-1.30.1.tar.gz", hash = "sha256:044d81b1e58012f8ebc71cc134e191c1fa312f543f1fbc99973afe28c25e3228"},
{file = "google_auth-1.30.1-py2.py3-none-any.whl", hash = "sha256:b3ca7a8ff9ab3bdefee3ad5aefb11fc6485423767eee016f5942d8e606ca23fb"},
{file = "google-auth-1.31.0.tar.gz", hash = "sha256:154f7889c5d679a6f626f36adb12afbd4dbb0a9a04ec575d989d6ba79c4fd65e"},
{file = "google_auth-1.31.0-py2.py3-none-any.whl", hash = "sha256:6d47c79b5d09fbc7e8355fd9594cc4cf65fdde5d401c63951eaac4baa1ba2ae1"},
]
google-auth-httplib2 = [
{file = "google-auth-httplib2-0.1.0.tar.gz", hash = "sha256:a07c39fd632becacd3f07718dfd6021bf396978f03ad3ce4321d060015cc30ac"},
@ -1776,8 +1789,8 @@ imagesize = [
{file = "imagesize-1.2.0.tar.gz", hash = "sha256:b1f6b5a4eab1f73479a50fb79fcf729514a900c341d8503d62a62dbc4127a2b1"},
]
importlib-metadata = [
{file = "importlib_metadata-4.4.0-py3-none-any.whl", hash = "sha256:960d52ba7c21377c990412aca380bf3642d734c2eaab78a2c39319f67c6a5786"},
{file = "importlib_metadata-4.4.0.tar.gz", hash = "sha256:e592faad8de1bda9fe920cf41e15261e7131bcf266c30306eec00e8e225c1dd5"},
{file = "importlib_metadata-4.5.0-py3-none-any.whl", hash = "sha256:833b26fb89d5de469b24a390e9df088d4e52e4ba33b01dc5e0e4f41b81a16c00"},
{file = "importlib_metadata-4.5.0.tar.gz", hash = "sha256:b142cc1dd1342f31ff04bb7d022492b09920cb64fed867cd3ea6f80fe3ebd139"},
]
iniconfig = [
{file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"},
@ -1963,6 +1976,7 @@ pillow = [
{file = "Pillow-8.2.0-pp37-pypy37_pp73-manylinux2010_i686.whl", hash = "sha256:aac00e4bc94d1b7813fe882c28990c1bc2f9d0e1aa765a5f2b516e8a6a16a9e4"},
{file = "Pillow-8.2.0-pp37-pypy37_pp73-manylinux2010_x86_64.whl", hash = "sha256:22fd0f42ad15dfdde6c581347eaa4adb9a6fc4b865f90b23378aa7914895e120"},
{file = "Pillow-8.2.0-pp37-pypy37_pp73-win32.whl", hash = "sha256:e98eca29a05913e82177b3ba3d198b1728e164869c613d76d0de4bde6768a50e"},
{file = "Pillow-8.2.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:8b56553c0345ad6dcb2e9b433ae47d67f95fc23fe28a0bde15a120f25257e291"},
{file = "Pillow-8.2.0.tar.gz", hash = "sha256:a787ab10d7bb5494e5f76536ac460741788f1fbce851068d73a87ca7c35fc3e1"},
]
pluggy = [
@ -1974,29 +1988,29 @@ prefixed = [
{file = "prefixed-0.3.2.tar.gz", hash = "sha256:ca48277ba5fa8346dd4b760847da930c7b84416387c39e93affef086add2c029"},
]
protobuf = [
{file = "protobuf-3.17.2-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:c49e673436e24e925022c090a98905cfe88d056cb5dde67a8e20ae339acd8140"},
{file = "protobuf-3.17.2-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:4e62be28dcaab52c7e8e8e3fb9a7005dec6374e698f3b22d79276d95c13151e5"},
{file = "protobuf-3.17.2-cp35-cp35m-macosx_10_9_intel.whl", hash = "sha256:c2038dec269b65683f16057886c09ca7526471793029bdd43259282e1e0fb668"},
{file = "protobuf-3.17.2-cp35-cp35m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:b667ddbb77ff619fdbd18be75445d4448ee68493c9bddd1b4d0b177025e2f6f6"},
{file = "protobuf-3.17.2-cp35-cp35m-win32.whl", hash = "sha256:993814b0199f22523a227696a8e20d2cd4b9cda60c76d2fb3969ce0c77eb9e0f"},
{file = "protobuf-3.17.2-cp35-cp35m-win_amd64.whl", hash = "sha256:38b2c6e2204731cfebdb2452ffb7addf0367172b35cff8ccda338ccea9e7c87a"},
{file = "protobuf-3.17.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:42239d47f213f1ce12ddca62c0c70856efbac09797d715e5d606b3fbc3f16c44"},
{file = "protobuf-3.17.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:4da073b6c1a83e4ff1294156844f21343c5094e295c194d8ecc94703c7a6f42a"},
{file = "protobuf-3.17.2-cp36-cp36m-win32.whl", hash = "sha256:c303ce92c60d069237cfbd41790fde3d00066ca9500fac464454e20c2f12250c"},
{file = "protobuf-3.17.2-cp36-cp36m-win_amd64.whl", hash = "sha256:751d71dc6559dd794d309811d8dcf179d6a128b38a1655ae7137530f33895cb4"},
{file = "protobuf-3.17.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:f3f057ad59cd4d5ea2b1bb88d36c6f009b8043007cf03d96cbd3b9c06859d4fa"},
{file = "protobuf-3.17.2-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:c5b512c1982f8b427a302db094cf79f8f235284014d01b23fba461aa2c1459a0"},
{file = "protobuf-3.17.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:9dc01ddc3b195c4538942790c4b195cf17b52d3785a60c1f6f25b794f51e2071"},
{file = "protobuf-3.17.2-cp37-cp37m-win32.whl", hash = "sha256:45cc0197e9f9192693c8a4dbcba8c9227a53a2720fc3826dfe791113d9ff5e5e"},
{file = "protobuf-3.17.2-cp37-cp37m-win_amd64.whl", hash = "sha256:816fe5e8b73c29adb13a57e1653da15f24cbb90e86ea92e6f08abe6d8c0cbdb4"},
{file = "protobuf-3.17.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:557e16884d7276caf92c1fb188fe6dfbec47891d3507d4982db1e3d89ffd22de"},
{file = "protobuf-3.17.2-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:89613ec119fdcad992f65d7a5bfe3170c17edc839982d0089fc0c9242fb8e517"},
{file = "protobuf-3.17.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:744f5c9a3b9e7538c4c70f2b0e46f86858b684f5d17bf78643a19a6c21c7936f"},
{file = "protobuf-3.17.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1e08da38d56b74962d9cb05d86ca5f25d2e5b78f05fd00db7900cad3faa6de00"},
{file = "protobuf-3.17.2-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:6388e7f300010ea7ac77113c7491c5622645d2447fdf701cbfe026b832d728cd"},
{file = "protobuf-3.17.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:0b5a73fa43efda0df0191c162680ec40cef45463fa8ff69fbeaeeddda4c760f5"},
{file = "protobuf-3.17.2-py2.py3-none-any.whl", hash = "sha256:50c657a54592c1bec7b24521fdbbbd2f7b51325ba23ab505ed03e8ebf3a5aeff"},
{file = "protobuf-3.17.2.tar.gz", hash = "sha256:5a3450acf046716e4a4f02a3f7adfb7b86f1b5b3ae392cec759915e79538d40d"},
{file = "protobuf-3.17.3-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:ab6bb0e270c6c58e7ff4345b3a803cc59dbee19ddf77a4719c5b635f1d547aa8"},
{file = "protobuf-3.17.3-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:13ee7be3c2d9a5d2b42a1030976f760f28755fcf5863c55b1460fd205e6cd637"},
{file = "protobuf-3.17.3-cp35-cp35m-macosx_10_9_intel.whl", hash = "sha256:1556a1049ccec58c7855a78d27e5c6e70e95103b32de9142bae0576e9200a1b0"},
{file = "protobuf-3.17.3-cp35-cp35m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:f0e59430ee953184a703a324b8ec52f571c6c4259d496a19d1cabcdc19dabc62"},
{file = "protobuf-3.17.3-cp35-cp35m-win32.whl", hash = "sha256:a981222367fb4210a10a929ad5983ae93bd5a050a0824fc35d6371c07b78caf6"},
{file = "protobuf-3.17.3-cp35-cp35m-win_amd64.whl", hash = "sha256:6d847c59963c03fd7a0cd7c488cadfa10cda4fff34d8bc8cba92935a91b7a037"},
{file = "protobuf-3.17.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:145ce0af55c4259ca74993ddab3479c78af064002ec8227beb3d944405123c71"},
{file = "protobuf-3.17.3-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6ce4d8bf0321e7b2d4395e253f8002a1a5ffbcfd7bcc0a6ba46712c07d47d0b4"},
{file = "protobuf-3.17.3-cp36-cp36m-win32.whl", hash = "sha256:7a4c97961e9e5b03a56f9a6c82742ed55375c4a25f2692b625d4087d02ed31b9"},
{file = "protobuf-3.17.3-cp36-cp36m-win_amd64.whl", hash = "sha256:a22b3a0dbac6544dacbafd4c5f6a29e389a50e3b193e2c70dae6bbf7930f651d"},
{file = "protobuf-3.17.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:ffea251f5cd3c0b9b43c7a7a912777e0bc86263436a87c2555242a348817221b"},
{file = "protobuf-3.17.3-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:9b7a5c1022e0fa0dbde7fd03682d07d14624ad870ae52054849d8960f04bc764"},
{file = "protobuf-3.17.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:8727ee027157516e2c311f218ebf2260a18088ffb2d29473e82add217d196b1c"},
{file = "protobuf-3.17.3-cp37-cp37m-win32.whl", hash = "sha256:14c1c9377a7ffbeaccd4722ab0aa900091f52b516ad89c4b0c3bb0a4af903ba5"},
{file = "protobuf-3.17.3-cp37-cp37m-win_amd64.whl", hash = "sha256:c56c050a947186ba51de4f94ab441d7f04fcd44c56df6e922369cc2e1a92d683"},
{file = "protobuf-3.17.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2ae692bb6d1992afb6b74348e7bb648a75bb0d3565a3f5eea5bec8f62bd06d87"},
{file = "protobuf-3.17.3-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:99938f2a2d7ca6563c0ade0c5ca8982264c484fdecf418bd68e880a7ab5730b1"},
{file = "protobuf-3.17.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6902a1e4b7a319ec611a7345ff81b6b004b36b0d2196ce7a748b3493da3d226d"},
{file = "protobuf-3.17.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4ffbd23640bb7403574f7aff8368e2aeb2ec9a5c6306580be48ac59a6bac8bde"},
{file = "protobuf-3.17.3-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:26010f693b675ff5a1d0e1bdb17689b8b716a18709113288fead438703d45539"},
{file = "protobuf-3.17.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:e76d9686e088fece2450dbc7ee905f9be904e427341d289acbe9ad00b78ebd47"},
{file = "protobuf-3.17.3-py2.py3-none-any.whl", hash = "sha256:2bfb815216a9cd9faec52b16fd2bfa68437a44b67c56bee59bc3926522ecb04e"},
{file = "protobuf-3.17.3.tar.gz", hash = "sha256:72804ea5eaa9c22a090d2803813e280fb273b62d5ae497aaf3553d141c4fdd7b"},
]
py = [
{file = "py-1.10.0-py2.py3-none-any.whl", hash = "sha256:3b80836aa6d1feeaa108e046da6423ab8f6ceda6468545ae8d02d9d58d18818a"},
@ -2136,28 +2150,28 @@ pynput = [
{file = "pynput-1.7.3.tar.gz", hash = "sha256:4e50b1a0ab86847e87e58f6d1993688b9a44f9f4c88d4712315ea8eb552ef828"},
]
pyobjc-core = [
{file = "pyobjc-core-7.2.tar.gz", hash = "sha256:9e9ec482d80ea030cdb1613d05a247f31eedabe6666d884d42dd890cc5fb0e05"},
{file = "pyobjc_core-7.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:94b4d9de9d228db52dd35012096d63bdf8c1ace58ea3be1d5f6f39313cd502f2"},
{file = "pyobjc_core-7.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:971cbd7189ae1aa03ef0d16124aa5bcd053779e0e6b6011a41c3dbd5b4ea7e88"},
{file = "pyobjc_core-7.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9d93b20394008373d6d2856d49aaff26f4b97ff42d924a14516c8a82313ec8c0"},
{file = "pyobjc_core-7.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:860183540d1be792c26426018139ac8ba75e85f675c59ba080ccdc52d8e74c7a"},
{file = "pyobjc_core-7.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ffe61d3c2a404354daf2d895e34e38c5044453353581b3c396bf5365de26250c"},
{file = "pyobjc-core-7.3.tar.gz", hash = "sha256:5081aedf8bb40aac1a8ad95adac9e44e148a882686ded614adf46bb67fd67574"},
{file = "pyobjc_core-7.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:4e93ad769a20b908778fe950f62a843a6d8f0fa71996e5f3cc9fab5ae7d17771"},
{file = "pyobjc_core-7.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:9f63fd37bbf3785af4ddb2f86cad5ca81c62cfc7d1c0099637ca18343c3656c1"},
{file = "pyobjc_core-7.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e9b1311f72f2e170742a7ee3a8149f52c35158dc024a21e88d6f1e52ba5d718b"},
{file = "pyobjc_core-7.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8d5e12a0729dfd1d998a861998b422d0a3e41923d75ea229bacf31372c831d7b"},
{file = "pyobjc_core-7.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:efdee8c4884405e0c0186c57f87d7bfaa0abc1f50b18e865db3caea3a1f329b9"},
]
pyobjc-framework-cocoa = [
{file = "pyobjc-framework-Cocoa-7.2.tar.gz", hash = "sha256:c8b23f03dc3f4436d36c0fd006a8a084835c4f6015187df7c3aa5de8ecd5c653"},
{file = "pyobjc_framework_Cocoa-7.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8e5dd5daa0096755937ec24c345a4b07c3fa131a457f99e0fdeeb01979178ec7"},
{file = "pyobjc_framework_Cocoa-7.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:828d183947fc7746953fd0c9b1092cc423745ba0b49719e7b7d1e1614aaa20ec"},
{file = "pyobjc_framework_Cocoa-7.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7e4c6d7baa0c2ab5ea5efb8836ad0b3b3976cffcfc6195c1f195e826c6eb5744"},
{file = "pyobjc_framework_Cocoa-7.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c9a9d1d49cc5a810773c88d6de821e60c8cc41d01113cf1b9e7662938f5f7d66"},
{file = "pyobjc_framework_Cocoa-7.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:506c2cd09f421eac92b9008a0142174c3d1d70ecd4b0e3fa2b924767995fd14e"},
{file = "pyobjc-framework-Cocoa-7.3.tar.gz", hash = "sha256:b18d05e7a795a3455ad191c3e43d6bfa673c2a4fd480bb1ccf57191051b80b7e"},
{file = "pyobjc_framework_Cocoa-7.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:9edffdfa6dd1f71f21b531c3e61fdd3e4d5d3bf6c5a528c98e88828cd60bac11"},
{file = "pyobjc_framework_Cocoa-7.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:35a6340437a4e0109a302150b7d1f6baf57004ccf74834f9e6062fcafe2fd8d7"},
{file = "pyobjc_framework_Cocoa-7.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7c3886f2608ab3ed02482f8b2ebf9f782b324c559e84b52cfd92dba8a1109872"},
{file = "pyobjc_framework_Cocoa-7.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:2e8e7a1a82cca21d9bfac9115baf065305f3da577bf240085964dfb9c9fff337"},
{file = "pyobjc_framework_Cocoa-7.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6c15f43077c9a2ba1853eb402ff7a9515df9e584315bc2fcb779d4c95ef46dc5"},
]
pyobjc-framework-quartz = [
{file = "pyobjc-framework-Quartz-7.2.tar.gz", hash = "sha256:ea554e5697bc6747a4ce793c0b0036da16622b44ff75196d6124603008922afa"},
{file = "pyobjc_framework_Quartz-7.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:dc61fe61d26f797e4335f3ffc891bcef64624c728c2603e3307b3910580b2cb8"},
{file = "pyobjc_framework_Quartz-7.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:ad8103cc38923f2708904db11a0992ea960125ce6adf7b4c7a77d8fdafd412c4"},
{file = "pyobjc_framework_Quartz-7.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4549d17ca41f0bf62792d5bc4b4293ba9a6cc560014b3e18ba22c65e4a5030d2"},
{file = "pyobjc_framework_Quartz-7.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:da16e4f1e13cb7b02e30fa538cbb3a356e4a694bbc2bb26d2bd100ca12a54ff6"},
{file = "pyobjc_framework_Quartz-7.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c1f6471177a39535cd0358ae29b8f3d31fe778a21deb74105c448c4e726619d7"},
{file = "pyobjc-framework-Quartz-7.3.tar.gz", hash = "sha256:98812844c34262def980bdf60923a875cd43428a8375b6fd53bd2cd800eccf0b"},
{file = "pyobjc_framework_Quartz-7.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:1ef18f5a16511ded65980bf4f5983ea5d35c88224dbad1b3112abd29c60413ea"},
{file = "pyobjc_framework_Quartz-7.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3b41eec8d4b10c7c7e011e2f9051367f5499ef315ba52dfbae573c3a2e05469c"},
{file = "pyobjc_framework_Quartz-7.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2c65456ed045dfe1711d0298734e5a3ad670f8c770f7eb3b19979256c388bdd2"},
{file = "pyobjc_framework_Quartz-7.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8ddbca6b466584c3dc0e5b701b1c2a9b5d97ddc1d79a949927499ebb1be1f210"},
{file = "pyobjc_framework_Quartz-7.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7aba3cd966a0768dd58a35680742820f0c5ac596a9cd11014e2057818e65b0af"},
]
pyparsing = [
{file = "pyparsing-2.4.7-py2.py3-none-any.whl", hash = "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b"},
@ -2269,6 +2283,10 @@ six = [
{file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"},
{file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"},
]
slack-sdk = [
{file = "slack_sdk-3.6.0-py2.py3-none-any.whl", hash = "sha256:e1b257923a1ef88b8620dd3abff94dc5b3eee16ef37975d101ba9e60123ac3af"},
{file = "slack_sdk-3.6.0.tar.gz", hash = "sha256:195f044e02a2844579a7a26818ce323e85dde8de224730c859644918d793399e"},
]
smmap = [
{file = "smmap-4.0.0-py2.py3-none-any.whl", hash = "sha256:a9a7479e4c572e2e775c404dcd3080c8dc49f39918c2cf74913d30c4c478e3c2"},
{file = "smmap-4.0.0.tar.gz", hash = "sha256:7e65386bd122d45405ddf795637b7f7d2b532e7e401d46bbe3fb49b9986d5182"},
@ -2290,8 +2308,8 @@ sphinx-qt-documentation = [
{file = "sphinx_qt_documentation-0.3.tar.gz", hash = "sha256:f09a0c9d9e989172ba3e282b92bf55613bb23ad47315ec5b0d38536b343ac6c8"},
]
sphinx-rtd-theme = [
{file = "sphinx_rtd_theme-0.5.1-py2.py3-none-any.whl", hash = "sha256:fa6bebd5ab9a73da8e102509a86f3fcc36dec04a0b52ea80e5a033b2aba00113"},
{file = "sphinx_rtd_theme-0.5.1.tar.gz", hash = "sha256:eda689eda0c7301a80cf122dad28b1861e5605cbf455558f3775e1e8200e83a5"},
{file = "sphinx_rtd_theme-0.5.2-py2.py3-none-any.whl", hash = "sha256:4a05bdbe8b1446d77a01e20a23ebc6777c74f43237035e76be89699308987d6f"},
{file = "sphinx_rtd_theme-0.5.2.tar.gz", hash = "sha256:32bd3b5d13dc8186d7a42fc816a23d32e83a4827d7d9882948e7b837c232da5a"},
]
sphinxcontrib-applehelp = [
{file = "sphinxcontrib-applehelp-1.0.2.tar.gz", hash = "sha256:a072735ec80e7675e3f432fcae8610ecf509c5f1869d17e2eecff44389cdbc58"},

View file

@ -85,6 +85,8 @@ sphinx-qt-documentation = "*"
recommonmark = "*"
wheel = "*"
enlighten = "*" # cool terminal progress bars
toml = "^0.10.2" # for parsing pyproject.toml
[tool.poetry.urls]
"Bug Tracker" = "https://github.com/pypeclub/openpype/issues"

View file

@ -9,6 +9,10 @@ from Deadline.Scripting import RepositoryUtils, FileUtils
def inject_openpype_environment(deadlinePlugin):
""" Pull env vars from OpenPype and push them to rendering process.
Used for correct paths, configuration from OpenPype etc.
"""
job = deadlinePlugin.GetJob()
job = RepositoryUtils.GetJob(job.JobId, True) # invalidates cache
@ -73,6 +77,21 @@ def inject_openpype_environment(deadlinePlugin):
raise
def inject_render_job_id(deadlinePlugin):
"""Inject dependency ids into the publish process as an env var for validation."""
print("inject_render_job_id start")
job = deadlinePlugin.GetJob()
job = RepositoryUtils.GetJob(job.JobId, True) # invalidates cache
dependency_ids = job.JobDependencyIDs
print("dependency_ids {}".format(dependency_ids))
render_job_ids = ",".join(dependency_ids)
deadlinePlugin.SetProcessEnvironmentVariable("RENDER_JOB_IDS",
render_job_ids)
print("inject_render_job_id end")
def pype_command_line(executable, arguments, workingDirectory):
"""Remap paths in the command line argument string.
@ -156,8 +175,7 @@ def __main__(deadlinePlugin):
"render and publish.")
if openpype_publish_job == '1':
print("Publish job, skipping inject.")
return
inject_render_job_id(deadlinePlugin)
elif openpype_render_job == '1':
inject_openpype_environment(deadlinePlugin)
else: