Merge branch 'develop' into feature/OP-4239_Multiselection-in-card-view

Jakub Trllo 2022-10-19 12:11:46 +02:00
commit eed02f857d
20 changed files with 458 additions and 73 deletions

View file

@ -40,13 +40,13 @@ jobs:
- name: "✏️ Generate full changelog"
if: steps.version_type.outputs.type != 'skip'
id: generate-full-changelog
uses: heinrichreimer/github-changelog-generator-action@v2.2
uses: heinrichreimer/github-changelog-generator-action@v2.3
with:
token: ${{ secrets.ADMIN_TOKEN }}
addSections: '{"documentation":{"prefix":"### 📖 Documentation","labels":["type: documentation"]},"tests":{"prefix":"### ✅ Testing","labels":["tests"]},"feature":{"prefix":"**🆕 New features**", "labels":["type: feature"]},"breaking":{"prefix":"**💥 Breaking**", "labels":["breaking"]},"enhancements":{"prefix":"**🚀 Enhancements**", "labels":["type: enhancement"]},"bugs":{"prefix":"**🐛 Bug fixes**", "labels":["type: bug"]},"deprecated":{"prefix":"**⚠️ Deprecations**", "labels":["depreciated"]}, "refactor":{"prefix":"**🔀 Refactored code**", "labels":["refactor"]}}'
issues: false
issuesWoLabels: false
sinceTag: "3.0.0"
sinceTag: "3.12.0"
maxIssues: 100
pullRequests: true
prWoLabels: false
@ -92,4 +92,4 @@ jobs:
github_token: ${{ secrets.ADMIN_TOKEN }}
source_ref: 'main'
target_branch: 'develop'
commit_message_template: '[Automated] Merged {source_ref} into {target_branch}'
commit_message_template: '[Automated] Merged {source_ref} into {target_branch}'

View file

@ -36,13 +36,13 @@ jobs:
- name: "✏️ Generate full changelog"
if: steps.version.outputs.release_tag != 'skip'
id: generate-full-changelog
uses: heinrichreimer/github-changelog-generator-action@v2.2
uses: heinrichreimer/github-changelog-generator-action@v2.3
with:
token: ${{ secrets.ADMIN_TOKEN }}
addSections: '{"documentation":{"prefix":"### 📖 Documentation","labels":["type: documentation"]},"tests":{"prefix":"### ✅ Testing","labels":["tests"]},"feature":{"prefix":"**🆕 New features**", "labels":["type: feature"]},"breaking":{"prefix":"**💥 Breaking**", "labels":["breaking"]},"enhancements":{"prefix":"**🚀 Enhancements**", "labels":["type: enhancement"]},"bugs":{"prefix":"**🐛 Bug fixes**", "labels":["type: bug"]},"deprecated":{"prefix":"**⚠️ Deprecations**", "labels":["depreciated"]}, "refactor":{"prefix":"**🔀 Refactored code**", "labels":["refactor"]}}'
issues: false
issuesWoLabels: false
sinceTag: "3.0.0"
sinceTag: "3.12.0"
maxIssues: 100
pullRequests: true
prWoLabels: false
@ -121,4 +121,4 @@ jobs:
github_token: ${{ secrets.ADMIN_TOKEN }}
source_ref: 'main'
target_branch: 'develop'
commit_message_template: '[Automated] Merged release {source_ref} into {target_branch}'
commit_message_template: '[Automated] Merged release {source_ref} into {target_branch}'

View file

@ -1,7 +1,7 @@
"""Host API required Work Files tool"""
import os
from openpype.api import Logger
from openpype.lib import Logger
# from .. import (
# get_project_manager,
# get_current_project

View file

@ -3,16 +3,17 @@ import json
import tempfile
import contextlib
import socket
from pprint import pformat
from openpype.lib import (
PreLaunchHook,
get_openpype_username
get_openpype_username,
run_subprocess,
)
from openpype.lib.applications import (
ApplicationLaunchFailed
)
from openpype.hosts import flame as opflame
import openpype
from pprint import pformat
class FlamePrelaunch(PreLaunchHook):
@ -127,7 +128,6 @@ class FlamePrelaunch(PreLaunchHook):
except OSError as exc:
self.log.warning("Not able to open files: {}".format(exc))
def _get_flame_fps(self, fps_num):
fps_table = {
float(23.976): "23.976 fps",
@ -179,7 +179,7 @@ class FlamePrelaunch(PreLaunchHook):
"env": self.launch_context.env
}
openpype.api.run_subprocess(args, **process_kwargs)
run_subprocess(args, **process_kwargs)
# process returned json file to pass launch args
return_json_data = open(tmp_json_path).read()

View file

@ -260,20 +260,20 @@ class ARenderProducts:
"""
try:
file_prefix_attr = IMAGE_PREFIXES[self.renderer]
prefix_attr = IMAGE_PREFIXES[self.renderer]
except KeyError:
raise UnsupportedRendererException(
"Unsupported renderer {}".format(self.renderer)
)
file_prefix = self._get_attr(file_prefix_attr)
prefix = self._get_attr(prefix_attr)
if not file_prefix:
if not prefix:
# Fall back to scene name by default
log.debug("Image prefix not set, using <Scene>")
file_prefix = "<Scene>"
return file_prefix
return prefix
def get_render_attribute(self, attribute):
"""Get attribute from render options.
@ -730,13 +730,16 @@ class RenderProductsVray(ARenderProducts):
"""Get image prefix for V-Ray.
This overrides :func:`ARenderProducts.get_renderer_prefix()` as
we must add `<aov>` token manually.
we must add the `<aov>` token manually. This is done only for
non-multipart outputs; for multipart outputs the `<aov>` token doesn't make sense.
See also:
:func:`ARenderProducts.get_renderer_prefix()`
"""
prefix = super(RenderProductsVray, self).get_renderer_prefix()
if self.multipart:
return prefix
aov_separator = self._get_aov_separator()
prefix = "{}{}<aov>".format(prefix, aov_separator)
return prefix
@ -974,15 +977,18 @@ class RenderProductsRedshift(ARenderProducts):
"""Get image prefix for Redshift.
This overrides :func:`ARenderProducts.get_renderer_prefix()` as
we must add `<aov>` token manually.
we must add the `<aov>` token manually. This is done only for
non-multipart outputs; for multipart outputs the `<aov>` token doesn't make sense.
See also:
:func:`ARenderProducts.get_renderer_prefix()`
"""
file_prefix = super(RenderProductsRedshift, self).get_renderer_prefix()
separator = self.extract_separator(file_prefix)
prefix = "{}{}<aov>".format(file_prefix, separator or "_")
prefix = super(RenderProductsRedshift, self).get_renderer_prefix()
if self.multipart:
return prefix
separator = self.extract_separator(prefix)
prefix = "{}{}<aov>".format(prefix, separator or "_")
return prefix
def get_render_products(self):
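For illustration, a minimal sketch (hypothetical prefix and separator values; the real ones come from the scene's render settings) of what the multipart check above does to the image file prefix:

# Hypothetical values standing in for the render settings of the scene.
prefix = "maya/<Scene>/<RenderLayer>/<RenderLayer>"
aov_separator = "_"
multipart = False

if not multipart:
    # Only non-multipart renders get the per-AOV token appended.
    prefix = "{}{}<aov>".format(prefix, aov_separator)
# prefix is now "maya/<Scene>/<RenderLayer>/<RenderLayer>_<aov>"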

View file

@ -188,6 +188,7 @@ class BatchMovieCreator(TrayPublishCreator):
folders=False,
single_item=False,
extensions=self.extensions,
allow_sequences=False,
label="Filepath"
),
BoolDef(

View file

@ -70,11 +70,17 @@ class CollectSettingsSimpleInstances(pyblish.api.InstancePlugin):
repre_names,
representation_files_mapping
)
source_filepaths = list(set(source_filepaths))
instance.data["source"] = source
instance.data["sourceFilepaths"] = list(set(source_filepaths))
instance.data["originalBasename"] = Path(
instance.data["sourceFilepaths"][0]).stem
instance.data["sourceFilepaths"] = source_filepaths
# NOTE: Missing filepaths should not cause crashes (at least not here)
# - if filepaths are required they should crash on validation
if source_filepaths:
# NOTE: Original basename does not handle sequences
# - we should maybe not fill the key when a sequence is used?
origin_basename = Path(source_filepaths[0]).stem
instance.data["originalBasename"] = origin_basename
self.log.debug(
(
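A tiny sketch of what `originalBasename` ends up holding, and why the sequence note above matters (filenames are hypothetical):

from pathlib import Path

# Single file: the stem is the expected original basename.
print(Path("/tmp/review_v001.mov").stem)   # review_v001
# Frame from a sequence: the frame number stays in the stem, which is why
# sequences are not really covered by this key yet.
print(Path("/tmp/render.1001.exr").stem)   # render.1001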

View file

@ -1,10 +1,33 @@
import os
import logging
import platform
import subprocess
log = logging.getLogger("Vendor utils")
class CachedToolPaths:
"""Cache already used and discovered tools and their executables.
Discovering a path can take some time and can trigger subprocesses, so it's
better to cache the paths on first get.
"""
_cached_paths = {}
@classmethod
def is_tool_cached(cls, tool):
return tool in cls._cached_paths
@classmethod
def get_executable_path(cls, tool):
return cls._cached_paths.get(tool)
@classmethod
def cache_executable_path(cls, tool, path):
cls._cached_paths[tool] = path
def is_file_executable(filepath):
"""Filepath lead to executable file.
@ -98,6 +121,7 @@ def get_vendor_bin_path(bin_app):
Returns:
str: Path to vendorized binaries folder.
"""
return os.path.join(
os.environ["OPENPYPE_ROOT"],
"vendor",
@ -107,6 +131,112 @@ def get_vendor_bin_path(bin_app):
)
def find_tool_in_custom_paths(paths, tool, validation_func=None):
"""Find a tool executable in custom paths.
Args:
paths (Iterable[str]): Iterable of paths where to look for the tool.
tool (str): Name of the tool (binary file) to find in passed paths.
validation_func (Function): Custom validation function of the path.
Function must expect one argument which is the path to the executable.
If not passed, only 'find_executable' is used to identify whether
the path is valid.
Returns:
Union[str, None]: Path to the validated executable, or None if it was
not found.
"""
for path in paths:
# Skip empty strings
if not path:
continue
# Handle cases when path is just an executable
# - it allows using an executable from PATH
# - basename must match 'tool' value (without extension)
extless_path, ext = os.path.splitext(path)
if extless_path == tool:
executable_path = find_executable(tool)
if executable_path and (
validation_func is None
or validation_func(executable_path)
):
return executable_path
continue
# Normalize path because it should be a path and check if exists
normalized = os.path.normpath(path)
if not os.path.exists(normalized):
continue
# Note: Path can be both file and directory
# If path is a file validate it
if os.path.isfile(normalized):
basename, ext = os.path.splitext(os.path.basename(path))
# Check if the filename actually has the same name as 'tool'
if basename == tool:
executable_path = find_executable(normalized)
if executable_path and (
validation_func is None
or validation_func(executable_path)
):
return executable_path
# Check if path is a directory and look for tool inside the dir
if os.path.isdir(normalized):
executable_path = find_executable(os.path.join(normalized, tool))
if executable_path and (
validation_func is None
or validation_func(executable_path)
):
return executable_path
return None
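A hedged usage sketch of the helper above (candidate paths are hypothetical); the first candidate that resolves to an executable and passes the validation callback wins:

# Hypothetical candidate locations; directories, full executable paths and
# bare tool names can be mixed freely.
candidates = [
    "/opt/oiio/bin",             # directory that may contain the tool
    "/usr/local/bin/oiiotool",   # direct path to an executable
    "oiiotool",                  # bare name resolved from PATH
]
executable = find_tool_in_custom_paths(
    candidates, "oiiotool", validation_func=_oiio_executable_validation
)
if executable is None:
    log.debug("oiiotool was not found in the custom paths")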
def _oiio_executable_validation(filepath):
"""Validate oiio tool executable if can be executed.
Validation has 2 steps. First is using 'find_executable' to fill possible
missing extension or fill directory then launch executable and validate
that it can be executed. For that is used '--help' argument which is fast
and does not need any other inputs.
Any possible crash of missing libraries or invalid build should be catched.
Main reason is to validate if executable can be executed on OS just running
which can be issue ob linux machines.
Note:
It does not validate if the executable is really a oiio tool which
should be used.
Args:
filepath (str): Path to executable.
Returns:
bool: Filepath is valid executable.
"""
filepath = find_executable(filepath)
if not filepath:
return False
try:
proc = subprocess.Popen(
[filepath, "--help"],
stdout=subprocess.DEVNULL,
stderr=subprocess.DEVNULL,
)
proc.wait()
return proc.returncode == 0
except Exception:
pass
return False
def get_oiio_tools_path(tool="oiiotool"):
"""Path to vendorized OpenImageIO tool executables.
@ -117,10 +247,73 @@ def get_oiio_tools_path(tool="oiiotool"):
Default is "oiiotool".
"""
oiio_dir = get_vendor_bin_path("oiio")
if platform.system().lower() == "linux":
oiio_dir = os.path.join(oiio_dir, "bin")
return find_executable(os.path.join(oiio_dir, tool))
if CachedToolPaths.is_tool_cached(tool):
return CachedToolPaths.get_executable_path(tool)
custom_paths_str = os.environ.get("OPENPYPE_OIIO_PATHS") or ""
tool_executable_path = find_tool_in_custom_paths(
custom_paths_str.split(os.pathsep),
tool,
_oiio_executable_validation
)
if not tool_executable_path:
oiio_dir = get_vendor_bin_path("oiio")
if platform.system().lower() == "linux":
oiio_dir = os.path.join(oiio_dir, "bin")
default_path = os.path.join(oiio_dir, tool)
if _oiio_executable_validation(default_path):
tool_executable_path = default_path
# Look to PATH for the tool
if not tool_executable_path:
from_path = find_executable(tool)
if from_path and _oiio_executable_validation(from_path):
tool_executable_path = from_path
CachedToolPaths.cache_executable_path(tool, tool_executable_path)
return tool_executable_path
def _ffmpeg_executable_validation(filepath):
"""Validate ffmpeg tool executable if can be executed.
Validation has 2 steps. First is using 'find_executable' to fill possible
missing extension or fill directory then launch executable and validate
that it can be executed. For that is used '-version' argument which is fast
and does not need any other inputs.
Any possible crash of missing libraries or invalid build should be catched.
Main reason is to validate if executable can be executed on OS just running
which can be issue ob linux machines.
Note:
It does not validate if the executable is really a ffmpeg tool.
Args:
filepath (str): Path to executable.
Returns:
bool: Filepath is valid executable.
"""
filepath = find_executable(filepath)
if not filepath:
return False
try:
proc = subprocess.Popen(
[filepath, "-version"],
stdout=subprocess.DEVNULL,
stderr=subprocess.DEVNULL,
)
proc.wait()
return proc.returncode == 0
except Exception:
pass
return False
def get_ffmpeg_tool_path(tool="ffmpeg"):
@ -133,10 +326,33 @@ def get_ffmpeg_tool_path(tool="ffmpeg"):
Returns:
str: Full path to ffmpeg executable.
"""
ffmpeg_dir = get_vendor_bin_path("ffmpeg")
if platform.system().lower() == "windows":
ffmpeg_dir = os.path.join(ffmpeg_dir, "bin")
return find_executable(os.path.join(ffmpeg_dir, tool))
if CachedToolPaths.is_tool_cached(tool):
return CachedToolPaths.get_executable_path(tool)
custom_paths_str = os.environ.get("OPENPYPE_FFMPEG_PATHS") or ""
tool_executable_path = find_tool_in_custom_paths(
custom_paths_str.split(os.pathsep),
tool,
_ffmpeg_executable_validation
)
if not tool_executable_path:
ffmpeg_dir = get_vendor_bin_path("ffmpeg")
if platform.system().lower() == "windows":
ffmpeg_dir = os.path.join(ffmpeg_dir, "bin")
tool_path = find_executable(os.path.join(ffmpeg_dir, tool))
if tool_path and _ffmpeg_executable_validation(tool_path):
tool_executable_path = tool_path
# Look to PATH for the tool
if not tool_executable_path:
from_path = find_executable(tool)
if from_path and _ffmpeg_executable_validation(from_path):
tool_executable_path = from_path
CachedToolPaths.cache_executable_path(tool, tool_executable_path)
return tool_executable_path
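A short usage sketch of the lookup order implemented above: cached result first, then the `OPENPYPE_FFMPEG_PATHS`/`OPENPYPE_OIIO_PATHS` override, then the vendorized binaries and finally PATH. The subprocess calls below are only illustrative:

import subprocess

ffmpeg_path = get_ffmpeg_tool_path("ffmpeg")
oiiotool_path = get_oiio_tools_path("oiiotool")

if ffmpeg_path:
    # The helpers return an already validated executable path (or None).
    subprocess.run([ffmpeg_path, "-version"], check=False)
if oiiotool_path is None:
    log.debug("No usable oiiotool executable was found")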
def is_oiio_supported():

View file

@ -32,6 +32,9 @@ from maya import cmds
from openpype.pipeline import legacy_io
from openpype.hosts.maya.api.lib_rendersettings import RenderSettings
from openpype.hosts.maya.api.lib import get_attr_in_layer
from openpype_modules.deadline import abstract_submit_deadline
from openpype_modules.deadline.abstract_submit_deadline import DeadlineJobInfo
@ -498,9 +501,10 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline):
job_info.AssetDependency += self.scene_path
# Get layer prefix
render_products = self._instance.data["renderProducts"]
layer_metadata = render_products.layer_data
layer_prefix = layer_metadata.filePrefix
renderlayer = self._instance.data["setMembers"]
renderer = self._instance.data["renderer"]
layer_prefix_attr = RenderSettings.get_image_prefix_attr(renderer)
layer_prefix = get_attr_in_layer(layer_prefix_attr, layer=renderlayer)
plugin_info = copy.deepcopy(self.plugin_info)
plugin_info.update({

View file

@ -0,0 +1,125 @@
import webbrowser
from openpype.pipeline import LauncherAction
from openpype.modules import ModulesManager
from openpype.client import get_project, get_asset_by_name
class ShowInKitsu(LauncherAction):
name = "showinkitsu"
label = "Show in Kitsu"
icon = "external-link-square"
color = "#e0e1e1"
order = 10
@staticmethod
def get_kitsu_module():
return ModulesManager().modules_by_name.get("kitsu")
def is_compatible(self, session):
if not session.get("AVALON_PROJECT"):
return False
return True
def process(self, session, **kwargs):
# Context inputs
project_name = session["AVALON_PROJECT"]
asset_name = session.get("AVALON_ASSET", None)
task_name = session.get("AVALON_TASK", None)
project = get_project(project_name=project_name,
fields=["data.zou_id"])
if not project:
raise RuntimeError(f"Project {project_name} not found.")
project_zou_id = project["data"].get("zou_id")
if not project_zou_id:
raise RuntimeError(f"Project {project_name} has no "
f"connected kitsu id.")
asset_zou_name = None
asset_zou_id = None
asset_zou_type = 'Assets'
task_zou_id = None
zou_sub_type = ['AssetType', 'Sequence']
if asset_name:
asset_zou_name = asset_name
asset_fields = ["data.zou.id", "data.zou.type"]
if task_name:
asset_fields.append(f"data.tasks.{task_name}.zou.id")
asset = get_asset_by_name(project_name,
asset_name=asset_name,
fields=asset_fields)
asset_zou_data = asset["data"].get("zou")
if asset_zou_data:
asset_zou_type = asset_zou_data["type"]
if asset_zou_type not in zou_sub_type:
asset_zou_id = asset_zou_data["id"]
else:
asset_zou_type = asset_name
if task_name:
task_data = asset["data"]["tasks"][task_name]
task_zou_data = task_data.get("zou", {})
if not task_zou_data:
self.log.debug(f"No zou task data for task: {task_name}")
task_zou_id = task_zou_data["id"]
# Define URL
url = self.get_url(project_id=project_zou_id,
asset_name=asset_zou_name,
asset_id=asset_zou_id,
asset_type=asset_zou_type,
task_id=task_zou_id)
# Open URL in webbrowser
self.log.info(f"Opening URL: {url}")
webbrowser.open(url,
# Try in new tab
new=2)
def get_url(self,
project_id,
asset_name=None,
asset_id=None,
asset_type=None,
task_id=None):
shots_url = {'Shots', 'Sequence', 'Shot'}
sub_type = {'AssetType', 'Sequence'}
kitsu_module = self.get_kitsu_module()
# Get kitsu url with /api stripped
kitsu_url = kitsu_module.server_url
if kitsu_url.endswith("/api"):
kitsu_url = kitsu_url[:-len("/api")]
sub_url = f"/productions/{project_id}"
asset_type_url = "Shots" if asset_type in shots_url else "Assets"
if task_id:
# Go to task page
# /productions/{project-id}/{asset_type}/tasks/{task_id}
sub_url += f"/{asset_type_url}/tasks/{task_id}"
elif asset_id:
# Go to asset or shot page
# /productions/{project-id}/assets/{entity_id}
# /productions/{project-id}/shots/{entity_id}
sub_url += f"/{asset_type_url}/{asset_id}"
else:
# Go to project page
# Project page must end with a view
# /productions/{project-id}/assets/
# Add search method if is a sub_type
sub_url += f"/{asset_type_url}"
if asset_type in sub_type:
sub_url += f'?search={asset_name}'
return f"{kitsu_url}{sub_url}"

View file

@ -89,7 +89,10 @@ class KitsuModule(OpenPypeModule, IPluginPaths, ITrayAction):
"""Implementation of abstract method for `IPluginPaths`."""
current_dir = os.path.dirname(os.path.abspath(__file__))
return {"publish": [os.path.join(current_dir, "plugins", "publish")]}
return {
"publish": [os.path.join(current_dir, "plugins", "publish")],
"actions": [os.path.join(current_dir, "actions")]
}
def cli(self, click_group):
click_group.add_command(cli_main)

View file

@ -115,7 +115,9 @@ def update_op_assets(
item_data["frameStart"] = frame_in
# Frames duration, fallback on 0
try:
frames_duration = int(item_data.pop("nb_frames", 0))
# NOTE nb_frames is stored directly in item
# because of zou's legacy design
frames_duration = int(item.get("nb_frames", 0))
except (TypeError, ValueError):
frames_duration = 0
# Frame out, fallback on frame_in + duration or project's value or 1001
@ -170,7 +172,7 @@ def update_op_assets(
# Substitute item type for general classification (assets or shots)
if item_type in ["Asset", "AssetType"]:
entity_root_asset_name = "Assets"
elif item_type in ["Episode", "Sequence"]:
elif item_type in ["Episode", "Sequence", "Shot"]:
entity_root_asset_name = "Shots"
# Root parent folder if exist
@ -276,11 +278,13 @@ def write_project_to_op(project: dict, dbcon: AvalonMongoDB) -> UpdateOne:
match_res = re.match(r"(\d+)x(\d+)", project["resolution"])
if match_res:
project_data['resolutionWidth'] = int(match_res.group(1))
project_data['resolutionHeight'] = int(match_res.group(2))
project_data["resolutionWidth"] = int(match_res.group(1))
project_data["resolutionHeight"] = int(match_res.group(2))
else:
log.warning(f"\'{project['resolution']}\' does not match the expected"
" format for the resolution, for example: 1920x1080")
log.warning(
f"'{project['resolution']}' does not match the expected"
" format for the resolution, for example: 1920x1080"
)
return UpdateOne(
{"_id": project_doc["_id"]},

View file

@ -973,23 +973,22 @@ VariantInputsWidget QToolButton {
background: {color:bg};
border-radius: 0.3em;
}
#PublishInfoFrame[state="-1"] {
background: rgb(194, 226, 236);
}
#PublishInfoFrame[state="0"] {
background: {color:publisher:crash};
background: {color:publisher:success};
}
#PublishInfoFrame[state="1"] {
background: {color:publisher:success};
background: {color:publisher:crash};
}
#PublishInfoFrame[state="2"] {
background: {color:publisher:warning};
}
#PublishInfoFrame[state="3"], #PublishInfoFrame[state="4"] {
background: rgb(194, 226, 236);
}
#PublishInfoFrame QLabel {
color: black;
font-style: bold;
@ -1086,7 +1085,7 @@ ValidationArtistMessage QLabel {
border-color: {color:publisher:error};
}
#PublishProgressBar[state="0"]::chunk {
#PublishProgressBar[state="1"]::chunk, #PublishProgressBar[state="4"]::chunk {
background: {color:bg-buttons};
}

View file

@ -3,7 +3,6 @@ import copy
import logging
import traceback
import collections
import time
import uuid
from abc import ABCMeta, abstractmethod, abstractproperty
@ -234,7 +233,7 @@ class PublishReport:
"""Set that current plugin has been skipped."""
self._current_plugin_data["skipped"] = True
def add_result(self, result, process_time):
def add_result(self, result):
"""Handle result of one plugin and it's instance."""
instance = result["instance"]
@ -244,7 +243,7 @@ class PublishReport:
self._current_plugin_data["instances_data"].append({
"id": instance_id,
"logs": self._extract_instance_log_items(result),
"process_time": process_time
"process_time": result["duration"]
})
def add_action_result(self, action, result):
@ -1290,7 +1289,7 @@ class BasePublisherController(AbstractPublisherController):
self._publish_has_validation_errors = False
self._publish_has_crashed = False
# All publish plugins are processed
self._publish_finished = False
self._publish_has_finished = False
self._publish_max_progress = 0
self._publish_progress = 0
@ -1338,7 +1337,7 @@ class BasePublisherController(AbstractPublisherController):
changed.
"publish.progress.changed" - Attr 'publish_progress' changed.
"publish.host_is_valid.changed" - Attr 'host_is_valid' changed.
"publish.finished.changed" - Attr 'publish_finished' changed.
"publish.finished.changed" - Attr 'publish_has_finished' changed.
Returns:
EventSystem: Event system which can trigger callbacks for topics.
@ -1362,11 +1361,11 @@ class BasePublisherController(AbstractPublisherController):
self._emit_event("publish.host_is_valid.changed", {"value": value})
def _get_publish_has_finished(self):
return self._publish_finished
return self._publish_has_finished
def _set_publish_has_finished(self, value):
if self._publish_finished != value:
self._publish_finished = value
if self._publish_has_finished != value:
self._publish_has_finished = value
self._emit_event("publish.finished.changed", {"value": value})
def _get_publish_is_running(self):
@ -1466,7 +1465,7 @@ class BasePublisherController(AbstractPublisherController):
self.publish_has_validated = False
self.publish_has_crashed = False
self.publish_has_validation_errors = False
self.publish_finished = False
self.publish_has_finished = False
self.publish_error_msg = None
self.publish_progress = 0
@ -2093,7 +2092,7 @@ class PublisherController(BasePublisherController):
self._publish_report.set_plugin_skipped()
# Cleanup of publishing process
self.publish_finished = True
self.publish_has_finished = True
self.publish_progress = self.publish_max_progress
yield MainThreadItem(self.stop_publish)
@ -2106,13 +2105,11 @@ class PublisherController(BasePublisherController):
)
def _process_and_continue(self, plugin, instance):
start = time.time()
result = pyblish.plugin.process(
plugin, self._publish_context, instance
)
process_time = time.time() - start
self._publish_report.add_result(result, process_time)
self._publish_report.add_result(result)
exception = result.get("error")
if exception:

View file

@ -173,7 +173,7 @@ class QtRemotePublishController(BasePublisherController):
return
if event.topic == "publish.finished.changed":
self.publish_finished = event["value"]
self.publish_has_finished = event["value"]
return
if event.topic == "publish.host_is_valid.changed":

View file

@ -44,8 +44,10 @@ class HelpWidget(QtWidgets.QWidget):
if commonmark:
html = commonmark.commonmark(text)
self._detail_description_input.setHtml(html)
else:
elif hasattr(self._detail_description_input, "setMarkdown"):
self._detail_description_input.setMarkdown(text)
else:
self._detail_description_input.setText(text)
class HelpDialog(QtWidgets.QDialog):

View file

@ -328,7 +328,7 @@ class PublishFrame(QtWidgets.QWidget):
if self._last_instance_label:
self._instance_label.setText(self._last_instance_label)
self._set_success_property(-1)
self._set_success_property(3)
self._set_progress_visibility(True)
self._set_main_label("Publishing...")
@ -407,7 +407,7 @@ class PublishFrame(QtWidgets.QWidget):
"Hit publish (play button) to continue."
)
self._set_success_property(-1)
self._set_success_property(4)
def _set_error_msg(self):
"""Show error message to artist on publish crash."""
@ -416,7 +416,7 @@ class PublishFrame(QtWidgets.QWidget):
self._message_label_top.setText(self._controller.publish_error_msg)
self._set_success_property(0)
self._set_success_property(1)
def _set_validation_errors(self):
self._set_main_label("Your publish didn't pass studio validations")
@ -426,7 +426,7 @@ class PublishFrame(QtWidgets.QWidget):
def _set_finished(self):
self._set_main_label("Finished")
self._message_label_top.setText("")
self._set_success_property(1)
self._set_success_property(0)
def _set_progress_visibility(self, visible):
window_height = self.height()
@ -447,6 +447,17 @@ class PublishFrame(QtWidgets.QWidget):
self.move(window_pos.x(), window_pos_y)
def _set_success_property(self, state=None):
"""Apply styles by state.
State enum:
- None - Default state after restart
- 0 - Success finish
- 1 - Error happened
- 2 - Validation error
- 3 - In progress
- 4 - Stopped/Paused
"""
if state is None:
state = ""
else:

View file

@ -709,5 +709,7 @@ class ValidationsWidget(QtWidgets.QFrame):
if commonmark:
html = commonmark.commonmark(description)
self._error_details_input.setHtml(html)
else:
elif hasattr(self._error_details_input, "setMarkdown"):
self._error_details_input.setMarkdown(description)
else:
self._error_details_input.setText(description)

View file

@ -470,6 +470,11 @@ class PublisherWindow(QtWidgets.QDialog):
self._set_publish_visibility(False)
self._set_footer_enabled(False)
self._update_publish_details_widget()
if (
not self._tabs_widget.is_current_tab("create")
and not self._tabs_widget.is_current_tab("publish")
):
self._tabs_widget.set_current_tab("publish")
def _on_publish_start(self):
self._create_tab.setEnabled(False)
@ -499,6 +504,9 @@ class PublisherWindow(QtWidgets.QDialog):
publish_has_crashed = self._controller.publish_has_crashed
validate_enabled = not publish_has_crashed
publish_enabled = not publish_has_crashed
if self._tabs_widget.is_current_tab("publish"):
self._go_to_report_tab()
if validate_enabled:
validate_enabled = not self._controller.publish_has_validated
if publish_enabled:
@ -507,8 +515,6 @@ class PublisherWindow(QtWidgets.QDialog):
and self._controller.publish_has_validation_errors
):
publish_enabled = False
if self._tabs_widget.is_current_tab("publish"):
self._go_to_report_tab()
else:
publish_enabled = not self._controller.publish_has_finished

View file

@ -26,6 +26,9 @@ as a naive barrier to prevent artists from accidental setting changes.
**`Disk mapping`** - Platform dependent configuration for mapping of virtual disk(s) on an artist's OpenPype machines before OP starts up.
Uses the `subst` command; if the configured volume character in the `Destination` field already exists, no re-mapping is done for that character (volume).
### FFmpeg and OpenImageIO tools
We bundle FFmpeg tools for all platforms and OpenImageIO tools for Windows and Linux. The bundled tools are used by default, but it is possible to set the environment variables `OPENPYPE_FFMPEG_PATHS` and `OPENPYPE_OIIO_PATHS` in the system settings environments to look for them in a different directory, e.g. for different linux distributions or to add oiio support for MacOS. The value of each environment variable should point to a directory where the tool executables are located (multiple paths are supported).
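A hypothetical example of the override values; multiple paths are joined with the OS path separator (`;` on Windows, `:` elsewhere):

import os

# Hypothetical directories; point them at wherever the tool executables live.
os.environ["OPENPYPE_FFMPEG_PATHS"] = os.pathsep.join(
    ["/opt/ffmpeg/bin", "/usr/local/bin"]
)
os.environ["OPENPYPE_OIIO_PATHS"] = "/opt/oiio/bin"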
### OpenPype deployment control
**`Versions Repository`** - Location where automatic update mechanism searches for zip files with
OpenPype update packages. To read more about preparing OpenPype for automatic updates go to [Admin Distribute docs](admin_distribute.md#2-openpype-codebase)