Merge branch 'develop' into bugfix/OP-7438_3dsmax-preview-resolution-with-burnins

Kayla Man 2023-12-11 23:42:33 +08:00 committed by GitHub
commit 874b2c50d8
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
3 changed files with 60 additions and 5 deletions

@@ -140,7 +140,7 @@ def is_running_staging():
    latest_version = get_latest_version(local=False, remote=True)
    staging_version = latest_version
-    if current_version == production_version:
+    if current_version == staging_version:
        return True
    return is_staging_enabled()

@@ -1,6 +1,6 @@
import os
from openpype import AYON_SERVER_ENABLED
-from openpype.lib.openpype_version import is_running_staging
+from openpype.lib.openpype_version import is_staging_enabled

RESOURCES_DIR = os.path.dirname(os.path.abspath(__file__))
@@ -59,7 +59,7 @@ def get_openpype_icon_filepath(staging=None):
        return get_resource("icons", "AYON_icon_dev.png")

    if staging is None:
-        staging = is_running_staging()
+        staging = is_staging_enabled()

    if staging:
        return get_openpype_staging_icon_filepath()
@@ -68,7 +68,7 @@ def get_openpype_icon_filepath(staging=None):

def get_openpype_splash_filepath(staging=None):
    if staging is None:
-        staging = is_running_staging()
+        staging = is_staging_enabled()

    if AYON_SERVER_ENABLED:
        if os.getenv("AYON_USE_DEV") == "1":

@@ -481,7 +481,7 @@ class DeadlinePublishTest(PublishTest):
        while not valid_date_finished:
            time.sleep(0.5)
            if time.time() - time_start > timeout:
-                raise ValueError("Timeout for DL finish reached")
+                raise ValueError("Timeout for Deadline finish reached")

            response = requests.get(url, timeout=10)
            if not response.ok:
@@ -491,6 +491,61 @@
            if not response.json():
                raise ValueError("Couldn't find {}".format(deadline_job_id))

            job = response.json()[0]
+
+            def recursive_dependencies(job, results=None):
+                if results is None:
+                    results = []
+                for dependency in job["Props"]["Dep"]:
+                    dependency = requests.get(
+                        "{}/api/jobs?JobId={}".format(
+                            deadline_url, dependency["JobID"]
+                        ),
+                        timeout=10
+                    ).json()[0]
+                    results.append(dependency)
+                    grand_dependencies = recursive_dependencies(
+                        dependency, results=results
+                    )
+                    for grand_dependency in grand_dependencies:
+                        if grand_dependency not in results:
+                            results.append(grand_dependency)
+                return results
+
+            job_status = {
+                0: "Unknown",
+                1: "Active",
+                2: "Suspended",
+                3: "Completed",
+                4: "Failed",
+                6: "Pending"
+            }
+
+            jobs_to_validate = [job]
+            jobs_to_validate.extend(recursive_dependencies(job))
+
+            failed_jobs = []
+            errors = []
+            for job in jobs_to_validate:
+                if "Failed" == job_status[job["Stat"]]:
+                    failed_jobs.append(str(job))
+
+                resp_error = requests.get(
+                    "{}/api/jobreports?JobID={}&Data=allerrorcontents".format(
+                        deadline_url, job["_id"]
+                    ),
+                    timeout=10
+                )
+                errors.extend(resp_error.json())
+
+            msg = "Errors in Deadline:\n"
+            msg += "\n".join(errors)
+            assert not errors, msg
+
+            msg = "Failed in Deadline:\n"
+            msg += "\n".join(failed_jobs)
+            assert not failed_jobs, msg
+
            # '0001-...' returned until job is finished
            valid_date_finished = response.json()[0]["DateComp"][:4] != "0001"
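
For reference, the added test code relies on two Deadline Web Service REST endpoints. The following is a minimal standalone sketch of the same queries, assuming a Web Service reachable on its default port 8082 and a purely hypothetical job id; it illustrates the calls made in the hunk above and is not part of the commit.

import requests

# assumption: local Deadline Web Service on its default port
deadline_url = "http://localhost:8082"
job_id = "656f0f5c7d3a9c2bb0c1d234"  # hypothetical job id

# /api/jobs returns a list of job documents; "Stat" holds the numeric status
# (see the job_status mapping above) and "Props"/"Dep" lists the direct
# dependencies that the new test code walks recursively.
job = requests.get(
    "{}/api/jobs?JobId={}".format(deadline_url, job_id), timeout=10
).json()[0]
print(job["Stat"], [dep["JobID"] for dep in job["Props"]["Dep"]])

# /api/jobreports with Data=allerrorcontents returns the error report
# contents for the job, which the test asserts to be empty.
errors = requests.get(
    "{}/api/jobreports?JobID={}&Data=allerrorcontents".format(
        deadline_url, job["_id"]
    ),
    timeout=10
).json()
print(errors)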