Mirror of https://github.com/ynput/ayon-core.git (synced 2025-12-24 21:04:40 +01:00)

Commit 3fed4537d7
Merge branch 'develop' into enhancement/OP-7133_3dsMax--optional-custom-attributes-for-abc-extract

16 changed files with 83 additions and 27 deletions
@@ -606,7 +606,7 @@ def convert_v4_version_to_v3(version):
         output_data[dst_key] = version[src_key]

     if "createdAt" in version:
-        created_at = arrow.get(version["createdAt"])
+        created_at = arrow.get(version["createdAt"]).to("local")
         output_data["time"] = created_at.strftime("%Y%m%dT%H%M%SZ")

     output["data"] = output_data
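The same UTC-to-local conversion recurs in two more hunks below. A minimal standalone sketch of what the added `.to("local")` call does, assuming the `arrow` package is installed (the timestamp value is illustrative):

    import arrow

    # The server stores 'createdAt' as an ISO-8601 UTC timestamp (illustrative value).
    created_at_utc = arrow.get("2024-01-15T10:30:00+00:00")

    # '.to("local")' re-expresses the same instant in the machine's local timezone,
    # so the formatted publish time matches the artist's wall clock.
    created_at_local = created_at_utc.to("local")

    print(created_at_utc.strftime("%Y%m%dT%H%M%SZ"))    # e.g. 20240115T103000Z
    print(created_at_local.strftime("%Y%m%dT%H%M%SZ"))  # same instant, local clock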
@@ -60,8 +60,9 @@ class ExtractLocalRender(publish.Extractor):
             first_repre = not representations
             if instance.data["review"] and first_repre:
                 repre_data["tags"] = ["review"]
-                thumbnail_path = os.path.join(staging_dir, files[0])
-                instance.data["thumbnailSource"] = thumbnail_path
+                # TODO return back when Extract from source same as regular
+                # thumbnail_path = os.path.join(staging_dir, files[0])
+                # instance.data["thumbnailSource"] = thumbnail_path

             representations.append(repre_data)

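A hedged sketch of the representation dict this hunk tags for review; only "tags" is visible in the diff, the remaining keys are the usual representation fields and are purely illustrative here:

    repre_data = {
        "name": "png",              # illustrative output format
        "files": files,             # rendered file names from the extractor
        "stagingDir": staging_dir,  # where the extractor wrote them
        "tags": ["review"],         # added only to the first representation
    }
    # The thumbnailSource assignment stays commented out until the
    # extract-from-source path matches the regular thumbnail extraction
    # (see the TODO in the hunk above).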
@@ -198,8 +198,8 @@ def _render_preview_animation_max_pre_2024(
         res_width, res_height, filename=filepath
     )
     dib = rt.gw.getViewportDib()
-    dib_width = rt.renderWidth
-    dib_height = rt.renderHeight
+    dib_width = float(dib.width)
+    dib_height = float(dib.height)
     # aspect ratio
     viewportRatio = dib_width / dib_height
     renderRatio = float(res_width / res_height)
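A hedged sketch of the aspect-ratio comparison this change feeds: the grabbed viewport bitmap reports its own size, which can differ from the render setup's rt.renderWidth/rt.renderHeight (names as in the diff; the resolution values are illustrative, and this only runs inside 3ds Max):

    from pymxs import runtime as rt  # only available inside 3ds Max

    res_width, res_height = 1920.0, 1080.0   # illustrative target render resolution

    dib = rt.gw.getViewportDib()             # active viewport grabbed as a bitmap
    dib_width = float(dib.width)             # actual bitmap size, not rt.renderWidth
    dib_height = float(dib.height)

    viewport_ratio = dib_width / dib_height
    render_ratio = res_width / res_height

    # A mismatch means the viewport grab must be cropped to the render aspect
    # before being written out; using the bitmap's own size makes the comparison
    # reflect what was actually captured.
    needs_crop = abs(viewport_ratio - render_ratio) > 1e-6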
@@ -140,7 +140,7 @@ def is_running_staging():
     latest_version = get_latest_version(local=False, remote=True)
     staging_version = latest_version

-    if current_version == production_version:
+    if current_version == staging_version:
         return True

     return is_staging_enabled()
@@ -1,6 +1,6 @@
 import os
 from openpype import AYON_SERVER_ENABLED
-from openpype.lib.openpype_version import is_running_staging
+from openpype.lib.openpype_version import is_staging_enabled

 RESOURCES_DIR = os.path.dirname(os.path.abspath(__file__))

@@ -59,7 +59,7 @@ def get_openpype_icon_filepath(staging=None):
         return get_resource("icons", "AYON_icon_dev.png")

     if staging is None:
-        staging = is_running_staging()
+        staging = is_staging_enabled()

     if staging:
         return get_openpype_staging_icon_filepath()
@@ -68,7 +68,7 @@ def get_openpype_icon_filepath(staging=None):

 def get_openpype_splash_filepath(staging=None):
     if staging is None:
-        staging = is_running_staging()
+        staging = is_staging_enabled()

     if AYON_SERVER_ENABLED:
         if os.getenv("AYON_USE_DEV") == "1":
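A hedged usage sketch for these resource helpers after the change; passing the argument explicitly bypasses the is_staging_enabled() lookup:

    # Default: the staging/production variant follows the user's staging setting.
    icon = get_openpype_icon_filepath()

    # Explicit override, e.g. to preview both variants in a settings UI:
    staging_icon = get_openpype_icon_filepath(staging=True)
    production_splash = get_openpype_splash_filepath(staging=False)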
@@ -44,7 +44,7 @@ def version_item_from_entity(version):
     # NOTE There is also 'updatedAt', should be used that instead?
     # TODO skip conversion - converting to '%Y%m%dT%H%M%SZ' is because
     #   'PrettyTimeDelegate' expects it
-    created_at = arrow.get(version["createdAt"])
+    created_at = arrow.get(version["createdAt"]).to("local")
     published_time = created_at.strftime("%Y%m%dT%H%M%SZ")
     author = version["author"]
     version_num = version["version"]
@@ -606,7 +606,7 @@ class PublishWorkfilesModel:
                print("Failed to format workfile path: {}".format(exc))

        dirpath, filename = os.path.split(workfile_path)
-        created_at = arrow.get(repre_entity["createdAt"])
+        created_at = arrow.get(repre_entity["createdAt"].to("local"))
        return FileItem(
            dirpath,
            filename,
@@ -60,7 +60,7 @@ class TestDeadlinePublishInAfterEffects(AEDeadlinePublishTestClass):
                                    name="renderTest_taskMain"))

        failures.append(
-            DBAssert.count_of_types(dbcon, "representation", 4))
+            DBAssert.count_of_types(dbcon, "representation", 3))

        additional_args = {"context.subset": "workfileTest_task",
                           "context.ext": "aep"}
@@ -77,7 +77,7 @@ class TestDeadlinePublishInAfterEffects(AEDeadlinePublishTestClass):
        additional_args = {"context.subset": "renderTest_taskMain",
                           "name": "thumbnail"}
        failures.append(
-            DBAssert.count_of_types(dbcon, "representation", 1,
+            DBAssert.count_of_types(dbcon, "representation", 0,
                                    additional_args=additional_args))

        additional_args = {"context.subset": "renderTest_taskMain",
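The same count adjustments repeat across the remaining test modules below: the standalone thumbnail representation is no longer expected, so the overall totals drop by one and the thumbnail-specific counts drop to zero. A hedged sketch of how these assertions are collected, assuming the helper returns a falsy value when the count matches:

    failures = []

    # Overall number of representation documents expected after the publish.
    failures.append(DBAssert.count_of_types(dbcon, "representation", 3))

    # No standalone thumbnail representation is expected any more.
    additional_args = {"context.subset": "renderTest_taskMain",
                       "name": "thumbnail"}
    failures.append(
        DBAssert.count_of_types(dbcon, "representation", 0,
                                additional_args=additional_args))

    # Assumed pattern: any truthy entry marks a failed expectation.
    assert not any(failures), failures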
@@ -71,7 +71,7 @@ class TestDeadlinePublishInAfterEffectsMultiComposition(AEDeadlinePublishTestClass):
                                    name="renderTest_taskMain2"))

        failures.append(
-            DBAssert.count_of_types(dbcon, "representation", 5))
+            DBAssert.count_of_types(dbcon, "representation", 4))

        additional_args = {"context.subset": "workfileTest_task",
                           "context.ext": "aep"}
@@ -89,7 +89,7 @@ class TestDeadlinePublishInAfterEffectsMultiComposition(AEDeadlinePublishTestClass):
        additional_args = {"context.subset": "renderTest_taskMain",
                           "name": "thumbnail"}
        failures.append(
-            DBAssert.count_of_types(dbcon, "representation", 1,
+            DBAssert.count_of_types(dbcon, "representation", 0,
                                    additional_args=additional_args))

        additional_args = {"context.subset": "renderTest_taskMain",
@@ -58,7 +58,7 @@ class TestPublishInAfterEffects(AELocalPublishTestClass):
                                    name="renderTest_taskMain"))

        failures.append(
-            DBAssert.count_of_types(dbcon, "representation", 4))
+            DBAssert.count_of_types(dbcon, "representation", 3))

        additional_args = {"context.subset": "workfileTest_task",
                           "context.ext": "aep"}
@@ -75,7 +75,7 @@ class TestPublishInAfterEffects(AELocalPublishTestClass):
        additional_args = {"context.subset": "renderTest_taskMain",
                           "name": "thumbnail"}
        failures.append(
-            DBAssert.count_of_types(dbcon, "representation", 1,
+            DBAssert.count_of_types(dbcon, "representation", 0,
                                    additional_args=additional_args))

        additional_args = {"context.subset": "renderTest_taskMain",
@@ -60,7 +60,7 @@ class TestPublishInAfterEffects(AELocalPublishTestClass):
                                    name="renderTest_taskMain"))

        failures.append(
-            DBAssert.count_of_types(dbcon, "representation", 4))
+            DBAssert.count_of_types(dbcon, "representation", 2))

        additional_args = {"context.subset": "workfileTest_task",
                           "context.ext": "aep"}
@@ -77,7 +77,7 @@ class TestPublishInAfterEffects(AELocalPublishTestClass):
        additional_args = {"context.subset": "renderTest_taskMain",
                           "name": "thumbnail"}
        failures.append(
-            DBAssert.count_of_types(dbcon, "representation", 1,
+            DBAssert.count_of_types(dbcon, "representation", 0,
                                    additional_args=additional_args))

        additional_args = {"context.subset": "renderTest_taskMain",
@@ -89,7 +89,7 @@ class TestPublishInAfterEffects(AELocalPublishTestClass):
        additional_args = {"context.subset": "renderTest_taskMain",
                           "name": "thumbnail"}
        failures.append(
-            DBAssert.count_of_types(dbcon, "representation", 1,
+            DBAssert.count_of_types(dbcon, "representation", 0,
                                    additional_args=additional_args))

        additional_args = {"context.subset": "renderTest_taskMain",
@@ -45,7 +45,7 @@ class TestPublishInAfterEffects(AELocalPublishTestClass):
                                    name="renderTest_taskMain"))

        failures.append(
-            DBAssert.count_of_types(dbcon, "representation", 4))
+            DBAssert.count_of_types(dbcon, "representation", 3))

        additional_args = {"context.subset": "workfileTest_task",
                           "context.ext": "aep"}
@@ -62,7 +62,7 @@ class TestPublishInAfterEffects(AELocalPublishTestClass):
        additional_args = {"context.subset": "renderTest_taskMain",
                           "name": "thumbnail"}
        failures.append(
-            DBAssert.count_of_types(dbcon, "representation", 1,
+            DBAssert.count_of_types(dbcon, "representation", 0,
                                    additional_args=additional_args))

        additional_args = {"context.subset": "renderTest_taskMain",
@@ -54,7 +54,7 @@ class TestDeadlinePublishInMaya(MayaDeadlinePublishTestClass):
            DBAssert.count_of_types(dbcon, "subset", 1,
                                    name="workfileTest_task"))

-        failures.append(DBAssert.count_of_types(dbcon, "representation", 8))
+        failures.append(DBAssert.count_of_types(dbcon, "representation", 7))

        # hero included
        additional_args = {"context.subset": "modelMain",
@@ -85,7 +85,7 @@ class TestDeadlinePublishInMaya(MayaDeadlinePublishTestClass):
        additional_args = {"context.subset": "renderTest_taskMain_beauty",
                           "context.ext": "jpg"}
        failures.append(
-            DBAssert.count_of_types(dbcon, "representation", 1,
+            DBAssert.count_of_types(dbcon, "representation", 0,
                                    additional_args=additional_args))

        additional_args = {"context.subset": "renderTest_taskMain_beauty",
Binary file not shown (removed; before: 75 KiB image).
@@ -69,7 +69,7 @@ class TestDeadlinePublishInNuke(NukeDeadlinePublishTestClass):
                                    name="workfileTest_task"))

        failures.append(
-            DBAssert.count_of_types(dbcon, "representation", 4))
+            DBAssert.count_of_types(dbcon, "representation", 3))

        additional_args = {"context.subset": "workfileTest_task",
                           "context.ext": "nk"}
@@ -86,7 +86,7 @@ class TestDeadlinePublishInNuke(NukeDeadlinePublishTestClass):
        additional_args = {"context.subset": "renderTest_taskMain",
                           "name": "thumbnail"}
        failures.append(
-            DBAssert.count_of_types(dbcon, "representation", 1,
+            DBAssert.count_of_types(dbcon, "representation", 0,
                                    additional_args=additional_args))

        additional_args = {"context.subset": "renderTest_taskMain",
@@ -481,7 +481,7 @@ class DeadlinePublishTest(PublishTest):
        while not valid_date_finished:
            time.sleep(0.5)
            if time.time() - time_start > timeout:
-                raise ValueError("Timeout for DL finish reached")
+                raise ValueError("Timeout for Deadline finish reached")

            response = requests.get(url, timeout=10)
            if not response.ok:
@@ -491,6 +491,61 @@ class DeadlinePublishTest(PublishTest):
            if not response.json():
                raise ValueError("Couldn't find {}".format(deadline_job_id))

+            job = response.json()[0]
+
+            def recursive_dependencies(job, results=None):
+                if results is None:
+                    results = []
+
+                for dependency in job["Props"]["Dep"]:
+                    dependency = requests.get(
+                        "{}/api/jobs?JobId={}".format(
+                            deadline_url, dependency["JobID"]
+                        ),
+                        timeout=10
+                    ).json()[0]
+                    results.append(dependency)
+                    grand_dependencies = recursive_dependencies(
+                        dependency, results=results
+                    )
+                    for grand_dependency in grand_dependencies:
+                        if grand_dependency not in results:
+                            results.append(grand_dependency)
+                return results
+
+            job_status = {
+                0: "Unknown",
+                1: "Active",
+                2: "Suspended",
+                3: "Completed",
+                4: "Failed",
+                6: "Pending"
+            }
+
+            jobs_to_validate = [job]
+            jobs_to_validate.extend(recursive_dependencies(job))
+            failed_jobs = []
+            errors = []
+            for job in jobs_to_validate:
+                if "Failed" == job_status[job["Stat"]]:
+                    failed_jobs.append(str(job))
+
+                resp_error = requests.get(
+                    "{}/api/jobreports?JobID={}&Data=allerrorcontents".format(
+                        deadline_url, job["_id"]
+                    ),
+                    timeout=10
+                )
+                errors.extend(resp_error.json())
+
+            msg = "Errors in Deadline:\n"
+            msg += "\n".join(errors)
+            assert not errors, msg
+
+            msg = "Failed in Deadline:\n"
+            msg += "\n".join(failed_jobs)
+            assert not failed_jobs, msg
+
             # '0001-...' returned until job is finished
             valid_date_finished = response.json()[0]["DateComp"][:4] != "0001"

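The new validation only touches a handful of fields in Deadline's job JSON. A hedged sketch of the minimal shape it reads; field names are taken from the hunk above, the values are illustrative:

    job = {
        "_id": "0123456789abcdef01234567",   # used in the jobreports error query
        "Stat": 3,                           # numeric status, translated via job_status
        "DateComp": "2024-01-15T12:00:00Z",  # stays '0001-...' until the job finishes
        "Props": {
            "Dep": [                         # dependencies, walked recursively
                {"JobID": "89abcdef0123456789abcdef"},
            ],
        },
    }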