Mirror of https://github.com/ynput/ayon-core.git, synced 2025-12-24 12:54:40 +01:00
Merge fixes for tests from branch with Deadline tests.
The branch with the newly implemented Deadline (DL) tests targets release 3.15, but its changes also affect the tests in develop. This branch should fix the automatic tests in develop without requiring the full set of 3.15 changes.
This commit is contained in:
parent 7e2400db06
commit 412d03d382
15 changed files with 275 additions and 111 deletions
@@ -1,5 +1,15 @@
Automatic tests for OpenPype
============================

Requirements:
============
Tests recreate a fresh DB for each run, so the `mongorestore`, `mongodump` and `mongoimport` command line tools must be installed and on PATH.

You can find installers here: https://www.mongodb.com/docs/database-tools/installation/installation/

You can test that `mongorestore` is available by running this in a console or cmd:

```mongorestore --version```

Structure:
- integration - end to end tests, slow (see README.md in the integration folder for more info)
- openpype/modules/MODULE_NAME - structure follows the directory structure in the code base
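For reference, the integration suites touched by this commit are launched through OpenPype's `runtests` command rather than plain `pytest`; the invocation below is quoted from the Nuke test docstring later in this diff and assumes an activated `{OPENPYPE_ROOT}/.venv` on Windows:

```
{OPENPYPE_ROOT}/.venv/Scripts/python.exe {OPENPYPE_ROOT}/start.py runtests ../tests/integration/hosts/nuke
```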
@@ -43,3 +43,15 @@ def app_variant(request):
@pytest.fixture(scope="module")
def timeout(request):
    return request.config.getoption("--timeout")


@pytest.hookimpl(tryfirst=True, hookwrapper=True)
def pytest_runtest_makereport(item, call):
    # execute all other hooks to obtain the report object
    outcome = yield
    rep = outcome.get_result()

    # set a report attribute for each phase of a call, which can
    # be "setup", "call", "teardown"
    setattr(item, "rep_" + rep.when, rep)
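The hook added above is the standard pytest recipe for exposing per-phase reports to fixtures: after each phase the report is stored on the test item as `rep_setup`, `rep_call` or `rep_teardown`, and a fixture can branch on the outcome during its teardown. A minimal sketch (the fixture name is hypothetical, not part of this commit):

```python
import pytest


@pytest.fixture
def keep_output_on_failure(request, tmp_path):
    yield tmp_path
    # `rep_call` is attached by pytest_runtest_makereport above;
    # it is missing when setup itself failed, hence the getattr guard
    rep_call = getattr(request.node, "rep_call", None)
    if rep_call is None or rep_call.failed:
        print("test failed, keeping output in {}".format(tmp_path))
```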
@@ -2,10 +2,13 @@ import os
import pytest
import shutil

from tests.lib.testing_classes import HostFixtures
from tests.lib.testing_classes import (
    HostFixtures,
    PublishTest,
)


class AfterEffectsTestClass(HostFixtures):
class AEHostFixtures(HostFixtures):
    @pytest.fixture(scope="module")
    def last_workfile_path(self, download_test_data, output_folder_url):
        """Get last_workfile_path from source data.

@@ -15,15 +18,15 @@ class AfterEffectsTestClass(HostFixtures):
        src_path = os.path.join(download_test_data,
                                "input",
                                "workfile",
                                "test_project_test_asset_TestTask_v001.aep")
        dest_folder = os.path.join(download_test_data,
                                "test_project_test_asset_test_task_v001.aep")
        dest_folder = os.path.join(output_folder_url,
                                   self.PROJECT,
                                   self.ASSET,
                                   "work",
                                   self.TASK)
        os.makedirs(dest_folder)
        dest_path = os.path.join(dest_folder,
                                 "test_project_test_asset_TestTask_v001.aep")
                                 "test_project_test_asset_test_task_v001.aep")
        shutil.copy(src_path, dest_path)

        yield dest_path

@@ -32,3 +35,12 @@ class AfterEffectsTestClass(HostFixtures):
    def startup_scripts(self, monkeypatch_session, download_test_data):
        """Points Maya to userSetup file from input data"""
        pass

    @pytest.fixture(scope="module")
    def skip_compare_folders(self):
        # skip folder that contain "Logs", these come only from Deadline
        return ["Logs", "Auto-Save"]


class AELocalPublishTestClass(AEHostFixtures, PublishTest):
    """Testing class for local publishes."""
@@ -1,12 +1,12 @@
import logging

from tests.lib.assert_classes import DBAssert
from tests.integration.hosts.aftereffects.lib import AfterEffectsTestClass
from tests.integration.hosts.aftereffects.lib import AELocalPublishTestClass

log = logging.getLogger("test_publish_in_aftereffects")


class TestPublishInAfterEffects(AfterEffectsTestClass):
class TestPublishInAfterEffects(AELocalPublishTestClass):
    """Basic test case for publishing in AfterEffects

    Uses generic TestCase to prepare fixtures for test data, testing DBs,

@@ -32,10 +32,10 @@ class TestPublishInAfterEffects(AfterEffectsTestClass):
            "")
    ]

    APP = "aftereffects"
    APP_GROUP = "aftereffects"
    APP_VARIANT = ""

    APP_NAME = "{}/{}".format(APP, APP_VARIANT)
    APP_NAME = "{}/{}".format(APP_GROUP, APP_VARIANT)

    TIMEOUT = 120  # publish timeout

@@ -49,27 +49,41 @@ class TestPublishInAfterEffects(AfterEffectsTestClass):
        failures.append(
            DBAssert.count_of_types(dbcon, "version", 0, name={"$ne": 1}))

        failures.append(
            DBAssert.count_of_types(dbcon, "subset", 1,
                                    name="imageMainBackgroundcopy"))

        failures.append(
            DBAssert.count_of_types(dbcon, "subset", 1,
                                    name="workfileTest_task"))

        failures.append(
            DBAssert.count_of_types(dbcon, "subset", 1,
                                    name="reviewTesttask"))
                                    name="renderTest_taskMain"))

        failures.append(
            DBAssert.count_of_types(dbcon, "representation", 4))

        additional_args = {"context.subset": "renderTestTaskDefault",
        additional_args = {"context.subset": "renderTest_taskMain",
                           "context.ext": "aep"}
        failures.append(
            DBAssert.count_of_types(dbcon, "representation", 1,
                                    additional_args=additional_args))

        additional_args = {"context.subset": "renderTest_taskMain",
                           "context.ext": "png"}
        failures.append(
            DBAssert.count_of_types(dbcon, "representation", 1,
                                    additional_args=additional_args))

        additional_args = {"context.subset": "renderTest_taskMain",
                           "name": "thumbnail"}
        failures.append(
            DBAssert.count_of_types(dbcon, "representation", 1,
                                    additional_args=additional_args))

        additional_args = {"context.subset": "renderTest_taskMain",
                           "name": "png_png"}
        failures.append(
            DBAssert.count_of_types(dbcon, "representation", 1,
                                    additional_args=additional_args))

        assert not any(failures)
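The tests above collect the return values of `DBAssert.count_of_types` into `failures` and finish with `assert not any(failures)`, so the helper evidently returns a falsy value when the count matches and a message otherwise. The real implementation lives in `tests/lib/assert_classes.py` and is not shown in this diff; a hypothetical sketch of the idea, built on pymongo's `count_documents` (which the old Maya asserts further down call directly):

```python
def count_of_types(dbcon, queried_type, expected, additional_args=None, **kwargs):
    """Return None when the document count matches `expected`, else a message."""
    query = {"type": queried_type}
    query.update(additional_args or {})
    query.update(kwargs)  # e.g. name="modelMain" or name={"$ne": 1}
    found = dbcon.count_documents(query)
    if found != expected:
        return "Expected {} documents of type '{}', found {}".format(
            expected, queried_type, found)
    return None
```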
@@ -1,15 +1,15 @@
import logging

from tests.lib.assert_classes import DBAssert
from tests.integration.hosts.aftereffects.lib import AfterEffectsTestClass
from tests.integration.hosts.aftereffects.lib import AELocalPublishTestClass

log = logging.getLogger("test_publish_in_aftereffects")


class TestPublishInAfterEffects(AfterEffectsTestClass):
class TestPublishInAfterEffects(AELocalPublishTestClass):
    """Basic test case for publishing in AfterEffects

    Should publish 5 frames
    Should publish 10 frames
    """
    PERSIST = True

@@ -19,10 +19,10 @@ class TestPublishInAfterEffects(AfterEffectsTestClass):
            "")
    ]

    APP = "aftereffects"
    APP_GROUP = "aftereffects"
    APP_VARIANT = ""

    APP_NAME = "{}/{}".format(APP, APP_VARIANT)
    APP_NAME = "{}/{}".format(APP_GROUP, APP_VARIANT)

    TIMEOUT = 120  # publish timeout

@@ -36,27 +36,41 @@ class TestPublishInAfterEffects(AfterEffectsTestClass):
        failures.append(
            DBAssert.count_of_types(dbcon, "version", 0, name={"$ne": 1}))

        failures.append(
            DBAssert.count_of_types(dbcon, "subset", 1,
                                    name="imageMainBackgroundcopy"))

        failures.append(
            DBAssert.count_of_types(dbcon, "subset", 1,
                                    name="workfileTest_task"))

        failures.append(
            DBAssert.count_of_types(dbcon, "subset", 1,
                                    name="reviewTesttask"))
                                    name="renderTest_taskMain"))

        failures.append(
            DBAssert.count_of_types(dbcon, "representation", 4))

        additional_args = {"context.subset": "renderTestTaskDefault",
        additional_args = {"context.subset": "renderTest_taskMain",
                           "context.ext": "aep"}
        failures.append(
            DBAssert.count_of_types(dbcon, "representation", 1,
                                    additional_args=additional_args))

        additional_args = {"context.subset": "renderTest_taskMain",
                           "context.ext": "png"}
        failures.append(
            DBAssert.count_of_types(dbcon, "representation", 1,
                                    additional_args=additional_args))

        additional_args = {"context.subset": "renderTest_taskMain",
                           "name": "thumbnail"}
        failures.append(
            DBAssert.count_of_types(dbcon, "representation", 1,
                                    additional_args=additional_args))

        additional_args = {"context.subset": "renderTest_taskMain",
                           "name": "h264_png"}
        failures.append(
            DBAssert.count_of_types(dbcon, "representation", 1,
                                    additional_args=additional_args))

        assert not any(failures)
@@ -2,10 +2,13 @@ import os
import pytest
import shutil

from tests.lib.testing_classes import HostFixtures
from tests.lib.testing_classes import (
    HostFixtures,
    PublishTest,
)


class MayaTestClass(HostFixtures):
class MayaHostFixtures(HostFixtures):
    @pytest.fixture(scope="module")
    def last_workfile_path(self, download_test_data, output_folder_url):
        """Get last_workfile_path from source data.

@@ -15,7 +18,7 @@ class MayaTestClass(HostFixtures):
        src_path = os.path.join(download_test_data,
                                "input",
                                "workfile",
                                "test_project_test_asset_TestTask_v001.mb")
                                "test_project_test_asset_test_task_v001.mb")
        dest_folder = os.path.join(output_folder_url,
                                   self.PROJECT,
                                   self.ASSET,

@@ -23,7 +26,7 @@ class MayaTestClass(HostFixtures):
                                   self.TASK)
        os.makedirs(dest_folder)
        dest_path = os.path.join(dest_folder,
                                 "test_project_test_asset_TestTask_v001.mb")
                                 "test_project_test_asset_test_task_v001.mb")
        shutil.copy(src_path, dest_path)

        yield dest_path

@@ -39,3 +42,11 @@ class MayaTestClass(HostFixtures):
                                   "{}{}{}".format(startup_path,
                                                   os.pathsep,
                                                   original_pythonpath))

    @pytest.fixture(scope="module")
    def skip_compare_folders(self):
        yield []


class MayaLocalPublishTestClass(MayaHostFixtures, PublishTest):
    """Testing class for local publishes."""
@@ -1,7 +1,8 @@
from tests.integration.hosts.maya.lib import MayaTestClass
from tests.lib.assert_classes import DBAssert
from tests.integration.hosts.maya.lib import MayaLocalPublishTestClass


class TestPublishInMaya(MayaTestClass):
class TestPublishInMaya(MayaLocalPublishTestClass):
    """Basic test case for publishing in Maya

    Shouldnt be running standalone only via 'runtests' pype command! (??)

@@ -28,7 +29,7 @@ class TestPublishInMaya(MayaTestClass):
        ("1BTSIIULJTuDc8VvXseuiJV_fL6-Bu7FP", "test_maya_publish.zip", "")
    ]

    APP = "maya"
    APP_GROUP = "maya"
    # keep empty to locate latest installed variant or explicit
    APP_VARIANT = ""

@@ -37,33 +38,41 @@ class TestPublishInMaya(MayaTestClass):
    def test_db_asserts(self, dbcon, publish_finished):
        """Host and input data dependent expected results in DB."""
        print("test_db_asserts")
        assert 5 == dbcon.count_documents({"type": "version"}), \
            "Not expected no of versions"
        failures = []
        failures.append(DBAssert.count_of_types(dbcon, "version", 2))

        assert 0 == dbcon.count_documents({"type": "version",
                                           "name": {"$ne": 1}}), \
            "Only versions with 1 expected"
        failures.append(
            DBAssert.count_of_types(dbcon, "version", 0, name={"$ne": 1}))

        assert 1 == dbcon.count_documents({"type": "subset",
                                           "name": "modelMain"}), \
            "modelMain subset must be present"
        failures.append(
            DBAssert.count_of_types(dbcon, "subset", 1,
                                    name="modelMain"))

        assert 1 == dbcon.count_documents({"type": "subset",
                                           "name": "workfileTest_task"}), \
            "workfileTest_task subset must be present"
        failures.append(
            DBAssert.count_of_types(dbcon, "subset", 1,
                                    name="workfileTest_task"))

        assert 11 == dbcon.count_documents({"type": "representation"}), \
            "Not expected no of representations"
        failures.append(DBAssert.count_of_types(dbcon, "representation", 5))

        assert 2 == dbcon.count_documents({"type": "representation",
                                           "context.subset": "modelMain",
                                           "context.ext": "abc"}), \
            "Not expected no of representations with ext 'abc'"
        additional_args = {"context.subset": "modelMain",
                           "context.ext": "abc"}
        failures.append(
            DBAssert.count_of_types(dbcon, "representation", 2,
                                    additional_args=additional_args))

        assert 2 == dbcon.count_documents({"type": "representation",
                                           "context.subset": "modelMain",
                                           "context.ext": "ma"}), \
            "Not expected no of representations with ext 'abc'"
        additional_args = {"context.subset": "modelMain",
                           "context.ext": "ma"}
        failures.append(
            DBAssert.count_of_types(dbcon, "representation", 2,
                                    additional_args=additional_args))

        additional_args = {"context.subset": "workfileTest_task",
                           "context.ext": "mb"}
        failures.append(
            DBAssert.count_of_types(dbcon, "representation", 1,
                                    additional_args=additional_args))

        assert not any(failures)


if __name__ == "__main__":
@@ -1,17 +1,20 @@
import os
import pytest
import shutil
import re

from tests.lib.testing_classes import HostFixtures
from tests.lib.testing_classes import (
    HostFixtures,
    PublishTest,
)


class NukeTestClass(HostFixtures):
class NukeHostFixtures(HostFixtures):
    @pytest.fixture(scope="module")
    def last_workfile_path(self, download_test_data, output_folder_url):
        """Get last_workfile_path from source data.

        """
        source_file_name = "test_project_test_asset_CompositingInNuke_v001.nk"
        source_file_name = "test_project_test_asset_test_task_v001.nk"
        src_path = os.path.join(download_test_data,
                                "input",
                                "workfile",

@@ -27,7 +30,16 @@ class NukeTestClass(HostFixtures):
        dest_path = os.path.join(dest_folder,
                                 source_file_name)

        shutil.copy(src_path, dest_path)
        # rewrite old root with temporary file
        # TODO - using only C:/projects seems wrong - but where to get root ?
        replace_pattern = re.compile(re.escape("C:/projects"), re.IGNORECASE)
        with open(src_path, "r") as fp:
            updated = fp.read()
            updated = replace_pattern.sub(output_folder_url.replace("\\", '/'),
                                          updated)

        with open(dest_path, "w") as fp:
            fp.write(updated)

        yield dest_path

@@ -41,4 +53,11 @@ class NukeTestClass(HostFixtures):
        monkeypatch_session.setenv("NUKE_PATH",
                                   "{}{}{}".format(startup_path,
                                                   os.pathsep,
                                                   original_nuke_path))
                                                   original_nuke_path))

    @pytest.fixture(scope="module")
    def skip_compare_folders(self):
        yield []


class NukeLocalPublishTestClass(NukeHostFixtures, PublishTest):
    """Testing class for local publishes."""
@@ -1,17 +1,25 @@
import logging

from tests.lib.assert_classes import DBAssert
from tests.integration.hosts.nuke.lib import NukeTestClass
from tests.integration.hosts.nuke.lib import NukeLocalPublishTestClass

log = logging.getLogger("test_publish_in_nuke")


class TestPublishInNuke(NukeTestClass):
class TestPublishInNuke(NukeLocalPublishTestClass):
    """Basic test case for publishing in Nuke

    Uses generic TestCase to prepare fixtures for test data, testing DBs,
    env vars.

    !!!
    It expects modified path in WriteNode,
    use '[python {nuke.script_directory()}]' instead of regular root
    dir (eg. instead of `c:/projects/test_project/test_asset/test_task`).
    Access file path by selecting WriteNode group, CTRL+Enter, update file
    input
    !!!

    Opens Nuke, run publish on prepared workile.

    Then checks content of DB (if subset, version, representations were

@@ -20,7 +28,8 @@ class TestPublishInNuke(NukeTestClass):

    How to run:
    (in cmd with activated {OPENPYPE_ROOT}/.venv)
    {OPENPYPE_ROOT}/.venv/Scripts/python.exe {OPENPYPE_ROOT}/start.py runtests ../tests/integration/hosts/nuke  # noqa: E501
    {OPENPYPE_ROOT}/.venv/Scripts/python.exe {OPENPYPE_ROOT}/start.py
        runtests ../tests/integration/hosts/nuke  # noqa: E501

    To check log/errors from launched app's publish process keep PERSIST
    to True and check `test_openpype.logs` collection.

@@ -30,14 +39,14 @@ class TestPublishInNuke(NukeTestClass):
        ("1SUurHj2aiQ21ZIMJfGVBI2KjR8kIjBGI", "test_Nuke_publish.zip", "")
    ]

    APP = "nuke"
    APP_GROUP = "nuke"

    TIMEOUT = 120  # publish timeout
    TIMEOUT = 50  # publish timeout

    # could be overwritten by command line arguments
    # keep empty to locate latest installed variant or explicit
    APP_VARIANT = ""
    PERSIST = True  # True - keep test_db, test_openpype, outputted test files
    PERSIST = False  # True - keep test_db, test_openpype, outputted test files
    TEST_DATA_FOLDER = None

    def test_db_asserts(self, dbcon, publish_finished):

@@ -52,7 +61,7 @@ class TestPublishInNuke(NukeTestClass):

        failures.append(
            DBAssert.count_of_types(dbcon, "subset", 1,
                                    name="renderCompositingInNukeMain"))
                                    name="renderTest_taskMain"))

        failures.append(
            DBAssert.count_of_types(dbcon, "subset", 1,

@@ -61,7 +70,7 @@ class TestPublishInNuke(NukeTestClass):
        failures.append(
            DBAssert.count_of_types(dbcon, "representation", 4))

        additional_args = {"context.subset": "renderCompositingInNukeMain",
        additional_args = {"context.subset": "renderTest_taskMain",
                           "context.ext": "exr"}
        failures.append(
            DBAssert.count_of_types(dbcon, "representation", 1,
@@ -2,10 +2,13 @@ import os
import pytest
import shutil

from tests.lib.testing_classes import HostFixtures
from tests.lib.testing_classes import (
    HostFixtures,
    PublishTest
)


class PhotoshopTestClass(HostFixtures):
class PhotoshopTestClass(HostFixtures, PublishTest):
    @pytest.fixture(scope="module")
    def last_workfile_path(self, download_test_data, output_folder_url):
        """Get last_workfile_path from source data.

@@ -32,3 +35,7 @@ class PhotoshopTestClass(HostFixtures):
    def startup_scripts(self, monkeypatch_session, download_test_data):
        """Points Maya to userSetup file from input data"""
        pass

    @pytest.fixture(scope="module")
    def skip_compare_folders(self):
        yield []
@@ -41,11 +41,11 @@ class TestPublishInPhotoshop(PhotoshopTestClass):
        ("1zD2v5cBgkyOm_xIgKz3WKn8aFB_j8qC-", "test_photoshop_publish.zip", "")
    ]

    APP = "photoshop"
    APP_GROUP = "photoshop"
    # keep empty to locate latest installed variant or explicit
    APP_VARIANT = ""

    APP_NAME = "{}/{}".format(APP, APP_VARIANT)
    APP_NAME = "{}/{}".format(APP_GROUP, APP_VARIANT)

    TIMEOUT = 120  # publish timeout

@@ -72,7 +72,7 @@ class TestPublishInPhotoshop(PhotoshopTestClass):
                                    name="workfileTest_task"))

        failures.append(
            DBAssert.count_of_types(dbcon, "representation", 8))
            DBAssert.count_of_types(dbcon, "representation", 6))

        additional_args = {"context.subset": "imageMainForeground",
                           "context.ext": "png"}
@@ -118,9 +118,8 @@ class DBHandler:
                                   "Run with overwrite=True")
            else:
                if collection:
                    coll = self.client[db_name_out].get(collection)
                    if coll:
                        coll.drop()
                    if collection in self.client[db_name_out].list_collection_names():  # noqa
                        self.client[db_name_out][collection].drop()
                else:
                    self.teardown(db_name_out)

@@ -133,7 +132,11 @@ class DBHandler:
            db_name=db_name, db_name_out=db_name_out,
            collection=collection)
        print("mongorestore query:: {}".format(query))
        subprocess.run(query)
        try:
            subprocess.run(query)
        except FileNotFoundError:
            raise RuntimeError("'mongorestore' utility must be on path."
                               "Please install it.")
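Catching `FileNotFoundError` above surfaces a readable error once `mongorestore` is actually invoked. A complementary option (not part of this commit) is to verify the MongoDB database tools up front; a small sketch:

```python
import shutil


def require_mongo_tools(tools=("mongorestore", "mongodump", "mongoimport")):
    """Raise early if a required MongoDB CLI tool is not on PATH."""
    missing = [tool for tool in tools if shutil.which(tool) is None]
    if missing:
        raise RuntimeError(
            "Missing MongoDB database tools on PATH: {}. See "
            "https://www.mongodb.com/docs/database-tools/installation/installation/".format(
                ", ".join(missing)))
```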
    def teardown(self, db_name):
        """Drops 'db_name' if exists."""

@@ -231,13 +234,15 @@ class DBHandler:
# Examples
# handler = DBHandler(uri="mongodb://localhost:27017")
# #
# backup_dir = "c:\\projects\\test_nuke_publish\\input\\dumps"
# backup_dir = "c:\\projects\\test_zips\\test_nuke_deadline_publish\\input\\dumps"  # noqa
# # #
# handler.backup_to_dump("avalon", backup_dir, True, collection="test_project")
# handler.setup_from_dump("test_db", backup_dir, True, db_name_out="avalon", collection="test_project")
# handler.setup_from_sql_file("test_db", "c:\\projects\\sql\\item.sql",
# handler.backup_to_dump("avalon_tests", backup_dir, True, collection="test_project")  # noqa
#handler.backup_to_dump("openpype_tests", backup_dir, True, collection="settings")  # noqa

# handler.setup_from_dump("avalon_tests", backup_dir, True, db_name_out="avalon_tests", collection="test_project")  # noqa
# handler.setup_from_sql_file("avalon_tests", "c:\\projects\\sql\\item.sql",
# collection="test_project",
# drop=False, mode="upsert")
# handler.setup_from_sql("test_db", "c:\\projects\\sql",
# handler.setup_from_sql("avalon_tests", "c:\\projects\\sql",
# collection="test_project",
# drop=False, mode="upsert")
@@ -8,9 +8,12 @@ import tempfile
import shutil
import glob
import platform
import requests
import re

from tests.lib.db_handler import DBHandler
from common.openpype_common.distribution.file_handler import RemoteFileHandler
from openpype.modules import ModulesManager


class BaseTest:

@@ -36,9 +39,9 @@ class ModuleUnitTest(BaseTest):
    PERSIST = False  # True to not purge temporary folder nor test DB

    TEST_OPENPYPE_MONGO = "mongodb://localhost:27017"
    TEST_DB_NAME = "test_db"
    TEST_DB_NAME = "avalon_tests"
    TEST_PROJECT_NAME = "test_project"
    TEST_OPENPYPE_NAME = "test_openpype"
    TEST_OPENPYPE_NAME = "openpype_tests"

    TEST_FILES = []

@@ -57,7 +60,7 @@ class ModuleUnitTest(BaseTest):
            m.undo()

    @pytest.fixture(scope="module")
    def download_test_data(self, test_data_folder, persist=False):
    def download_test_data(self, test_data_folder, persist, request):
        test_data_folder = test_data_folder or self.TEST_DATA_FOLDER
        if test_data_folder:
            print("Using existing folder {}".format(test_data_folder))

@@ -78,7 +81,8 @@ class ModuleUnitTest(BaseTest):
            print("Temporary folder created:: {}".format(tmpdir))
            yield tmpdir

            persist = persist or self.PERSIST
            persist = (persist or self.PERSIST or
                       self.is_test_failed(request))
            if not persist:
                print("Removing {}".format(tmpdir))
                shutil.rmtree(tmpdir)
@@ -125,7 +129,8 @@ class ModuleUnitTest(BaseTest):
        monkeypatch_session.setenv("TEST_SOURCE_FOLDER", download_test_data)

    @pytest.fixture(scope="module")
    def db_setup(self, download_test_data, env_var, monkeypatch_session):
    def db_setup(self, download_test_data, env_var, monkeypatch_session,
                 request):
        """Restore prepared MongoDB dumps into selected DB."""
        backup_dir = os.path.join(download_test_data, "input", "dumps")

@@ -135,13 +140,14 @@ class ModuleUnitTest(BaseTest):
                                   overwrite=True,
                                   db_name_out=self.TEST_DB_NAME)

        db_handler.setup_from_dump("openpype", backup_dir,
        db_handler.setup_from_dump(self.TEST_OPENPYPE_NAME, backup_dir,
                                   overwrite=True,
                                   db_name_out=self.TEST_OPENPYPE_NAME)

        yield db_handler

        if not self.PERSIST:
        persist = self.PERSIST or self.is_test_failed(request)
        if not persist:
            db_handler.teardown(self.TEST_DB_NAME)
            db_handler.teardown(self.TEST_OPENPYPE_NAME)

@@ -166,6 +172,13 @@ class ModuleUnitTest(BaseTest):
        mongo_client = OpenPypeMongoConnection.get_mongo_client()
        yield mongo_client[self.TEST_OPENPYPE_NAME]["settings"]

    def is_test_failed(self, request):
        # if request.node doesn't have rep_call, something failed
        try:
            return request.node.rep_call.failed
        except AttributeError:
            return True


class PublishTest(ModuleUnitTest):
    """Test class for publishing in hosts.

@@ -188,7 +201,7 @@ class PublishTest(ModuleUnitTest):
    TODO: implement test on file size, file content
    """

    APP = ""
    APP_GROUP = ""

    TIMEOUT = 120  # publish timeout
@@ -210,10 +223,10 @@ class PublishTest(ModuleUnitTest):
        if not app_variant:
            variant = (
                application_manager.find_latest_available_variant_for_group(
                    self.APP))
                    self.APP_GROUP))
            app_variant = variant.name

        yield "{}/{}".format(self.APP, app_variant)
        yield "{}/{}".format(self.APP_GROUP, app_variant)

    @pytest.fixture(scope="module")
    def output_folder_url(self, download_test_data):

@@ -310,7 +323,8 @@ class PublishTest(ModuleUnitTest):
        yield True

    def test_folder_structure_same(self, dbcon, publish_finished,
                                   download_test_data, output_folder_url):
                                   download_test_data, output_folder_url,
                                   skip_compare_folders):
        """Check if expected and published subfolders contain same files.

        Compares only presence, not size nor content!

@@ -328,12 +342,33 @@ class PublishTest(ModuleUnitTest):
            glob.glob(expected_dir_base + "\\**", recursive=True)
            if f != expected_dir_base and os.path.exists(f))

        not_matched = expected.symmetric_difference(published)
        assert not not_matched, "Missing {} files".format(
            "\n".join(sorted(not_matched)))
        filtered_published = self._filter_files(published,
                                                skip_compare_folders)

        # filter out temp files also in expected
        # could be polluted by accident by copying 'output' to zip file
        filtered_expected = self._filter_files(expected, skip_compare_folders)

        not_mtched = filtered_expected.symmetric_difference(filtered_published)
        if not_mtched:
            raise AssertionError("Missing {} files".format(
                "\n".join(sorted(not_mtched))))

    def _filter_files(self, source_files, skip_compare_folders):
        """Filter list of files according to regex pattern."""
        filtered = set()
        for file_path in source_files:
            if skip_compare_folders:
                if not any([re.search(val, file_path)
                            for val in skip_compare_folders]):
                    filtered.add(file_path)
            else:
                filtered.add(file_path)

        return filtered
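For illustration, the entries of `skip_compare_folders` are treated as regex patterns (the After Effects fixture earlier in this commit returns `["Logs", "Auto-Save"]`), so any path matching one of them is dropped from both sides of the comparison. The file paths below are made up for the example:

```python
import re

skip_compare_folders = ["Logs", "Auto-Save"]
paths = {
    "output/test_project/render/beauty_v001.png",
    "output/test_project/Logs/deadline_job_report.txt",   # Deadline-only artifact
    "output/test_project/Auto-Save/workfile_backup.aep",  # host auto-save
}
kept = {path for path in paths
        if not any(re.search(pattern, path) for pattern in skip_compare_folders)}
print(kept)  # {'output/test_project/render/beauty_v001.png'}
```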
class HostFixtures(PublishTest):
class HostFixtures():
    """Host specific fixtures. Should be implemented once per host."""
    @pytest.fixture(scope="module")
    def last_workfile_path(self, download_test_data, output_folder_url):

@@ -344,3 +379,8 @@ class HostFixtures(PublishTest):
    def startup_scripts(self, monkeypatch_session, download_test_data):
        """"Adds init scripts (like userSetup) to expected location"""
        raise NotImplementedError

    @pytest.fixture(scope="module")
    def skip_compare_folders(self):
        """Use list of regexs to filter out published folders from comparing"""
        raise NotImplementedError
Binary file not shown.
@@ -33,11 +33,11 @@ def test_openpype_version(printer):
    assert str(v2) == "1.2.3-x"
    assert v1 > v2

    v3 = OpenPypeVersion(1, 2, 3, staging=True)
    assert str(v3) == "1.2.3+staging"
    v3 = OpenPypeVersion(1, 2, 3)
    assert str(v3) == "1.2.3"

    v4 = OpenPypeVersion(1, 2, 3, staging="True", prerelease="rc.1")
    assert str(v4) == "1.2.3-rc.1+staging"
    v4 = OpenPypeVersion(1, 2, 3, prerelease="rc.1")
    assert str(v4) == "1.2.3-rc.1"
    assert v3 > v4
    assert v1 > v4
    assert v4 < OpenPypeVersion(1, 2, 3, prerelease="rc.1")

@@ -73,7 +73,7 @@ def test_openpype_version(printer):
        OpenPypeVersion(4, 8, 10),
        OpenPypeVersion(4, 8, 20),
        OpenPypeVersion(4, 8, 9),
        OpenPypeVersion(1, 2, 3, staging=True),
        OpenPypeVersion(1, 2, 3),
        OpenPypeVersion(1, 2, 3, build="foo")
    ]
    res = sorted(sort_versions)

@@ -104,27 +104,26 @@ def test_openpype_version(printer):
    with pytest.raises(ValueError):
        _ = OpenPypeVersion(version="booobaa")

    v11 = OpenPypeVersion(version="4.6.7-foo+staging")
    v11 = OpenPypeVersion(version="4.6.7-foo")
    assert v11.major == 4
    assert v11.minor == 6
    assert v11.patch == 7
    assert v11.staging is True
    assert v11.prerelease == "foo"


def test_get_main_version():
    ver = OpenPypeVersion(1, 2, 3, staging=True, prerelease="foo")
    ver = OpenPypeVersion(1, 2, 3, prerelease="foo")
    assert ver.get_main_version() == "1.2.3"


def test_get_version_path_from_list():
    versions = [
        OpenPypeVersion(1, 2, 3, path=Path('/foo/bar')),
        OpenPypeVersion(3, 4, 5, staging=True, path=Path("/bar/baz")),
        OpenPypeVersion(3, 4, 5, path=Path("/bar/baz")),
        OpenPypeVersion(6, 7, 8, prerelease="x", path=Path("boo/goo"))
    ]
    path = BootstrapRepos.get_version_path_from_list(
        "3.4.5+staging", versions)
        "3.4.5", versions)

    assert path == Path("/bar/baz")

@@ -362,12 +361,15 @@ def test_find_openpype(fix_bootstrap, tmp_path_factory, monkeypatch, printer):
    result = fix_bootstrap.find_openpype(include_zips=True)
    # we should have results as file were created
    assert result is not None, "no OpenPype version found"
    # latest item in `result` should be latest version found.
    # latest item in `result` should be the latest version found.
    # this will be `7.2.10-foo+staging` even with *staging* in since we've
    # dropped the logic to handle staging separately and in alphabetical
    # sorting it is after `strange`.
    expected_path = Path(
        d_path / "{}{}{}".format(
            test_versions_2[3].prefix,
            test_versions_2[3].version,
            test_versions_2[3].suffix
            test_versions_2[4].prefix,
            test_versions_2[4].version,
            test_versions_2[4].suffix
        )
    )
    assert result, "nothing found"