Merge branch 'feature/OP-2019_Create-test-publish-class-for-AE' into feature/OP-2053_Add-validator-to-check-correct-version-of-extension-for-PS-and-AE

This commit is contained in:
Petr Kalis 2021-12-09 13:34:34 +01:00
commit 2a540cdbde
7 changed files with 215 additions and 22 deletions

View file

@@ -0,0 +1,29 @@
# -*- coding: utf-8 -*-
"""Close AE after publish. For Webpublishing only."""
import os
import pyblish.api
from avalon import aftereffects
class CloseAE(pyblish.api.ContextPlugin):
"""Close AE after publish. For Webpublishing only.
"""
order = pyblish.api.IntegratorOrder + 14
label = "Close AE"
optional = True
active = True
hosts = ["aftereffects"]
targets = ["remotepublish"]
def process(self, context):
self.log.info("CloseAE")
stub = aftereffects.stub()
self.log.info("Shutting down AE")
stub.save()
stub.close()
self.log.info("AE closed")

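The plugin above only fires for the "remotepublish" pyblish target and is ordered after integration (IntegratorOrder + 14), so AE is saved and closed once everything else has run. A minimal sketch of how a target-gated publish can be driven, assuming the standard pyblish-base API (plugin discovery itself is handled by the host integration):

# Sketch only: plugins declaring targets = ["remotepublish"], such as CloseAE,
# are skipped unless that target is active for the publish run.
import pyblish.util

context = pyblish.util.publish(targets=["remotepublish"])
for result in context.data.get("results", []):
    print(result["plugin"].__name__, "ok" if result["success"] else "failed")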
View file

@@ -19,10 +19,9 @@ class ExtractLocalRender(openpype.api.Extractor):
staging_dir = instance.data["stagingDir"]
self.log.info("staging_dir::{}".format(staging_dir))
stub.render(staging_dir)
# pull file name from Render Queue Output module
render_q = stub.get_render_info()
stub.render(staging_dir)
if not render_q:
raise ValueError("No file extension set in Render Queue")
_, ext = os.path.splitext(os.path.basename(render_q.file_name))

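For context, the extension handling above is just a plain os.path.splitext on the Render Queue output module file name; a tiny illustration with a hypothetical value (not taken from the source):

import os

file_name = "C:/renders/test_asset_v001.[####].png"  # hypothetical value
_, ext = os.path.splitext(os.path.basename(file_name))
print(ext)  # '.png' -- an empty string would mean no extension is set in the queue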
View file

@@ -11,6 +11,25 @@ from openpype.lib.mongo import OpenPypeMongoConnection
from openpype.lib.plugin_tools import parse_json
def headless_publish(log, close_plugin_name=None, is_test=False):
"""Runs publish in a opened host with a context and closes Python process.
Host is being closed via ClosePS pyblish plugin which triggers 'exit'
method in ConsoleTrayApp.
"""
if not is_test:
dbcon = get_webpublish_conn()
_id = os.environ.get("BATCH_LOG_ID")
if not _id:
log.warning("Unable to store log records, "
"batch will be unfinished!")
return
publish_and_log(dbcon, _id, log, close_plugin_name)
else:
publish(log, close_plugin_name)
def get_webpublish_conn():
"""Get connection to OP 'webpublishes' collection."""
mongo_client = OpenPypeMongoConnection.get_mongo_client()
@@ -37,6 +56,33 @@ def start_webpublish_log(dbcon, batch_id, user):
}).inserted_id
def publish(log, close_plugin_name=None):
"""Loops through all plugins, logs to console. Used for tests.
Args:
log (OpenPypeLogger)
close_plugin_name (str): name of plugin with responsibility to
close host app
"""
# Error exit as soon as any error occurs.
error_format = "Failed {plugin.__name__}: {error} -- {error.traceback}"
close_plugin = _get_close_plugin(close_plugin_name, log)
for result in pyblish.util.publish_iter():
for record in result["records"]:
log.info("{}: {}".format(
result["plugin"].label, record.msg))
if result["error"]:
log.error(error_format.format(**result))
uninstall()
if close_plugin: # close host app explicitly after error
context = pyblish.api.Context()
close_plugin().process(context)
sys.exit(1)
def publish_and_log(dbcon, _id, log, close_plugin_name=None):
"""Loops through all plugins, logs ok and fails into OP DB.

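A usage sketch of the new is_test branch, under the assumption that these helpers live in openpype.lib.remote_publish and that any logger exposing the usual info/warning/error methods is acceptable:

# Sketch only: how a test run is expected to enter the publish code path.
import logging

from openpype.lib.remote_publish import headless_publish  # module path assumed

log = logging.getLogger("ae_publish_test")

# is_test=True skips the Mongo 'webpublishes' bookkeeping (no BATCH_LOG_ID
# required) and calls publish(), which logs to console and closes the host
# via the named close plugin on failure.
headless_publish(log, close_plugin_name="CloseAE", is_test=True)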
View file

@@ -1101,6 +1101,23 @@
"linux": []
},
"environment": {}
},
"2022": {
"enabled": true,
"variant_label": "2022",
"executables": {
"windows": [
"C:\\Program Files\\Adobe\\Adobe After Effects 2022\\Support Files\\AfterFX.exe"
],
"darwin": [],
"linux": []
},
"arguments": {
"windows": [],
"darwin": [],
"linux": []
},
"environment": {}
}
}
},

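The new "2022" variant is the one the After Effects test below launches; the application name it resolves to is simply the group and variant joined with a slash, mirroring the test's constants:

# How the settings entry above is addressed from the integration test.
APP = "aftereffects"
APP_VARIANT = "2022"
APP_NAME = "{}/{}".format(APP, APP_VARIANT)
assert APP_NAME == "aftereffects/2022"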
View file

@@ -14,12 +14,12 @@ How to run:
----------
- a single test class can be run directly by PyCharm and its pytest runner
- OR
- use Openpype command 'runtests' from command line
-- `${OPENPYPE_ROOT}/start.py runtests`
- use the OpenPype command 'runtests' from the command line (the `.venv` in ${OPENPYPE_ROOT} must be activated to use the configured Python!)
-- `${OPENPYPE_ROOT}/python start.py runtests`
By default, this command will run all tests in ${OPENPYPE_ROOT}/tests.
A specific location can be provided to this command as an argument, either as an absolute path or a path relative to ${OPENPYPE_ROOT}.
(eg. `${OPENPYPE_ROOT}/start.py runtests ../tests/integration`) will trigger only tests in `integration` folder.
(e.g. `${OPENPYPE_ROOT}/python start.py runtests ../tests/integration` will trigger only the tests in the `integration` folder).
See `${OPENPYPE_ROOT}/cli.py:runtests` for other arguments.

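If the pytest runner route is preferred over runtests, the new After Effects suite can also be targeted directly; a sketch, with the test path being an assumption about the repository layout:

# Sketch: run the AfterEffects publish test module through pytest,
# equivalent to using the PyCharm runner mentioned above.
import pytest

pytest.main(["-x", "tests/integration/hosts/aftereffects"])  # path assumed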
View file

@@ -0,0 +1,101 @@
import pytest
import os
import shutil
from tests.lib.testing_classes import PublishTest
class TestPublishInAfterEffects(PublishTest):
"""Basic test case for publishing in AfterEffects
Uses generic TestCase to prepare fixtures for test data, testing DBs,
env vars.
Opens AfterEffects, runs publish on prepared workfile.
Test zip file sets 3 required env vars:
- HEADLESS_PUBLISH - triggers publish immediately after the app is open
- IS_TEST - differentiates a test run from a regular webpublish
- PYBLISH_TARGETS
Then checks content of DB (if subset, version, representations were
created).
Checks tmp folder to verify all expected files were published.
"""
PERSIST = True
TEST_FILES = [
("1c8261CmHwyMgS-g7S4xL5epAp0jCBmhf",
"test_aftereffects_publish.zip",
"")
]
APP = "aftereffects"
APP_VARIANT = "2022"
APP_NAME = "{}/{}".format(APP, APP_VARIANT)
TIMEOUT = 120 # publish timeout
@pytest.fixture(scope="module")
def last_workfile_path(self, download_test_data):
"""Get last_workfile_path from source data.
Maya expects workfile in proper folder, so copy is done first.
"""
src_path = os.path.join(download_test_data,
"input",
"workfile",
"test_project_test_asset_TestTask_v001.aep")
dest_folder = os.path.join(download_test_data,
self.PROJECT,
self.ASSET,
"work",
self.TASK)
os.makedirs(dest_folder)
dest_path = os.path.join(dest_folder,
"test_project_test_asset_TestTask_v001.aep")
shutil.copy(src_path, dest_path)
yield dest_path
@pytest.fixture(scope="module")
def startup_scripts(self, monkeypatch_session, download_test_data):
"""Points AfterEffects to userSetup file from input data"""
pass
def test_db_asserts(self, dbcon, publish_finished):
"""Host and input data dependent expected results in DB."""
print("test_db_asserts")
assert 3 == dbcon.count_documents({"type": "version"}), \
"Not expected no of versions"
assert 0 == dbcon.count_documents({"type": "version",
"name": {"$ne": 1}}), \
"Only versions with 1 expected"
assert 1 == dbcon.count_documents({"type": "subset",
"name": "imageMainBackgroundcopy"
}), \
"modelMain subset must be present"
assert 1 == dbcon.count_documents({"type": "subset",
"name": "workfileTesttask"}), \
"workfileTesttask subset must be present"
assert 1 == dbcon.count_documents({"type": "subset",
"name": "reviewTesttask"}), \
"reviewTesttask subset must be present"
assert 6 == dbcon.count_documents({"type": "representation"}), \
"Not expected no of representations"
assert 1 == dbcon.count_documents({"type": "representation",
"context.subset": "imageMainBackgroundcopy", #noqa E501
"context.ext": "png"}), \
"Not expected no of representations with ext 'png'"
if __name__ == "__main__":
test_case = TestPublishInAfterEffects()

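For orientation, the three environment variables the docstring mentions could be set like this; the values shown are assumptions, since in reality they come from the prepared test zip rather than from code:

import os

os.environ["HEADLESS_PUBLISH"] = "1"             # assumed value: publish as soon as AE opens
os.environ["IS_TEST"] = "1"                      # assumed value: marks a test run, not a webpublish
os.environ["PYBLISH_TARGETS"] = "remotepublish"  # assumed value: activates plugins such as CloseAE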
View file

@@ -11,7 +11,12 @@ class TestPublishInPhotoshop(PublishTest):
Uses generic TestCase to prepare fixtures for test data, testing DBs,
env vars.
Opens Maya, run publish on prepared workile.
Opens Photoshop, runs publish on prepared workfile.
Test zip file sets 3 required env vars:
- HEADLESS_PUBLISH - triggers publish immediately after the app is open
- IS_TEST - differentiates a test run from a regular webpublish
- PYBLISH_TARGETS
Then checks content of DB (if subset, version, representations were
created).
@@ -21,11 +26,11 @@ class TestPublishInPhotoshop(PublishTest):
PERSIST = True
TEST_FILES = [
("1Bciy2pCwMKl1UIpxuPnlX_LHMo_Xkq0K", "test_photoshop_publish.zip", "")
("1zD2v5cBgkyOm_xIgKz3WKn8aFB_j8qC-", "test_photoshop_publish.zip", "")
]
APP = "photoshop"
APP_VARIANT = "2020"
APP_VARIANT = "2021"
APP_NAME = "{}/{}".format(APP, APP_VARIANT)
@@ -56,12 +61,12 @@ class TestPublishInPhotoshop(PublishTest):
@pytest.fixture(scope="module")
def startup_scripts(self, monkeypatch_session, download_test_data):
"""Points Maya to userSetup file from input data"""
os.environ["IS_HEADLESS"] = "true"
pass
def test_db_asserts(self, dbcon, publish_finished):
"""Host and input data dependent expected results in DB."""
print("test_db_asserts")
assert 5 == dbcon.count_documents({"type": "version"}), \
assert 3 == dbcon.count_documents({"type": "version"}), \
"Not expected no of versions"
assert 0 == dbcon.count_documents({"type": "version",
@@ -69,25 +74,21 @@ class TestPublishInPhotoshop(PublishTest):
"Only versions with 1 expected"
assert 1 == dbcon.count_documents({"type": "subset",
"name": "modelMain"}), \
"name": "imageMainBackgroundcopy"}
), \
"modelMain subset must be present"
assert 1 == dbcon.count_documents({"type": "subset",
"name": "workfileTest_task"}), \
"name": "workfileTesttask"}), \
"workfileTest_task subset must be present"
assert 11 == dbcon.count_documents({"type": "representation"}), \
assert 6 == dbcon.count_documents({"type": "representation"}), \
"Not expected no of representations"
assert 2 == dbcon.count_documents({"type": "representation",
"context.subset": "modelMain",
"context.ext": "abc"}), \
"Not expected no of representations with ext 'abc'"
assert 2 == dbcon.count_documents({"type": "representation",
"context.subset": "modelMain",
"context.ext": "ma"}), \
"Not expected no of representations with ext 'abc'"
assert 1 == dbcon.count_documents({"type": "representation",
"context.subset": "imageMainBackgroundcopy", # noqa: E501
"context.ext": "png"}), \
"Not expected no of representations with ext 'png'"
if __name__ == "__main__":