diff --git a/openpype/hooks/pre_foundry_apps.py b/openpype/hooks/pre_foundry_apps.py
index 85f68c6b60..70554cbedb 100644
--- a/openpype/hooks/pre_foundry_apps.py
+++ b/openpype/hooks/pre_foundry_apps.py
@@ -13,7 +13,7 @@ class LaunchFoundryAppsWindows(PreLaunchHook):
 
     # Should be as last hook because must change launch arguments to string
     order = 1000
-    app_groups = ["nuke", "nukex", "hiero", "nukestudio"]
+    app_groups = ["nuke", "nukex", "hiero", "nukestudio", "aftereffects"]
     platforms = ["windows"]
 
     def execute(self):
diff --git a/repos/avalon-core b/repos/avalon-core
index 7e5efd6885..e37f4f92ed 160000
--- a/repos/avalon-core
+++ b/repos/avalon-core
@@ -1 +1 @@
-Subproject commit 7e5efd6885330d84bb8495975bcab84df49bfa3d
+Subproject commit e37f4f92ed25f89c870fdcb7f9538da7d0d7de90
diff --git a/tests/integration/hosts/nuke/test_publish_in_nuke.py b/tests/integration/hosts/nuke/test_publish_in_nuke.py
index 3f3f191ac7..abadb0fb92 100644
--- a/tests/integration/hosts/nuke/test_publish_in_nuke.py
+++ b/tests/integration/hosts/nuke/test_publish_in_nuke.py
@@ -1,9 +1,12 @@
 import pytest
 import os
 import shutil
+import logging
 
 from tests.lib.testing_classes import PublishTest
 
+log = logging.getLogger("test_publish_in_nuke")
+
 
 class TestPublishInNuke(PublishTest):
     """Basic test case for publishing in Nuke
@@ -21,11 +24,11 @@ class TestPublishInNuke(PublishTest):
     PERSIST = True
 
     TEST_FILES = [
-        ("1Bciy2pCwMKl1UIpxuPnlX_LHMo_Xkq0K", "test_Nuke_publish.zip", "")
+        ("1635L4gww9nEkP-1EclfWXNdeDuRjDhey", "test_Nuke_publish.zip", "")
     ]
 
-    APP = "Nuke"
-    APP_VARIANT = "12"
+    APP = "nuke"
+    APP_VARIANT = "12-2"
 
     APP_NAME = "{}/{}".format(APP, APP_VARIANT)
 
@@ -37,26 +40,42 @@ class TestPublishInNuke(PublishTest):
 
            Maya expects workfile in proper folder, so copy is done first.
""" - src_path = os.path.join(download_test_data, - "input", - "workfile", - "test_project_test_asset_TestTask_v001.psd") + print("last_workfile_path") + log.info("log last_workfile_path") + src_path = os.path.join( + download_test_data, + "input", + "workfile", + "test_project_test_asset_CompositingInNuke_v001.nk") dest_folder = os.path.join(download_test_data, self.PROJECT, self.ASSET, "work", self.TASK) os.makedirs(dest_folder) - dest_path = os.path.join(dest_folder, - "test_project_test_asset_TestTask_v001.psd") + dest_path = os.path.join( + dest_folder, "test_project_test_asset_CompositingInNuke_v001.nk") shutil.copy(src_path, dest_path) yield dest_path @pytest.fixture(scope="module") def startup_scripts(self, monkeypatch_session, download_test_data): - """Points Maya to userSetup file from input data""" - pass + """Points Nuke to userSetup file from input data""" + print("startup_scripts") + log.info("log startup_scripts") + startup_path = os.path.join(download_test_data, + "input", + "startup") + startup_path = "C:\\projects\\test_nuke_publish\\input\\startup" + original_pythonpath = os.environ.get("NUKE_PATH") + monkeypatch_session.setenv("NUKE_PATH", + "{}{}{}".format(original_pythonpath, + os.pathsep, + startup_path)) + print("NUKE_PATH:: {}{}{}".format(startup_path, + os.pathsep, + original_pythonpath)) def test_db_asserts(self, dbcon, publish_finished): """Host and input data dependent expected results in DB.""" diff --git a/tests/lib/db_handler.py b/tests/lib/db_handler.py index 4dde5ba46e..88cde4d05f 100644 --- a/tests/lib/db_handler.py +++ b/tests/lib/db_handler.py @@ -165,7 +165,7 @@ class DBHandler: if collection: if not db_name: raise ValueError("db_name must be present") - coll_part = "--nsInclude={}.{}".format(db_name, collection) + coll_part = "--collection={}".format(collection) query = "\"{}\" --uri=\"{}\" --out={} {} {}".format( "mongodump", uri, output_path, db_part, coll_part ) @@ -220,11 +220,11 @@ class DBHandler: return query -#handler = DBHandler(uri="mongodb://localhost:27017") +handler = DBHandler(uri="mongodb://localhost:27017") # -#backup_dir = "c:\\projects\\test_nuke_publish\\input\\dumps" +backup_dir = "c:\\projects\\test_nuke_publish\\input\\dumps" # # -#handler.backup_to_dump("avalon", backup_dir, True, collection="test_project") +handler.backup_to_dump("avalon", backup_dir, True, collection="test_project") #handler.setup_from_dump("test_db", backup_dir, True, db_name_out="avalon", collection="test_project") # # handler.setup_from_sql_file("test_db", "c:\\projects\\sql\\item.sql", # # collection="test_project",