diff --git a/.github/ISSUE_TEMPLATE/bug_report.yml b/.github/ISSUE_TEMPLATE/bug_report.yml index f827d275a6..6f651076ce 100644 --- a/.github/ISSUE_TEMPLATE/bug_report.yml +++ b/.github/ISSUE_TEMPLATE/bug_report.yml @@ -35,6 +35,7 @@ body: label: Version description: What version are you running? Look to OpenPype Tray options: + - 3.17.7-nightly.7 - 3.17.7-nightly.6 - 3.17.7-nightly.5 - 3.17.7-nightly.4 @@ -134,7 +135,6 @@ body: - 3.15.3-nightly.3 - 3.15.3-nightly.2 - 3.15.3-nightly.1 - - 3.15.2 validations: required: true - type: dropdown diff --git a/openpype/client/server/conversion_utils.py b/openpype/client/server/conversion_utils.py index 51af99e722..e8d3c4cbe4 100644 --- a/openpype/client/server/conversion_utils.py +++ b/openpype/client/server/conversion_utils.py @@ -606,7 +606,7 @@ def convert_v4_version_to_v3(version): output_data[dst_key] = version[src_key] if "createdAt" in version: - created_at = arrow.get(version["createdAt"]) + created_at = arrow.get(version["createdAt"]).to("local") output_data["time"] = created_at.strftime("%Y%m%dT%H%M%SZ") output["data"] = output_data diff --git a/openpype/hosts/aftereffects/plugins/publish/extract_local_render.py b/openpype/hosts/aftereffects/plugins/publish/extract_local_render.py index bdb48e11f8..b44e986d83 100644 --- a/openpype/hosts/aftereffects/plugins/publish/extract_local_render.py +++ b/openpype/hosts/aftereffects/plugins/publish/extract_local_render.py @@ -60,8 +60,9 @@ class ExtractLocalRender(publish.Extractor): first_repre = not representations if instance.data["review"] and first_repre: repre_data["tags"] = ["review"] - thumbnail_path = os.path.join(staging_dir, files[0]) - instance.data["thumbnailSource"] = thumbnail_path + # TODO return back when Extract from source same as regular + # thumbnail_path = os.path.join(staging_dir, files[0]) + # instance.data["thumbnailSource"] = thumbnail_path representations.append(repre_data) diff --git a/openpype/hosts/houdini/plugins/publish/collect_chunk_size.py b/openpype/hosts/houdini/plugins/publish/collect_chunk_size.py index 1c867e930a..f8cd4089e2 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_chunk_size.py +++ b/openpype/hosts/houdini/plugins/publish/collect_chunk_size.py @@ -14,18 +14,13 @@ class CollectChunkSize(pyblish.api.InstancePlugin, hosts = ["houdini"] targets = ["local", "remote"] label = "Collect Chunk Size" - chunkSize = 999999 + chunk_size = 999999 def process(self, instance): # need to get the chunk size info from the setting attr_values = self.get_attr_values_from_data(instance.data) instance.data["chunkSize"] = attr_values.get("chunkSize") - @classmethod - def apply_settings(cls, project_settings): - project_setting = project_settings["houdini"]["publish"]["CollectChunkSize"] # noqa - cls.chunkSize = project_setting["chunk_size"] - @classmethod def get_attribute_defs(cls): return [ @@ -33,7 +28,6 @@ class CollectChunkSize(pyblish.api.InstancePlugin, minimum=1, maximum=999999, decimals=0, - default=cls.chunkSize, + default=cls.chunk_size, label="Frame Per Task") - ] diff --git a/openpype/hosts/max/api/preview_animation.py b/openpype/hosts/max/api/preview_animation.py index 6c7b8eaa80..74579b165f 100644 --- a/openpype/hosts/max/api/preview_animation.py +++ b/openpype/hosts/max/api/preview_animation.py @@ -198,8 +198,8 @@ def _render_preview_animation_max_pre_2024( res_width, res_height, filename=filepath ) dib = rt.gw.getViewportDib() - dib_width = rt.renderWidth - dib_height = rt.renderHeight + dib_width = float(dib.width) + dib_height = float(dib.height) 
# aspect ratio viewportRatio = dib_width / dib_height renderRatio = float(res_width / res_height) diff --git a/openpype/hosts/max/plugins/publish/validate_resolution_setting.py b/openpype/hosts/max/plugins/publish/validate_resolution_setting.py index 7d91a7b991..1c4b05c556 100644 --- a/openpype/hosts/max/plugins/publish/validate_resolution_setting.py +++ b/openpype/hosts/max/plugins/publish/validate_resolution_setting.py @@ -1,9 +1,12 @@ import pyblish.api +from pymxs import runtime as rt from openpype.pipeline import ( - PublishValidationError, OptionalPyblishPluginMixin ) -from pymxs import runtime as rt +from openpype.pipeline.publish import ( + RepairAction, + PublishValidationError +) from openpype.hosts.max.api.lib import reset_scene_resolution @@ -16,6 +19,7 @@ class ValidateResolutionSetting(pyblish.api.InstancePlugin, hosts = ["max"] label = "Validate Resolution Setting" optional = True + actions = [RepairAction] def process(self, instance): if not self.is_active(instance.data): diff --git a/openpype/hosts/standalonepublisher/plugins/publish/collect_hierarchy.py b/openpype/hosts/standalonepublisher/plugins/publish/collect_hierarchy.py index 9109bf6726..eb06875601 100644 --- a/openpype/hosts/standalonepublisher/plugins/publish/collect_hierarchy.py +++ b/openpype/hosts/standalonepublisher/plugins/publish/collect_hierarchy.py @@ -257,8 +257,6 @@ class CollectHierarchyContext(pyblish.api.ContextPlugin): if 'shot' not in instance.data.get('family', ''): continue - name = instance.data["asset"] - # get handles handle_start = int(instance.data["handleStart"]) handle_end = int(instance.data["handleEnd"]) @@ -286,6 +284,8 @@ class CollectHierarchyContext(pyblish.api.ContextPlugin): parents = instance.data.get('parents', []) self.log.debug(f"parents: {pformat(parents)}") + # Split by '/' for AYON where asset is a path + name = instance.data["asset"].split("/")[-1] actual = {name: in_info} for parent in reversed(parents): diff --git a/openpype/hosts/substancepainter/api/lib.py b/openpype/hosts/substancepainter/api/lib.py index 2cd08f862e..1cb480b552 100644 --- a/openpype/hosts/substancepainter/api/lib.py +++ b/openpype/hosts/substancepainter/api/lib.py @@ -583,18 +583,9 @@ def prompt_new_file_with_mesh(mesh_filepath): file_dialog.setDirectory(os.path.dirname(mesh_filepath)) url = QtCore.QUrl.fromLocalFile(os.path.basename(mesh_filepath)) file_dialog.selectUrl(url) - - # Give the explorer window time to refresh to the folder and select - # the file - while not file_dialog.selectedFiles(): - app.processEvents(QtCore.QEventLoop.ExcludeUserInputEvents, 1000) - print(f"Selected: {file_dialog.selectedFiles()}") - - # Set it again now we know the path is refreshed - without this - # accepting the dialog will often not trigger the correct filepath - file_dialog.setDirectory(os.path.dirname(mesh_filepath)) - url = QtCore.QUrl.fromLocalFile(os.path.basename(mesh_filepath)) - file_dialog.selectUrl(url) + # TODO: find a way to improve the process event to + # load more complicated mesh + app.processEvents(QtCore.QEventLoop.ExcludeUserInputEvents, 3000) file_dialog.done(file_dialog.Accepted) app.processEvents(QtCore.QEventLoop.AllEvents) @@ -628,7 +619,12 @@ def prompt_new_file_with_mesh(mesh_filepath): mesh_filename_label = mesh_filename.findChild(QtWidgets.QLabel) if not mesh_filename_label.text(): dialog.close() - raise RuntimeError(f"Failed to set mesh path: {mesh_filepath}") + substance_painter.logging.warning( + "Failed to set mesh path with the prompt dialog:" + f"{mesh_filepath}\n\n" + "Creating 
new project directly with the mesh path instead.") + else: + dialog.done(dialog.Accepted) new_action = _get_new_project_action() if not new_action: diff --git a/openpype/hosts/substancepainter/plugins/load/load_mesh.py b/openpype/hosts/substancepainter/plugins/load/load_mesh.py index 57db869a11..08c1d5c391 100644 --- a/openpype/hosts/substancepainter/plugins/load/load_mesh.py +++ b/openpype/hosts/substancepainter/plugins/load/load_mesh.py @@ -44,14 +44,22 @@ class SubstanceLoadProjectMesh(load.LoaderPlugin): # Get user inputs import_cameras = data.get("import_cameras", True) preserve_strokes = data.get("preserve_strokes", True) - + sp_settings = substance_painter.project.Settings( + import_cameras=import_cameras + ) if not substance_painter.project.is_open(): # Allow to 'initialize' a new project path = self.filepath_from_context(context) + # TODO: improve the prompt dialog function to not + # only works for simple polygon scene result = prompt_new_file_with_mesh(mesh_filepath=path) if not result: - self.log.info("User cancelled new project prompt.") - return + self.log.info("User cancelled new project prompt." + "Creating new project directly from" + " Substance Painter API Instead.") + settings = substance_painter.project.create( + mesh_file_path=path, settings=sp_settings + ) else: # Reload the mesh diff --git a/openpype/hosts/traypublisher/plugins/publish/collect_shot_instances.py b/openpype/hosts/traypublisher/plugins/publish/collect_shot_instances.py index e00ac64244..b99b634da1 100644 --- a/openpype/hosts/traypublisher/plugins/publish/collect_shot_instances.py +++ b/openpype/hosts/traypublisher/plugins/publish/collect_shot_instances.py @@ -155,8 +155,6 @@ class CollectShotInstance(pyblish.api.InstancePlugin): else {} ) - asset_name = instance.data["asset"] - # get handles handle_start = int(instance.data["handleStart"]) handle_end = int(instance.data["handleEnd"]) @@ -177,6 +175,8 @@ class CollectShotInstance(pyblish.api.InstancePlugin): parents = instance.data.get('parents', []) + # Split by '/' for AYON where asset is a path + asset_name = instance.data["asset"].split("/")[-1] actual = {asset_name: in_info} for parent in reversed(parents): diff --git a/openpype/lib/openpype_version.py b/openpype/lib/openpype_version.py index 1c8356d5fe..5618eb0c2e 100644 --- a/openpype/lib/openpype_version.py +++ b/openpype/lib/openpype_version.py @@ -140,7 +140,7 @@ def is_running_staging(): latest_version = get_latest_version(local=False, remote=True) staging_version = latest_version - if current_version == production_version: + if current_version == staging_version: return True return is_staging_enabled() diff --git a/openpype/plugins/publish/collect_hierarchy.py b/openpype/plugins/publish/collect_hierarchy.py index b5fd1e4bb9..32f10ba4c8 100644 --- a/openpype/plugins/publish/collect_hierarchy.py +++ b/openpype/plugins/publish/collect_hierarchy.py @@ -61,8 +61,9 @@ class CollectHierarchy(pyblish.api.ContextPlugin): "resolutionHeight": instance.data["resolutionHeight"], "pixelAspect": instance.data["pixelAspect"] } - - actual = {instance.data["asset"]: shot_data} + # Split by '/' for AYON where asset is a path + name = instance.data["asset"].split("/")[-1] + actual = {name: shot_data} for parent in reversed(instance.data["parents"]): next_dict = {} diff --git a/openpype/plugins/publish/collect_resources_path.py b/openpype/plugins/publish/collect_resources_path.py index 0f29fec054..af0ef17789 100644 --- a/openpype/plugins/publish/collect_resources_path.py +++ 
b/openpype/plugins/publish/collect_resources_path.py @@ -68,7 +68,6 @@ class CollectResourcesPath(pyblish.api.InstancePlugin): ] def process(self, instance): - anatomy = instance.context.data["anatomy"] template_data = copy.deepcopy(instance.data["anatomyData"]) @@ -80,11 +79,18 @@ class CollectResourcesPath(pyblish.api.InstancePlugin): "representation": "TEMP" }) - # For the first time publish - if instance.data.get("hierarchy"): - template_data.update({ - "hierarchy": instance.data["hierarchy"] - }) + # Add fill keys for editorial publishing creating new entity + # TODO handle in editorial plugin + if instance.data.get("newAssetPublishing"): + if "hierarchy" not in template_data: + template_data["hierarchy"] = instance.data["hierarchy"] + + if "asset" not in template_data: + asset_name = instance.data["asset"].split("/")[-1] + template_data["asset"] = asset_name + template_data["folder"] = { + "name": asset_name + } publish_templates = anatomy.templates_obj["publish"] if "folder" in publish_templates: diff --git a/openpype/plugins/publish/extract_hierarchy_to_ayon.py b/openpype/plugins/publish/extract_hierarchy_to_ayon.py index ef69369d67..8f791a6093 100644 --- a/openpype/plugins/publish/extract_hierarchy_to_ayon.py +++ b/openpype/plugins/publish/extract_hierarchy_to_ayon.py @@ -223,23 +223,24 @@ class ExtractHierarchyToAYON(pyblish.api.ContextPlugin): valid_ids = set() hierarchy_queue = collections.deque() - hierarchy_queue.append((project_id, project_children_context)) + hierarchy_queue.append((project_id, "", project_children_context)) while hierarchy_queue: queue_item = hierarchy_queue.popleft() - parent_id, children_context = queue_item + parent_id, parent_path, children_context = queue_item if not children_context: continue - for asset, asset_info in children_context.items(): + for folder_name, folder_info in children_context.items(): + folder_path = "{}/{}".format(parent_path, folder_name) if ( - asset not in active_folder_paths - and not asset_info.get("childs") + folder_path not in active_folder_paths + and not folder_info.get("childs") ): continue - asset_name = asset.split("/")[-1] + item_id = uuid.uuid4().hex - new_item = copy.deepcopy(asset_info) - new_item["name"] = asset_name + new_item = copy.deepcopy(folder_info) + new_item["name"] = folder_name new_item["children"] = [] new_children_context = new_item.pop("childs", None) tasks = new_item.pop("tasks", {}) @@ -253,9 +254,11 @@ class ExtractHierarchyToAYON(pyblish.api.ContextPlugin): items_by_id[item_id] = new_item parent_id_by_item_id[item_id] = parent_id - if asset in active_folder_paths: + if folder_path in active_folder_paths: valid_ids.add(item_id) - hierarchy_queue.append((item_id, new_children_context)) + hierarchy_queue.append( + (item_id, folder_path, new_children_context) + ) if not valid_ids: return None diff --git a/openpype/resources/__init__.py b/openpype/resources/__init__.py index b33d1bf023..c429fb8c3e 100644 --- a/openpype/resources/__init__.py +++ b/openpype/resources/__init__.py @@ -1,6 +1,6 @@ import os from openpype import AYON_SERVER_ENABLED -from openpype.lib.openpype_version import is_running_staging +from openpype.lib.openpype_version import is_staging_enabled RESOURCES_DIR = os.path.dirname(os.path.abspath(__file__)) @@ -59,7 +59,7 @@ def get_openpype_icon_filepath(staging=None): return get_resource("icons", "AYON_icon_dev.png") if staging is None: - staging = is_running_staging() + staging = is_staging_enabled() if staging: return get_openpype_staging_icon_filepath() @@ -68,7 +68,7 @@ def
get_openpype_icon_filepath(staging=None): def get_openpype_splash_filepath(staging=None): if staging is None: - staging = is_running_staging() + staging = is_staging_enabled() if AYON_SERVER_ENABLED: if os.getenv("AYON_USE_DEV") == "1": diff --git a/openpype/settings/defaults/project_settings/houdini.json b/openpype/settings/defaults/project_settings/houdini.json index 0001b23967..813e7153ea 100644 --- a/openpype/settings/defaults/project_settings/houdini.json +++ b/openpype/settings/defaults/project_settings/houdini.json @@ -62,6 +62,12 @@ "Main" ] }, + "CreateMantraIFD": { + "enabled": true, + "default_variants": [ + "Main" + ] + }, "CreateMantraROP": { "enabled": true, "default_variants": [ @@ -137,14 +143,14 @@ } }, "publish": { + "CollectAssetHandles": { + "use_asset_handles": true + }, "CollectChunkSize": { "enabled": true, "optional": true, "chunk_size": 999999 }, - "CollectAssetHandles": { - "use_asset_handles": true - }, "ValidateContainers": { "enabled": true, "optional": true, diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_houdini_create.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_houdini_create.json index f37738c4ec..213ec9d04e 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_houdini_create.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_houdini_create.json @@ -69,6 +69,10 @@ "key": "CreateKarmaROP", "label": "Create Karma ROP" }, + { + "key": "CreateMantraIFD", + "label": "Create Mantra IFD" + }, { "key": "CreateMantraROP", "label": "Create Mantra ROP" diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_houdini_publish.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_houdini_publish.json index 935c2a4c35..aaaf4311b7 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_houdini_publish.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_houdini_publish.json @@ -25,6 +25,31 @@ } ] }, + { + "type": "dict", + "collapsible": true, + "checkbox_key": "enabled", + "key": "CollectChunkSize", + "label": "Collect Chunk Size", + "is_group": true, + "children": [ + { + "type": "boolean", + "key": "enabled", + "label": "Enabled" + }, + { + "type": "boolean", + "key": "optional", + "label": "Optional" + }, + { + "type": "number", + "key": "chunk_size", + "label": "Frames Per Task" + } + ] + }, { "type": "label", "label": "Validators" @@ -55,31 +80,6 @@ } ] }, - { - "type": "dict", - "collapsible": true, - "checkbox_key": "enabled", - "key": "CollectChunkSize", - "label": "Collect Chunk Size", - "is_group": true, - "children": [ - { - "type": "boolean", - "key": "enabled", - "label": "Enabled" - }, - { - "type": "boolean", - "key": "optional", - "label": "Optional" - }, - { - "type": "number", - "key": "chunk_size", - "label": "Frames Per Task" - } - ] - }, { "type": "dict", "collapsible": true, diff --git a/openpype/style/style.css b/openpype/style/style.css index ca368f84f8..f6ecebd683 100644 --- a/openpype/style/style.css +++ b/openpype/style/style.css @@ -512,52 +512,58 @@ QAbstractItemView::item:selected:hover { } /* Row colors (alternate colors) are from left - right */ -QAbstractItemView:branch { - background: transparent; +QTreeView::branch { + background: {color:bg-view}; +} +QTreeView::branch:hover { + background: {color:bg-view}; +} +QTreeView::branch:selected { + background: {color:bg-view}; } QAbstractItemView::branch:open:has-children:!has-siblings, 
QAbstractItemView::branch:open:has-children:has-siblings { border-image: none; image: url(:/openpype/images/branch_open.png); - background: transparent; + background: {color:bg-view}; } QAbstractItemView::branch:open:has-children:!has-siblings:hover, QAbstractItemView::branch:open:has-children:has-siblings:hover { border-image: none; image: url(:/openpype/images/branch_open_on.png); - background: transparent; + background: {color:bg-view}; } QAbstractItemView::branch:has-children:!has-siblings:closed, QAbstractItemView::branch:closed:has-children:has-siblings { border-image: none; image: url(:/openpype/images/branch_closed.png); - background: transparent; + background: {color:bg-view}; } QAbstractItemView::branch:has-children:!has-siblings:closed:hover, QAbstractItemView::branch:closed:has-children:has-siblings:hover { border-image: none; image: url(:/openpype/images/branch_closed_on.png); - background: transparent; + background: {color:bg-view}; } QAbstractItemView::branch:has-siblings:!adjoins-item { border-image: none; image: url(:/openpype/images/transparent.png); - background: transparent; + background: {color:bg-view}; } QAbstractItemView::branch:has-siblings:adjoins-item { border-image: none; image: url(:/openpype/images/transparent.png); - background: transparent; + background: {color:bg-view}; } QAbstractItemView::branch:!has-children:!has-siblings:adjoins-item { border-image: none; image: url(:/openpype/images/transparent.png); - background: transparent; + background: {color:bg-view}; } CompleterView { diff --git a/openpype/tools/ayon_loader/models/products.py b/openpype/tools/ayon_loader/models/products.py index daa36aefdc..135f28df97 100644 --- a/openpype/tools/ayon_loader/models/products.py +++ b/openpype/tools/ayon_loader/models/products.py @@ -44,7 +44,7 @@ def version_item_from_entity(version): # NOTE There is also 'updatedAt', should be used that instead? 
# TODO skip conversion - converting to '%Y%m%dT%H%M%SZ' is because # 'PrettyTimeDelegate' expects it - created_at = arrow.get(version["createdAt"]) + created_at = arrow.get(version["createdAt"]).to("local") published_time = created_at.strftime("%Y%m%dT%H%M%SZ") author = version["author"] version_num = version["version"] diff --git a/openpype/tools/ayon_workfiles/models/workfiles.py b/openpype/tools/ayon_workfiles/models/workfiles.py index 907b9b5383..d74a8e164d 100644 --- a/openpype/tools/ayon_workfiles/models/workfiles.py +++ b/openpype/tools/ayon_workfiles/models/workfiles.py @@ -606,7 +606,7 @@ class PublishWorkfilesModel: print("Failed to format workfile path: {}".format(exc)) dirpath, filename = os.path.split(workfile_path) - created_at = arrow.get(repre_entity["createdAt"]) + created_at = arrow.get(repre_entity["createdAt"]).to("local") return FileItem( dirpath, filename, diff --git a/server_addon/houdini/server/settings/create.py b/server_addon/houdini/server/settings/create.py index e8db917849..a5ca4d477b 100644 --- a/server_addon/houdini/server/settings/create.py +++ b/server_addon/houdini/server/settings/create.py @@ -52,6 +52,9 @@ class CreatePluginsModel(BaseSettingsModel): CreateKarmaROP: CreatorModel = Field( default_factory=CreatorModel, title="Create Karma ROP") + CreateMantraIFD: CreatorModel = Field( + default_factory=CreatorModel, + title="Create Mantra IFD") CreateMantraROP: CreatorModel = Field( default_factory=CreatorModel, title="Create Mantra ROP") @@ -114,6 +117,10 @@ DEFAULT_HOUDINI_CREATE_SETTINGS = { "enabled": True, "default_variants": ["Main"] }, + "CreateMantraIFD": { + "enabled": True, + "default_variants": ["Main"] + }, "CreateMantraROP": { "enabled": True, "default_variants": ["Main"] diff --git a/server_addon/houdini/server/settings/publish.py b/server_addon/houdini/server/settings/publish.py index 92a676b0d0..f551b3a209 100644 --- a/server_addon/houdini/server/settings/publish.py +++ b/server_addon/houdini/server/settings/publish.py @@ -13,6 +13,14 @@ class CollectAssetHandlesModel(BaseSettingsModel): title="Use asset handles") +class CollectChunkSizeModel(BaseSettingsModel): + """Collect Chunk Size.""" + enabled: bool = Field(title="Enabled") + optional: bool = Field(title="Optional") + chunk_size: int = Field( + title="Frames Per Task") + + class ValidateWorkfilePathsModel(BaseSettingsModel): enabled: bool = Field(title="Enabled") optional: bool = Field(title="Optional") @@ -38,6 +46,10 @@ class PublishPluginsModel(BaseSettingsModel): CollectAssetHandles: CollectAssetHandlesModel = Field( default_factory=CollectAssetHandlesModel, title="Collect Asset Handles.", section="Collectors" ) + CollectChunkSize: CollectChunkSizeModel = Field( + default_factory=CollectChunkSizeModel, + title="Collect Chunk Size."
+ ) ValidateContainers: BasicValidateModel = Field( default_factory=BasicValidateModel, title="Validate Latest Containers.", @@ -63,6 +75,11 @@ DEFAULT_HOUDINI_PUBLISH_SETTINGS = { "CollectAssetHandles": { "use_asset_handles": True }, + "CollectChunkSize": { + "enabled": True, + "optional": True, + "chunk_size": 999999 + }, "ValidateContainers": { "enabled": True, "optional": True, diff --git a/server_addon/houdini/server/version.py b/server_addon/houdini/server/version.py index 75cf7831c4..6232f7ab18 100644 --- a/server_addon/houdini/server/version.py +++ b/server_addon/houdini/server/version.py @@ -1 +1 @@ -__version__ = "0.2.9" +__version__ = "0.2.10" diff --git a/tests/integration/hosts/aftereffects/test_deadline_publish_in_aftereffects.py b/tests/integration/hosts/aftereffects/test_deadline_publish_in_aftereffects.py index 30761693a8..1aa612207d 100644 --- a/tests/integration/hosts/aftereffects/test_deadline_publish_in_aftereffects.py +++ b/tests/integration/hosts/aftereffects/test_deadline_publish_in_aftereffects.py @@ -60,7 +60,7 @@ class TestDeadlinePublishInAfterEffects(AEDeadlinePublishTestClass): name="renderTest_taskMain")) failures.append( - DBAssert.count_of_types(dbcon, "representation", 4)) + DBAssert.count_of_types(dbcon, "representation", 3)) additional_args = {"context.subset": "workfileTest_task", "context.ext": "aep"} @@ -77,7 +77,7 @@ class TestDeadlinePublishInAfterEffects(AEDeadlinePublishTestClass): additional_args = {"context.subset": "renderTest_taskMain", "name": "thumbnail"} failures.append( - DBAssert.count_of_types(dbcon, "representation", 1, + DBAssert.count_of_types(dbcon, "representation", 0, additional_args=additional_args)) additional_args = {"context.subset": "renderTest_taskMain", diff --git a/tests/integration/hosts/aftereffects/test_deadline_publish_in_aftereffects_multicomposition.py b/tests/integration/hosts/aftereffects/test_deadline_publish_in_aftereffects_multicomposition.py index 0e9cd3b00d..4254b951b5 100644 --- a/tests/integration/hosts/aftereffects/test_deadline_publish_in_aftereffects_multicomposition.py +++ b/tests/integration/hosts/aftereffects/test_deadline_publish_in_aftereffects_multicomposition.py @@ -71,7 +71,7 @@ class TestDeadlinePublishInAfterEffectsMultiComposition(AEDeadlinePublishTestCla name="renderTest_taskMain2")) failures.append( - DBAssert.count_of_types(dbcon, "representation", 5)) + DBAssert.count_of_types(dbcon, "representation", 4)) additional_args = {"context.subset": "workfileTest_task", "context.ext": "aep"} @@ -89,7 +89,7 @@ class TestDeadlinePublishInAfterEffectsMultiComposition(AEDeadlinePublishTestCla additional_args = {"context.subset": "renderTest_taskMain", "name": "thumbnail"} failures.append( - DBAssert.count_of_types(dbcon, "representation", 1, + DBAssert.count_of_types(dbcon, "representation", 0, additional_args=additional_args)) additional_args = {"context.subset": "renderTest_taskMain", diff --git a/tests/integration/hosts/aftereffects/test_publish_in_aftereffects.py b/tests/integration/hosts/aftereffects/test_publish_in_aftereffects.py index 2e4f343a5a..f13043d8bb 100644 --- a/tests/integration/hosts/aftereffects/test_publish_in_aftereffects.py +++ b/tests/integration/hosts/aftereffects/test_publish_in_aftereffects.py @@ -58,7 +58,7 @@ class TestPublishInAfterEffects(AELocalPublishTestClass): name="renderTest_taskMain")) failures.append( - DBAssert.count_of_types(dbcon, "representation", 4)) + DBAssert.count_of_types(dbcon, "representation", 3)) additional_args = {"context.subset": "workfileTest_task", 
"context.ext": "aep"} @@ -75,7 +75,7 @@ class TestPublishInAfterEffects(AELocalPublishTestClass): additional_args = {"context.subset": "renderTest_taskMain", "name": "thumbnail"} failures.append( - DBAssert.count_of_types(dbcon, "representation", 1, + DBAssert.count_of_types(dbcon, "representation", 0, additional_args=additional_args)) additional_args = {"context.subset": "renderTest_taskMain", diff --git a/tests/integration/hosts/aftereffects/test_publish_in_aftereffects_legacy.py b/tests/integration/hosts/aftereffects/test_publish_in_aftereffects_legacy.py index a62036e4a7..b99db24e75 100644 --- a/tests/integration/hosts/aftereffects/test_publish_in_aftereffects_legacy.py +++ b/tests/integration/hosts/aftereffects/test_publish_in_aftereffects_legacy.py @@ -60,7 +60,7 @@ class TestPublishInAfterEffects(AELocalPublishTestClass): name="renderTest_taskMain")) failures.append( - DBAssert.count_of_types(dbcon, "representation", 4)) + DBAssert.count_of_types(dbcon, "representation", 2)) additional_args = {"context.subset": "workfileTest_task", "context.ext": "aep"} @@ -77,7 +77,7 @@ class TestPublishInAfterEffects(AELocalPublishTestClass): additional_args = {"context.subset": "renderTest_taskMain", "name": "thumbnail"} failures.append( - DBAssert.count_of_types(dbcon, "representation", 1, + DBAssert.count_of_types(dbcon, "representation", 0, additional_args=additional_args)) additional_args = {"context.subset": "renderTest_taskMain", @@ -89,7 +89,7 @@ class TestPublishInAfterEffects(AELocalPublishTestClass): additional_args = {"context.subset": "renderTest_taskMain", "name": "thumbnail"} failures.append( - DBAssert.count_of_types(dbcon, "representation", 1, + DBAssert.count_of_types(dbcon, "representation", 0, additional_args=additional_args)) additional_args = {"context.subset": "renderTest_taskMain", diff --git a/tests/integration/hosts/aftereffects/test_publish_in_aftereffects_multiframe.py b/tests/integration/hosts/aftereffects/test_publish_in_aftereffects_multiframe.py index dcf34844d1..bd9d3e9b50 100644 --- a/tests/integration/hosts/aftereffects/test_publish_in_aftereffects_multiframe.py +++ b/tests/integration/hosts/aftereffects/test_publish_in_aftereffects_multiframe.py @@ -45,7 +45,7 @@ class TestPublishInAfterEffects(AELocalPublishTestClass): name="renderTest_taskMain")) failures.append( - DBAssert.count_of_types(dbcon, "representation", 4)) + DBAssert.count_of_types(dbcon, "representation", 3)) additional_args = {"context.subset": "workfileTest_task", "context.ext": "aep"} @@ -62,7 +62,7 @@ class TestPublishInAfterEffects(AELocalPublishTestClass): additional_args = {"context.subset": "renderTest_taskMain", "name": "thumbnail"} failures.append( - DBAssert.count_of_types(dbcon, "representation", 1, + DBAssert.count_of_types(dbcon, "representation", 0, additional_args=additional_args)) additional_args = {"context.subset": "renderTest_taskMain", diff --git a/tests/integration/hosts/maya/test_deadline_publish_in_maya.py b/tests/integration/hosts/maya/test_deadline_publish_in_maya.py index 7d2b409db3..79d6c22a26 100644 --- a/tests/integration/hosts/maya/test_deadline_publish_in_maya.py +++ b/tests/integration/hosts/maya/test_deadline_publish_in_maya.py @@ -54,7 +54,7 @@ class TestDeadlinePublishInMaya(MayaDeadlinePublishTestClass): DBAssert.count_of_types(dbcon, "subset", 1, name="workfileTest_task")) - failures.append(DBAssert.count_of_types(dbcon, "representation", 8)) + failures.append(DBAssert.count_of_types(dbcon, "representation", 7)) # hero included additional_args = 
{"context.subset": "modelMain", @@ -85,7 +85,7 @@ class TestDeadlinePublishInMaya(MayaDeadlinePublishTestClass): additional_args = {"context.subset": "renderTest_taskMain_beauty", "context.ext": "jpg"} failures.append( - DBAssert.count_of_types(dbcon, "representation", 1, + DBAssert.count_of_types(dbcon, "representation", 0, additional_args=additional_args)) additional_args = {"context.subset": "renderTest_taskMain_beauty", diff --git a/tests/integration/hosts/maya/test_deadline_publish_in_maya/expected/test_project/test_asset/publish/render/renderTest_taskMain_beauty/v001/test_project_test_asset_renderTest_taskMain_beauty_v001.jpg b/tests/integration/hosts/maya/test_deadline_publish_in_maya/expected/test_project/test_asset/publish/render/renderTest_taskMain_beauty/v001/test_project_test_asset_renderTest_taskMain_beauty_v001.jpg deleted file mode 100644 index 79ff799e8b..0000000000 Binary files a/tests/integration/hosts/maya/test_deadline_publish_in_maya/expected/test_project/test_asset/publish/render/renderTest_taskMain_beauty/v001/test_project_test_asset_renderTest_taskMain_beauty_v001.jpg and /dev/null differ diff --git a/tests/integration/hosts/nuke/test_deadline_publish_in_nuke.py b/tests/integration/hosts/nuke/test_deadline_publish_in_nuke.py index a4026f195b..586078ead6 100644 --- a/tests/integration/hosts/nuke/test_deadline_publish_in_nuke.py +++ b/tests/integration/hosts/nuke/test_deadline_publish_in_nuke.py @@ -69,7 +69,7 @@ class TestDeadlinePublishInNuke(NukeDeadlinePublishTestClass): name="workfileTest_task")) failures.append( - DBAssert.count_of_types(dbcon, "representation", 4)) + DBAssert.count_of_types(dbcon, "representation", 3)) additional_args = {"context.subset": "workfileTest_task", "context.ext": "nk"} @@ -86,7 +86,7 @@ class TestDeadlinePublishInNuke(NukeDeadlinePublishTestClass): additional_args = {"context.subset": "renderTest_taskMain", "name": "thumbnail"} failures.append( - DBAssert.count_of_types(dbcon, "representation", 1, + DBAssert.count_of_types(dbcon, "representation", 0, additional_args=additional_args)) additional_args = {"context.subset": "renderTest_taskMain", diff --git a/tests/lib/testing_classes.py b/tests/lib/testing_classes.py index 7a90f76662..ade38d60c8 100644 --- a/tests/lib/testing_classes.py +++ b/tests/lib/testing_classes.py @@ -481,7 +481,7 @@ class DeadlinePublishTest(PublishTest): while not valid_date_finished: time.sleep(0.5) if time.time() - time_start > timeout: - raise ValueError("Timeout for DL finish reached") + raise ValueError("Timeout for Deadline finish reached") response = requests.get(url, timeout=10) if not response.ok: @@ -491,6 +491,61 @@ class DeadlinePublishTest(PublishTest): if not response.json(): raise ValueError("Couldn't find {}".format(deadline_job_id)) + job = response.json()[0] + + def recursive_dependencies(job, results=None): + if results is None: + results = [] + + for dependency in job["Props"]["Dep"]: + dependency = requests.get( + "{}/api/jobs?JobId={}".format( + deadline_url, dependency["JobID"] + ), + timeout=10 + ).json()[0] + results.append(dependency) + grand_dependencies = recursive_dependencies( + dependency, results=results + ) + for grand_dependency in grand_dependencies: + if grand_dependency not in results: + results.append(grand_dependency) + return results + + job_status = { + 0: "Unknown", + 1: "Active", + 2: "Suspended", + 3: "Completed", + 4: "Failed", + 6: "Pending" + } + + jobs_to_validate = [job] + jobs_to_validate.extend(recursive_dependencies(job)) + failed_jobs = [] + errors = [] + for 
job in jobs_to_validate: + if "Failed" == job_status[job["Stat"]]: + failed_jobs.append(str(job)) + + resp_error = requests.get( + "{}/api/jobreports?JobID={}&Data=allerrorcontents".format( + deadline_url, job["_id"] + ), + timeout=10 + ) + errors.extend(resp_error.json()) + + msg = "Errors in Deadline:\n" + msg += "\n".join(errors) + assert not errors, msg + + msg = "Failed in Deadline:\n" + msg += "\n".join(failed_jobs) + assert not failed_jobs, msg + # '0001-...' returned until job is finished valid_date_finished = response.json()[0]["DateComp"][:4] != "0001"