diff --git a/CHANGELOG.md b/CHANGELOG.md index b7ef795f0a..2adb4ac154 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,23 @@ # Changelog +## [3.13.1-nightly.2](https://github.com/pypeclub/OpenPype/tree/HEAD) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.13.0...HEAD) + +**🐛 Bug fixes** + +- General: Hero version representations have full context [\#3638](https://github.com/pypeclub/OpenPype/pull/3638) +- Maya: FBX support for update in reference loader [\#3631](https://github.com/pypeclub/OpenPype/pull/3631) + +**🔀 Refactored code** + +- TimersManager: Plugins are in timers manager module [\#3639](https://github.com/pypeclub/OpenPype/pull/3639) +- General: Move workfiles functions into pipeline [\#3637](https://github.com/pypeclub/OpenPype/pull/3637) + +**Merged pull requests:** + +- Kitsu|Fix: Movie project type fails & first loop children names [\#3636](https://github.com/pypeclub/OpenPype/pull/3636) + ## [3.13.0](https://github.com/pypeclub/OpenPype/tree/3.13.0) (2022-08-09) [Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.13.0-nightly.1...3.13.0) @@ -37,6 +55,7 @@ - General: Update imports in start script [\#3579](https://github.com/pypeclub/OpenPype/pull/3579) - Nuke: render family integration consistency [\#3576](https://github.com/pypeclub/OpenPype/pull/3576) - Ftrack: Handle missing published path in integrator [\#3570](https://github.com/pypeclub/OpenPype/pull/3570) +- Maya: fix Review image plane attribute [\#3569](https://github.com/pypeclub/OpenPype/pull/3569) - Nuke: publish existing frames with slate with correct range [\#3555](https://github.com/pypeclub/OpenPype/pull/3555) **🔀 Refactored code** @@ -68,13 +87,9 @@ - Maya: add additional validators to Settings [\#3540](https://github.com/pypeclub/OpenPype/pull/3540) - General: Interactive console in cli [\#3526](https://github.com/pypeclub/OpenPype/pull/3526) - Ftrack: Automatic daily review session creation can define trigger hour [\#3516](https://github.com/pypeclub/OpenPype/pull/3516) -- Ftrack: add source into Note [\#3509](https://github.com/pypeclub/OpenPype/pull/3509) -- Add pack and unpack convenience scripts [\#3502](https://github.com/pypeclub/OpenPype/pull/3502) -- NewPublisher: Keep plugins with mismatch target in report [\#3498](https://github.com/pypeclub/OpenPype/pull/3498) **🐛 Bug fixes** -- Maya: fix Review image plane attribute [\#3569](https://github.com/pypeclub/OpenPype/pull/3569) - Maya: Fix animated attributes \(ie. overscan\) on loaded cameras breaking review publishing. 
[\#3562](https://github.com/pypeclub/OpenPype/pull/3562) - NewPublisher: Python 2 compatible html escape [\#3559](https://github.com/pypeclub/OpenPype/pull/3559) - Remove invalid submodules from `/vendor` [\#3557](https://github.com/pypeclub/OpenPype/pull/3557) @@ -89,8 +104,6 @@ - General: Fix hash of centos oiio archive [\#3519](https://github.com/pypeclub/OpenPype/pull/3519) - Maya: Renderman display output fix [\#3514](https://github.com/pypeclub/OpenPype/pull/3514) - TrayPublisher: Simple creation enhancements and fixes [\#3513](https://github.com/pypeclub/OpenPype/pull/3513) -- TrayPublisher: Make sure host name is filled [\#3504](https://github.com/pypeclub/OpenPype/pull/3504) -- NewPublisher: Groups work and enum multivalue [\#3501](https://github.com/pypeclub/OpenPype/pull/3501) **🔀 Refactored code** diff --git a/openpype/client/operations.py b/openpype/client/operations.py index 69d1eb2bb6..c4b95bf696 100644 --- a/openpype/client/operations.py +++ b/openpype/client/operations.py @@ -17,6 +17,7 @@ CURRENT_ASSET_DOC_SCHEMA = "openpype:asset-3.0" CURRENT_SUBSET_SCHEMA = "openpype:subset-3.0" CURRENT_VERSION_SCHEMA = "openpype:version-3.0" CURRENT_REPRESENTATION_SCHEMA = "openpype:representation-2.0" +CURRENT_WORKFILE_INFO_SCHEMA = "openpype:workfile-1.0" def _create_or_convert_to_mongo_id(mongo_id): @@ -188,6 +189,38 @@ def new_representation_doc( } +def new_workfile_info_doc( + filename, asset_id, task_name, files, data=None, entity_id=None +): + """Create skeleton data of workfile info document. + + Workfile document is at this moment used primarily for artist notes. + + Args: + filename (str): Filename of workfile. + asset_id (Union[str, ObjectId]): Id of asset under which workfile live. + task_name (str): Task under which was workfile created. + files (List[str]): List of rootless filepaths related to workfile. + data (Dict[str, Any]): Additional metadata. + + Returns: + Dict[str, Any]: Skeleton of workfile info document. + """ + + if not data: + data = {} + + return { + "_id": _create_or_convert_to_mongo_id(entity_id), + "type": "workfile", + "parent": ObjectId(asset_id), + "task_name": task_name, + "filename": filename, + "data": data, + "files": files + } + + def _prepare_update_data(old_doc, new_doc, replace): changes = {} for key, value in new_doc.items(): @@ -243,6 +276,20 @@ def prepare_representation_update_data(old_doc, new_doc, replace=True): return _prepare_update_data(old_doc, new_doc, replace) +def prepare_workfile_info_update_data(old_doc, new_doc, replace=True): + """Compare two workfile info documents and prepare update data. + + Based on compared values will create update data for 'UpdateOperation'. + + Empty output means that documents are identical. + + Returns: + Dict[str, Any]: Changes between old and new document. + """ + + return _prepare_update_data(old_doc, new_doc, replace) + + @six.add_metaclass(ABCMeta) class AbstractOperation(object): """Base operation class. 
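The `operations.py` hunk above introduces two workfile-info helpers. Below is a minimal usage sketch; only the two function signatures come from the diff itself, while the ids, filenames, and rootless paths are illustrative placeholders.

```python
# Sketch of the new helpers from openpype/client/operations.py.
# All concrete values (ids, paths, note text) are hypothetical examples.
from openpype.client.operations import (
    new_workfile_info_doc,
    prepare_workfile_info_update_data,
)

# Build the skeleton document for a freshly saved workfile. The asset id
# is a 24-hex string that 'new_workfile_info_doc' converts to an ObjectId.
doc = new_workfile_info_doc(
    filename="sh010_compositing_v001.nk",
    asset_id="61f0c5e4a3b2c1d4e5f6a7b8",
    task_name="compositing",
    files=["{root[work]}/proj/shots/sh010/work/sh010_compositing_v001.nk"],
    data={"note": "first pass"},
)

# Later, compare a stored document against an edited copy to get only the
# keys that changed; an empty result means the documents are identical.
new_doc = dict(doc, data={"note": "updated artist note"})
changes = prepare_workfile_info_update_data(doc, new_doc)
```

As the docstrings note, the resulting changes dictionary is intended to feed an update operation, so callers never need to diff the documents by hand.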
diff --git a/openpype/hooks/pre_copy_template_workfile.py b/openpype/hooks/pre_copy_template_workfile.py index dffac22ee2..70c549919f 100644 --- a/openpype/hooks/pre_copy_template_workfile.py +++ b/openpype/hooks/pre_copy_template_workfile.py @@ -1,11 +1,11 @@ import os import shutil -from openpype.lib import ( - PreLaunchHook, - get_custom_workfile_template_by_context, +from openpype.lib import PreLaunchHook +from openpype.settings import get_project_settings +from openpype.pipeline.workfile import ( + get_custom_workfile_template, get_custom_workfile_template_by_string_context ) -from openpype.settings import get_project_settings class CopyTemplateWorkfile(PreLaunchHook): @@ -54,41 +54,22 @@ class CopyTemplateWorkfile(PreLaunchHook): project_name = self.data["project_name"] asset_name = self.data["asset_name"] task_name = self.data["task_name"] + host_name = self.application.host_name project_settings = get_project_settings(project_name) - host_settings = project_settings[self.application.host_name] - - workfile_builder_settings = host_settings.get("workfile_builder") - if not workfile_builder_settings: - # TODO remove warning when deprecated - self.log.warning(( - "Seems like old version of settings is used." - " Can't access custom templates in host \"{}\"." - ).format(self.application.full_label)) - return - - if not workfile_builder_settings["create_first_version"]: - self.log.info(( - "Project \"{}\" has turned off to create first workfile for" - " application \"{}\"" - ).format(project_name, self.application.full_label)) - return - - # Backwards compatibility - template_profiles = workfile_builder_settings.get("custom_templates") - if not template_profiles: - self.log.info( - "Custom templates are not filled. Skipping template copy." - ) - return project_doc = self.data.get("project_doc") asset_doc = self.data.get("asset_doc") anatomy = self.data.get("anatomy") if project_doc and asset_doc: self.log.debug("Started filtering of custom template paths.") - template_path = get_custom_workfile_template_by_context( - template_profiles, project_doc, asset_doc, task_name, anatomy + template_path = get_custom_workfile_template( + project_doc, + asset_doc, + task_name, + host_name, + anatomy, + project_settings ) else: @@ -96,10 +77,13 @@ class CopyTemplateWorkfile(PreLaunchHook): "Global data collection probably did not execute." " Using backup solution." 
)) - dbcon = self.data.get("dbcon") template_path = get_custom_workfile_template_by_string_context( - template_profiles, project_name, asset_name, task_name, - dbcon, anatomy + project_name, + asset_name, + task_name, + host_name, + anatomy, + project_settings ) if not template_path: diff --git a/openpype/hosts/flame/plugins/publish/integrate_batch_group.py b/openpype/hosts/flame/plugins/publish/integrate_batch_group.py index b59107f155..4d45f67ded 100644 --- a/openpype/hosts/flame/plugins/publish/integrate_batch_group.py +++ b/openpype/hosts/flame/plugins/publish/integrate_batch_group.py @@ -3,9 +3,9 @@ import copy from collections import OrderedDict from pprint import pformat import pyblish -from openpype.lib import get_workdir import openpype.hosts.flame.api as opfapi import openpype.pipeline as op_pipeline +from openpype.pipeline.workfile import get_workdir class IntegrateBatchGroup(pyblish.api.InstancePlugin): @@ -324,7 +324,13 @@ class IntegrateBatchGroup(pyblish.api.InstancePlugin): project_doc = instance.data["projectEntity"] asset_entity = instance.data["assetEntity"] anatomy = instance.context.data["anatomy"] + project_settings = instance.context.data["project_settings"] return get_workdir( - project_doc, asset_entity, task_data["name"], "flame", anatomy + project_doc, + asset_entity, + task_data["name"], + "flame", + anatomy, + project_settings=project_settings ) diff --git a/openpype/hosts/fusion/scripts/fusion_switch_shot.py b/openpype/hosts/fusion/scripts/fusion_switch_shot.py index 87ff8e2ffe..49ef340679 100644 --- a/openpype/hosts/fusion/scripts/fusion_switch_shot.py +++ b/openpype/hosts/fusion/scripts/fusion_switch_shot.py @@ -15,7 +15,7 @@ from openpype.pipeline import ( from openpype.lib import version_up from openpype.hosts.fusion import api from openpype.hosts.fusion.api import lib -from openpype.lib.avalon_context import get_workdir_from_session +from openpype.pipeline.context_tools import get_workdir_from_session log = logging.getLogger("Update Slap Comp") diff --git a/openpype/hosts/fusion/utility_scripts/switch_ui.py b/openpype/hosts/fusion/utility_scripts/switch_ui.py index 01d55db647..93f775b24b 100644 --- a/openpype/hosts/fusion/utility_scripts/switch_ui.py +++ b/openpype/hosts/fusion/utility_scripts/switch_ui.py @@ -14,7 +14,7 @@ from openpype.pipeline import ( legacy_io, ) from openpype.hosts.fusion import api -from openpype.lib.avalon_context import get_workdir_from_session +from openpype.pipeline.context_tools import get_workdir_from_session log = logging.getLogger("Fusion Switch Shot") diff --git a/openpype/hosts/maya/api/menu.py b/openpype/hosts/maya/api/menu.py index c3ce8b0227..b7ab529a55 100644 --- a/openpype/hosts/maya/api/menu.py +++ b/openpype/hosts/maya/api/menu.py @@ -6,9 +6,9 @@ from Qt import QtWidgets, QtGui import maya.utils import maya.cmds as cmds -from openpype.api import BuildWorkfile from openpype.settings import get_project_settings from openpype.pipeline import legacy_io +from openpype.pipeline.workfile import BuildWorkfile from openpype.tools.utils import host_tools from openpype.hosts.maya.api import lib, lib_rendersettings from .lib import get_main_window, IS_HEADLESS diff --git a/openpype/hosts/maya/api/plugin.py b/openpype/hosts/maya/api/plugin.py index 9280805945..e50ebfccad 100644 --- a/openpype/hosts/maya/api/plugin.py +++ b/openpype/hosts/maya/api/plugin.py @@ -208,7 +208,8 @@ class ReferenceLoader(Loader): file_type = { "ma": "mayaAscii", "mb": "mayaBinary", - "abc": "Alembic" + "abc": "Alembic", + "fbx": "FBX" 
}.get(representation["name"]) assert file_type, "Unsupported representation: %s" % representation @@ -234,7 +235,7 @@ class ReferenceLoader(Loader): path = self.prepare_root_value(path, representation["context"] ["project"] - ["code"]) + ["name"]) content = cmds.file(path, loadReference=reference_node, type=file_type, diff --git a/openpype/hosts/maya/plugins/publish/collect_render.py b/openpype/hosts/maya/plugins/publish/collect_render.py index e1f4efcc07..f8014c1f86 100644 --- a/openpype/hosts/maya/plugins/publish/collect_render.py +++ b/openpype/hosts/maya/plugins/publish/collect_render.py @@ -193,7 +193,6 @@ class CollectMayaRender(pyblish.api.ContextPlugin): ) # append full path - full_exp_files = [] aov_dict = {} default_render_file = context.data.get('project_settings')\ .get('maya')\ @@ -213,6 +212,7 @@ class CollectMayaRender(pyblish.api.ContextPlugin): full_paths.append(full_path) publish_meta_path = os.path.dirname(full_path) aov_dict[aov_first_key] = full_paths + full_exp_files = [aov_dict] frame_start_render = int(self.get_render_attribute( "startFrame", layer=layer_name)) diff --git a/openpype/hosts/maya/plugins/publish/extract_look.py b/openpype/hosts/maya/plugins/publish/extract_look.py index 0b26e922d5..8be0c7aae5 100644 --- a/openpype/hosts/maya/plugins/publish/extract_look.py +++ b/openpype/hosts/maya/plugins/publish/extract_look.py @@ -429,7 +429,19 @@ class ExtractLook(openpype.api.Extractor): # node doesn't have color space attribute color_space = "Raw" else: - if files_metadata[source]["color_space"] == "Raw": + # get the resolved files + metadata = files_metadata.get(source) + # if the files are unresolved from `source` + # assume color space from the first file of + # the resource + if not metadata: + first_file = next(iter(resource.get( + "files", [])), None) + if not first_file: + continue + first_filepath = os.path.normpath(first_file) + metadata = files_metadata[first_filepath] + if metadata["color_space"] == "Raw": # set color space to raw if we linearized it color_space = "Raw" # Remap file node filename to destination diff --git a/openpype/hosts/nuke/api/lib.py b/openpype/hosts/nuke/api/lib.py index 501ab4ba93..a53d932db1 100644 --- a/openpype/hosts/nuke/api/lib.py +++ b/openpype/hosts/nuke/api/lib.py @@ -21,7 +21,6 @@ from openpype.client import ( ) from openpype.api import ( Logger, - BuildWorkfile, get_version_from_path, get_current_project_settings, ) @@ -39,7 +38,11 @@ from openpype.pipeline import ( legacy_io, Anatomy, ) -from openpype.pipeline.context_tools import get_current_project_asset +from openpype.pipeline.context_tools import ( + get_current_project_asset, + get_custom_workfile_template_from_session +) +from openpype.pipeline.workfile import BuildWorkfile from . 
import gizmo_menu @@ -912,7 +915,7 @@ def get_render_path(node): avalon_knob_data = read_avalon_data(node) nuke_imageio_writes = get_imageio_node_setting( - node_class=avalon_knob_data["family"], + node_class=avalon_knob_data["families"], plugin_name=avalon_knob_data["creator"], subset=avalon_knob_data["subset"] ) @@ -1920,7 +1923,7 @@ class WorkfileSettings(object): families.append(avalon_knob_data.get("families")) nuke_imageio_writes = get_imageio_node_setting( - node_class=avalon_knob_data["family"], + node_class=avalon_knob_data["families"], plugin_name=avalon_knob_data["creator"], subset=avalon_knob_data["subset"] ) @@ -2219,7 +2222,7 @@ def get_write_node_template_attr(node): avalon_knob_data = read_avalon_data(node) # get template data nuke_imageio_writes = get_imageio_node_setting( - node_class=avalon_knob_data["family"], + node_class=avalon_knob_data["families"], plugin_name=avalon_knob_data["creator"], subset=avalon_knob_data["subset"] ) @@ -2444,15 +2447,12 @@ def _launch_workfile_app(): def process_workfile_builder(): - from openpype.lib import ( - env_value_to_bool, - get_custom_workfile_template - ) # to avoid looping of the callback, remove it! nuke.removeOnCreate(process_workfile_builder, nodeClass="Root") # get state from settings - workfile_builder = get_current_project_settings()["nuke"].get( + project_settings = get_current_project_settings() + workfile_builder = project_settings["nuke"].get( "workfile_builder", {}) # get all imortant settings @@ -2462,7 +2462,6 @@ def process_workfile_builder(): # get settings createfv_on = workfile_builder.get("create_first_version") or None - custom_templates = workfile_builder.get("custom_templates") or None builder_on = workfile_builder.get("builder_on_start") or None last_workfile_path = os.environ.get("AVALON_LAST_WORKFILE") @@ -2470,8 +2469,8 @@ def process_workfile_builder(): # generate first version in file not existing and feature is enabled if createfv_on and not os.path.exists(last_workfile_path): # get custom template path if any - custom_template_path = get_custom_workfile_template( - custom_templates + custom_template_path = get_custom_workfile_template_from_session( + project_settings=project_settings ) # if custom template is defined diff --git a/openpype/hosts/nuke/api/pipeline.py b/openpype/hosts/nuke/api/pipeline.py index 0afc56d2f7..c1cd8f771a 100644 --- a/openpype/hosts/nuke/api/pipeline.py +++ b/openpype/hosts/nuke/api/pipeline.py @@ -9,7 +9,6 @@ import pyblish.api import openpype from openpype.api import ( Logger, - BuildWorkfile, get_current_project_settings ) from openpype.lib import register_event_callback @@ -22,6 +21,7 @@ from openpype.pipeline import ( deregister_inventory_action_path, AVALON_CONTAINER_ID, ) +from openpype.pipeline.workfile import BuildWorkfile from openpype.tools.utils import host_tools from .command import viewer_update_and_undo_stop diff --git a/openpype/hosts/tvpaint/plugins/load/load_workfile.py b/openpype/hosts/tvpaint/plugins/load/load_workfile.py index 8b09d20755..a99b300730 100644 --- a/openpype/hosts/tvpaint/plugins/load/load_workfile.py +++ b/openpype/hosts/tvpaint/plugins/load/load_workfile.py @@ -1,15 +1,15 @@ import os -from openpype.lib import ( - StringTemplate, - get_workfile_template_key_from_context, - get_last_workfile_with_version, -) +from openpype.lib import StringTemplate from openpype.pipeline import ( registered_host, legacy_io, Anatomy, ) +from openpype.pipeline.workfile import ( + get_workfile_template_key_from_context, + get_last_workfile_with_version, +) from 
openpype.pipeline.template_data import get_template_data_with_names from openpype.hosts.tvpaint.api import lib, pipeline, plugin @@ -57,8 +57,7 @@ class LoadWorkfile(plugin.Loader): asset_name, task_name, host_name, - project_name=project_name, - dbcon=legacy_io + project_name=project_name ) anatomy = Anatomy(project_name) diff --git a/openpype/lib/applications.py b/openpype/lib/applications.py index da8623ea13..8c92665366 100644 --- a/openpype/lib/applications.py +++ b/openpype/lib/applications.py @@ -27,11 +27,6 @@ from openpype.settings.constants import ( from . import PypeLogger from .profiles_filtering import filter_profiles from .local_settings import get_openpype_username -from .avalon_context import ( - get_workdir_with_workdir_data, - get_workfile_template_key, - get_last_workfile -) from .python_module_tools import ( modules_from_path, @@ -1635,7 +1630,14 @@ def prepare_context_environments(data, env_group=None): data["task_type"] = task_type try: - workdir = get_workdir_with_workdir_data(workdir_data, anatomy) + from openpype.pipeline.workfile import get_workdir_with_workdir_data + + workdir = get_workdir_with_workdir_data( + workdir_data, + anatomy.project_name, + anatomy, + project_settings=project_settings + ) except Exception as exc: raise ApplicationLaunchFailed( @@ -1725,11 +1727,19 @@ def _prepare_last_workfile(data, workdir): if not last_workfile_path: extensions = HOST_WORKFILE_EXTENSIONS.get(app.host_name) if extensions: + from openpype.pipeline.workfile import ( + get_workfile_template_key, + get_last_workfile + ) + anatomy = data["anatomy"] project_settings = data["project_settings"] task_type = workdir_data["task"]["type"] template_key = get_workfile_template_key( - task_type, app.host_name, project_settings=project_settings + task_type, + app.host_name, + project_name, + project_settings=project_settings ) # Find last workfile file_template = str(anatomy.templates[template_key]["file"]) diff --git a/openpype/lib/avalon_context.py b/openpype/lib/avalon_context.py index 42854f39d6..3c7e9b6289 100644 --- a/openpype/lib/avalon_context.py +++ b/openpype/lib/avalon_context.py @@ -321,6 +321,8 @@ def get_latest_version(asset_name, subset_name, dbcon=None, project_name=None): ) +@deprecated( + "openpype.pipeline.workfile.get_workfile_template_key_from_context") def get_workfile_template_key_from_context( asset_name, task_name, host_name, project_name=None, dbcon=None, project_settings=None @@ -349,27 +351,26 @@ def get_workfile_template_key_from_context( ValueError: When both 'dbcon' and 'project_name' were not passed. """ + + from openpype.pipeline.workfile import ( + get_workfile_template_key_from_context + ) + if not project_name: if not dbcon: raise ValueError(( "`get_workfile_template_key_from_context` requires to pass" " one of 'dbcon' or 'project_name' arguments." 
)) - project_name = dbcon.active_project() - asset_doc = get_asset_by_name( - project_name, asset_name, fields=["data.tasks"] - ) - asset_tasks = asset_doc.get("data", {}).get("tasks") or {} - task_info = asset_tasks.get(task_name) or {} - task_type = task_info.get("type") - - return get_workfile_template_key( - task_type, host_name, project_name, project_settings + return get_workfile_template_key_from_context( + asset_name, task_name, host_name, project_name, project_settings ) +@deprecated( + "openpype.pipeline.workfile.get_workfile_template_key") def get_workfile_template_key( task_type, host_name, project_name=None, project_settings=None ): @@ -393,40 +394,12 @@ def get_workfile_template_key( ValueError: When both 'project_name' and 'project_settings' were not passed. """ - default = "work" - if not task_type or not host_name: - return default - if not project_settings: - if not project_name: - raise ValueError(( - "`get_workfile_template_key` requires to pass" - " one of 'project_name' or 'project_settings' arguments." - )) - project_settings = get_project_settings(project_name) + from openpype.pipeline.workfile import get_workfile_template_key - try: - profiles = ( - project_settings - ["global"] - ["tools"] - ["Workfiles"] - ["workfile_template_profiles"] - ) - except Exception: - profiles = [] - - if not profiles: - return default - - profile_filter = { - "task_types": task_type, - "hosts": host_name - } - profile = filter_profiles(profiles, profile_filter) - if profile: - return profile["workfile_template"] or default - return default + return get_workfile_template_key( + task_type, host_name, project_name, project_settings + ) @deprecated("openpype.pipeline.template_data.get_template_data") @@ -454,6 +427,7 @@ def get_workdir_data(project_doc, asset_doc, task_name, host_name): ) +@deprecated("openpype.pipeline.workfile.get_workdir_with_workdir_data") def get_workdir_with_workdir_data( workdir_data, anatomy=None, project_name=None, template_key=None ): @@ -480,31 +454,24 @@ def get_workdir_with_workdir_data( Raises: ValueError: When both `anatomy` and `project_name` are set to None. """ + if not anatomy and not project_name: raise ValueError(( "Missing required arguments one of `project_name` or `anatomy`" " must be entered." )) - if not anatomy: - from openpype.pipeline import Anatomy - anatomy = Anatomy(project_name) + if not project_name: + project_name = anatomy.project_name - if not template_key: - template_key = get_workfile_template_key( - workdir_data["task"]["type"], - workdir_data["app"], - project_name=workdir_data["project"]["name"] - ) + from openpype.pipeline.workfile import get_workdir_with_workdir_data - anatomy_filled = anatomy.format(workdir_data) - # Output is TemplateResult object which contain useful data - output = anatomy_filled[template_key]["folder"] - if output: - return output.normalized() - return output + return get_workdir_with_workdir_data( + workdir_data, project_name, anatomy, template_key + ) +@deprecated("openpype.pipeline.workfile.get_workdir_with_workdir_data") def get_workdir( project_doc, asset_doc, @@ -533,18 +500,15 @@ def get_workdir( TemplateResult: Workdir path. 
""" - from openpype.pipeline import Anatomy - from openpype.pipeline.template_data import get_template_data - - if not anatomy: - anatomy = Anatomy(project_doc["name"]) - - workdir_data = get_template_data( - project_doc, asset_doc, task_name, host_name - ) + from openpype.pipeline.workfile import get_workdir # Output is TemplateResult object which contain useful data - return get_workdir_with_workdir_data( - workdir_data, anatomy, template_key=template_key + return get_workdir( + project_doc, + asset_doc, + task_name, + host_name, + anatomy, + template_key ) @@ -564,6 +528,7 @@ def template_data_from_session(session=None): """ from openpype.pipeline.context_tools import get_template_data_from_session + return get_template_data_from_session(session) @@ -590,6 +555,8 @@ def compute_session_changes( dict: The required changes in the Session dictionary. """ + from openpype.pipeline.context_tools import get_workdir_from_session + changes = dict() # If no changes, return directly @@ -636,30 +603,11 @@ def compute_session_changes( return changes -@with_pipeline_io +@deprecated("openpype.pipeline.context_tools.get_workdir_from_session") def get_workdir_from_session(session=None, template_key=None): - from openpype.pipeline import Anatomy - from openpype.pipeline.context_tools import get_template_data_from_session + from openpype.pipeline.context_tools import get_workdir_from_session - if session is None: - session = legacy_io.Session - project_name = session["AVALON_PROJECT"] - host_name = session["AVALON_APP"] - anatomy = Anatomy(project_name) - template_data = get_template_data_from_session(session) - anatomy_filled = anatomy.format(template_data) - - if not template_key: - task_type = template_data["task"]["type"] - template_key = get_workfile_template_key( - task_type, - host_name, - project_name=project_name - ) - path = anatomy_filled[template_key]["folder"] - if path: - path = os.path.normpath(path) - return path + return get_workdir_from_session(session, template_key) @with_pipeline_io @@ -706,7 +654,6 @@ def update_current_task(task=None, asset=None, app=None, template_key=None): return changes -@with_pipeline_io @deprecated("openpype.client.get_workfile_info") def get_workfile_doc(asset_id, task_name, filename, dbcon=None): """Return workfile document for entered context. @@ -727,13 +674,14 @@ def get_workfile_doc(asset_id, task_name, filename, dbcon=None): # Use legacy_io if dbcon is not entered if not dbcon: + from openpype.pipeline import legacy_io dbcon = legacy_io project_name = dbcon.active_project() return get_workfile_info(project_name, asset_id, task_name, filename) -@with_pipeline_io +@deprecated def create_workfile_doc(asset_doc, task_name, filename, workdir, dbcon=None): """Creates or replace workfile document in mongo. 
@@ -754,6 +702,7 @@ def create_workfile_doc(asset_doc, task_name, filename, workdir, dbcon=None): # Use legacy_io if dbcon is not entered if not dbcon: + from openpype.pipeline import legacy_io dbcon = legacy_io # Filter of workfile document @@ -800,7 +749,7 @@ def create_workfile_doc(asset_doc, task_name, filename, workdir, dbcon=None): ) -@with_pipeline_io +@deprecated def save_workfile_data_to_doc(workfile_doc, data, dbcon=None): if not workfile_doc: # TODO add log message @@ -811,6 +760,7 @@ def save_workfile_data_to_doc(workfile_doc, data, dbcon=None): # Use legacy_io if dbcon is not entered if not dbcon: + from openpype.pipeline import legacy_io dbcon = legacy_io # Convert data to mongo modification keys/values @@ -828,661 +778,11 @@ def save_workfile_data_to_doc(workfile_doc, data, dbcon=None): ) -class BuildWorkfile: - """Wrapper for build workfile process. +@deprecated("openpype.pipeline.workfile.BuildWorkfile") +def BuildWorkfile(): + from openpype.pipeline.workfile import BuildWorkfile - Load representations for current context by build presets. Build presets - are host related, since each host has it's loaders. - """ - - log = logging.getLogger("BuildWorkfile") - - @staticmethod - def map_subsets_by_family(subsets): - subsets_by_family = collections.defaultdict(list) - for subset in subsets: - family = subset["data"].get("family") - if not family: - families = subset["data"].get("families") - if not families: - continue - family = families[0] - - subsets_by_family[family].append(subset) - return subsets_by_family - - def process(self): - """Main method of this wrapper. - - Building of workfile is triggered and is possible to implement - post processing of loaded containers if necessary. - """ - containers = self.build_workfile() - - return containers - - @with_pipeline_io - def build_workfile(self): - """Prepares and load containers into workfile. - - Loads latest versions of current and linked assets to workfile by logic - stored in Workfile profiles from presets. Profiles are set by host, - filtered by current task name and used by families. - - Each family can specify representation names and loaders for - representations and first available and successful loaded - representation is returned as container. - - At the end you'll get list of loaded containers per each asset. - - loaded_containers [{ - "asset_entity": , - "containers": [, , ...] - }, { - "asset_entity": , - "containers": [, ...] - }, { - ... 
- }] - """ - from openpype.pipeline import discover_loader_plugins - - # Get current asset name and entity - project_name = legacy_io.active_project() - current_asset_name = legacy_io.Session["AVALON_ASSET"] - current_asset_entity = get_asset_by_name( - project_name, current_asset_name - ) - # Skip if asset was not found - if not current_asset_entity: - print("Asset entity with name `{}` was not found".format( - current_asset_name - )) - return - - # Prepare available loaders - loaders_by_name = {} - for loader in discover_loader_plugins(): - loader_name = loader.__name__ - if loader_name in loaders_by_name: - raise KeyError( - "Duplicated loader name {0}!".format(loader_name) - ) - loaders_by_name[loader_name] = loader - - # Skip if there are any loaders - if not loaders_by_name: - self.log.warning("There are no registered loaders.") - return - - # Get current task name - current_task_name = legacy_io.Session["AVALON_TASK"] - - # Load workfile presets for task - self.build_presets = self.get_build_presets( - current_task_name, current_asset_entity - ) - - # Skip if there are any presets for task - if not self.build_presets: - self.log.warning( - "Current task `{}` does not have any loading preset.".format( - current_task_name - ) - ) - return - - # Get presets for loading current asset - current_context_profiles = self.build_presets.get("current_context") - # Get presets for loading linked assets - link_context_profiles = self.build_presets.get("linked_assets") - # Skip if both are missing - if not current_context_profiles and not link_context_profiles: - self.log.warning( - "Current task `{}` has empty loading preset.".format( - current_task_name - ) - ) - return - - elif not current_context_profiles: - self.log.warning(( - "Current task `{}` doesn't have any loading" - " preset for it's context." - ).format(current_task_name)) - - elif not link_context_profiles: - self.log.warning(( - "Current task `{}` doesn't have any" - "loading preset for it's linked assets." - ).format(current_task_name)) - - # Prepare assets to process by workfile presets - assets = [] - current_asset_id = None - if current_context_profiles: - # Add current asset entity if preset has current context set - assets.append(current_asset_entity) - current_asset_id = current_asset_entity["_id"] - - if link_context_profiles: - # Find and append linked assets if preset has set linked mapping - link_assets = get_linked_assets(current_asset_entity) - if link_assets: - assets.extend(link_assets) - - # Skip if there are no assets. This can happen if only linked mapping - # is set and there are no links for his asset. - if not assets: - self.log.warning( - "Asset does not have linked assets. Nothing to process." 
- ) - return - - # Prepare entities from database for assets - prepared_entities = self._collect_last_version_repres(assets) - - # Load containers by prepared entities and presets - loaded_containers = [] - # - Current asset containers - if current_asset_id and current_asset_id in prepared_entities: - current_context_data = prepared_entities.pop(current_asset_id) - loaded_data = self.load_containers_by_asset_data( - current_context_data, current_context_profiles, loaders_by_name - ) - if loaded_data: - loaded_containers.append(loaded_data) - - # - Linked assets container - for linked_asset_data in prepared_entities.values(): - loaded_data = self.load_containers_by_asset_data( - linked_asset_data, link_context_profiles, loaders_by_name - ) - if loaded_data: - loaded_containers.append(loaded_data) - - # Return list of loaded containers - return loaded_containers - - @with_pipeline_io - def get_build_presets(self, task_name, asset_doc): - """ Returns presets to build workfile for task name. - - Presets are loaded for current project set in - io.Session["AVALON_PROJECT"], filtered by registered host - and entered task name. - - Args: - task_name (str): Task name used for filtering build presets. - - Returns: - (dict): preset per entered task name - """ - host_name = os.environ["AVALON_APP"] - project_settings = get_project_settings( - legacy_io.Session["AVALON_PROJECT"] - ) - - host_settings = project_settings.get(host_name) or {} - # Get presets for host - wb_settings = host_settings.get("workfile_builder") - if not wb_settings: - # backward compatibility - wb_settings = host_settings.get("workfile_build") or {} - - builder_profiles = wb_settings.get("profiles") - if not builder_profiles: - return None - - task_type = ( - asset_doc - .get("data", {}) - .get("tasks", {}) - .get(task_name, {}) - .get("type") - ) - filter_data = { - "task_types": task_type, - "tasks": task_name - } - return filter_profiles(builder_profiles, filter_data) - - def _filter_build_profiles(self, build_profiles, loaders_by_name): - """ Filter build profiles by loaders and prepare process data. - - Valid profile must have "loaders", "families" and "repre_names" keys - with valid values. - - "loaders" expects list of strings representing possible loaders. - - "families" expects list of strings for filtering - by main subset family. - - "repre_names" expects list of strings for filtering by - representation name. - - Lowered "families" and "repre_names" are prepared for each profile with - all required keys. - - Args: - build_profiles (dict): Profiles for building workfile. - loaders_by_name (dict): Available loaders per name. - - Returns: - (list): Filtered and prepared profiles. 
- """ - valid_profiles = [] - for profile in build_profiles: - # Check loaders - profile_loaders = profile.get("loaders") - if not profile_loaders: - self.log.warning(( - "Build profile has missing loaders configuration: {0}" - ).format(json.dumps(profile, indent=4))) - continue - - # Check if any loader is available - loaders_match = False - for loader_name in profile_loaders: - if loader_name in loaders_by_name: - loaders_match = True - break - - if not loaders_match: - self.log.warning(( - "All loaders from Build profile are not available: {0}" - ).format(json.dumps(profile, indent=4))) - continue - - # Check families - profile_families = profile.get("families") - if not profile_families: - self.log.warning(( - "Build profile is missing families configuration: {0}" - ).format(json.dumps(profile, indent=4))) - continue - - # Check representation names - profile_repre_names = profile.get("repre_names") - if not profile_repre_names: - self.log.warning(( - "Build profile is missing" - " representation names filtering: {0}" - ).format(json.dumps(profile, indent=4))) - continue - - # Prepare lowered families and representation names - profile["families_lowered"] = [ - fam.lower() for fam in profile_families - ] - profile["repre_names_lowered"] = [ - name.lower() for name in profile_repre_names - ] - - valid_profiles.append(profile) - - return valid_profiles - - def _prepare_profile_for_subsets(self, subsets, profiles): - """Select profile for each subset by it's data. - - Profiles are filtered for each subset individually. - Profile is filtered by subset's family, optionally by name regex and - representation names set in profile. - It is possible to not find matching profile for subset, in that case - subset is skipped and it is possible that none of subsets have - matching profile. - - Args: - subsets (list): Subset documents. - profiles (dict): Build profiles. - - Returns: - (dict) Profile by subset's id. - """ - # Prepare subsets - subsets_by_family = self.map_subsets_by_family(subsets) - - profiles_per_subset_id = {} - for family, subsets in subsets_by_family.items(): - family_low = family.lower() - for profile in profiles: - # Skip profile if does not contain family - if family_low not in profile["families_lowered"]: - continue - - # Precompile name filters as regexes - profile_regexes = profile.get("subset_name_filters") - if profile_regexes: - _profile_regexes = [] - for regex in profile_regexes: - _profile_regexes.append(re.compile(regex)) - profile_regexes = _profile_regexes - - # TODO prepare regex compilation - for subset in subsets: - # Verify regex filtering (optional) - if profile_regexes: - valid = False - for pattern in profile_regexes: - if re.match(pattern, subset["name"]): - valid = True - break - - if not valid: - continue - - profiles_per_subset_id[subset["_id"]] = profile - - # break profiles loop on finding the first matching profile - break - return profiles_per_subset_id - - def load_containers_by_asset_data( - self, asset_entity_data, build_profiles, loaders_by_name - ): - """Load containers for entered asset entity by Build profiles. - - Args: - asset_entity_data (dict): Prepared data with subsets, last version - and representations for specific asset. - build_profiles (dict): Build profiles. - loaders_by_name (dict): Available loaders per name. - - Returns: - (dict) Output contains asset document and loaded containers. 
- """ - - # Make sure all data are not empty - if not asset_entity_data or not build_profiles or not loaders_by_name: - return - - asset_entity = asset_entity_data["asset_entity"] - - valid_profiles = self._filter_build_profiles( - build_profiles, loaders_by_name - ) - if not valid_profiles: - self.log.warning( - "There are not valid Workfile profiles. Skipping process." - ) - return - - self.log.debug("Valid Workfile profiles: {}".format(valid_profiles)) - - subsets_by_id = {} - version_by_subset_id = {} - repres_by_version_id = {} - for subset_id, in_data in asset_entity_data["subsets"].items(): - subset_entity = in_data["subset_entity"] - subsets_by_id[subset_entity["_id"]] = subset_entity - - version_data = in_data["version"] - version_entity = version_data["version_entity"] - version_by_subset_id[subset_id] = version_entity - repres_by_version_id[version_entity["_id"]] = ( - version_data["repres"] - ) - - if not subsets_by_id: - self.log.warning("There are not subsets for asset {0}".format( - asset_entity["name"] - )) - return - - profiles_per_subset_id = self._prepare_profile_for_subsets( - subsets_by_id.values(), valid_profiles - ) - if not profiles_per_subset_id: - self.log.warning("There are not valid subsets.") - return - - valid_repres_by_subset_id = collections.defaultdict(list) - for subset_id, profile in profiles_per_subset_id.items(): - profile_repre_names = profile["repre_names_lowered"] - - version_entity = version_by_subset_id[subset_id] - version_id = version_entity["_id"] - repres = repres_by_version_id[version_id] - for repre in repres: - repre_name_low = repre["name"].lower() - if repre_name_low in profile_repre_names: - valid_repres_by_subset_id[subset_id].append(repre) - - # DEBUG message - msg = "Valid representations for Asset: `{}`".format( - asset_entity["name"] - ) - for subset_id, repres in valid_repres_by_subset_id.items(): - subset = subsets_by_id[subset_id] - msg += "\n# Subset Name/ID: `{}`/{}".format( - subset["name"], subset_id - ) - for repre in repres: - msg += "\n## Repre name: `{}`".format(repre["name"]) - - self.log.debug(msg) - - containers = self._load_containers( - valid_repres_by_subset_id, subsets_by_id, - profiles_per_subset_id, loaders_by_name - ) - - return { - "asset_entity": asset_entity, - "containers": containers - } - - @with_pipeline_io - def _load_containers( - self, repres_by_subset_id, subsets_by_id, - profiles_per_subset_id, loaders_by_name - ): - """Real load by collected data happens here. - - Loading of representations per subset happens here. Each subset can - loads one representation. Loading is tried in specific order. - Representations are tried to load by names defined in configuration. - If subset has representation matching representation name each loader - is tried to load it until any is successful. If none of them was - successful then next representation name is tried. - Subset process loop ends when any representation is loaded or - all matching representations were already tried. - - Args: - repres_by_subset_id (dict): Available representations mapped - by their parent (subset) id. - subsets_by_id (dict): Subset documents mapped by their id. - profiles_per_subset_id (dict): Build profiles mapped by subset id. - loaders_by_name (dict): Available loaders per name. - - Returns: - (list) Objects of loaded containers. - """ - from openpype.pipeline import ( - IncompatibleLoaderError, - load_container, - ) - - loaded_containers = [] - - # Get subset id order from build presets. 
- build_presets = self.build_presets.get("current_context", []) - build_presets += self.build_presets.get("linked_assets", []) - subset_ids_ordered = [] - for preset in build_presets: - for preset_family in preset["families"]: - for id, subset in subsets_by_id.items(): - if preset_family not in subset["data"].get("families", []): - continue - - subset_ids_ordered.append(id) - - # Order representations from subsets. - print("repres_by_subset_id", repres_by_subset_id) - representations_ordered = [] - representations = [] - for id in subset_ids_ordered: - for subset_id, repres in repres_by_subset_id.items(): - if repres in representations: - continue - - if id == subset_id: - representations_ordered.append((subset_id, repres)) - representations.append(repres) - - print("representations", representations) - - # Load ordered representations. - for subset_id, repres in representations_ordered: - subset_name = subsets_by_id[subset_id]["name"] - - profile = profiles_per_subset_id[subset_id] - loaders_last_idx = len(profile["loaders"]) - 1 - repre_names_last_idx = len(profile["repre_names_lowered"]) - 1 - - repre_by_low_name = { - repre["name"].lower(): repre for repre in repres - } - - is_loaded = False - for repre_name_idx, profile_repre_name in enumerate( - profile["repre_names_lowered"] - ): - # Break iteration if representation was already loaded - if is_loaded: - break - - repre = repre_by_low_name.get(profile_repre_name) - if not repre: - continue - - for loader_idx, loader_name in enumerate(profile["loaders"]): - if is_loaded: - break - - loader = loaders_by_name.get(loader_name) - if not loader: - continue - try: - container = load_container( - loader, - repre["_id"], - name=subset_name - ) - loaded_containers.append(container) - is_loaded = True - - except Exception as exc: - if exc == IncompatibleLoaderError: - self.log.info(( - "Loader `{}` is not compatible with" - " representation `{}`" - ).format(loader_name, repre["name"])) - - else: - self.log.error( - "Unexpected error happened during loading", - exc_info=True - ) - - msg = "Loading failed." - if loader_idx < loaders_last_idx: - msg += " Trying next loader." - elif repre_name_idx < repre_names_last_idx: - msg += ( - " Loading of subset `{}` was not successful." - ).format(subset_name) - else: - msg += " Trying next representation." - self.log.info(msg) - - return loaded_containers - - @with_pipeline_io - def _collect_last_version_repres(self, asset_docs): - """Collect subsets, versions and representations for asset_entities. - - Args: - asset_entities (list): Asset entities for which want to find data - - Returns: - (dict): collected entities - - Example output: - ``` - { - {Asset ID}: { - "asset_entity": , - "subsets": { - {Subset ID}: { - "subset_entity": , - "version": { - "version_entity": , - "repres": [ - , , ... - ] - } - }, - ... - } - }, - ... 
- } - output[asset_id]["subsets"][subset_id]["version"]["repres"] - ``` - """ - - output = {} - if not asset_docs: - return output - - asset_docs_by_ids = {asset["_id"]: asset for asset in asset_docs} - - project_name = legacy_io.active_project() - subsets = list(get_subsets( - project_name, asset_ids=asset_docs_by_ids.keys() - )) - subset_entity_by_ids = {subset["_id"]: subset for subset in subsets} - - last_version_by_subset_id = get_last_versions( - project_name, subset_entity_by_ids.keys() - ) - last_version_docs_by_id = { - version["_id"]: version - for version in last_version_by_subset_id.values() - } - repre_docs = get_representations( - project_name, version_ids=last_version_docs_by_id.keys() - ) - - for repre_doc in repre_docs: - version_id = repre_doc["parent"] - version_doc = last_version_docs_by_id[version_id] - - subset_id = version_doc["parent"] - subset_doc = subset_entity_by_ids[subset_id] - - asset_id = subset_doc["parent"] - asset_doc = asset_docs_by_ids[asset_id] - - if asset_id not in output: - output[asset_id] = { - "asset_entity": asset_doc, - "subsets": {} - } - - if subset_id not in output[asset_id]["subsets"]: - output[asset_id]["subsets"][subset_id] = { - "subset_entity": subset_doc, - "version": { - "version_entity": version_doc, - "repres": [] - } - } - - output[asset_id]["subsets"][subset_id]["version"]["repres"].append( - repre_doc - ) - - return output + return BuildWorkfile() @with_pipeline_io @@ -1515,13 +815,21 @@ def get_creator_by_name(creator_name, case_sensitive=False): return None -@with_pipeline_io +@deprecated def change_timer_to_current_context(): """Called after context change to change timers. + Deprecated: + This method is specific for TimersManager module so please use the + functionality from there. Function will be removed after release + version 3.14.* + TODO: - use TimersManager's static method instead of reimplementing it here """ + + from openpype.pipeline import legacy_io + webserver_url = os.environ.get("OPENPYPE_WEBSERVER_URL") if not webserver_url: log.warning("Couldn't find webserver url") @@ -1609,6 +917,8 @@ def _get_task_context_data_for_anatomy( return data +@deprecated( + "openpype.pipeline.workfile.get_custom_workfile_template_by_context") def get_custom_workfile_template_by_context( template_profiles, project_doc, asset_doc, task_name, anatomy=None ): @@ -1662,6 +972,9 @@ def get_custom_workfile_template_by_context( return None +@deprecated( + "openpype.pipeline.workfile.get_custom_workfile_template_by_string_context" +) def get_custom_workfile_template_by_string_context( template_profiles, project_name, asset_name, task_name, dbcon=None, anatomy=None @@ -1706,7 +1019,7 @@ def get_custom_workfile_template_by_string_context( ) -@with_pipeline_io +@deprecated("openpype.pipeline.context_tools.get_custom_workfile_template") def get_custom_workfile_template(template_profiles): """Filter and fill workfile template profiles by current context. @@ -1721,6 +1034,8 @@ def get_custom_workfile_template(template_profiles): context. (Existence of formatted path is not validated.) 
""" + from openpype.pipeline import legacy_io + return get_custom_workfile_template_by_string_context( template_profiles, legacy_io.Session["AVALON_PROJECT"], @@ -1730,6 +1045,7 @@ def get_custom_workfile_template(template_profiles): ) +@deprecated("openpype.pipeline.workfile.get_last_workfile_with_version") def get_last_workfile_with_version( workdir, file_template, fill_data, extensions ): @@ -1745,78 +1061,15 @@ def get_last_workfile_with_version( tuple: Last workfile with version if there is any otherwise returns (None, None). """ - if not os.path.exists(workdir): - return None, None - # Fast match on extension - filenames = [ - filename - for filename in os.listdir(workdir) - if os.path.splitext(filename)[1] in extensions - ] + from openpype.pipeline.workfile import get_last_workfile_with_version - # Build template without optionals, version to digits only regex - # and comment to any definable value. - _ext = [] - for ext in extensions: - if not ext.startswith("."): - ext = "." + ext - # Escape dot for regex - ext = "\\" + ext - _ext.append(ext) - ext_expression = "(?:" + "|".join(_ext) + ")" - - # Replace `.{ext}` with `{ext}` so we are sure there is not dot at the end - file_template = re.sub(r"\.?{ext}", ext_expression, file_template) - # Replace optional keys with optional content regex - file_template = re.sub(r"<.*?>", r".*?", file_template) - # Replace `{version}` with group regex - file_template = re.sub(r"{version.*?}", r"([0-9]+)", file_template) - file_template = re.sub(r"{comment.*?}", r".+?", file_template) - file_template = StringTemplate.format_strict_template( - file_template, fill_data + return get_last_workfile_with_version( + workdir, file_template, fill_data, extensions ) - # Match with ignore case on Windows due to the Windows - # OS not being case-sensitive. This avoids later running - # into the error that the file did exist if it existed - # with a different upper/lower-case. - kwargs = {} - if platform.system().lower() == "windows": - kwargs["flags"] = re.IGNORECASE - - # Get highest version among existing matching files - version = None - output_filenames = [] - for filename in sorted(filenames): - match = re.match(file_template, filename, **kwargs) - if not match: - continue - - file_version = int(match.group(1)) - if version is None or file_version > version: - output_filenames[:] = [] - version = file_version - - if file_version == version: - output_filenames.append(filename) - - output_filename = None - if output_filenames: - if len(output_filenames) == 1: - output_filename = output_filenames[0] - else: - last_time = None - for _output_filename in output_filenames: - full_path = os.path.join(workdir, _output_filename) - mod_time = os.path.getmtime(full_path) - if last_time is None or last_time < mod_time: - output_filename = _output_filename - last_time = mod_time - - return output_filename, version - +@deprecated("openpype.pipeline.workfile.get_last_workfile") def get_last_workfile( workdir, file_template, fill_data, extensions, full_path=False ): @@ -1834,22 +1087,12 @@ def get_last_workfile( Returns: str: Last or first workfile as filename of full path to filename. 
""" - filename, version = get_last_workfile_with_version( - workdir, file_template, fill_data, extensions + + from openpype.pipeline.workfile import get_last_workfile + + return get_last_workfile( + workdir, file_template, fill_data, extensions, full_path ) - if filename is None: - data = copy.deepcopy(fill_data) - data["version"] = 1 - data.pop("comment", None) - if not data.get("ext"): - data["ext"] = extensions[0] - data["ext"] = data["ext"].replace('.', '') - filename = StringTemplate.format_strict_template(file_template, data) - - if full_path: - return os.path.normpath(os.path.join(workdir, filename)) - - return filename @with_pipeline_io diff --git a/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py b/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py index 172649c951..61b95cf06d 100644 --- a/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py +++ b/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py @@ -34,7 +34,7 @@ def get_openpype_version_from_path(path, build=True): # if only builds are requested if build and not os.path.isfile(exe): # noqa: E501 - print(f" ! path is not a build: {path}") + print(" ! path is not a build: {}".format(path)) return None version = {} @@ -70,11 +70,12 @@ def inject_openpype_environment(deadlinePlugin): # lets go over all available and find compatible build. requested_version = job.GetJobEnvironmentKeyValue("OPENPYPE_VERSION") if requested_version: - print((">>> Scanning for compatible requested " - f"version {requested_version}")) + print(( + ">>> Scanning for compatible requested version {}" + ).format(requested_version)) install_dir = DirectoryUtils.SearchDirectoryList(dir_list) if install_dir: - print(f"--- Looking for OpenPype at: {install_dir}") + print("--- Looking for OpenPype at: {}".format(install_dir)) sub_dirs = [ f.path for f in os.scandir(install_dir) if f.is_dir() @@ -83,18 +84,20 @@ def inject_openpype_environment(deadlinePlugin): version = get_openpype_version_from_path(subdir) if not version: continue - print(f" - found: {version} - {subdir}") + print(" - found: {} - {}".format(version, subdir)) openpype_versions.append((version, subdir)) exe = FileUtils.SearchFileList(exe_list) if openpype_versions: # if looking for requested compatible version, # add the implicitly specified to the list too. 
- print(f"Looking for OpenPype at: {os.path.dirname(exe)}") + print("Looking for OpenPype at: {}".format(os.path.dirname(exe))) version = get_openpype_version_from_path( os.path.dirname(exe)) if version: - print(f" - found: {version} - {os.path.dirname(exe)}") + print(" - found: {} - {}".format( + version, os.path.dirname(exe) + )) openpype_versions.append((version, os.path.dirname(exe))) if requested_version: @@ -106,8 +109,9 @@ def inject_openpype_environment(deadlinePlugin): int(t) if t.isdigit() else t.lower() for t in re.split(r"(\d+)", ver[0]) ]) - print(("*** Latest available version found is " - f"{openpype_versions[-1][0]}")) + print(( + "*** Latest available version found is {}" + ).format(openpype_versions[-1][0])) requested_major, requested_minor, _ = requested_version.split(".")[:3] # noqa: E501 compatible_versions = [] for version in openpype_versions: @@ -127,8 +131,9 @@ def inject_openpype_environment(deadlinePlugin): int(t) if t.isdigit() else t.lower() for t in re.split(r"(\d+)", ver[0]) ]) - print(("*** Latest compatible version found is " - f"{compatible_versions[-1][0]}")) + print(( + "*** Latest compatible version found is {}" + ).format(compatible_versions[-1][0])) # create list of executables for different platform and let # Deadline decide. exe_list = [ @@ -234,78 +239,6 @@ def inject_render_job_id(deadlinePlugin): print(">>> Injection end.") -def pype_command_line(executable, arguments, workingDirectory): - """Remap paths in comand line argument string. - - Using Deadline rempper it will remap all path found in command-line. - - Args: - executable (str): path to executable - arguments (str): arguments passed to executable - workingDirectory (str): working directory path - - Returns: - Tuple(executable, arguments, workingDirectory) - - """ - print("-" * 40) - print("executable: {}".format(executable)) - print("arguments: {}".format(arguments)) - print("workingDirectory: {}".format(workingDirectory)) - print("-" * 40) - print("Remapping arguments ...") - arguments = RepositoryUtils.CheckPathMapping(arguments) - print("* {}".format(arguments)) - print("-" * 40) - return executable, arguments, workingDirectory - - -def pype(deadlinePlugin): - """Remaps `PYPE_METADATA_FILE` and `PYPE_PYTHON_EXE` environment vars. - - `PYPE_METADATA_FILE` is used on farm to point to rendered data. This path - originates on platform from which this job was published. To be able to - publish on different platform, this path needs to be remapped. - - `PYPE_PYTHON_EXE` can be used to specify custom location of python - interpreter to use for Pype. This is remappeda also if present even - though it probably doesn't make much sense. - - Arguments: - deadlinePlugin: Deadline job plugin passed by Deadline - - """ - print(">>> Getting job ...") - job = deadlinePlugin.GetJob() - # PYPE should be here, not OPENPYPE - backward compatibility!! - pype_metadata = job.GetJobEnvironmentKeyValue("PYPE_METADATA_FILE") - pype_python = job.GetJobEnvironmentKeyValue("PYPE_PYTHON_EXE") - print(">>> Having backward compatible env vars {}/{}".format(pype_metadata, - pype_python)) - # test if it is pype publish job. 
- if pype_metadata: - pype_metadata = RepositoryUtils.CheckPathMapping(pype_metadata) - if platform.system().lower() == "linux": - pype_metadata = pype_metadata.replace("\\", "/") - - print("- remapping PYPE_METADATA_FILE: {}".format(pype_metadata)) - job.SetJobEnvironmentKeyValue("PYPE_METADATA_FILE", pype_metadata) - deadlinePlugin.SetProcessEnvironmentVariable( - "PYPE_METADATA_FILE", pype_metadata) - - if pype_python: - pype_python = RepositoryUtils.CheckPathMapping(pype_python) - if platform.system().lower() == "linux": - pype_python = pype_python.replace("\\", "/") - - print("- remapping PYPE_PYTHON_EXE: {}".format(pype_python)) - job.SetJobEnvironmentKeyValue("PYPE_PYTHON_EXE", pype_python) - deadlinePlugin.SetProcessEnvironmentVariable( - "PYPE_PYTHON_EXE", pype_python) - - deadlinePlugin.ModifyCommandLineCallback += pype_command_line - - def __main__(deadlinePlugin): print("*** GlobalJobPreload start ...") print(">>> Getting job ...") @@ -329,5 +262,3 @@ def __main__(deadlinePlugin): inject_render_job_id(deadlinePlugin) elif openpype_render_job == '1' or openpype_remote_job == '1': inject_openpype_environment(deadlinePlugin) - else: - pype(deadlinePlugin) # backward compatibility with Pype2 diff --git a/openpype/modules/ftrack/event_handlers_user/action_fill_workfile_attr.py b/openpype/modules/ftrack/event_handlers_user/action_fill_workfile_attr.py index c7fa2dce5e..fb1cdf340e 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_fill_workfile_attr.py +++ b/openpype/modules/ftrack/event_handlers_user/action_fill_workfile_attr.py @@ -12,12 +12,10 @@ from openpype.client import ( get_assets, ) from openpype.settings import get_project_settings, get_system_settings -from openpype.lib import ( - get_workfile_template_key, - StringTemplate, -) +from openpype.lib import StringTemplate from openpype.pipeline import Anatomy from openpype.pipeline.template_data import get_template_data +from openpype.pipeline.workfile import get_workfile_template_key from openpype_modules.ftrack.lib import BaseAction, statics_icon from openpype_modules.ftrack.lib.avalon_sync import create_chunks @@ -299,7 +297,10 @@ class FillWorkfileAttributeAction(BaseAction): task_type = workfile_data["task"]["type"] template_key = get_workfile_template_key( - task_type, host_name, project_settings=project_settings + task_type, + host_name, + project_name, + project_settings=project_settings ) if template_key in templates_by_key: template = templates_by_key[template_key] diff --git a/openpype/modules/kitsu/utils/update_op_with_zou.py b/openpype/modules/kitsu/utils/update_op_with_zou.py index 8f5566e8ec..e03cf2b30e 100644 --- a/openpype/modules/kitsu/utils/update_op_with_zou.py +++ b/openpype/modules/kitsu/utils/update_op_with_zou.py @@ -219,18 +219,23 @@ def update_op_assets( # Add parents for hierarchy item_data["parents"] = [] - while parent_zou_id is not None: - parent_doc = asset_doc_ids[parent_zou_id] + ancestor_id = parent_zou_id + while ancestor_id is not None: + parent_doc = asset_doc_ids[ancestor_id] item_data["parents"].insert(0, parent_doc["name"]) # Get parent entity parent_entity = parent_doc["data"]["zou"] - parent_zou_id = parent_entity.get("parent_id") + ancestor_id = parent_entity.get("parent_id") - if item_type in ["Shot", "Sequence"]: + # Build OpenPype compatible name + if item_type in ["Shot", "Sequence"] and parent_zou_id is not None: # Name with parents hierarchy "({episode}_){sequence}_{shot}" # to avoid duplicate name issue item_name = f"{item_data['parents'][-1]}_{item['name']}" + + # 
Update doc name + asset_doc_ids[item["id"]]["name"] = item_name else: item_name = item["name"] diff --git a/openpype/modules/sync_server/providers/abstract_provider.py b/openpype/modules/sync_server/providers/abstract_provider.py index 688a17f14f..8c2fe1cad9 100644 --- a/openpype/modules/sync_server/providers/abstract_provider.py +++ b/openpype/modules/sync_server/providers/abstract_provider.py @@ -62,7 +62,7 @@ class AbstractProvider: @abc.abstractmethod def upload_file(self, source_path, path, - server, collection, file, representation, site, + server, project_name, file, representation, site, overwrite=False): """ Copy file from 'source_path' to 'target_path' on provider. @@ -75,7 +75,7 @@ arguments for saving progress: server (SyncServer): server instance to call update_db on - collection (str): name of collection + project_name (str): name of project file (dict): info about uploaded file (matches structure from db) representation (dict): complete repre containing 'file' site (str): site name @@ -87,7 +87,7 @@ @abc.abstractmethod def download_file(self, source_path, local_path, - server, collection, file, representation, site, + server, project_name, file, representation, site, overwrite=False): """ Download file from provider into local system @@ -99,7 +99,7 @@ arguments for saving progress: server (SyncServer): server instance to call update_db on - collection (str): name of collection + project_name (str): name of project file (dict): info about uploaded file (matches structure from db) representation (dict): complete repre containing 'file' site (str): site name diff --git a/openpype/modules/sync_server/providers/dropbox.py b/openpype/modules/sync_server/providers/dropbox.py index dfc42fed75..89d6990841 100644 --- a/openpype/modules/sync_server/providers/dropbox.py +++ b/openpype/modules/sync_server/providers/dropbox.py @@ -224,7 +224,7 @@ class DropboxHandler(AbstractProvider): return False def upload_file(self, source_path, path, - server, collection, file, representation, site, + server, project_name, file, representation, site, overwrite=False): """ Copy file from 'source_path' to 'target_path' on provider.
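The `collection` to `project_name` rename above defines the contract that every concrete provider below has to follow. As a quick orientation, here is a minimal sketch of a conforming provider; `MyProvider` and its trivial copy body are hypothetical, only the signatures and the `update_db` keywords mirror the updated `AbstractProvider`:

```python
import shutil

from openpype.modules.sync_server.providers.abstract_provider import (
    AbstractProvider,
)


class MyProvider(AbstractProvider):
    """Hypothetical provider; other abstract members are omitted here."""

    def upload_file(self, source_path, path,
                    server, project_name, file, representation, site,
                    overwrite=False):
        # A real provider would talk to remote storage; a local copy
        # keeps the sketch short.
        shutil.copy(source_path, path)
        # Progress is now reported per project, not per "collection".
        server.update_db(project_name=project_name, new_file_id=None,
                        file=file, representation=representation,
                        site=site, progress=1)
        return path

    def download_file(self, source_path, local_path,
                      server, project_name, file, representation, site,
                      overwrite=False):
        return self.upload_file(source_path, local_path,
                                server, project_name, file,
                                representation, site, overwrite)
```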
@@ -237,7 +237,7 @@ class DropboxHandler(AbstractProvider): arguments for saving progress: server (SyncServer): server instance to call update_db on - collection (str): name of collection + project_name (str): file (dict): info about uploaded file (matches structure from db) representation (dict): complete repre containing 'file' site (str): site name @@ -290,7 +290,7 @@ class DropboxHandler(AbstractProvider): cursor.offset = f.tell() server.update_db( - collection=collection, + project_name=project_name, new_file_id=None, file=file, representation=representation, @@ -301,7 +301,7 @@ class DropboxHandler(AbstractProvider): return path def download_file(self, source_path, local_path, - server, collection, file, representation, site, + server, project_name, file, representation, site, overwrite=False): """ Download file from provider into local system @@ -313,7 +313,7 @@ class DropboxHandler(AbstractProvider): arguments for saving progress: server (SyncServer): server instance to call update_db on - collection (str): name of collection + project_name (str): file (dict): info about uploaded file (matches structure from db) representation (dict): complete repre containing 'file' site (str): site name @@ -337,7 +337,7 @@ class DropboxHandler(AbstractProvider): self.dbx.files_download_to_file(local_path, source_path) server.update_db( - collection=collection, + project_name=project_name, new_file_id=None, file=file, representation=representation, diff --git a/openpype/modules/sync_server/providers/gdrive.py b/openpype/modules/sync_server/providers/gdrive.py index aa7329b104..bef707788b 100644 --- a/openpype/modules/sync_server/providers/gdrive.py +++ b/openpype/modules/sync_server/providers/gdrive.py @@ -251,7 +251,7 @@ class GDriveHandler(AbstractProvider): return folder_id def upload_file(self, source_path, path, - server, collection, file, representation, site, + server, project_name, file, representation, site, overwrite=False): """ Uploads single file from 'source_path' to destination 'path'. @@ -264,7 +264,7 @@ class GDriveHandler(AbstractProvider): arguments for saving progress: server (SyncServer): server instance to call update_db on - collection (str): name of collection + project_name (str): file (dict): info about uploaded file (matches structure from db) representation (dict): complete repre containing 'file' site (str): site name @@ -324,7 +324,7 @@ class GDriveHandler(AbstractProvider): while response is None: if server.is_representation_paused(representation['_id'], check_parents=True, - project_name=collection): + project_name=project_name): raise ValueError("Paused during process, please redo.") if status: status_val = float(status.progress()) @@ -333,7 +333,7 @@ class GDriveHandler(AbstractProvider): last_tick = time.time() log.debug("Uploaded %d%%." % int(status_val * 100)) - server.update_db(collection=collection, + server.update_db(project_name=project_name, new_file_id=None, file=file, representation=representation, @@ -358,7 +358,7 @@ class GDriveHandler(AbstractProvider): return response['id'] def download_file(self, source_path, local_path, - server, collection, file, representation, site, + server, project_name, file, representation, site, overwrite=False): """ Downloads single file from 'source_path' (remote) to 'local_path'. 
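Both GDrive hunks here share one pattern: a chunked-transfer loop that honours the pause flag and throttles progress writes to roughly one per second. A rough standalone sketch of that loop, assuming `request` is a chunked `googleapiclient` request and `server` is the sync server module exposing `is_representation_paused` and `update_db`:

```python
import time


def transfer_with_progress(request, server, project_name,
                           file, representation, site):
    """Drain a chunked request while reporting progress, as in gdrive.py."""
    response = None
    status_val = 0
    last_tick = 0
    while response is None:
        # Stop early when the representation (or a parent) was paused.
        if server.is_representation_paused(representation["_id"],
                                           check_parents=True,
                                           project_name=project_name):
            raise ValueError("Paused during process, please redo.")
        # Moves one chunk; 'status' carries the progress ratio.
        status, response = request.next_chunk()
        if status:
            status_val = float(status.progress())
        # Throttle DB updates to about one write per second.
        if time.time() - last_tick >= 1:
            last_tick = time.time()
            server.update_db(project_name=project_name, new_file_id=None,
                             file=file, representation=representation,
                             site=site, progress=status_val)
    return response
```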
@@ -372,7 +372,7 @@ class GDriveHandler(AbstractProvider): arguments for saving progress: server (SyncServer): server instance to call update_db on - collection (str): name of collection + project_name (str): file (dict): info about uploaded file (matches structure from db) representation (dict): complete repre containing 'file' site (str): site name @@ -410,7 +410,7 @@ class GDriveHandler(AbstractProvider): while response is None: if server.is_representation_paused(representation['_id'], check_parents=True, - project_name=collection): + project_name=project_name): raise ValueError("Paused during process, please redo.") if status: status_val = float(status.progress()) @@ -419,7 +419,7 @@ class GDriveHandler(AbstractProvider): last_tick = time.time() log.debug("Downloaded %d%%." % int(status_val * 100)) - server.update_db(collection=collection, + server.update_db(project_name=project_name, new_file_id=None, file=file, representation=representation, diff --git a/openpype/modules/sync_server/providers/local_drive.py b/openpype/modules/sync_server/providers/local_drive.py index 172cb338cf..01bc891d08 100644 --- a/openpype/modules/sync_server/providers/local_drive.py +++ b/openpype/modules/sync_server/providers/local_drive.py @@ -82,7 +82,7 @@ class LocalDriveHandler(AbstractProvider): return editable def upload_file(self, source_path, target_path, - server, collection, file, representation, site, + server, project_name, file, representation, site, overwrite=False, direction="Upload"): """ Copies file from 'source_path' to 'target_path' @@ -95,7 +95,7 @@ class LocalDriveHandler(AbstractProvider): thread = threading.Thread(target=self._copy, args=(source_path, target_path)) thread.start() - self._mark_progress(collection, file, representation, server, + self._mark_progress(project_name, file, representation, server, site, source_path, target_path, direction) else: if os.path.exists(target_path): @@ -105,13 +105,14 @@ class LocalDriveHandler(AbstractProvider): return os.path.basename(target_path) def download_file(self, source_path, local_path, - server, collection, file, representation, site, + server, project_name, file, representation, site, overwrite=False): """ Download a file form 'source_path' to 'local_path' """ return self.upload_file(source_path, local_path, - server, collection, file, representation, site, + server, project_name, file, + representation, site, overwrite, direction="Download") def delete_file(self, path): @@ -188,7 +189,7 @@ class LocalDriveHandler(AbstractProvider): except shutil.SameFileError: print("same files, skipping") - def _mark_progress(self, collection, file, representation, server, site, + def _mark_progress(self, project_name, file, representation, server, site, source_path, target_path, direction): """ Updates progress field in DB by values 0-1. @@ -204,7 +205,7 @@ class LocalDriveHandler(AbstractProvider): status_val = target_file_size / source_file_size last_tick = time.time() log.debug(direction + "ed %d%%." 
% int(status_val * 100)) - server.update_db(collection=collection, + server.update_db(project_name=project_name, new_file_id=None, file=file, representation=representation, diff --git a/openpype/modules/sync_server/providers/sftp.py b/openpype/modules/sync_server/providers/sftp.py index 49b87b14ec..302ffae3e6 100644 --- a/openpype/modules/sync_server/providers/sftp.py +++ b/openpype/modules/sync_server/providers/sftp.py @@ -222,7 +222,7 @@ class SFTPHandler(AbstractProvider): return os.path.basename(path) def upload_file(self, source_path, target_path, - server, collection, file, representation, site, + server, project_name, file, representation, site, overwrite=False): """ Uploads single file from 'source_path' to destination 'path'. @@ -235,7 +235,7 @@ class SFTPHandler(AbstractProvider): arguments for saving progress: server (SyncServer): server instance to call update_db on - collection (str): name of collection + project_name (str): file (dict): info about uploaded file (matches structure from db) representation (dict): complete repre containing 'file' site (str): site name @@ -256,7 +256,7 @@ class SFTPHandler(AbstractProvider): thread = threading.Thread(target=self._upload, args=(source_path, target_path)) thread.start() - self._mark_progress(collection, file, representation, server, + self._mark_progress(project_name, file, representation, server, site, source_path, target_path, "upload") return os.path.basename(target_path) @@ -267,7 +267,7 @@ class SFTPHandler(AbstractProvider): conn.put(source_path, target_path) def download_file(self, source_path, target_path, - server, collection, file, representation, site, + server, project_name, file, representation, site, overwrite=False): """ Downloads single file from 'source_path' (remote) to 'target_path'. @@ -281,7 +281,7 @@ class SFTPHandler(AbstractProvider): arguments for saving progress: server (SyncServer): server instance to call update_db on - collection (str): name of collection + project_name (str): file (dict): info about uploaded file (matches structure from db) representation (dict): complete repre containing 'file' site (str): site name @@ -302,7 +302,7 @@ class SFTPHandler(AbstractProvider): thread = threading.Thread(target=self._download, args=(source_path, target_path)) thread.start() - self._mark_progress(collection, file, representation, server, + self._mark_progress(project_name, file, representation, server, site, source_path, target_path, "download") return os.path.basename(target_path) @@ -425,7 +425,7 @@ class SFTPHandler(AbstractProvider): pysftp.exceptions.ConnectionException): log.warning("Couldn't connect", exc_info=True) - def _mark_progress(self, collection, file, representation, server, site, + def _mark_progress(self, project_name, file, representation, server, site, source_path, target_path, direction): """ Updates progress field in DB by values 0-1. @@ -446,7 +446,7 @@ class SFTPHandler(AbstractProvider): status_val = target_file_size / source_file_size last_tick = time.time() log.debug(direction + "ed %d%%." 
% int(status_val * 100)) - server.update_db(collection=collection, + server.update_db(project_name=project_name, new_file_id=None, file=file, representation=representation, diff --git a/openpype/modules/sync_server/sync_server.py b/openpype/modules/sync_server/sync_server.py index 356a75f99d..97538fcd4e 100644 --- a/openpype/modules/sync_server/sync_server.py +++ b/openpype/modules/sync_server/sync_server.py @@ -14,7 +14,7 @@ from .utils import SyncStatus, ResumableError log = PypeLogger().get_logger("SyncServer") -async def upload(module, collection, file, representation, provider_name, +async def upload(module, project_name, file, representation, provider_name, remote_site_name, tree=None, preset=None): """ Upload single 'file' of a 'representation' to 'provider'. @@ -31,7 +31,7 @@ async def upload(module, collection, file, representation, provider_name, Args: module(SyncServerModule): object to run SyncServerModule API - collection (str): source collection + project_name (str): source db file (dictionary): of file from representation in Mongo representation (dictionary): of representation provider_name (string): gdrive, gdc etc. @@ -47,15 +47,16 @@ async def upload(module, collection, file, representation, provider_name, # thread can do that at a time, upload/download to prepared # structure should be run in parallel remote_handler = lib.factory.get_provider(provider_name, - collection, + project_name, remote_site_name, tree=tree, presets=preset) file_path = file.get("path", "") try: - local_file_path, remote_file_path = resolve_paths(module, - file_path, collection, remote_site_name, remote_handler + local_file_path, remote_file_path = resolve_paths( + module, file_path, project_name, + remote_site_name, remote_handler ) except Exception as exp: print(exp) @@ -74,27 +75,28 @@ async def upload(module, collection, file, representation, provider_name, local_file_path, remote_file_path, module, - collection, + project_name, file, representation, remote_site_name, True ) - module.handle_alternate_site(collection, representation, remote_site_name, + module.handle_alternate_site(project_name, representation, + remote_site_name, file["_id"], file_id) return file_id -async def download(module, collection, file, representation, provider_name, +async def download(module, project_name, file, representation, provider_name, remote_site_name, tree=None, preset=None): """ Downloads file to local folder denoted in representation.Context. 
Args: module(SyncServerModule): object to run SyncServerModule API - collection (str): source collection + project_name (str): source project name file (dictionary) : info about processed file representation (dictionary): repr that 'file' belongs to provider_name (string): 'gdrive' etc @@ -108,20 +110,20 @@ """ with module.lock: remote_handler = lib.factory.get_provider(provider_name, - collection, + project_name, remote_site_name, tree=tree, presets=preset) file_path = file.get("path", "") local_file_path, remote_file_path = resolve_paths( - module, file_path, collection, remote_site_name, remote_handler + module, file_path, project_name, remote_site_name, remote_handler ) local_folder = os.path.dirname(local_file_path) os.makedirs(local_folder, exist_ok=True) - local_site = module.get_active_site(collection) + local_site = module.get_active_site(project_name) loop = asyncio.get_running_loop() file_id = await loop.run_in_executor(None, @@ -129,20 +131,20 @@ remote_file_path, local_file_path, module, - collection, + project_name, file, representation, local_site, True ) - module.handle_alternate_site(collection, representation, local_site, + module.handle_alternate_site(project_name, representation, local_site, file["_id"], file_id) return file_id -def resolve_paths(module, file_path, collection, +def resolve_paths(module, file_path, project_name, remote_site_name=None, remote_handler=None): """ Returns tuple of local and remote file paths with {root} @@ -153,7 +155,7 @@ Args: module(SyncServerModule): object to run SyncServerModule API file_path(string): path with {root} - collection(string): project name + project_name(string): project name remote_site_name(string): remote site remote_handler(AbstractProvider): implementation Returns: @@ -164,7 +166,7 @@ remote_file_path = remote_handler.resolve_path(file_path) local_handler = lib.factory.get_provider( - 'local_drive', collection, module.get_active_site(collection)) + 'local_drive', project_name, module.get_active_site(project_name)) local_file_path = local_handler.resolve_path(file_path) return local_file_path, remote_file_path @@ -269,8 +271,8 @@ class SyncServerThread(threading.Thread): - gets list of collections in DB - gets list of active remote providers (has configuration, credentials) - - for each collection it looks for representations that should - be synced + - for each project it looks for representations that + should be synced - synchronize found collections - update representations - fills error messages for exceptions - waits X seconds and repeat @@ -282,17 +284,17 @@ import time start_time = time.time() self.module.set_sync_project_settings() # clean cache - collection = None + project_name = None enabled_projects = self.module.get_enabled_projects() - for collection in enabled_projects: - preset = self.module.sync_project_settings[collection] + for project_name in enabled_projects: + preset = self.module.sync_project_settings[project_name] - local_site, remote_site = self._working_sites(collection) + local_site, remote_site = self._working_sites(project_name) if not all([local_site, remote_site]): continue sync_repres = self.module.get_sync_representations( - collection, + project_name, local_site, remote_site ) @@ -310,7 +312,7 @@ class
SyncServerThread(threading.Thread): remote_provider = \ self.module.get_provider_for_site(site=remote_site) handler = lib.factory.get_provider(remote_provider, - collection, + project_name, remote_site, presets=site_preset) limit = lib.factory.get_provider_batch_limit( @@ -341,7 +343,7 @@ class SyncServerThread(threading.Thread): limit -= 1 task = asyncio.create_task( upload(self.module, - collection, + project_name, file, sync, remote_provider, @@ -353,7 +355,7 @@ class SyncServerThread(threading.Thread): files_processed_info.append((file, sync, remote_site, - collection + project_name )) processed_file_path.add(file_path) if status == SyncStatus.DO_DOWNLOAD: @@ -361,7 +363,7 @@ class SyncServerThread(threading.Thread): limit -= 1 task = asyncio.create_task( download(self.module, - collection, + project_name, file, sync, remote_provider, @@ -373,7 +375,7 @@ class SyncServerThread(threading.Thread): files_processed_info.append((file, sync, local_site, - collection + project_name )) processed_file_path.add(file_path) @@ -384,12 +386,12 @@ class SyncServerThread(threading.Thread): return_exceptions=True) for file_id, info in zip(files_created, files_processed_info): - file, representation, site, collection = info + file, representation, site, project_name = info error = None if isinstance(file_id, BaseException): error = str(file_id) file_id = None - self.module.update_db(collection, + self.module.update_db(project_name, file_id, file, representation, @@ -399,7 +401,7 @@ class SyncServerThread(threading.Thread): duration = time.time() - start_time log.debug("One loop took {:.2f}s".format(duration)) - delay = self.module.get_loop_delay(collection) + delay = self.module.get_loop_delay(project_name) log.debug("Waiting for {} seconds to new loop".format(delay)) self.timer = asyncio.create_task(self.run_timer(delay)) await asyncio.gather(self.timer) @@ -458,19 +460,19 @@ class SyncServerThread(threading.Thread): self.timer.cancel() self.timer = None - def _working_sites(self, collection): - if self.module.is_project_paused(collection): + def _working_sites(self, project_name): + if self.module.is_project_paused(project_name): log.debug("Both sites same, skipping") return None, None - local_site = self.module.get_active_site(collection) - remote_site = self.module.get_remote_site(collection) + local_site = self.module.get_active_site(project_name) + remote_site = self.module.get_remote_site(project_name) if local_site == remote_site: log.debug("{}-{} sites same, skipping".format(local_site, remote_site)) return None, None - configured_sites = _get_configured_sites(self.module, collection) + configured_sites = _get_configured_sites(self.module, project_name) if not all([local_site in configured_sites, remote_site in configured_sites]): log.debug("Some of the sites {} - {} is not ".format(local_site, diff --git a/openpype/modules/sync_server/sync_server_module.py b/openpype/modules/sync_server/sync_server_module.py index 4027561d22..8fdfab9c2e 100644 --- a/openpype/modules/sync_server/sync_server_module.py +++ b/openpype/modules/sync_server/sync_server_module.py @@ -25,6 +25,8 @@ from .providers import lib from .utils import time_function, SyncStatus, SiteAlreadyPresentError +from openpype.client import get_representations, get_representation_by_id + log = PypeLogger.get_logger("SyncServer") @@ -128,12 +130,12 @@ class SyncServerModule(OpenPypeModule, ITrayModule): self.projects_processed = set() """ Start of Public API """ - def add_site(self, collection, representation_id, site_name=None, + def 
add_site(self, project_name, representation_id, site_name=None, force=False): """ Adds new site to representation to be synced. - 'collection' must have synchronization enabled (globally or + 'project_name' must have synchronization enabled (globally or project only) Used as a API endpoint from outside applications (Loader etc). @@ -141,7 +143,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): Use 'force' to reset existing site. Args: - collection (string): project name (must match DB) + project_name (string): project name (must match DB) representation_id (string): MongoDB _id value site_name (string): name of configured and active site force (bool): reset site if exists @@ -151,25 +153,25 @@ class SyncServerModule(OpenPypeModule, ITrayModule): not 'force' ValueError - other errors (repre not found, misconfiguration) """ - if not self.get_sync_project_setting(collection): + if not self.get_sync_project_setting(project_name): raise ValueError("Project not configured") if not site_name: site_name = self.DEFAULT_SITE - self.reset_site_on_representation(collection, + self.reset_site_on_representation(project_name, representation_id, site_name=site_name, force=force) - def remove_site(self, collection, representation_id, site_name, + def remove_site(self, project_name, representation_id, site_name, remove_local_files=False): """ Removes 'site_name' for particular 'representation_id' on - 'collection' + 'project_name' Args: - collection (string): project name (must match DB) + project_name (string): project name (must match DB) representation_id (string): MongoDB _id value site_name (string): name of configured and active site remove_local_files (bool): remove only files for 'local_id' @@ -178,15 +180,15 @@ class SyncServerModule(OpenPypeModule, ITrayModule): Returns: throws ValueError if any issue """ - if not self.get_sync_project_setting(collection): + if not self.get_sync_project_setting(project_name): raise ValueError("Project not configured") - self.reset_site_on_representation(collection, + self.reset_site_on_representation(project_name, representation_id, site_name=site_name, remove=True) if remove_local_files: - self._remove_local_file(collection, representation_id, site_name) + self._remove_local_file(project_name, representation_id, site_name) def compute_resource_sync_sites(self, project_name): """Get available resource sync sites state for publish process. @@ -333,9 +335,9 @@ class SyncServerModule(OpenPypeModule, ITrayModule): return alt_site_pairs - def clear_project(self, collection, site_name): + def clear_project(self, project_name, site_name): """ - Clear 'collection' of 'site_name' and its local files + Clear 'project_name' of 'site_name' and its local files Works only on real local sites, not on 'studio' """ @@ -344,16 +346,17 @@ class SyncServerModule(OpenPypeModule, ITrayModule): "files.sites.name": site_name } + # TODO currently not possible to replace with get_representations representations = list( - self.connection.database[collection].find(query)) + self.connection.database[project_name].find(query)) if not representations: self.log.debug("No repre found") return for repre in representations: - self.remove_site(collection, repre.get("_id"), site_name, True) + self.remove_site(project_name, repre.get("_id"), site_name, True) - def create_validate_project_task(self, collection, site_name): + def create_validate_project_task(self, project_name, site_name): """Adds metadata about project files validation on a queue. 
This process will loop through all representations and check if @@ -370,33 +373,28 @@ """ task = { "type": "validate", - "project_name": collection, - "func": lambda: self.validate_project(collection, site_name, + "project_name": project_name, + "func": lambda: self.validate_project(project_name, site_name, reset_missing=True) } - self.projects_processed.add(collection) + self.projects_processed.add(project_name) self.long_running_tasks.append(task) - def validate_project(self, collection, site_name, reset_missing=False): - """Validate 'collection' of 'site_name' and its local files + def validate_project(self, project_name, site_name, reset_missing=False): + """Validate 'project_name' of 'site_name' and its local files If file present and not marked with a 'site_name' in DB, DB is updated with site name and file modified date. Args: - collection (string): project name + project_name (string): project name site_name (string): active site name reset_missing (bool): if True reset site in DB if missing physically """ - self.log.debug("Validation of {} for {} started".format(collection, + self.log.debug("Validation of {} for {} started".format(project_name, site_name)) - query = { - "type": "representation" - } - - representations = list( - self.connection.database[collection].find(query)) + representations = list(get_representations(project_name)) if not representations: self.log.debug("No repre found") return @@ -416,7 +414,7 @@ continue file_path = repre_file.get("path", "") - local_file_path = self.get_local_file_path(collection, + local_file_path = self.get_local_file_path(project_name, site_name, file_path) @@ -428,14 +426,11 @@ "Adding site {} for {}".format(site_name, repre_id)) - query = { - "_id": repre_id - } created_dt = datetime.fromtimestamp( os.path.getmtime(local_file_path)) elem = {"name": site_name, "created_dt": created_dt} - self._add_site(collection, query, repre, elem, + self._add_site(project_name, repre, elem, site_name=site_name, file_id=repre_file["_id"], force=True) @@ -445,41 +440,42 @@ self.log.debug("Resetting site {} for {}". format(site_name, repre_id)) self.reset_site_on_representation( - collection, repre_id, site_name=site_name, + project_name, repre_id, site_name=site_name, file_id=repre_file["_id"]) sites_reset += 1 if sites_added % 100 == 0: self.log.debug("Sites added {}".format(sites_added)) - self.log.debug("Validation of {} for {} ended".format(collection, + self.log.debug("Validation of {} for {} ended".format(project_name, site_name)) self.log.info("Sites added {}, sites reset {}".format(sites_added, sites_reset)) - def pause_representation(self, collection, representation_id, site_name): + def pause_representation(self, project_name, representation_id, site_name): """ Sets 'representation_id' as paused, eg. no syncing should be happening on it. Args: - collection (string): project name + project_name (string): project name representation_id (string): MongoDB objectId value site_name (string): 'gdrive', 'studio' etc.
""" log.info("Pausing SyncServer for {}".format(representation_id)) self._paused_representations.add(representation_id) - self.reset_site_on_representation(collection, representation_id, + self.reset_site_on_representation(project_name, representation_id, site_name=site_name, pause=True) - def unpause_representation(self, collection, representation_id, site_name): + def unpause_representation(self, project_name, + representation_id, site_name): """ Sets 'representation_id' as unpaused. Does not fail or warn if repre wasn't paused. Args: - collection (string): project name + project_name (string): project name representation_id (string): MongoDB objectId value site_name (string): 'gdrive', 'studio' etc. """ @@ -489,7 +485,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): except KeyError: pass # self.paused_representations is not persistent - self.reset_site_on_representation(collection, representation_id, + self.reset_site_on_representation(project_name, representation_id, site_name=site_name, pause=False) def is_representation_paused(self, representation_id, @@ -520,7 +516,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): happening on all representation inside. Args: - project_name (string): collection name + project_name (string): project_name name """ log.info("Pausing SyncServer for {}".format(project_name)) self._paused_projects.add(project_name) @@ -532,7 +528,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): Does not fail or warn if project wasn't paused. Args: - project_name (string): collection name + project_name (string): """ log.info("Unpausing SyncServer for {}".format(project_name)) try: @@ -545,7 +541,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): Returns if 'project_name' is paused or not. Args: - project_name (string): collection name + project_name (string): check_parents (bool): check if server itself is not paused Returns: @@ -944,8 +940,8 @@ class SyncServerModule(OpenPypeModule, ITrayModule): return True return False - def handle_alternate_site(self, collection, representation, processed_site, - file_id, synced_file_id): + def handle_alternate_site(self, project_name, representation, + processed_site, file_id, synced_file_id): """ For special use cases where one site vendors another. @@ -958,7 +954,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): same location >> file is accesible on 'sftp' site right away. 
Args: - collection (str): name of project + project_name (str): name of project representation (dict) processed_site (str): real site_name of published/uploaded file file_id (ObjectId): DB id of file handled @@ -982,26 +978,112 @@ class SyncServerModule(OpenPypeModule, ITrayModule): alternate_sites = set(alternate_sites) for alt_site in alternate_sites: - query = { - "_id": representation["_id"] - } elem = {"name": alt_site, "created_dt": datetime.now(), "id": synced_file_id} self.log.debug("Adding alternate {} to {}".format( alt_site, representation["_id"])) - self._add_site(collection, query, + self._add_site(project_name, representation, elem, alt_site, file_id=file_id, force=True) + def get_repre_info_for_versions(self, project_name, version_ids, + active_site, remote_site): + """Returns representation documents for versions and sites combi + + Args: + project_name (str) + version_ids (list): of version[_id] + active_site (string): 'local', 'studio' etc + remote_site (string): dtto + Returns: + + """ + self.connection.Session["AVALON_PROJECT"] = project_name + query = [ + {"$match": {"parent": {"$in": version_ids}, + "type": "representation", + "files.sites.name": {"$exists": 1}}}, + {"$unwind": "$files"}, + {'$addFields': { + 'order_local': { + '$filter': { + 'input': '$files.sites', 'as': 'p', + 'cond': {'$eq': ['$$p.name', active_site]} + } + } + }}, + {'$addFields': { + 'order_remote': { + '$filter': { + 'input': '$files.sites', 'as': 'p', + 'cond': {'$eq': ['$$p.name', remote_site]} + } + } + }}, + {'$addFields': { + 'progress_local': {"$arrayElemAt": [{ + '$cond': [ + {'$size': "$order_local.progress"}, + "$order_local.progress", + # if exists created_dt count is as available + {'$cond': [ + {'$size': "$order_local.created_dt"}, + [1], + [0] + ]} + ]}, + 0 + ]} + }}, + {'$addFields': { + 'progress_remote': {"$arrayElemAt": [{ + '$cond': [ + {'$size': "$order_remote.progress"}, + "$order_remote.progress", + # if exists created_dt count is as available + {'$cond': [ + {'$size': "$order_remote.created_dt"}, + [1], + [0] + ]} + ]}, + 0 + ]} + }}, + {'$group': { # first group by repre + '_id': '$_id', + 'parent': {'$first': '$parent'}, + 'avail_ratio_local': { + '$first': { + '$divide': [{'$sum': "$progress_local"}, {'$sum': 1}] + } + }, + 'avail_ratio_remote': { + '$first': { + '$divide': [{'$sum': "$progress_remote"}, {'$sum': 1}] + } + } + }}, + {'$group': { # second group by parent, eg version_id + '_id': '$parent', + 'repre_count': {'$sum': 1}, # total representations + # fully available representation for site + 'avail_repre_local': {'$sum': "$avail_ratio_local"}, + 'avail_repre_remote': {'$sum': "$avail_ratio_remote"}, + }}, + ] + # docs = list(self.connection.aggregate(query)) + return self.connection.aggregate(query) + """ End of Public API """ - def get_local_file_path(self, collection, site_name, file_path): + def get_local_file_path(self, project_name, site_name, file_path): """ Externalized for app """ - handler = LocalDriveHandler(collection, site_name) + handler = LocalDriveHandler(project_name, site_name) local_file_path = handler.resolve_path(file_path) return local_file_path @@ -1288,7 +1370,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): return sites.get(site, 'N/A') @time_function - def get_sync_representations(self, collection, active_site, remote_site): + def get_sync_representations(self, project_name, active_site, remote_site): """ Get representations that should be synced, these could be recognised by presence of document in 'files.sites', where key 
is @@ -1299,8 +1381,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): better performance. Goal is to get as few representations as possible. Args: - collection (string): name of collection (in most cases matches - project name + project_name (string): active_site (string): identifier of current active site (could be 'local_0' when working from home, 'studio' when working in the studio (default) @@ -1309,10 +1390,10 @@ class SyncServerModule(OpenPypeModule, ITrayModule): Returns: (list) of dictionaries """ - log.debug("Check representations for : {}".format(collection)) - self.connection.Session["AVALON_PROJECT"] = collection + log.debug("Check representations for : {}".format(project_name)) + self.connection.Session["AVALON_PROJECT"] = project_name # retry_cnt - number of attempts to sync specific file before giving up - retries_arr = self._get_retries_arr(collection) + retries_arr = self._get_retries_arr(project_name) match = { "type": "representation", "$or": [ @@ -1449,14 +1530,14 @@ class SyncServerModule(OpenPypeModule, ITrayModule): return SyncStatus.DO_NOTHING - def update_db(self, collection, new_file_id, file, representation, + def update_db(self, project_name, new_file_id, file, representation, site, error=None, progress=None, priority=None): """ Update 'provider' portion of records in DB with success (file_id) or error (exception) Args: - collection (string): name of project - force to db connection as + project_name (string): name of project - force to db connection as each file might come from different collection new_file_id (string): file (dictionary): info about processed file (pulled from DB) @@ -1499,7 +1580,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): if file_id: arr_filter.append({'f._id': ObjectId(file_id)}) - self.connection.database[collection].update_one( + self.connection.database[project_name].update_one( query, update, upsert=True, @@ -1562,7 +1643,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): return -1, None - def reset_site_on_representation(self, collection, representation_id, + def reset_site_on_representation(self, project_name, representation_id, side=None, file_id=None, site_name=None, remove=False, pause=None, force=False): """ @@ -1579,7 +1660,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): Should be used when repre should be synced to new site. Args: - collection (string): name of project (eg. collection) in DB + project_name (string): name of project (eg. collection) in DB representation_id(string): _id of representation file_id (string): file _id in representation side (string): local or remote side @@ -1593,20 +1674,18 @@ class SyncServerModule(OpenPypeModule, ITrayModule): not 'force' ValueError - other errors (repre not found, misconfiguration) """ - query = { - "_id": ObjectId(representation_id) - } - - representation = self.connection.database[collection].find_one(query) + representation = get_representation_by_id(project_name, + representation_id) if not representation: raise ValueError("Representation {} not found in {}". 
- format(representation_id, collection)) + format(representation_id, project_name)) + if side and site_name: raise ValueError("Misconfiguration, only one of side and " + "site_name arguments should be passed.") - local_site = self.get_active_site(collection) - remote_site = self.get_remote_site(collection) + local_site = self.get_active_site(project_name) + remote_site = self.get_remote_site(project_name) if side: if side == 'local': @@ -1617,37 +1696,43 @@ class SyncServerModule(OpenPypeModule, ITrayModule): elem = {"name": site_name} if file_id: # reset site for particular file - self._reset_site_for_file(collection, query, + self._reset_site_for_file(project_name, representation_id, elem, file_id, site_name) elif side: # reset site for whole representation - self._reset_site(collection, query, elem, site_name) + self._reset_site(project_name, representation_id, elem, site_name) elif remove: # remove site for whole representation - self._remove_site(collection, query, representation, site_name) + self._remove_site(project_name, + representation, site_name) elif pause is not None: - self._pause_unpause_site(collection, query, + self._pause_unpause_site(project_name, representation, site_name, pause) else: # add new site to all files for representation - self._add_site(collection, query, representation, elem, site_name, + self._add_site(project_name, representation, elem, site_name, force=force) - def _update_site(self, collection, query, update, arr_filter): + def _update_site(self, project_name, representation_id, + update, arr_filter): """ Auxiliary method to call update_one function on DB Used for refactoring ugly reset_provider_for_file """ - self.connection.database[collection].update_one( + query = { + "_id": ObjectId(representation_id) + } + + self.connection.database[project_name].update_one( query, update, upsert=True, array_filters=arr_filter ) - def _reset_site_for_file(self, collection, query, + def _reset_site_for_file(self, project_name, representation_id, elem, file_id, site_name): """ Resets 'site_name' for 'file_id' on representation in 'query' on - 'collection' + 'project_name' """ update = { "$set": {"files.$[f].sites.$[s]": elem} @@ -1660,9 +1745,9 @@ class SyncServerModule(OpenPypeModule, ITrayModule): {'f._id': file_id} ] - self._update_site(collection, query, update, arr_filter) + self._update_site(project_name, representation_id, update, arr_filter) - def _reset_site(self, collection, query, elem, site_name): + def _reset_site(self, project_name, representation_id, elem, site_name): """ Resets 'site_name' for all files of representation in 'query' """ @@ -1674,9 +1759,9 @@ class SyncServerModule(OpenPypeModule, ITrayModule): {'s.name': site_name} ] - self._update_site(collection, query, update, arr_filter) + self._update_site(project_name, representation_id, update, arr_filter) - def _remove_site(self, collection, query, representation, site_name): + def _remove_site(self, project_name, representation, site_name): """ Removes 'site_name' for 'representation' in 'query' @@ -1698,10 +1783,11 @@ class SyncServerModule(OpenPypeModule, ITrayModule): } arr_filter = [] - self._update_site(collection, query, update, arr_filter) + self._update_site(project_name, representation["_id"], + update, arr_filter) - def _pause_unpause_site(self, collection, query, - representation, site_name, pause): + def _pause_unpause_site(self, project_name, representation, + site_name, pause): """ Pauses/unpauses all files for 'representation' based on 'pause' @@ -1733,12 +1819,13 @@ class 
SyncServerModule(OpenPypeModule, ITrayModule): {'s.name': site_name} ] - self._update_site(collection, query, update, arr_filter) + self._update_site(project_name, representation["_id"], + update, arr_filter) - def _add_site(self, collection, query, representation, elem, site_name, + def _add_site(self, project_name, representation, elem, site_name, force=False, file_id=None): """ - Adds 'site_name' to 'representation' on 'collection' + Adds 'site_name' to 'representation' on 'project_name' Args: representation (dict) @@ -1746,10 +1833,11 @@ class SyncServerModule(OpenPypeModule, ITrayModule): Use 'force' to remove existing or raises ValueError """ + representation_id = representation["_id"] reset_existing = False files = representation.get("files", []) if not files: - log.debug("No files for {}".format(representation["_id"])) + log.debug("No files for {}".format(representation_id)) return for repre_file in files: @@ -1759,7 +1847,8 @@ class SyncServerModule(OpenPypeModule, ITrayModule): for site in repre_file.get("sites"): if site["name"] == site_name: if force or site.get("error"): - self._reset_site_for_file(collection, query, + self._reset_site_for_file(project_name, + representation_id, elem, repre_file["_id"], site_name) reset_existing = True @@ -1785,14 +1874,15 @@ class SyncServerModule(OpenPypeModule, ITrayModule): {'f._id': file_id} ] - self._update_site(collection, query, update, arr_filter) + self._update_site(project_name, representation_id, + update, arr_filter) - def _remove_local_file(self, collection, representation_id, site_name): + def _remove_local_file(self, project_name, representation_id, site_name): """ Removes all local files for 'site_name' of 'representation_id' Args: - collection (string): project name (must match DB) + project_name (string): project name (must match DB) representation_id (string): MongoDB _id value site_name (string): name of configured and active site @@ -1808,21 +1898,17 @@ class SyncServerModule(OpenPypeModule, ITrayModule): provider_name = self.get_provider_for_site(site=site_name) if provider_name == 'local_drive': - query = { - "_id": ObjectId(representation_id) - } - - representation = list( - self.connection.database[collection].find(query)) + representation = get_representation_by_id(project_name, + representation_id, + fields=["files"]) if not representation: self.log.debug("No repre {} found".format( representation_id)) return - representation = representation.pop() local_file_path = '' for file in representation.get("files"): - local_file_path = self.get_local_file_path(collection, + local_file_path = self.get_local_file_path(project_name, site_name, file.get("path", "") ) diff --git a/openpype/modules/sync_server/tray/models.py b/openpype/modules/sync_server/tray/models.py index 6d1e85c17a..629c4cbbf1 100644 --- a/openpype/modules/sync_server/tray/models.py +++ b/openpype/modules/sync_server/tray/models.py @@ -11,6 +11,7 @@ from openpype.tools.utils.delegates import pretty_timestamp from openpype.lib import PypeLogger from openpype.api import get_local_site_id +from openpype.client import get_representation_by_id from . import lib @@ -440,7 +441,7 @@ class SyncRepresentationSummaryModel(_SyncRepresentationModel): full text filtering. Allows pagination, most of heavy lifting is being done on DB side. - Single model matches to single collection. When project is changed, + Single model matches to single project. When project is changed, model is reset and refreshed. 
Args: @@ -919,11 +920,10 @@ class SyncRepresentationSummaryModel(_SyncRepresentationModel): repre_id = self.data(index, Qt.UserRole) - representation = list(self.dbcon.find({"type": "representation", - "_id": repre_id})) + representation = get_representation_by_id(self.project, repre_id) if representation: self.sync_server.update_db(self.project, None, None, - representation.pop(), + representation, get_local_site_id(), priority=value) self.is_editing = False @@ -1357,11 +1357,10 @@ class SyncRepresentationDetailModel(_SyncRepresentationModel): file_id = self.data(index, Qt.UserRole) updated_file = None - # conversion from cursor to list - representations = list(self.dbcon.find({"type": "representation", - "_id": self._id})) + representation = get_representation_by_id(self.project, self._id) + if not representation: + return - representation = representations.pop() for repre_file in representation["files"]: if repre_file["_id"] == file_id: updated_file = repre_file diff --git a/openpype/modules/timers_manager/plugins/publish/start_timer.py b/openpype/modules/timers_manager/plugins/publish/start_timer.py new file mode 100644 index 0000000000..6408327ca1 --- /dev/null +++ b/openpype/modules/timers_manager/plugins/publish/start_timer.py @@ -0,0 +1,39 @@ +""" +Requires: + context -> system_settings + context -> openPypeModules +""" + +import pyblish.api + +from openpype.pipeline import legacy_io + + +class StartTimer(pyblish.api.ContextPlugin): + label = "Start Timer" + order = pyblish.api.IntegratorOrder + 1 + hosts = ["*"] + + def process(self, context): + timers_manager = context.data["openPypeModules"]["timers_manager"] + if not timers_manager.enabled: + self.log.debug("TimersManager is disabled") + return + + modules_settings = context.data["system_settings"]["modules"] + if not modules_settings["timers_manager"]["disregard_publishing"]: + self.log.debug("Publish is not affecting running timers.") + return + + project_name = legacy_io.active_project() + asset_name = legacy_io.Session.get("AVALON_ASSET") + task_name = legacy_io.Session.get("AVALON_TASK") + if not project_name or not asset_name or not task_name: + self.log.info(( + "Current context does not contain all" + " required information to start a timer." 
+ )) + return + timers_manager.start_timer_with_webserver( + project_name, asset_name, task_name, self.log + ) diff --git a/openpype/modules/timers_manager/plugins/publish/stop_timer.py b/openpype/modules/timers_manager/plugins/publish/stop_timer.py new file mode 100644 index 0000000000..a8674ff2ca --- /dev/null +++ b/openpype/modules/timers_manager/plugins/publish/stop_timer.py @@ -0,0 +1,27 @@ +""" +Requires: + context -> system_settings + context -> openPypeModules +""" + + +import pyblish.api + + +class StopTimer(pyblish.api.ContextPlugin): + label = "Stop Timer" + order = pyblish.api.ExtractorOrder - 0.49 + hosts = ["*"] + + def process(self, context): + timers_manager = context.data["openPypeModules"]["timers_manager"] + if not timers_manager.enabled: + self.log.debug("TimersManager is disabled") + return + + modules_settings = context.data["system_settings"]["modules"] + if not modules_settings["timers_manager"]["disregard_publishing"]: + self.log.debug("Publish is not affecting running timers.") + return + + timers_manager.stop_timer_with_webserver(self.log) diff --git a/openpype/modules/timers_manager/timers_manager.py b/openpype/modules/timers_manager/timers_manager.py index 3453e4bc4c..93332ace4f 100644 --- a/openpype/modules/timers_manager/timers_manager.py +++ b/openpype/modules/timers_manager/timers_manager.py @@ -6,12 +6,15 @@ from openpype.client import get_asset_by_name from openpype.modules import OpenPypeModule from openpype_interfaces import ( ITrayService, - ILaunchHookPaths + ILaunchHookPaths, + IPluginPaths ) from openpype.lib.events import register_event_callback from .exceptions import InvalidContextError +TIMER_MODULE_DIR = os.path.dirname(os.path.abspath(__file__)) + class ExampleTimersManagerConnector: """Timers manager can handle timers of multiple modules/addons. @@ -33,6 +36,7 @@ class ExampleTimersManagerConnector: } ``` """ + # Not needed at all def __init__(self, module): # Store timer manager module to be able call it's methods when needed @@ -72,7 +76,12 @@ class ExampleTimersManagerConnector: self._timers_manager_module.timer_stopped(self._module.id) -class TimersManager(OpenPypeModule, ITrayService, ILaunchHookPaths): +class TimersManager( + OpenPypeModule, + ITrayService, + ILaunchHookPaths, + IPluginPaths +): """ Handles about Timers. Should be able to start/stop all timers at once. @@ -177,11 +186,19 @@ class TimersManager(OpenPypeModule, ITrayService, ILaunchHookPaths): def get_launch_hook_paths(self): """Implementation of `ILaunchHookPaths`.""" + return os.path.join( - os.path.dirname(os.path.abspath(__file__)), + TIMER_MODULE_DIR, "launch_hooks" ) + def get_plugin_paths(self): + """Implementation of `IPluginPaths`.""" + + return { + "publish": [os.path.join(TIMER_MODULE_DIR, "plugins", "publish")] + } + @staticmethod def get_timer_data_for_context( project_name, asset_name, task_name, logger=None @@ -388,6 +405,7 @@ class TimersManager(OpenPypeModule, ITrayService, ILaunchHookPaths): logger (logging.Logger): Logger object. Using 'print' if not passed. """ + webserver_url = os.environ.get("OPENPYPE_WEBSERVER_URL") if not webserver_url: msg = "Couldn't find webserver url" @@ -415,6 +433,36 @@ class TimersManager(OpenPypeModule, ITrayService, ILaunchHookPaths): return requests.post(rest_api_url, json=data) + @staticmethod + def stop_timer_with_webserver(logger=None): + """Prepared method for calling stop timers on REST api. + + Args: + logger (logging.Logger): Logger used for logging messages. 
+ """ + + webserver_url = os.environ.get("OPENPYPE_WEBSERVER_URL") + if not webserver_url: + msg = "Couldn't find webserver url" + if logger is not None: + logger.warning(msg) + else: + print(msg) + return + + rest_api_url = "{}/timers_manager/stop_timer".format(webserver_url) + try: + import requests + except Exception: + msg = "Couldn't start timer ('requests' is not available)" + if logger is not None: + logger.warning(msg) + else: + print(msg) + return + + return requests.post(rest_api_url) + def on_host_install(self, host, host_name, project_name): self.log.debug("Installing task changed callback") register_event_callback("taskChanged", self._on_host_task_change) diff --git a/openpype/pipeline/context_tools.py b/openpype/pipeline/context_tools.py index c8c70e5ea8..5f763cd249 100644 --- a/openpype/pipeline/context_tools.py +++ b/openpype/pipeline/context_tools.py @@ -22,6 +22,10 @@ from openpype.settings import get_project_settings from .publish.lib import filter_pyblish_plugins from .anatomy import Anatomy from .template_data import get_template_data_with_names +from .workfile import ( + get_workfile_template_key, + get_custom_workfile_template_by_string_context, +) from . import ( legacy_io, register_loader_plugin_path, @@ -377,3 +381,67 @@ def get_template_data_from_session(session=None, system_settings=None): return get_template_data_with_names( project_name, asset_name, task_name, host_name, system_settings ) + + +def get_workdir_from_session(session=None, template_key=None): + """Template data for template fill from session keys. + + Args: + session (Union[Dict[str, str], None]): The Session to use. If not + provided use the currently active global Session. + template_key (str): Prepared template key from which workdir is + calculated. + + Returns: + str: Workdir path. + """ + + if session is None: + session = legacy_io.Session + project_name = session["AVALON_PROJECT"] + host_name = session["AVALON_APP"] + anatomy = Anatomy(project_name) + template_data = get_template_data_from_session(session) + anatomy_filled = anatomy.format(template_data) + + if not template_key: + task_type = template_data["task"]["type"] + template_key = get_workfile_template_key( + task_type, + host_name, + project_name=project_name + ) + path = anatomy_filled[template_key]["folder"] + if path: + path = os.path.normpath(path) + return path + + +def get_custom_workfile_template_from_session( + session=None, project_settings=None +): + """Filter and fill workfile template profiles by current context. + + Current context is defined by `legacy_io.Session`. That's why this + function should be used only inside host where context is set and stable. + + Args: + session (Union[None, Dict[str, str]]): Session from which are taken + data. + project_settings(Dict[str, Any]): Template profiles from settings. + + Returns: + str: Path to template or None if none of profiles match current + context. (Existence of formatted path is not validated.) 
+ """ + + if session is None: + session = legacy_io.Session + + return get_custom_workfile_template_by_string_context( + session["AVALON_PROJECT"], + session["AVALON_ASSET"], + session["AVALON_TASK"], + session["AVALON_APP"], + project_settings=project_settings + ) diff --git a/openpype/pipeline/workfile/__init__.py b/openpype/pipeline/workfile/__init__.py new file mode 100644 index 0000000000..0aad29b6f9 --- /dev/null +++ b/openpype/pipeline/workfile/__init__.py @@ -0,0 +1,30 @@ +from .path_resolving import ( + get_workfile_template_key_from_context, + get_workfile_template_key, + get_workdir_with_workdir_data, + get_workdir, + + get_last_workfile_with_version, + get_last_workfile, + + get_custom_workfile_template, + get_custom_workfile_template_by_string_context, +) + +from .build_workfile import BuildWorkfile + + +__all__ = ( + "get_workfile_template_key_from_context", + "get_workfile_template_key", + "get_workdir_with_workdir_data", + "get_workdir", + + "get_last_workfile_with_version", + "get_last_workfile", + + "get_custom_workfile_template", + "get_custom_workfile_template_by_string_context", + + "BuildWorkfile", +) diff --git a/openpype/pipeline/workfile/build_workfile.py b/openpype/pipeline/workfile/build_workfile.py new file mode 100644 index 0000000000..bb6fcb4189 --- /dev/null +++ b/openpype/pipeline/workfile/build_workfile.py @@ -0,0 +1,693 @@ +import os +import re +import collections +import json + +from openpype.client import ( + get_asset_by_name, + get_subsets, + get_last_versions, + get_representations, +) +from openpype.settings import get_project_settings +from openpype.lib import ( + get_linked_assets, + filter_profiles, + Logger, +) +from openpype.pipeline import legacy_io +from openpype.pipeline.load import ( + discover_loader_plugins, + IncompatibleLoaderError, + load_container, +) + + +class BuildWorkfile: + """Wrapper for build workfile process. + + Load representations for current context by build presets. Build presets + are host related, since each host has it's loaders. + """ + + _log = None + + @property + def log(self): + if self._log is None: + self._log = Logger.get_logger(self.__class__.__name__) + return self._log + + @staticmethod + def map_subsets_by_family(subsets): + subsets_by_family = collections.defaultdict(list) + for subset in subsets: + family = subset["data"].get("family") + if not family: + families = subset["data"].get("families") + if not families: + continue + family = families[0] + + subsets_by_family[family].append(subset) + return subsets_by_family + + def process(self): + """Main method of this wrapper. + + Building of workfile is triggered and is possible to implement + post processing of loaded containers if necessary. + + Returns: + List[Dict[str, Any]]: Loaded containers during build. + """ + + return self.build_workfile() + + def build_workfile(self): + """Prepares and load containers into workfile. + + Loads latest versions of current and linked assets to workfile by logic + stored in Workfile profiles from presets. Profiles are set by host, + filtered by current task name and used by families. + + Each family can specify representation names and loaders for + representations and first available and successful loaded + representation is returned as container. + + At the end you'll get list of loaded containers per each asset. + + loaded_containers [{ + "asset_entity": , + "containers": [, , ...] + }, { + "asset_entity": , + "containers": [, ...] + }, { + ... 
+ }] + + Returns: + List[Dict[str, Any]]: Loaded containers during build. + """ + + loaded_containers = [] + + # Get current asset name and entity + project_name = legacy_io.active_project() + current_asset_name = legacy_io.Session["AVALON_ASSET"] + current_asset_entity = get_asset_by_name( + project_name, current_asset_name + ) + # Skip if asset was not found + if not current_asset_entity: + print("Asset entity with name `{}` was not found".format( + current_asset_name + )) + return loaded_containers + + # Prepare available loaders + loaders_by_name = {} + for loader in discover_loader_plugins(): + loader_name = loader.__name__ + if loader_name in loaders_by_name: + raise KeyError( + "Duplicated loader name {0}!".format(loader_name) + ) + loaders_by_name[loader_name] = loader + + # Skip if there are any loaders + if not loaders_by_name: + self.log.warning("There are no registered loaders.") + return loaded_containers + + # Get current task name + current_task_name = legacy_io.Session["AVALON_TASK"] + + # Load workfile presets for task + self.build_presets = self.get_build_presets( + current_task_name, current_asset_entity + ) + + # Skip if there are any presets for task + if not self.build_presets: + self.log.warning( + "Current task `{}` does not have any loading preset.".format( + current_task_name + ) + ) + return loaded_containers + + # Get presets for loading current asset + current_context_profiles = self.build_presets.get("current_context") + # Get presets for loading linked assets + link_context_profiles = self.build_presets.get("linked_assets") + # Skip if both are missing + if not current_context_profiles and not link_context_profiles: + self.log.warning( + "Current task `{}` has empty loading preset.".format( + current_task_name + ) + ) + return loaded_containers + + elif not current_context_profiles: + self.log.warning(( + "Current task `{}` doesn't have any loading" + " preset for it's context." + ).format(current_task_name)) + + elif not link_context_profiles: + self.log.warning(( + "Current task `{}` doesn't have any" + "loading preset for it's linked assets." + ).format(current_task_name)) + + # Prepare assets to process by workfile presets + assets = [] + current_asset_id = None + if current_context_profiles: + # Add current asset entity if preset has current context set + assets.append(current_asset_entity) + current_asset_id = current_asset_entity["_id"] + + if link_context_profiles: + # Find and append linked assets if preset has set linked mapping + link_assets = get_linked_assets(current_asset_entity) + if link_assets: + assets.extend(link_assets) + + # Skip if there are no assets. This can happen if only linked mapping + # is set and there are no links for his asset. + if not assets: + self.log.warning( + "Asset does not have linked assets. Nothing to process." 
+            )
+            return loaded_containers
+
+        # Prepare entities from database for assets
+        prepared_entities = self._collect_last_version_repres(assets)
+
+        # Load containers by prepared entities and presets
+        # - Current asset containers
+        if current_asset_id and current_asset_id in prepared_entities:
+            current_context_data = prepared_entities.pop(current_asset_id)
+            loaded_data = self.load_containers_by_asset_data(
+                current_context_data, current_context_profiles, loaders_by_name
+            )
+            if loaded_data:
+                loaded_containers.append(loaded_data)
+
+        # - Linked assets containers
+        for linked_asset_data in prepared_entities.values():
+            loaded_data = self.load_containers_by_asset_data(
+                linked_asset_data, link_context_profiles, loaders_by_name
+            )
+            if loaded_data:
+                loaded_containers.append(loaded_data)
+
+        # Return list of loaded containers
+        return loaded_containers
+
+    def get_build_presets(self, task_name, asset_doc):
+        """Return presets to build workfile for the task name.
+
+        Presets are loaded for the current project set in
+        legacy_io.Session["AVALON_PROJECT"], filtered by the registered host
+        and the entered task name.
+
+        Args:
+            task_name (str): Task name used for filtering build presets.
+            asset_doc (Dict[str, Any]): Asset document from MongoDB used to
+                resolve the task type.
+
+        Returns:
+            Dict[str, Any]: Preset for the entered task name.
+        """
+
+        host_name = os.environ["AVALON_APP"]
+        project_settings = get_project_settings(
+            legacy_io.Session["AVALON_PROJECT"]
+        )
+
+        host_settings = project_settings.get(host_name) or {}
+        # Get presets for host
+        wb_settings = host_settings.get("workfile_builder")
+        if not wb_settings:
+            # backward compatibility
+            wb_settings = host_settings.get("workfile_build") or {}
+
+        builder_profiles = wb_settings.get("profiles")
+        if not builder_profiles:
+            return None
+
+        task_type = (
+            asset_doc
+            .get("data", {})
+            .get("tasks", {})
+            .get(task_name, {})
+            .get("type")
+        )
+        filter_data = {
+            "task_types": task_type,
+            "tasks": task_name
+        }
+        return filter_profiles(builder_profiles, filter_data)
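To make the preset shape concrete: the value returned by the method above is one profile from the host's `workfile_builder`/`profiles` project settings, matched by task. A hypothetical profile, with all values illustrative, might look like this:

```python
# Hypothetical build profile as filter_profiles() would return it.
# "current_context" and "linked_assets" each hold inner build profiles
# with "families", "repre_names" and "loaders" keys used further below.
profile = {
    "task_types": ["Compositing"],
    "tasks": ["compositing"],
    "current_context": [{
        "families": ["render"],
        "repre_names": ["exr", "mov"],
        "loaders": ["ReferenceLoader"],
    }],
    "linked_assets": [],
}
```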
+ """ + + valid_profiles = [] + for profile in build_profiles: + # Check loaders + profile_loaders = profile.get("loaders") + if not profile_loaders: + self.log.warning(( + "Build profile has missing loaders configuration: {0}" + ).format(json.dumps(profile, indent=4))) + continue + + # Check if any loader is available + loaders_match = False + for loader_name in profile_loaders: + if loader_name in loaders_by_name: + loaders_match = True + break + + if not loaders_match: + self.log.warning(( + "All loaders from Build profile are not available: {0}" + ).format(json.dumps(profile, indent=4))) + continue + + # Check families + profile_families = profile.get("families") + if not profile_families: + self.log.warning(( + "Build profile is missing families configuration: {0}" + ).format(json.dumps(profile, indent=4))) + continue + + # Check representation names + profile_repre_names = profile.get("repre_names") + if not profile_repre_names: + self.log.warning(( + "Build profile is missing" + " representation names filtering: {0}" + ).format(json.dumps(profile, indent=4))) + continue + + # Prepare lowered families and representation names + profile["families_lowered"] = [ + fam.lower() for fam in profile_families + ] + profile["repre_names_lowered"] = [ + name.lower() for name in profile_repre_names + ] + + valid_profiles.append(profile) + + return valid_profiles + + def _prepare_profile_for_subsets(self, subsets, profiles): + """Select profile for each subset by it's data. + + Profiles are filtered for each subset individually. + Profile is filtered by subset's family, optionally by name regex and + representation names set in profile. + It is possible to not find matching profile for subset, in that case + subset is skipped and it is possible that none of subsets have + matching profile. + + Args: + subsets (List[Dict[str, Any]]): Subset documents. + profiles (List[Dict[str, Any]]): Build profiles. + + Returns: + Dict[str, Any]: Profile by subset's id. + """ + + # Prepare subsets + subsets_by_family = self.map_subsets_by_family(subsets) + + profiles_per_subset_id = {} + for family, subsets in subsets_by_family.items(): + family_low = family.lower() + for profile in profiles: + # Skip profile if does not contain family + if family_low not in profile["families_lowered"]: + continue + + # Precompile name filters as regexes + profile_regexes = profile.get("subset_name_filters") + if profile_regexes: + _profile_regexes = [] + for regex in profile_regexes: + _profile_regexes.append(re.compile(regex)) + profile_regexes = _profile_regexes + + # TODO prepare regex compilation + for subset in subsets: + # Verify regex filtering (optional) + if profile_regexes: + valid = False + for pattern in profile_regexes: + if re.match(pattern, subset["name"]): + valid = True + break + + if not valid: + continue + + profiles_per_subset_id[subset["_id"]] = profile + + # break profiles loop on finding the first matching profile + break + return profiles_per_subset_id + + def load_containers_by_asset_data( + self, asset_entity_data, build_profiles, loaders_by_name + ): + """Load containers for entered asset entity by Build profiles. + + Args: + asset_entity_data (Dict[str, Any]): Prepared data with subsets, + last versions and representations for specific asset. + build_profiles (Dict[str, Any]): Build profiles. + loaders_by_name (Dict[str, LoaderPlugin]): Available loaders + per name. + + Returns: + Dict[str, Any]: Output contains asset document + and loaded containers. 
+ """ + + # Make sure all data are not empty + if not asset_entity_data or not build_profiles or not loaders_by_name: + return + + asset_entity = asset_entity_data["asset_entity"] + + valid_profiles = self._filter_build_profiles( + build_profiles, loaders_by_name + ) + if not valid_profiles: + self.log.warning( + "There are not valid Workfile profiles. Skipping process." + ) + return + + self.log.debug("Valid Workfile profiles: {}".format(valid_profiles)) + + subsets_by_id = {} + version_by_subset_id = {} + repres_by_version_id = {} + for subset_id, in_data in asset_entity_data["subsets"].items(): + subset_entity = in_data["subset_entity"] + subsets_by_id[subset_entity["_id"]] = subset_entity + + version_data = in_data["version"] + version_entity = version_data["version_entity"] + version_by_subset_id[subset_id] = version_entity + repres_by_version_id[version_entity["_id"]] = ( + version_data["repres"] + ) + + if not subsets_by_id: + self.log.warning("There are not subsets for asset {0}".format( + asset_entity["name"] + )) + return + + profiles_per_subset_id = self._prepare_profile_for_subsets( + subsets_by_id.values(), valid_profiles + ) + if not profiles_per_subset_id: + self.log.warning("There are not valid subsets.") + return + + valid_repres_by_subset_id = collections.defaultdict(list) + for subset_id, profile in profiles_per_subset_id.items(): + profile_repre_names = profile["repre_names_lowered"] + + version_entity = version_by_subset_id[subset_id] + version_id = version_entity["_id"] + repres = repres_by_version_id[version_id] + for repre in repres: + repre_name_low = repre["name"].lower() + if repre_name_low in profile_repre_names: + valid_repres_by_subset_id[subset_id].append(repre) + + # DEBUG message + msg = "Valid representations for Asset: `{}`".format( + asset_entity["name"] + ) + for subset_id, repres in valid_repres_by_subset_id.items(): + subset = subsets_by_id[subset_id] + msg += "\n# Subset Name/ID: `{}`/{}".format( + subset["name"], subset_id + ) + for repre in repres: + msg += "\n## Repre name: `{}`".format(repre["name"]) + + self.log.debug(msg) + + containers = self._load_containers( + valid_repres_by_subset_id, subsets_by_id, + profiles_per_subset_id, loaders_by_name + ) + + return { + "asset_entity": asset_entity, + "containers": containers + } + + def _load_containers( + self, repres_by_subset_id, subsets_by_id, + profiles_per_subset_id, loaders_by_name + ): + """Real load by collected data happens here. + + Loading of representations per subset happens here. Each subset can + loads one representation. Loading is tried in specific order. + Representations are tried to load by names defined in configuration. + If subset has representation matching representation name each loader + is tried to load it until any is successful. If none of them was + successful then next representation name is tried. + Subset process loop ends when any representation is loaded or + all matching representations were already tried. + + Args: + repres_by_subset_id (Dict[str, Dict[str, Any]]): Available + representations mapped by their parent (subset) id. + subsets_by_id (Dict[str, Dict[str, Any]]): Subset documents + mapped by their id. + profiles_per_subset_id (Dict[str, Dict[str, Any]]): Build profiles + mapped by subset id. + loaders_by_name (Dict[str, LoaderPlugin]): Available loaders + per name. + + Returns: + List[Dict[str, Any]]: Objects of loaded containers. + """ + + loaded_containers = [] + + # Get subset id order from build presets. 
+        build_presets = self.build_presets.get("current_context", [])
+        build_presets += self.build_presets.get("linked_assets", [])
+        subset_ids_ordered = []
+        for preset in build_presets:
+            for preset_family in preset["families"]:
+                for subset_id, subset in subsets_by_id.items():
+                    if preset_family not in subset["data"].get("families", []):
+                        continue
+
+                    subset_ids_ordered.append(subset_id)
+
+        # Order representations from subsets.
+        self.log.debug("repres_by_subset_id: {}".format(repres_by_subset_id))
+        representations_ordered = []
+        representations = []
+        for ordered_id in subset_ids_ordered:
+            for subset_id, repres in repres_by_subset_id.items():
+                if repres in representations:
+                    continue
+
+                if ordered_id == subset_id:
+                    representations_ordered.append((subset_id, repres))
+                    representations.append(repres)
+
+        self.log.debug("representations: {}".format(representations))
+
+        # Load ordered representations.
+        for subset_id, repres in representations_ordered:
+            subset_name = subsets_by_id[subset_id]["name"]
+
+            profile = profiles_per_subset_id[subset_id]
+            loaders_last_idx = len(profile["loaders"]) - 1
+            repre_names_last_idx = len(profile["repre_names_lowered"]) - 1
+
+            repre_by_low_name = {
+                repre["name"].lower(): repre for repre in repres
+            }
+
+            is_loaded = False
+            for repre_name_idx, profile_repre_name in enumerate(
+                profile["repre_names_lowered"]
+            ):
+                # Break iteration if representation was already loaded
+                if is_loaded:
+                    break
+
+                repre = repre_by_low_name.get(profile_repre_name)
+                if not repre:
+                    continue
+
+                for loader_idx, loader_name in enumerate(profile["loaders"]):
+                    if is_loaded:
+                        break
+
+                    loader = loaders_by_name.get(loader_name)
+                    if not loader:
+                        continue
+                    try:
+                        container = load_container(
+                            loader,
+                            repre["_id"],
+                            name=subset_name
+                        )
+                        loaded_containers.append(container)
+                        is_loaded = True
+
+                    except Exception as exc:
+                        if isinstance(exc, IncompatibleLoaderError):
+                            self.log.info((
+                                "Loader `{}` is not compatible with"
+                                " representation `{}`"
+                            ).format(loader_name, repre["name"]))
+
+                        else:
+                            self.log.error(
+                                "Unexpected error happened during loading",
+                                exc_info=True
+                            )
+
+                        msg = "Loading failed."
+                        if loader_idx < loaders_last_idx:
+                            msg += " Trying next loader."
+                        elif repre_name_idx < repre_names_last_idx:
+                            msg += " Trying next representation."
+                        else:
+                            msg += (
+                                " Loading of subset `{}` was not successful."
+                            ).format(subset_name)
+                        self.log.info(msg)
+
+        return loaded_containers
+
+    def _collect_last_version_repres(self, asset_docs):
+        """Collect subsets, versions and representations for asset entities.
+
+        Args:
+            asset_docs (List[Dict[str, Any]]): Asset entities for which
+                the data should be found.
+
+        Returns:
+            Dict[str, Any]: Collected entities.
+
+        Example output:
+        ```
+        {
+            {Asset ID}: {
+                "asset_entity": <asset entity>,
+                "subsets": {
+                    {Subset ID}: {
+                        "subset_entity": <subset entity>,
+                        "version": {
+                            "version_entity": <version entity>,
+                            "repres": [
+                                <representation 1>, <representation 2>, ...
+                            ]
+                        }
+                    },
+                    ...
+                }
+            },
+            ...
+        }
+        output[asset_id]["subsets"][subset_id]["version"]["repres"]
+        ```
+        """
+
+        output = {}
+        if not asset_docs:
+            return output
+
+        asset_docs_by_ids = {asset["_id"]: asset for asset in asset_docs}
+
+        project_name = legacy_io.active_project()
+        subsets = list(get_subsets(
+            project_name, asset_ids=asset_docs_by_ids.keys()
+        ))
+        subset_entity_by_ids = {subset["_id"]: subset for subset in subsets}
+
+        last_version_by_subset_id = get_last_versions(
+            project_name, subset_entity_by_ids.keys()
+        )
+        last_version_docs_by_id = {
+            version["_id"]: version
+            for version in last_version_by_subset_id.values()
+        }
+        repre_docs = get_representations(
+            project_name, version_ids=last_version_docs_by_id.keys()
+        )
+
+        for repre_doc in repre_docs:
+            version_id = repre_doc["parent"]
+            version_doc = last_version_docs_by_id[version_id]
+
+            subset_id = version_doc["parent"]
+            subset_doc = subset_entity_by_ids[subset_id]
+
+            asset_id = subset_doc["parent"]
+            asset_doc = asset_docs_by_ids[asset_id]
+
+            if asset_id not in output:
+                output[asset_id] = {
+                    "asset_entity": asset_doc,
+                    "subsets": {}
+                }
+
+            if subset_id not in output[asset_id]["subsets"]:
+                output[asset_id]["subsets"][subset_id] = {
+                    "subset_entity": subset_doc,
+                    "version": {
+                        "version_entity": version_doc,
+                        "repres": []
+                    }
+                }
+
+            output[asset_id]["subsets"][subset_id]["version"]["repres"].append(
+                repre_doc
+            )
+
+        return output
diff --git a/openpype/pipeline/workfile/path_resolving.py b/openpype/pipeline/workfile/path_resolving.py
new file mode 100644
index 0000000000..ed1d1d793e
--- /dev/null
+++ b/openpype/pipeline/workfile/path_resolving.py
@@ -0,0 +1,464 @@
+import os
+import re
+import copy
+import platform
+
+from openpype.client import get_project, get_asset_by_name
+from openpype.settings import get_project_settings
+from openpype.lib import (
+    filter_profiles,
+    Logger,
+    StringTemplate,
+)
+from openpype.pipeline import Anatomy
+from openpype.pipeline.template_data import get_template_data
+
+
+def get_workfile_template_key_from_context(
+    asset_name, task_name, host_name, project_name, project_settings=None
+):
+    """Helper function to get the template key for a workfile template.
+
+    Does the same as `get_workfile_template_key` but resolves the task type
+    from the passed "session context".
+
+    Args:
+        asset_name (str): Name of asset document.
+        task_name (str): Task name for which the template key is retrieved.
+            Must be available on the asset document under `data.tasks`.
+        host_name (str): Name of host implementation for which the workfile
+            is used.
+        project_name (str): Project name where the asset and task are.
+        project_settings (Dict[str, Any]): Project settings for the passed
+            'project_name'. Not required, but makes the function faster.
+    """
+
+    asset_doc = get_asset_by_name(
+        project_name, asset_name, fields=["data.tasks"]
+    )
+    asset_tasks = asset_doc.get("data", {}).get("tasks") or {}
+    task_info = asset_tasks.get(task_name) or {}
+    task_type = task_info.get("type")
+
+    return get_workfile_template_key(
+        task_type, host_name, project_name, project_settings
+    )
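For illustration, resolving a template key from string context could look like the sketch below; the context values are hypothetical and the result falls back to "work" when no profile matches.

```python
from openpype.pipeline.workfile import get_workfile_template_key_from_context

# All context values here are hypothetical.
template_key = get_workfile_template_key_from_context(
    asset_name="sh010",
    task_name="compositing",
    host_name="nuke",
    project_name="demo_project",
)
print(template_key)  # e.g. "work"
```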
+
+
+def get_workfile_template_key(
+    task_type, host_name, project_name, project_settings=None
+):
+    """Workfile template key which should be used to get workfile template.
+
+    Uses profiles from project settings to return the right template for
+    the passed task type and host name.
+
+    Args:
+        task_type (str): Name of task type.
+        host_name (str): Name of host implementation (e.g. "maya", "nuke", ...)
+        project_name (str): Name of project in which context the settings
+            should be looked up.
+        project_settings (Dict[str, Any]): Prepared project settings for
+            the project name. Optional, to make processing faster.
+    """
+
+    default = "work"
+    if not task_type or not host_name:
+        return default
+
+    if not project_settings:
+        project_settings = get_project_settings(project_name)
+
+    try:
+        profiles = (
+            project_settings
+            ["global"]
+            ["tools"]
+            ["Workfiles"]
+            ["workfile_template_profiles"]
+        )
+    except Exception:
+        profiles = []
+
+    if not profiles:
+        return default
+
+    profile_filter = {
+        "task_types": task_type,
+        "hosts": host_name
+    }
+    profile = filter_profiles(profiles, profile_filter)
+    if profile:
+        return profile["workfile_template"] or default
+    return default
+
+
+def get_workdir_with_workdir_data(
+    workdir_data,
+    project_name,
+    anatomy=None,
+    template_key=None,
+    project_settings=None
+):
+    """Fill workdir path from entered data and project's anatomy.
+
+    It is possible to pass only the project's name instead of the project's
+    anatomy, but one of them **must** be entered. It is preferred to enter
+    the anatomy if it is available, as initialization of a new Anatomy
+    object may be time consuming.
+
+    Args:
+        workdir_data (Dict[str, Any]): Data to fill workdir template.
+        project_name (str): Project's name.
+        anatomy (Anatomy): Anatomy object for the specific project. Faster
+            processing if it is passed.
+        template_key (str): Key of work templates in anatomy templates. If not
+            passed, `get_workfile_template_key` is used to get it.
+        project_settings (Dict[str, Any]): Prepared project settings for
+            the project name. Optional, to make processing faster. Used only
+            if 'template_key' is not passed.
+
+    Returns:
+        TemplateResult: Workdir path.
+    """
+
+    if not anatomy:
+        anatomy = Anatomy(project_name)
+
+    if not template_key:
+        template_key = get_workfile_template_key(
+            workdir_data["task"]["type"],
+            workdir_data["app"],
+            workdir_data["project"]["name"],
+            project_settings
+        )
+
+    anatomy_filled = anatomy.format(workdir_data)
+    # Output is TemplateResult object which contains useful data
+    output = anatomy_filled[template_key]["folder"]
+    if output:
+        return output.normalized()
+    return output
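A minimal sketch of computing a workdir this way, assuming a hypothetical project and asset; the anatomy object is passed in to avoid re-initialization, mirroring how `get_workdir` below composes the same calls.

```python
from openpype.client import get_project, get_asset_by_name
from openpype.pipeline import Anatomy
from openpype.pipeline.template_data import get_template_data
from openpype.pipeline.workfile import get_workdir_with_workdir_data

# Hypothetical context values.
project_name = "demo_project"
anatomy = Anatomy(project_name)
workdir_data = get_template_data(
    get_project(project_name),
    get_asset_by_name(project_name, "sh010"),
    "compositing",
    "nuke",
)
workdir = get_workdir_with_workdir_data(workdir_data, project_name, anatomy)
```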
+ """ + + if not anatomy: + anatomy = Anatomy(project_doc["name"]) + + workdir_data = get_template_data( + project_doc, asset_doc, task_name, host_name + ) + # Output is TemplateResult object which contain useful data + return get_workdir_with_workdir_data( + workdir_data, + anatomy.project_name, + anatomy, + template_key, + project_settings + ) + + +def get_last_workfile_with_version( + workdir, file_template, fill_data, extensions +): + """Return last workfile version. + + Usign workfile template and it's filling data find most possible last + version of workfile which was created for the context. + + Functionality is fully based on knowing which keys are optional or what + values are expected as value. + + The last modified file is used if more files can be considered as + last workfile. + + Args: + workdir (str): Path to dir where workfiles are stored. + file_template (str): Template of file name. + fill_data (Dict[str, Any]): Data for filling template. + extensions (Iterable[str]): All allowed file extensions of workfile. + + Returns: + Tuple[Union[str, None], Union[int, None]]: Last workfile with version + if there is any workfile otherwise None for both. + """ + + if not os.path.exists(workdir): + return None, None + + dotted_extensions = set() + for ext in extensions: + if not ext.startswith("."): + ext = ".{}".format(ext) + dotted_extensions.add(ext) + + # Fast match on extension + filenames = [ + filename + for filename in os.listdir(workdir) + if os.path.splitext(filename)[-1] in dotted_extensions + ] + + # Build template without optionals, version to digits only regex + # and comment to any definable value. + # Escape extensions dot for regex + regex_exts = [ + "\\" + ext + for ext in dotted_extensions + ] + ext_expression = "(?:" + "|".join(regex_exts) + ")" + + # Replace `.{ext}` with `{ext}` so we are sure there is not dot at the end + file_template = re.sub(r"\.?{ext}", ext_expression, file_template) + # Replace optional keys with optional content regex + file_template = re.sub(r"<.*?>", r".*?", file_template) + # Replace `{version}` with group regex + file_template = re.sub(r"{version.*?}", r"([0-9]+)", file_template) + file_template = re.sub(r"{comment.*?}", r".+?", file_template) + file_template = StringTemplate.format_strict_template( + file_template, fill_data + ) + + # Match with ignore case on Windows due to the Windows + # OS not being case-sensitive. This avoids later running + # into the error that the file did exist if it existed + # with a different upper/lower-case. 
+    kwargs = {}
+    if platform.system().lower() == "windows":
+        kwargs["flags"] = re.IGNORECASE
+
+    # Get highest version among existing matching files
+    version = None
+    output_filenames = []
+    for filename in sorted(filenames):
+        match = re.match(file_template, filename, **kwargs)
+        if not match:
+            continue
+
+        file_version = int(match.group(1))
+        if version is None or file_version > version:
+            output_filenames[:] = []
+            version = file_version
+
+        if file_version == version:
+            output_filenames.append(filename)
+
+    output_filename = None
+    if output_filenames:
+        if len(output_filenames) == 1:
+            output_filename = output_filenames[0]
+        else:
+            last_time = None
+            for _output_filename in output_filenames:
+                full_path = os.path.join(workdir, _output_filename)
+                mod_time = os.path.getmtime(full_path)
+                if last_time is None or last_time < mod_time:
+                    output_filename = _output_filename
+                    last_time = mod_time
+
+    return output_filename, version
+
+
+def get_last_workfile(
+    workdir, file_template, fill_data, extensions, full_path=False
+):
+    """Return last workfile filename.
+
+    Returns a file with version 1 if there is no workfile yet.
+
+    Args:
+        workdir (str): Path to dir where workfiles are stored.
+        file_template (str): Template of file name.
+        fill_data (Dict[str, Any]): Data for filling template.
+        extensions (Iterable[str]): All allowed file extensions of workfile.
+        full_path (bool): Full path to file is returned if set to True.
+
+    Returns:
+        str: Last or first workfile as filename or as full path to filename.
+    """
+
+    filename, version = get_last_workfile_with_version(
+        workdir, file_template, fill_data, extensions
+    )
+    if filename is None:
+        data = copy.deepcopy(fill_data)
+        data["version"] = 1
+        data.pop("comment", None)
+        if not data.get("ext"):
+            data["ext"] = list(extensions)[0]
+        data["ext"] = data["ext"].replace('.', '')
+        filename = StringTemplate.format_strict_template(file_template, data)
+
+    if full_path:
+        return os.path.normpath(os.path.join(workdir, filename))
+
+    return filename
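To make the version resolving concrete, a small sketch with a hypothetical workdir and an OpenPype-style template, where `<_{comment}>` is an optional block and `{version:0>3}` becomes the captured digit group:

```python
from openpype.pipeline.workfile import get_last_workfile_with_version

# Hypothetical workdir, template and fill data.
workdir = "/projects/demo/shots/sh010/work/compositing"
file_template = "{asset}_{task[name]}_v{version:0>3}<_{comment}>.{ext}"
fill_data = {"asset": "sh010", "task": {"name": "compositing"}}

filename, version = get_last_workfile_with_version(
    workdir, file_template, fill_data, extensions=[".nk"]
)
# e.g. ("sh010_compositing_v004_fixNotes.nk", 4), or (None, None)
# when no file in the workdir matches the template.
```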
+ """ + + log = Logger.get_logger("CustomWorkfileResolve") + + project_name = project_doc["name"] + if project_settings is None: + project_settings = get_project_settings(project_name) + + host_settings = project_settings.get(host_name) + if not host_settings: + log.info("Host \"{}\" doesn't have settings".format(host_name)) + return None + + workfile_builder_settings = host_settings.get("workfile_builder") + if not workfile_builder_settings: + log.info(( + "Seems like old version of settings is used." + " Can't access custom templates in host \"{}\"." + ).format(host_name)) + return + + if not workfile_builder_settings["create_first_version"]: + log.info(( + "Project \"{}\" has turned off to create first workfile for" + " host \"{}\"" + ).format(project_name, host_name)) + return + + # Backwards compatibility + template_profiles = workfile_builder_settings.get("custom_templates") + if not template_profiles: + log.info( + "Custom templates are not filled. Skipping template copy." + ) + return + + if anatomy is None: + anatomy = Anatomy(project_name) + + # get project, asset, task anatomy context data + anatomy_context_data = get_template_data( + project_doc, asset_doc, task_name, host_name + ) + # add root dict + anatomy_context_data["root"] = anatomy.roots + + # get task type for the task in context + current_task_type = anatomy_context_data["task"]["type"] + + # get path from matching profile + matching_item = filter_profiles( + template_profiles, + {"task_types": current_task_type} + ) + # when path is available try to format it in case + # there are some anatomy template strings + if matching_item: + template = matching_item["path"][platform.system().lower()] + return StringTemplate.format_strict_template( + template, anatomy_context_data + ).normalized() + + return None + + +def get_custom_workfile_template_by_string_context( + project_name, + asset_name, + task_name, + host_name, + anatomy=None, + project_settings=None +): + """Filter and fill workfile template profiles by passed context. + + Passed context are string representations of project, asset and task. + Function will query documents of project and asset to be able use + `get_custom_workfile_template` for rest of logic. + + Args: + project_name(str): Project name. + asset_name(str): Asset name. + task_name(str): Task name. + host_name (str): Name of host. + anatomy(Anatomy): Optionally prepared anatomy object for passed + project. + project_settings(Dict[str, Any]): Preloaded project settings. + + Returns: + str: Path to template or None if none of profiles match current + context. (Existence of formatted path is not validated.) + None: If no profile is matching context. 
+ """ + + project_doc = get_project(project_name) + asset_doc = get_asset_by_name(project_name, asset_name) + + return get_custom_workfile_template( + project_doc, asset_doc, task_name, host_name, anatomy, project_settings + ) diff --git a/openpype/plugins/publish/collect_modules.py b/openpype/plugins/publish/collect_modules.py index 2f6cb1ef0e..d76096bcd9 100644 --- a/openpype/plugins/publish/collect_modules.py +++ b/openpype/plugins/publish/collect_modules.py @@ -7,7 +7,7 @@ import pyblish.api class CollectModules(pyblish.api.ContextPlugin): """Collect OpenPype modules.""" - order = pyblish.api.CollectorOrder - 0.45 + order = pyblish.api.CollectorOrder - 0.5 label = "OpenPype Modules" def process(self, context): diff --git a/openpype/plugins/publish/extract_review.py b/openpype/plugins/publish/extract_review.py index 7442d3aacb..e16f324e0a 100644 --- a/openpype/plugins/publish/extract_review.py +++ b/openpype/plugins/publish/extract_review.py @@ -1210,7 +1210,6 @@ class ExtractReview(pyblish.api.InstancePlugin): # Get instance data pixel_aspect = temp_data["pixel_aspect"] - if reformat_in_baking: self.log.debug(( "Using resolution from input. It is already " @@ -1230,6 +1229,10 @@ class ExtractReview(pyblish.api.InstancePlugin): # - settings value can't have None but has value of 0 output_width = output_def.get("width") or output_width or None output_height = output_def.get("height") or output_height or None + # Force to use input resolution if output resolution was not defined + # in settings. Resolution from instance is not used when + # 'use_input_res' is set to 'True'. + use_input_res = False # Overscal color overscan_color_value = "black" @@ -1241,6 +1244,17 @@ class ExtractReview(pyblish.api.InstancePlugin): ) self.log.debug("Overscan color: `{}`".format(overscan_color_value)) + # Scale input to have proper pixel aspect ratio + # - scale width by the pixel aspect ratio + scale_pixel_aspect = output_def.get("scale_pixel_aspect", True) + if scale_pixel_aspect and pixel_aspect != 1: + # Change input width after pixel aspect + input_width = int(input_width * pixel_aspect) + use_input_res = True + filters.append(( + "scale={}x{}:flags=lanczos".format(input_width, input_height) + )) + # Convert overscan value video filters overscan_crop = output_def.get("overscan_crop") overscan = OverscanCrop( @@ -1251,13 +1265,10 @@ class ExtractReview(pyblish.api.InstancePlugin): # resolution by it's values if overscan_crop_filters: filters.extend(overscan_crop_filters) + # Change input resolution after overscan crop input_width = overscan.width() input_height = overscan.height() - # Use output resolution as inputs after cropping to skip usage of - # instance data resolution - if output_width is None or output_height is None: - output_width = input_width - output_height = input_height + use_input_res = True # Make sure input width and height is not an odd number input_width_is_odd = bool(input_width % 2 != 0) @@ -1283,8 +1294,10 @@ class ExtractReview(pyblish.api.InstancePlugin): self.log.debug("input_width: `{}`".format(input_width)) self.log.debug("input_height: `{}`".format(input_height)) - # Use instance resolution if output definition has not set it. 
-        if output_width is None or output_height is None:
+        # Use instance resolution if output definition has not set it
+        # - used only when no scale changes were applied to the input
+        #   (tracked by 'use_input_res')
+        if not use_input_res and (output_width is None or output_height is None):
             output_width = temp_data["resolution_width"]
             output_height = temp_data["resolution_height"]

@@ -1326,7 +1339,6 @@ class ExtractReview(pyblish.api.InstancePlugin):
             output_width == input_width
             and output_height == input_height
             and not letter_box_enabled
-            and pixel_aspect == 1
         ):
             self.log.debug(
                 "Output resolution is same as input's"
@@ -1336,39 +1348,8 @@ class ExtractReview(pyblish.api.InstancePlugin):
             new_repre["resolutionWidth"] = input_width
             new_repre["resolutionHeight"] = input_height
             return filters

-        # defining image ratios
-        input_res_ratio = (
-            (float(input_width) * pixel_aspect) / input_height
-        )
-        output_res_ratio = float(output_width) / float(output_height)
-        self.log.debug("input_res_ratio: `{}`".format(input_res_ratio))
-        self.log.debug("output_res_ratio: `{}`".format(output_res_ratio))
-
-        # Round ratios to 2 decimal places for comparing
-        input_res_ratio = round(input_res_ratio, 2)
-        output_res_ratio = round(output_res_ratio, 2)
-
-        # get scale factor
-        scale_factor_by_width = (
-            float(output_width) / (input_width * pixel_aspect)
-        )
-        scale_factor_by_height = (
-            float(output_height) / input_height
-        )
-
-        self.log.debug(
-            "scale_factor_by_with: `{}`".format(scale_factor_by_width)
-        )
-        self.log.debug(
-            "scale_factor_by_height: `{}`".format(scale_factor_by_height)
-        )
-
-        # scaling none square pixels and 1920 width
-        if (
-            input_height != output_height
-            or input_width != output_width
-            or pixel_aspect != 1
-        ):
+        if input_height != output_height or input_width != output_width:
             filters.extend([
                 (
                     "scale={}x{}"
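To illustrate the new pixel-aspect handling above: for anamorphic footage the input width is pre-scaled by the pixel aspect ratio before any other filter is applied. A small sketch with hypothetical values:

```python
# Hypothetical anamorphic input: 1920x1080 with a 2.0 pixel aspect ratio.
input_width, input_height, pixel_aspect = 1920, 1080, 2.0

filters = []
if pixel_aspect != 1:
    # Widen the frame so pixels become square, as the extractor does.
    input_width = int(input_width * pixel_aspect)
    filters.append(
        "scale={}x{}:flags=lanczos".format(input_width, input_height)
    )

print(filters)  # ['scale=3840x1080:flags=lanczos']
```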
diff --git a/openpype/plugins/publish/help/validate_containers.xml b/openpype/plugins/publish/help/validate_containers.xml
index e540c3c7a9..5d18bb4c19 100644
--- a/openpype/plugins/publish/help/validate_containers.xml
+++ b/openpype/plugins/publish/help/validate_containers.xml
@@ -3,9 +3,9 @@
 Not up-to-date assets

-## Obsolete containers found
+## Outdated containers found

-Scene contains one or more obsolete loaded containers, eg. items loaded into scene by Loader.
+Scene contains one or more outdated loaded containers, e.g. versions loaded into the scene by Loader are not the latest.

 ### How to repair?

@@ -17,8 +17,7 @@
 Use 'Scene Inventory' and update all highlighted old container to latest OR

 ### __Detailed Info__ (optional)

-This validator protects you from rendering obsolete content, someone modified some referenced asset in this scene, eg.
-   by skipping this you would ignore changes to that asset.
+This validates whether you're working with the latest versions of published content loaded into your scene. This protects you from using outdated versions of an asset.
\ No newline at end of file
diff --git a/openpype/plugins/publish/integrate_hero_version.py b/openpype/plugins/publish/integrate_hero_version.py
index 735b7e50fa..7d698ff98d 100644
--- a/openpype/plugins/publish/integrate_hero_version.py
+++ b/openpype/plugins/publish/integrate_hero_version.py
@@ -313,13 +313,9 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin):
             }
             repre_context = template_filled.used_values
             for key in self.db_representation_context_keys:
-                if (
-                    key in repre_context or
-                    key not in anatomy_data
-                ):
-                    continue
-
-                repre_context[key] = anatomy_data[key]
+                value = anatomy_data.get(key)
+                if value is not None:
+                    repre_context[key] = value

             # Prepare new repre
             repre = copy.deepcopy(repre_info["representation"])
diff --git a/openpype/plugins/publish/start_timer.py b/openpype/plugins/publish/start_timer.py
deleted file mode 100644
index 112d92bef0..0000000000
--- a/openpype/plugins/publish/start_timer.py
+++ /dev/null
@@ -1,14 +0,0 @@
-import pyblish.api
-
-from openpype.lib import change_timer_to_current_context
-
-
-class StartTimer(pyblish.api.ContextPlugin):
-    label = "Start Timer"
-    order = pyblish.api.IntegratorOrder + 1
-    hosts = ["*"]
-
-    def process(self, context):
-        modules_settings = context.data["system_settings"]["modules"]
-        if modules_settings["timers_manager"]["disregard_publishing"]:
-            change_timer_to_current_context()
diff --git a/openpype/plugins/publish/stop_timer.py b/openpype/plugins/publish/stop_timer.py
deleted file mode 100644
index 414e43a3c4..0000000000
--- a/openpype/plugins/publish/stop_timer.py
+++ /dev/null
@@ -1,17 +0,0 @@
-import os
-import requests
-
-import pyblish.api
-
-
-class StopTimer(pyblish.api.ContextPlugin):
-    label = "Stop Timer"
-    order = pyblish.api.ExtractorOrder - 0.49
-    hosts = ["*"]
-
-    def process(self, context):
-        modules_settings = context.data["system_settings"]["modules"]
-        if modules_settings["timers_manager"]["disregard_publishing"]:
-            webserver_url = os.environ.get("OPENPYPE_WEBSERVER_URL")
-            rest_api_url = "{}/timers_manager/stop_timer".format(webserver_url)
-            requests.post(rest_api_url)
diff --git a/openpype/scripts/fusion_switch_shot.py b/openpype/scripts/fusion_switch_shot.py
index 15f189e7cb..fc22f060a2 100644
--- a/openpype/scripts/fusion_switch_shot.py
+++ b/openpype/scripts/fusion_switch_shot.py
@@ -17,7 +17,7 @@ from openpype.pipeline import (
     legacy_io,
 )

-from openpype.lib.avalon_context import get_workdir_from_session
+from openpype.pipeline.context_tools import get_workdir_from_session

 log = logging.getLogger("Update Slap Comp")

diff --git a/openpype/settings/defaults/project_settings/global.json b/openpype/settings/defaults/project_settings/global.json
index e509db2791..0ff9363ba7 100644
--- a/openpype/settings/defaults/project_settings/global.json
+++ b/openpype/settings/defaults/project_settings/global.json
@@ -85,6 +85,7 @@
             ],
             "width": 0,
             "height": 0,
+            "scale_pixel_aspect": true,
             "bg_color": [
                 0,
                 0,
diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json
index b9d0b7daba..e1aa230b49 100644
--- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json
+++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json
@@ -319,6 +319,15 @@
                         "minimum": 0,
                         "maximum": 100000
                     },
+                    {
+                        "type": "label",
+                        "label": "Rescale input when its pixel aspect ratio is not 1. Useful for anamorphic reviews."
+                    },
+                    {
+                        "key": "scale_pixel_aspect",
+                        "label": "Scale pixel aspect",
+                        "type": "boolean"
+                    },
                     {
                         "type": "label",
                         "label": "Background color is used only when input have transparency and Alpha is higher than 0."
diff --git a/openpype/tools/loader/model.py b/openpype/tools/loader/model.py
index a5174bd804..3ce44ea6c8 100644
--- a/openpype/tools/loader/model.py
+++ b/openpype/tools/loader/model.py
@@ -272,15 +272,15 @@ class SubsetsModel(TreeModel, BaseRepresentationModel):

         # update availability on active site when version changes
         if self.sync_server.enabled and version_doc:
-            query = self._repre_per_version_pipeline(
+            repre_info = self.sync_server.get_repre_info_for_versions(
+                project_name,
                 [version_doc["_id"]],
                 self.active_site,
                 self.remote_site
             )
-            docs = list(self.dbcon.aggregate(query))
-            if docs:
-                repre = docs.pop()
-                version_doc["data"].update(self._get_repre_dict(repre))
+            if repre_info:
+                version_doc["data"].update(
+                    self._get_repre_dict(repre_info[0]))

         self.set_version(index, version_doc)

@@ -478,16 +478,16 @@ class SubsetsModel(TreeModel, BaseRepresentationModel):
         for _subset_id, doc in last_versions_by_subset_id.items():
             version_ids.add(doc["_id"])

-        query = self._repre_per_version_pipeline(
+        repres = self.sync_server.get_repre_info_for_versions(
+            project_name,
             list(version_ids),
             self.active_site,
             self.remote_site
         )
-
-        for doc in self.dbcon.aggregate(query):
+        for repre in repres:
             if self._doc_fetching_stop:
                 return
-            doc["active_provider"] = self.active_provider
-            doc["remote_provider"] = self.remote_provider
-            repre_info[doc["_id"]] = doc
+            repre["active_provider"] = self.active_provider
+            repre["remote_provider"] = self.remote_provider
+            repre_info[repre["_id"]] = repre

         self._doc_payload = {
             "asset_docs_by_id": asset_docs_by_id,
@@ -827,83 +827,6 @@ class SubsetsModel(TreeModel, BaseRepresentationModel):

         return data

-    def _repre_per_version_pipeline(self, version_ids,
-                                    active_site, remote_site):
-        query = [
-            {"$match": {"parent": {"$in": version_ids},
-                        "type": "representation",
-                        "files.sites.name": {"$exists": 1}}},
-            {"$unwind": "$files"},
-            {'$addFields': {
-                'order_local': {
-                    '$filter': {
-                        'input': '$files.sites', 'as': 'p',
-                        'cond': {'$eq': ['$$p.name', active_site]}
-                    }
-                }
-            }},
-            {'$addFields': {
-                'order_remote': {
-                    '$filter': {
-                        'input': '$files.sites', 'as': 'p',
-                        'cond': {'$eq': ['$$p.name', remote_site]}
-                    }
-                }
-            }},
-            {'$addFields': {
-                'progress_local': {"$arrayElemAt": [{
-                    '$cond': [
-                        {'$size': "$order_local.progress"},
-                        "$order_local.progress",
-                        # if exists created_dt count is as available
-                        {'$cond': [
-                            {'$size': "$order_local.created_dt"},
-                            [1],
-                            [0]
-                        ]}
-                    ]},
-                    0
-                ]}
-            }},
-            {'$addFields': {
-                'progress_remote': {"$arrayElemAt": [{
-                    '$cond': [
-                        {'$size': "$order_remote.progress"},
-                        "$order_remote.progress",
-                        # if exists created_dt count is as available
-                        {'$cond': [
-                            {'$size': "$order_remote.created_dt"},
-                            [1],
-                            [0]
-                        ]}
-                    ]},
-                    0
-                ]}
-            }},
-            {'$group': {  # first group by repre
-                '_id': '$_id',
-                'parent': {'$first': '$parent'},
-                'avail_ratio_local': {
-                    '$first': {
-                        '$divide': [{'$sum': "$progress_local"}, {'$sum': 1}]
-                    }
-                },
-                'avail_ratio_remote': {
-                    '$first': {
-                        '$divide': [{'$sum': "$progress_remote"}, {'$sum': 1}]
-                    }
-                }
-            }},
-            {'$group': {  # second group by parent, eg version_id
-                '_id': '$parent',
-                'repre_count': {'$sum': 1},  # total representations
-                # fully available representation for site
-                'avail_repre_local': {'$sum': "$avail_ratio_local"},
-                'avail_repre_remote': {'$sum': "$avail_ratio_remote"},
-            }},
-        ]
-        return query
-

 class GroupMemberFilterProxyModel(QtCore.QSortFilterProxyModel):
     """Provide the
feature of filtering group by the acceptance of members diff --git a/openpype/tools/sceneinventory/view.py b/openpype/tools/sceneinventory/view.py index 63d181b2d6..e0e43aaba7 100644 --- a/openpype/tools/sceneinventory/view.py +++ b/openpype/tools/sceneinventory/view.py @@ -551,16 +551,16 @@ class SceneInventoryView(QtWidgets.QTreeView): "toggle": selection_model.Toggle, }[options.get("mode", "select")] - for item in iter_model_rows(model, 0): - item = item.data(InventoryModel.ItemRole) + for index in iter_model_rows(model, 0): + item = index.data(InventoryModel.ItemRole) if item.get("isGroupNode"): continue name = item.get("objectName") if name in object_names: - self.scrollTo(item) # Ensure item is visible + self.scrollTo(index) # Ensure item is visible flags = select_mode | selection_model.Rows - selection_model.select(item, flags) + selection_model.select(index, flags) object_names.remove(name) diff --git a/openpype/tools/workfiles/files_widget.py b/openpype/tools/workfiles/files_widget.py index 34692b7102..a4109c511e 100644 --- a/openpype/tools/workfiles/files_widget.py +++ b/openpype/tools/workfiles/files_widget.py @@ -12,7 +12,6 @@ from openpype.tools.utils import PlaceholderLineEdit from openpype.tools.utils.delegates import PrettyTimeDelegate from openpype.lib import ( emit_event, - get_workfile_template_key, create_workdir_extra_folders, ) from openpype.lib.avalon_context import ( @@ -24,6 +23,8 @@ from openpype.pipeline import ( legacy_io, Anatomy, ) +from openpype.pipeline.workfile import get_workfile_template_key + from .model import ( WorkAreaFilesModel, PublishFilesModel, diff --git a/openpype/tools/workfiles/save_as_dialog.py b/openpype/tools/workfiles/save_as_dialog.py index ea602846e7..cded4eb1a5 100644 --- a/openpype/tools/workfiles/save_as_dialog.py +++ b/openpype/tools/workfiles/save_as_dialog.py @@ -5,11 +5,11 @@ import logging from Qt import QtWidgets, QtCore -from openpype.lib import get_last_workfile_with_version from openpype.pipeline import ( registered_host, legacy_io, ) +from openpype.pipeline.workfile import get_last_workfile_with_version from openpype.pipeline.template_data import get_template_data_with_names from openpype.tools.utils import PlaceholderLineEdit diff --git a/openpype/tools/workfiles/window.py b/openpype/tools/workfiles/window.py index 0b0d67e589..de42b80d64 100644 --- a/openpype/tools/workfiles/window.py +++ b/openpype/tools/workfiles/window.py @@ -1,18 +1,20 @@ import os import datetime +import copy from Qt import QtCore, QtWidgets, QtGui from openpype.client import ( - get_asset_by_id, get_asset_by_name, get_workfile_info, ) +from openpype.client.operations import ( + OperationsSession, + new_workfile_info_doc, + prepare_workfile_info_update_data, +) from openpype import style from openpype import resources -from openpype.lib import ( - create_workfile_doc, - save_workfile_data_to_doc, -) +from openpype.pipeline import Anatomy from openpype.pipeline import legacy_io from openpype.tools.utils.assets_widget import SingleSelectAssetsWidget from openpype.tools.utils.tasks_widget import TasksWidget @@ -324,10 +326,23 @@ class Window(QtWidgets.QWidget): workfile_doc, data = self.side_panel.get_workfile_data() if not workfile_doc: filepath = self.files_widget._get_selected_filepath() - self._create_workfile_doc(filepath, force=True) - workfile_doc = self._get_current_workfile_doc() + workfile_doc = self._create_workfile_doc(filepath) - save_workfile_data_to_doc(workfile_doc, data, legacy_io) + new_workfile_doc = copy.deepcopy(workfile_doc) + 
new_workfile_doc["data"] = data + update_data = prepare_workfile_info_update_data( + workfile_doc, new_workfile_doc + ) + if not update_data: + return + + project_name = legacy_io.active_project() + + session = OperationsSession() + session.update_entity( + project_name, "workfile", workfile_doc["_id"], update_data + ) + session.commit() def _get_current_workfile_doc(self, filepath=None): if filepath is None: @@ -343,20 +358,32 @@ class Window(QtWidgets.QWidget): project_name, asset_id, task_name, filename ) - def _create_workfile_doc(self, filepath, force=False): - workfile_doc = None - if not force: - workfile_doc = self._get_current_workfile_doc(filepath) + def _create_workfile_doc(self, filepath): + workfile_doc = self._get_current_workfile_doc(filepath) + if workfile_doc: + return workfile_doc - if not workfile_doc: - workdir, filename = os.path.split(filepath) - asset_id = self.assets_widget.get_selected_asset_id() - project_name = legacy_io.active_project() - asset_doc = get_asset_by_id(project_name, asset_id) - task_name = self.tasks_widget.get_selected_task_name() - create_workfile_doc( - asset_doc, task_name, filename, workdir, legacy_io - ) + workdir, filename = os.path.split(filepath) + + project_name = legacy_io.active_project() + asset_id = self.assets_widget.get_selected_asset_id() + task_name = self.tasks_widget.get_selected_task_name() + + anatomy = Anatomy(project_name) + success, rootless_dir = anatomy.find_root_template_from_path(workdir) + filepath = "/".join([ + os.path.normpath(rootless_dir).replace("\\", "/"), + filename + ]) + + workfile_doc = new_workfile_info_doc( + filename, asset_id, task_name, [filepath] + ) + + session = OperationsSession() + session.create_entity(project_name, "workfile", workfile_doc) + session.commit() + return workfile_doc def refresh(self): # Refresh asset widget diff --git a/openpype/version.py b/openpype/version.py index c41e69d00d..6ff5dfb7b5 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.13.1-nightly.1" +__version__ = "3.13.1-nightly.2" diff --git a/pyproject.toml b/pyproject.toml index 994c83d369..9cbdc295ff 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "OpenPype" -version = "3.13.1-nightly.1" # OpenPype +version = "3.13.1-nightly.2" # OpenPype description = "Open VFX and Animation pipeline with support." authors = ["OpenPype Team "] license = "MIT License"