From 10fa0ee5c463de5676a5e209e447c2a845f6b3f6 Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Wed, 15 Dec 2021 16:34:21 +0000 Subject: [PATCH 001/357] Implemented creator for render --- .../unreal/plugins/create/create_render.py | 52 +++++++++++++++++++ 1 file changed, 52 insertions(+) create mode 100644 openpype/hosts/unreal/plugins/create/create_render.py diff --git a/openpype/hosts/unreal/plugins/create/create_render.py b/openpype/hosts/unreal/plugins/create/create_render.py new file mode 100644 index 0000000000..a0bf320225 --- /dev/null +++ b/openpype/hosts/unreal/plugins/create/create_render.py @@ -0,0 +1,52 @@ +import unreal +from openpype.hosts.unreal.api.plugin import Creator +from avalon.unreal import pipeline + + +class CreateRender(Creator): + """Create instance for sequence for rendering""" + + name = "unrealRender" + label = "Unreal - Render" + family = "render" + icon = "cube" + asset_types = ["LevelSequence"] + + root = "/Game/AvalonInstances" + suffix = "_INS" + + def __init__(self, *args, **kwargs): + super(CreateRender, self).__init__(*args, **kwargs) + + def process(self): + name = self.data["subset"] + + print(self.data) + + selection = [] + if (self.options or {}).get("useSelection"): + sel_objects = unreal.EditorUtilityLibrary.get_selected_assets() + selection = [ + a.get_path_name() for a in sel_objects + if a.get_class().get_name() in self.asset_types] + + unreal.log("selection: {}".format(selection)) + # instantiate(self.root, name, self.data, selection, self.suffix) + # container_name = "{}{}".format(name, self.suffix) + + # if we specify assets, create new folder and move them there. If not, + # just create empty folder + # new_name = pipeline.create_folder(self.root, name) + path = "{}/{}".format(self.root, name) + unreal.EditorAssetLibrary.make_directory(path) + + ar = unreal.AssetRegistryHelpers.get_asset_registry() + + for a in selection: + d = self.data.copy() + d["sequence"] = a + asset = ar.get_asset_by_object_path(a).get_asset() + container_name = asset.get_name() + pipeline.create_publish_instance(instance=container_name, path=path) + pipeline.imprint("{}/{}".format(path, container_name), d) + From 4ff7cf67ab7fe852216c7f408161ca0f9d0d5ecf Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Mon, 31 Jan 2022 11:20:13 +0000 Subject: [PATCH 002/357] Loading layouts and cameras now create level sequences for hierarchy --- .../hosts/unreal/plugins/load/load_camera.py | 143 ++++++++++++++--- .../hosts/unreal/plugins/load/load_layout.py | 149 ++++++++++++++++-- 2 files changed, 256 insertions(+), 36 deletions(-) diff --git a/openpype/hosts/unreal/plugins/load/load_camera.py b/openpype/hosts/unreal/plugins/load/load_camera.py index b2b25eec73..00d17407f9 100644 --- a/openpype/hosts/unreal/plugins/load/load_camera.py +++ b/openpype/hosts/unreal/plugins/load/load_camera.py @@ -15,6 +15,20 @@ class CameraLoader(api.Loader): icon = "cube" color = "orange" + def _add_sub_sequence(self, master, sub): + track = master.add_master_track(unreal.MovieSceneCinematicShotTrack) + section = track.add_section() + section.set_editor_property('sub_sequence', sub) + return section + + def _get_data(self, asset_name): + asset_doc = io.find_one({ + "type": "asset", + "name": asset_name + }) + + return asset_doc.get("data") + def load(self, context, name, namespace, data): """ Load and containerise representation into Content Browser. 
@@ -39,7 +53,13 @@ class CameraLoader(api.Loader): """ # Create directory for asset and avalon container - root = "/Game/Avalon/Assets" + hierarchy = context.get('asset').get('data').get('parents') + root = "/Game/Avalon" + hierarchy_dir = root + hierarchy_list = [] + for h in hierarchy: + hierarchy_dir = f"{hierarchy_dir}/{h}" + hierarchy_list.append(hierarchy_dir) asset = context.get('asset').get('name') suffix = "_CON" if asset: @@ -49,9 +69,9 @@ class CameraLoader(api.Loader): tools = unreal.AssetToolsHelpers().get_asset_tools() + # Create a unique name for the camera directory unique_number = 1 - - if unreal.EditorAssetLibrary.does_directory_exist(f"{root}/{asset}"): + if unreal.EditorAssetLibrary.does_directory_exist(f"{hierarchy_dir}/{asset}"): asset_content = unreal.EditorAssetLibrary.list_assets( f"{root}/{asset}", recursive=False, include_folder=True ) @@ -71,42 +91,121 @@ class CameraLoader(api.Loader): unique_number = f_numbers[-1] + 1 asset_dir, container_name = tools.create_unique_asset_name( - f"{root}/{asset}/{name}_{unique_number:02d}", suffix="") + f"{hierarchy_dir}/{asset}/{name}_{unique_number:02d}", suffix="") container_name += suffix + # sequence = None + + # ar = unreal.AssetRegistryHelpers.get_asset_registry() + + # if not unreal.EditorAssetLibrary.does_directory_exist(asset_dir): + # unreal.EditorAssetLibrary.make_directory(asset_dir) + + # sequence = tools.create_asset( + # asset_name=asset_name, + # package_path=asset_dir, + # asset_class=unreal.LevelSequence, + # factory=unreal.LevelSequenceFactoryNew() + # ) + # else: + # asset_content = unreal.EditorAssetLibrary.list_assets( + # asset_dir, recursive=False) + # for a in asset_content: + # obj = ar.get_asset_by_object_path(a) + # if obj.get_asset().get_class().get_name() == 'LevelSequence': + # sequence = obj.get_asset() + # break + + # assert sequence, "Sequence not found" + + # Get all the sequences in the hierarchy. It will create them, if + # they don't exist. 
+ sequences = [] + i = 0 + for h in hierarchy_list: + root_content = unreal.EditorAssetLibrary.list_assets( + h, recursive=False, include_folder=False) + + existing_sequences = [ + unreal.EditorAssetLibrary.find_asset_data(asset) + for asset in root_content + if unreal.EditorAssetLibrary.find_asset_data( + asset).get_class().get_name() == 'LevelSequence' + ] + + # for asset in root_content: + # asset_data = EditorAssetLibrary.find_asset_data(asset) + # # imported_asset = unreal.AssetRegistryHelpers.get_asset( + # # imported_asset_data) + # if asset_data.get_class().get_name() == 'LevelSequence': + # break + + if not existing_sequences: + scene = tools.create_asset( + asset_name=hierarchy[i], + package_path=h, + asset_class=unreal.LevelSequence, + factory=unreal.LevelSequenceFactoryNew() + ) + sequences.append(scene) + else: + for e in existing_sequences: + sequences.append(e.get_asset()) + + i += 1 + unreal.EditorAssetLibrary.make_directory(asset_dir) - sequence = tools.create_asset( - asset_name=asset_name, + cam_seq = tools.create_asset( + asset_name=asset, package_path=asset_dir, asset_class=unreal.LevelSequence, factory=unreal.LevelSequenceFactoryNew() ) - io_asset = io.Session["AVALON_ASSET"] - asset_doc = io.find_one({ - "type": "asset", - "name": io_asset - }) + sequences.append(cam_seq) - data = asset_doc.get("data") + # Add sequences data to hierarchy + data_i = self._get_data(sequences[0].get_name()) + + for i in range(0, len(sequences) - 1): + section = self._add_sub_sequence(sequences[i], sequences[i + 1]) + + print(sequences[i]) + print(sequences[i + 1]) + + data_j = self._get_data(sequences[i + 1].get_name()) + + if data_i: + sequences[i].set_display_rate(unreal.FrameRate(data_i.get("fps"), 1.0)) + sequences[i].set_playback_start(data_i.get("frameStart")) + sequences[i].set_playback_end(data_i.get("frameEnd")) + if data_j: + section.set_range( + data_j.get("frameStart"), + data_j.get("frameEnd")) + + data_i = data_j + + data = self._get_data(asset) if data: - sequence.set_display_rate(unreal.FrameRate(data.get("fps"), 1.0)) - sequence.set_playback_start(data.get("frameStart")) - sequence.set_playback_end(data.get("frameEnd")) + cam_seq.set_display_rate(unreal.FrameRate(data.get("fps"), 1.0)) + cam_seq.set_playback_start(data.get("frameStart")) + cam_seq.set_playback_end(data.get("frameEnd")) settings = unreal.MovieSceneUserImportFBXSettings() settings.set_editor_property('reduce_keys', False) - unreal.SequencerTools.import_fbx( - unreal.EditorLevelLibrary.get_editor_world(), - sequence, - sequence.get_bindings(), - settings, - self.fname - ) + if cam_seq: + unreal.SequencerTools.import_fbx( + unreal.EditorLevelLibrary.get_editor_world(), + cam_seq, + cam_seq.get_bindings(), + settings, + self.fname + ) # Create Asset Container lib.create_avalon_container(container=container_name, path=asset_dir) diff --git a/openpype/hosts/unreal/plugins/load/load_layout.py b/openpype/hosts/unreal/plugins/load/load_layout.py index 19d0b74e3e..7554a4658b 100644 --- a/openpype/hosts/unreal/plugins/load/load_layout.py +++ b/openpype/hosts/unreal/plugins/load/load_layout.py @@ -9,7 +9,7 @@ from unreal import AssetToolsHelpers from unreal import FBXImportType from unreal import MathLibrary as umath -from avalon import api, pipeline +from avalon import api, io, pipeline from avalon.unreal import lib from avalon.unreal import pipeline as unreal_pipeline @@ -74,10 +74,26 @@ class LayoutLoader(api.Loader): return None - def _process_family(self, assets, classname, transform, inst_name=None): + def 
_add_sub_sequence(self, master, sub): + track = master.add_master_track(unreal.MovieSceneCinematicShotTrack) + section = track.add_section() + section.set_editor_property('sub_sequence', sub) + return section + + def _get_data(self, asset_name): + asset_doc = io.find_one({ + "type": "asset", + "name": asset_name + }) + + return asset_doc.get("data") + + def _process_family( + self, assets, classname, transform, sequence, inst_name=None): ar = unreal.AssetRegistryHelpers.get_asset_registry() actors = [] + bindings = [] for asset in assets: obj = ar.get_asset_by_object_path(asset).get_asset() @@ -109,11 +125,17 @@ class LayoutLoader(api.Loader): actors.append(actor) - return actors + binding = sequence.add_possessable(actor) + # root_component_binding = sequence.add_possessable(actor.root_component) + # root_component_binding.set_parent(binding) + + bindings.append(binding) + + return actors, bindings def _import_animation( self, asset_dir, path, instance_name, skeleton, actors_dict, - animation_file): + animation_file, bindings_dict, sequence): anim_file = Path(animation_file) anim_file_name = anim_file.with_suffix('') @@ -192,7 +214,20 @@ class LayoutLoader(api.Loader): actor.skeletal_mesh_component.animation_data.set_editor_property( 'anim_to_play', animation) - def _process(self, libpath, asset_dir, loaded=None): + # Add animation to the sequencer + bindings = bindings_dict.get(instance_name) + + for binding in bindings: + binding.add_track(unreal.MovieSceneSkeletalAnimationTrack) + for track in binding.get_tracks(): + section = track.add_section() + section.set_range( + sequence.get_playback_start(), + sequence.get_playback_end()) + sec_params = section.get_editor_property('params') + sec_params.set_editor_property('animation', animation) + + def _process(self, libpath, asset_dir, sequence, loaded=None): ar = unreal.AssetRegistryHelpers.get_asset_registry() with open(libpath, "r") as fp: @@ -207,6 +242,7 @@ class LayoutLoader(api.Loader): skeleton_dict = {} actors_dict = {} + bindings_dict = {} for element in data: reference = None @@ -264,12 +300,13 @@ class LayoutLoader(api.Loader): actors = [] if family == 'model': - actors = self._process_family( - assets, 'StaticMesh', transform, inst) + actors, _ = self._process_family( + assets, 'StaticMesh', transform, sequence, inst) elif family == 'rig': - actors = self._process_family( - assets, 'SkeletalMesh', transform, inst) + actors, bindings = self._process_family( + assets, 'SkeletalMesh', transform, sequence, inst) actors_dict[inst] = actors + bindings_dict[inst] = bindings if family == 'rig': # Finds skeleton among the imported assets @@ -289,8 +326,13 @@ class LayoutLoader(api.Loader): if animation_file and skeleton: self._import_animation( - asset_dir, path, instance_name, skeleton, - actors_dict, animation_file) + asset_dir, path, instance_name, skeleton, actors_dict, + animation_file, bindings_dict, sequence) + + # track = sequence.add_master_track( + # unreal.MovieSceneActorReferenceTrack) + # section = track.add_section() + # section.set_editor_property('sub_sequence', sequence) def _remove_family(self, assets, components, classname, propname): ar = unreal.AssetRegistryHelpers.get_asset_registry() @@ -356,7 +398,13 @@ class LayoutLoader(api.Loader): list(str): list of container content """ # Create directory for asset and avalon container - root = "/Game/Avalon/Assets" + hierarchy = context.get('asset').get('data').get('parents') + root = "/Game/Avalon" + hierarchy_dir = root + hierarchy_list = [] + for h in hierarchy: + 
hierarchy_dir = f"{hierarchy_dir}/{h}" + hierarchy_list.append(hierarchy_dir) asset = context.get('asset').get('name') suffix = "_CON" if asset: @@ -366,13 +414,86 @@ class LayoutLoader(api.Loader): tools = unreal.AssetToolsHelpers().get_asset_tools() asset_dir, container_name = tools.create_unique_asset_name( - "{}/{}/{}".format(root, asset, name), suffix="") + "{}/{}/{}".format(hierarchy_dir, asset, name), suffix="") container_name += suffix EditorAssetLibrary.make_directory(asset_dir) - self._process(self.fname, asset_dir) + # Get all the sequences in the hierarchy. It will create them, if + # they don't exist. + sequences = [] + i = 0 + for h in hierarchy_list: + root_content = EditorAssetLibrary.list_assets( + h, recursive=False, include_folder=False) + + existing_sequences = [ + EditorAssetLibrary.find_asset_data(asset) + for asset in root_content + if EditorAssetLibrary.find_asset_data( + asset).get_class().get_name() == 'LevelSequence' + ] + + # for asset in root_content: + # asset_data = EditorAssetLibrary.find_asset_data(asset) + # # imported_asset = unreal.AssetRegistryHelpers.get_asset( + # # imported_asset_data) + # if asset_data.get_class().get_name() == 'LevelSequence': + # break + + if not existing_sequences: + scene = tools.create_asset( + asset_name=hierarchy[i], + package_path=h, + asset_class=unreal.LevelSequence, + factory=unreal.LevelSequenceFactoryNew() + ) + sequences.append(scene) + else: + for e in existing_sequences: + sequences.append(e.get_asset()) + + i += 1 + + # TODO: check if shot already exists + + shot = tools.create_asset( + asset_name=asset, + package_path=asset_dir, + asset_class=unreal.LevelSequence, + factory=unreal.LevelSequenceFactoryNew() + ) + + sequences.append(shot) + + # Add sequences data to hierarchy + data_i = self._get_data(sequences[0].get_name()) + + for i in range(0, len(sequences) - 1): + section = self._add_sub_sequence(sequences[i], sequences[i + 1]) + + data_j = self._get_data(sequences[i + 1].get_name()) + + if data_i: + sequences[i].set_display_rate(unreal.FrameRate(data_i.get("fps"), 1.0)) + sequences[i].set_playback_start(data_i.get("frameStart")) + sequences[i].set_playback_end(data_i.get("frameEnd")) + if data_j: + section.set_range( + data_j.get("frameStart"), + data_j.get("frameEnd")) + + data_i = data_j + + data = self._get_data(asset) + + if data: + shot.set_display_rate(unreal.FrameRate(data.get("fps"), 1.0)) + shot.set_playback_start(data.get("frameStart")) + shot.set_playback_end(data.get("frameEnd")) + + self._process(self.fname, asset_dir, shot) # Create Asset Container lib.create_avalon_container( From 5efc23c7433c08da7b42c182e7bcd1a4ad7b16ac Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Mon, 31 Jan 2022 11:22:47 +0000 Subject: [PATCH 003/357] Added button for starting the rendering of the selected instance --- openpype/hosts/unreal/api/rendering.py | 85 +++++++++++++++++++ openpype/hosts/unreal/api/tools_ui.py | 7 ++ .../unreal/plugins/create/create_render.py | 5 +- 3 files changed, 95 insertions(+), 2 deletions(-) create mode 100644 openpype/hosts/unreal/api/rendering.py diff --git a/openpype/hosts/unreal/api/rendering.py b/openpype/hosts/unreal/api/rendering.py new file mode 100644 index 0000000000..7c58987c0d --- /dev/null +++ b/openpype/hosts/unreal/api/rendering.py @@ -0,0 +1,85 @@ +import avalon.unreal.pipeline as pipeline +import avalon.unreal.lib as lib +import unreal + + +queue = None +executor = None + +def _queue_finish_callback(exec, success): + unreal.log("Render completed. 
Success: " + str(success)) + + # Delete our reference so we don't keep it alive. + global executor + global queue + del executor + del queue + + +def _job_finish_callback(job, success): + # You can make any edits you want to the editor world here, and the world + # will be duplicated when the next render happens. Make sure you undo your + # edits in OnQueueFinishedCallback if you don't want to leak state changes + # into the editor world. + unreal.log("Individual job completed.") + + +def start_rendering(): + """ + Start the rendering process. + """ + print("Starting rendering...") + + # Get selected sequences + assets = unreal.EditorUtilityLibrary.get_selected_assets() + + # instances = pipeline.ls_inst() + instances = [ + a for a in assets + if a.get_class().get_name() == "AvalonPublishInstance"] + + inst_data = [] + + for i in instances: + data = pipeline.parse_container(i.get_path_name()) + if data["family"] == "render": + inst_data.append(data) + + # subsystem = unreal.get_editor_subsystem(unreal.MoviePipelineQueueSubsystem) + # queue = subsystem.get_queue() + global queue + queue = unreal.MoviePipelineQueue() + + for i in inst_data: + job = queue.allocate_new_job(unreal.MoviePipelineExecutorJob) + job.sequence = unreal.SoftObjectPath(i["sequence"]) + job.map = unreal.SoftObjectPath(i["map"]) + job.author = "OpenPype" + + # User data could be used to pass data to the job, that can be read + # in the job's OnJobFinished callback. We could, for instance, + # pass the AvalonPublishInstance's path to the job. + # job.user_data = "" + + output_setting = job.get_configuration().find_or_add_setting_by_class( + unreal.MoviePipelineOutputSetting) + output_setting.output_resolution = unreal.IntPoint(1280, 720) + output_setting.file_name_format = "{sequence_name}.{frame_number}" + output_setting.output_directory.path += f"{i['subset']}/" + + renderPass = job.get_configuration().find_or_add_setting_by_class( + unreal.MoviePipelineDeferredPassBase) + renderPass.disable_multisample_effects = True + + job.get_configuration().find_or_add_setting_by_class( + unreal.MoviePipelineImageSequenceOutput_PNG) + + # TODO: check if queue is empty + + global executor + executor = unreal.MoviePipelinePIEExecutor() + executor.on_executor_finished_delegate.add_callable_unique( + _queue_finish_callback) + executor.on_individual_job_finished_delegate.add_callable_unique( + _job_finish_callback) # Only available on PIE Executor + executor.execute(queue) diff --git a/openpype/hosts/unreal/api/tools_ui.py b/openpype/hosts/unreal/api/tools_ui.py index 93361c3574..2500f8495f 100644 --- a/openpype/hosts/unreal/api/tools_ui.py +++ b/openpype/hosts/unreal/api/tools_ui.py @@ -7,6 +7,7 @@ from openpype import ( ) from openpype.tools.utils import host_tools from openpype.tools.utils.lib import qt_app_context +from openpype.hosts.unreal.api import rendering class ToolsBtnsWidget(QtWidgets.QWidget): @@ -20,6 +21,7 @@ class ToolsBtnsWidget(QtWidgets.QWidget): load_btn = QtWidgets.QPushButton("Load...", self) publish_btn = QtWidgets.QPushButton("Publish...", self) manage_btn = QtWidgets.QPushButton("Manage...", self) + render_btn = QtWidgets.QPushButton("Render...", self) experimental_tools_btn = QtWidgets.QPushButton( "Experimental tools...", self ) @@ -30,6 +32,7 @@ class ToolsBtnsWidget(QtWidgets.QWidget): layout.addWidget(load_btn, 0) layout.addWidget(publish_btn, 0) layout.addWidget(manage_btn, 0) + layout.addWidget(render_btn, 0) layout.addWidget(experimental_tools_btn, 0) layout.addStretch(1) @@ -37,6 +40,7 @@ class 
ToolsBtnsWidget(QtWidgets.QWidget): load_btn.clicked.connect(self._on_load) publish_btn.clicked.connect(self._on_publish) manage_btn.clicked.connect(self._on_manage) + render_btn.clicked.connect(self._on_render) experimental_tools_btn.clicked.connect(self._on_experimental) def _on_create(self): @@ -51,6 +55,9 @@ class ToolsBtnsWidget(QtWidgets.QWidget): def _on_manage(self): self.tool_required.emit("sceneinventory") + def _on_render(self): + rendering.start_rendering() + def _on_experimental(self): self.tool_required.emit("experimental_tools") diff --git a/openpype/hosts/unreal/plugins/create/create_render.py b/openpype/hosts/unreal/plugins/create/create_render.py index a0bf320225..0128808a70 100644 --- a/openpype/hosts/unreal/plugins/create/create_render.py +++ b/openpype/hosts/unreal/plugins/create/create_render.py @@ -44,9 +44,10 @@ class CreateRender(Creator): for a in selection: d = self.data.copy() + d["members"] = [a] d["sequence"] = a + d["map"] = unreal.EditorLevelLibrary.get_editor_world().get_path_name() asset = ar.get_asset_by_object_path(a).get_asset() - container_name = asset.get_name() + container_name = f"{asset.get_name()}{self.suffix}" pipeline.create_publish_instance(instance=container_name, path=path) pipeline.imprint("{}/{}".format(path, container_name), d) - From 67339b488be8727d864f4f7804dacc9d9f267e6a Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Mon, 31 Jan 2022 11:23:26 +0000 Subject: [PATCH 004/357] Implemented extraction of renders --- .../unreal/plugins/publish/extract_render.py | 46 +++++++++++++++++++ 1 file changed, 46 insertions(+) create mode 100644 openpype/hosts/unreal/plugins/publish/extract_render.py diff --git a/openpype/hosts/unreal/plugins/publish/extract_render.py b/openpype/hosts/unreal/plugins/publish/extract_render.py new file mode 100644 index 0000000000..7ba53c9155 --- /dev/null +++ b/openpype/hosts/unreal/plugins/publish/extract_render.py @@ -0,0 +1,46 @@ +from pathlib import Path +import openpype.api +from avalon import io +import unreal + + +class ExtractRender(openpype.api.Extractor): + """Extract render.""" + + label = "Extract Render" + hosts = ["unreal"] + families = ["render"] + optional = True + + def process(self, instance): + # Define extract output file path + stagingdir = self.staging_dir(instance) + + # Perform extraction + self.log.info("Performing extraction..") + + # Get the render output directory + project_dir = unreal.Paths.project_dir() + render_dir = f"{project_dir}/Saved/MovieRenders/{instance.data['subset']}" + + assert unreal.Paths.directory_exists(render_dir), \ + "Render directory does not exist" + + render_path = Path(render_dir) + + frames = [] + + for x in render_path.iterdir(): + if x.is_file() and x.suffix == '.png': + frames.append(str(x)) + + if "representations" not in instance.data: + instance.data["representations"] = [] + + render_representation = { + 'name': 'png', + 'ext': 'png', + 'files': frames, + "stagingDir": stagingdir, + } + instance.data["representations"].append(render_representation) From 2966068aa58d7e42f5a86cd289355242a51e779b Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Tue, 15 Feb 2022 12:27:54 +0000 Subject: [PATCH 005/357] Layout and Cameras create the level and sequence hierarchy structure --- .../hosts/unreal/plugins/load/load_camera.py | 93 +++++------ .../hosts/unreal/plugins/load/load_layout.py | 147 +++++++++++++++--- 2 files changed, 159 insertions(+), 81 deletions(-) diff --git a/openpype/hosts/unreal/plugins/load/load_camera.py 
b/openpype/hosts/unreal/plugins/load/load_camera.py index 00d17407f9..feab531aaa 100644 --- a/openpype/hosts/unreal/plugins/load/load_camera.py +++ b/openpype/hosts/unreal/plugins/load/load_camera.py @@ -15,12 +15,6 @@ class CameraLoader(api.Loader): icon = "cube" color = "orange" - def _add_sub_sequence(self, master, sub): - track = master.add_master_track(unreal.MovieSceneCinematicShotTrack) - section = track.add_section() - section.set_editor_property('sub_sequence', sub) - return section - def _get_data(self, asset_name): asset_doc = io.find_one({ "type": "asset", @@ -29,6 +23,35 @@ class CameraLoader(api.Loader): return asset_doc.get("data") + def _set_sequence_hierarchy(self, seq_i, seq_j, data_i, data_j): + if data_i: + seq_i.set_display_rate(unreal.FrameRate(data_i.get("fps"), 1.0)) + seq_i.set_playback_start(data_i.get("frameStart")) + seq_i.set_playback_end(data_i.get("frameEnd") + 1) + + tracks = seq_i.get_master_tracks() + track = None + for t in tracks: + if t.get_class() == unreal.MovieSceneSubTrack.static_class(): + track = t + break + if not track: + track = seq_i.add_master_track(unreal.MovieSceneSubTrack) + + subscenes = track.get_sections() + subscene = None + for s in subscenes: + if s.get_editor_property('sub_sequence') == seq_j: + subscene = s + break + if not subscene: + subscene = track.add_section() + subscene.set_row_index(len(track.get_sections())) + subscene.set_editor_property('sub_sequence', seq_j) + subscene.set_range( + data_j.get("frameStart"), + data_j.get("frameEnd") + 1) + def load(self, context, name, namespace, data): """ Load and containerise representation into Content Browser. @@ -95,30 +118,6 @@ class CameraLoader(api.Loader): container_name += suffix - # sequence = None - - # ar = unreal.AssetRegistryHelpers.get_asset_registry() - - # if not unreal.EditorAssetLibrary.does_directory_exist(asset_dir): - # unreal.EditorAssetLibrary.make_directory(asset_dir) - - # sequence = tools.create_asset( - # asset_name=asset_name, - # package_path=asset_dir, - # asset_class=unreal.LevelSequence, - # factory=unreal.LevelSequenceFactoryNew() - # ) - # else: - # asset_content = unreal.EditorAssetLibrary.list_assets( - # asset_dir, recursive=False) - # for a in asset_content: - # obj = ar.get_asset_by_object_path(a) - # if obj.get_asset().get_class().get_name() == 'LevelSequence': - # sequence = obj.get_asset() - # break - - # assert sequence, "Sequence not found" - # Get all the sequences in the hierarchy. It will create them, if # they don't exist. 
sequences = [] @@ -134,13 +133,6 @@ class CameraLoader(api.Loader): asset).get_class().get_name() == 'LevelSequence' ] - # for asset in root_content: - # asset_data = EditorAssetLibrary.find_asset_data(asset) - # # imported_asset = unreal.AssetRegistryHelpers.get_asset( - # # imported_asset_data) - # if asset_data.get_class().get_name() == 'LevelSequence': - # break - if not existing_sequences: scene = tools.create_asset( asset_name=hierarchy[i], @@ -158,42 +150,27 @@ class CameraLoader(api.Loader): unreal.EditorAssetLibrary.make_directory(asset_dir) cam_seq = tools.create_asset( - asset_name=asset, + asset_name=f"{asset}_camera", package_path=asset_dir, asset_class=unreal.LevelSequence, factory=unreal.LevelSequenceFactoryNew() ) - sequences.append(cam_seq) - # Add sequences data to hierarchy data_i = self._get_data(sequences[0].get_name()) for i in range(0, len(sequences) - 1): - section = self._add_sub_sequence(sequences[i], sequences[i + 1]) - - print(sequences[i]) - print(sequences[i + 1]) - data_j = self._get_data(sequences[i + 1].get_name()) - if data_i: - sequences[i].set_display_rate(unreal.FrameRate(data_i.get("fps"), 1.0)) - sequences[i].set_playback_start(data_i.get("frameStart")) - sequences[i].set_playback_end(data_i.get("frameEnd")) - if data_j: - section.set_range( - data_j.get("frameStart"), - data_j.get("frameEnd")) + self._set_sequence_hierarchy( + sequences[i], sequences[i + 1], data_i, data_j) data_i = data_j + parent_data = self._get_data(sequences[-1].get_name()) data = self._get_data(asset) - - if data: - cam_seq.set_display_rate(unreal.FrameRate(data.get("fps"), 1.0)) - cam_seq.set_playback_start(data.get("frameStart")) - cam_seq.set_playback_end(data.get("frameEnd")) + self._set_sequence_hierarchy( + sequences[-1], cam_seq, parent_data, data) settings = unreal.MovieSceneUserImportFBXSettings() settings.set_editor_property('reduce_keys', False) diff --git a/openpype/hosts/unreal/plugins/load/load_layout.py b/openpype/hosts/unreal/plugins/load/load_layout.py index 7554a4658b..a7d5a5841f 100644 --- a/openpype/hosts/unreal/plugins/load/load_layout.py +++ b/openpype/hosts/unreal/plugins/load/load_layout.py @@ -5,6 +5,7 @@ from pathlib import Path import unreal from unreal import EditorAssetLibrary from unreal import EditorLevelLibrary +from unreal import EditorLevelUtils from unreal import AssetToolsHelpers from unreal import FBXImportType from unreal import MathLibrary as umath @@ -74,12 +75,6 @@ class LayoutLoader(api.Loader): return None - def _add_sub_sequence(self, master, sub): - track = master.add_master_track(unreal.MovieSceneCinematicShotTrack) - section = track.add_section() - section.set_editor_property('sub_sequence', sub) - return section - def _get_data(self, asset_name): asset_doc = io.find_one({ "type": "asset", @@ -88,6 +83,78 @@ class LayoutLoader(api.Loader): return asset_doc.get("data") + def _set_sequence_hierarchy(self, seq_i, seq_j, data_i, data_j, map_paths): + # Set data for the parent sequence + if data_i: + seq_i.set_display_rate(unreal.FrameRate(data_i.get("fps"), 1.0)) + seq_i.set_playback_start(data_i.get("frameStart")) + seq_i.set_playback_end(data_i.get("frameEnd") + 1) + + # Get existing sequencer tracks or create them if they don't exist + tracks = seq_i.get_master_tracks() + subscene_track = None + visibility_track = None + for t in tracks: + if t.get_class() == unreal.MovieSceneSubTrack.static_class(): + subscene_track = t + if t.get_class() == unreal.MovieSceneLevelVisibilityTrack.static_class(): + visibility_track = t + if not 
subscene_track: + subscene_track = seq_i.add_master_track(unreal.MovieSceneSubTrack) + if not visibility_track: + visibility_track = seq_i.add_master_track(unreal.MovieSceneLevelVisibilityTrack) + + # Create the sub-scene section + subscenes = subscene_track.get_sections() + subscene = None + for s in subscenes: + if s.get_editor_property('sub_sequence') == seq_j: + subscene = s + break + if not subscene: + subscene = subscene_track.add_section() + subscene.set_row_index(len(subscene_track.get_sections())) + subscene.set_editor_property('sub_sequence', seq_j) + subscene.set_range( + data_j.get("frameStart"), + data_j.get("frameEnd") + 1) + + # Create the visibility section + ar = unreal.AssetRegistryHelpers.get_asset_registry() + maps = [] + for m in map_paths: + # Unreal requires to load the level to get the map name + EditorLevelLibrary.save_all_dirty_levels() + EditorLevelLibrary.load_level(m) + maps.append(str(ar.get_asset_by_object_path(m).asset_name)) + + vis_section = visibility_track.add_section() + index = len(visibility_track.get_sections()) + + vis_section.set_range( + data_j.get("frameStart"), + data_j.get("frameEnd") + 1) + vis_section.set_visibility(unreal.LevelVisibility.VISIBLE) + vis_section.set_row_index(index) + vis_section.set_level_names(maps) + + if data_j.get("frameStart") > 1: + hid_section = visibility_track.add_section() + hid_section.set_range( + 1, + data_j.get("frameStart")) + hid_section.set_visibility(unreal.LevelVisibility.HIDDEN) + hid_section.set_row_index(index) + hid_section.set_level_names(maps) + if data_j.get("frameEnd") < data_i.get("frameEnd"): + hid_section = visibility_track.add_section() + hid_section.set_range( + data_j.get("frameEnd") + 1, + data_i.get("frameEnd") + 1) + hid_section.set_visibility(unreal.LevelVisibility.HIDDEN) + hid_section.set_row_index(index) + hid_section.set_level_names(maps) + def _process_family( self, assets, classname, transform, sequence, inst_name=None): ar = unreal.AssetRegistryHelpers.get_asset_registry() @@ -420,6 +487,37 @@ class LayoutLoader(api.Loader): EditorAssetLibrary.make_directory(asset_dir) + # Create map for the shot, and create hierarchy of map. If the maps + # already exist, we will use them. + maps = [] + for h in hierarchy_list: + a = h.split('/')[-1] + map = f"{h}/{a}_map.{a}_map" + new = False + + if not EditorAssetLibrary.does_asset_exist(map): + EditorLevelLibrary.new_level(f"{h}/{a}_map") + new = True + + maps.append({"map": map, "new": new}) + + EditorLevelLibrary.new_level(f"{asset_dir}/{asset}_map") + maps.append( + {"map":f"{asset_dir}/{asset}_map.{asset}_map", "new": True}) + + for i in range(0, len(maps) - 1): + for j in range(i + 1, len(maps)): + if maps[j].get('new'): + EditorLevelLibrary.load_level(maps[i].get('map')) + EditorLevelUtils.add_level_to_world( + EditorLevelLibrary.get_editor_world(), + maps[j].get('map'), + unreal.LevelStreamingDynamic + ) + EditorLevelLibrary.save_all_dirty_levels() + + EditorLevelLibrary.load_level(maps[-1].get('map')) + # Get all the sequences in the hierarchy. It will create them, if # they don't exist. 
sequences = [] @@ -456,8 +554,6 @@ class LayoutLoader(api.Loader): i += 1 - # TODO: check if shot already exists - shot = tools.create_asset( asset_name=asset, package_path=asset_dir, @@ -465,36 +561,39 @@ class LayoutLoader(api.Loader): factory=unreal.LevelSequenceFactoryNew() ) - sequences.append(shot) - # Add sequences data to hierarchy data_i = self._get_data(sequences[0].get_name()) for i in range(0, len(sequences) - 1): - section = self._add_sub_sequence(sequences[i], sequences[i + 1]) + maps_to_add = [] + for j in range(i + 1, len(maps)): + maps_to_add.append(maps[j].get('map')) data_j = self._get_data(sequences[i + 1].get_name()) - if data_i: - sequences[i].set_display_rate(unreal.FrameRate(data_i.get("fps"), 1.0)) - sequences[i].set_playback_start(data_i.get("frameStart")) - sequences[i].set_playback_end(data_i.get("frameEnd")) - if data_j: - section.set_range( - data_j.get("frameStart"), - data_j.get("frameEnd")) + self._set_sequence_hierarchy( + sequences[i], sequences[i + 1], + data_i, data_j, + maps_to_add) data_i = data_j + parent_data = self._get_data(sequences[-1].get_name()) data = self._get_data(asset) + self._set_sequence_hierarchy( + sequences[-1], shot, + parent_data, data, + [maps[-1].get('map')]) - if data: - shot.set_display_rate(unreal.FrameRate(data.get("fps"), 1.0)) - shot.set_playback_start(data.get("frameStart")) - shot.set_playback_end(data.get("frameEnd")) + EditorLevelLibrary.load_level(maps[-1].get('map')) self._process(self.fname, asset_dir, shot) + for s in sequences: + EditorAssetLibrary.save_asset(s.get_full_name()) + + EditorLevelLibrary.save_current_level() + # Create Asset Container lib.create_avalon_container( container=container_name, path=asset_dir) @@ -520,6 +619,8 @@ class LayoutLoader(api.Loader): for a in asset_content: EditorAssetLibrary.save_asset(a) + EditorLevelLibrary.load_level(maps[0].get('map')) + return asset_content def update(self, container, representation): From 9e7208187599b8095c9e03a873b750682862e717 Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Wed, 16 Feb 2022 09:48:28 +0000 Subject: [PATCH 006/357] Animation are added to sequence when loaded --- .../unreal/plugins/load/load_animation.py | 188 +++++++++++------- 1 file changed, 119 insertions(+), 69 deletions(-) diff --git a/openpype/hosts/unreal/plugins/load/load_animation.py b/openpype/hosts/unreal/plugins/load/load_animation.py index 20baa30847..7d054c4899 100644 --- a/openpype/hosts/unreal/plugins/load/load_animation.py +++ b/openpype/hosts/unreal/plugins/load/load_animation.py @@ -1,10 +1,14 @@ import os import json +import unreal +from unreal import EditorAssetLibrary +from unreal import MovieSceneSkeletalAnimationTrack +from unreal import MovieSceneSkeletalAnimationSection + from avalon import api, pipeline from avalon.unreal import lib from avalon.unreal import pipeline as unreal_pipeline -import unreal class AnimationFBXLoader(api.Loader): @@ -16,59 +20,13 @@ class AnimationFBXLoader(api.Loader): icon = "cube" color = "orange" - def load(self, context, name, namespace, options=None): - """ - Load and containerise representation into Content Browser. - - This is two step process. First, import FBX to temporary path and - then call `containerise()` on it - this moves all content to new - directory and then it will create AssetContainer there and imprint it - with metadata. This will mark this path as container. - - Args: - context (dict): application context - name (str): subset name - namespace (str): in Unreal this is basically path to container. 
- This is not passed here, so namespace is set - by `containerise()` because only then we know - real path. - data (dict): Those would be data to be imprinted. This is not used - now, data are imprinted by `containerise()`. - - Returns: - list(str): list of container content - """ - - # Create directory for asset and avalon container - root = "/Game/Avalon/Assets" - asset = context.get('asset').get('name') - suffix = "_CON" - if asset: - asset_name = "{}_{}".format(asset, name) - else: - asset_name = "{}".format(name) - - tools = unreal.AssetToolsHelpers().get_asset_tools() - asset_dir, container_name = tools.create_unique_asset_name( - "{}/{}/{}".format(root, asset, name), suffix="") - - container_name += suffix - - unreal.EditorAssetLibrary.make_directory(asset_dir) - + def _process(self, asset_dir, asset_name, instance_name): automated = False actor = None task = unreal.AssetImportTask() task.options = unreal.FbxImportUI() - libpath = self.fname.replace("fbx", "json") - - with open(libpath, "r") as fp: - data = json.load(fp) - - instance_name = data.get("instance_name") - if instance_name: automated = True # Old method to get the actor @@ -126,6 +84,116 @@ class AnimationFBXLoader(api.Loader): unreal.AssetToolsHelpers.get_asset_tools().import_asset_tasks([task]) + asset_content = unreal.EditorAssetLibrary.list_assets( + asset_dir, recursive=True, include_folder=True + ) + + animation = None + + for a in asset_content: + imported_asset_data = unreal.EditorAssetLibrary.find_asset_data(a) + imported_asset = unreal.AssetRegistryHelpers.get_asset( + imported_asset_data) + if imported_asset.__class__ == unreal.AnimSequence: + animation = imported_asset + break + + if animation: + animation.set_editor_property('enable_root_motion', True) + actor.skeletal_mesh_component.set_editor_property( + 'animation_mode', unreal.AnimationMode.ANIMATION_SINGLE_NODE) + actor.skeletal_mesh_component.animation_data.set_editor_property( + 'anim_to_play', animation) + + return animation + + def load(self, context, name, namespace, options=None): + """ + Load and containerise representation into Content Browser. + + This is two step process. First, import FBX to temporary path and + then call `containerise()` on it - this moves all content to new + directory and then it will create AssetContainer there and imprint it + with metadata. This will mark this path as container. + + Args: + context (dict): application context + name (str): subset name + namespace (str): in Unreal this is basically path to container. + This is not passed here, so namespace is set + by `containerise()` because only then we know + real path. + data (dict): Those would be data to be imprinted. This is not used + now, data are imprinted by `containerise()`. 
+ + Returns: + list(str): list of container content + """ + + # Create directory for asset and avalon container + hierarchy = context.get('asset').get('data').get('parents') + root = "/Game/Avalon" + asset = context.get('asset').get('name') + suffix = "_CON" + if asset: + asset_name = "{}_{}".format(asset, name) + else: + asset_name = "{}".format(name) + + tools = unreal.AssetToolsHelpers().get_asset_tools() + asset_dir, container_name = tools.create_unique_asset_name( + f"{root}/Assets/{asset}/{name}", suffix="") + + hierarchy_dir = root + for h in hierarchy: + hierarchy_dir = f"{hierarchy_dir}/{h}" + hierarchy_dir = f"{hierarchy_dir}/{asset}" + + container_name += suffix + + unreal.EditorAssetLibrary.make_directory(asset_dir) + + libpath = self.fname.replace("fbx", "json") + + with open(libpath, "r") as fp: + data = json.load(fp) + + instance_name = data.get("instance_name") + + animation = self._process(asset_dir, container_name, instance_name) + + asset_content = unreal.EditorAssetLibrary.list_assets( + hierarchy_dir, recursive=True, include_folder=False) + + # Get the sequence for the layout, excluding the camera one. + sequences = [a for a in asset_content + if (EditorAssetLibrary.find_asset_data(a).get_class() == + unreal.LevelSequence.static_class() and + "_camera" not in a.split("/")[-1])] + + ar = unreal.AssetRegistryHelpers.get_asset_registry() + + for s in sequences: + sequence = ar.get_asset_by_object_path(s).get_asset() + possessables = [ + p for p in sequence.get_possessables() + if p.get_display_name() == instance_name] + + for p in possessables: + tracks = [ + t for t in p.get_tracks() + if (t.get_class() == + MovieSceneSkeletalAnimationTrack.static_class())] + + for t in tracks: + sections = [ + s for s in t.get_sections() + if (s.get_class() == + MovieSceneSkeletalAnimationSection.static_class())] + + for s in sections: + s.params.set_editor_property('animation', animation) + # Create Asset Container lib.create_avalon_container( container=container_name, path=asset_dir) @@ -145,29 +213,11 @@ class AnimationFBXLoader(api.Loader): unreal_pipeline.imprint( "{}/{}".format(asset_dir, container_name), data) - asset_content = unreal.EditorAssetLibrary.list_assets( - asset_dir, recursive=True, include_folder=True - ) + imported_content = unreal.EditorAssetLibrary.list_assets( + asset_dir, recursive=True, include_folder=False) - animation = None - - for a in asset_content: + for a in imported_content: unreal.EditorAssetLibrary.save_asset(a) - imported_asset_data = unreal.EditorAssetLibrary.find_asset_data(a) - imported_asset = unreal.AssetRegistryHelpers.get_asset( - imported_asset_data) - if imported_asset.__class__ == unreal.AnimSequence: - animation = imported_asset - break - - if animation: - animation.set_editor_property('enable_root_motion', True) - actor.skeletal_mesh_component.set_editor_property( - 'animation_mode', unreal.AnimationMode.ANIMATION_SINGLE_NODE) - actor.skeletal_mesh_component.animation_data.set_editor_property( - 'anim_to_play', animation) - - return asset_content def update(self, container, representation): name = container["asset_name"] From d11a871bb181d0934efda4579ccafb5c73cb8c17 Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Thu, 17 Feb 2022 16:34:18 +0000 Subject: [PATCH 007/357] Changed logic to obtain min and max frame of sequences --- .../hosts/unreal/plugins/load/load_camera.py | 32 ++++++++++++++--- .../hosts/unreal/plugins/load/load_layout.py | 35 +++++++++++++++---- 2 files changed, 55 insertions(+), 12 deletions(-) diff --git 
a/openpype/hosts/unreal/plugins/load/load_camera.py b/openpype/hosts/unreal/plugins/load/load_camera.py index feab531aaa..2d29319fc7 100644 --- a/openpype/hosts/unreal/plugins/load/load_camera.py +++ b/openpype/hosts/unreal/plugins/load/load_camera.py @@ -24,11 +24,6 @@ class CameraLoader(api.Loader): return asset_doc.get("data") def _set_sequence_hierarchy(self, seq_i, seq_j, data_i, data_j): - if data_i: - seq_i.set_display_rate(unreal.FrameRate(data_i.get("fps"), 1.0)) - seq_i.set_playback_start(data_i.get("frameStart")) - seq_i.set_playback_end(data_i.get("frameEnd") + 1) - tracks = seq_i.get_master_tracks() track = None for t in tracks: @@ -140,6 +135,33 @@ class CameraLoader(api.Loader): asset_class=unreal.LevelSequence, factory=unreal.LevelSequenceFactoryNew() ) + + asset_data = io.find_one({ + "type": "asset", + "name": h.split('/')[-1] + }) + + id = asset_data.get('_id') + + start_frames = [] + end_frames = [] + + elements = list( + io.find({"type": "asset", "data.visualParent": id})) + for e in elements: + start_frames.append(e.get('data').get('clipIn')) + end_frames.append(e.get('data').get('clipOut')) + + elements.extend(io.find({ + "type": "asset", + "data.visualParent": e.get('_id') + })) + + scene.set_display_rate( + unreal.FrameRate(asset_data.get('data').get("fps"), 1.0)) + scene.set_playback_start(min(start_frames)) + scene.set_playback_end(max(end_frames)) + sequences.append(scene) else: for e in existing_sequences: diff --git a/openpype/hosts/unreal/plugins/load/load_layout.py b/openpype/hosts/unreal/plugins/load/load_layout.py index a7d5a5841f..a36bd6663a 100644 --- a/openpype/hosts/unreal/plugins/load/load_layout.py +++ b/openpype/hosts/unreal/plugins/load/load_layout.py @@ -1,4 +1,4 @@ -import os +import os, sys import json from pathlib import Path @@ -84,12 +84,6 @@ class LayoutLoader(api.Loader): return asset_doc.get("data") def _set_sequence_hierarchy(self, seq_i, seq_j, data_i, data_j, map_paths): - # Set data for the parent sequence - if data_i: - seq_i.set_display_rate(unreal.FrameRate(data_i.get("fps"), 1.0)) - seq_i.set_playback_start(data_i.get("frameStart")) - seq_i.set_playback_end(data_i.get("frameEnd") + 1) - # Get existing sequencer tracks or create them if they don't exist tracks = seq_i.get_master_tracks() subscene_track = None @@ -547,6 +541,33 @@ class LayoutLoader(api.Loader): asset_class=unreal.LevelSequence, factory=unreal.LevelSequenceFactoryNew() ) + + asset_data = io.find_one({ + "type": "asset", + "name": h.split('/')[-1] + }) + + id = asset_data.get('_id') + + start_frames = [] + end_frames = [] + + elements = list( + io.find({"type": "asset", "data.visualParent": id})) + for e in elements: + start_frames.append(e.get('data').get('clipIn')) + end_frames.append(e.get('data').get('clipOut')) + + elements.extend(io.find({ + "type": "asset", + "data.visualParent": e.get('_id') + })) + + scene.set_display_rate( + unreal.FrameRate(asset_data.get('data').get("fps"), 1.0)) + scene.set_playback_start(min(start_frames)) + scene.set_playback_end(max(end_frames)) + sequences.append(scene) else: for e in existing_sequences: From ce4984d7e60488b20e08850a87468f488805822c Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Thu, 17 Feb 2022 17:35:14 +0000 Subject: [PATCH 008/357] Camera is now saved in the right level --- .../hosts/unreal/plugins/load/load_camera.py | 23 +++++++++++++++++++ 1 file changed, 23 insertions(+) diff --git a/openpype/hosts/unreal/plugins/load/load_camera.py b/openpype/hosts/unreal/plugins/load/load_camera.py index 
2d29319fc7..61d9c04d2f 100644 --- a/openpype/hosts/unreal/plugins/load/load_camera.py +++ b/openpype/hosts/unreal/plugins/load/load_camera.py @@ -78,7 +78,10 @@ class CameraLoader(api.Loader): for h in hierarchy: hierarchy_dir = f"{hierarchy_dir}/{h}" hierarchy_list.append(hierarchy_dir) + print(h) + print(hierarchy_dir) asset = context.get('asset').get('name') + print(asset) suffix = "_CON" if asset: asset_name = "{}_{}".format(asset, name) @@ -113,6 +116,23 @@ class CameraLoader(api.Loader): container_name += suffix + current_level = unreal.EditorLevelLibrary.get_editor_world().get_full_name() + unreal.EditorLevelLibrary.save_all_dirty_levels() + + # asset_content = unreal.EditorAssetLibrary.list_assets( + # f"{hierarchy_dir}/{asset}/", recursive=True, include_folder=False + # ) + + ar = unreal.AssetRegistryHelpers.get_asset_registry() + filter = unreal.ARFilter( + class_names = ["World"], + package_paths = [f"{hierarchy_dir}/{asset}/"], + recursive_paths = True) + maps = ar.get_assets(filter) + + # There should be only one map in the list + unreal.EditorLevelLibrary.load_level(maps[0].get_full_name()) + # Get all the sequences in the hierarchy. It will create them, if # they don't exist. sequences = [] @@ -224,6 +244,9 @@ class CameraLoader(api.Loader): unreal_pipeline.imprint( "{}/{}".format(asset_dir, container_name), data) + unreal.EditorLevelLibrary.save_all_dirty_levels() + unreal.EditorLevelLibrary.load_level(current_level) + asset_content = unreal.EditorAssetLibrary.list_assets( asset_dir, recursive=True, include_folder=True ) From fa64a26a17271522e80af955a48f6238c5c4af2b Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Thu, 17 Feb 2022 17:46:45 +0000 Subject: [PATCH 009/357] Removed debug prints --- openpype/hosts/unreal/plugins/load/load_camera.py | 7 ------- 1 file changed, 7 deletions(-) diff --git a/openpype/hosts/unreal/plugins/load/load_camera.py b/openpype/hosts/unreal/plugins/load/load_camera.py index 61d9c04d2f..6ee88f8acc 100644 --- a/openpype/hosts/unreal/plugins/load/load_camera.py +++ b/openpype/hosts/unreal/plugins/load/load_camera.py @@ -78,10 +78,7 @@ class CameraLoader(api.Loader): for h in hierarchy: hierarchy_dir = f"{hierarchy_dir}/{h}" hierarchy_list.append(hierarchy_dir) - print(h) - print(hierarchy_dir) asset = context.get('asset').get('name') - print(asset) suffix = "_CON" if asset: asset_name = "{}_{}".format(asset, name) @@ -119,10 +116,6 @@ class CameraLoader(api.Loader): current_level = unreal.EditorLevelLibrary.get_editor_world().get_full_name() unreal.EditorLevelLibrary.save_all_dirty_levels() - # asset_content = unreal.EditorAssetLibrary.list_assets( - # f"{hierarchy_dir}/{asset}/", recursive=True, include_folder=False - # ) - ar = unreal.AssetRegistryHelpers.get_asset_registry() filter = unreal.ARFilter( class_names = ["World"], From 26d63e6beacf7334eaa4d5c4a5cad789adc75db5 Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Fri, 18 Feb 2022 12:15:06 +0000 Subject: [PATCH 010/357] Fix start and end frames of sequences --- .../hosts/unreal/plugins/load/load_layout.py | 48 ++++++++++--------- 1 file changed, 25 insertions(+), 23 deletions(-) diff --git a/openpype/hosts/unreal/plugins/load/load_layout.py b/openpype/hosts/unreal/plugins/load/load_layout.py index a36bd6663a..5d7977b237 100644 --- a/openpype/hosts/unreal/plugins/load/load_layout.py +++ b/openpype/hosts/unreal/plugins/load/load_layout.py @@ -1,4 +1,4 @@ -import os, sys +import os import json from pathlib import Path @@ -83,7 +83,9 @@ class LayoutLoader(api.Loader): return 
asset_doc.get("data") - def _set_sequence_hierarchy(self, seq_i, seq_j, data_i, data_j, map_paths): + def _set_sequence_hierarchy(self, + seq_i, seq_j, max_frame_i, min_frame_j, max_frame_j, map_paths + ): # Get existing sequencer tracks or create them if they don't exist tracks = seq_i.get_master_tracks() subscene_track = None @@ -110,8 +112,8 @@ class LayoutLoader(api.Loader): subscene.set_row_index(len(subscene_track.get_sections())) subscene.set_editor_property('sub_sequence', seq_j) subscene.set_range( - data_j.get("frameStart"), - data_j.get("frameEnd") + 1) + min_frame_j, + max_frame_j + 1) # Create the visibility section ar = unreal.AssetRegistryHelpers.get_asset_registry() @@ -126,25 +128,25 @@ class LayoutLoader(api.Loader): index = len(visibility_track.get_sections()) vis_section.set_range( - data_j.get("frameStart"), - data_j.get("frameEnd") + 1) + min_frame_j, + max_frame_j + 1) vis_section.set_visibility(unreal.LevelVisibility.VISIBLE) vis_section.set_row_index(index) vis_section.set_level_names(maps) - if data_j.get("frameStart") > 1: + if min_frame_j > 1: hid_section = visibility_track.add_section() hid_section.set_range( 1, - data_j.get("frameStart")) + min_frame_j) hid_section.set_visibility(unreal.LevelVisibility.HIDDEN) hid_section.set_row_index(index) hid_section.set_level_names(maps) - if data_j.get("frameEnd") < data_i.get("frameEnd"): + if max_frame_j < max_frame_i: hid_section = visibility_track.add_section() hid_section.set_range( - data_j.get("frameEnd") + 1, - data_i.get("frameEnd") + 1) + max_frame_j + 1, + max_frame_i + 1) hid_section.set_visibility(unreal.LevelVisibility.HIDDEN) hid_section.set_row_index(index) hid_section.set_level_names(maps) @@ -515,6 +517,7 @@ class LayoutLoader(api.Loader): # Get all the sequences in the hierarchy. It will create them, if # they don't exist. 
sequences = [] + frame_ranges = [] i = 0 for h in hierarchy_list: root_content = EditorAssetLibrary.list_assets( @@ -563,12 +566,16 @@ class LayoutLoader(api.Loader): "data.visualParent": e.get('_id') })) + min_frame = min(start_frames) + max_frame = max(end_frames) + scene.set_display_rate( unreal.FrameRate(asset_data.get('data').get("fps"), 1.0)) - scene.set_playback_start(min(start_frames)) - scene.set_playback_end(max(end_frames)) + scene.set_playback_start(min_frame) + scene.set_playback_end(max_frame) sequences.append(scene) + frame_ranges.append((min_frame, max_frame)) else: for e in existing_sequences: sequences.append(e.get_asset()) @@ -582,28 +589,23 @@ class LayoutLoader(api.Loader): factory=unreal.LevelSequenceFactoryNew() ) - # Add sequences data to hierarchy - data_i = self._get_data(sequences[0].get_name()) - + # sequences and frame_ranges have the same length for i in range(0, len(sequences) - 1): maps_to_add = [] for j in range(i + 1, len(maps)): maps_to_add.append(maps[j].get('map')) - data_j = self._get_data(sequences[i + 1].get_name()) - self._set_sequence_hierarchy( sequences[i], sequences[i + 1], - data_i, data_j, + frame_ranges[i][1], + frame_ranges[i + 1][0], frame_ranges[i + 1][1], maps_to_add) - data_i = data_j - - parent_data = self._get_data(sequences[-1].get_name()) data = self._get_data(asset) self._set_sequence_hierarchy( sequences[-1], shot, - parent_data, data, + frame_ranges[-1][1], + data.get('clipIn'), data.get('clipOut'), [maps[-1].get('map')]) EditorLevelLibrary.load_level(maps[-1].get('map')) From 55bcd6bba71f0ef191f2597a9aefc61c01639950 Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Wed, 23 Feb 2022 10:19:29 +0000 Subject: [PATCH 011/357] Set correct start and end frames for existing sequences --- openpype/hosts/unreal/plugins/load/load_layout.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/openpype/hosts/unreal/plugins/load/load_layout.py b/openpype/hosts/unreal/plugins/load/load_layout.py index 5d7977b237..58b6f661b9 100644 --- a/openpype/hosts/unreal/plugins/load/load_layout.py +++ b/openpype/hosts/unreal/plugins/load/load_layout.py @@ -579,6 +579,9 @@ class LayoutLoader(api.Loader): else: for e in existing_sequences: sequences.append(e.get_asset()) + frame_ranges.append(( + e.get_asset().get_playback_start(), + e.get_asset().get_playback_end())) i += 1 From 10c6d7be08aaeccc219dec51361c793ac495fcff Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Wed, 23 Feb 2022 10:20:30 +0000 Subject: [PATCH 012/357] Add an empty Camera Cut Track to the sequences in the hierarchy --- .../hosts/unreal/plugins/load/load_layout.py | 27 ++++++++++--------- 1 file changed, 15 insertions(+), 12 deletions(-) diff --git a/openpype/hosts/unreal/plugins/load/load_layout.py b/openpype/hosts/unreal/plugins/load/load_layout.py index 58b6f661b9..05615ff083 100644 --- a/openpype/hosts/unreal/plugins/load/load_layout.py +++ b/openpype/hosts/unreal/plugins/load/load_layout.py @@ -530,15 +530,8 @@ class LayoutLoader(api.Loader): asset).get_class().get_name() == 'LevelSequence' ] - # for asset in root_content: - # asset_data = EditorAssetLibrary.find_asset_data(asset) - # # imported_asset = unreal.AssetRegistryHelpers.get_asset( - # # imported_asset_data) - # if asset_data.get_class().get_name() == 'LevelSequence': - # break - if not existing_sequences: - scene = tools.create_asset( + sequence = tools.create_asset( asset_name=hierarchy[i], package_path=h, asset_class=unreal.LevelSequence, @@ -569,13 +562,23 @@ class LayoutLoader(api.Loader): min_frame = 
min(start_frames) max_frame = max(end_frames) - scene.set_display_rate( + sequence.set_display_rate( unreal.FrameRate(asset_data.get('data').get("fps"), 1.0)) - scene.set_playback_start(min_frame) - scene.set_playback_end(max_frame) + sequence.set_playback_start(min_frame) + sequence.set_playback_end(max_frame) - sequences.append(scene) + sequences.append(sequence) frame_ranges.append((min_frame, max_frame)) + + tracks = sequence.get_master_tracks() + track = None + for t in tracks: + if t.get_class() == unreal.MovieSceneCameraCutTrack.static_class(): + track = t + break + if not track: + track = sequence.add_master_track( + unreal.MovieSceneCameraCutTrack) else: for e in existing_sequences: sequences.append(e.get_asset()) From 6bb615e2a04d47068b091290fcbac6481c1fa01e Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Wed, 23 Feb 2022 11:31:19 +0000 Subject: [PATCH 013/357] Set correct start and end frame for camera sequences --- .../hosts/unreal/plugins/load/load_camera.py | 33 +++++++++++-------- 1 file changed, 19 insertions(+), 14 deletions(-) diff --git a/openpype/hosts/unreal/plugins/load/load_camera.py b/openpype/hosts/unreal/plugins/load/load_camera.py index 6ee88f8acc..12aaceb385 100644 --- a/openpype/hosts/unreal/plugins/load/load_camera.py +++ b/openpype/hosts/unreal/plugins/load/load_camera.py @@ -23,7 +23,9 @@ class CameraLoader(api.Loader): return asset_doc.get("data") - def _set_sequence_hierarchy(self, seq_i, seq_j, data_i, data_j): + def _set_sequence_hierarchy(self, + seq_i, seq_j, min_frame_j, max_frame_j + ): tracks = seq_i.get_master_tracks() track = None for t in tracks: @@ -44,8 +46,8 @@ class CameraLoader(api.Loader): subscene.set_row_index(len(track.get_sections())) subscene.set_editor_property('sub_sequence', seq_j) subscene.set_range( - data_j.get("frameStart"), - data_j.get("frameEnd") + 1) + min_frame_j, + max_frame_j + 1) def load(self, context, name, namespace, data): """ @@ -129,6 +131,7 @@ class CameraLoader(api.Loader): # Get all the sequences in the hierarchy. It will create them, if # they don't exist. 
sequences = [] + frame_ranges = [] i = 0 for h in hierarchy_list: root_content = unreal.EditorAssetLibrary.list_assets( @@ -170,15 +173,22 @@ class CameraLoader(api.Loader): "data.visualParent": e.get('_id') })) + min_frame = min(start_frames) + max_frame = max(end_frames) + scene.set_display_rate( unreal.FrameRate(asset_data.get('data').get("fps"), 1.0)) - scene.set_playback_start(min(start_frames)) - scene.set_playback_end(max(end_frames)) + scene.set_playback_start(min_frame) + scene.set_playback_end(max_frame) sequences.append(scene) + frame_ranges.append((min_frame, max_frame)) else: for e in existing_sequences: sequences.append(e.get_asset()) + frame_ranges.append(( + e.get_asset().get_playback_start(), + e.get_asset().get_playback_end())) i += 1 @@ -192,20 +202,15 @@ class CameraLoader(api.Loader): ) # Add sequences data to hierarchy - data_i = self._get_data(sequences[0].get_name()) - for i in range(0, len(sequences) - 1): - data_j = self._get_data(sequences[i + 1].get_name()) - self._set_sequence_hierarchy( - sequences[i], sequences[i + 1], data_i, data_j) + sequences[i], sequences[i + 1], + frame_ranges[i + 1][0], frame_ranges[i + 1][1]) - data_i = data_j - - parent_data = self._get_data(sequences[-1].get_name()) data = self._get_data(asset) self._set_sequence_hierarchy( - sequences[-1], cam_seq, parent_data, data) + sequences[-1], cam_seq, + data.get('clipIn'), data.get('clipOut')) settings = unreal.MovieSceneUserImportFBXSettings() settings.set_editor_property('reduce_keys', False) From 35f5e4a8408fb52e1c0d1d318416e922a55f32f5 Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Wed, 2 Mar 2022 11:17:58 +0000 Subject: [PATCH 014/357] Render from the master sequence and output keeps the hierarchy --- openpype/hosts/unreal/api/rendering.py | 99 +++++++++++++------ .../unreal/plugins/create/create_render.py | 62 +++++++++++- 2 files changed, 129 insertions(+), 32 deletions(-) diff --git a/openpype/hosts/unreal/api/rendering.py b/openpype/hosts/unreal/api/rendering.py index 7c58987c0d..8eb4e1e75a 100644 --- a/openpype/hosts/unreal/api/rendering.py +++ b/openpype/hosts/unreal/api/rendering.py @@ -1,11 +1,11 @@ import avalon.unreal.pipeline as pipeline -import avalon.unreal.lib as lib import unreal queue = None executor = None + def _queue_finish_callback(exec, success): unreal.log("Render completed. Success: " + str(success)) @@ -45,41 +45,84 @@ def start_rendering(): if data["family"] == "render": inst_data.append(data) - # subsystem = unreal.get_editor_subsystem(unreal.MoviePipelineQueueSubsystem) + # subsystem = unreal.get_editor_subsystem( + # unreal.MoviePipelineQueueSubsystem) # queue = subsystem.get_queue() global queue queue = unreal.MoviePipelineQueue() + ar = unreal.AssetRegistryHelpers.get_asset_registry() + for i in inst_data: - job = queue.allocate_new_job(unreal.MoviePipelineExecutorJob) - job.sequence = unreal.SoftObjectPath(i["sequence"]) - job.map = unreal.SoftObjectPath(i["map"]) - job.author = "OpenPype" + sequence = ar.get_asset_by_object_path(i["sequence"]).get_asset() - # User data could be used to pass data to the job, that can be read - # in the job's OnJobFinished callback. We could, for instance, - # pass the AvalonPublishInstance's path to the job. 
- # job.user_data = "" + sequences = [{ + "sequence": sequence, + "output": f"{i['subset']}/{sequence.get_name()}", + "frame_range": ( + int(float(i["startFrame"])), + int(float(i["endFrame"])) + 1) + }] + render_list = [] - output_setting = job.get_configuration().find_or_add_setting_by_class( - unreal.MoviePipelineOutputSetting) - output_setting.output_resolution = unreal.IntPoint(1280, 720) - output_setting.file_name_format = "{sequence_name}.{frame_number}" - output_setting.output_directory.path += f"{i['subset']}/" + # Get all the sequences to render. If there are subsequences, + # add them and their frame ranges to the render list. We also + # use the names for the output paths. + for s in sequences: + tracks = s.get('sequence').get_master_tracks() + subscene_track = None + for t in tracks: + if t.get_class() == unreal.MovieSceneSubTrack.static_class(): + subscene_track = t + if subscene_track is not None and subscene_track.get_sections(): + subscenes = subscene_track.get_sections() - renderPass = job.get_configuration().find_or_add_setting_by_class( - unreal.MoviePipelineDeferredPassBase) - renderPass.disable_multisample_effects = True + for ss in subscenes: + sequences.append({ + "sequence": ss.get_sequence(), + "output": f"{s.get('output')}/{ss.get_sequence().get_name()}", + "frame_range": ( + ss.get_start_frame(), ss.get_end_frame()) + }) + else: + # Avoid rendering camera sequences + if "_camera" not in s.get('sequence').get_name(): + render_list.append(s) - job.get_configuration().find_or_add_setting_by_class( - unreal.MoviePipelineImageSequenceOutput_PNG) + # Create the rendering jobs and add them to the queue. + for r in render_list: + job = queue.allocate_new_job(unreal.MoviePipelineExecutorJob) + job.sequence = unreal.SoftObjectPath(i["master_sequence"]) + job.map = unreal.SoftObjectPath(i["master_level"]) + job.author = "OpenPype" - # TODO: check if queue is empty + # User data could be used to pass data to the job, that can be + # read in the job's OnJobFinished callback. We could, + # for instance, pass the AvalonPublishInstance's path to the job. + # job.user_data = "" - global executor - executor = unreal.MoviePipelinePIEExecutor() - executor.on_executor_finished_delegate.add_callable_unique( - _queue_finish_callback) - executor.on_individual_job_finished_delegate.add_callable_unique( - _job_finish_callback) # Only available on PIE Executor - executor.execute(queue) + settings = job.get_configuration().find_or_add_setting_by_class( + unreal.MoviePipelineOutputSetting) + settings.output_resolution = unreal.IntPoint(1920, 1080) + settings.custom_start_frame = r.get("frame_range")[0] + settings.custom_end_frame = r.get("frame_range")[1] + settings.use_custom_playback_range = True + settings.file_name_format = "{sequence_name}.{frame_number}" + settings.output_directory.path += r.get('output') + + renderPass = job.get_configuration().find_or_add_setting_by_class( + unreal.MoviePipelineDeferredPassBase) + renderPass.disable_multisample_effects = True + + job.get_configuration().find_or_add_setting_by_class( + unreal.MoviePipelineImageSequenceOutput_PNG) + + # If there are jobs in the queue, start the rendering process. 
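# The sequence-gathering loop above extends `sequences` while iterating over
# it, which is how the nested shot structure gets flattened into `render_list`.
# An equivalent, more explicit sketch with a work queue, using only the calls
# already used in this function (the helper name is illustrative):
from collections import deque

def _collect_renderable_sequences(master_sequence, subset, start_frame, end_frame):
    """Walk MovieSceneSubTrack sections breadth-first; sequences without a
    sub track, and not named as camera sequences, are the ones rendered."""
    to_visit = deque([{
        "sequence": master_sequence,
        "output": f"{subset}/{master_sequence.get_name()}",
        "frame_range": (start_frame, end_frame),
    }])
    render_list = []
    while to_visit:
        current = to_visit.popleft()
        sub_track = None
        for track in current["sequence"].get_master_tracks():
            if track.get_class() == unreal.MovieSceneSubTrack.static_class():
                sub_track = track
        sections = sub_track.get_sections() if sub_track else []
        if sections:
            for section in sections:
                sub_seq = section.get_sequence()
                to_visit.append({
                    "sequence": sub_seq,
                    "output": f"{current['output']}/{sub_seq.get_name()}",
                    "frame_range": (section.get_start_frame(),
                                    section.get_end_frame()),
                })
        elif "_camera" not in current["sequence"].get_name():
            render_list.append(current)
    return render_list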
+ if queue.get_jobs(): + global executor + executor = unreal.MoviePipelinePIEExecutor() + executor.on_executor_finished_delegate.add_callable_unique( + _queue_finish_callback) + executor.on_individual_job_finished_delegate.add_callable_unique( + _job_finish_callback) # Only available on PIE Executor + executor.execute(queue) diff --git a/openpype/hosts/unreal/plugins/create/create_render.py b/openpype/hosts/unreal/plugins/create/create_render.py index 0128808a70..de092c4dd7 100644 --- a/openpype/hosts/unreal/plugins/create/create_render.py +++ b/openpype/hosts/unreal/plugins/create/create_render.py @@ -1,5 +1,8 @@ import unreal + from openpype.hosts.unreal.api.plugin import Creator + +from avalon import io from avalon.unreal import pipeline @@ -21,7 +24,22 @@ class CreateRender(Creator): def process(self): name = self.data["subset"] - print(self.data) + ar = unreal.AssetRegistryHelpers.get_asset_registry() + + # Get the master sequence and the master level. + # There should be only one sequence and one level in the directory. + filter = unreal.ARFilter( + class_names = ["LevelSequence"], + package_paths = [f"/Game/Avalon/{self.data['asset']}"], + recursive_paths = False) + sequences = ar.get_assets(filter) + ms = sequences[0].object_path + filter = unreal.ARFilter( + class_names = ["World"], + package_paths = [f"/Game/Avalon/{self.data['asset']}"], + recursive_paths = False) + levels = ar.get_assets(filter) + ml = levels[0].object_path selection = [] if (self.options or {}).get("useSelection"): @@ -46,8 +64,44 @@ class CreateRender(Creator): d = self.data.copy() d["members"] = [a] d["sequence"] = a - d["map"] = unreal.EditorLevelLibrary.get_editor_world().get_path_name() + d["master_sequence"] = ms + d["master_level"] = ml asset = ar.get_asset_by_object_path(a).get_asset() - container_name = f"{asset.get_name()}{self.suffix}" - pipeline.create_publish_instance(instance=container_name, path=path) + asset_name = asset.get_name() + + # Get frame range. We need to go through the hierarchy and check + # the frame range for the children. 
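# The two asset-registry queries above differ only in the class name. A small
# sketch of a shared lookup (the helper name is illustrative; it assumes the
# folder holds exactly one asset of the class, as the comment above states):
def _first_asset_path(class_name, package_path):
    ar = unreal.AssetRegistryHelpers.get_asset_registry()
    ar_filter = unreal.ARFilter(
        class_names=[class_name],
        package_paths=[package_path],
        recursive_paths=False)
    assets = ar.get_assets(ar_filter)
    return assets[0].object_path if assets else None

# e.g.:
# ms = _first_asset_path("LevelSequence", f"/Game/Avalon/{self.data['asset']}")
# ml = _first_asset_path("World", f"/Game/Avalon/{self.data['asset']}")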
+ asset_data = io.find_one({ + "type": "asset", + "name": asset_name + }) + id = asset_data.get('_id') + + elements = list( + io.find({"type": "asset", "data.visualParent": id})) + + if elements: + start_frames = [] + end_frames = [] + for e in elements: + start_frames.append(e.get('data').get('clipIn')) + end_frames.append(e.get('data').get('clipOut')) + + elements.extend(io.find({ + "type": "asset", + "data.visualParent": e.get('_id') + })) + + min_frame = min(start_frames) + max_frame = max(end_frames) + else: + min_frame = asset_data.get('data').get('clipIn') + max_frame = asset_data.get('data').get('clipOut') + + d["startFrame"] = min_frame + d["endFrame"] = max_frame + + container_name = f"{asset_name}{self.suffix}" + pipeline.create_publish_instance( + instance=container_name, path=path) pipeline.imprint("{}/{}".format(path, container_name), d) From 5d8bac337f1631f4653d19b5e59f40ee406247c0 Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Thu, 3 Mar 2022 10:37:02 +0000 Subject: [PATCH 015/357] Fixed frame range and frame rate for shot sequences --- openpype/hosts/unreal/plugins/load/load_camera.py | 4 ++++ openpype/hosts/unreal/plugins/load/load_layout.py | 4 ++++ 2 files changed, 8 insertions(+) diff --git a/openpype/hosts/unreal/plugins/load/load_camera.py b/openpype/hosts/unreal/plugins/load/load_camera.py index 12aaceb385..cea59ae93f 100644 --- a/openpype/hosts/unreal/plugins/load/load_camera.py +++ b/openpype/hosts/unreal/plugins/load/load_camera.py @@ -208,6 +208,10 @@ class CameraLoader(api.Loader): frame_ranges[i + 1][0], frame_ranges[i + 1][1]) data = self._get_data(asset) + cam_seq.set_display_rate( + unreal.FrameRate(data.get("fps"), 1.0)) + cam_seq.set_playback_start(0) + cam_seq.set_playback_end(data.get('clipOut') - data.get('clipIn') + 1) self._set_sequence_hierarchy( sequences[-1], cam_seq, data.get('clipIn'), data.get('clipOut')) diff --git a/openpype/hosts/unreal/plugins/load/load_layout.py b/openpype/hosts/unreal/plugins/load/load_layout.py index 05615ff083..5a976a1fb5 100644 --- a/openpype/hosts/unreal/plugins/load/load_layout.py +++ b/openpype/hosts/unreal/plugins/load/load_layout.py @@ -608,6 +608,10 @@ class LayoutLoader(api.Loader): maps_to_add) data = self._get_data(asset) + shot.set_display_rate( + unreal.FrameRate(data.get("fps"), 1.0)) + shot.set_playback_start(0) + shot.set_playback_end(data.get('clipOut') - data.get('clipIn') + 1) self._set_sequence_hierarchy( sequences[-1], shot, frame_ranges[-1][1], From 468a5e145ce138c9470db35a59612e68bcebcb4e Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Thu, 3 Mar 2022 11:05:11 +0000 Subject: [PATCH 016/357] Hound fixes --- openpype/hosts/unreal/api/rendering.py | 3 +- .../hosts/unreal/plugins/load/load_camera.py | 69 ++++++++++--------- .../hosts/unreal/plugins/load/load_layout.py | 42 ++++++----- .../unreal/plugins/publish/extract_render.py | 8 ++- 4 files changed, 63 insertions(+), 59 deletions(-) diff --git a/openpype/hosts/unreal/api/rendering.py b/openpype/hosts/unreal/api/rendering.py index 8eb4e1e75a..3ed77cc651 100644 --- a/openpype/hosts/unreal/api/rendering.py +++ b/openpype/hosts/unreal/api/rendering.py @@ -80,7 +80,8 @@ def start_rendering(): for ss in subscenes: sequences.append({ "sequence": ss.get_sequence(), - "output": f"{s.get('output')}/{ss.get_sequence().get_name()}", + "output": (f"{s.get('output')}/" + f"{ss.get_sequence().get_name()}"), "frame_range": ( ss.get_start_frame(), ss.get_end_frame()) }) diff --git a/openpype/hosts/unreal/plugins/load/load_camera.py 
b/openpype/hosts/unreal/plugins/load/load_camera.py index cea59ae93f..f93de0a79a 100644 --- a/openpype/hosts/unreal/plugins/load/load_camera.py +++ b/openpype/hosts/unreal/plugins/load/load_camera.py @@ -1,9 +1,12 @@ import os +import unreal +from unreal import EditorAssetLibrary +from unreal import EditorLevelLibrary + from avalon import api, io, pipeline from avalon.unreal import lib from avalon.unreal import pipeline as unreal_pipeline -import unreal class CameraLoader(api.Loader): @@ -23,8 +26,8 @@ class CameraLoader(api.Loader): return asset_doc.get("data") - def _set_sequence_hierarchy(self, - seq_i, seq_j, min_frame_j, max_frame_j + def _set_sequence_hierarchy( + self, seq_i, seq_j, min_frame_j, max_frame_j ): tracks = seq_i.get_master_tracks() track = None @@ -91,8 +94,8 @@ class CameraLoader(api.Loader): # Create a unique name for the camera directory unique_number = 1 - if unreal.EditorAssetLibrary.does_directory_exist(f"{hierarchy_dir}/{asset}"): - asset_content = unreal.EditorAssetLibrary.list_assets( + if EditorAssetLibrary.does_directory_exist(f"{hierarchy_dir}/{asset}"): + asset_content = EditorAssetLibrary.list_assets( f"{root}/{asset}", recursive=False, include_folder=True ) @@ -115,32 +118,32 @@ class CameraLoader(api.Loader): container_name += suffix - current_level = unreal.EditorLevelLibrary.get_editor_world().get_full_name() - unreal.EditorLevelLibrary.save_all_dirty_levels() + current_level = EditorLevelLibrary.get_editor_world().get_full_name() + EditorLevelLibrary.save_all_dirty_levels() ar = unreal.AssetRegistryHelpers.get_asset_registry() filter = unreal.ARFilter( - class_names = ["World"], - package_paths = [f"{hierarchy_dir}/{asset}/"], - recursive_paths = True) + class_names=["World"], + package_paths=[f"{hierarchy_dir}/{asset}/"], + recursive_paths=True) maps = ar.get_assets(filter) # There should be only one map in the list - unreal.EditorLevelLibrary.load_level(maps[0].get_full_name()) + EditorLevelLibrary.load_level(maps[0].get_full_name()) - # Get all the sequences in the hierarchy. It will create them, if + # Get all the sequences in the hierarchy. It will create them, if # they don't exist. 
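# The loader records the current level, saves dirty levels, loads the shot map
# and, further down, restores the original level. A sketch of that sequence as
# a context manager, using only the EditorLevelLibrary calls imported at the
# top of this module (the helper is illustrative and assumes nothing else
# switches levels while it is active):
import contextlib

@contextlib.contextmanager
def _temporarily_loaded_level(level_path):
    previous = EditorLevelLibrary.get_editor_world().get_full_name()
    EditorLevelLibrary.save_all_dirty_levels()
    EditorLevelLibrary.load_level(level_path)
    try:
        yield
    finally:
        EditorLevelLibrary.save_all_dirty_levels()
        EditorLevelLibrary.load_level(previous)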
sequences = [] frame_ranges = [] i = 0 for h in hierarchy_list: - root_content = unreal.EditorAssetLibrary.list_assets( + root_content = EditorAssetLibrary.list_assets( h, recursive=False, include_folder=False) existing_sequences = [ - unreal.EditorAssetLibrary.find_asset_data(asset) + EditorAssetLibrary.find_asset_data(asset) for asset in root_content - if unreal.EditorAssetLibrary.find_asset_data( + if EditorAssetLibrary.find_asset_data( asset).get_class().get_name() == 'LevelSequence' ] @@ -192,7 +195,7 @@ class CameraLoader(api.Loader): i += 1 - unreal.EditorAssetLibrary.make_directory(asset_dir) + EditorAssetLibrary.make_directory(asset_dir) cam_seq = tools.create_asset( asset_name=f"{asset}_camera", @@ -213,15 +216,15 @@ class CameraLoader(api.Loader): cam_seq.set_playback_start(0) cam_seq.set_playback_end(data.get('clipOut') - data.get('clipIn') + 1) self._set_sequence_hierarchy( - sequences[-1], cam_seq, - data.get('clipIn'), data.get('clipOut')) + sequences[-1], cam_seq, + data.get('clipIn'), data.get('clipOut')) settings = unreal.MovieSceneUserImportFBXSettings() settings.set_editor_property('reduce_keys', False) if cam_seq: unreal.SequencerTools.import_fbx( - unreal.EditorLevelLibrary.get_editor_world(), + EditorLevelLibrary.get_editor_world(), cam_seq, cam_seq.get_bindings(), settings, @@ -246,15 +249,15 @@ class CameraLoader(api.Loader): unreal_pipeline.imprint( "{}/{}".format(asset_dir, container_name), data) - unreal.EditorLevelLibrary.save_all_dirty_levels() - unreal.EditorLevelLibrary.load_level(current_level) + EditorLevelLibrary.save_all_dirty_levels() + EditorLevelLibrary.load_level(current_level) - asset_content = unreal.EditorAssetLibrary.list_assets( + asset_content = EditorAssetLibrary.list_assets( asset_dir, recursive=True, include_folder=True ) for a in asset_content: - unreal.EditorAssetLibrary.save_asset(a) + EditorAssetLibrary.save_asset(a) return asset_content @@ -264,25 +267,25 @@ class CameraLoader(api.Loader): ar = unreal.AssetRegistryHelpers.get_asset_registry() tools = unreal.AssetToolsHelpers().get_asset_tools() - asset_content = unreal.EditorAssetLibrary.list_assets( + asset_content = EditorAssetLibrary.list_assets( path, recursive=False, include_folder=False ) asset_name = "" for a in asset_content: asset = ar.get_asset_by_object_path(a) if a.endswith("_CON"): - loaded_asset = unreal.EditorAssetLibrary.load_asset(a) - unreal.EditorAssetLibrary.set_metadata_tag( + loaded_asset = EditorAssetLibrary.load_asset(a) + EditorAssetLibrary.set_metadata_tag( loaded_asset, "representation", str(representation["_id"]) ) - unreal.EditorAssetLibrary.set_metadata_tag( + EditorAssetLibrary.set_metadata_tag( loaded_asset, "parent", str(representation["parent"]) ) - asset_name = unreal.EditorAssetLibrary.get_metadata_tag( + asset_name = EditorAssetLibrary.get_metadata_tag( loaded_asset, "asset_name" ) elif asset.asset_class == "LevelSequence": - unreal.EditorAssetLibrary.delete_asset(a) + EditorAssetLibrary.delete_asset(a) sequence = tools.create_asset( asset_name=asset_name, @@ -308,7 +311,7 @@ class CameraLoader(api.Loader): settings.set_editor_property('reduce_keys', False) unreal.SequencerTools.import_fbx( - unreal.EditorLevelLibrary.get_editor_world(), + EditorLevelLibrary.get_editor_world(), sequence, sequence.get_bindings(), settings, @@ -319,11 +322,11 @@ class CameraLoader(api.Loader): path = container["namespace"] parent_path = os.path.dirname(path) - unreal.EditorAssetLibrary.delete_directory(path) + EditorAssetLibrary.delete_directory(path) - asset_content = 
unreal.EditorAssetLibrary.list_assets( + asset_content = EditorAssetLibrary.list_assets( parent_path, recursive=False, include_folder=True ) if len(asset_content) == 0: - unreal.EditorAssetLibrary.delete_directory(parent_path) + EditorAssetLibrary.delete_directory(parent_path) diff --git a/openpype/hosts/unreal/plugins/load/load_layout.py b/openpype/hosts/unreal/plugins/load/load_layout.py index 5a976a1fb5..e25f06ad42 100644 --- a/openpype/hosts/unreal/plugins/load/load_layout.py +++ b/openpype/hosts/unreal/plugins/load/load_layout.py @@ -83,8 +83,8 @@ class LayoutLoader(api.Loader): return asset_doc.get("data") - def _set_sequence_hierarchy(self, - seq_i, seq_j, max_frame_i, min_frame_j, max_frame_j, map_paths + def _set_sequence_hierarchy( + self, seq_i, seq_j, max_frame_i, min_frame_j, max_frame_j, map_paths ): # Get existing sequencer tracks or create them if they don't exist tracks = seq_i.get_master_tracks() @@ -93,12 +93,14 @@ class LayoutLoader(api.Loader): for t in tracks: if t.get_class() == unreal.MovieSceneSubTrack.static_class(): subscene_track = t - if t.get_class() == unreal.MovieSceneLevelVisibilityTrack.static_class(): + if (t.get_class() == + unreal.MovieSceneLevelVisibilityTrack.static_class()): visibility_track = t if not subscene_track: subscene_track = seq_i.add_master_track(unreal.MovieSceneSubTrack) if not visibility_track: - visibility_track = seq_i.add_master_track(unreal.MovieSceneLevelVisibilityTrack) + visibility_track = seq_i.add_master_track( + unreal.MovieSceneLevelVisibilityTrack) # Create the sub-scene section subscenes = subscene_track.get_sections() @@ -152,7 +154,8 @@ class LayoutLoader(api.Loader): hid_section.set_level_names(maps) def _process_family( - self, assets, classname, transform, sequence, inst_name=None): + self, assets, classname, transform, sequence, inst_name=None + ): ar = unreal.AssetRegistryHelpers.get_asset_registry() actors = [] @@ -189,16 +192,15 @@ class LayoutLoader(api.Loader): actors.append(actor) binding = sequence.add_possessable(actor) - # root_component_binding = sequence.add_possessable(actor.root_component) - # root_component_binding.set_parent(binding) bindings.append(binding) return actors, bindings def _import_animation( - self, asset_dir, path, instance_name, skeleton, actors_dict, - animation_file, bindings_dict, sequence): + self, asset_dir, path, instance_name, skeleton, actors_dict, + animation_file, bindings_dict, sequence + ): anim_file = Path(animation_file) anim_file_name = anim_file.with_suffix('') @@ -389,14 +391,9 @@ class LayoutLoader(api.Loader): if animation_file and skeleton: self._import_animation( - asset_dir, path, instance_name, skeleton, actors_dict, + asset_dir, path, instance_name, skeleton, actors_dict, animation_file, bindings_dict, sequence) - # track = sequence.add_master_track( - # unreal.MovieSceneActorReferenceTrack) - # section = track.add_section() - # section.set_editor_property('sub_sequence', sequence) - def _remove_family(self, assets, components, classname, propname): ar = unreal.AssetRegistryHelpers.get_asset_registry() @@ -499,7 +496,7 @@ class LayoutLoader(api.Loader): EditorLevelLibrary.new_level(f"{asset_dir}/{asset}_map") maps.append( - {"map":f"{asset_dir}/{asset}_map.{asset}_map", "new": True}) + {"map": f"{asset_dir}/{asset}_map.{asset}_map", "new": True}) for i in range(0, len(maps) - 1): for j in range(i + 1, len(maps)): @@ -514,7 +511,7 @@ class LayoutLoader(api.Loader): EditorLevelLibrary.load_level(maps[-1].get('map')) - # Get all the sequences in the hierarchy. 
It will create them, if + # Get all the sequences in the hierarchy. It will create them, if # they don't exist. sequences = [] frame_ranges = [] @@ -573,7 +570,8 @@ class LayoutLoader(api.Loader): tracks = sequence.get_master_tracks() track = None for t in tracks: - if t.get_class() == unreal.MovieSceneCameraCutTrack.static_class(): + if (t.get_class() == + unreal.MovieSceneCameraCutTrack.static_class()): track = t break if not track: @@ -613,10 +611,10 @@ class LayoutLoader(api.Loader): shot.set_playback_start(0) shot.set_playback_end(data.get('clipOut') - data.get('clipIn') + 1) self._set_sequence_hierarchy( - sequences[-1], shot, - frame_ranges[-1][1], - data.get('clipIn'), data.get('clipOut'), - [maps[-1].get('map')]) + sequences[-1], shot, + frame_ranges[-1][1], + data.get('clipIn'), data.get('clipOut'), + [maps[-1].get('map')]) EditorLevelLibrary.load_level(maps[-1].get('map')) diff --git a/openpype/hosts/unreal/plugins/publish/extract_render.py b/openpype/hosts/unreal/plugins/publish/extract_render.py index 7ba53c9155..37fe7e916f 100644 --- a/openpype/hosts/unreal/plugins/publish/extract_render.py +++ b/openpype/hosts/unreal/plugins/publish/extract_render.py @@ -1,8 +1,9 @@ from pathlib import Path -import openpype.api -from avalon import io + import unreal +import openpype.api + class ExtractRender(openpype.api.Extractor): """Extract render.""" @@ -21,7 +22,8 @@ class ExtractRender(openpype.api.Extractor): # Get the render output directory project_dir = unreal.Paths.project_dir() - render_dir = f"{project_dir}/Saved/MovieRenders/{instance.data['subset']}" + render_dir = (f"{project_dir}/Saved/MovieRenders/" + f"{instance.data['subset']}") assert unreal.Paths.directory_exists(render_dir), \ "Render directory does not exist" From df02c64f18ec3b8e3bca1b07b99de93a5629a634 Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Thu, 3 Mar 2022 11:07:23 +0000 Subject: [PATCH 017/357] More hound fixes --- .../hosts/unreal/plugins/create/create_render.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/openpype/hosts/unreal/plugins/create/create_render.py b/openpype/hosts/unreal/plugins/create/create_render.py index de092c4dd7..e6c233a2c5 100644 --- a/openpype/hosts/unreal/plugins/create/create_render.py +++ b/openpype/hosts/unreal/plugins/create/create_render.py @@ -29,15 +29,15 @@ class CreateRender(Creator): # Get the master sequence and the master level. # There should be only one sequence and one level in the directory. 
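# Patch 015 gives camera and shot sequences a local playback range starting at
# 0, while the enclosing section is placed at clipIn..clipOut in the parent
# sequence. The frame mapping in one place, as a sketch (the helper name is
# illustrative; clipIn/clipOut come from the asset data as above):
def _shot_frame_ranges(data):
    """Return (local_start, local_end), (parent_start, parent_end)."""
    clip_in = data.get('clipIn')
    clip_out = data.get('clipOut')
    local = (0, clip_out - clip_in + 1)   # duration in the shot's own space
    parent = (clip_in, clip_out)          # placement in the parent sequence
    return local, parent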
filter = unreal.ARFilter( - class_names = ["LevelSequence"], - package_paths = [f"/Game/Avalon/{self.data['asset']}"], - recursive_paths = False) + class_names=["LevelSequence"], + package_paths=[f"/Game/Avalon/{self.data['asset']}"], + recursive_paths=False) sequences = ar.get_assets(filter) ms = sequences[0].object_path filter = unreal.ARFilter( - class_names = ["World"], - package_paths = [f"/Game/Avalon/{self.data['asset']}"], - recursive_paths = False) + class_names=["World"], + package_paths=[f"/Game/Avalon/{self.data['asset']}"], + recursive_paths=False) levels = ar.get_assets(filter) ml = levels[0].object_path From 7f83c8a2d028ddeeb772d1bcc7e4a0348568ee25 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 3 Mar 2022 15:21:42 +0100 Subject: [PATCH 018/357] OP-2765 - added methods for New Publisher Removed uuid, replaced with instance_id or first members item --- openpype/hosts/aftereffects/api/__init__.py | 8 ++++- openpype/hosts/aftereffects/api/pipeline.py | 39 +++++++++++++++------ openpype/hosts/aftereffects/api/ws_stub.py | 20 +++++------ 3 files changed, 45 insertions(+), 22 deletions(-) diff --git a/openpype/hosts/aftereffects/api/__init__.py b/openpype/hosts/aftereffects/api/__init__.py index cea1bdc023..2ad1255d27 100644 --- a/openpype/hosts/aftereffects/api/__init__.py +++ b/openpype/hosts/aftereffects/api/__init__.py @@ -16,7 +16,10 @@ from .pipeline import ( uninstall, list_instances, remove_instance, - containerise + containerise, + get_context_data, + update_context_data, + get_context_title ) from .workio import ( @@ -51,6 +54,9 @@ __all__ = [ "list_instances", "remove_instance", "containerise", + "get_context_data", + "update_context_data", + "get_context_title", "file_extensions", "has_unsaved_changes", diff --git a/openpype/hosts/aftereffects/api/pipeline.py b/openpype/hosts/aftereffects/api/pipeline.py index 94f1e3d105..ea03542765 100644 --- a/openpype/hosts/aftereffects/api/pipeline.py +++ b/openpype/hosts/aftereffects/api/pipeline.py @@ -10,6 +10,7 @@ from avalon import io, pipeline from openpype import lib from openpype.api import Logger import openpype.hosts.aftereffects +from openpype.pipeline import BaseCreator from .launch_logic import get_stub @@ -67,6 +68,7 @@ def install(): avalon.api.register_plugin_path(avalon.api.Loader, LOAD_PATH) avalon.api.register_plugin_path(avalon.api.Creator, CREATE_PATH) + avalon.api.register_plugin_path(BaseCreator, CREATE_PATH) log.info(PUBLISH_PATH) pyblish.api.register_callback( @@ -238,12 +240,6 @@ def list_instances(): if instance.get("schema") and \ "container" in instance.get("schema"): continue - - uuid_val = instance.get("uuid") - if uuid_val: - instance['uuid'] = uuid_val - else: - instance['uuid'] = instance.get("members")[0] # legacy instances.append(instance) return instances @@ -265,8 +261,29 @@ def remove_instance(instance): if not stub: return - stub.remove_instance(instance.get("uuid")) - item = stub.get_item(instance.get("uuid")) - if item: - stub.rename_item(item.id, - item.name.replace(stub.PUBLISH_ICON, '')) + inst_id = instance.get("instance_id") + if not inst_id: + log.warning("No instance identifier for {}".format(instance)) + return + + stub.remove_instance(inst_id) + + if instance.members: + item = stub.get_item(instance.members[0]) + if item: + stub.rename_item(item.id, + item.name.replace(stub.PUBLISH_ICON, '')) + + +def get_context_data(): + print("get_context_data") + return {} + + +def update_context_data(data, changes): + print("update_context_data") + + +def get_context_title(): + """Returns 
title for Creator window""" + return "AfterEffects" diff --git a/openpype/hosts/aftereffects/api/ws_stub.py b/openpype/hosts/aftereffects/api/ws_stub.py index 5a0600e92e..d098419e81 100644 --- a/openpype/hosts/aftereffects/api/ws_stub.py +++ b/openpype/hosts/aftereffects/api/ws_stub.py @@ -28,6 +28,7 @@ class AEItem(object): workAreaDuration = attr.ib(default=None) frameRate = attr.ib(default=None) file_name = attr.ib(default=None) + instance_id = attr.ib(default=None) # New Publisher class AfterEffectsServerStub(): @@ -132,8 +133,9 @@ class AfterEffectsServerStub(): is_new = True for item_meta in items_meta: - if item_meta.get('members') \ - and str(item.id) == str(item_meta.get('members')[0]): + if ((item_meta.get('members') and + str(item.id) == str(item_meta.get('members')[0])) or + item_meta.get("instance_id") == item.id): is_new = False if data: item_meta.update(data) @@ -314,15 +316,12 @@ class AfterEffectsServerStub(): Keep matching item in file though. Args: - instance_id(string): instance uuid + instance_id(string): instance id """ cleaned_data = [] for instance in self.get_metadata(): - uuid_val = instance.get("uuid") - if not uuid_val: - uuid_val = instance.get("members")[0] # legacy - if uuid_val != instance_id: + if instance.get("instance_id") != instance_id: cleaned_data.append(instance) payload = json.dumps(cleaned_data, indent=4) @@ -357,7 +356,7 @@ class AfterEffectsServerStub(): item_id (int): Returns: - (namedtuple) + (AEItem) """ res = self.websocketserver.call(self.client.call @@ -418,7 +417,7 @@ class AfterEffectsServerStub(): """ Get render queue info for render purposes Returns: - (namedtuple): with 'file_name' field + (AEItem): with 'file_name' field """ res = self.websocketserver.call(self.client.call ('AfterEffects.get_render_info')) @@ -606,7 +605,8 @@ class AfterEffectsServerStub(): d.get('workAreaStart'), d.get('workAreaDuration'), d.get('frameRate'), - d.get('file_name')) + d.get('file_name'), + d.get("instance_id")) ret.append(item) return ret From 2af112571dd0435b639c78c4ccac9f185e1338e6 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 3 Mar 2022 15:26:56 +0100 Subject: [PATCH 019/357] OP-2765 - refactor - order of methods changed --- openpype/hosts/aftereffects/api/pipeline.py | 187 ++++++++++---------- 1 file changed, 96 insertions(+), 91 deletions(-) diff --git a/openpype/hosts/aftereffects/api/pipeline.py b/openpype/hosts/aftereffects/api/pipeline.py index ea03542765..1ec76fd9dd 100644 --- a/openpype/hosts/aftereffects/api/pipeline.py +++ b/openpype/hosts/aftereffects/api/pipeline.py @@ -27,39 +27,6 @@ CREATE_PATH = os.path.join(PLUGINS_DIR, "create") INVENTORY_PATH = os.path.join(PLUGINS_DIR, "inventory") -def check_inventory(): - if not lib.any_outdated(): - return - - host = pyblish.api.registered_host() - outdated_containers = [] - for container in host.ls(): - representation = container['representation'] - representation_doc = io.find_one( - { - "_id": io.ObjectId(representation), - "type": "representation" - }, - projection={"parent": True} - ) - if representation_doc and not lib.is_latest(representation_doc): - outdated_containers.append(container) - - # Warn about outdated containers. - print("Starting new QApplication..") - app = QtWidgets.QApplication(sys.argv) - - message_box = QtWidgets.QMessageBox() - message_box.setIcon(QtWidgets.QMessageBox.Warning) - msg = "There are outdated containers in the scene." 
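# With this change an instance's metadata is matched either by its first
# member id (comp-based instances) or by its instance_id (new-publisher
# instances that are not tied to a comp, such as workfiles). The predicate,
# factored out as a sketch (the helper name is illustrative):
def _meta_matches(item_meta, item_id):
    members = item_meta.get("members") or []
    if members and str(item_id) == str(members[0]):
        return True
    return item_meta.get("instance_id") == item_id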
- message_box.setText(msg) - message_box.exec_() - - -def application_launch(): - check_inventory() - - def install(): print("Installing Pype config...") @@ -84,6 +51,11 @@ def uninstall(): avalon.api.deregister_plugin_path(avalon.api.Creator, CREATE_PATH) +def application_launch(): + """Triggered after start of app""" + check_inventory() + + def on_pyblish_instance_toggled(instance, old_value, new_value): """Toggle layer visibility on instance toggles.""" instance[0].Visible = new_value @@ -118,6 +90,77 @@ def get_asset_settings(): } +# loaded containers section +def ls(): + """Yields containers from active AfterEffects document. + + This is the host-equivalent of api.ls(), but instead of listing + assets on disk, it lists assets already loaded in AE; once loaded + they are called 'containers'. Used in Manage tool. + + Containers could be on multiple levels, single images/videos/was as a + FootageItem, or multiple items - backgrounds (folder with automatically + created composition and all imported layers). + + Yields: + dict: container + + """ + try: + stub = get_stub() # only after AfterEffects is up + except lib.ConnectionNotEstablishedYet: + print("Not connected yet, ignoring") + return + + layers_meta = stub.get_metadata() + for item in stub.get_items(comps=True, + folders=True, + footages=True): + data = stub.read(item, layers_meta) + # Skip non-tagged layers. + if not data: + continue + + # Filter to only containers. + if "container" not in data["id"]: + continue + + # Append transient data + data["objectName"] = item.name.replace(stub.LOADED_ICON, '') + data["layer"] = item + yield data + + +def check_inventory(): + """Checks loaded containers if they are of highest version""" + if not lib.any_outdated(): + return + + host = pyblish.api.registered_host() + outdated_containers = [] + for container in host.ls(): + representation = container['representation'] + representation_doc = io.find_one( + { + "_id": io.ObjectId(representation), + "type": "representation" + }, + projection={"parent": True} + ) + if representation_doc and not lib.is_latest(representation_doc): + outdated_containers.append(container) + + # Warn about outdated containers. + print("Starting new QApplication..") + app = QtWidgets.QApplication(sys.argv) + + message_box = QtWidgets.QMessageBox() + message_box.setIcon(QtWidgets.QMessageBox.Warning) + msg = "There are outdated containers in the scene." + message_box.setText(msg) + message_box.exec_() + + def containerise(name, namespace, comp, @@ -159,64 +202,7 @@ def containerise(name, return comp -def _get_stub(): - """ - Handle pulling stub from PS to run operations on host - Returns: - (AEServerStub) or None - """ - try: - stub = get_stub() # only after Photoshop is up - except lib.ConnectionNotEstablishedYet: - print("Not connected yet, ignoring") - return - - if not stub.get_active_document_name(): - return - - return stub - - -def ls(): - """Yields containers from active AfterEffects document. - - This is the host-equivalent of api.ls(), but instead of listing - assets on disk, it lists assets already loaded in AE; once loaded - they are called 'containers'. Used in Manage tool. - - Containers could be on multiple levels, single images/videos/was as a - FootageItem, or multiple items - backgrounds (folder with automatically - created composition and all imported layers). 
- - Yields: - dict: container - - """ - try: - stub = get_stub() # only after AfterEffects is up - except lib.ConnectionNotEstablishedYet: - print("Not connected yet, ignoring") - return - - layers_meta = stub.get_metadata() - for item in stub.get_items(comps=True, - folders=True, - footages=True): - data = stub.read(item, layers_meta) - # Skip non-tagged layers. - if not data: - continue - - # Filter to only containers. - if "container" not in data["id"]: - continue - - # Append transient data - data["objectName"] = item.name.replace(stub.LOADED_ICON, '') - data["layer"] = item - yield data - - +# created instances section def list_instances(): """ List all created instances from current workfile which @@ -275,6 +261,7 @@ def remove_instance(instance): item.name.replace(stub.PUBLISH_ICON, '')) +# new publisher section def get_context_data(): print("get_context_data") return {} @@ -287,3 +274,21 @@ def update_context_data(data, changes): def get_context_title(): """Returns title for Creator window""" return "AfterEffects" + + +def _get_stub(): + """ + Handle pulling stub from PS to run operations on host + Returns: + (AEServerStub) or None + """ + try: + stub = get_stub() # only after Photoshop is up + except lib.ConnectionNotEstablishedYet: + print("Not connected yet, ignoring") + return + + if not stub.get_active_document_name(): + return + + return stub From a27119bee40d29725eea5493e1b2004d1813669d Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 3 Mar 2022 19:26:04 +0100 Subject: [PATCH 020/357] OP-2765 - renamed old creators --- ...ender.py => create_legacy_local_render.py} | 6 +- .../plugins/create/create_legacy_render.py | 62 +++++++++++++++++++ 2 files changed, 65 insertions(+), 3 deletions(-) rename openpype/hosts/aftereffects/plugins/create/{create_local_render.py => create_legacy_local_render.py} (57%) create mode 100644 openpype/hosts/aftereffects/plugins/create/create_legacy_render.py diff --git a/openpype/hosts/aftereffects/plugins/create/create_local_render.py b/openpype/hosts/aftereffects/plugins/create/create_legacy_local_render.py similarity index 57% rename from openpype/hosts/aftereffects/plugins/create/create_local_render.py rename to openpype/hosts/aftereffects/plugins/create/create_legacy_local_render.py index 9d2cdcd7be..4fb07f31f8 100644 --- a/openpype/hosts/aftereffects/plugins/create/create_local_render.py +++ b/openpype/hosts/aftereffects/plugins/create/create_legacy_local_render.py @@ -1,7 +1,7 @@ -from openpype.hosts.aftereffects.plugins.create import create_render +from openpype.hosts.aftereffects.plugins.create import create_legacy_render -class CreateLocalRender(create_render.CreateRender): +class CreateLocalRender(create_legacy_render.CreateRender): """ Creator to render locally. Created only after default render on farm. 
So family 'render.local' is @@ -10,4 +10,4 @@ class CreateLocalRender(create_render.CreateRender): name = "renderDefault" label = "Render Locally" - family = "renderLocal" + family = "renderLocal" \ No newline at end of file diff --git a/openpype/hosts/aftereffects/plugins/create/create_legacy_render.py b/openpype/hosts/aftereffects/plugins/create/create_legacy_render.py new file mode 100644 index 0000000000..7da489a731 --- /dev/null +++ b/openpype/hosts/aftereffects/plugins/create/create_legacy_render.py @@ -0,0 +1,62 @@ +from avalon.api import CreatorError + +import openpype.api +from openpype.hosts.aftereffects.api import ( + get_stub, + list_instances +) + + +class CreateRender(openpype.api.Creator): + """Render folder for publish. + + Creates subsets in format 'familyTaskSubsetname', + eg 'renderCompositingMain'. + + Create only single instance from composition at a time. + """ + + name = "renderDefault" + label = "Render on Farm" + family = "render" + defaults = ["Main"] + + def process(self): + stub = get_stub() # only after After Effects is up + if (self.options or {}).get("useSelection"): + items = stub.get_selected_items( + comps=True, folders=False, footages=False + ) + if len(items) > 1: + raise CreatorError( + "Please select only single composition at time." + ) + + if not items: + raise CreatorError(( + "Nothing to create. Select composition " + "if 'useSelection' or create at least " + "one composition." + )) + + existing_subsets = [ + instance['subset'].lower() + for instance in list_instances() + ] + + item = items.pop() + if self.name.lower() in existing_subsets: + txt = "Instance with name \"{}\" already exists.".format(self.name) + raise CreatorError(txt) + + self.data["members"] = [item.id] + self.data["uuid"] = item.id # for SubsetManager + self.data["subset"] = ( + self.data["subset"] + .replace(stub.PUBLISH_ICON, '') + .replace(stub.LOADED_ICON, '') + ) + + stub.imprint(item, self.data) + stub.set_label_color(item.id, 14) # Cyan options 0 - 16 + stub.rename_item(item.id, stub.PUBLISH_ICON + self.data["subset"]) \ No newline at end of file From ebc05e82c8001878667aa31d1cba014d9c06f231 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 3 Mar 2022 19:31:18 +0100 Subject: [PATCH 021/357] OP-2765 - refactored imprint method Uses id instead of full AEItem --- openpype/hosts/aftereffects/api/pipeline.py | 8 ++++---- openpype/hosts/aftereffects/api/ws_stub.py | 8 ++++---- .../hosts/aftereffects/plugins/load/load_background.py | 5 ++--- openpype/hosts/aftereffects/plugins/load/load_file.py | 8 ++++---- 4 files changed, 14 insertions(+), 15 deletions(-) diff --git a/openpype/hosts/aftereffects/api/pipeline.py b/openpype/hosts/aftereffects/api/pipeline.py index 1ec76fd9dd..550ff25886 100644 --- a/openpype/hosts/aftereffects/api/pipeline.py +++ b/openpype/hosts/aftereffects/api/pipeline.py @@ -178,7 +178,7 @@ def containerise(name, Arguments: name (str): Name of resulting assembly namespace (str): Namespace under which to host container - comp (Comp): Composition to containerise + comp (AEItem): Composition to containerise context (dict): Asset information loader (str, optional): Name of loader used to produce this container. suffix (str, optional): Suffix of container, defaults to `_CON`. 
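# The legacy farm creator above blocks duplicates by comparing against subsets
# already imprinted in the workfile. The same guard as a standalone check, as
# a sketch (the helper name is illustrative; list_instances is the function
# already imported in that module):
def _subset_already_exists(name):
    existing = {inst["subset"].lower() for inst in list_instances()}
    return name.lower() in existing

# e.g. in process():
# if _subset_already_exists(self.name):
#     raise CreatorError(
#         'Instance with name "{}" already exists.'.format(self.name))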
@@ -197,7 +197,7 @@ def containerise(name, } stub = get_stub() - stub.imprint(comp, data) + stub.imprint(comp.id, data) return comp @@ -254,8 +254,8 @@ def remove_instance(instance): stub.remove_instance(inst_id) - if instance.members: - item = stub.get_item(instance.members[0]) + if instance.get("members"): + item = stub.get_item(instance["members"][0]) if item: stub.rename_item(item.id, item.name.replace(stub.PUBLISH_ICON, '')) diff --git a/openpype/hosts/aftereffects/api/ws_stub.py b/openpype/hosts/aftereffects/api/ws_stub.py index d098419e81..18852d3d6c 100644 --- a/openpype/hosts/aftereffects/api/ws_stub.py +++ b/openpype/hosts/aftereffects/api/ws_stub.py @@ -111,11 +111,11 @@ class AfterEffectsServerStub(): self.log.debug("Couldn't find layer metadata") - def imprint(self, item, data, all_items=None, items_meta=None): + def imprint(self, item_id, data, all_items=None, items_meta=None): """ Save item metadata to Label field of metadata of active document Args: - item (AEItem): + item_id (int|str): id of FootageItem or instance_id for workfiles data(string): json representation for single layer all_items (list of item): for performance, could be injected for usage in loop, if not, single call will be @@ -134,8 +134,8 @@ class AfterEffectsServerStub(): for item_meta in items_meta: if ((item_meta.get('members') and - str(item.id) == str(item_meta.get('members')[0])) or - item_meta.get("instance_id") == item.id): + str(item_id) == str(item_meta.get('members')[0])) or + item_meta.get("instance_id") == item_id): is_new = False if data: item_meta.update(data) diff --git a/openpype/hosts/aftereffects/plugins/load/load_background.py b/openpype/hosts/aftereffects/plugins/load/load_background.py index 1a2d6fc432..9b39556040 100644 --- a/openpype/hosts/aftereffects/plugins/load/load_background.py +++ b/openpype/hosts/aftereffects/plugins/load/load_background.py @@ -91,7 +91,7 @@ class BackgroundLoader(AfterEffectsLoader): container["namespace"] = comp_name container["members"] = comp.members - stub.imprint(comp, container) + stub.imprint(comp.id, container) def remove(self, container): """ @@ -100,10 +100,9 @@ class BackgroundLoader(AfterEffectsLoader): Args: container (dict): container to be removed - used to get layer_id """ - print("!!!! 
container:: {}".format(container)) stub = self.get_stub() layer = container.pop("layer") - stub.imprint(layer, {}) + stub.imprint(layer.id, {}) stub.delete_item(layer.id) def switch(self, container, representation): diff --git a/openpype/hosts/aftereffects/plugins/load/load_file.py b/openpype/hosts/aftereffects/plugins/load/load_file.py index 9dbbf7aae1..ba5bb5f69a 100644 --- a/openpype/hosts/aftereffects/plugins/load/load_file.py +++ b/openpype/hosts/aftereffects/plugins/load/load_file.py @@ -96,9 +96,9 @@ class FileLoader(AfterEffectsLoader): # with aftereffects.maintained_selection(): # TODO stub.replace_item(layer.id, path, stub.LOADED_ICON + layer_name) stub.imprint( - layer, {"representation": str(representation["_id"]), - "name": context["subset"], - "namespace": layer_name} + layer.id, {"representation": str(representation["_id"]), + "name": context["subset"], + "namespace": layer_name} ) def remove(self, container): @@ -109,7 +109,7 @@ class FileLoader(AfterEffectsLoader): """ stub = self.get_stub() layer = container.pop("layer") - stub.imprint(layer, {}) + stub.imprint(layer.id, {}) stub.delete_item(layer.id) def switch(self, container, representation): From 3c11f46b110d3e74f96b7990845bec375ee46d05 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 3 Mar 2022 19:32:04 +0100 Subject: [PATCH 022/357] OP-2765 - working version of new creator --- .../plugins/create/create_render.py | 126 ++++++++++++------ 1 file changed, 87 insertions(+), 39 deletions(-) diff --git a/openpype/hosts/aftereffects/plugins/create/create_render.py b/openpype/hosts/aftereffects/plugins/create/create_render.py index 8dfc85cdc8..c290bd46c3 100644 --- a/openpype/hosts/aftereffects/plugins/create/create_render.py +++ b/openpype/hosts/aftereffects/plugins/create/create_render.py @@ -1,37 +1,65 @@ -from avalon.api import CreatorError - -import openpype.api -from openpype.hosts.aftereffects.api import ( - get_stub, - list_instances +import json +from openpype import resources +import openpype.hosts.aftereffects.api as api +from openpype.pipeline import ( + Creator, + CreatedInstance, + lib, + CreatorError ) -class CreateRender(openpype.api.Creator): - """Render folder for publish. - - Creates subsets in format 'familyTaskSubsetname', - eg 'renderCompositingMain'. - - Create only single instance from composition at a time. 
- """ - - name = "renderDefault" - label = "Render on Farm" +class RenderCreator(Creator): + identifier = "render" + label = "Render" family = "render" - defaults = ["Main"] + description = "Render creator" - def process(self): - stub = get_stub() # only after After Effects is up - if (self.options or {}).get("useSelection"): + create_allow_context_change = False + + def get_icon(self): + return resources.get_openpype_splash_filepath() + + def collect_instances(self): + for instance_data in api.list_instances(): + creator_id = instance_data.get("creator_identifier") + if creator_id == self.identifier: + instance_data = self._handle_legacy(instance_data) + instance = CreatedInstance.from_existing( + instance_data, self + ) + self._add_instance_to_context(instance) + + def update_instances(self, update_list): + created_inst, changes = update_list[0] + print("RenderCreator update_list:: {}-{}".format(created_inst, changes)) + api.get_stub().imprint(created_inst.get("instance_id"), + created_inst.data_to_store()) + + def remove_instances(self, instances): + for instance in instances: + print("instance:: {}".format(instance)) + api.remove_instance(instance) + self._remove_instance_from_context(instance) + + def create(self, subset_name, data, pre_create_data): + print("Data that can be used in create:\n{}".format( + json.dumps(pre_create_data, indent=4) + )) + stub = api.get_stub() # only after After Effects is up + print("pre_create_data:: {}".format(pre_create_data)) + if pre_create_data.get("use_selection"): items = stub.get_selected_items( comps=True, folders=False, footages=False ) + else: + items = stub.get_items(comps=True, folders=False, footages=False) + if len(items) > 1: raise CreatorError( "Please select only single composition at time." ) - + print("items:: {}".format(items)) if not items: raise CreatorError(( "Nothing to create. Select composition " @@ -39,24 +67,44 @@ class CreateRender(openpype.api.Creator): "one composition." 
)) - existing_subsets = [ - instance['subset'].lower() - for instance in list_instances() + data["members"] = [items[0].id] + new_instance = CreatedInstance(self.family, subset_name, data, self) + new_instance.creator_attributes["farm"] = pre_create_data["farm"] + + api.get_stub().imprint(new_instance.get("instance_id"), + new_instance.data_to_store()) + self.log.info(new_instance.data) + self._add_instance_to_context(new_instance) + + def get_default_variants(self): + return [ + "myVariant", + "variantTwo", + "different_variant" ] - item = items.pop() - if self.name.lower() in existing_subsets: - txt = "Instance with name \"{}\" already exists.".format(self.name) - raise CreatorError(txt) + def get_instance_attr_defs(self): + return [lib.BoolDef("farm", label="Render on farm")] - self.data["members"] = [item.id] - self.data["uuid"] = item.id # for SubsetManager - self.data["subset"] = ( - self.data["subset"] - .replace(stub.PUBLISH_ICON, '') - .replace(stub.LOADED_ICON, '') - ) + def get_pre_create_attr_defs(self): + output = [ + lib.BoolDef("use_selection", default=True, label="Use selection"), + lib.UISeparatorDef(), + lib.BoolDef("farm", label="Render on farm") + ] + return output + + def get_detail_description(self): + return """Creator for Render instances""" + + def _handle_legacy(self, instance_data): + """Converts old instances to new format.""" + if instance_data.get("uuid"): + instance_data["item_id"] = instance_data.get("uuid") + instance_data.pop("uuid") + + if not instance_data.get("members"): + instance_data["members"] = [instance_data["item_id"]] + + return instance_data - stub.imprint(item, self.data) - stub.set_label_color(item.id, 14) # Cyan options 0 - 16 - stub.rename_item(item.id, stub.PUBLISH_ICON + self.data["subset"]) From 082b2306ee08a4f286804d1afe0f8139006e5fe8 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 3 Mar 2022 19:32:50 +0100 Subject: [PATCH 023/357] OP-2765 - changed collector to work with new creator --- .../hosts/aftereffects/plugins/publish/collect_workfile.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py b/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py index c1c2be4855..61c4897cae 100644 --- a/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py +++ b/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py @@ -10,6 +10,11 @@ class CollectWorkfile(pyblish.api.ContextPlugin): order = pyblish.api.CollectorOrder + 0.1 def process(self, context): + for instance in context: + if instance.data["family"] == "workfile": + self.log.debug("Workfile instance found, skipping") + return + task = api.Session["AVALON_TASK"] current_file = context.data["currentFile"] staging_dir = os.path.dirname(current_file) From 64b63369d6b1a8bbf702a3fe34a3ea05e4021d79 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 3 Mar 2022 19:33:21 +0100 Subject: [PATCH 024/357] OP-2765 - added 'newPublishing' flag to differentiate --- openpype/plugins/publish/collect_from_create_context.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/plugins/publish/collect_from_create_context.py b/openpype/plugins/publish/collect_from_create_context.py index 16e3f669c3..09584ab37c 100644 --- a/openpype/plugins/publish/collect_from_create_context.py +++ b/openpype/plugins/publish/collect_from_create_context.py @@ -25,7 +25,7 @@ class CollectFromCreateContext(pyblish.api.ContextPlugin): # Update global data to context 
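# The new RenderCreator persists an instance by imprinting data_to_store()
# under its instance_id, and rebuilds CreatedInstance objects from
# list_instances() on the next collect. A condensed sketch of that round trip
# (helper names are illustrative; `api` is openpype.hosts.aftereffects.api as
# imported in the creator module):
def _persist_new_instance(creator, subset_name, comp_id):
    """Imprint a freshly created instance so list_instances() can find it."""
    instance = CreatedInstance(creator.family, subset_name,
                               {"members": [comp_id]}, creator)
    api.get_stub().imprint(instance.get("instance_id"),
                           instance.data_to_store())
    return instance

def _collect_existing_instances(creator):
    """Rebuild CreatedInstance objects from the imprinted workfile metadata."""
    for instance_data in api.list_instances():
        if instance_data.get("creator_identifier") == creator.identifier:
            creator._add_instance_to_context(
                CreatedInstance.from_existing(instance_data, creator))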
context.data.update(create_context.context_data_to_store()) - + context.data["newPublishing"] = True # Update context data for key in ("AVALON_PROJECT", "AVALON_ASSET", "AVALON_TASK"): value = create_context.dbcon.Session.get(key) From be05fe990580aff0bc98ffee8243bc4e7536083e Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 3 Mar 2022 19:34:00 +0100 Subject: [PATCH 025/357] OP-2765 - updated collecting of render family Added pre collect for backward compatibility --- .../plugins/publish/collect_render.py | 197 ++++++++++-------- .../plugins/publish/pre_collect_render.py | 47 +++++ 2 files changed, 154 insertions(+), 90 deletions(-) create mode 100644 openpype/hosts/aftereffects/plugins/publish/pre_collect_render.py diff --git a/openpype/hosts/aftereffects/plugins/publish/collect_render.py b/openpype/hosts/aftereffects/plugins/publish/collect_render.py index 2a4b773681..1ad3d3dd18 100644 --- a/openpype/hosts/aftereffects/plugins/publish/collect_render.py +++ b/openpype/hosts/aftereffects/plugins/publish/collect_render.py @@ -2,6 +2,7 @@ import os import re import tempfile import attr +from copy import deepcopy import pyblish.api @@ -29,20 +30,22 @@ class CollectAERender(abstract_collect_render.AbstractCollectRender): label = "Collect After Effects Render Layers" hosts = ["aftereffects"] - # internal - family_remapping = { - "render": ("render.farm", "farm"), # (family, label) - "renderLocal": ("render", "local") - } padding_width = 6 rendered_extension = 'png' - stub = get_stub() + _stub = None + + @classmethod + def get_stub(cls): + if not cls._stub: + cls._stub = get_stub() + return cls._stub def get_instances(self, context): instances = [] + instances_to_remove = [] - app_version = self.stub.get_app_version() + app_version = CollectAERender.get_stub().get_app_version() app_version = app_version[0:4] current_file = context.data["currentFile"] @@ -50,105 +53,91 @@ class CollectAERender(abstract_collect_render.AbstractCollectRender): asset_entity = context.data["assetEntity"] project_entity = context.data["projectEntity"] - compositions = self.stub.get_items(True) + compositions = CollectAERender.get_stub().get_items(True) compositions_by_id = {item.id: item for item in compositions} - for inst in self.stub.get_metadata(): - schema = inst.get('schema') - # loaded asset container skip it - if schema and 'container' in schema: + for inst in context: + family = inst.data["family"] + if family != "render": continue + self._debug_log(inst) - if not inst["members"]: - raise ValueError("Couldn't find id, unable to publish. 
" + - "Please recreate instance.") - item_id = inst["members"][0] + item_id = inst.data["members"][0] - work_area_info = self.stub.get_work_area(int(item_id)) + work_area_info = CollectAERender.get_stub().get_work_area( + int(item_id)) if not work_area_info: self.log.warning("Orphaned instance, deleting metadata") - self.stub.remove_instance(int(item_id)) + inst_id = inst.get("instance_id") or item_id + CollectAERender.get_stub().remove_instance(inst_id) continue - frameStart = work_area_info.workAreaStart - - frameEnd = round(work_area_info.workAreaStart + - float(work_area_info.workAreaDuration) * - float(work_area_info.frameRate)) - 1 + frame_start = work_area_info.workAreaStart + frame_end = round(work_area_info.workAreaStart + + float(work_area_info.workAreaDuration) * + float(work_area_info.frameRate)) - 1 fps = work_area_info.frameRate # TODO add resolution when supported by extension - if inst["family"] in self.family_remapping.keys() \ - and inst["active"]: - remapped_family = self.family_remapping[inst["family"]] - instance = AERenderInstance( - family=remapped_family[0], - families=[remapped_family[0]], - version=version, - time="", - source=current_file, - label="{} - {}".format(inst["subset"], remapped_family[1]), - subset=inst["subset"], - asset=context.data["assetEntity"]["name"], - attachTo=False, - setMembers='', - publish=True, - renderer='aerender', - name=inst["subset"], - resolutionWidth=asset_entity["data"].get( - "resolutionWidth", - project_entity["data"]["resolutionWidth"]), - resolutionHeight=asset_entity["data"].get( - "resolutionHeight", - project_entity["data"]["resolutionHeight"]), - pixelAspect=1, - tileRendering=False, - tilesX=0, - tilesY=0, - frameStart=frameStart, - frameEnd=frameEnd, - frameStep=1, - toBeRenderedOn='deadline', - fps=fps, - app_version=app_version - ) + if not inst.data["active"]: + continue - comp = compositions_by_id.get(int(item_id)) - if not comp: - raise ValueError("There is no composition for item {}". - format(item_id)) - instance.comp_name = comp.name - instance.comp_id = item_id - instance._anatomy = context.data["anatomy"] - instance.anatomyData = context.data["anatomyData"] + subset_name = inst.data["subset"] + instance = AERenderInstance( + family=family, + families=[family], + version=version, + time="", + source=current_file, + label="{} - {}".format(subset_name, family), + subset=subset_name, + asset=context.data["assetEntity"]["name"], + attachTo=False, + setMembers='', + publish=True, + renderer='aerender', + name=subset_name, + resolutionWidth=asset_entity["data"].get( + "resolutionWidth", + project_entity["data"]["resolutionWidth"]), + resolutionHeight=asset_entity["data"].get( + "resolutionHeight", + project_entity["data"]["resolutionHeight"]), + pixelAspect=1, + tileRendering=False, + tilesX=0, + tilesY=0, + frameStart=frame_start, + frameEnd=frame_end, + frameStep=1, + toBeRenderedOn='deadline', + fps=fps, + app_version=app_version, + anatomyData=deepcopy(context.data["anatomyData"]), + context=context + ) - instance.outputDir = self._get_output_dir(instance) - instance.context = context + comp = compositions_by_id.get(int(item_id)) + if not comp: + raise ValueError("There is no composition for item {}". 
+ format(item_id)) + instance.outputDir = self._get_output_dir(instance) + instance.comp_name = comp.name + instance.comp_id = item_id - settings = get_project_settings(os.getenv("AVALON_PROJECT")) - reviewable_subset_filter = \ - (settings["deadline"] - ["publish"] - ["ProcessSubmittedJobOnFarm"] - ["aov_filter"]) + is_local = "renderLocal" in inst.data["families"] + if inst.data.get("creator_attributes"): + is_local = inst.data["creator_attributes"].get("farm") + if is_local: + # for local renders + instance = self._update_for_local(instance, project_entity) - if inst["family"] == "renderLocal": - # for local renders - instance.anatomyData["version"] = instance.version - instance.anatomyData["subset"] = instance.subset - instance.stagingDir = tempfile.mkdtemp() - instance.projectEntity = project_entity + self.log.info("New instance:: {}".format(instance)) + instances.append(instance) + instances_to_remove.append(inst) - if self.hosts[0] in reviewable_subset_filter.keys(): - for aov_pattern in \ - reviewable_subset_filter[self.hosts[0]]: - if re.match(aov_pattern, instance.subset): - instance.families.append("review") - instance.review = True - break - - self.log.info("New instance:: {}".format(instance)) - instances.append(instance) + for instance in instances_to_remove: + context.remove(instance) return instances @@ -169,7 +158,7 @@ class CollectAERender(abstract_collect_render.AbstractCollectRender): end = render_instance.frameEnd # pull file name from Render Queue Output module - render_q = self.stub.get_render_info() + render_q = CollectAERender.get_stub().get_render_info() if not render_q: raise ValueError("No file extension set in Render Queue") _, ext = os.path.splitext(os.path.basename(render_q.file_name)) @@ -216,3 +205,31 @@ class CollectAERender(abstract_collect_render.AbstractCollectRender): # for submit_publish_job return base_dir + + def _update_for_local(self, instance, project_entity): + instance.anatomyData["version"] = instance.version + instance.anatomyData["subset"] = instance.subset + instance.stagingDir = tempfile.mkdtemp() + instance.projectEntity = project_entity + + settings = get_project_settings(os.getenv("AVALON_PROJECT")) + reviewable_subset_filter = (settings["deadline"] + ["publish"] + ["ProcessSubmittedJobOnFarm"] + ["aov_filter"].get(self.hosts[0])) + for aov_pattern in reviewable_subset_filter: + if re.match(aov_pattern, instance.subset): + instance.families.append("review") + instance.review = True + break + + return instance + + def _debug_log(self, instance): + def _default_json(value): + return str(value) + + import json + self.log.info( + json.dumps(instance.data, indent=4, default=_default_json) + ) diff --git a/openpype/hosts/aftereffects/plugins/publish/pre_collect_render.py b/openpype/hosts/aftereffects/plugins/publish/pre_collect_render.py new file mode 100644 index 0000000000..56dc884634 --- /dev/null +++ b/openpype/hosts/aftereffects/plugins/publish/pre_collect_render.py @@ -0,0 +1,47 @@ +import json +import pyblish.api +from openpype.hosts.aftereffects.api import get_stub, list_instances + + +class PreCollectRender(pyblish.api.ContextPlugin): + """ + Checks if render instance is of new type, adds to families to both + existing collectors work same way. 
+ """ + + label = "PreCollect Render" + order = pyblish.api.CollectorOrder + 0.400 + hosts = ["aftereffects"] + + family_remapping = { + "render": ("render.farm", "farm"), # (family, label) + "renderLocal": ("render", "local") + } + + def process(self, context): + if context.data.get("newPublishing"): + self.log.debug("Not applicable for New Publisher, skip") + return + + stub = get_stub() + for inst in list_instances(): + if inst["family"] not in self.family_remapping.keys(): + continue + + if not inst["members"]: + raise ValueError("Couldn't find id, unable to publish. " + + "Please recreate instance.") + + instance = context.create_instance(inst["subset"]) + inst["families"] = [self.family_remapping[inst["family"]]] + instance.data.update(inst) + + self._debug_log(instance) + + def _debug_log(self, instance): + def _default_json(value): + return str(value) + + self.log.info( + json.dumps(instance.data, indent=4, default=_default_json) + ) From c189725f3fdd7babae5709b70fd61708ae67bd91 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 3 Mar 2022 19:34:27 +0100 Subject: [PATCH 026/357] OP-2765 - missed update for imprint --- .../aftereffects/plugins/publish/validate_instance_asset.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/aftereffects/plugins/publish/validate_instance_asset.py b/openpype/hosts/aftereffects/plugins/publish/validate_instance_asset.py index 71c1750457..3019719947 100644 --- a/openpype/hosts/aftereffects/plugins/publish/validate_instance_asset.py +++ b/openpype/hosts/aftereffects/plugins/publish/validate_instance_asset.py @@ -27,7 +27,7 @@ class ValidateInstanceAssetRepair(pyblish.api.Action): data = stub.read(instance[0]) data["asset"] = api.Session["AVALON_ASSET"] - stub.imprint(instance[0], data) + stub.imprint(instance[0].instance_id, data) class ValidateInstanceAsset(pyblish.api.InstancePlugin): From 7967496b5c64c3e1a5c126de7c0a3f90dd3e81f5 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 3 Mar 2022 19:34:52 +0100 Subject: [PATCH 027/357] OP-2765 - added CreatorError to pipeline api --- openpype/pipeline/__init__.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/openpype/pipeline/__init__.py b/openpype/pipeline/__init__.py index e968df4011..2b7a39d444 100644 --- a/openpype/pipeline/__init__.py +++ b/openpype/pipeline/__init__.py @@ -4,7 +4,8 @@ from .create import ( BaseCreator, Creator, AutoCreator, - CreatedInstance + CreatedInstance, + CreatorError ) from .publish import ( @@ -21,6 +22,7 @@ __all__ = ( "Creator", "AutoCreator", "CreatedInstance", + "CreatorError", "PublishValidationError", "KnownPublishError", From 4434a4b1888f65a55aa86a365d186aabb6ec69cf Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 4 Mar 2022 15:44:19 +0100 Subject: [PATCH 028/357] OP-2765 - added default to Setting for subset name of workfile in AE --- openpype/settings/defaults/project_settings/global.json | 1 + 1 file changed, 1 insertion(+) diff --git a/openpype/settings/defaults/project_settings/global.json b/openpype/settings/defaults/project_settings/global.json index f08bee8b2d..71c837659e 100644 --- a/openpype/settings/defaults/project_settings/global.json +++ b/openpype/settings/defaults/project_settings/global.json @@ -268,6 +268,7 @@ "workfile" ], "hosts": [ + "aftereffects", "tvpaint" ], "task_types": [], From e24ef3a9eba62a9dbcae252dcf70d9608145724b Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 4 Mar 2022 16:32:16 +0100 Subject: [PATCH 029/357] OP-2765 - added workfile creator and modified collector 
Workfile collector shouldn't create new isntance for NP, but should update version --- .../plugins/create/workfile_creator.py | 75 +++++++++++++++++++ .../plugins/publish/collect_workfile.py | 33 ++++---- 2 files changed, 94 insertions(+), 14 deletions(-) create mode 100644 openpype/hosts/aftereffects/plugins/create/workfile_creator.py diff --git a/openpype/hosts/aftereffects/plugins/create/workfile_creator.py b/openpype/hosts/aftereffects/plugins/create/workfile_creator.py new file mode 100644 index 0000000000..2d9d42ee8c --- /dev/null +++ b/openpype/hosts/aftereffects/plugins/create/workfile_creator.py @@ -0,0 +1,75 @@ +from avalon import io + +import openpype.hosts.aftereffects.api as api +from openpype.pipeline import ( + AutoCreator, + CreatedInstance +) + + +class AEWorkfileCreator(AutoCreator): + identifier = "workfile" + family = "workfile" + + def get_instance_attr_defs(self): + return [] + + def collect_instances(self): + for instance_data in api.list_instances(): + creator_id = instance_data.get("creator_identifier") + if creator_id == self.identifier: + subset_name = instance_data["subset"] + instance = CreatedInstance( + self.family, subset_name, instance_data, self + ) + self._add_instance_to_context(instance) + + def update_instances(self, update_list): + # nothing to change on workfiles + pass + + def create(self, options=None): + existing_instance = None + for instance in self.create_context.instances: + if instance.family == self.family: + existing_instance = instance + break + + variant = '' + project_name = io.Session["AVALON_PROJECT"] + asset_name = io.Session["AVALON_ASSET"] + task_name = io.Session["AVALON_TASK"] + host_name = io.Session["AVALON_APP"] + + if existing_instance is None: + asset_doc = io.find_one({"type": "asset", "name": asset_name}) + subset_name = self.get_subset_name( + variant, task_name, asset_doc, project_name, host_name + ) + data = { + "asset": asset_name, + "task": task_name, + "variant": variant + } + data.update(self.get_dynamic_data( + variant, task_name, asset_doc, project_name, host_name + )) + + new_instance = CreatedInstance( + self.family, subset_name, data, self + ) + self._add_instance_to_context(new_instance) + + api.get_stub().imprint(new_instance.get("instance_id"), + new_instance.data_to_store()) + + elif ( + existing_instance["asset"] != asset_name + or existing_instance["task"] != task_name + ): + asset_doc = io.find_one({"type": "asset", "name": asset_name}) + subset_name = self.get_subset_name( + variant, task_name, asset_doc, project_name, host_name + ) + existing_instance["asset"] = asset_name + existing_instance["task"] = task_name diff --git a/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py b/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py index 61c4897cae..29ec3a64e6 100644 --- a/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py +++ b/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py @@ -10,10 +10,11 @@ class CollectWorkfile(pyblish.api.ContextPlugin): order = pyblish.api.CollectorOrder + 0.1 def process(self, context): + create_instance = True for instance in context: if instance.data["family"] == "workfile": - self.log.debug("Workfile instance found, skipping") - return + self.log.debug("Workfile instance found, do not create new") + create_instance = False task = api.Session["AVALON_TASK"] current_file = context.data["currentFile"] @@ -44,20 +45,24 @@ class CollectWorkfile(pyblish.api.ContextPlugin): # workfile instance family = "workfile" subset = family + 
task.capitalize() - # Create instance - instance = context.create_instance(subset) - # creating instance data - instance.data.update({ - "subset": subset, - "label": scene_file, - "family": family, - "families": [family], - "representations": list() - }) + if create_instance: # old publish + # Create instance + instance = context.create_instance(subset) - # adding basic script data - instance.data.update(shared_instance_data) + # creating instance data + instance.data.update({ + "subset": subset, + "label": scene_file, + "family": family, + "families": [family], + "representations": list() + }) + + # adding basic script data + instance.data.update(shared_instance_data) + else: + instance.data.update({"version": version}) # creating representation representation = { From 97b9b035db68132f22e4d48874a02ad5bf76c9af Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 4 Mar 2022 17:54:21 +0100 Subject: [PATCH 030/357] OP-2765 - added helper logging function --- .../aftereffects/plugins/publish/collect_render.py | 13 +------------ .../plugins/publish/collect_workfile.py | 9 +++------ openpype/lib/__init__.py | 3 ++- openpype/lib/log.py | 12 ++++++++++++ 4 files changed, 18 insertions(+), 19 deletions(-) diff --git a/openpype/hosts/aftereffects/plugins/publish/collect_render.py b/openpype/hosts/aftereffects/plugins/publish/collect_render.py index 1ad3d3dd18..b41fb5d5f5 100644 --- a/openpype/hosts/aftereffects/plugins/publish/collect_render.py +++ b/openpype/hosts/aftereffects/plugins/publish/collect_render.py @@ -59,7 +59,6 @@ class CollectAERender(abstract_collect_render.AbstractCollectRender): family = inst.data["family"] if family != "render": continue - self._debug_log(inst) item_id = inst.data["members"][0] @@ -127,12 +126,11 @@ class CollectAERender(abstract_collect_render.AbstractCollectRender): is_local = "renderLocal" in inst.data["families"] if inst.data.get("creator_attributes"): - is_local = inst.data["creator_attributes"].get("farm") + is_local = not inst.data["creator_attributes"].get("farm") if is_local: # for local renders instance = self._update_for_local(instance, project_entity) - self.log.info("New instance:: {}".format(instance)) instances.append(instance) instances_to_remove.append(inst) @@ -224,12 +222,3 @@ class CollectAERender(abstract_collect_render.AbstractCollectRender): break return instance - - def _debug_log(self, instance): - def _default_json(value): - return str(value) - - import json - self.log.info( - json.dumps(instance.data, indent=4, default=_default_json) - ) diff --git a/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py b/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py index 29ec3a64e6..d8a324f828 100644 --- a/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py +++ b/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py @@ -2,6 +2,8 @@ import os from avalon import api import pyblish.api +from openpype.lib import debug_log_instance + class CollectWorkfile(pyblish.api.ContextPlugin): """ Adds the AE render instances """ @@ -61,8 +63,6 @@ class CollectWorkfile(pyblish.api.ContextPlugin): # adding basic script data instance.data.update(shared_instance_data) - else: - instance.data.update({"version": version}) # creating representation representation = { @@ -74,7 +74,4 @@ class CollectWorkfile(pyblish.api.ContextPlugin): instance.data["representations"].append(representation) - self.log.info('Publishing After Effects workfile') - - for i in context: - self.log.debug(f"{i.data['families']}") + 
debug_log_instance(self.log, "Workfile instance", instance) diff --git a/openpype/lib/__init__.py b/openpype/lib/__init__.py index 6a24f30455..fb7afe7cb3 100644 --- a/openpype/lib/__init__.py +++ b/openpype/lib/__init__.py @@ -35,7 +35,7 @@ from .execute import ( path_to_subprocess_arg, CREATE_NO_WINDOW ) -from .log import PypeLogger, timeit +from .log import PypeLogger, timeit, debug_log_instance from .path_templates import ( merge_dict, @@ -313,6 +313,7 @@ __all__ = [ "OpenPypeMongoConnection", "timeit", + "debug_log_instance", "is_overlapping_otio_ranges", "otio_range_with_handles", diff --git a/openpype/lib/log.py b/openpype/lib/log.py index a42faef008..7824e96159 100644 --- a/openpype/lib/log.py +++ b/openpype/lib/log.py @@ -23,6 +23,7 @@ import time import traceback import threading import copy +import json from . import Terminal from .mongo import ( @@ -493,3 +494,14 @@ def timeit(method): print('%r %2.2f ms' % (method.__name__, (te - ts) * 1000)) return result return timed + + +def debug_log_instance(logger, msg, instance): + """Helper function to write instance.data as json""" + def _default_json(value): + return str(value) + + logger.debug(msg) + logger.debug( + json.dumps(instance.data, indent=4, default=_default_json) + ) From 9065530eefdc98daf604d282f9f49e16614bcd0d Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 4 Mar 2022 18:20:36 +0100 Subject: [PATCH 031/357] OP-2765 - fixed wrong assignment of representations to instances --- .../aftereffects/plugins/publish/collect_workfile.py | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py b/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py index d8a324f828..1bb476d80b 100644 --- a/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py +++ b/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py @@ -12,11 +12,12 @@ class CollectWorkfile(pyblish.api.ContextPlugin): order = pyblish.api.CollectorOrder + 0.1 def process(self, context): - create_instance = True + existing_instance = None for instance in context: if instance.data["family"] == "workfile": - self.log.debug("Workfile instance found, do not create new") - create_instance = False + self.log.debug("Workfile instance found, won't create new") + existing_instance = instance + break task = api.Session["AVALON_TASK"] current_file = context.data["currentFile"] @@ -47,8 +48,7 @@ class CollectWorkfile(pyblish.api.ContextPlugin): # workfile instance family = "workfile" subset = family + task.capitalize() - - if create_instance: # old publish + if existing_instance is None: # old publish # Create instance instance = context.create_instance(subset) @@ -63,6 +63,8 @@ class CollectWorkfile(pyblish.api.ContextPlugin): # adding basic script data instance.data.update(shared_instance_data) + else: + instance = existing_instance # creating representation representation = { From 7b9ec117e7a32dd34d634d3a6d9ecaca54bb983f Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 4 Mar 2022 19:02:05 +0100 Subject: [PATCH 032/357] OP-2765 - add fallback to uuid for backward compatibility --- openpype/hosts/aftereffects/api/pipeline.py | 2 +- openpype/hosts/aftereffects/api/ws_stub.py | 3 ++- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/aftereffects/api/pipeline.py b/openpype/hosts/aftereffects/api/pipeline.py index 550ff25886..4ae88e649a 100644 --- a/openpype/hosts/aftereffects/api/pipeline.py +++ b/openpype/hosts/aftereffects/api/pipeline.py @@ 
-247,7 +247,7 @@ def remove_instance(instance): if not stub: return - inst_id = instance.get("instance_id") + inst_id = instance.get("instance_id") or instance.get("uuid") # legacy if not inst_id: log.warning("No instance identifier for {}".format(instance)) return diff --git a/openpype/hosts/aftereffects/api/ws_stub.py b/openpype/hosts/aftereffects/api/ws_stub.py index 18852d3d6c..1d3b69e038 100644 --- a/openpype/hosts/aftereffects/api/ws_stub.py +++ b/openpype/hosts/aftereffects/api/ws_stub.py @@ -321,7 +321,8 @@ class AfterEffectsServerStub(): cleaned_data = [] for instance in self.get_metadata(): - if instance.get("instance_id") != instance_id: + inst_id = instance.get("instance_id") or instance.get("uuid") + if inst_id != instance_id: cleaned_data.append(instance) payload = json.dumps(cleaned_data, indent=4) From e127e08be4bc7ca1c05c3e5b560cf3e00dd53590 Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Tue, 8 Mar 2022 12:08:56 +0000 Subject: [PATCH 033/357] Fix paths for loading animations --- openpype/hosts/unreal/plugins/load/load_animation.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/unreal/plugins/load/load_animation.py b/openpype/hosts/unreal/plugins/load/load_animation.py index bc4a42c84b..c1f7942ef0 100644 --- a/openpype/hosts/unreal/plugins/load/load_animation.py +++ b/openpype/hosts/unreal/plugins/load/load_animation.py @@ -134,7 +134,7 @@ class AnimationFBXLoader(plugin.Loader): # Create directory for asset and avalon container hierarchy = context.get('asset').get('data').get('parents') - root = "/Game/Avalon" + root = "/Game/OpenPype" asset = context.get('asset').get('name') suffix = "_CON" if asset: @@ -144,7 +144,7 @@ class AnimationFBXLoader(plugin.Loader): tools = unreal.AssetToolsHelpers().get_asset_tools() asset_dir, container_name = tools.create_unique_asset_name( - f"{root}/Assets/{asset}/{name}", suffix="") + f"{root}/Animations/{asset}/{name}", suffix="") hierarchy_dir = root for h in hierarchy: From 0e050d37e91d7730985cfae6d1eed62e97dd915b Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 8 Mar 2022 13:30:24 +0100 Subject: [PATCH 034/357] OP-2765 - fix legacy handling when creating --- .../plugins/create/create_render.py | 31 ++++++++++--------- 1 file changed, 17 insertions(+), 14 deletions(-) diff --git a/openpype/hosts/aftereffects/plugins/create/create_render.py b/openpype/hosts/aftereffects/plugins/create/create_render.py index c290bd46c3..0a907a02d8 100644 --- a/openpype/hosts/aftereffects/plugins/create/create_render.py +++ b/openpype/hosts/aftereffects/plugins/create/create_render.py @@ -1,4 +1,5 @@ -import json +import avalon.api + from openpype import resources import openpype.hosts.aftereffects.api as api from openpype.pipeline import ( @@ -22,7 +23,9 @@ class RenderCreator(Creator): def collect_instances(self): for instance_data in api.list_instances(): - creator_id = instance_data.get("creator_identifier") + # legacy instances have family=='render' or 'renderLocal', use them + creator_id = (instance_data.get("creator_identifier") or + instance_data.get("family").replace("Local", '')) if creator_id == self.identifier: instance_data = self._handle_legacy(instance_data) instance = CreatedInstance.from_existing( @@ -32,22 +35,16 @@ class RenderCreator(Creator): def update_instances(self, update_list): created_inst, changes = update_list[0] - print("RenderCreator update_list:: {}-{}".format(created_inst, changes)) api.get_stub().imprint(created_inst.get("instance_id"), created_inst.data_to_store()) def 
remove_instances(self, instances): for instance in instances: - print("instance:: {}".format(instance)) api.remove_instance(instance) self._remove_instance_from_context(instance) def create(self, subset_name, data, pre_create_data): - print("Data that can be used in create:\n{}".format( - json.dumps(pre_create_data, indent=4) - )) stub = api.get_stub() # only after After Effects is up - print("pre_create_data:: {}".format(pre_create_data)) if pre_create_data.get("use_selection"): items = stub.get_selected_items( comps=True, folders=False, footages=False @@ -59,7 +56,6 @@ class RenderCreator(Creator): raise CreatorError( "Please select only single composition at time." ) - print("items:: {}".format(items)) if not items: raise CreatorError(( "Nothing to create. Select composition " @@ -73,7 +69,6 @@ class RenderCreator(Creator): api.get_stub().imprint(new_instance.get("instance_id"), new_instance.data_to_store()) - self.log.info(new_instance.data) self._add_instance_to_context(new_instance) def get_default_variants(self): @@ -99,12 +94,20 @@ class RenderCreator(Creator): def _handle_legacy(self, instance_data): """Converts old instances to new format.""" + if not instance_data.get("members"): + instance_data["members"] = [instance_data.get("uuid")] + if instance_data.get("uuid"): - instance_data["item_id"] = instance_data.get("uuid") + # uuid not needed, replaced with unique instance_id + api.get_stub().remove_instance(instance_data.get("uuid")) instance_data.pop("uuid") - if not instance_data.get("members"): - instance_data["members"] = [instance_data["item_id"]] + if not instance_data.get("task"): + instance_data["task"] = avalon.api.Session.get("AVALON_TASK") + + if not instance_data.get("creator_attributes"): + is_old_farm = instance_data["family"] != "renderLocal" + instance_data["creator_attributes"] = {"farm": is_old_farm} + instance_data["family"] = self.family return instance_data - From ef726be366bbaaa877262b5843ff118205f24183 Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Tue, 8 Mar 2022 12:30:38 +0000 Subject: [PATCH 035/357] Activates MovieRenderPipeline plugin when creating Unreal project --- openpype/hosts/unreal/lib.py | 1 + 1 file changed, 1 insertion(+) diff --git a/openpype/hosts/unreal/lib.py b/openpype/hosts/unreal/lib.py index d4a776e892..805e883c64 100644 --- a/openpype/hosts/unreal/lib.py +++ b/openpype/hosts/unreal/lib.py @@ -254,6 +254,7 @@ def create_unreal_project(project_name: str, {"Name": "PythonScriptPlugin", "Enabled": True}, {"Name": "EditorScriptingUtilities", "Enabled": True}, {"Name": "SequencerScripting", "Enabled": True}, + {"Name": "MovieRenderPipeline", "Enabled": True}, {"Name": "OpenPype", "Enabled": True} ] } From ca0a38f8de82e488e9353d1f1117a4e60620e41f Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 8 Mar 2022 13:32:26 +0100 Subject: [PATCH 036/357] OP-2765 - fixed exclude filter to user family or families properly Added render.farm to excluded, as in NP family is always 'render' --- openpype/plugins/publish/integrate_new.py | 19 +++++++++++++------ 1 file changed, 13 insertions(+), 6 deletions(-) diff --git a/openpype/plugins/publish/integrate_new.py b/openpype/plugins/publish/integrate_new.py index 6e0940d459..581902205f 100644 --- a/openpype/plugins/publish/integrate_new.py +++ b/openpype/plugins/publish/integrate_new.py @@ -103,7 +103,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): "hda", "usd" ] - exclude_families = ["clip"] + exclude_families = ["clip", "render.farm"] db_representation_context_keys = [ "project", 
"asset", "task", "subset", "version", "representation", "family", "hierarchy", "task", "username" @@ -121,11 +121,15 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): subset_grouping_profiles = None def process(self, instance): - self.integrated_file_sizes = {} - if [ef for ef in self.exclude_families - if instance.data["family"] in ef]: - return + for ef in self.exclude_families: + if ( + instance.data["family"] == ef or + ef in instance.data["families"]): + self.log.debug("Excluded family '{}' in '{}' or {}".format( + ef, instance.data["family"], instance.data["families"])) + return + self.integrated_file_sizes = {} try: self.register(instance) self.log.info("Integrated Asset in to the database ...") @@ -214,7 +218,10 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): # Ensure at least one file is set up for transfer in staging dir. repres = instance.data.get("representations") - assert repres, "Instance has no files to transfer" + repres = instance.data.get("representations") + msg = "Instance {} has no files to transfer".format( + instance.data["family"]) + assert repres, msg assert isinstance(repres, (list, tuple)), ( "Instance 'files' must be a list, got: {0} {1}".format( str(type(repres)), str(repres) From 296a2d162704b9ca0c1974d4b8093fe698760d6b Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 8 Mar 2022 13:34:12 +0100 Subject: [PATCH 037/357] OP-2765 - added publish flag to new instance of workfile --- openpype/hosts/aftereffects/plugins/publish/collect_workfile.py | 1 + 1 file changed, 1 insertion(+) diff --git a/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py b/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py index 1bb476d80b..67f037e6e6 100644 --- a/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py +++ b/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py @@ -65,6 +65,7 @@ class CollectWorkfile(pyblish.api.ContextPlugin): instance.data.update(shared_instance_data) else: instance = existing_instance + instance.data["publish"] = True # for DL # creating representation representation = { From 2d9bac166a466f8489e38997ec440c6f23476f26 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 8 Mar 2022 13:35:02 +0100 Subject: [PATCH 038/357] OP-2765 - modified proper families renderLocal is legacy, should be removed in the future --- .../hosts/aftereffects/plugins/publish/extract_local_render.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/aftereffects/plugins/publish/extract_local_render.py b/openpype/hosts/aftereffects/plugins/publish/extract_local_render.py index b738068a7b..7323a0b125 100644 --- a/openpype/hosts/aftereffects/plugins/publish/extract_local_render.py +++ b/openpype/hosts/aftereffects/plugins/publish/extract_local_render.py @@ -12,7 +12,7 @@ class ExtractLocalRender(openpype.api.Extractor): order = openpype.api.Extractor.order - 0.47 label = "Extract Local Render" hosts = ["aftereffects"] - families = ["render"] + families = ["renderLocal", "render.local"] def process(self, instance): stub = get_stub() From bf51f8452b8e2410d049f63389e3179bec31b600 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 8 Mar 2022 13:40:41 +0100 Subject: [PATCH 039/357] OP-2765 - modified collect render plugin Should handle both legacy and new style of publishing --- .../hosts/aftereffects/plugins/publish/collect_render.py | 8 +++++--- .../aftereffects/plugins/publish/pre_collect_render.py | 9 +++++---- 2 files changed, 10 insertions(+), 7 deletions(-) diff --git 
a/openpype/hosts/aftereffects/plugins/publish/collect_render.py b/openpype/hosts/aftereffects/plugins/publish/collect_render.py index b41fb5d5f5..d31571b6b5 100644 --- a/openpype/hosts/aftereffects/plugins/publish/collect_render.py +++ b/openpype/hosts/aftereffects/plugins/publish/collect_render.py @@ -84,7 +84,6 @@ class CollectAERender(abstract_collect_render.AbstractCollectRender): subset_name = inst.data["subset"] instance = AERenderInstance( family=family, - families=[family], version=version, time="", source=current_file, @@ -124,19 +123,20 @@ class CollectAERender(abstract_collect_render.AbstractCollectRender): instance.comp_name = comp.name instance.comp_id = item_id - is_local = "renderLocal" in inst.data["families"] + is_local = "renderLocal" in inst.data["families"] # legacy if inst.data.get("creator_attributes"): is_local = not inst.data["creator_attributes"].get("farm") if is_local: # for local renders instance = self._update_for_local(instance, project_entity) + else: + instance.families = ["render.farm"] instances.append(instance) instances_to_remove.append(inst) for instance in instances_to_remove: context.remove(instance) - return instances def get_expected_files(self, render_instance): @@ -205,10 +205,12 @@ class CollectAERender(abstract_collect_render.AbstractCollectRender): return base_dir def _update_for_local(self, instance, project_entity): + """Update old saved instances to current publishing format""" instance.anatomyData["version"] = instance.version instance.anatomyData["subset"] = instance.subset instance.stagingDir = tempfile.mkdtemp() instance.projectEntity = project_entity + instance.families = ["render.local"] settings = get_project_settings(os.getenv("AVALON_PROJECT")) reviewable_subset_filter = (settings["deadline"] diff --git a/openpype/hosts/aftereffects/plugins/publish/pre_collect_render.py b/openpype/hosts/aftereffects/plugins/publish/pre_collect_render.py index 56dc884634..614a04b4b7 100644 --- a/openpype/hosts/aftereffects/plugins/publish/pre_collect_render.py +++ b/openpype/hosts/aftereffects/plugins/publish/pre_collect_render.py @@ -1,12 +1,14 @@ import json import pyblish.api -from openpype.hosts.aftereffects.api import get_stub, list_instances +from openpype.hosts.aftereffects.api import list_instances class PreCollectRender(pyblish.api.ContextPlugin): """ - Checks if render instance is of new type, adds to families to both + Checks if render instance is of old type, adds to families to both existing collectors work same way. + + Could be removed in the future when no one uses old publish. 
""" label = "PreCollect Render" @@ -15,7 +17,7 @@ class PreCollectRender(pyblish.api.ContextPlugin): family_remapping = { "render": ("render.farm", "farm"), # (family, label) - "renderLocal": ("render", "local") + "renderLocal": ("render.local", "local") } def process(self, context): @@ -23,7 +25,6 @@ class PreCollectRender(pyblish.api.ContextPlugin): self.log.debug("Not applicable for New Publisher, skip") return - stub = get_stub() for inst in list_instances(): if inst["family"] not in self.family_remapping.keys(): continue From 9e3ea9139a06ad3cc495f8d0c43eb64a7eff8260 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 8 Mar 2022 13:58:20 +0100 Subject: [PATCH 040/357] OP-2765 - Hound --- openpype/hosts/aftereffects/api/pipeline.py | 2 +- .../aftereffects/plugins/create/create_legacy_local_render.py | 2 +- .../hosts/aftereffects/plugins/create/create_legacy_render.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/aftereffects/api/pipeline.py b/openpype/hosts/aftereffects/api/pipeline.py index 4ae88e649a..4ade90e4dd 100644 --- a/openpype/hosts/aftereffects/api/pipeline.py +++ b/openpype/hosts/aftereffects/api/pipeline.py @@ -152,7 +152,7 @@ def check_inventory(): # Warn about outdated containers. print("Starting new QApplication..") - app = QtWidgets.QApplication(sys.argv) + _app = QtWidgets.QApplication(sys.argv) message_box = QtWidgets.QMessageBox() message_box.setIcon(QtWidgets.QMessageBox.Warning) diff --git a/openpype/hosts/aftereffects/plugins/create/create_legacy_local_render.py b/openpype/hosts/aftereffects/plugins/create/create_legacy_local_render.py index 4fb07f31f8..04413acbcf 100644 --- a/openpype/hosts/aftereffects/plugins/create/create_legacy_local_render.py +++ b/openpype/hosts/aftereffects/plugins/create/create_legacy_local_render.py @@ -10,4 +10,4 @@ class CreateLocalRender(create_legacy_render.CreateRender): name = "renderDefault" label = "Render Locally" - family = "renderLocal" \ No newline at end of file + family = "renderLocal" diff --git a/openpype/hosts/aftereffects/plugins/create/create_legacy_render.py b/openpype/hosts/aftereffects/plugins/create/create_legacy_render.py index 7da489a731..8dfc85cdc8 100644 --- a/openpype/hosts/aftereffects/plugins/create/create_legacy_render.py +++ b/openpype/hosts/aftereffects/plugins/create/create_legacy_render.py @@ -59,4 +59,4 @@ class CreateRender(openpype.api.Creator): stub.imprint(item, self.data) stub.set_label_color(item.id, 14) # Cyan options 0 - 16 - stub.rename_item(item.id, stub.PUBLISH_ICON + self.data["subset"]) \ No newline at end of file + stub.rename_item(item.id, stub.PUBLISH_ICON + self.data["subset"]) From fedc09f83dd304923715803253702a547cfbe9ff Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Tue, 8 Mar 2022 13:05:14 +0000 Subject: [PATCH 041/357] Set bound scale for rig actors loaded with layout This is needed for actors that gets close to the camera, that wouldn't be rendered without this parameter set to the maximum value. 
--- openpype/hosts/unreal/plugins/load/load_layout.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/openpype/hosts/unreal/plugins/load/load_layout.py b/openpype/hosts/unreal/plugins/load/load_layout.py index 9f30affa3d..d99224042a 100644 --- a/openpype/hosts/unreal/plugins/load/load_layout.py +++ b/openpype/hosts/unreal/plugins/load/load_layout.py @@ -194,6 +194,11 @@ class LayoutLoader(plugin.Loader): ), False) actor.set_actor_scale3d(transform.get('scale')) + if class_name == 'SkeletalMesh': + skm_comp = actor.get_editor_property( + 'skeletal_mesh_component') + skm_comp.set_bounds_scale(10.0) + actors.append(actor) binding = sequence.add_possessable(actor) From 3b72117a946d15954112b77107d04f325d30c0a3 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 8 Mar 2022 19:11:55 +0100 Subject: [PATCH 042/357] OP-2765 - refactored validator --- .../publish/validate_scene_settings.py | 39 ++++++++++--------- 1 file changed, 21 insertions(+), 18 deletions(-) diff --git a/openpype/hosts/aftereffects/plugins/publish/validate_scene_settings.py b/openpype/hosts/aftereffects/plugins/publish/validate_scene_settings.py index 273ccd295e..0753e3c09a 100644 --- a/openpype/hosts/aftereffects/plugins/publish/validate_scene_settings.py +++ b/openpype/hosts/aftereffects/plugins/publish/validate_scene_settings.py @@ -62,12 +62,13 @@ class ValidateSceneSettings(pyblish.api.InstancePlugin): expected_settings = get_asset_settings() self.log.info("config from DB::{}".format(expected_settings)) - if any(re.search(pattern, os.getenv('AVALON_TASK')) + task_name = instance.data["anatomyData"]["task"]["name"] + if any(re.search(pattern, task_name) for pattern in self.skip_resolution_check): expected_settings.pop("resolutionWidth") expected_settings.pop("resolutionHeight") - if any(re.search(pattern, os.getenv('AVALON_TASK')) + if any(re.search(pattern, task_name) for pattern in self.skip_timelines_check): expected_settings.pop('fps', None) expected_settings.pop('frameStart', None) @@ -87,10 +88,14 @@ class ValidateSceneSettings(pyblish.api.InstancePlugin): duration = instance.data.get("frameEndHandle") - \ instance.data.get("frameStartHandle") + 1 - self.log.debug("filtered config::{}".format(expected_settings)) + self.log.debug("validated items::{}".format(expected_settings)) current_settings = { "fps": fps, + "frameStart": instance.data.get("frameStart"), + "frameEnd": instance.data.get("frameEnd"), + "handleStart": instance.data.get("handleStart"), + "handleEnd": instance.data.get("handleEnd"), "frameStartHandle": instance.data.get("frameStartHandle"), "frameEndHandle": instance.data.get("frameEndHandle"), "resolutionWidth": instance.data.get("resolutionWidth"), @@ -103,24 +108,22 @@ class ValidateSceneSettings(pyblish.api.InstancePlugin): invalid_keys = set() for key, value in expected_settings.items(): if value != current_settings[key]: - invalid_settings.append( - "{} expected: {} found: {}".format(key, value, - current_settings[key]) - ) + msg = "'{}' expected: '{}' found: '{}'".format( + key, value, current_settings[key]) + + if key == "duration" and expected_settings.get("handleStart"): + msg += "Handles included in calculation. Remove " \ + "handles in DB or extend frame range in " \ + "Composition Setting." + + invalid_settings.append(msg) invalid_keys.add(key) - if ((expected_settings.get("handleStart") - or expected_settings.get("handleEnd")) - and invalid_settings): - msg = "Handles included in calculation. Remove handles in DB " +\ - "or extend frame range in Composition Setting." 
- invalid_settings[-1]["reason"] = msg - - msg = "Found invalid settings:\n{}".format( - "\n".join(invalid_settings) - ) - if invalid_settings: + msg = "Found invalid settings:\n{}".format( + "\n".join(invalid_settings) + ) + invalid_keys_str = ",".join(invalid_keys) break_str = "
" invalid_setting_str = "Found invalid settings:
{}".\ From 84b6a6cc6949ea849376f410417c9198a92a9241 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 9 Mar 2022 10:20:58 +0100 Subject: [PATCH 043/357] OP-2868 - added configuration for default variant value to Settings --- .../plugins/create/create_render.py | 16 +++++++++---- .../project_settings/aftereffects.json | 7 ++++++ .../schema_project_aftereffects.json | 23 +++++++++++++++++++ 3 files changed, 41 insertions(+), 5 deletions(-) diff --git a/openpype/hosts/aftereffects/plugins/create/create_render.py b/openpype/hosts/aftereffects/plugins/create/create_render.py index 0a907a02d8..e690af63d0 100644 --- a/openpype/hosts/aftereffects/plugins/create/create_render.py +++ b/openpype/hosts/aftereffects/plugins/create/create_render.py @@ -18,6 +18,16 @@ class RenderCreator(Creator): create_allow_context_change = False + def __init__( + self, create_context, system_settings, project_settings, headless=False + ): + super(RenderCreator, self).__init__(create_context, system_settings, + project_settings, headless) + self._default_variants = (project_settings["aftereffects"] + ["create"] + ["RenderCreator"] + ["defaults"]) + def get_icon(self): return resources.get_openpype_splash_filepath() @@ -72,11 +82,7 @@ class RenderCreator(Creator): self._add_instance_to_context(new_instance) def get_default_variants(self): - return [ - "myVariant", - "variantTwo", - "different_variant" - ] + return self._default_variants def get_instance_attr_defs(self): return [lib.BoolDef("farm", label="Render on farm")] diff --git a/openpype/settings/defaults/project_settings/aftereffects.json b/openpype/settings/defaults/project_settings/aftereffects.json index 6a9a399069..8083aa0972 100644 --- a/openpype/settings/defaults/project_settings/aftereffects.json +++ b/openpype/settings/defaults/project_settings/aftereffects.json @@ -1,4 +1,11 @@ { + "create": { + "RenderCreator": { + "defaults": [ + "Main" + ] + } + }, "publish": { "ValidateSceneSettings": { "enabled": true, diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_aftereffects.json b/openpype/settings/entities/schemas/projects_schema/schema_project_aftereffects.json index 4c4cd225ab..1a3eaef540 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_aftereffects.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_aftereffects.json @@ -5,6 +5,29 @@ "label": "AfterEffects", "is_file": true, "children": [ + { + "type": "dict", + "collapsible": true, + "key": "create", + "label": "Creator plugins", + "children": [ + { + "type": "dict", + "collapsible": true, + "key": "RenderCreator", + "label": "Create render", + "children": [ + { + "type": "list", + "key": "defaults", + "label": "Default Variants", + "object_type": "text", + "docstring": "Fill default variant(s) (like 'Main' or 'Default') used in subset name creation." 
+ } + ] + } + ] + }, { "type": "dict", "collapsible": true, From 87d114a272cac020f1a482b6209ad01a9907ba01 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 9 Mar 2022 10:49:43 +0100 Subject: [PATCH 044/357] OP-2765 - added error message when creating same subset --- openpype/hosts/aftereffects/plugins/create/create_render.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/openpype/hosts/aftereffects/plugins/create/create_render.py b/openpype/hosts/aftereffects/plugins/create/create_render.py index 0a907a02d8..e75353c7a5 100644 --- a/openpype/hosts/aftereffects/plugins/create/create_render.py +++ b/openpype/hosts/aftereffects/plugins/create/create_render.py @@ -63,6 +63,11 @@ class RenderCreator(Creator): "one composition." )) + for inst in self.create_context.instances: + if subset_name == inst.subset_name: + raise CreatorError("{} already exists".format( + inst.subset_name)) + data["members"] = [items[0].id] new_instance = CreatedInstance(self.family, subset_name, data, self) new_instance.creator_attributes["farm"] = pre_create_data["farm"] From 32f015098b95d7953d94d878f32afbd4022a18df Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 9 Mar 2022 11:08:51 +0100 Subject: [PATCH 045/357] OP-2765 - reimplemented get_context_title --- openpype/hosts/aftereffects/api/pipeline.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/aftereffects/api/pipeline.py b/openpype/hosts/aftereffects/api/pipeline.py index 4ade90e4dd..38ab2225bf 100644 --- a/openpype/hosts/aftereffects/api/pipeline.py +++ b/openpype/hosts/aftereffects/api/pipeline.py @@ -273,7 +273,12 @@ def update_context_data(data, changes): def get_context_title(): """Returns title for Creator window""" - return "AfterEffects" + import avalon.api + + project_name = avalon.api.Session["AVALON_PROJECT"] + asset_name = avalon.api.Session["AVALON_ASSET"] + task_name = avalon.api.Session["AVALON_TASK"] + return "{}/{}/{}".format(project_name, asset_name, task_name) def _get_stub(): From 56e2121e308f6bdf7e1551336ae3c28104920775 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 9 Mar 2022 15:23:46 +0100 Subject: [PATCH 046/357] OP-2765 - fix local rendering in old publish --- openpype/hosts/aftereffects/plugins/publish/collect_render.py | 4 ++-- .../hosts/aftereffects/plugins/publish/pre_collect_render.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/aftereffects/plugins/publish/collect_render.py b/openpype/hosts/aftereffects/plugins/publish/collect_render.py index d31571b6b5..43efd34635 100644 --- a/openpype/hosts/aftereffects/plugins/publish/collect_render.py +++ b/openpype/hosts/aftereffects/plugins/publish/collect_render.py @@ -57,7 +57,7 @@ class CollectAERender(abstract_collect_render.AbstractCollectRender): compositions_by_id = {item.id: item for item in compositions} for inst in context: family = inst.data["family"] - if family != "render": + if family not in ["render", "renderLocal"]: # legacy continue item_id = inst.data["members"][0] @@ -123,7 +123,7 @@ class CollectAERender(abstract_collect_render.AbstractCollectRender): instance.comp_name = comp.name instance.comp_id = item_id - is_local = "renderLocal" in inst.data["families"] # legacy + is_local = "renderLocal" in inst.data["family"] # legacy if inst.data.get("creator_attributes"): is_local = not inst.data["creator_attributes"].get("farm") if is_local: diff --git a/openpype/hosts/aftereffects/plugins/publish/pre_collect_render.py 
b/openpype/hosts/aftereffects/plugins/publish/pre_collect_render.py index 614a04b4b7..3e84753555 100644 --- a/openpype/hosts/aftereffects/plugins/publish/pre_collect_render.py +++ b/openpype/hosts/aftereffects/plugins/publish/pre_collect_render.py @@ -34,7 +34,7 @@ class PreCollectRender(pyblish.api.ContextPlugin): "Please recreate instance.") instance = context.create_instance(inst["subset"]) - inst["families"] = [self.family_remapping[inst["family"]]] + inst["families"] = [self.family_remapping[inst["family"]][0]] instance.data.update(inst) self._debug_log(instance) From ec9b4802f40d6fe1d3dd02ab1195bace33ef0c82 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 9 Mar 2022 16:07:18 +0100 Subject: [PATCH 047/357] OP-2765 - trigger failure when new instance tried to be published by Pyblish This could happen if artist try to switch between old Pyblish and New Publish --- .../hosts/aftereffects/plugins/publish/pre_collect_render.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/openpype/hosts/aftereffects/plugins/publish/pre_collect_render.py b/openpype/hosts/aftereffects/plugins/publish/pre_collect_render.py index 3e84753555..46bb9865b9 100644 --- a/openpype/hosts/aftereffects/plugins/publish/pre_collect_render.py +++ b/openpype/hosts/aftereffects/plugins/publish/pre_collect_render.py @@ -26,6 +26,10 @@ class PreCollectRender(pyblish.api.ContextPlugin): return for inst in list_instances(): + if inst.get("creator_attributes"): + raise ValueError("Instance created in New publisher, " + "cannot be published in Pyblish") + if inst["family"] not in self.family_remapping.keys(): continue From a5c38a8b2f19d24c55c2be564ab701f68f886c36 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 9 Mar 2022 16:24:59 +0100 Subject: [PATCH 048/357] OP-2765 - added new label for families In the future they will be both merged to render.farm (when Harmony is updated to New Publisher). --- openpype/lib/abstract_collect_render.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/openpype/lib/abstract_collect_render.py b/openpype/lib/abstract_collect_render.py index 3839aad45d..e160f5a040 100644 --- a/openpype/lib/abstract_collect_render.py +++ b/openpype/lib/abstract_collect_render.py @@ -138,7 +138,9 @@ class AbstractCollectRender(pyblish.api.ContextPlugin): try: if "workfile" in instance.data["families"]: instance.data["publish"] = True - if "renderFarm" in instance.data["families"]: + # TODO merge renderFarm and render.farm + if ("renderFarm" in instance.data["families"] or + "render.farm" in instance.data["families"]): instance.data["remove"] = True except KeyError: # be tolerant if 'families' is missing. 
From 3b9e319de27548a935b2aaba2064193a674fdd88 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 9 Mar 2022 16:26:37 +0100 Subject: [PATCH 049/357] OP-2765 - fixed resolution between local and farm --- .../hosts/aftereffects/plugins/publish/collect_render.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/aftereffects/plugins/publish/collect_render.py b/openpype/hosts/aftereffects/plugins/publish/collect_render.py index 43efd34635..aa5bc58ac2 100644 --- a/openpype/hosts/aftereffects/plugins/publish/collect_render.py +++ b/openpype/hosts/aftereffects/plugins/publish/collect_render.py @@ -84,6 +84,7 @@ class CollectAERender(abstract_collect_render.AbstractCollectRender): subset_name = inst.data["subset"] instance = AERenderInstance( family=family, + families=inst.data.get("families", []), version=version, time="", source=current_file, @@ -130,7 +131,9 @@ class CollectAERender(abstract_collect_render.AbstractCollectRender): # for local renders instance = self._update_for_local(instance, project_entity) else: - instance.families = ["render.farm"] + fam = "render.farm" + if fam not in instance.families: + instance.families.append(fam) instances.append(instance) instances_to_remove.append(inst) @@ -210,7 +213,9 @@ class CollectAERender(abstract_collect_render.AbstractCollectRender): instance.anatomyData["subset"] = instance.subset instance.stagingDir = tempfile.mkdtemp() instance.projectEntity = project_entity - instance.families = ["render.local"] + fam = "render.local" + if fam not in instance.families: + instance.families.append(fam) settings = get_project_settings(os.getenv("AVALON_PROJECT")) reviewable_subset_filter = (settings["deadline"] From d4f50e2abdf55fed0c12f439062c75b5c780a7e3 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 10 Mar 2022 15:10:18 +0100 Subject: [PATCH 050/357] OP-2765 - fix imports for legacy farm creator --- .../aftereffects/plugins/create/create_legacy_render.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/aftereffects/plugins/create/create_legacy_render.py b/openpype/hosts/aftereffects/plugins/create/create_legacy_render.py index 8dfc85cdc8..e4fbb47a33 100644 --- a/openpype/hosts/aftereffects/plugins/create/create_legacy_render.py +++ b/openpype/hosts/aftereffects/plugins/create/create_legacy_render.py @@ -1,13 +1,12 @@ -from avalon.api import CreatorError - -import openpype.api +from openpype.pipeline import create +from openpype.pipeline import CreatorError from openpype.hosts.aftereffects.api import ( get_stub, list_instances ) -class CreateRender(openpype.api.Creator): +class CreateRender(create.LegacyCreator): """Render folder for publish. 
Creates subsets in format 'familyTaskSubsetname', @@ -23,6 +22,7 @@ class CreateRender(openpype.api.Creator): def process(self): stub = get_stub() # only after After Effects is up + items = [] if (self.options or {}).get("useSelection"): items = stub.get_selected_items( comps=True, folders=False, footages=False From a15552f878a0aab7ecfa37053ea2b646161cd37b Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 10 Mar 2022 15:10:42 +0100 Subject: [PATCH 051/357] OP-2765 - fix imports for new creator --- .../hosts/aftereffects/plugins/create/create_render.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/aftereffects/plugins/create/create_render.py b/openpype/hosts/aftereffects/plugins/create/create_render.py index e75353c7a5..1a5a826137 100644 --- a/openpype/hosts/aftereffects/plugins/create/create_render.py +++ b/openpype/hosts/aftereffects/plugins/create/create_render.py @@ -1,7 +1,7 @@ -import avalon.api +from avalon import api as avalon_api from openpype import resources -import openpype.hosts.aftereffects.api as api +from openpype.hosts.aftereffects import api from openpype.pipeline import ( Creator, CreatedInstance, @@ -25,7 +25,7 @@ class RenderCreator(Creator): for instance_data in api.list_instances(): # legacy instances have family=='render' or 'renderLocal', use them creator_id = (instance_data.get("creator_identifier") or - instance_data.get("family").replace("Local", '')) + instance_data.get("family", '').replace("Local", '')) if creator_id == self.identifier: instance_data = self._handle_legacy(instance_data) instance = CreatedInstance.from_existing( @@ -108,7 +108,7 @@ class RenderCreator(Creator): instance_data.pop("uuid") if not instance_data.get("task"): - instance_data["task"] = avalon.api.Session.get("AVALON_TASK") + instance_data["task"] = avalon_api.Session.get("AVALON_TASK") if not instance_data.get("creator_attributes"): is_old_farm = instance_data["family"] != "renderLocal" From 60edd3abe6bf52271d7f1d84635f0be482d31c65 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 10 Mar 2022 15:13:35 +0100 Subject: [PATCH 052/357] OP-2765 - added functionality to store/retrive context data These data is used for context publish information, for example storing enabling/disabling of validators. Currently not present in AE. 
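A minimal sketch of the intended round trip through the two new host hooks
added below (requires a live After Effects session; the payload keys are
illustrative only, the new publisher decides what it actually stores):

    from openpype.hosts.aftereffects.api.pipeline import (
        get_context_data,
        update_context_data,
    )

    # persists the data under the "publish_context" id via the AE stub metadata
    update_context_data({"plugins_states": {"ValidateSceneSettings": False}}, changes={})

    # a later session reads it back; returns {} when nothing was stored yet
    assert get_context_data() == {"plugins_states": {"ValidateSceneSettings": False}}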
--- openpype/hosts/aftereffects/api/pipeline.py | 22 +++++++++++++-------- openpype/hosts/aftereffects/api/ws_stub.py | 10 ++++++---- 2 files changed, 20 insertions(+), 12 deletions(-) diff --git a/openpype/hosts/aftereffects/api/pipeline.py b/openpype/hosts/aftereffects/api/pipeline.py index 38ab2225bf..978d035020 100644 --- a/openpype/hosts/aftereffects/api/pipeline.py +++ b/openpype/hosts/aftereffects/api/pipeline.py @@ -9,6 +9,7 @@ from avalon import io, pipeline from openpype import lib from openpype.api import Logger +from openpype.pipeline import LegacyCreator import openpype.hosts.aftereffects from openpype.pipeline import BaseCreator @@ -34,7 +35,7 @@ def install(): pyblish.api.register_plugin_path(PUBLISH_PATH) avalon.api.register_plugin_path(avalon.api.Loader, LOAD_PATH) - avalon.api.register_plugin_path(avalon.api.Creator, CREATE_PATH) + avalon.api.register_plugin_path(LegacyCreator, CREATE_PATH) avalon.api.register_plugin_path(BaseCreator, CREATE_PATH) log.info(PUBLISH_PATH) @@ -48,7 +49,7 @@ def install(): def uninstall(): pyblish.api.deregister_plugin_path(PUBLISH_PATH) avalon.api.deregister_plugin_path(avalon.api.Loader, LOAD_PATH) - avalon.api.deregister_plugin_path(avalon.api.Creator, CREATE_PATH) + avalon.api.deregister_plugin_path(LegacyCreator, CREATE_PATH) def application_launch(): @@ -223,10 +224,8 @@ def list_instances(): layers_meta = stub.get_metadata() for instance in layers_meta: - if instance.get("schema") and \ - "container" in instance.get("schema"): - continue - instances.append(instance) + if instance.get("id") == "pyblish.avalon.instance": + instances.append(instance) return instances @@ -263,12 +262,19 @@ def remove_instance(instance): # new publisher section def get_context_data(): - print("get_context_data") + meta = _get_stub().get_metadata() + for item in meta: + if item.get("id") == "publish_context": + item.pop("id") + return item + return {} def update_context_data(data, changes): - print("update_context_data") + item = data + item["id"] = "publish_context" + _get_stub().imprint(item["id"], item) def get_context_title(): diff --git a/openpype/hosts/aftereffects/api/ws_stub.py b/openpype/hosts/aftereffects/api/ws_stub.py index 1d3b69e038..d2dc40ec89 100644 --- a/openpype/hosts/aftereffects/api/ws_stub.py +++ b/openpype/hosts/aftereffects/api/ws_stub.py @@ -155,10 +155,12 @@ class AfterEffectsServerStub(): item_ids = [int(item.id) for item in all_items] cleaned_data = [] for meta in result_meta: - # for creation of instance OR loaded container - if 'instance' in meta.get('id') or \ - int(meta.get('members')[0]) in item_ids: - cleaned_data.append(meta) + # do not added instance with nonexistend item id + if meta.get("members"): + if int(meta["members"][0]) not in item_ids: + continue + + cleaned_data.append(meta) payload = json.dumps(cleaned_data, indent=4) From 3b4f96efa601351bb894f64a6e3d2d2e2c55d88b Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 10 Mar 2022 15:19:42 +0100 Subject: [PATCH 053/357] OP-2765 - more explicit error message --- .../hosts/aftereffects/plugins/publish/pre_collect_render.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/aftereffects/plugins/publish/pre_collect_render.py b/openpype/hosts/aftereffects/plugins/publish/pre_collect_render.py index 46bb9865b9..03ec184524 100644 --- a/openpype/hosts/aftereffects/plugins/publish/pre_collect_render.py +++ b/openpype/hosts/aftereffects/plugins/publish/pre_collect_render.py @@ -28,7 +28,9 @@ class PreCollectRender(pyblish.api.ContextPlugin): 
for inst in list_instances(): if inst.get("creator_attributes"): raise ValueError("Instance created in New publisher, " - "cannot be published in Pyblish") + "cannot be published in Pyblish.\n" + "Please publish in New Publisher " + "or recreate instances with legacy Creators") if inst["family"] not in self.family_remapping.keys(): continue From 88a59bc0ee4efa61c6765a26094342f7d4d6b106 Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Thu, 10 Mar 2022 16:18:49 +0000 Subject: [PATCH 054/357] Fixed class name for Render Publish Instance --- openpype/hosts/unreal/api/rendering.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/unreal/api/rendering.py b/openpype/hosts/unreal/api/rendering.py index d70d621b8a..38bcf21b1c 100644 --- a/openpype/hosts/unreal/api/rendering.py +++ b/openpype/hosts/unreal/api/rendering.py @@ -37,7 +37,7 @@ def start_rendering(): # instances = pipeline.ls_inst() instances = [ a for a in assets - if a.get_class().get_name() == "AvalonPublishInstance"] + if a.get_class().get_name() == "OpenPypePublishInstance"] inst_data = [] From 4ad37ad687324c934612aa12235a4292a70955ac Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 12 Mar 2022 01:10:30 +0000 Subject: [PATCH 055/357] Bump pillow from 9.0.0 to 9.0.1 Bumps [pillow](https://github.com/python-pillow/Pillow) from 9.0.0 to 9.0.1. - [Release notes](https://github.com/python-pillow/Pillow/releases) - [Changelog](https://github.com/python-pillow/Pillow/blob/main/CHANGES.rst) - [Commits](https://github.com/python-pillow/Pillow/compare/9.0.0...9.0.1) --- updated-dependencies: - dependency-name: pillow dependency-type: direct:production ... Signed-off-by: dependabot[bot] --- poetry.lock | 146 ++++++++++++++++++++++++++-------------------------- 1 file changed, 73 insertions(+), 73 deletions(-) diff --git a/poetry.lock b/poetry.lock index ee7b839b8d..b8c7090cc0 100644 --- a/poetry.lock +++ b/poetry.lock @@ -680,15 +680,8 @@ category = "main" optional = false python-versions = "*" -[package.dependencies] -attrs = ">=17.4.0" -importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} -pyrsistent = ">=0.14.0" -six = ">=1.11.0" - [package.extras] -format = ["idna", "jsonpointer (>1.13)", "rfc3987", "strict-rfc3339", "webcolors"] -format_nongpl = ["idna", "jsonpointer (>1.13)", "webcolors", "rfc3986-validator (>0.1.0)", "rfc3339-validator"] +format = ["rfc3987", "strict-rfc3339", "webcolors"] [[package]] name = "keyring" @@ -826,7 +819,7 @@ six = "*" [[package]] name = "pillow" -version = "9.0.0" +version = "9.0.1" description = "Python Imaging Library (Fork)" category = "main" optional = false @@ -1087,14 +1080,6 @@ category = "main" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" -[[package]] -name = "pyrsistent" -version = "0.18.1" -description = "Persistent/Functional/Immutable data structures" -category = "main" -optional = false -python-versions = ">=3.7" - [[package]] name = "pysftp" version = "0.2.9" @@ -1633,7 +1618,7 @@ testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest- [metadata] lock-version = "1.1" python-versions = "3.7.*" -content-hash = "2f78d48a6aad2d8a88b7dd7f31a76d907bec9fb65f0086fba6b6d2e1605f0f88" +content-hash = "b02313c8255a1897b0f0617ad4884a5943696c363512921aab1cb2dd8f4fdbe0" [metadata.files] acre = [] @@ -2171,12 +2156,28 @@ log4mongo = [ {file = "log4mongo-1.7.0.tar.gz", hash = 
"sha256:dc374617206162a0b14167fbb5feac01dbef587539a235dadba6200362984a68"}, ] markupsafe = [ + {file = "MarkupSafe-2.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d8446c54dc28c01e5a2dbac5a25f071f6653e6e40f3a8818e8b45d790fe6ef53"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:36bc903cbb393720fad60fc28c10de6acf10dc6cc883f3e24ee4012371399a38"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d7d807855b419fc2ed3e631034685db6079889a1f01d5d9dac950f764da3dad"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:add36cb2dbb8b736611303cd3bfcee00afd96471b09cda130da3581cbdc56a6d"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:168cd0a3642de83558a5153c8bd34f175a9a6e7f6dc6384b9655d2697312a646"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4dc8f9fb58f7364b63fd9f85013b780ef83c11857ae79f2feda41e270468dd9b"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:20dca64a3ef2d6e4d5d615a3fd418ad3bde77a47ec8a23d984a12b5b4c74491a"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:cdfba22ea2f0029c9261a4bd07e830a8da012291fbe44dc794e488b6c9bb353a"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-win32.whl", hash = "sha256:99df47edb6bda1249d3e80fdabb1dab8c08ef3975f69aed437cb69d0a5de1e28"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:e0f138900af21926a02425cf736db95be9f4af72ba1bb21453432a07f6082134"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f9081981fe268bd86831e5c75f7de206ef275defcb82bc70740ae6dc507aee51"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:0955295dd5eec6cb6cc2fe1698f4c6d84af2e92de33fbcac4111913cd100a6ff"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:0446679737af14f45767963a1a9ef7620189912317d095f2d9ffa183a4d25d2b"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:f826e31d18b516f653fe296d967d700fddad5901ae07c622bb3705955e1faa94"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:fa130dd50c57d53368c9d59395cb5526eda596d3ffe36666cd81a44d56e48872"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:905fec760bd2fa1388bb5b489ee8ee5f7291d692638ea5f67982d968366bef9f"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf5d821ffabf0ef3533c39c518f3357b171a1651c1ff6827325e4489b0e46c3c"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0d4b31cc67ab36e3392bbf3862cfbadac3db12bdd8b02a2731f509ed5b829724"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:baa1a4e8f868845af802979fcdbf0bb11f94f1cb7ced4c4b8a351bb60d108145"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:deb993cacb280823246a026e3b2d81c493c53de6acfd5e6bfe31ab3402bb37dd"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:63f3268ba69ace99cab4e3e3b5840b03340efed0948ab8f78d2fd87ee5442a4f"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = 
"sha256:8d206346619592c6200148b01a2142798c989edcb9c896f9ac9722a99d4e77e6"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-win32.whl", hash = "sha256:6c4ca60fa24e85fe25b912b01e62cb969d69a23a5d5867682dd3e80b5b02581d"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b2f4bf27480f5e5e8ce285a8c8fd176c0b03e93dcc6646477d4630e83440c6a9"}, {file = "MarkupSafe-2.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0717a7390a68be14b8c793ba258e075c6f4ca819f15edfc2a3a027c823718567"}, @@ -2185,14 +2186,27 @@ markupsafe = [ {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:d7f9850398e85aba693bb640262d3611788b1f29a79f0c93c565694658f4071f"}, {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:6a7fae0dd14cf60ad5ff42baa2e95727c3d81ded453457771d02b7d2b3f9c0c2"}, {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:b7f2d075102dc8c794cbde1947378051c4e5180d52d276987b8d28a3bd58c17d"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e9936f0b261d4df76ad22f8fee3ae83b60d7c3e871292cd42f40b81b70afae85"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:2a7d351cbd8cfeb19ca00de495e224dea7e7d919659c2841bbb7f420ad03e2d6"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:60bf42e36abfaf9aff1f50f52644b336d4f0a3fd6d8a60ca0d054ac9f713a864"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d6c7ebd4e944c85e2c3421e612a7057a2f48d478d79e61800d81468a8d842207"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f0567c4dc99f264f49fe27da5f735f414c4e7e7dd850cfd8e69f0862d7c74ea9"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:89c687013cb1cd489a0f0ac24febe8c7a666e6e221b783e53ac50ebf68e45d86"}, {file = "MarkupSafe-2.0.1-cp37-cp37m-win32.whl", hash = "sha256:a30e67a65b53ea0a5e62fe23682cfe22712e01f453b95233b25502f7c61cb415"}, {file = "MarkupSafe-2.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:611d1ad9a4288cf3e3c16014564df047fe08410e628f89805e475368bd304914"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5bb28c636d87e840583ee3adeb78172efc47c8b26127267f54a9c0ec251d41a9"}, {file = "MarkupSafe-2.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:be98f628055368795d818ebf93da628541e10b75b41c559fdf36d104c5787066"}, {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:1d609f577dc6e1aa17d746f8bd3c31aa4d258f4070d61b2aa5c4166c1539de35"}, {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7d91275b0245b1da4d4cfa07e0faedd5b0812efc15b702576d103293e252af1b"}, {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:01a9b8ea66f1658938f65b93a85ebe8bc016e6769611be228d797c9d998dd298"}, {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:47ab1e7b91c098ab893b828deafa1203de86d0bc6ab587b160f78fe6c4011f75"}, {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:97383d78eb34da7e1fa37dd273c20ad4320929af65d156e35a5e2d89566d9dfb"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6fcf051089389abe060c9cd7caa212c707e58153afa2c649f00346ce6d260f1b"}, + {file = 
"MarkupSafe-2.0.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:5855f8438a7d1d458206a2466bf82b0f104a3724bf96a1c781ab731e4201731a"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:3dd007d54ee88b46be476e293f48c85048603f5f516008bee124ddd891398ed6"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:aca6377c0cb8a8253e493c6b451565ac77e98c2951c45f913e0b52facdcff83f"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:04635854b943835a6ea959e948d19dcd311762c5c0c6e1f0e16ee57022669194"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6300b8454aa6930a24b9618fbb54b5a68135092bc666f7b06901f897fa5c2fee"}, {file = "MarkupSafe-2.0.1-cp38-cp38-win32.whl", hash = "sha256:023cb26ec21ece8dc3907c0e8320058b2e0cb3c55cf9564da612bc325bed5e64"}, {file = "MarkupSafe-2.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:984d76483eb32f1bcb536dc27e4ad56bba4baa70be32fa87152832cdd9db0833"}, {file = "MarkupSafe-2.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:2ef54abee730b502252bcdf31b10dacb0a416229b72c18b19e24a4509f273d26"}, @@ -2202,6 +2216,12 @@ markupsafe = [ {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:4efca8f86c54b22348a5467704e3fec767b2db12fc39c6d963168ab1d3fc9135"}, {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:ab3ef638ace319fa26553db0624c4699e31a28bb2a835c5faca8f8acf6a5a902"}, {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:f8ba0e8349a38d3001fae7eadded3f6606f0da5d748ee53cc1dab1d6527b9509"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c47adbc92fc1bb2b3274c4b3a43ae0e4573d9fbff4f54cd484555edbf030baf1"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:37205cac2a79194e3750b0af2a5720d95f786a55ce7df90c3af697bfa100eaac"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:1f2ade76b9903f39aa442b4aadd2177decb66525062db244b35d71d0ee8599b6"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4296f2b1ce8c86a6aea78613c34bb1a672ea0e3de9c6ba08a960efe0b0a09047"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f02365d4e99430a12647f09b6cc8bab61a6564363f313126f775eb4f6ef798e"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5b6d930f030f8ed98e3e6c98ffa0652bdb82601e7a016ec2ab5d7ff23baa78d1"}, {file = "MarkupSafe-2.0.1-cp39-cp39-win32.whl", hash = "sha256:10f82115e21dc0dfec9ab5c0223652f7197feb168c940f3ef61563fc2d6beb74"}, {file = "MarkupSafe-2.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:693ce3f9e70a6cf7d2fb9e6c9d8b204b6b39897a2c4a1aa65728d5ac97dcc1d8"}, {file = "MarkupSafe-2.0.1.tar.gz", hash = "sha256:594c67807fb16238b30c44bdf74f36c02cdf22d1c8cda91ef8a0ed8dabf5620a"}, @@ -2289,38 +2309,41 @@ pathlib2 = [ {file = "pathlib2-2.3.6.tar.gz", hash = "sha256:7d8bcb5555003cdf4a8d2872c538faa3a0f5d20630cb360e518ca3b981795e5f"}, ] pillow = [ - {file = "Pillow-9.0.0-cp310-cp310-macosx_10_10_universal2.whl", hash = "sha256:113723312215b25c22df1fdf0e2da7a3b9c357a7d24a93ebbe80bfda4f37a8d4"}, - {file = "Pillow-9.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:bb47a548cea95b86494a26c89d153fd31122ed65255db5dcbc421a2d28eb3379"}, - {file = "Pillow-9.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:31b265496e603985fad54d52d11970383e317d11e18e856971bdbb86af7242a4"}, - {file = "Pillow-9.0.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d154ed971a4cc04b93a6d5b47f37948d1f621f25de3e8fa0c26b2d44f24e3e8f"}, - {file = "Pillow-9.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80fe92813d208ce8aa7d76da878bdc84b90809f79ccbad2a288e9bcbeac1d9bd"}, - {file = "Pillow-9.0.0-cp310-cp310-win32.whl", hash = "sha256:d5dcea1387331c905405b09cdbfb34611050cc52c865d71f2362f354faee1e9f"}, - {file = "Pillow-9.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:52abae4c96b5da630a8b4247de5428f593465291e5b239f3f843a911a3cf0105"}, - {file = "Pillow-9.0.0-cp37-cp37m-macosx_10_10_x86_64.whl", hash = "sha256:72c3110228944019e5f27232296c5923398496b28be42535e3b2dc7297b6e8b6"}, - {file = "Pillow-9.0.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:97b6d21771da41497b81652d44191489296555b761684f82b7b544c49989110f"}, - {file = "Pillow-9.0.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:72f649d93d4cc4d8cf79c91ebc25137c358718ad75f99e99e043325ea7d56100"}, - {file = "Pillow-9.0.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7aaf07085c756f6cb1c692ee0d5a86c531703b6e8c9cae581b31b562c16b98ce"}, - {file = "Pillow-9.0.0-cp37-cp37m-win32.whl", hash = "sha256:03b27b197deb4ee400ed57d8d4e572d2d8d80f825b6634daf6e2c18c3c6ccfa6"}, - {file = "Pillow-9.0.0-cp37-cp37m-win_amd64.whl", hash = "sha256:a09a9d4ec2b7887f7a088bbaacfd5c07160e746e3d47ec5e8050ae3b2a229e9f"}, - {file = "Pillow-9.0.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:490e52e99224858f154975db61c060686df8a6b3f0212a678e5d2e2ce24675c9"}, - {file = "Pillow-9.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:500d397ddf4bbf2ca42e198399ac13e7841956c72645513e8ddf243b31ad2128"}, - {file = "Pillow-9.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ebd8b9137630a7bbbff8c4b31e774ff05bbb90f7911d93ea2c9371e41039b52"}, - {file = "Pillow-9.0.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fd0e5062f11cb3e730450a7d9f323f4051b532781026395c4323b8ad055523c4"}, - {file = "Pillow-9.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f3b4522148586d35e78313db4db0df4b759ddd7649ef70002b6c3767d0fdeb7"}, - {file = "Pillow-9.0.0-cp38-cp38-win32.whl", hash = "sha256:0b281fcadbb688607ea6ece7649c5d59d4bbd574e90db6cd030e9e85bde9fecc"}, - {file = "Pillow-9.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:b5050d681bcf5c9f2570b93bee5d3ec8ae4cf23158812f91ed57f7126df91762"}, - {file = "Pillow-9.0.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:c2067b3bb0781f14059b112c9da5a91c80a600a97915b4f48b37f197895dd925"}, - {file = "Pillow-9.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2d16b6196fb7a54aff6b5e3ecd00f7c0bab1b56eee39214b2b223a9d938c50af"}, - {file = "Pillow-9.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:98cb63ca63cb61f594511c06218ab4394bf80388b3d66cd61d0b1f63ee0ea69f"}, - {file = "Pillow-9.0.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bc462d24500ba707e9cbdef436c16e5c8cbf29908278af053008d9f689f56dee"}, - {file = "Pillow-9.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:3586e12d874ce2f1bc875a3ffba98732ebb12e18fb6d97be482bd62b56803281"}, - {file = "Pillow-9.0.0-cp39-cp39-win32.whl", hash = "sha256:68e06f8b2248f6dc8b899c3e7ecf02c9f413aab622f4d6190df53a78b93d97a5"}, - {file = "Pillow-9.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:6579f9ba84a3d4f1807c4aab4be06f373017fc65fff43498885ac50a9b47a553"}, - {file = "Pillow-9.0.0-pp37-pypy37_pp73-macosx_10_10_x86_64.whl", hash = "sha256:47f5cf60bcb9fbc46011f75c9b45a8b5ad077ca352a78185bd3e7f1d294b98bb"}, - {file = "Pillow-9.0.0-pp37-pypy37_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2fd8053e1f8ff1844419842fd474fc359676b2e2a2b66b11cc59f4fa0a301315"}, - {file = "Pillow-9.0.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c5439bfb35a89cac50e81c751317faea647b9a3ec11c039900cd6915831064d"}, - {file = "Pillow-9.0.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:95545137fc56ce8c10de646074d242001a112a92de169986abd8c88c27566a05"}, - {file = "Pillow-9.0.0.tar.gz", hash = "sha256:ee6e2963e92762923956fe5d3479b1fdc3b76c83f290aad131a2f98c3df0593e"}, + {file = "Pillow-9.0.1-1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a5d24e1d674dd9d72c66ad3ea9131322819ff86250b30dc5821cbafcfa0b96b4"}, + {file = "Pillow-9.0.1-1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2632d0f846b7c7600edf53c48f8f9f1e13e62f66a6dbc15191029d950bfed976"}, + {file = "Pillow-9.0.1-1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b9618823bd237c0d2575283f2939655f54d51b4527ec3972907a927acbcc5bfc"}, + {file = "Pillow-9.0.1-cp310-cp310-macosx_10_10_universal2.whl", hash = "sha256:9bfdb82cdfeccec50aad441afc332faf8606dfa5e8efd18a6692b5d6e79f00fd"}, + {file = "Pillow-9.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5100b45a4638e3c00e4d2320d3193bdabb2d75e79793af7c3eb139e4f569f16f"}, + {file = "Pillow-9.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:528a2a692c65dd5cafc130de286030af251d2ee0483a5bf50c9348aefe834e8a"}, + {file = "Pillow-9.0.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0f29d831e2151e0b7b39981756d201f7108d3d215896212ffe2e992d06bfe049"}, + {file = "Pillow-9.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:855c583f268edde09474b081e3ddcd5cf3b20c12f26e0d434e1386cc5d318e7a"}, + {file = "Pillow-9.0.1-cp310-cp310-win32.whl", hash = "sha256:d9d7942b624b04b895cb95af03a23407f17646815495ce4547f0e60e0b06f58e"}, + {file = "Pillow-9.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:81c4b81611e3a3cb30e59b0cf05b888c675f97e3adb2c8672c3154047980726b"}, + {file = "Pillow-9.0.1-cp37-cp37m-macosx_10_10_x86_64.whl", hash = "sha256:413ce0bbf9fc6278b2d63309dfeefe452835e1c78398efb431bab0672fe9274e"}, + {file = "Pillow-9.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:80fe64a6deb6fcfdf7b8386f2cf216d329be6f2781f7d90304351811fb591360"}, + {file = "Pillow-9.0.1-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cef9c85ccbe9bee00909758936ea841ef12035296c748aaceee535969e27d31b"}, + {file = "Pillow-9.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1d19397351f73a88904ad1aee421e800fe4bbcd1aeee6435fb62d0a05ccd1030"}, + {file = "Pillow-9.0.1-cp37-cp37m-win32.whl", hash = "sha256:d21237d0cd37acded35154e29aec853e945950321dd2ffd1a7d86fe686814669"}, + {file = "Pillow-9.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:ede5af4a2702444a832a800b8eb7f0a7a1c0eed55b644642e049c98d589e5092"}, + {file = "Pillow-9.0.1-cp38-cp38-macosx_10_10_x86_64.whl", hash = 
"sha256:b5b3f092fe345c03bca1e0b687dfbb39364b21ebb8ba90e3fa707374b7915204"}, + {file = "Pillow-9.0.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:335ace1a22325395c4ea88e00ba3dc89ca029bd66bd5a3c382d53e44f0ccd77e"}, + {file = "Pillow-9.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:db6d9fac65bd08cea7f3540b899977c6dee9edad959fa4eaf305940d9cbd861c"}, + {file = "Pillow-9.0.1-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f154d173286a5d1863637a7dcd8c3437bb557520b01bddb0be0258dcb72696b5"}, + {file = "Pillow-9.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14d4b1341ac07ae07eb2cc682f459bec932a380c3b122f5540432d8977e64eae"}, + {file = "Pillow-9.0.1-cp38-cp38-win32.whl", hash = "sha256:effb7749713d5317478bb3acb3f81d9d7c7f86726d41c1facca068a04cf5bb4c"}, + {file = "Pillow-9.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:7f7609a718b177bf171ac93cea9fd2ddc0e03e84d8fa4e887bdfc39671d46b00"}, + {file = "Pillow-9.0.1-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:80ca33961ced9c63358056bd08403ff866512038883e74f3a4bf88ad3eb66838"}, + {file = "Pillow-9.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1c3c33ac69cf059bbb9d1a71eeaba76781b450bc307e2291f8a4764d779a6b28"}, + {file = "Pillow-9.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:12875d118f21cf35604176872447cdb57b07126750a33748bac15e77f90f1f9c"}, + {file = "Pillow-9.0.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:514ceac913076feefbeaf89771fd6febde78b0c4c1b23aaeab082c41c694e81b"}, + {file = "Pillow-9.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3c5c79ab7dfce6d88f1ba639b77e77a17ea33a01b07b99840d6ed08031cb2a7"}, + {file = "Pillow-9.0.1-cp39-cp39-win32.whl", hash = "sha256:718856856ba31f14f13ba885ff13874be7fefc53984d2832458f12c38205f7f7"}, + {file = "Pillow-9.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:f25ed6e28ddf50de7e7ea99d7a976d6a9c415f03adcaac9c41ff6ff41b6d86ac"}, + {file = "Pillow-9.0.1-pp37-pypy37_pp73-macosx_10_10_x86_64.whl", hash = "sha256:011233e0c42a4a7836498e98c1acf5e744c96a67dd5032a6f666cc1fb97eab97"}, + {file = "Pillow-9.0.1-pp37-pypy37_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:253e8a302a96df6927310a9d44e6103055e8fb96a6822f8b7f514bb7ef77de56"}, + {file = "Pillow-9.0.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6295f6763749b89c994fcb6d8a7f7ce03c3992e695f89f00b741b4580b199b7e"}, + {file = "Pillow-9.0.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:a9f44cd7e162ac6191491d7249cceb02b8116b0f7e847ee33f739d7cb1ea1f70"}, + {file = "Pillow-9.0.1.tar.gz", hash = "sha256:6c8bc8238a7dfdaf7a75f5ec5a663f4173f8c367e5a39f87e720495e1eed75fa"}, ] platformdirs = [ {file = "platformdirs-2.4.1-py3-none-any.whl", hash = "sha256:1d7385c7db91728b83efd0ca99a5afb296cab9d0ed8313a45ed8ba17967ecfca"}, @@ -2598,29 +2621,6 @@ pyparsing = [ {file = "pyparsing-2.4.7-py2.py3-none-any.whl", hash = "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b"}, {file = "pyparsing-2.4.7.tar.gz", hash = "sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1"}, ] -pyrsistent = [ - {file = "pyrsistent-0.18.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:df46c854f490f81210870e509818b729db4488e1f30f2a1ce1698b2295a878d1"}, - {file = "pyrsistent-0.18.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d45866ececf4a5fff8742c25722da6d4c9e180daa7b405dc0a2a2790d668c26"}, - {file = 
"pyrsistent-0.18.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4ed6784ceac462a7d6fcb7e9b663e93b9a6fb373b7f43594f9ff68875788e01e"}, - {file = "pyrsistent-0.18.1-cp310-cp310-win32.whl", hash = "sha256:e4f3149fd5eb9b285d6bfb54d2e5173f6a116fe19172686797c056672689daf6"}, - {file = "pyrsistent-0.18.1-cp310-cp310-win_amd64.whl", hash = "sha256:636ce2dc235046ccd3d8c56a7ad54e99d5c1cd0ef07d9ae847306c91d11b5fec"}, - {file = "pyrsistent-0.18.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e92a52c166426efbe0d1ec1332ee9119b6d32fc1f0bbfd55d5c1088070e7fc1b"}, - {file = "pyrsistent-0.18.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d7a096646eab884bf8bed965bad63ea327e0d0c38989fc83c5ea7b8a87037bfc"}, - {file = "pyrsistent-0.18.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cdfd2c361b8a8e5d9499b9082b501c452ade8bbf42aef97ea04854f4a3f43b22"}, - {file = "pyrsistent-0.18.1-cp37-cp37m-win32.whl", hash = "sha256:7ec335fc998faa4febe75cc5268a9eac0478b3f681602c1f27befaf2a1abe1d8"}, - {file = "pyrsistent-0.18.1-cp37-cp37m-win_amd64.whl", hash = "sha256:6455fc599df93d1f60e1c5c4fe471499f08d190d57eca040c0ea182301321286"}, - {file = "pyrsistent-0.18.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:fd8da6d0124efa2f67d86fa70c851022f87c98e205f0594e1fae044e7119a5a6"}, - {file = "pyrsistent-0.18.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7bfe2388663fd18bd8ce7db2c91c7400bf3e1a9e8bd7d63bf7e77d39051b85ec"}, - {file = "pyrsistent-0.18.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0e3e1fcc45199df76053026a51cc59ab2ea3fc7c094c6627e93b7b44cdae2c8c"}, - {file = "pyrsistent-0.18.1-cp38-cp38-win32.whl", hash = "sha256:b568f35ad53a7b07ed9b1b2bae09eb15cdd671a5ba5d2c66caee40dbf91c68ca"}, - {file = "pyrsistent-0.18.1-cp38-cp38-win_amd64.whl", hash = "sha256:d1b96547410f76078eaf66d282ddca2e4baae8964364abb4f4dcdde855cd123a"}, - {file = "pyrsistent-0.18.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f87cc2863ef33c709e237d4b5f4502a62a00fab450c9e020892e8e2ede5847f5"}, - {file = "pyrsistent-0.18.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6bc66318fb7ee012071b2792024564973ecc80e9522842eb4e17743604b5e045"}, - {file = "pyrsistent-0.18.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:914474c9f1d93080338ace89cb2acee74f4f666fb0424896fcfb8d86058bf17c"}, - {file = "pyrsistent-0.18.1-cp39-cp39-win32.whl", hash = "sha256:1b34eedd6812bf4d33814fca1b66005805d3640ce53140ab8bbb1e2651b0d9bc"}, - {file = "pyrsistent-0.18.1-cp39-cp39-win_amd64.whl", hash = "sha256:e24a828f57e0c337c8d8bb9f6b12f09dfdf0273da25fda9e314f0b684b415a07"}, - {file = "pyrsistent-0.18.1.tar.gz", hash = "sha256:d4d61f8b993a7255ba714df3aca52700f8125289f84f704cf80916517c46eb96"}, -] pysftp = [ {file = "pysftp-0.2.9.tar.gz", hash = "sha256:fbf55a802e74d663673400acd92d5373c1c7ee94d765b428d9f977567ac4854a"}, ] From 65b00455614cadd5f279fcfdd37c41f976697c99 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 14 Mar 2022 17:31:57 +0100 Subject: [PATCH 056/357] OP-2766 - fixed not working self.log in New Publisher --- openpype/pipeline/create/creator_plugins.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/openpype/pipeline/create/creator_plugins.py b/openpype/pipeline/create/creator_plugins.py index 1ac2c420a2..f05b132fc6 
100644 --- a/openpype/pipeline/create/creator_plugins.py +++ b/openpype/pipeline/create/creator_plugins.py @@ -69,7 +69,9 @@ class BaseCreator: @property def log(self): if self._log is None: - self._log = logging.getLogger(self.__class__.__name__) + from openpype.api import Logger + + self._log = Logger.get_logger(self.__class__.__name__) return self._log def _add_instance_to_context(self, instance): From a71dad4608e0be4a91c75769e5edf6722f52f9ff Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 14 Mar 2022 17:35:17 +0100 Subject: [PATCH 057/357] OP-2766 - implemented auto creator for PS Creates workfile instance, updated imprint function. --- openpype/hosts/photoshop/api/pipeline.py | 52 +++++++++---- openpype/hosts/photoshop/api/ws_stub.py | 33 +++++---- .../plugins/create/workfile_creator.py | 73 +++++++++++++++++++ 3 files changed, 131 insertions(+), 27 deletions(-) create mode 100644 openpype/hosts/photoshop/plugins/create/workfile_creator.py diff --git a/openpype/hosts/photoshop/api/pipeline.py b/openpype/hosts/photoshop/api/pipeline.py index 1be8129aa1..0e3f1215aa 100644 --- a/openpype/hosts/photoshop/api/pipeline.py +++ b/openpype/hosts/photoshop/api/pipeline.py @@ -8,7 +8,7 @@ from avalon import pipeline, io from openpype.api import Logger from openpype.lib import register_event_callback -from openpype.pipeline import LegacyCreator +from openpype.pipeline import LegacyCreator, BaseCreator import openpype.hosts.photoshop from . import lib @@ -71,6 +71,7 @@ def install(): pyblish.api.register_plugin_path(PUBLISH_PATH) avalon.api.register_plugin_path(avalon.api.Loader, LOAD_PATH) avalon.api.register_plugin_path(LegacyCreator, CREATE_PATH) + avalon.api.register_plugin_path(BaseCreator, CREATE_PATH) log.info(PUBLISH_PATH) pyblish.api.register_callback( @@ -144,12 +145,9 @@ def list_instances(): layers_meta = stub.get_layers_metadata() if layers_meta: for key, instance in layers_meta.items(): - schema = instance.get("schema") - if schema and "container" in schema: - continue - - instance['uuid'] = key - instances.append(instance) + if instance.get("id") == "pyblish.avalon.instance": # TODO only this way? 
+ instance['uuid'] = key + instances.append(instance) return instances @@ -170,11 +168,18 @@ def remove_instance(instance): if not stub: return - stub.remove_instance(instance.get("uuid")) - layer = stub.get_layer(instance.get("uuid")) - if layer: - stub.rename_layer(instance.get("uuid"), - layer.name.replace(stub.PUBLISH_ICON, '')) + inst_id = instance.get("instance_id") or instance.get("uuid") # legacy + if not inst_id: + log.warning("No instance identifier for {}".format(instance)) + return + + stub.remove_instance(inst_id) + + if instance.get("members"): + item = stub.get_item(instance["members"][0]) + if item: + stub.rename_item(item.id, + item.name.replace(stub.PUBLISH_ICON, '')) def _get_stub(): @@ -226,6 +231,27 @@ def containerise( "members": [str(layer.id)] } stub = lib.stub() - stub.imprint(layer, data) + stub.imprint(layer.id, data) return layer + + +def get_context_data(): + pass + + +def update_context_data(data, changes): + # item = data + # item["id"] = "publish_context" + # _get_stub().imprint(item["id"], item) + pass + + +def get_context_title(): + """Returns title for Creator window""" + import avalon.api + + project_name = avalon.api.Session["AVALON_PROJECT"] + asset_name = avalon.api.Session["AVALON_ASSET"] + task_name = avalon.api.Session["AVALON_TASK"] + return "{}/{}/{}".format(project_name, asset_name, task_name) \ No newline at end of file diff --git a/openpype/hosts/photoshop/api/ws_stub.py b/openpype/hosts/photoshop/api/ws_stub.py index 64d89f5420..a99f184080 100644 --- a/openpype/hosts/photoshop/api/ws_stub.py +++ b/openpype/hosts/photoshop/api/ws_stub.py @@ -27,6 +27,7 @@ class PSItem(object): members = attr.ib(factory=list) long_name = attr.ib(default=None) color_code = attr.ib(default=None) # color code of layer + instance_id = attr.ib(default=None) class PhotoshopServerStub: @@ -82,7 +83,7 @@ class PhotoshopServerStub: return layers_meta.get(str(layer.id)) - def imprint(self, layer, data, all_layers=None, layers_meta=None): + def imprint(self, item_id, data, all_layers=None, items_meta=None): """Save layer metadata to Headline field of active document Stores metadata in format: @@ -108,28 +109,29 @@ class PhotoshopServerStub: }] - for loaded instances Args: - layer (PSItem): + item_id (str): data(string): json representation for single layer all_layers (list of PSItem): for performance, could be injected for usage in loop, if not, single call will be triggered - layers_meta(string): json representation from Headline + items_meta(string): json representation from Headline (for performance - provide only if imprint is in loop - value should be same) Returns: None """ - if not layers_meta: - layers_meta = self.get_layers_metadata() + if not items_meta: + items_meta = self.get_layers_metadata() # json.dumps writes integer values in a dictionary to string, so # anticipating it here. - if str(layer.id) in layers_meta and layers_meta[str(layer.id)]: + item_id = str(item_id) + if item_id in items_meta.keys(): if data: - layers_meta[str(layer.id)].update(data) + items_meta[item_id].update(data) else: - layers_meta.pop(str(layer.id)) + items_meta.pop(item_id) else: - layers_meta[str(layer.id)] = data + items_meta[item_id] = data # Ensure only valid ids are stored. 
if not all_layers: @@ -137,12 +139,14 @@ class PhotoshopServerStub: layer_ids = [layer.id for layer in all_layers] cleaned_data = [] - for layer_id in layers_meta: - if int(layer_id) in layer_ids: - cleaned_data.append(layers_meta[layer_id]) + for item in items_meta.values(): + if item.get("members"): + if int(item["members"][0]) not in layer_ids: + continue + + cleaned_data.append(item) payload = json.dumps(cleaned_data, indent=4) - self.websocketserver.call( self.client.call('Photoshop.imprint', payload=payload) ) @@ -528,6 +532,7 @@ class PhotoshopServerStub: d.get('type'), d.get('members'), d.get('long_name'), - d.get("color_code") + d.get("color_code"), + d.get("instance_id") )) return ret diff --git a/openpype/hosts/photoshop/plugins/create/workfile_creator.py b/openpype/hosts/photoshop/plugins/create/workfile_creator.py new file mode 100644 index 0000000000..d66a05cad7 --- /dev/null +++ b/openpype/hosts/photoshop/plugins/create/workfile_creator.py @@ -0,0 +1,73 @@ +from avalon import io + +import openpype.hosts.photoshop.api as api +from openpype.pipeline import ( + AutoCreator, + CreatedInstance +) + + +class PSWorkfileCreator(AutoCreator): + identifier = "workfile" + family = "workfile" + + def get_instance_attr_defs(self): + return [] + + def collect_instances(self): + for instance_data in api.list_instances(): + creator_id = instance_data.get("creator_identifier") + if creator_id == self.identifier: + subset_name = instance_data["subset"] + instance = CreatedInstance( + self.family, subset_name, instance_data, self + ) + self._add_instance_to_context(instance) + + def update_instances(self, update_list): + # nothing to change on workfiles + pass + + def create(self, options=None): + existing_instance = None + for instance in self.create_context.instances: + if instance.family == self.family: + existing_instance = instance + break + + variant = '' + project_name = io.Session["AVALON_PROJECT"] + asset_name = io.Session["AVALON_ASSET"] + task_name = io.Session["AVALON_TASK"] + host_name = io.Session["AVALON_APP"] + if existing_instance is None: + asset_doc = io.find_one({"type": "asset", "name": asset_name}) + subset_name = self.get_subset_name( + variant, task_name, asset_doc, project_name, host_name + ) + data = { + "asset": asset_name, + "task": task_name, + "variant": variant + } + data.update(self.get_dynamic_data( + variant, task_name, asset_doc, project_name, host_name + )) + + new_instance = CreatedInstance( + self.family, subset_name, data, self + ) + self._add_instance_to_context(new_instance) + api.stub().imprint(new_instance.get("instance_id"), + new_instance.data_to_store()) + + elif ( + existing_instance["asset"] != asset_name + or existing_instance["task"] != task_name + ): + asset_doc = io.find_one({"type": "asset", "name": asset_name}) + subset_name = self.get_subset_name( + variant, task_name, asset_doc, project_name, host_name + ) + existing_instance["asset"] = asset_name + existing_instance["task"] = task_name From cdb2047ef7e205054f2c31fb6f336e259fa93d47 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 14 Mar 2022 17:35:40 +0100 Subject: [PATCH 058/357] OP-2766 - renamed legacy creator --- .../plugins/create/create_legacy_image.py | 99 +++++++++++++++++++ 1 file changed, 99 insertions(+) create mode 100644 openpype/hosts/photoshop/plugins/create/create_legacy_image.py diff --git a/openpype/hosts/photoshop/plugins/create/create_legacy_image.py b/openpype/hosts/photoshop/plugins/create/create_legacy_image.py new file mode 100644 index 0000000000..a001b5f171 
--- /dev/null +++ b/openpype/hosts/photoshop/plugins/create/create_legacy_image.py @@ -0,0 +1,99 @@ +from Qt import QtWidgets +from openpype.pipeline import create +from openpype.hosts.photoshop import api as photoshop + + +class CreateImage(create.LegacyCreator): + """Image folder for publish.""" + + name = "imageDefault" + label = "Image" + family = "image" + defaults = ["Main"] + + def process(self): + groups = [] + layers = [] + create_group = False + + stub = photoshop.stub() + if (self.options or {}).get("useSelection"): + multiple_instances = False + selection = stub.get_selected_layers() + self.log.info("selection {}".format(selection)) + if len(selection) > 1: + # Ask user whether to create one image or image per selected + # item. + msg_box = QtWidgets.QMessageBox() + msg_box.setIcon(QtWidgets.QMessageBox.Warning) + msg_box.setText( + "Multiple layers selected." + "\nDo you want to make one image per layer?" + ) + msg_box.setStandardButtons( + QtWidgets.QMessageBox.Yes | + QtWidgets.QMessageBox.No | + QtWidgets.QMessageBox.Cancel + ) + ret = msg_box.exec_() + if ret == QtWidgets.QMessageBox.Yes: + multiple_instances = True + elif ret == QtWidgets.QMessageBox.Cancel: + return + + if multiple_instances: + for item in selection: + if item.group: + groups.append(item) + else: + layers.append(item) + else: + group = stub.group_selected_layers(self.name) + groups.append(group) + + elif len(selection) == 1: + # One selected item. Use group if its a LayerSet (group), else + # create a new group. + if selection[0].group: + groups.append(selection[0]) + else: + layers.append(selection[0]) + elif len(selection) == 0: + # No selection creates an empty group. + create_group = True + else: + group = stub.create_group(self.name) + groups.append(group) + + if create_group: + group = stub.create_group(self.name) + groups.append(group) + + for layer in layers: + stub.select_layers([layer]) + group = stub.group_selected_layers(layer.name) + groups.append(group) + + creator_subset_name = self.data["subset"] + for group in groups: + long_names = [] + group.name = group.name.replace(stub.PUBLISH_ICON, ''). 
\ + replace(stub.LOADED_ICON, '') + + subset_name = creator_subset_name + if len(groups) > 1: + subset_name += group.name.title().replace(" ", "") + + if group.long_name: + for directory in group.long_name[::-1]: + name = directory.replace(stub.PUBLISH_ICON, '').\ + replace(stub.LOADED_ICON, '') + long_names.append(name) + + self.data.update({"subset": subset_name}) + self.data.update({"uuid": str(group.id)}) + self.data.update({"long_name": "_".join(long_names)}) + stub.imprint(group, self.data) + # reusing existing group, need to rename afterwards + if not create_group: + stub.rename_layer(group.id, stub.PUBLISH_ICON + group.name) From bfce93027ccd5ebbb227b7af80ba8d73c77f3453 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 16 Mar 2022 15:00:17 +0100 Subject: [PATCH 059/357] Update openpype/hosts/aftereffects/plugins/create/create_render.py Co-authored-by: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> --- openpype/hosts/aftereffects/plugins/create/create_render.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/aftereffects/plugins/create/create_render.py b/openpype/hosts/aftereffects/plugins/create/create_render.py index 1a5a826137..550fb6b0ef 100644 --- a/openpype/hosts/aftereffects/plugins/create/create_render.py +++ b/openpype/hosts/aftereffects/plugins/create/create_render.py @@ -72,7 +72,7 @@ class RenderCreator(Creator): new_instance = CreatedInstance(self.family, subset_name, data, self) new_instance.creator_attributes["farm"] = pre_create_data["farm"] - api.get_stub().imprint(new_instance.get("instance_id"), + api.get_stub().imprint(new_instance.id, new_instance.data_to_store()) self._add_instance_to_context(new_instance) From d3441215749e303311370a41a9c82aa934b6cfb0 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 16 Mar 2022 15:00:33 +0100 Subject: [PATCH 060/357] Update openpype/hosts/aftereffects/plugins/create/create_render.py Co-authored-by: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> --- openpype/hosts/aftereffects/plugins/create/create_render.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/aftereffects/plugins/create/create_render.py b/openpype/hosts/aftereffects/plugins/create/create_render.py index 550fb6b0ef..88462667ed 100644 --- a/openpype/hosts/aftereffects/plugins/create/create_render.py +++ b/openpype/hosts/aftereffects/plugins/create/create_render.py @@ -70,7 +70,9 @@ class RenderCreator(Creator): data["members"] = [items[0].id] new_instance = CreatedInstance(self.family, subset_name, data, self) - new_instance.creator_attributes["farm"] = pre_create_data["farm"] + if "farm" in pre_create_data: + use_farm = pre_create_data["farm"] + new_instance.creator_attributes["farm"] = use_farm api.get_stub().imprint(new_instance.id, new_instance.data_to_store()) From bff1b77c0635493c3236f663c7a444eaf2d350e4 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 16 Mar 2022 16:17:46 +0100 Subject: [PATCH 061/357] OP-2766 - changed format of layer metadata Removing uuid, replaced with members[0] and instance_id. Layers metadata now returned as a list, not dictionary to follow AE implementation. 
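For reference, a rough before/after sketch of the Headline payload this change
describes (illustrative values only, not taken from a real workfile):

    # legacy: dict keyed by layer (group) id, instances identified by "uuid"
    {
        "8": {"id": "pyblish.avalon.instance", "family": "image",
              "subset": "imageBG", "uuid": "8"}
    }

    # new: list of dicts; the layer id moves into "members" and instances
    # are identified by "instance_id"
    [
        {"id": "pyblish.avalon.instance", "family": "image",
         "subset": "imageBG", "members": ["8"],
         "instance_id": "<generated id>"}
    ]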
--- openpype/hosts/photoshop/api/pipeline.py | 3 +- openpype/hosts/photoshop/api/ws_stub.py | 60 ++++++++++++------------ 2 files changed, 32 insertions(+), 31 deletions(-) diff --git a/openpype/hosts/photoshop/api/pipeline.py b/openpype/hosts/photoshop/api/pipeline.py index 0e3f1215aa..8d64942c9e 100644 --- a/openpype/hosts/photoshop/api/pipeline.py +++ b/openpype/hosts/photoshop/api/pipeline.py @@ -144,9 +144,8 @@ def list_instances(): instances = [] layers_meta = stub.get_layers_metadata() if layers_meta: - for key, instance in layers_meta.items(): + for instance in layers_meta: if instance.get("id") == "pyblish.avalon.instance": # TODO only this way? - instance['uuid'] = key instances.append(instance) return instances diff --git a/openpype/hosts/photoshop/api/ws_stub.py b/openpype/hosts/photoshop/api/ws_stub.py index a99f184080..dd29ef4e84 100644 --- a/openpype/hosts/photoshop/api/ws_stub.py +++ b/openpype/hosts/photoshop/api/ws_stub.py @@ -81,7 +81,11 @@ class PhotoshopServerStub: if layers_meta is None: layers_meta = self.get_layers_metadata() - return layers_meta.get(str(layer.id)) + for layer_meta in layers_meta: + if layer_meta.get("members"): + if layer.id == layer_meta["members"][0]: + return layer + print("Unable to find layer metadata for {}".format(layer.id)) def imprint(self, item_id, data, all_layers=None, items_meta=None): """Save layer metadata to Headline field of active document @@ -125,13 +129,21 @@ class PhotoshopServerStub: # json.dumps writes integer values in a dictionary to string, so # anticipating it here. item_id = str(item_id) - if item_id in items_meta.keys(): - if data: - items_meta[item_id].update(data) + is_new = True + result_meta = [] + for item_meta in items_meta: + if ((item_meta.get('members') and + item_id == str(item_meta.get('members')[0])) or + item_meta.get("instance_id") == item_id): + is_new = False + if data: + item_meta.update(data) + result_meta.append(item_meta) else: - items_meta.pop(item_id) - else: - items_meta[item_id] = data + result_meta.append(item_meta) + + if is_new: + result_meta.append(data) # Ensure only valid ids are stored. if not all_layers: @@ -139,7 +151,7 @@ class PhotoshopServerStub: layer_ids = [layer.id for layer in all_layers] cleaned_data = [] - for item in items_meta.values(): + for item in result_meta: if item.get("members"): if int(item["members"][0]) not in layer_ids: continue @@ -374,38 +386,27 @@ class PhotoshopServerStub: (Headline accessible by File > File Info) Returns: - (string): - json documents + (list) example: {"8":{"active":true,"subset":"imageBG", "family":"image","id":"pyblish.avalon.instance", "asset":"Town"}} 8 is layer(group) id - used for deletion, update etc. 
""" - layers_data = {} res = self.websocketserver.call(self.client.call('Photoshop.read')) + layers_data = [] try: - layers_data = json.loads(res) + if res: + layers_data = json.loads(res) except json.decoder.JSONDecodeError: pass # format of metadata changed from {} to [] because of standardization # keep current implementation logic as its working - if not isinstance(layers_data, dict): - temp_layers_meta = {} - for layer_meta in layers_data: - layer_id = layer_meta.get("uuid") - if not layer_id: - layer_id = layer_meta.get("members")[0] - - temp_layers_meta[layer_id] = layer_meta - layers_data = temp_layers_meta - else: - # legacy version of metadata + if isinstance(layers_data, dict): for layer_id, layer_meta in layers_data.items(): if layer_meta.get("schema") != "openpype:container-2.0": - layer_meta["uuid"] = str(layer_id) - else: layer_meta["members"] = [str(layer_id)] - + layers_data = list(layers_data.values()) return layers_data def import_smart_object(self, path, layer_name, as_reference=False): @@ -476,11 +477,12 @@ class PhotoshopServerStub: ) def remove_instance(self, instance_id): - cleaned_data = {} + cleaned_data = [] - for key, instance in self.get_layers_metadata().items(): - if key != instance_id: - cleaned_data[key] = instance + for item in self.get_layers_metadata(): + inst_id = item.get("instance_id") or item.get("uuid") + if inst_id != instance_id: + cleaned_data.append(inst_id) payload = json.dumps(cleaned_data, indent=4) From c46b41804d108cc976aae64410ce520ac3117dda Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 16 Mar 2022 16:18:07 +0100 Subject: [PATCH 062/357] OP-2766 - implemented new image Creator Working implementation of New Publisher (not full backward compatibility yet). --- openpype/hosts/photoshop/api/__init__.py | 8 +- .../photoshop/plugins/create/create_image.py | 156 ++++++++++++------ .../plugins/create/create_legacy_image.py | 2 +- .../plugins/create/workfile_creator.py | 2 + .../plugins/publish/collect_instances.py | 4 + .../plugins/publish/collect_workfile.py | 30 ++-- .../plugins/publish/extract_image.py | 9 +- 7 files changed, 148 insertions(+), 63 deletions(-) diff --git a/openpype/hosts/photoshop/api/__init__.py b/openpype/hosts/photoshop/api/__init__.py index 17ea957066..94152b5706 100644 --- a/openpype/hosts/photoshop/api/__init__.py +++ b/openpype/hosts/photoshop/api/__init__.py @@ -12,7 +12,10 @@ from .pipeline import ( remove_instance, install, uninstall, - containerise + containerise, + get_context_data, + update_context_data, + get_context_title ) from .plugin import ( PhotoshopLoader, @@ -43,6 +46,9 @@ __all__ = [ "install", "uninstall", "containerise", + "get_context_data", + "update_context_data", + "get_context_title", # Plugin "PhotoshopLoader", diff --git a/openpype/hosts/photoshop/plugins/create/create_image.py b/openpype/hosts/photoshop/plugins/create/create_image.py index a001b5f171..a73b79e0fd 100644 --- a/openpype/hosts/photoshop/plugins/create/create_image.py +++ b/openpype/hosts/photoshop/plugins/create/create_image.py @@ -1,46 +1,50 @@ -from Qt import QtWidgets -from openpype.pipeline import create -from openpype.hosts.photoshop import api as photoshop +from avalon import api as avalon_api +from openpype.hosts.photoshop import api +from openpype.pipeline import ( + Creator, + CreatedInstance, + lib, + CreatorError +) -class CreateImage(create.LegacyCreator): - """Image folder for publish.""" - - name = "imageDefault" +class ImageCreator(Creator): + """Creates image instance for publishing.""" + identifier = "image" 
label = "Image" family = "image" - defaults = ["Main"] + description = "Image creator" - def process(self): + def collect_instances(self): + import json + self.log.info("ImageCreator: api.list_instances():: {}".format( + json.dumps(api.list_instances(), indent=4))) + for instance_data in api.list_instances(): + # legacy instances have family=='image' + creator_id = (instance_data.get("creator_identifier") or + instance_data.get("family")) + + self.log.info("ImageCreator: instance_data:: {}".format(json.dumps(instance_data, indent=4))) + if creator_id == self.identifier: + instance_data = self._handle_legacy(instance_data) + + layer = api.stub().get_layer(instance_data["members"][0]) + instance_data["layer"] = layer + instance = CreatedInstance.from_existing( + instance_data, self + ) + self._add_instance_to_context(instance) + + def create(self, subset_name, data, pre_create_data): groups = [] layers = [] create_group = False - stub = photoshop.stub() - if (self.options or {}).get("useSelection"): - multiple_instances = False - selection = stub.get_selected_layers() - self.log.info("selection {}".format(selection)) + stub = api.stub() # only after PS is up + multiple_instances = pre_create_data.get("create_multiple") + selection = stub.get_selected_layers() + if pre_create_data.get("use_selection"): if len(selection) > 1: - # Ask user whether to create one image or image per selected - # item. - msg_box = QtWidgets.QMessageBox() - msg_box.setIcon(QtWidgets.QMessageBox.Warning) - msg_box.setText( - "Multiple layers selected." - "\nDo you want to make one image per layer?" - ) - msg_box.setStandardButtons( - QtWidgets.QMessageBox.Yes | - QtWidgets.QMessageBox.No | - QtWidgets.QMessageBox.Cancel - ) - ret = msg_box.exec_() - if ret == QtWidgets.QMessageBox.Yes: - multiple_instances = True - elif ret == QtWidgets.QMessageBox.Cancel: - return - if multiple_instances: for item in selection: if item.group: @@ -48,25 +52,25 @@ class CreateImage(create.LegacyCreator): else: layers.append(item) else: - group = stub.group_selected_layers(self.name) + group = stub.group_selected_layers(subset_name) groups.append(group) - elif len(selection) == 1: # One selected item. Use group if its a LayerSet (group), else # create a new group. - if selection[0].group: - groups.append(selection[0]) + selected_item = selection[0] + if selected_item.group: + groups.append(selected_item) else: - layers.append(selection[0]) + layers.append(selected_item) elif len(selection) == 0: # No selection creates an empty group. create_group = True else: - group = stub.create_group(self.name) + group = stub.create_group(subset_name) groups.append(group) if create_group: - group = stub.create_group(self.name) + group = stub.create_group(subset_name) groups.append(group) for layer in layers: @@ -74,26 +78,78 @@ class CreateImage(create.LegacyCreator): group = stub.group_selected_layers(layer.name) groups.append(group) - creator_subset_name = self.data["subset"] for group in groups: long_names = [] - group.name = group.name.replace(stub.PUBLISH_ICON, ''). 
\ - replace(stub.LOADED_ICON, '') + group.name = self._clean_highlights(stub, group.name) - subset_name = creator_subset_name if len(groups) > 1: subset_name += group.name.title().replace(" ", "") if group.long_name: for directory in group.long_name[::-1]: - name = directory.replace(stub.PUBLISH_ICON, '').\ - replace(stub.LOADED_ICON, '') + name = self._clean_highlights(stub, directory) long_names.append(name) - self.data.update({"subset": subset_name}) - self.data.update({"uuid": str(group.id)}) - self.data.update({"long_name": "_".join(long_names)}) - stub.imprint(group, self.data) + data.update({"subset": subset_name}) + data.update({"layer": group}) + data.update({"members": [str(group.id)]}) + data.update({"long_name": "_".join(long_names)}) + + new_instance = CreatedInstance(self.family, subset_name, data, + self) + + stub.imprint(new_instance.get("instance_id"), + new_instance.data_to_store()) + self._add_instance_to_context(new_instance) # reusing existing group, need to rename afterwards if not create_group: stub.rename_layer(group.id, stub.PUBLISH_ICON + group.name) + + def update_instances(self, update_list): + self.log.info("update_list:: {}".format(update_list)) + created_inst, changes = update_list[0] + api.stub().imprint(created_inst.get("instance_id"), + created_inst.data_to_store()) + + def remove_instances(self, instances): + for instance in instances: + api.remove_instance(instance) + self._remove_instance_from_context(instance) + + def get_default_variants(self): + return [ + "Main" + ] + + def get_pre_create_attr_defs(self): + output = [ + lib.BoolDef("use_selection", default=True, label="Use selection"), + lib.BoolDef("create_multiple", + default=True, + label="Create separate instance for each selected") + ] + return output + + def get_detail_description(self): + return """Creator for Image instances""" + + def _handle_legacy(self, instance_data): + """Converts old instances to new format.""" + if not instance_data.get("members"): + instance_data["members"] = [instance_data.get("uuid")] + + if instance_data.get("uuid"): + # uuid not needed, replaced with unique instance_id + api.stub().remove_instance(instance_data.get("uuid")) + instance_data.pop("uuid") + + if not instance_data.get("task"): + instance_data["task"] = avalon_api.Session.get("AVALON_TASK") + + return instance_data + + def _clean_highlights(self, stub, item): + return item.replace(stub.PUBLISH_ICON, '').replace(stub.LOADED_ICON, + '') + + diff --git a/openpype/hosts/photoshop/plugins/create/create_legacy_image.py b/openpype/hosts/photoshop/plugins/create/create_legacy_image.py index a001b5f171..6fa455fa03 100644 --- a/openpype/hosts/photoshop/plugins/create/create_legacy_image.py +++ b/openpype/hosts/photoshop/plugins/create/create_legacy_image.py @@ -91,7 +91,7 @@ class CreateImage(create.LegacyCreator): long_names.append(name) self.data.update({"subset": subset_name}) - self.data.update({"uuid": str(group.id)}) + self.data.update({"members": [str(group.id)]}) self.data.update({"long_name": "_".join(long_names)}) stub.imprint(group, self.data) # reusing existing group, need to rename afterwards diff --git a/openpype/hosts/photoshop/plugins/create/workfile_creator.py b/openpype/hosts/photoshop/plugins/create/workfile_creator.py index d66a05cad7..2a2fda3cc4 100644 --- a/openpype/hosts/photoshop/plugins/create/workfile_creator.py +++ b/openpype/hosts/photoshop/plugins/create/workfile_creator.py @@ -15,6 +15,7 @@ class PSWorkfileCreator(AutoCreator): return [] def collect_instances(self): + 
print("coll::{}".format(api.list_instances())) for instance_data in api.list_instances(): creator_id = instance_data.get("creator_identifier") if creator_id == self.identifier: @@ -29,6 +30,7 @@ class PSWorkfileCreator(AutoCreator): pass def create(self, options=None): + print("create") existing_instance = None for instance in self.create_context.instances: if instance.family == self.family: diff --git a/openpype/hosts/photoshop/plugins/publish/collect_instances.py b/openpype/hosts/photoshop/plugins/publish/collect_instances.py index c3e27e9646..ee402dcabf 100644 --- a/openpype/hosts/photoshop/plugins/publish/collect_instances.py +++ b/openpype/hosts/photoshop/plugins/publish/collect_instances.py @@ -21,6 +21,10 @@ class CollectInstances(pyblish.api.ContextPlugin): } def process(self, context): + if context.data.get("newPublishing"): + self.log.debug("Not applicable for New Publisher, skip") + return + stub = photoshop.stub() layers = stub.get_layers() layers_meta = stub.get_layers_metadata() diff --git a/openpype/hosts/photoshop/plugins/publish/collect_workfile.py b/openpype/hosts/photoshop/plugins/publish/collect_workfile.py index db1ede14d5..bdbd379a33 100644 --- a/openpype/hosts/photoshop/plugins/publish/collect_workfile.py +++ b/openpype/hosts/photoshop/plugins/publish/collect_workfile.py @@ -10,6 +10,13 @@ class CollectWorkfile(pyblish.api.ContextPlugin): hosts = ["photoshop"] def process(self, context): + existing_instance = None + for instance in context: + if instance.data["family"] == "workfile": + self.log.debug("Workfile instance found, won't create new") + existing_instance = instance + break + family = "workfile" task = os.getenv("AVALON_TASK", None) subset = family + task.capitalize() @@ -19,16 +26,19 @@ class CollectWorkfile(pyblish.api.ContextPlugin): base_name = os.path.basename(file_path) # Create instance - instance = context.create_instance(subset) - instance.data.update({ - "subset": subset, - "label": base_name, - "name": base_name, - "family": family, - "families": [], - "representations": [], - "asset": os.environ["AVALON_ASSET"] - }) + if existing_instance is None: + instance = context.create_instance(subset) + instance.data.update({ + "subset": subset, + "label": base_name, + "name": base_name, + "family": family, + "families": [], + "representations": [], + "asset": os.environ["AVALON_ASSET"] + }) + else: + instance = existing_instance # creating representation _, ext = os.path.splitext(file_path) diff --git a/openpype/hosts/photoshop/plugins/publish/extract_image.py b/openpype/hosts/photoshop/plugins/publish/extract_image.py index 04ce77ee34..d27c5bc028 100644 --- a/openpype/hosts/photoshop/plugins/publish/extract_image.py +++ b/openpype/hosts/photoshop/plugins/publish/extract_image.py @@ -16,7 +16,8 @@ class ExtractImage(openpype.api.Extractor): formats = ["png", "jpg"] def process(self, instance): - + print("PPPPPP") + self.log.info("fdfdsfdfs") staging_dir = self.staging_dir(instance) self.log.info("Outputting image to {}".format(staging_dir)) @@ -26,7 +27,13 @@ class ExtractImage(openpype.api.Extractor): with photoshop.maintained_selection(): self.log.info("Extracting %s" % str(list(instance))) with photoshop.maintained_visibility(): + self.log.info("instance.data:: {}".format(instance.data)) + print("instance.data::: {}".format(instance.data)) layer = instance.data.get("layer") + self.log.info("layer:: {}".format(layer)) + print("layer::: {}".format(layer)) + if not layer: + return ids = set([layer.id]) add_ids = instance.data.pop("ids", None) if add_ids: 
From a5ac3ab55b2c67604ef8e2530c57bdf242e6c599 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 16 Mar 2022 16:21:29 +0100 Subject: [PATCH 063/357] OP-2766 - implemented new context methods --- openpype/hosts/photoshop/api/pipeline.py | 17 ++++++++++++----- 1 file changed, 12 insertions(+), 5 deletions(-) diff --git a/openpype/hosts/photoshop/api/pipeline.py b/openpype/hosts/photoshop/api/pipeline.py index 8d64942c9e..0a99d1779d 100644 --- a/openpype/hosts/photoshop/api/pipeline.py +++ b/openpype/hosts/photoshop/api/pipeline.py @@ -236,14 +236,21 @@ def containerise( def get_context_data(): - pass + """Get stored values for context (validation enable/disable etc)""" + meta = _get_stub().get_layers_metadata() + for item in meta: + if item.get("id") == "publish_context": + item.pop("id") + return item + + return {} def update_context_data(data, changes): - # item = data - # item["id"] = "publish_context" - # _get_stub().imprint(item["id"], item) - pass + """Store value needed for context""" + item = data + item["id"] = "publish_context" + _get_stub().imprint(item["id"], item) def get_context_title(): From df5fdcc54c6ff125d307036b26a07572671047c9 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 17 Mar 2022 10:45:54 +0100 Subject: [PATCH 064/357] OP-2766 - do not store PSItem in metadata PSItem is not serializable --- openpype/hosts/photoshop/plugins/create/create_image.py | 1 + 1 file changed, 1 insertion(+) diff --git a/openpype/hosts/photoshop/plugins/create/create_image.py b/openpype/hosts/photoshop/plugins/create/create_image.py index a73b79e0fd..4fc9a86635 100644 --- a/openpype/hosts/photoshop/plugins/create/create_image.py +++ b/openpype/hosts/photoshop/plugins/create/create_image.py @@ -108,6 +108,7 @@ class ImageCreator(Creator): def update_instances(self, update_list): self.log.info("update_list:: {}".format(update_list)) created_inst, changes = update_list[0] + created_inst.pop("layer") # not storing PSItem layer to metadata api.stub().imprint(created_inst.get("instance_id"), created_inst.data_to_store()) From c422176553ff27cff8d5113958fadf0dc4ddf12e Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 17 Mar 2022 11:29:41 +0100 Subject: [PATCH 065/357] OP-2766 - removed hardcoded ftrack, CollectFtrackFamily should be used Added defaults for Ftrack Settings. --- .../plugins/publish/collect_review.py | 25 +++++++++++++------ .../defaults/project_settings/ftrack.json | 12 +++++++++ 2 files changed, 29 insertions(+), 8 deletions(-) diff --git a/openpype/hosts/photoshop/plugins/publish/collect_review.py b/openpype/hosts/photoshop/plugins/publish/collect_review.py index 5ab48b76da..4b6f855a6a 100644 --- a/openpype/hosts/photoshop/plugins/publish/collect_review.py +++ b/openpype/hosts/photoshop/plugins/publish/collect_review.py @@ -1,12 +1,24 @@ +""" +Requires: + None + +Provides: + instance -> family ("review") +""" + import os import pyblish.api class CollectReview(pyblish.api.ContextPlugin): - """Gather the active document as review instance.""" + """Gather the active document as review instance. - label = "Review" + Triggers once even if no 'image' is published as by defaults it creates + flatten image from a workfile. 
+ """ + + label = "Collect Review" order = pyblish.api.CollectorOrder hosts = ["photoshop"] @@ -15,16 +27,13 @@ class CollectReview(pyblish.api.ContextPlugin): task = os.getenv("AVALON_TASK", None) subset = family + task.capitalize() - file_path = context.data["currentFile"] - base_name = os.path.basename(file_path) - instance = context.create_instance(subset) instance.data.update({ "subset": subset, - "label": base_name, - "name": base_name, + "label": subset, + "name": subset, "family": family, - "families": ["ftrack"], + "families": [], "representations": [], "asset": os.environ["AVALON_ASSET"] }) diff --git a/openpype/settings/defaults/project_settings/ftrack.json b/openpype/settings/defaults/project_settings/ftrack.json index 01831efad1..015413e64f 100644 --- a/openpype/settings/defaults/project_settings/ftrack.json +++ b/openpype/settings/defaults/project_settings/ftrack.json @@ -344,6 +344,18 @@ "tasks": [], "add_ftrack_family": true, "advanced_filtering": [] + }, + { + "hosts": [ + "photoshop" + ], + "families": [ + "review" + ], + "task_types": [], + "tasks": [], + "add_ftrack_family": true, + "advanced_filtering": [] } ] }, From a6a1d0fc545d8fc5a8781f40468a95a261ca3b01 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 18 Mar 2022 15:22:46 +0100 Subject: [PATCH 066/357] OP-2766 - fixed broken remove_instance --- openpype/hosts/photoshop/api/ws_stub.py | 22 ++++++++++++++++++---- 1 file changed, 18 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/photoshop/api/ws_stub.py b/openpype/hosts/photoshop/api/ws_stub.py index dd29ef4e84..fa076ecc7e 100644 --- a/openpype/hosts/photoshop/api/ws_stub.py +++ b/openpype/hosts/photoshop/api/ws_stub.py @@ -77,14 +77,28 @@ class PhotoshopServerStub: layer: (PSItem) layers_meta: full list from Headline (for performance in loops) Returns: + (dict) of layer metadata stored in PS file + + Example: + { + 'id': 'pyblish.avalon.container', + 'loader': 'ImageLoader', + 'members': ['64'], + 'name': 'imageMainMiddle', + 'namespace': 'Hero_imageMainMiddle_001', + 'representation': '6203dc91e80934d9f6ee7d96', + 'schema': 'openpype:container-2.0' + } """ if layers_meta is None: layers_meta = self.get_layers_metadata() for layer_meta in layers_meta: + layer_id = layer_meta.get("uuid") # legacy if layer_meta.get("members"): - if layer.id == layer_meta["members"][0]: - return layer + layer_id = layer_meta["members"][0] + if str(layer.id) == str(layer_id): + return layer_meta print("Unable to find layer metadata for {}".format(layer.id)) def imprint(self, item_id, data, all_layers=None, items_meta=None): @@ -399,7 +413,7 @@ class PhotoshopServerStub: if res: layers_data = json.loads(res) except json.decoder.JSONDecodeError: - pass + raise ValueError("{} cannot be parsed, recreate meta".format(res)) # format of metadata changed from {} to [] because of standardization # keep current implementation logic as its working if isinstance(layers_data, dict): @@ -482,7 +496,7 @@ class PhotoshopServerStub: for item in self.get_layers_metadata(): inst_id = item.get("instance_id") or item.get("uuid") if inst_id != instance_id: - cleaned_data.append(inst_id) + cleaned_data.append(item) payload = json.dumps(cleaned_data, indent=4) From 01f2c8c1044ddeb78912dc2f6e401a4700e1a67d Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 18 Mar 2022 15:24:51 +0100 Subject: [PATCH 067/357] OP-2766 - fixed layer and variant keys --- .../hosts/photoshop/plugins/create/create_image.py | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git 
a/openpype/hosts/photoshop/plugins/create/create_image.py b/openpype/hosts/photoshop/plugins/create/create_image.py index 4fc9a86635..c24d8bde2f 100644 --- a/openpype/hosts/photoshop/plugins/create/create_image.py +++ b/openpype/hosts/photoshop/plugins/create/create_image.py @@ -16,18 +16,13 @@ class ImageCreator(Creator): description = "Image creator" def collect_instances(self): - import json - self.log.info("ImageCreator: api.list_instances():: {}".format( - json.dumps(api.list_instances(), indent=4))) for instance_data in api.list_instances(): # legacy instances have family=='image' creator_id = (instance_data.get("creator_identifier") or instance_data.get("family")) - self.log.info("ImageCreator: instance_data:: {}".format(json.dumps(instance_data, indent=4))) if creator_id == self.identifier: instance_data = self._handle_legacy(instance_data) - layer = api.stub().get_layer(instance_data["members"][0]) instance_data["layer"] = layer instance = CreatedInstance.from_existing( @@ -106,9 +101,10 @@ class ImageCreator(Creator): stub.rename_layer(group.id, stub.PUBLISH_ICON + group.name) def update_instances(self, update_list): - self.log.info("update_list:: {}".format(update_list)) + self.log.debug("update_list:: {}".format(update_list)) created_inst, changes = update_list[0] - created_inst.pop("layer") # not storing PSItem layer to metadata + if created_inst.get("layer"): + created_inst.pop("layer") # not storing PSItem layer to metadata api.stub().imprint(created_inst.get("instance_id"), created_inst.data_to_store()) @@ -147,6 +143,9 @@ class ImageCreator(Creator): if not instance_data.get("task"): instance_data["task"] = avalon_api.Session.get("AVALON_TASK") + if not instance_data.get("variant"): + instance_data["variant"] = '' + return instance_data def _clean_highlights(self, stub, item): From 6d787cadd1e21d966332d02b3c6f0915b15633ee Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Fri, 18 Mar 2022 15:07:30 +0000 Subject: [PATCH 068/357] Implemented render publishing --- openpype/hosts/unreal/api/pipeline.py | 22 ++++ .../unreal/plugins/create/create_render.py | 113 ++++++++++++++++++ .../plugins/publish/collect_instances.py | 2 +- .../plugins/publish/collect_remove_marked.py | 24 ++++ .../publish/collect_render_instances.py | 106 ++++++++++++++++ .../publish/validate_sequence_frames.py | 45 +++++++ openpype/plugins/publish/extract_review.py | 3 +- 7 files changed, 313 insertions(+), 2 deletions(-) create mode 100644 openpype/hosts/unreal/plugins/create/create_render.py create mode 100644 openpype/hosts/unreal/plugins/publish/collect_remove_marked.py create mode 100644 openpype/hosts/unreal/plugins/publish/collect_render_instances.py create mode 100644 openpype/hosts/unreal/plugins/publish/validate_sequence_frames.py diff --git a/openpype/hosts/unreal/api/pipeline.py b/openpype/hosts/unreal/api/pipeline.py index 9ec11b942d..cf5ac6e4e0 100644 --- a/openpype/hosts/unreal/api/pipeline.py +++ b/openpype/hosts/unreal/api/pipeline.py @@ -47,6 +47,7 @@ def install(): print("installing OpenPype for Unreal ...") print("-=" * 40) logger.info("installing OpenPype for Unreal") + pyblish.api.register_host("unreal") pyblish.api.register_plugin_path(str(PUBLISH_PATH)) register_loader_plugin_path(str(LOAD_PATH)) api.register_plugin_path(LegacyCreator, str(CREATE_PATH)) @@ -416,3 +417,24 @@ def cast_map_to_str_dict(umap) -> dict: """ return {str(key): str(value) for (key, value) in umap.items()} + + +def get_subsequences(sequence: unreal.LevelSequence): + """Get list of subsequences from sequence. 
+ + Args: + sequence (unreal.LevelSequence): Sequence + + Returns: + list(unreal.LevelSequence): List of subsequences + + """ + tracks = sequence.get_master_tracks() + subscene_track = None + for t in tracks: + if t.get_class() == unreal.MovieSceneSubTrack.static_class(): + subscene_track = t + break + if subscene_track is not None and subscene_track.get_sections(): + return subscene_track.get_sections() + return [] diff --git a/openpype/hosts/unreal/plugins/create/create_render.py b/openpype/hosts/unreal/plugins/create/create_render.py new file mode 100644 index 0000000000..49268c91f5 --- /dev/null +++ b/openpype/hosts/unreal/plugins/create/create_render.py @@ -0,0 +1,113 @@ +import unreal + +from openpype.hosts.unreal.api import pipeline +from openpype.hosts.unreal.api.plugin import Creator + + +class CreateRender(Creator): + """Create instance for sequence for rendering""" + + name = "unrealRender" + label = "Unreal - Render" + family = "render" + icon = "cube" + asset_types = ["LevelSequence"] + + root = "/Game/OpenPype/PublishInstances" + suffix = "_INS" + + def __init__(self, *args, **kwargs): + super(CreateRender, self).__init__(*args, **kwargs) + + def process(self): + subset = self.data["subset"] + + ar = unreal.AssetRegistryHelpers.get_asset_registry() + + # Get the master sequence and the master level. + # There should be only one sequence and one level in the directory. + filter = unreal.ARFilter( + class_names=["LevelSequence"], + package_paths=[f"/Game/OpenPype/{self.data['asset']}"], + recursive_paths=False) + sequences = ar.get_assets(filter) + ms = sequences[0].get_editor_property('object_path') + filter = unreal.ARFilter( + class_names=["World"], + package_paths=[f"/Game/OpenPype/{self.data['asset']}"], + recursive_paths=False) + levels = ar.get_assets(filter) + ml = levels[0].get_editor_property('object_path') + + selection = [] + if (self.options or {}).get("useSelection"): + sel_objects = unreal.EditorUtilityLibrary.get_selected_assets() + selection = [ + a.get_path_name() for a in sel_objects + if a.get_class().get_name() in self.asset_types] + else: + selection.append(self.data['sequence']) + + unreal.log(f"selection: {selection}") + + path = f"{self.root}" + unreal.EditorAssetLibrary.make_directory(path) + + ar = unreal.AssetRegistryHelpers.get_asset_registry() + + for a in selection: + ms_obj = ar.get_asset_by_object_path(ms).get_asset() + + seq_data = None + + if a == ms: + seq_data = { + "sequence": ms_obj, + "output": f"{ms_obj.get_name()}", + "frame_range": ( + ms_obj.get_playback_start(), ms_obj.get_playback_end()) + } + else: + seq_data_list = [{ + "sequence": ms_obj, + "output": f"{ms_obj.get_name()}", + "frame_range": ( + ms_obj.get_playback_start(), ms_obj.get_playback_end()) + }] + + for s in seq_data_list: + subscenes = pipeline.get_subsequences(s.get('sequence')) + + for ss in subscenes: + curr_data = { + "sequence": ss.get_sequence(), + "output": (f"{s.get('output')}/" + f"{ss.get_sequence().get_name()}"), + "frame_range": ( + ss.get_start_frame(), ss.get_end_frame() - 1) + } + + if ss.get_sequence().get_path_name() == a: + seq_data = curr_data + break + seq_data_list.append(curr_data) + + if seq_data is not None: + break + + if not seq_data: + continue + + d = self.data.copy() + d["members"] = [a] + d["sequence"] = a + d["master_sequence"] = ms + d["master_level"] = ml + d["output"] = seq_data.get('output') + d["frameStart"] = seq_data.get('frame_range')[0] + d["frameEnd"] = seq_data.get('frame_range')[1] + + container_name = f"{subset}{self.suffix}" 
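+            # The imprinted dictionary is read back by the render collector at
+            # publish time: sequence path, output folder, master sequence and
+            # level, and the frame range.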
+ pipeline.create_publish_instance( + instance=container_name, path=path) + pipeline.imprint(f"{path}/{container_name}", d) diff --git a/openpype/hosts/unreal/plugins/publish/collect_instances.py b/openpype/hosts/unreal/plugins/publish/collect_instances.py index 94e732d728..2f604cb322 100644 --- a/openpype/hosts/unreal/plugins/publish/collect_instances.py +++ b/openpype/hosts/unreal/plugins/publish/collect_instances.py @@ -17,7 +17,7 @@ class CollectInstances(pyblish.api.ContextPlugin): """ label = "Collect Instances" - order = pyblish.api.CollectorOrder + order = pyblish.api.CollectorOrder - 0.1 hosts = ["unreal"] def process(self, context): diff --git a/openpype/hosts/unreal/plugins/publish/collect_remove_marked.py b/openpype/hosts/unreal/plugins/publish/collect_remove_marked.py new file mode 100644 index 0000000000..69e69f6630 --- /dev/null +++ b/openpype/hosts/unreal/plugins/publish/collect_remove_marked.py @@ -0,0 +1,24 @@ +import pyblish.api + + +class CollectRemoveMarked(pyblish.api.ContextPlugin): + """Remove marked data + + Remove instances that have 'remove' in their instance.data + + """ + + order = pyblish.api.CollectorOrder + 0.499 + label = 'Remove Marked Instances' + + def process(self, context): + + self.log.debug(context) + # make ftrack publishable + instances_to_remove = [] + for instance in context: + if instance.data.get('remove'): + instances_to_remove.append(instance) + + for instance in instances_to_remove: + context.remove(instance) diff --git a/openpype/hosts/unreal/plugins/publish/collect_render_instances.py b/openpype/hosts/unreal/plugins/publish/collect_render_instances.py new file mode 100644 index 0000000000..6eb51517c6 --- /dev/null +++ b/openpype/hosts/unreal/plugins/publish/collect_render_instances.py @@ -0,0 +1,106 @@ +from pathlib import Path +from tkinter.font import families +import unreal + +import pyblish.api +from openpype import lib +from openpype.pipeline import legacy_create +from openpype.hosts.unreal.api import pipeline + + +class CollectRenderInstances(pyblish.api.InstancePlugin): + """ This collector will try to find all the rendered frames. 
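+    It expands the imprinted render instance into one publishable instance
+    per leaf (sub)sequence, skipping camera sequences, and marks the
+    original instance for removal.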
+ + """ + order = pyblish.api.CollectorOrder + hosts = ["unreal"] + families = ["render"] + label = "Collect Render Instances" + + def process(self, instance): + self.log.debug("Preparing Rendering Instances") + + context = instance.context + + data = instance.data + data['remove'] = True + + ar = unreal.AssetRegistryHelpers.get_asset_registry() + + sequence = ar.get_asset_by_object_path( + data.get('sequence')).get_asset() + + sequences = [{ + "sequence": sequence, + "output": data.get('output'), + "frame_range": ( + data.get('frameStart'), data.get('frameEnd')) + }] + + for s in sequences: + self.log.debug(f"Processing: {s.get('sequence').get_name()}") + subscenes = pipeline.get_subsequences(s.get('sequence')) + + if subscenes: + for ss in subscenes: + sequences.append({ + "sequence": ss.get_sequence(), + "output": (f"{s.get('output')}/" + f"{ss.get_sequence().get_name()}"), + "frame_range": ( + ss.get_start_frame(), ss.get_end_frame() - 1) + }) + else: + # Avoid creating instances for camera sequences + if "_camera" not in s.get('sequence').get_name(): + seq = s.get('sequence') + seq_name = seq.get_name() + + new_instance = context.create_instance( + f"{data.get('subset')}_" + f"{seq_name}") + new_instance[:] = seq_name + + new_data = new_instance.data + + new_data["asset"] = seq_name + new_data["setMembers"] = seq_name + new_data["family"] = "render" + new_data["families"] = ["render", "review"] + new_data["parent"] = data.get("parent") + new_data["subset"] = f"{data.get('subset')}_{seq_name}" + new_data["level"] = data.get("level") + new_data["output"] = s.get('output') + new_data["fps"] = seq.get_display_rate().numerator + new_data["frameStart"] = s.get('frame_range')[0] + new_data["frameEnd"] = s.get('frame_range')[1] + new_data["sequence"] = seq.get_path_name() + new_data["master_sequence"] = data["master_sequence"] + new_data["master_level"] = data["master_level"] + + self.log.debug(f"new instance data: {new_data}") + + project_dir = unreal.Paths.project_dir() + render_dir = (f"{project_dir}/Saved/MovieRenders/" + f"{s.get('output')}") + render_path = Path(render_dir) + + frames = [] + + for x in render_path.iterdir(): + if x.is_file() and x.suffix == '.png': + frames.append(str(x.name)) + + if "representations" not in new_instance.data: + new_instance.data["representations"] = [] + + repr = { + 'frameStart': s.get('frame_range')[0], + 'frameEnd': s.get('frame_range')[1], + 'name': 'png', + 'ext': 'png', + 'files': frames, + 'stagingDir': render_dir, + 'tags': ['review'] + } + new_instance.data["representations"].append(repr) diff --git a/openpype/hosts/unreal/plugins/publish/validate_sequence_frames.py b/openpype/hosts/unreal/plugins/publish/validate_sequence_frames.py new file mode 100644 index 0000000000..0a77281d16 --- /dev/null +++ b/openpype/hosts/unreal/plugins/publish/validate_sequence_frames.py @@ -0,0 +1,45 @@ +from pathlib import Path + +import unreal + +import pyblish.api + + +class ValidateSequenceFrames(pyblish.api.InstancePlugin): + """Ensure the sequence of frames is complete + + The files found in the folder are checked against the frameStart and + frameEnd of the instance. If the first or last file is not + corresponding with the first or last frame it is flagged as invalid. 
+ """ + + order = pyblish.api.ValidatorOrder + label = "Validate Sequence Frames" + families = ["render"] + hosts = ["unreal"] + optional = True + + def process(self, instance): + self.log.debug(instance.data) + + representations = instance.data.get("representations") + for repr in representations: + frames = [] + for x in repr.get("files"): + # Get frame number. The last one contains the file extension, + # while the one before that is the frame number. + # `lstrip` removes any leading zeros. `or "0"` is to tackle + # the case where the frame number is "00". + frame = int(str(x).split('.')[-2]) + frames.append(frame) + frames.sort() + current_range = (frames[0], frames[-1]) + required_range = (instance.data["frameStart"], + instance.data["frameEnd"]) + + if current_range != required_range: + raise ValueError(f"Invalid frame range: {current_range} - " + f"expected: {required_range}") + + assert len(frames) == int(frames[-1]) - int(frames[0]) + 1, \ + "Missing frames" diff --git a/openpype/plugins/publish/extract_review.py b/openpype/plugins/publish/extract_review.py index 3ecea1f8bd..35ad6270cf 100644 --- a/openpype/plugins/publish/extract_review.py +++ b/openpype/plugins/publish/extract_review.py @@ -51,7 +51,8 @@ class ExtractReview(pyblish.api.InstancePlugin): "resolve", "webpublisher", "aftereffects", - "flame" + "flame", + "unreal" ] # Supported extensions From b9f387dc505664f0c52c6a45330cbc2c6786a611 Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Fri, 18 Mar 2022 16:09:53 +0000 Subject: [PATCH 069/357] Hound fix --- .../hosts/unreal/plugins/publish/collect_render_instances.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/openpype/hosts/unreal/plugins/publish/collect_render_instances.py b/openpype/hosts/unreal/plugins/publish/collect_render_instances.py index 6eb51517c6..9d60b65d08 100644 --- a/openpype/hosts/unreal/plugins/publish/collect_render_instances.py +++ b/openpype/hosts/unreal/plugins/publish/collect_render_instances.py @@ -1,10 +1,7 @@ from pathlib import Path -from tkinter.font import families import unreal import pyblish.api -from openpype import lib -from openpype.pipeline import legacy_create from openpype.hosts.unreal.api import pipeline From 9fab478edf926bbe45dcfa294d2b53c767ebf086 Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Fri, 18 Mar 2022 16:54:20 +0000 Subject: [PATCH 070/357] Improvements and more consistency in validator for rendered frames --- .../publish/validate_sequence_frames.py | 25 +++++++++---------- 1 file changed, 12 insertions(+), 13 deletions(-) diff --git a/openpype/hosts/unreal/plugins/publish/validate_sequence_frames.py b/openpype/hosts/unreal/plugins/publish/validate_sequence_frames.py index 0a77281d16..2684581e9d 100644 --- a/openpype/hosts/unreal/plugins/publish/validate_sequence_frames.py +++ b/openpype/hosts/unreal/plugins/publish/validate_sequence_frames.py @@ -1,4 +1,5 @@ from pathlib import Path +import clique import unreal @@ -20,19 +21,17 @@ class ValidateSequenceFrames(pyblish.api.InstancePlugin): optional = True def process(self, instance): - self.log.debug(instance.data) - representations = instance.data.get("representations") for repr in representations: - frames = [] - for x in repr.get("files"): - # Get frame number. The last one contains the file extension, - # while the one before that is the frame number. - # `lstrip` removes any leading zeros. `or "0"` is to tackle - # the case where the frame number is "00". 
- frame = int(str(x).split('.')[-2]) - frames.append(frame) - frames.sort() + patterns = [clique.PATTERNS["frames"]] + collections, remainder = clique.assemble( + repr["files"], minimum_items=1, patterns=patterns) + + assert not remainder, "Must not have remainder" + assert len(collections) == 1, "Must detect single collection" + collection = collections[0] + frames = list(collection.indexes) + current_range = (frames[0], frames[-1]) required_range = (instance.data["frameStart"], instance.data["frameEnd"]) @@ -41,5 +40,5 @@ class ValidateSequenceFrames(pyblish.api.InstancePlugin): raise ValueError(f"Invalid frame range: {current_range} - " f"expected: {required_range}") - assert len(frames) == int(frames[-1]) - int(frames[0]) + 1, \ - "Missing frames" + missing = collection.holes().indexes + assert not missing, "Missing frames: %s" % (missing,) From 541f44988dc654bd2609d865875a1ed47908014e Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Fri, 18 Mar 2022 16:55:54 +0000 Subject: [PATCH 071/357] More hound fixes --- .../hosts/unreal/plugins/publish/validate_sequence_frames.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/openpype/hosts/unreal/plugins/publish/validate_sequence_frames.py b/openpype/hosts/unreal/plugins/publish/validate_sequence_frames.py index 2684581e9d..87f1338ee8 100644 --- a/openpype/hosts/unreal/plugins/publish/validate_sequence_frames.py +++ b/openpype/hosts/unreal/plugins/publish/validate_sequence_frames.py @@ -1,8 +1,5 @@ -from pathlib import Path import clique -import unreal - import pyblish.api From 0274be6bed9c7bf92c862ae33c0eab0db3b88eb4 Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Fri, 18 Mar 2022 17:22:52 +0000 Subject: [PATCH 072/357] Added rendering --- openpype/hosts/unreal/api/rendering.py | 125 +++++++++++++++++++++++++ 1 file changed, 125 insertions(+) create mode 100644 openpype/hosts/unreal/api/rendering.py diff --git a/openpype/hosts/unreal/api/rendering.py b/openpype/hosts/unreal/api/rendering.py new file mode 100644 index 0000000000..376e1b75ce --- /dev/null +++ b/openpype/hosts/unreal/api/rendering.py @@ -0,0 +1,125 @@ +import unreal + +from openpype.hosts.unreal.api import pipeline + + +queue = None +executor = None + + +def _queue_finish_callback(exec, success): + unreal.log("Render completed. Success: " + str(success)) + + # Delete our reference so we don't keep it alive. + global executor + global queue + del executor + del queue + + +def _job_finish_callback(job, success): + # You can make any edits you want to the editor world here, and the world + # will be duplicated when the next render happens. Make sure you undo your + # edits in OnQueueFinishedCallback if you don't want to leak state changes + # into the editor world. + unreal.log("Individual job completed.") + + +def start_rendering(): + """ + Start the rendering process. 
+ """ + print("Starting rendering...") + + # Get selected sequences + assets = unreal.EditorUtilityLibrary.get_selected_assets() + + # instances = pipeline.ls_inst() + instances = [ + a for a in assets + if a.get_class().get_name() == "OpenPypePublishInstance"] + + inst_data = [] + + for i in instances: + data = pipeline.parse_container(i.get_path_name()) + if data["family"] == "render": + inst_data.append(data) + + # subsystem = unreal.get_editor_subsystem( + # unreal.MoviePipelineQueueSubsystem) + # queue = subsystem.get_queue() + global queue + queue = unreal.MoviePipelineQueue() + + ar = unreal.AssetRegistryHelpers.get_asset_registry() + + for i in inst_data: + sequence = ar.get_asset_by_object_path(i["sequence"]).get_asset() + + sequences = [{ + "sequence": sequence, + "output": f"{i['output']}", + "frame_range": ( + int(float(i["frameStart"])), + int(float(i["frameEnd"])) + 1) + }] + render_list = [] + + # Get all the sequences to render. If there are subsequences, + # add them and their frame ranges to the render list. We also + # use the names for the output paths. + for s in sequences: + subscenes = pipeline.get_subsequences(s.get('sequence')) + + if subscenes: + for ss in subscenes: + sequences.append({ + "sequence": ss.get_sequence(), + "output": (f"{s.get('output')}/" + f"{ss.get_sequence().get_name()}"), + "frame_range": ( + ss.get_start_frame(), ss.get_end_frame()) + }) + else: + # Avoid rendering camera sequences + if "_camera" not in s.get('sequence').get_name(): + render_list.append(s) + + # Create the rendering jobs and add them to the queue. + for r in render_list: + job = queue.allocate_new_job(unreal.MoviePipelineExecutorJob) + job.sequence = unreal.SoftObjectPath(i["master_sequence"]) + job.map = unreal.SoftObjectPath(i["master_level"]) + job.author = "OpenPype" + + # User data could be used to pass data to the job, that can be + # read in the job's OnJobFinished callback. We could, + # for instance, pass the AvalonPublishInstance's path to the job. + # job.user_data = "" + + settings = job.get_configuration().find_or_add_setting_by_class( + unreal.MoviePipelineOutputSetting) + settings.output_resolution = unreal.IntPoint(1920, 1080) + settings.custom_start_frame = r.get("frame_range")[0] + settings.custom_end_frame = r.get("frame_range")[1] + settings.use_custom_playback_range = True + settings.file_name_format = "{sequence_name}.{frame_number}" + settings.output_directory.path += r.get('output') + + renderPass = job.get_configuration().find_or_add_setting_by_class( + unreal.MoviePipelineDeferredPassBase) + renderPass.disable_multisample_effects = True + + job.get_configuration().find_or_add_setting_by_class( + unreal.MoviePipelineImageSequenceOutput_PNG) + + # If there are jobs in the queue, start the rendering process. 
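+    # MoviePipelinePIEExecutor runs the renders in a Play In Editor session;
+    # the module-level queue and executor references keep both alive until
+    # _queue_finish_callback deletes them once rendering has finished.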
+ if queue.get_jobs(): + global executor + executor = unreal.MoviePipelinePIEExecutor() + executor.on_executor_finished_delegate.add_callable_unique( + _queue_finish_callback) + executor.on_individual_job_finished_delegate.add_callable_unique( + _job_finish_callback) # Only available on PIE Executor + executor.execute(queue) From a8680e9f23e885bd3a1957876198b9c249f23fdb Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Fri, 18 Mar 2022 17:25:03 +0000 Subject: [PATCH 073/357] Code cleanup --- openpype/hosts/unreal/plugins/create/create_render.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/openpype/hosts/unreal/plugins/create/create_render.py b/openpype/hosts/unreal/plugins/create/create_render.py index 49268c91f5..77fc98bcec 100644 --- a/openpype/hosts/unreal/plugins/create/create_render.py +++ b/openpype/hosts/unreal/plugins/create/create_render.py @@ -16,9 +16,6 @@ class CreateRender(Creator): root = "/Game/OpenPype/PublishInstances" suffix = "_INS" - def __init__(self, *args, **kwargs): - super(CreateRender, self).__init__(*args, **kwargs) - def process(self): subset = self.data["subset"] From 8f8a4efab9ccf7c30089e78d6dcffb4b76142ce3 Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Fri, 18 Mar 2022 17:39:43 +0000 Subject: [PATCH 074/357] Fixed import problems --- .../unreal/plugins/load/load_animation.py | 29 +++++++++++-------- .../hosts/unreal/plugins/load/load_layout.py | 1 + 2 files changed, 18 insertions(+), 12 deletions(-) diff --git a/openpype/hosts/unreal/plugins/load/load_animation.py b/openpype/hosts/unreal/plugins/load/load_animation.py index ebfce75ca9..65a9de9353 100644 --- a/openpype/hosts/unreal/plugins/load/load_animation.py +++ b/openpype/hosts/unreal/plugins/load/load_animation.py @@ -3,6 +3,11 @@ import os import json +import unreal +from unreal import EditorAssetLibrary +from unreal import MovieSceneSkeletalAnimationTrack +from unreal import MovieSceneSkeletalAnimationSection + from avalon import pipeline from openpype.pipeline import get_representation_path from openpype.hosts.unreal.api import plugin @@ -82,14 +87,14 @@ class AnimationFBXLoader(plugin.Loader): unreal.AssetToolsHelpers.get_asset_tools().import_asset_tasks([task]) - asset_content = unreal.EditorAssetLibrary.list_assets( + asset_content = EditorAssetLibrary.list_assets( asset_dir, recursive=True, include_folder=True ) animation = None for a in asset_content: - imported_asset_data = unreal.EditorAssetLibrary.find_asset_data(a) + imported_asset_data = EditorAssetLibrary.find_asset_data(a) imported_asset = unreal.AssetRegistryHelpers.get_asset( imported_asset_data) if imported_asset.__class__ == unreal.AnimSequence: @@ -149,7 +154,7 @@ class AnimationFBXLoader(plugin.Loader): container_name += suffix - unreal.EditorAssetLibrary.make_directory(asset_dir) + EditorAssetLibrary.make_directory(asset_dir) libpath = self.fname.replace("fbx", "json") @@ -160,7 +165,7 @@ class AnimationFBXLoader(plugin.Loader): animation = self._process(asset_dir, container_name, instance_name) - asset_content = unreal.EditorAssetLibrary.list_assets( + asset_content = EditorAssetLibrary.list_assets( hierarchy_dir, recursive=True, include_folder=False) # Get the sequence for the layout, excluding the camera one. 
@@ -211,11 +216,11 @@ class AnimationFBXLoader(plugin.Loader): unreal_pipeline.imprint( "{}/{}".format(asset_dir, container_name), data) - imported_content = unreal.EditorAssetLibrary.list_assets( + imported_content = EditorAssetLibrary.list_assets( asset_dir, recursive=True, include_folder=False) for a in imported_content: - unreal.EditorAssetLibrary.save_asset(a) + EditorAssetLibrary.save_asset(a) def update(self, container, representation): name = container["asset_name"] @@ -261,7 +266,7 @@ class AnimationFBXLoader(plugin.Loader): task.options.anim_sequence_import_data.set_editor_property( 'convert_scene', True) - skeletal_mesh = unreal.EditorAssetLibrary.load_asset( + skeletal_mesh = EditorAssetLibrary.load_asset( container.get('namespace') + "/" + container.get('asset_name')) skeleton = skeletal_mesh.get_editor_property('skeleton') task.options.set_editor_property('skeleton', skeleton) @@ -278,22 +283,22 @@ class AnimationFBXLoader(plugin.Loader): "parent": str(representation["parent"]) }) - asset_content = unreal.EditorAssetLibrary.list_assets( + asset_content = EditorAssetLibrary.list_assets( destination_path, recursive=True, include_folder=True ) for a in asset_content: - unreal.EditorAssetLibrary.save_asset(a) + EditorAssetLibrary.save_asset(a) def remove(self, container): path = container["namespace"] parent_path = os.path.dirname(path) - unreal.EditorAssetLibrary.delete_directory(path) + EditorAssetLibrary.delete_directory(path) - asset_content = unreal.EditorAssetLibrary.list_assets( + asset_content = EditorAssetLibrary.list_assets( parent_path, recursive=False, include_folder=True ) if len(asset_content) == 0: - unreal.EditorAssetLibrary.delete_directory(parent_path) + EditorAssetLibrary.delete_directory(parent_path) diff --git a/openpype/hosts/unreal/plugins/load/load_layout.py b/openpype/hosts/unreal/plugins/load/load_layout.py index 5a82ad6df6..86923ea3b4 100644 --- a/openpype/hosts/unreal/plugins/load/load_layout.py +++ b/openpype/hosts/unreal/plugins/load/load_layout.py @@ -12,6 +12,7 @@ from unreal import AssetToolsHelpers from unreal import FBXImportType from unreal import MathLibrary as umath +from avalon import io from avalon.pipeline import AVALON_CONTAINER_ID from openpype.pipeline import ( discover_loader_plugins, From b71554fe25375af9e87b7c854d3492d9f932de02 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 22 Mar 2022 14:56:29 +0100 Subject: [PATCH 075/357] OP-2765 - fix for update of multiple instances --- openpype/hosts/aftereffects/plugins/create/create_render.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/aftereffects/plugins/create/create_render.py b/openpype/hosts/aftereffects/plugins/create/create_render.py index 1a5a826137..e4f1f57b84 100644 --- a/openpype/hosts/aftereffects/plugins/create/create_render.py +++ b/openpype/hosts/aftereffects/plugins/create/create_render.py @@ -34,9 +34,9 @@ class RenderCreator(Creator): self._add_instance_to_context(instance) def update_instances(self, update_list): - created_inst, changes = update_list[0] - api.get_stub().imprint(created_inst.get("instance_id"), - created_inst.data_to_store()) + for created_inst, _changes in update_list: + api.get_stub().imprint(created_inst.get("instance_id"), + created_inst.data_to_store()) def remove_instances(self, instances): for instance in instances: From 6fde2110148e62649ae3bd0d25726d5dd9c16859 Mon Sep 17 00:00:00 2001 From: Pype Club Date: Tue, 22 Mar 2022 16:37:31 +0100 Subject: [PATCH 076/357] OP-2766 - fix loaders because of change 
in imprint signature --- openpype/hosts/photoshop/plugins/load/load_image.py | 4 ++-- openpype/hosts/photoshop/plugins/load/load_reference.py | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/photoshop/plugins/load/load_image.py b/openpype/hosts/photoshop/plugins/load/load_image.py index 0a9421b8f2..91a9787781 100644 --- a/openpype/hosts/photoshop/plugins/load/load_image.py +++ b/openpype/hosts/photoshop/plugins/load/load_image.py @@ -61,7 +61,7 @@ class ImageLoader(photoshop.PhotoshopLoader): ) stub.imprint( - layer, {"representation": str(representation["_id"])} + layer.id, {"representation": str(representation["_id"])} ) def remove(self, container): @@ -73,7 +73,7 @@ class ImageLoader(photoshop.PhotoshopLoader): stub = self.get_stub() layer = container.pop("layer") - stub.imprint(layer, {}) + stub.imprint(layer.id, {}) stub.delete_layer(layer.id) def switch(self, container, representation): diff --git a/openpype/hosts/photoshop/plugins/load/load_reference.py b/openpype/hosts/photoshop/plugins/load/load_reference.py index f5f0545d39..1f32a5d23c 100644 --- a/openpype/hosts/photoshop/plugins/load/load_reference.py +++ b/openpype/hosts/photoshop/plugins/load/load_reference.py @@ -61,7 +61,7 @@ class ReferenceLoader(photoshop.PhotoshopLoader): ) stub.imprint( - layer, {"representation": str(representation["_id"])} + layer.id, {"representation": str(representation["_id"])} ) def remove(self, container): @@ -72,7 +72,7 @@ class ReferenceLoader(photoshop.PhotoshopLoader): """ stub = self.get_stub() layer = container.pop("layer") - stub.imprint(layer, {}) + stub.imprint(layer.id, {}) stub.delete_layer(layer.id) def switch(self, container, representation): From bdc3a05c4d52a29c1aaff99d83c993be48c7563e Mon Sep 17 00:00:00 2001 From: Pype Club Date: Tue, 22 Mar 2022 16:38:46 +0100 Subject: [PATCH 077/357] OP-2766 - fix wrongly used functions --- openpype/hosts/photoshop/api/pipeline.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/photoshop/api/pipeline.py b/openpype/hosts/photoshop/api/pipeline.py index 2f4343753c..abc4e63bf6 100644 --- a/openpype/hosts/photoshop/api/pipeline.py +++ b/openpype/hosts/photoshop/api/pipeline.py @@ -179,10 +179,10 @@ def remove_instance(instance): stub.remove_instance(inst_id) if instance.get("members"): - item = stub.get_item(instance["members"][0]) + item = stub.get_layer(instance["members"][0]) if item: - stub.rename_item(item.id, - item.name.replace(stub.PUBLISH_ICON, '')) + stub.rename_layer(item.id, + item.name.replace(stub.PUBLISH_ICON, '')) def _get_stub(): From b8dd330be3f0de72ba1a28652dff2ae4702c3dc2 Mon Sep 17 00:00:00 2001 From: Pype Club Date: Tue, 22 Mar 2022 16:40:28 +0100 Subject: [PATCH 078/357] OP-2766 - fix new creator for multiple instance's update --- .../hosts/photoshop/plugins/create/create_image.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/openpype/hosts/photoshop/plugins/create/create_image.py b/openpype/hosts/photoshop/plugins/create/create_image.py index c24d8bde2f..bc0fa6a051 100644 --- a/openpype/hosts/photoshop/plugins/create/create_image.py +++ b/openpype/hosts/photoshop/plugins/create/create_image.py @@ -102,11 +102,11 @@ class ImageCreator(Creator): def update_instances(self, update_list): self.log.debug("update_list:: {}".format(update_list)) - created_inst, changes = update_list[0] - if created_inst.get("layer"): - created_inst.pop("layer") # not storing PSItem layer to metadata - 
api.stub().imprint(created_inst.get("instance_id"), - created_inst.data_to_store()) + for created_inst, _changes in update_list: + if created_inst.get("layer"): + created_inst.pop("layer") # not storing PSItem layer to metadata + api.stub().imprint(created_inst.get("instance_id"), + created_inst.data_to_store()) def remove_instances(self, instances): for instance in instances: From adc135cb4c1d09eb27d51dae067f054a93c74d77 Mon Sep 17 00:00:00 2001 From: Pype Club Date: Tue, 22 Mar 2022 16:59:53 +0100 Subject: [PATCH 079/357] OP-2766 - added newPublishing flag to differentiate old from new --- openpype/plugins/publish/collect_from_create_context.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/plugins/publish/collect_from_create_context.py b/openpype/plugins/publish/collect_from_create_context.py index 16e3f669c3..09584ab37c 100644 --- a/openpype/plugins/publish/collect_from_create_context.py +++ b/openpype/plugins/publish/collect_from_create_context.py @@ -25,7 +25,7 @@ class CollectFromCreateContext(pyblish.api.ContextPlugin): # Update global data to context context.data.update(create_context.context_data_to_store()) - + context.data["newPublishing"] = True # Update context data for key in ("AVALON_PROJECT", "AVALON_ASSET", "AVALON_TASK"): value = create_context.dbcon.Session.get(key) From 96d88e592d56cb5193a13764aba9f5fcecff9616 Mon Sep 17 00:00:00 2001 From: Pype Club Date: Tue, 22 Mar 2022 17:01:35 +0100 Subject: [PATCH 080/357] OP-2766 - renamed collector --- openpype/hosts/photoshop/plugins/publish/collect_instances.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/photoshop/plugins/publish/collect_instances.py b/openpype/hosts/photoshop/plugins/publish/collect_instances.py index ee402dcabf..d506b9a5bf 100644 --- a/openpype/hosts/photoshop/plugins/publish/collect_instances.py +++ b/openpype/hosts/photoshop/plugins/publish/collect_instances.py @@ -13,7 +13,7 @@ class CollectInstances(pyblish.api.ContextPlugin): id (str): "pyblish.avalon.instance" """ - label = "Instances" + label = "Collect Instances" order = pyblish.api.CollectorOrder hosts = ["photoshop"] families_mapping = { From e86dc1acd77b841d36486a594862473e6aaf76a8 Mon Sep 17 00:00:00 2001 From: Pype Club Date: Tue, 22 Mar 2022 19:57:02 +0100 Subject: [PATCH 081/357] OP-2766 - refactored new creator --- .../photoshop/plugins/create/create_image.py | 79 ++++++++----------- 1 file changed, 33 insertions(+), 46 deletions(-) diff --git a/openpype/hosts/photoshop/plugins/create/create_image.py b/openpype/hosts/photoshop/plugins/create/create_image.py index bc0fa6a051..cd7e219bd0 100644 --- a/openpype/hosts/photoshop/plugins/create/create_image.py +++ b/openpype/hosts/photoshop/plugins/create/create_image.py @@ -3,8 +3,7 @@ from openpype.hosts.photoshop import api from openpype.pipeline import ( Creator, CreatedInstance, - lib, - CreatorError + lib ) @@ -30,65 +29,53 @@ class ImageCreator(Creator): ) self._add_instance_to_context(instance) - def create(self, subset_name, data, pre_create_data): - groups = [] - layers = [] - create_group = False + def create(self, subset_name_from_ui, data, pre_create_data): + groups_to_create = [] + top_layers_to_wrap = [] + create_empty_group = False stub = api.stub() # only after PS is up - multiple_instances = pre_create_data.get("create_multiple") - selection = stub.get_selected_layers() + top_level_selected_items = stub.get_selected_layers() if pre_create_data.get("use_selection"): - if len(selection) > 1: - if multiple_instances: - 
for item in selection: - if item.group: - groups.append(item) - else: - layers.append(item) + only_single_item_selected = len(top_level_selected_items) == 1 + for selected_item in top_level_selected_items: + if only_single_item_selected or pre_create_data.get("create_multiple"): + if selected_item.group: + groups_to_create.append(selected_item) + else: + top_layers_to_wrap.append(selected_item) else: - group = stub.group_selected_layers(subset_name) - groups.append(group) - elif len(selection) == 1: - # One selected item. Use group if its a LayerSet (group), else - # create a new group. - selected_item = selection[0] - if selected_item.group: - groups.append(selected_item) - else: - layers.append(selected_item) - elif len(selection) == 0: - # No selection creates an empty group. - create_group = True - else: - group = stub.create_group(subset_name) - groups.append(group) + group = stub.group_selected_layers(subset_name_from_ui) + groups_to_create.append(group) - if create_group: - group = stub.create_group(subset_name) - groups.append(group) + if not groups_to_create and not top_layers_to_wrap: + group = stub.create_group(subset_name_from_ui) + groups_to_create.append(group) - for layer in layers: + # wrap each top level layer into separate new group + for layer in top_layers_to_wrap: stub.select_layers([layer]) group = stub.group_selected_layers(layer.name) - groups.append(group) + groups_to_create.append(group) - for group in groups: - long_names = [] - group.name = self._clean_highlights(stub, group.name) + creating_multiple_groups = len(groups_to_create) > 1 + for group in groups_to_create: + subset_name = subset_name_from_ui # reset to name from creator UI + layer_names_in_hierarchy = [] + created_group_name = self._clean_highlights(stub, group.name) - if len(groups) > 1: + if creating_multiple_groups: + # concatenate with layer name to differentiate subsets subset_name += group.name.title().replace(" ", "") if group.long_name: for directory in group.long_name[::-1]: name = self._clean_highlights(stub, directory) - long_names.append(name) + layer_names_in_hierarchy.append(name) data.update({"subset": subset_name}) - data.update({"layer": group}) data.update({"members": [str(group.id)]}) - data.update({"long_name": "_".join(long_names)}) + data.update({"long_name": "_".join(layer_names_in_hierarchy)}) new_instance = CreatedInstance(self.family, subset_name, data, self) @@ -97,8 +84,8 @@ class ImageCreator(Creator): new_instance.data_to_store()) self._add_instance_to_context(new_instance) # reusing existing group, need to rename afterwards - if not create_group: - stub.rename_layer(group.id, stub.PUBLISH_ICON + group.name) + if not create_empty_group: + stub.rename_layer(group.id, stub.PUBLISH_ICON + created_group_name) def update_instances(self, update_list): self.log.debug("update_list:: {}".format(update_list)) @@ -120,7 +107,7 @@ class ImageCreator(Creator): def get_pre_create_attr_defs(self): output = [ - lib.BoolDef("use_selection", default=True, label="Use selection"), + lib.BoolDef("use_selection", default=True, label="Create only for selected"), lib.BoolDef("create_multiple", default=True, label="Create separate instance for each selected") From 9be8885bc3845d3fd5a4aed6b9558a3758e38a8b Mon Sep 17 00:00:00 2001 From: Pype Club Date: Wed, 23 Mar 2022 10:47:50 +0100 Subject: [PATCH 082/357] OP-2766 - added support for new publisher NP already collected instances, need to only add layer information --- .../plugins/publish/collect_instances.py | 19 +++++++++---------- 1 file 
changed, 9 insertions(+), 10 deletions(-) diff --git a/openpype/hosts/photoshop/plugins/publish/collect_instances.py b/openpype/hosts/photoshop/plugins/publish/collect_instances.py index d506b9a5bf..1b30fb053a 100644 --- a/openpype/hosts/photoshop/plugins/publish/collect_instances.py +++ b/openpype/hosts/photoshop/plugins/publish/collect_instances.py @@ -1,3 +1,4 @@ +import pprint import pyblish.api from openpype.hosts.photoshop import api as photoshop @@ -21,9 +22,10 @@ class CollectInstances(pyblish.api.ContextPlugin): } def process(self, context): - if context.data.get("newPublishing"): - self.log.debug("Not applicable for New Publisher, skip") - return + instance_by_layer_id = {} + for instance in context: + if instance.data["family"] == "image" and instance.data.get("members"): + instance_by_layer_id[str(instance.data["members"][0])] = instance stub = photoshop.stub() layers = stub.get_layers() @@ -40,13 +42,10 @@ class CollectInstances(pyblish.api.ContextPlugin): if "container" in layer_data["id"]: continue - # child_layers = [*layer.Layers] - # self.log.debug("child_layers {}".format(child_layers)) - # if not child_layers: - # self.log.info("%s skipped, it was empty." % layer.Name) - # continue + instance = instance_by_layer_id.get(str(layer.id)) + if instance is None: + instance = context.create_instance(layer_data["subset"]) - instance = context.create_instance(layer_data["subset"]) instance.data["layer"] = layer instance.data.update(layer_data) instance.data["families"] = self.families_mapping[ @@ -58,7 +57,7 @@ class CollectInstances(pyblish.api.ContextPlugin): # Produce diagnostic message for any graphical # user interface interested in visualising it. self.log.info("Found: \"%s\" " % instance.data["name"]) - self.log.info("instance: {} ".format(instance.data)) + self.log.info("instance: {} ".format(pprint.pformat(instance.data, indent=4))) if len(instance_names) != len(set(instance_names)): self.log.warning("Duplicate instances found. " + From 11a9ad18738ffa9ff036722f699d715663d3fb53 Mon Sep 17 00:00:00 2001 From: Pype Club Date: Wed, 23 Mar 2022 10:53:58 +0100 Subject: [PATCH 083/357] OP-2766 - refactor --- .../plugins/publish/collect_instances.py | 38 ++++++++++--------- 1 file changed, 21 insertions(+), 17 deletions(-) diff --git a/openpype/hosts/photoshop/plugins/publish/collect_instances.py b/openpype/hosts/photoshop/plugins/publish/collect_instances.py index 1b30fb053a..9449662067 100644 --- a/openpype/hosts/photoshop/plugins/publish/collect_instances.py +++ b/openpype/hosts/photoshop/plugins/publish/collect_instances.py @@ -7,8 +7,8 @@ from openpype.hosts.photoshop import api as photoshop class CollectInstances(pyblish.api.ContextPlugin): """Gather instances by LayerSet and file metadata - This collector takes into account assets that are associated with - an LayerSet and marked with a unique identifier; + Collects publishable instances from file metadata or enhance + already collected by creator (family == "image"). 
Identifier: id (str): "pyblish.avalon.instance" @@ -24,40 +24,44 @@ class CollectInstances(pyblish.api.ContextPlugin): def process(self, context): instance_by_layer_id = {} for instance in context: - if instance.data["family"] == "image" and instance.data.get("members"): - instance_by_layer_id[str(instance.data["members"][0])] = instance + if ( + instance.data["family"] == "image" and + instance.data.get("members")): + layer_id = str(instance.data["members"][0]) + instance_by_layer_id[layer_id] = instance stub = photoshop.stub() - layers = stub.get_layers() + layer_items = stub.get_layers() layers_meta = stub.get_layers_metadata() instance_names = [] - for layer in layers: - layer_data = stub.read(layer, layers_meta) + for layer_item in layer_items: + layer_instance_data = stub.read(layer_item, layers_meta) # Skip layers without metadata. - if layer_data is None: + if layer_instance_data is None: continue # Skip containers. - if "container" in layer_data["id"]: + if "container" in layer_instance_data["id"]: continue - instance = instance_by_layer_id.get(str(layer.id)) + instance = instance_by_layer_id.get(str(layer_item.id)) if instance is None: - instance = context.create_instance(layer_data["subset"]) + instance = context.create_instance(layer_instance_data["subset"]) - instance.data["layer"] = layer - instance.data.update(layer_data) + instance.data["layer"] = layer_item + instance.data.update(layer_instance_data) instance.data["families"] = self.families_mapping[ - layer_data["family"] + layer_instance_data["family"] ] - instance.data["publish"] = layer.visible - instance_names.append(layer_data["subset"]) + instance.data["publish"] = layer_item.visible + instance_names.append(layer_instance_data["subset"]) # Produce diagnostic message for any graphical # user interface interested in visualising it. self.log.info("Found: \"%s\" " % instance.data["name"]) - self.log.info("instance: {} ".format(pprint.pformat(instance.data, indent=4))) + self.log.info("instance: {} ".format( + pprint.pformat(instance.data, indent=4))) if len(instance_names) != len(set(instance_names)): self.log.warning("Duplicate instances found. " + From 755a6dabfd1ba5d1bb80000ab69140d1a54d9c3d Mon Sep 17 00:00:00 2001 From: Pype Club Date: Wed, 23 Mar 2022 11:47:36 +0100 Subject: [PATCH 084/357] OP-2766 - added NP validators for subset names and uniqueness --- .../plugins/publish/help/validate_naming.xml | 21 +++++++++ .../publish/help/validate_unique_subsets.xml | 14 ++++++ .../plugins/publish/validate_naming.py | 47 +++++++++++-------- .../publish/validate_unique_subsets.py | 9 +++- 4 files changed, 71 insertions(+), 20 deletions(-) create mode 100644 openpype/hosts/photoshop/plugins/publish/help/validate_naming.xml create mode 100644 openpype/hosts/photoshop/plugins/publish/help/validate_unique_subsets.xml diff --git a/openpype/hosts/photoshop/plugins/publish/help/validate_naming.xml b/openpype/hosts/photoshop/plugins/publish/help/validate_naming.xml new file mode 100644 index 0000000000..5a1e266748 --- /dev/null +++ b/openpype/hosts/photoshop/plugins/publish/help/validate_naming.xml @@ -0,0 +1,21 @@ + + + +Subset name + +## Invalid subset or layer name + +Subset or layer name cannot contain specific characters (spaces etc) which could cause issue when subset name is used in a published file name. + {msg} + +### How to repair? + +You can fix this with "repair" button on the right. + + +### __Detailed Info__ (optional) + +Not all characters are available in a file names on all OS. 
Wrong characters could be configured in Settings. + + + \ No newline at end of file diff --git a/openpype/hosts/photoshop/plugins/publish/help/validate_unique_subsets.xml b/openpype/hosts/photoshop/plugins/publish/help/validate_unique_subsets.xml new file mode 100644 index 0000000000..4b47973193 --- /dev/null +++ b/openpype/hosts/photoshop/plugins/publish/help/validate_unique_subsets.xml @@ -0,0 +1,14 @@ + + + +Subset not unique + +## Non unique subset name found + + Non unique subset names: '{non_unique}' +### How to repair? + +Remove offending instance, rename it to have unique name. Maybe layer name wasn't used for multiple instances? + + + \ No newline at end of file diff --git a/openpype/hosts/photoshop/plugins/publish/validate_naming.py b/openpype/hosts/photoshop/plugins/publish/validate_naming.py index b40e44d016..c0ca4cfb69 100644 --- a/openpype/hosts/photoshop/plugins/publish/validate_naming.py +++ b/openpype/hosts/photoshop/plugins/publish/validate_naming.py @@ -2,6 +2,7 @@ import re import pyblish.api import openpype.api +from openpype.pipeline import PublishXmlValidationError from openpype.hosts.photoshop import api as photoshop @@ -22,32 +23,33 @@ class ValidateNamingRepair(pyblish.api.Action): failed.append(result["instance"]) invalid_chars, replace_char = plugin.get_replace_chars() - self.log.info("{} --- {}".format(invalid_chars, replace_char)) + self.log.debug("{} --- {}".format(invalid_chars, replace_char)) # Apply pyblish.logic to get the instances for the plug-in instances = pyblish.api.instances_by_plugin(failed, plugin) stub = photoshop.stub() for instance in instances: - self.log.info("validate_naming instance {}".format(instance)) - metadata = stub.read(instance[0]) - self.log.info("metadata instance {}".format(metadata)) - layer_name = None - if metadata.get("uuid"): - layer_data = stub.get_layer(metadata["uuid"]) - self.log.info("layer_data {}".format(layer_data)) - if layer_data: - layer_name = re.sub(invalid_chars, - replace_char, - layer_data.name) + self.log.debug("validate_naming instance {}".format(instance)) + current_layer_state = stub.get_layer(instance.data["layer"].id) + self.log.debug("current_layer_state instance {}".format(current_layer_state)) - stub.rename_layer(instance.data["uuid"], layer_name) + layer_meta = stub.read(current_layer_state) + instance_id = layer_meta.get("instance_id") or layer_meta.get("uuid") + if not instance_id: + self.log.warning("Unable to repair, cannot find layer") + continue + + layer_name = re.sub(invalid_chars, + replace_char, + current_layer_state.name) + + stub.rename_layer(current_layer_state.id, layer_name) subset_name = re.sub(invalid_chars, replace_char, - instance.data["name"]) + instance.data["subset"]) - instance[0].Name = layer_name or subset_name - metadata["subset"] = subset_name - stub.imprint(instance[0], metadata) + layer_meta["subset"] = subset_name + stub.imprint(instance_id, layer_meta) return True @@ -72,11 +74,18 @@ class ValidateNaming(pyblish.api.InstancePlugin): help_msg = ' Use Repair action (A) in Pyblish to fix it.' 
msg = "Name \"{}\" is not allowed.{}".format(instance.data["name"], help_msg) - assert not re.search(self.invalid_chars, instance.data["name"]), msg + + formatting_data = {"msg": msg} + if re.search(self.invalid_chars, instance.data["name"]): + raise PublishXmlValidationError(self, msg, + formatting_data=formatting_data) msg = "Subset \"{}\" is not allowed.{}".format(instance.data["subset"], help_msg) - assert not re.search(self.invalid_chars, instance.data["subset"]), msg + formatting_data = {"msg": msg} + if re.search(self.invalid_chars, instance.data["subset"]): + raise PublishXmlValidationError(self, msg, + formatting_data=formatting_data) @classmethod def get_replace_chars(cls): diff --git a/openpype/hosts/photoshop/plugins/publish/validate_unique_subsets.py b/openpype/hosts/photoshop/plugins/publish/validate_unique_subsets.py index 40abfb1bbd..01f2323157 100644 --- a/openpype/hosts/photoshop/plugins/publish/validate_unique_subsets.py +++ b/openpype/hosts/photoshop/plugins/publish/validate_unique_subsets.py @@ -1,6 +1,7 @@ import collections import pyblish.api import openpype.api +from openpype.pipeline import PublishXmlValidationError class ValidateSubsetUniqueness(pyblish.api.ContextPlugin): @@ -27,4 +28,10 @@ class ValidateSubsetUniqueness(pyblish.api.ContextPlugin): if count > 1] msg = ("Instance subset names {} are not unique. ".format(non_unique) + "Remove duplicates via SubsetManager.") - assert not non_unique, msg + formatting_data = { + "non_unique": ",".join(non_unique) + } + + if non_unique: + raise PublishXmlValidationError(self, msg, + formatting_data=formatting_data) From 85b49da44e14ec82a93e43bd4f8f1571b403627a Mon Sep 17 00:00:00 2001 From: Pype Club Date: Wed, 23 Mar 2022 12:11:48 +0100 Subject: [PATCH 085/357] OP-2766 - skip non active instances --- .../plugins/publish/collect_instances.py | 17 ++++++++++------- 1 file changed, 10 insertions(+), 7 deletions(-) diff --git a/openpype/hosts/photoshop/plugins/publish/collect_instances.py b/openpype/hosts/photoshop/plugins/publish/collect_instances.py index 9449662067..52a8310594 100644 --- a/openpype/hosts/photoshop/plugins/publish/collect_instances.py +++ b/openpype/hosts/photoshop/plugins/publish/collect_instances.py @@ -35,27 +35,30 @@ class CollectInstances(pyblish.api.ContextPlugin): layers_meta = stub.get_layers_metadata() instance_names = [] for layer_item in layer_items: - layer_instance_data = stub.read(layer_item, layers_meta) + layer_meta_data = stub.read(layer_item, layers_meta) # Skip layers without metadata. - if layer_instance_data is None: + if layer_meta_data is None: continue # Skip containers. - if "container" in layer_instance_data["id"]: + if "container" in layer_meta_data["id"]: + continue + + if not layer_meta_data.get("active", True): # active might not be in legacy meta continue instance = instance_by_layer_id.get(str(layer_item.id)) if instance is None: - instance = context.create_instance(layer_instance_data["subset"]) + instance = context.create_instance(layer_meta_data["subset"]) instance.data["layer"] = layer_item - instance.data.update(layer_instance_data) + instance.data.update(layer_meta_data) instance.data["families"] = self.families_mapping[ - layer_instance_data["family"] + layer_meta_data["family"] ] instance.data["publish"] = layer_item.visible - instance_names.append(layer_instance_data["subset"]) + instance_names.append(layer_meta_data["subset"]) # Produce diagnostic message for any graphical # user interface interested in visualising it. 
From d211471ea099f53d8349f33d7e20ad29da7f178c Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 23 Mar 2022 13:49:50 +0100 Subject: [PATCH 086/357] OP-2766 - Hound --- openpype/hosts/photoshop/api/pipeline.py | 4 ++-- .../photoshop/plugins/create/create_image.py | 15 +++++++++------ .../plugins/publish/collect_instances.py | 3 ++- .../photoshop/plugins/publish/validate_naming.py | 5 +++-- 4 files changed, 16 insertions(+), 11 deletions(-) diff --git a/openpype/hosts/photoshop/api/pipeline.py b/openpype/hosts/photoshop/api/pipeline.py index 1b471ef1d3..db40e456db 100644 --- a/openpype/hosts/photoshop/api/pipeline.py +++ b/openpype/hosts/photoshop/api/pipeline.py @@ -151,7 +151,7 @@ def list_instances(): layers_meta = stub.get_layers_metadata() if layers_meta: for instance in layers_meta: - if instance.get("id") == "pyblish.avalon.instance": # TODO only this way? + if instance.get("id") == "pyblish.avalon.instance": instances.append(instance) return instances @@ -266,4 +266,4 @@ def get_context_title(): project_name = avalon.api.Session["AVALON_PROJECT"] asset_name = avalon.api.Session["AVALON_ASSET"] task_name = avalon.api.Session["AVALON_TASK"] - return "{}/{}/{}".format(project_name, asset_name, task_name) \ No newline at end of file + return "{}/{}/{}".format(project_name, asset_name, task_name) diff --git a/openpype/hosts/photoshop/plugins/create/create_image.py b/openpype/hosts/photoshop/plugins/create/create_image.py index cd7e219bd0..e332cfd9c2 100644 --- a/openpype/hosts/photoshop/plugins/create/create_image.py +++ b/openpype/hosts/photoshop/plugins/create/create_image.py @@ -39,7 +39,9 @@ class ImageCreator(Creator): if pre_create_data.get("use_selection"): only_single_item_selected = len(top_level_selected_items) == 1 for selected_item in top_level_selected_items: - if only_single_item_selected or pre_create_data.get("create_multiple"): + if ( + only_single_item_selected or + pre_create_data.get("create_multiple")): if selected_item.group: groups_to_create.append(selected_item) else: @@ -85,13 +87,15 @@ class ImageCreator(Creator): self._add_instance_to_context(new_instance) # reusing existing group, need to rename afterwards if not create_empty_group: - stub.rename_layer(group.id, stub.PUBLISH_ICON + created_group_name) + stub.rename_layer(group.id, + stub.PUBLISH_ICON + created_group_name) def update_instances(self, update_list): self.log.debug("update_list:: {}".format(update_list)) for created_inst, _changes in update_list: if created_inst.get("layer"): - created_inst.pop("layer") # not storing PSItem layer to metadata + # not storing PSItem layer to metadata + created_inst.pop("layer") api.stub().imprint(created_inst.get("instance_id"), created_inst.data_to_store()) @@ -107,7 +111,8 @@ class ImageCreator(Creator): def get_pre_create_attr_defs(self): output = [ - lib.BoolDef("use_selection", default=True, label="Create only for selected"), + lib.BoolDef("use_selection", default=True, + label="Create only for selected"), lib.BoolDef("create_multiple", default=True, label="Create separate instance for each selected") @@ -138,5 +143,3 @@ class ImageCreator(Creator): def _clean_highlights(self, stub, item): return item.replace(stub.PUBLISH_ICON, '').replace(stub.LOADED_ICON, '') - - diff --git a/openpype/hosts/photoshop/plugins/publish/collect_instances.py b/openpype/hosts/photoshop/plugins/publish/collect_instances.py index 52a8310594..a7bb2d40c7 100644 --- a/openpype/hosts/photoshop/plugins/publish/collect_instances.py +++ 
b/openpype/hosts/photoshop/plugins/publish/collect_instances.py @@ -45,7 +45,8 @@ class CollectInstances(pyblish.api.ContextPlugin): if "container" in layer_meta_data["id"]: continue - if not layer_meta_data.get("active", True): # active might not be in legacy meta + # active might not be in legacy meta + if not layer_meta_data.get("active", True): continue instance = instance_by_layer_id.get(str(layer_item.id)) diff --git a/openpype/hosts/photoshop/plugins/publish/validate_naming.py b/openpype/hosts/photoshop/plugins/publish/validate_naming.py index c0ca4cfb69..bcae24108c 100644 --- a/openpype/hosts/photoshop/plugins/publish/validate_naming.py +++ b/openpype/hosts/photoshop/plugins/publish/validate_naming.py @@ -31,10 +31,11 @@ class ValidateNamingRepair(pyblish.api.Action): for instance in instances: self.log.debug("validate_naming instance {}".format(instance)) current_layer_state = stub.get_layer(instance.data["layer"].id) - self.log.debug("current_layer_state instance {}".format(current_layer_state)) + self.log.debug("current_layer{}".format(current_layer_state)) layer_meta = stub.read(current_layer_state) - instance_id = layer_meta.get("instance_id") or layer_meta.get("uuid") + instance_id = (layer_meta.get("instance_id") or + layer_meta.get("uuid")) if not instance_id: self.log.warning("Unable to repair, cannot find layer") continue From 49d26ef9593271a6b36dfbdd353f7bed017478ad Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 23 Mar 2022 14:11:39 +0100 Subject: [PATCH 087/357] OP-2766 - changed imports after refactor of attribute definitions --- openpype/hosts/photoshop/plugins/create/create_image.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/photoshop/plugins/create/create_image.py b/openpype/hosts/photoshop/plugins/create/create_image.py index e332cfd9c2..12898bb7f4 100644 --- a/openpype/hosts/photoshop/plugins/create/create_image.py +++ b/openpype/hosts/photoshop/plugins/create/create_image.py @@ -1,9 +1,9 @@ from avalon import api as avalon_api from openpype.hosts.photoshop import api +from openpype.lib import BoolDef from openpype.pipeline import ( Creator, - CreatedInstance, - lib + CreatedInstance ) @@ -111,9 +111,9 @@ class ImageCreator(Creator): def get_pre_create_attr_defs(self): output = [ - lib.BoolDef("use_selection", default=True, + BoolDef("use_selection", default=True, label="Create only for selected"), - lib.BoolDef("create_multiple", + BoolDef("create_multiple", default=True, label="Create separate instance for each selected") ] From 7273fd44daa2ebb266c9f95f9beb0cbfad53258a Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 23 Mar 2022 14:14:35 +0100 Subject: [PATCH 088/357] OP-2765 - changed imports after refactor of attribute definitions --- .../hosts/aftereffects/plugins/create/create_render.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/openpype/hosts/aftereffects/plugins/create/create_render.py b/openpype/hosts/aftereffects/plugins/create/create_render.py index 1eff992fe0..826d438fa3 100644 --- a/openpype/hosts/aftereffects/plugins/create/create_render.py +++ b/openpype/hosts/aftereffects/plugins/create/create_render.py @@ -1,11 +1,11 @@ from avalon import api as avalon_api from openpype import resources +from openpype.lib import BoolDef, UISeparatorDef from openpype.hosts.aftereffects import api from openpype.pipeline import ( Creator, CreatedInstance, - lib, CreatorError ) @@ -86,13 +86,13 @@ class RenderCreator(Creator): ] def get_instance_attr_defs(self): - return 
[lib.BoolDef("farm", label="Render on farm")] + return [BoolDef("farm", label="Render on farm")] def get_pre_create_attr_defs(self): output = [ - lib.BoolDef("use_selection", default=True, label="Use selection"), - lib.UISeparatorDef(), - lib.BoolDef("farm", label="Render on farm") + BoolDef("use_selection", default=True, label="Use selection"), + UISeparatorDef(), + BoolDef("farm", label="Render on farm") ] return output From c829cc19ac675bbc9752980b805b69964cccb6b7 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 23 Mar 2022 14:15:16 +0100 Subject: [PATCH 089/357] OP-2765 - changed default variant --- openpype/hosts/aftereffects/plugins/create/create_render.py | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/openpype/hosts/aftereffects/plugins/create/create_render.py b/openpype/hosts/aftereffects/plugins/create/create_render.py index 826d438fa3..c43ada84b5 100644 --- a/openpype/hosts/aftereffects/plugins/create/create_render.py +++ b/openpype/hosts/aftereffects/plugins/create/create_render.py @@ -79,11 +79,7 @@ class RenderCreator(Creator): self._add_instance_to_context(new_instance) def get_default_variants(self): - return [ - "myVariant", - "variantTwo", - "different_variant" - ] + return ["Main"] def get_instance_attr_defs(self): return [BoolDef("farm", label="Render on farm")] From 1534c878d2e57dad50823d52d434feb2cecd3f10 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 23 Mar 2022 14:33:49 +0100 Subject: [PATCH 090/357] OP-2766 - Hound --- openpype/hosts/photoshop/plugins/create/create_image.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/photoshop/plugins/create/create_image.py b/openpype/hosts/photoshop/plugins/create/create_image.py index 12898bb7f4..c2fe8b6c78 100644 --- a/openpype/hosts/photoshop/plugins/create/create_image.py +++ b/openpype/hosts/photoshop/plugins/create/create_image.py @@ -112,10 +112,10 @@ class ImageCreator(Creator): def get_pre_create_attr_defs(self): output = [ BoolDef("use_selection", default=True, - label="Create only for selected"), + label="Create only for selected"), BoolDef("create_multiple", - default=True, - label="Create separate instance for each selected") + default=True, + label="Create separate instance for each selected") ] return output From c7039e91f8665b1a3f47e317e5b807faee03783c Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 23 Mar 2022 15:20:43 +0100 Subject: [PATCH 091/357] OP-2766 - return back uuid for legacy creator --- openpype/hosts/photoshop/plugins/create/create_legacy_image.py | 1 + 1 file changed, 1 insertion(+) diff --git a/openpype/hosts/photoshop/plugins/create/create_legacy_image.py b/openpype/hosts/photoshop/plugins/create/create_legacy_image.py index 6fa455fa03..9736471a26 100644 --- a/openpype/hosts/photoshop/plugins/create/create_legacy_image.py +++ b/openpype/hosts/photoshop/plugins/create/create_legacy_image.py @@ -91,6 +91,7 @@ class CreateImage(create.LegacyCreator): long_names.append(name) self.data.update({"subset": subset_name}) + self.data.update({"uuid": str(group.id)}) self.data.update({"members": [str(group.id)]}) self.data.update({"long_name": "_".join(long_names)}) stub.imprint(group, self.data) From 8964fdb754ff837028f032d6bafbdc3ef160aa31 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 23 Mar 2022 17:11:25 +0100 Subject: [PATCH 092/357] OP-2766 - clean up import --- openpype/hosts/aftereffects/api/pipeline.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/aftereffects/api/pipeline.py 
b/openpype/hosts/aftereffects/api/pipeline.py index 2a213e1b59..e14b8adc8c 100644 --- a/openpype/hosts/aftereffects/api/pipeline.py +++ b/openpype/hosts/aftereffects/api/pipeline.py @@ -11,12 +11,12 @@ from openpype import lib from openpype.api import Logger from openpype.pipeline import ( LegacyCreator, + BaseCreator, register_loader_plugin_path, deregister_loader_plugin_path, AVALON_CONTAINER_ID, ) import openpype.hosts.aftereffects -from openpype.pipeline import BaseCreator from openpype.lib import register_event_callback from .launch_logic import get_stub From 0858ee0ce8483c123a67525342fba6f782c15ae2 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 23 Mar 2022 17:15:46 +0100 Subject: [PATCH 093/357] OP-2765 - remove wrong logging function --- .../aftereffects/plugins/publish/collect_workfile.py | 4 ---- openpype/lib/__init__.py | 3 +-- openpype/lib/log.py | 11 ----------- 3 files changed, 1 insertion(+), 17 deletions(-) diff --git a/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py b/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py index 67f037e6e6..f285ae49e4 100644 --- a/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py +++ b/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py @@ -2,8 +2,6 @@ import os from avalon import api import pyblish.api -from openpype.lib import debug_log_instance - class CollectWorkfile(pyblish.api.ContextPlugin): """ Adds the AE render instances """ @@ -76,5 +74,3 @@ class CollectWorkfile(pyblish.api.ContextPlugin): } instance.data["representations"].append(representation) - - debug_log_instance(self.log, "Workfile instance", instance) diff --git a/openpype/lib/__init__.py b/openpype/lib/__init__.py index f02706e44f..e8b6d18f4e 100644 --- a/openpype/lib/__init__.py +++ b/openpype/lib/__init__.py @@ -63,7 +63,7 @@ from .execute import ( path_to_subprocess_arg, CREATE_NO_WINDOW ) -from .log import PypeLogger, timeit, debug_log_instance +from .log import PypeLogger, timeit from .path_templates import ( merge_dict, @@ -369,7 +369,6 @@ __all__ = [ "OpenPypeMongoConnection", "timeit", - "debug_log_instance", "is_overlapping_otio_ranges", "otio_range_with_handles", diff --git a/openpype/lib/log.py b/openpype/lib/log.py index 991dc3349a..c963807014 100644 --- a/openpype/lib/log.py +++ b/openpype/lib/log.py @@ -498,14 +498,3 @@ def timeit(method): print('%r %2.2f ms' % (method.__name__, (te - ts) * 1000)) return result return timed - - -def debug_log_instance(logger, msg, instance): - """Helper function to write instance.data as json""" - def _default_json(value): - return str(value) - - logger.debug(msg) - logger.debug( - json.dumps(instance.data, indent=4, default=_default_json) - ) From 91879de0ad4ed7859b4fa330bcc03685fd3d39ad Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 23 Mar 2022 17:24:24 +0100 Subject: [PATCH 094/357] OP-2765 - revert of unwanted commit --- openpype/modules/log_viewer/log_view_module.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/modules/log_viewer/log_view_module.py b/openpype/modules/log_viewer/log_view_module.py index 5e141f6aa2..14be6b392e 100644 --- a/openpype/modules/log_viewer/log_view_module.py +++ b/openpype/modules/log_viewer/log_view_module.py @@ -8,7 +8,7 @@ class LogViewModule(OpenPypeModule, ITrayModule): def initialize(self, modules_settings): logging_settings = modules_settings[self.name] - self.enabled = False # logging_settings["enabled"] + self.enabled = logging_settings["enabled"] # Tray attributes self.window = None From 
bfbb2061bcbe900a05ac59ff1e4894f1ae4cefa5 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 23 Mar 2022 17:25:34 +0100 Subject: [PATCH 095/357] OP-2765 - revert of unwanted commit --- .../deadline/repository/custom/plugins/GlobalJobPreLoad.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py b/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py index ed932d35b9..eeb1f7744c 100644 --- a/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py +++ b/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py @@ -85,9 +85,7 @@ def inject_openpype_environment(deadlinePlugin): with open(export_url) as fp: contents = json.load(fp) for key, value in contents.items(): - print("key:: {}".format(key)) - if key != 'NUMBER_OF_PROCESSORS': - deadlinePlugin.SetProcessEnvironmentVariable(key, value) + deadlinePlugin.SetProcessEnvironmentVariable(key, value) print(">>> Removing temporary file") os.remove(export_url) From 16c919e93d0d65af801a10dff431058ec1da8203 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 23 Mar 2022 17:26:40 +0100 Subject: [PATCH 096/357] OP-2765 - revert of unwanted commit --- openpype/hosts/harmony/plugins/publish/extract_render.py | 1 - 1 file changed, 1 deletion(-) diff --git a/openpype/hosts/harmony/plugins/publish/extract_render.py b/openpype/hosts/harmony/plugins/publish/extract_render.py index 49133d9608..2f8169248e 100644 --- a/openpype/hosts/harmony/plugins/publish/extract_render.py +++ b/openpype/hosts/harmony/plugins/publish/extract_render.py @@ -41,7 +41,6 @@ class ExtractRender(pyblish.api.InstancePlugin): func = """function %s(args) { node.setTextAttr(args[0], "DRAWING_NAME", 1, args[1]); - node.setTextAttr(args[0], 'MOVIE_PATH', 1, args[1]); } %s """ % (sig, sig) From 59f2adbf341334fcb0ef239ce082f2c50bfe6a43 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 23 Mar 2022 17:27:48 +0100 Subject: [PATCH 097/357] OP-2765 - revert of unwanted commit --- openpype/lib/log.py | 1 - 1 file changed, 1 deletion(-) diff --git a/openpype/lib/log.py b/openpype/lib/log.py index c963807014..f33385e0ba 100644 --- a/openpype/lib/log.py +++ b/openpype/lib/log.py @@ -23,7 +23,6 @@ import time import traceback import threading import copy -import json from . import Terminal from .mongo import ( From 881ec1579ec82460734e9bdf93e9d5c968525b1d Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 23 Mar 2022 18:10:17 +0100 Subject: [PATCH 098/357] OP-2765 - fix exception if no file opened Should be refactored, merged 2 functions in code and extension. 
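One possible shape for that merge, sketched here for reference only (not part of this patch): it assumes the extension keeps signalling "no open document" by raising ValueError, the helper name _safe_document_query is invented for illustration, and get_stub comes from .launch_logic as in the existing module.

    import os

    from .launch_logic import get_stub


    def _safe_document_query(getter_name):
        """Call a stub getter, returning None when no document is open."""
        try:
            return getattr(get_stub(), getter_name)()
        except ValueError:
            print("Nothing opened")
            return None


    def current_file():
        full_name = _safe_document_query("get_active_document_full_name")
        if full_name and full_name != "null":
            return os.path.normpath(full_name).replace("\\", "/")
        return None


    def _active_document():
        return _safe_document_query("get_active_document_name")

With a single guard like this, the matching cleanup on the extension side would also only need one "no document" code path.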
--- openpype/hosts/aftereffects/api/workio.py | 23 +++++++++++++--------- openpype/hosts/aftereffects/api/ws_stub.py | 2 +- 2 files changed, 15 insertions(+), 10 deletions(-) diff --git a/openpype/hosts/aftereffects/api/workio.py b/openpype/hosts/aftereffects/api/workio.py index 5a8f86ead5..d6c732285a 100644 --- a/openpype/hosts/aftereffects/api/workio.py +++ b/openpype/hosts/aftereffects/api/workio.py @@ -5,14 +5,6 @@ from openpype.pipeline import HOST_WORKFILE_EXTENSIONS from .launch_logic import get_stub -def _active_document(): - document_name = get_stub().get_active_document_name() - if not document_name: - return None - - return document_name - - def file_extensions(): return HOST_WORKFILE_EXTENSIONS["aftereffects"] @@ -39,7 +31,8 @@ def current_file(): full_name = get_stub().get_active_document_full_name() if full_name and full_name != "null": return os.path.normpath(full_name).replace("\\", "/") - except Exception: + except ValueError: + print("Nothing opened") pass return None @@ -47,3 +40,15 @@ def current_file(): def work_root(session): return os.path.normpath(session["AVALON_WORKDIR"]).replace("\\", "/") + + +def _active_document(): + # TODO merge with current_file - even in extension + document_name = None + try: + document_name = get_stub().get_active_document_name() + except ValueError: + print("Nothing opened") + pass + + return document_name diff --git a/openpype/hosts/aftereffects/api/ws_stub.py b/openpype/hosts/aftereffects/api/ws_stub.py index 1dfea697a1..9a6462fcd4 100644 --- a/openpype/hosts/aftereffects/api/ws_stub.py +++ b/openpype/hosts/aftereffects/api/ws_stub.py @@ -171,7 +171,7 @@ class AfterEffectsServerStub(): def get_active_document_full_name(self): """ - Returns just a name of active document via ws call + Returns absolute path of active document via ws call Returns(string): file name """ res = self.websocketserver.call(self.client.call( From 41d54727529b8f2b8a1580fd455616cbe5905da7 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 25 Mar 2022 11:50:32 +0100 Subject: [PATCH 099/357] OP-2765 - implemented support for optional validation in new publisher --- .../plugins/publish/validate_scene_settings.py | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/aftereffects/plugins/publish/validate_scene_settings.py b/openpype/hosts/aftereffects/plugins/publish/validate_scene_settings.py index 0753e3c09a..14e224fdc2 100644 --- a/openpype/hosts/aftereffects/plugins/publish/validate_scene_settings.py +++ b/openpype/hosts/aftereffects/plugins/publish/validate_scene_settings.py @@ -5,11 +5,15 @@ import re import pyblish.api -from openpype.pipeline import PublishXmlValidationError +from openpype.pipeline import ( + PublishXmlValidationError, + OptionalPyblishPluginMixin +) from openpype.hosts.aftereffects.api import get_asset_settings -class ValidateSceneSettings(pyblish.api.InstancePlugin): +class ValidateSceneSettings(OptionalPyblishPluginMixin, + pyblish.api.InstancePlugin): """ Ensures that Composition Settings (right mouse on comp) are same as in FTrack on task. 
@@ -59,6 +63,10 @@ class ValidateSceneSettings(pyblish.api.InstancePlugin): def process(self, instance): """Plugin entry point.""" + # Skip the instance if is not active by data on the instance + if not self.is_active(instance.data): + return + expected_settings = get_asset_settings() self.log.info("config from DB::{}".format(expected_settings)) From e5f605b1236893c9917a3ea2931f6f3e75650f27 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 25 Mar 2022 11:51:57 +0100 Subject: [PATCH 100/357] OP-2765 - render.farm is in families not in family Better handling of potentially multiple instances. (Still requiring that there is only one publishable composition at the moment.) --- openpype/hosts/aftereffects/plugins/publish/collect_audio.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/aftereffects/plugins/publish/collect_audio.py b/openpype/hosts/aftereffects/plugins/publish/collect_audio.py index 80679725e6..8647ba498b 100644 --- a/openpype/hosts/aftereffects/plugins/publish/collect_audio.py +++ b/openpype/hosts/aftereffects/plugins/publish/collect_audio.py @@ -17,12 +17,11 @@ class CollectAudio(pyblish.api.ContextPlugin): def process(self, context): for instance in context: - if instance.data["family"] == 'render.farm': + if 'render.farm' in instance.data.get("families", []): comp_id = instance.data["comp_id"] if not comp_id: self.log.debug("No comp_id filled in instance") - # @iLLiCiTiT QUESTION Should return or continue? - return + continue context.data["audioFile"] = os.path.normpath( get_stub().get_audio_url(comp_id) ).replace("\\", "/") From 71cd7a3fb0aad57e191fb0c520b09921d668d542 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 25 Mar 2022 11:53:32 +0100 Subject: [PATCH 101/357] OP-2765 - added support for optional validations Asset and Task should be ALWAYS on instance, not on context. (Publishable instance might allow different context than "real context".) 
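To illustrate the reasoning above, a fragment as it would sit inside the collector's process() (sketch only, not the literal patch code; the instance data keys are the ones the new creators write, the local variable names are just for illustration, and deepcopy is the one the collector already uses):

    for inst in context:
        # Instances disabled in the publisher UI are skipped entirely
        if not inst.data.get("active", True):
            continue
        # Each instance carries its own context, so two render instances
        # in one workfile may target different assets or tasks.
        asset_name = inst.data["asset"]
        task_name = inst.data["task"]
        anatomy_data = deepcopy(inst.data["anatomyData"])

This is also why RenderInstance gains a dedicated "task" attribute below instead of relying on AVALON_TASK from the environment.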
--- .../plugins/publish/collect_render.py | 18 +++++++++++------- openpype/lib/abstract_collect_render.py | 1 + 2 files changed, 12 insertions(+), 7 deletions(-) diff --git a/openpype/hosts/aftereffects/plugins/publish/collect_render.py b/openpype/hosts/aftereffects/plugins/publish/collect_render.py index aa5bc58ac2..24d08b343e 100644 --- a/openpype/hosts/aftereffects/plugins/publish/collect_render.py +++ b/openpype/hosts/aftereffects/plugins/publish/collect_render.py @@ -22,6 +22,7 @@ class AERenderInstance(RenderInstance): projectEntity = attr.ib(default=None) stagingDir = attr.ib(default=None) app_version = attr.ib(default=None) + publish_attributes = attr.ib(default=None) class CollectAERender(abstract_collect_render.AbstractCollectRender): @@ -50,16 +51,21 @@ class CollectAERender(abstract_collect_render.AbstractCollectRender): current_file = context.data["currentFile"] version = context.data["version"] - asset_entity = context.data["assetEntity"] + project_entity = context.data["projectEntity"] compositions = CollectAERender.get_stub().get_items(True) compositions_by_id = {item.id: item for item in compositions} for inst in context: + if not inst.data["active"]: + continue + family = inst.data["family"] if family not in ["render", "renderLocal"]: # legacy continue + asset_entity = inst.data["assetEntity"] + item_id = inst.data["members"][0] work_area_info = CollectAERender.get_stub().get_work_area( @@ -78,9 +84,6 @@ class CollectAERender(abstract_collect_render.AbstractCollectRender): fps = work_area_info.frameRate # TODO add resolution when supported by extension - if not inst.data["active"]: - continue - subset_name = inst.data["subset"] instance = AERenderInstance( family=family, @@ -90,7 +93,8 @@ class CollectAERender(abstract_collect_render.AbstractCollectRender): source=current_file, label="{} - {}".format(subset_name, family), subset=subset_name, - asset=context.data["assetEntity"]["name"], + asset=inst.data["asset"], + task=inst.data["task"], attachTo=False, setMembers='', publish=True, @@ -112,8 +116,8 @@ class CollectAERender(abstract_collect_render.AbstractCollectRender): toBeRenderedOn='deadline', fps=fps, app_version=app_version, - anatomyData=deepcopy(context.data["anatomyData"]), - context=context + anatomyData=deepcopy(inst.data["anatomyData"]), + publish_attributes=inst.data.get("publish_attributes") ) comp = compositions_by_id.get(int(item_id)) diff --git a/openpype/lib/abstract_collect_render.py b/openpype/lib/abstract_collect_render.py index 029bd3ec39..cce161b51c 100644 --- a/openpype/lib/abstract_collect_render.py +++ b/openpype/lib/abstract_collect_render.py @@ -30,6 +30,7 @@ class RenderInstance(object): source = attr.ib() # path to source scene file label = attr.ib() # label to show in GUI subset = attr.ib() # subset name + task = attr.ib() # task name asset = attr.ib() # asset name (AVALON_ASSET) attachTo = attr.ib() # subset name to attach render to setMembers = attr.ib() # list of nodes/members producing render output From 0506c38e00008d26eb8ce7b8391b6f53844efed3 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 25 Mar 2022 11:54:13 +0100 Subject: [PATCH 102/357] OP-2765 - cleaned up workfile collector --- .../plugins/publish/collect_workfile.py | 66 +++++++++---------- .../plugins/publish/submit_publish_job.py | 12 +++- 2 files changed, 43 insertions(+), 35 deletions(-) diff --git a/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py b/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py index f285ae49e4..ac552a6a5f 100644 --- 
a/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py +++ b/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py @@ -17,16 +17,37 @@ class CollectWorkfile(pyblish.api.ContextPlugin): existing_instance = instance break - task = api.Session["AVALON_TASK"] current_file = context.data["currentFile"] staging_dir = os.path.dirname(current_file) scene_file = os.path.basename(current_file) + if existing_instance is None: # old publish + instance = self._get_new_instance(context, scene_file) + else: + instance = existing_instance + + # creating representation + representation = { + 'name': 'aep', + 'ext': 'aep', + 'files': scene_file, + "stagingDir": staging_dir, + } + + instance.data["representations"].append(representation) + + def _get_new_instance(self, context, scene_file): + task = api.Session["AVALON_TASK"] version = context.data["version"] asset_entity = context.data["assetEntity"] project_entity = context.data["projectEntity"] - shared_instance_data = { + # workfile instance + family = "workfile" + subset = family + task.capitalize() # TOOD use method + + instance_data = { "asset": asset_entity["name"], + "task": task, "frameStart": asset_entity["data"]["frameStart"], "frameEnd": asset_entity["data"]["frameEnd"], "handleStart": asset_entity["data"]["handleStart"], @@ -40,37 +61,16 @@ class CollectWorkfile(pyblish.api.ContextPlugin): project_entity["data"]["resolutionHeight"]), "pixelAspect": 1, "step": 1, - "version": version + "version": version, + "subset": subset, + "label": scene_file, + "family": family, + "families": [family], + "representations": list() } - # workfile instance - family = "workfile" - subset = family + task.capitalize() - if existing_instance is None: # old publish - # Create instance - instance = context.create_instance(subset) + # Create instance + instance = context.create_instance(subset) + instance.data.update(instance_data) - # creating instance data - instance.data.update({ - "subset": subset, - "label": scene_file, - "family": family, - "families": [family], - "representations": list() - }) - - # adding basic script data - instance.data.update(shared_instance_data) - else: - instance = existing_instance - instance.data["publish"] = True # for DL - - # creating representation - representation = { - 'name': 'aep', - 'ext': 'aep', - 'files': scene_file, - "stagingDir": staging_dir, - } - - instance.data["representations"].append(representation) + return instance \ No newline at end of file diff --git a/openpype/modules/deadline/plugins/publish/submit_publish_job.py b/openpype/modules/deadline/plugins/publish/submit_publish_job.py index fad4d14ea0..f624f40635 100644 --- a/openpype/modules/deadline/plugins/publish/submit_publish_job.py +++ b/openpype/modules/deadline/plugins/publish/submit_publish_job.py @@ -392,6 +392,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): list of instances """ + self.log.info("!!!!! _create_instances_for_aov") task = os.environ["AVALON_TASK"] subset = instance_data["subset"] cameras = instance_data.get("cameras", []) @@ -454,6 +455,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): break if instance_data.get("multipartExr"): + self.log.info("!!!!! _create_instances_for_aov add multipartExr") preview = True new_instance = copy(instance_data) @@ -519,9 +521,10 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): """ representations = [] collections, remainders = clique.assemble(exp_files) - + self.log.info("!!!!! 
_get_representations") # create representation for every collected sequento ce for collection in collections: + self.log.info("!!!!! collection") ext = collection.tail.lstrip(".") preview = False # if filtered aov name is found in filename, toggle it for @@ -533,6 +536,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): aov, list(collection)[0] ): + self.log.info("!!!!! add preview") preview = True break @@ -582,6 +586,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): # add reminders as representations for remainder in remainders: + self.log.info("!!!!! remainder") ext = remainder.split(".")[-1] staging = os.path.dirname(remainder) @@ -602,7 +607,10 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): "files": os.path.basename(remainder), "stagingDir": os.path.dirname(remainder), } - if "render" in instance.get("families"): + is_render_type = set(["render"]).\ + intersection(instance.get("families")) + if is_render_type: + self.log.info("!!!!! is_render_type") rep.update({ "fps": instance.get("fps"), "tags": ["review"] From 2c20f6832dadcc85c1ae4fda23d952b7ae7d2c92 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 25 Mar 2022 11:59:53 +0100 Subject: [PATCH 103/357] Revert "OP-2765 - cleaned up workfile collector" This reverts commit 0506c38e --- .../plugins/publish/collect_workfile.py | 66 +++++++++---------- .../plugins/publish/submit_publish_job.py | 12 +--- 2 files changed, 35 insertions(+), 43 deletions(-) diff --git a/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py b/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py index ac552a6a5f..f285ae49e4 100644 --- a/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py +++ b/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py @@ -17,37 +17,16 @@ class CollectWorkfile(pyblish.api.ContextPlugin): existing_instance = instance break + task = api.Session["AVALON_TASK"] current_file = context.data["currentFile"] staging_dir = os.path.dirname(current_file) scene_file = os.path.basename(current_file) - if existing_instance is None: # old publish - instance = self._get_new_instance(context, scene_file) - else: - instance = existing_instance - - # creating representation - representation = { - 'name': 'aep', - 'ext': 'aep', - 'files': scene_file, - "stagingDir": staging_dir, - } - - instance.data["representations"].append(representation) - - def _get_new_instance(self, context, scene_file): - task = api.Session["AVALON_TASK"] version = context.data["version"] asset_entity = context.data["assetEntity"] project_entity = context.data["projectEntity"] - # workfile instance - family = "workfile" - subset = family + task.capitalize() # TOOD use method - - instance_data = { + shared_instance_data = { "asset": asset_entity["name"], - "task": task, "frameStart": asset_entity["data"]["frameStart"], "frameEnd": asset_entity["data"]["frameEnd"], "handleStart": asset_entity["data"]["handleStart"], @@ -61,16 +40,37 @@ class CollectWorkfile(pyblish.api.ContextPlugin): project_entity["data"]["resolutionHeight"]), "pixelAspect": 1, "step": 1, - "version": version, - "subset": subset, - "label": scene_file, - "family": family, - "families": [family], - "representations": list() + "version": version } - # Create instance - instance = context.create_instance(subset) - instance.data.update(instance_data) + # workfile instance + family = "workfile" + subset = family + task.capitalize() + if existing_instance is None: # old publish + # Create instance + instance = 
context.create_instance(subset) - return instance \ No newline at end of file + # creating instance data + instance.data.update({ + "subset": subset, + "label": scene_file, + "family": family, + "families": [family], + "representations": list() + }) + + # adding basic script data + instance.data.update(shared_instance_data) + else: + instance = existing_instance + instance.data["publish"] = True # for DL + + # creating representation + representation = { + 'name': 'aep', + 'ext': 'aep', + 'files': scene_file, + "stagingDir": staging_dir, + } + + instance.data["representations"].append(representation) diff --git a/openpype/modules/deadline/plugins/publish/submit_publish_job.py b/openpype/modules/deadline/plugins/publish/submit_publish_job.py index f624f40635..fad4d14ea0 100644 --- a/openpype/modules/deadline/plugins/publish/submit_publish_job.py +++ b/openpype/modules/deadline/plugins/publish/submit_publish_job.py @@ -392,7 +392,6 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): list of instances """ - self.log.info("!!!!! _create_instances_for_aov") task = os.environ["AVALON_TASK"] subset = instance_data["subset"] cameras = instance_data.get("cameras", []) @@ -455,7 +454,6 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): break if instance_data.get("multipartExr"): - self.log.info("!!!!! _create_instances_for_aov add multipartExr") preview = True new_instance = copy(instance_data) @@ -521,10 +519,9 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): """ representations = [] collections, remainders = clique.assemble(exp_files) - self.log.info("!!!!! _get_representations") + # create representation for every collected sequento ce for collection in collections: - self.log.info("!!!!! collection") ext = collection.tail.lstrip(".") preview = False # if filtered aov name is found in filename, toggle it for @@ -536,7 +533,6 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): aov, list(collection)[0] ): - self.log.info("!!!!! add preview") preview = True break @@ -586,7 +582,6 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): # add reminders as representations for remainder in remainders: - self.log.info("!!!!! remainder") ext = remainder.split(".")[-1] staging = os.path.dirname(remainder) @@ -607,10 +602,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): "files": os.path.basename(remainder), "stagingDir": os.path.dirname(remainder), } - is_render_type = set(["render"]).\ - intersection(instance.get("families")) - if is_render_type: - self.log.info("!!!!! 
is_render_type") + if "render" in instance.get("families"): rep.update({ "fps": instance.get("fps"), "tags": ["review"] From 349827b3a20a718130c214057081f0fdcaa9e41f Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 25 Mar 2022 12:00:37 +0100 Subject: [PATCH 104/357] OP-2765 - cleaned up workfile collector --- .../plugins/publish/collect_workfile.py | 66 +++++++++---------- 1 file changed, 33 insertions(+), 33 deletions(-) diff --git a/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py b/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py index f285ae49e4..93c7a448c6 100644 --- a/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py +++ b/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py @@ -17,16 +17,37 @@ class CollectWorkfile(pyblish.api.ContextPlugin): existing_instance = instance break - task = api.Session["AVALON_TASK"] current_file = context.data["currentFile"] staging_dir = os.path.dirname(current_file) scene_file = os.path.basename(current_file) + if existing_instance is None: # old publish + instance = self._get_new_instance(context, scene_file) + else: + instance = existing_instance + + # creating representation + representation = { + 'name': 'aep', + 'ext': 'aep', + 'files': scene_file, + "stagingDir": staging_dir, + } + + instance.data["representations"].append(representation) + + def _get_new_instance(self, context, scene_file): + task = api.Session["AVALON_TASK"] version = context.data["version"] asset_entity = context.data["assetEntity"] project_entity = context.data["projectEntity"] - shared_instance_data = { + # workfile instance + family = "workfile" + subset = family + task.capitalize() # TOOD use method + + instance_data = { "asset": asset_entity["name"], + "task": task, "frameStart": asset_entity["data"]["frameStart"], "frameEnd": asset_entity["data"]["frameEnd"], "handleStart": asset_entity["data"]["handleStart"], @@ -40,37 +61,16 @@ class CollectWorkfile(pyblish.api.ContextPlugin): project_entity["data"]["resolutionHeight"]), "pixelAspect": 1, "step": 1, - "version": version + "version": version, + "subset": subset, + "label": scene_file, + "family": family, + "families": [family], + "representations": list() } - # workfile instance - family = "workfile" - subset = family + task.capitalize() - if existing_instance is None: # old publish - # Create instance - instance = context.create_instance(subset) + # Create instance + instance = context.create_instance(subset) + instance.data.update(instance_data) - # creating instance data - instance.data.update({ - "subset": subset, - "label": scene_file, - "family": family, - "families": [family], - "representations": list() - }) - - # adding basic script data - instance.data.update(shared_instance_data) - else: - instance = existing_instance - instance.data["publish"] = True # for DL - - # creating representation - representation = { - 'name': 'aep', - 'ext': 'aep', - 'files': scene_file, - "stagingDir": staging_dir, - } - - instance.data["representations"].append(representation) + return instance From 8b424f0b013b07c66a17e33d71aee2737c4effb4 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 25 Mar 2022 14:58:44 +0100 Subject: [PATCH 105/357] OP-2764 - fixed missed keys for old publishing in AE --- .../hosts/aftereffects/plugins/publish/collect_render.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/aftereffects/plugins/publish/collect_render.py b/openpype/hosts/aftereffects/plugins/publish/collect_render.py index 
24d08b343e..d64e7abc5f 100644 --- a/openpype/hosts/aftereffects/plugins/publish/collect_render.py +++ b/openpype/hosts/aftereffects/plugins/publish/collect_render.py @@ -57,7 +57,7 @@ class CollectAERender(abstract_collect_render.AbstractCollectRender): compositions = CollectAERender.get_stub().get_items(True) compositions_by_id = {item.id: item for item in compositions} for inst in context: - if not inst.data["active"]: + if not inst.data.get("active", True): continue family = inst.data["family"] @@ -84,6 +84,9 @@ class CollectAERender(abstract_collect_render.AbstractCollectRender): fps = work_area_info.frameRate # TODO add resolution when supported by extension + task_name = (inst.data.get("task") or + list(asset_entity["data"]["tasks"].keys())[0]) # lega + subset_name = inst.data["subset"] instance = AERenderInstance( family=family, @@ -94,7 +97,7 @@ class CollectAERender(abstract_collect_render.AbstractCollectRender): label="{} - {}".format(subset_name, family), subset=subset_name, asset=inst.data["asset"], - task=inst.data["task"], + task=task_name, attachTo=False, setMembers='', publish=True, From 4dcf12ee4c7c77af12c1620c756f4453b31c40c6 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 25 Mar 2022 15:30:28 +0100 Subject: [PATCH 106/357] OP-2764 - scene should be always saved --- .../aftereffects/plugins/publish/extract_save_scene.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/aftereffects/plugins/publish/extract_save_scene.py b/openpype/hosts/aftereffects/plugins/publish/extract_save_scene.py index e20598b311..eb2977309f 100644 --- a/openpype/hosts/aftereffects/plugins/publish/extract_save_scene.py +++ b/openpype/hosts/aftereffects/plugins/publish/extract_save_scene.py @@ -1,15 +1,16 @@ +import pyblish.api + import openpype.api from openpype.hosts.aftereffects.api import get_stub -class ExtractSaveScene(openpype.api.Extractor): +class ExtractSaveScene(pyblish.api.ContextPlugin): """Save scene before extraction.""" order = openpype.api.Extractor.order - 0.48 label = "Extract Save Scene" hosts = ["aftereffects"] - families = ["workfile"] - def process(self, instance): + def process(self, context): stub = get_stub() stub.save() From cc602f1da0829aaa7226a1d1dc9c36111464fc7c Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Sat, 26 Mar 2022 14:37:01 +0100 Subject: [PATCH 107/357] added implementation of overlay messages --- openpype/tools/utils/overlay_messages.py | 315 +++++++++++++++++++++++ 1 file changed, 315 insertions(+) create mode 100644 openpype/tools/utils/overlay_messages.py diff --git a/openpype/tools/utils/overlay_messages.py b/openpype/tools/utils/overlay_messages.py new file mode 100644 index 0000000000..ade037817a --- /dev/null +++ b/openpype/tools/utils/overlay_messages.py @@ -0,0 +1,315 @@ +import uuid + +from Qt import QtWidgets, QtCore, QtGui + +from .lib import set_style_property + + +class CloseButton(QtWidgets.QFrame): + """Close button drawed manually.""" + + clicked = QtCore.Signal() + + def __init__(self, parent): + super(CloseButton, self).__init__(parent) + self._mouse_pressed = False + policy = QtWidgets.QSizePolicy( + QtWidgets.QSizePolicy.Fixed, + QtWidgets.QSizePolicy.Fixed + ) + self.setSizePolicy(policy) + + def sizeHint(self): + size = self.fontMetrics().height() + return QtCore.QSize(size, size) + + def mousePressEvent(self, event): + if event.button() == QtCore.Qt.LeftButton: + self._mouse_pressed = True + super(CloseButton, self).mousePressEvent(event) + + def mouseReleaseEvent(self, event): + if 
self._mouse_pressed: + self._mouse_pressed = False + if self.rect().contains(event.pos()): + self.clicked.emit() + + super(CloseButton, self).mouseReleaseEvent(event) + + def paintEvent(self, event): + rect = self.rect() + painter = QtGui.QPainter(self) + painter.setClipRect(event.rect()) + pen = QtGui.QPen() + pen.setWidth(2) + pen.setColor(QtGui.QColor(255, 255, 255)) + pen.setStyle(QtCore.Qt.SolidLine) + pen.setCapStyle(QtCore.Qt.RoundCap) + painter.setPen(pen) + offset = int(rect.height() / 4) + top = rect.top() + offset + left = rect.left() + offset + right = rect.right() - offset + bottom = rect.bottom() - offset + painter.drawLine( + left, top, + right, bottom + ) + painter.drawLine( + left, bottom, + right, top + ) + + +class MessageWidget(QtWidgets.QFrame): + """Message widget showed as overlay. + + Message is hidden after timeout but can be overriden by mouse hover. + Mouse hover can add additional 2 seconds of widget's visibility. + + Args: + message_id (str): Unique identifier of message widget for + 'MessageOverlayObject'. + message (str): Text shown in message. + parent (QWidget): Parent widget where message is visible. + timeout (int): Timeout of message's visibility (default 5000). + message_type (str): Property which can be used in styles for specific + kid of message. + """ + + close_requested = QtCore.Signal(str) + _default_timeout = 5000 + + def __init__( + self, message_id, message, parent, timeout=None, message_type=None + ): + super(MessageWidget, self).__init__(parent) + self.setObjectName("OverlayMessageWidget") + + if message_type: + set_style_property(self, "type", message_type) + + if not timeout: + timeout = self._default_timeout + timeout_timer = QtCore.QTimer() + timeout_timer.setInterval(timeout) + timeout_timer.setSingleShot(True) + + hover_timer = QtCore.QTimer() + hover_timer.setInterval(2000) + hover_timer.setSingleShot(True) + + label_widget = QtWidgets.QLabel(message, self) + label_widget.setAlignment(QtCore.Qt.AlignCenter) + label_widget.setWordWrap(True) + close_btn = CloseButton(self) + + layout = QtWidgets.QHBoxLayout(self) + layout.setContentsMargins(5, 5, 0, 5) + layout.addWidget(label_widget, 1) + layout.addWidget(close_btn, 0) + + close_btn.clicked.connect(self._on_close_clicked) + timeout_timer.timeout.connect(self._on_timer_timeout) + hover_timer.timeout.connect(self._on_hover_timeout) + + self._label_widget = label_widget + self._message_id = message_id + self._timeout_timer = timeout_timer + self._hover_timer = hover_timer + + def size_hint_without_word_wrap(self): + """Size hint in cases that word wrap of label is disabled.""" + self._label_widget.setWordWrap(False) + size_hint = self.sizeHint() + self._label_widget.setWordWrap(True) + return size_hint + + def showEvent(self, event): + """Start timeout on show.""" + super(MessageWidget, self).showEvent(event) + self._timeout_timer.start() + + def _on_timer_timeout(self): + """On message timeout.""" + # Skip closing if hover timer is active + if not self._hover_timer.isActive(): + self._close_message() + + def _on_hover_timeout(self): + """Hover timer timed out.""" + # Check if is still under widget + if self.underMouse(): + self._hover_timer.start() + else: + self._close_message() + + def _on_close_clicked(self): + self._close_message() + + def _close_message(self): + """Emmit close request to 'MessageOverlayObject'.""" + self.close_requested.emit(self._message_id) + + def enterEvent(self, event): + """Start hover timer on hover.""" + super(MessageWidget, self).enterEvent(event) + 
self._hover_timer.start() + + def leaveEvent(self, event): + """Start hover timer on hover leave.""" + super(MessageWidget, self).leaveEvent(event) + self._hover_timer.start() + + +class MessageOverlayObject(QtCore.QObject): + """Object that can be used to add overlay messages. + + Args: + widget (QWidget): + """ + + def __init__(self, widget): + super(MessageOverlayObject, self).__init__() + + widget.installEventFilter(self) + + # Timer which triggers recalculation of message positions + recalculate_timer = QtCore.QTimer() + recalculate_timer.setInterval(10) + + recalculate_timer.timeout.connect(self._recalculate_positions) + + self._widget = widget + self._recalculate_timer = recalculate_timer + + self._messages_order = [] + self._closing_messages = set() + self._messages = {} + self._spacing = 5 + self._move_size = 4 + self._move_size_remove = 8 + + def add_message(self, message, timeout=None, message_type=None): + """Add single message into overlay. + + Args: + message (str): Message that will be shown. + timeout (int): Message timeout. + message_type (str): Message type can be used as property in + stylesheets. + """ + # Skip empty messages + if not message: + return + + # Create unique id of message + label_id = str(uuid.uuid4()) + # Create message widget + widget = MessageWidget( + label_id, message, self._widget, timeout, message_type + ) + widget.close_requested.connect(self._on_message_close_request) + widget.show() + + # Move widget outside of window + pos = widget.pos() + pos.setY(pos.y() - widget.height()) + widget.move(pos) + # Store message + self._messages[label_id] = widget + self._messages_order.append(label_id) + # Trigger recalculation timer + self._recalculate_timer.start() + + def _on_message_close_request(self, label_id): + """Message widget requested removement.""" + + widget = self._messages.get(label_id) + if widget is not None: + # Add message to closing messages and start recalculation + self._closing_messages.add(label_id) + self._recalculate_timer.start() + + def _recalculate_positions(self): + """Recalculate positions of widgets.""" + + # Skip if there are no messages to process + if not self._messages_order: + self._recalculate_timer.stop() + return + + # All message widgets are in expected positions + all_at_place = True + # Starting y position + pos_y = self._spacing + # Current widget width + widget_width = self._widget.width() + max_width = widget_width - (2 * self._spacing) + widget_half_width = widget_width / 2 + + # Store message ids that should be removed + message_ids_to_remove = set() + for message_id in reversed(self._messages_order): + widget = self._messages[message_id] + pos = widget.pos() + # Messages to remove are moved upwards + if message_id in self._closing_messages: + bottom = pos.y() + widget.height() + # Add message to remove if is not visible + if bottom < 0 or self._move_size_remove < 1: + message_ids_to_remove.add(message_id) + continue + + # Calculate new y position of message + dst_pos_y = pos.y() - self._move_size_remove + + else: + # Calculate y position of message + # - use y position of previous message widget and add + # move size if is not in final destination yet + if widget.underMouse(): + dst_pos_y = pos.y() + elif pos.y() == pos_y or self._move_size < 1: + dst_pos_y = pos_y + elif pos.y() < pos_y: + dst_pos_y = min(pos_y, pos.y() + self._move_size) + else: + dst_pos_y = max(pos_y, pos.y() - self._move_size) + + # Store if widget is in place where should be + if all_at_place and dst_pos_y != pos_y: + all_at_place = False + 
+ # Calculate ideal width and height of message widget + height = widget.heightForWidth(max_width) + w_size_hint = widget.size_hint_without_word_wrap() + widget.resize(min(max_width, w_size_hint.width()), height) + + # Center message widget + size = widget.size() + pos_x = widget_half_width - (size.width() / 2) + # Move widget to destination position + widget.move(pos_x, dst_pos_y) + + # Add message widget height and spacing for next message widget + pos_y += size.height() + self._spacing + + # Remove widgets to remove + for message_id in message_ids_to_remove: + self._messages_order.remove(message_id) + self._closing_messages.remove(message_id) + widget = self._messages.pop(message_id) + widget.hide() + widget.deleteLater() + + # Stop recalculation timer if all widgets are where should be + if all_at_place: + self._recalculate_timer.stop() + + def eventFilter(self, source, event): + # Trigger recalculation of timer on resize of widget + if source is self._widget and event.type() == QtCore.QEvent.Resize: + self._recalculate_timer.start() + + return super(MessageOverlayObject, self).eventFilter(source, event) From 8bc010a4f409a66f5536fc8bdc39dd4094dee05d Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Sat, 26 Mar 2022 14:54:08 +0100 Subject: [PATCH 108/357] define default styles for overlay messages --- openpype/style/data.json | 6 +++++- openpype/style/style.css | 20 +++++++++++++++++++ openpype/tools/utils/overlay_messages.py | 25 ++++++++++++++---------- 3 files changed, 40 insertions(+), 11 deletions(-) diff --git a/openpype/style/data.json b/openpype/style/data.json index a76a77015b..15d9472e3e 100644 --- a/openpype/style/data.json +++ b/openpype/style/data.json @@ -61,7 +61,11 @@ "icon-entity-default": "#bfccd6", "icon-entity-disabled": "#808080", "font-entity-deprecated": "#666666", - + "overlay-messages": { + "close-btn": "#D3D8DE", + "bg-success": "#458056", + "bg-success-hover": "#55a066" + }, "tab-widget": { "bg": "#21252B", "bg-selected": "#434a56", diff --git a/openpype/style/style.css b/openpype/style/style.css index df83600973..4d83e39780 100644 --- a/openpype/style/style.css +++ b/openpype/style/style.css @@ -687,6 +687,26 @@ QScrollBar::add-page:vertical, QScrollBar::sub-page:vertical { background: none; } +/* Messages overlay */ +#OverlayMessageWidget { + border-radius: 0.2em; + background: {color:bg-buttons}; +} + +#OverlayMessageWidget:hover { + background: {color:bg-button-hover}; +} +#OverlayMessageWidget[type="success"] { + background: {color:overlay-messages:bg-success}; +} +#OverlayMessageWidget[type="success"]:hover { + background: {color:overlay-messages:bg-success-hover}; +} + +#OverlayMessageWidget QWidget { + background: transparent; +} + /* Password dialog*/ #PasswordBtn { border: none; diff --git a/openpype/tools/utils/overlay_messages.py b/openpype/tools/utils/overlay_messages.py index ade037817a..93082b9fb7 100644 --- a/openpype/tools/utils/overlay_messages.py +++ b/openpype/tools/utils/overlay_messages.py @@ -2,6 +2,8 @@ import uuid from Qt import QtWidgets, QtCore, QtGui +from openpype.style import get_objected_colors + from .lib import set_style_property @@ -12,6 +14,9 @@ class CloseButton(QtWidgets.QFrame): def __init__(self, parent): super(CloseButton, self).__init__(parent) + colors = get_objected_colors() + close_btn_color = colors["overlay-messages"]["close-btn"] + self._color = close_btn_color.get_qcolor() self._mouse_pressed = False policy = QtWidgets.QSizePolicy( QtWidgets.QSizePolicy.Fixed, @@ -42,7 +47,7 @@ class CloseButton(QtWidgets.QFrame): 
painter.setClipRect(event.rect()) pen = QtGui.QPen() pen.setWidth(2) - pen.setColor(QtGui.QColor(255, 255, 255)) + pen.setColor(self._color) pen.setStyle(QtCore.Qt.SolidLine) pen.setCapStyle(QtCore.Qt.RoundCap) painter.setPen(pen) @@ -61,7 +66,7 @@ class CloseButton(QtWidgets.QFrame): ) -class MessageWidget(QtWidgets.QFrame): +class OverlayMessageWidget(QtWidgets.QFrame): """Message widget showed as overlay. Message is hidden after timeout but can be overriden by mouse hover. @@ -81,9 +86,9 @@ class MessageWidget(QtWidgets.QFrame): _default_timeout = 5000 def __init__( - self, message_id, message, parent, timeout=None, message_type=None + self, message_id, message, parent, message_type=None, timeout=None ): - super(MessageWidget, self).__init__(parent) + super(OverlayMessageWidget, self).__init__(parent) self.setObjectName("OverlayMessageWidget") if message_type: @@ -127,7 +132,7 @@ class MessageWidget(QtWidgets.QFrame): def showEvent(self, event): """Start timeout on show.""" - super(MessageWidget, self).showEvent(event) + super(OverlayMessageWidget, self).showEvent(event) self._timeout_timer.start() def _on_timer_timeout(self): @@ -153,12 +158,12 @@ class MessageWidget(QtWidgets.QFrame): def enterEvent(self, event): """Start hover timer on hover.""" - super(MessageWidget, self).enterEvent(event) + super(OverlayMessageWidget, self).enterEvent(event) self._hover_timer.start() def leaveEvent(self, event): """Start hover timer on hover leave.""" - super(MessageWidget, self).leaveEvent(event) + super(OverlayMessageWidget, self).leaveEvent(event) self._hover_timer.start() @@ -190,7 +195,7 @@ class MessageOverlayObject(QtCore.QObject): self._move_size = 4 self._move_size_remove = 8 - def add_message(self, message, timeout=None, message_type=None): + def add_message(self, message, message_type=None, timeout=None): """Add single message into overlay. Args: @@ -206,8 +211,8 @@ class MessageOverlayObject(QtCore.QObject): # Create unique id of message label_id = str(uuid.uuid4()) # Create message widget - widget = MessageWidget( - label_id, message, self._widget, timeout, message_type + widget = OverlayMessageWidget( + label_id, message, self._widget, message_type, timeout ) widget.close_requested.connect(self._on_message_close_request) widget.show() From ea8b3b79b1c3426194a49db7ac5c6d909a0c1d38 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 29 Mar 2022 13:34:25 +0200 Subject: [PATCH 109/357] OP-2951 - added force_only_broken argument to sync methods Cleaned up representation in sync methods --- .../modules/sync_server/sync_server_module.py | 46 +++++++++++-------- openpype/modules/sync_server/utils.py | 5 ++ 2 files changed, 33 insertions(+), 18 deletions(-) diff --git a/openpype/modules/sync_server/sync_server_module.py b/openpype/modules/sync_server/sync_server_module.py index caf58503f1..9895a6d430 100644 --- a/openpype/modules/sync_server/sync_server_module.py +++ b/openpype/modules/sync_server/sync_server_module.py @@ -23,7 +23,7 @@ from openpype.settings.lib import ( from .providers.local_drive import LocalDriveHandler from .providers import lib -from .utils import time_function, SyncStatus +from .utils import time_function, SyncStatus, SiteAlreadyPresentError log = PypeLogger().get_logger("SyncServer") @@ -129,7 +129,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): """ Start of Public API """ def add_site(self, collection, representation_id, site_name=None, - force=False): + force=False, force_only_broken=False): """ Adds new site to representation to be synced. 
@@ -143,6 +143,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): representation_id (string): MongoDB _id value site_name (string): name of configured and active site force (bool): reset site if exists + force_only_broken (bool): reset only if "error" present Returns: throws ValueError if any issue @@ -155,7 +156,9 @@ class SyncServerModule(OpenPypeModule, ITrayModule): self.reset_site_on_representation(collection, representation_id, - site_name=site_name, force=force) + site_name=site_name, + force=force, + force_only_broken=force_only_broken) # public facing API def remove_site(self, collection, representation_id, site_name, @@ -281,7 +284,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): os.path.getmtime(local_file_path)) elem = {"name": site_name, "created_dt": created_dt} - self._add_site(collection, query, [repre], elem, + self._add_site(collection, query, repre, elem, site_name=site_name, file_id=repre_file["_id"]) sites_added += 1 @@ -819,7 +822,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): self.log.debug("Adding alternate {} to {}".format( alt_site, representation["_id"])) self._add_site(collection, query, - [representation], elem, + representation, elem, alt_site, file_id=file_id, force=True) """ End of Public API """ @@ -1394,7 +1397,8 @@ class SyncServerModule(OpenPypeModule, ITrayModule): def reset_site_on_representation(self, collection, representation_id, side=None, file_id=None, site_name=None, - remove=False, pause=None, force=False): + remove=False, pause=None, force=False, + force_only_broken=False): """ Reset information about synchronization for particular 'file_id' and provider. @@ -1417,6 +1421,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): remove (bool): if True remove site altogether pause (bool or None): if True - pause, False - unpause force (bool): hard reset - currently only for add_site + force_only_broken(bool): reset site only if there is "error" field Returns: throws ValueError @@ -1425,7 +1430,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): "_id": ObjectId(representation_id) } - representation = list(self.connection.database[collection].find(query)) + representation = self.connection.database[collection].find_one(query) if not representation: raise ValueError("Representation {} not found in {}". 
format(representation_id, collection)) @@ -1456,7 +1461,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): representation, site_name, pause) else: # add new site to all files for representation self._add_site(collection, query, representation, elem, site_name, - force) + force=force, force_only_broken=force_only_broken) def _update_site(self, collection, query, update, arr_filter): """ @@ -1511,7 +1516,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): Throws ValueError if 'site_name' not found on 'representation' """ found = False - for repre_file in representation.pop().get("files"): + for repre_file in representation.get("files"): for site in repre_file.get("sites"): if site.get("name") == site_name: found = True @@ -1537,7 +1542,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): """ found = False site = None - for repre_file in representation.pop().get("files"): + for repre_file in representation.get("files"): for site in repre_file.get("sites"): if site["name"] == site_name: found = True @@ -1564,34 +1569,39 @@ class SyncServerModule(OpenPypeModule, ITrayModule): self._update_site(collection, query, update, arr_filter) def _add_site(self, collection, query, representation, elem, site_name, - force=False, file_id=None): + force=False, file_id=None, force_only_broken=False): """ Adds 'site_name' to 'representation' on 'collection' Args: - representation (list of 1 dict) + representation (dict) file_id (ObjectId) Use 'force' to remove existing or raises ValueError """ - reseted_existing = False - for repre_file in representation.pop().get("files"): + reset_existing = False + files = representation.get("files", []) + if not files: + log.debug("No files for {}".format(representation["_id"])) + return + + for repre_file in files: if file_id and file_id != repre_file["_id"]: continue for site in repre_file.get("sites"): if site["name"] == site_name: - if force: + if force or (force_only_broken and site.get("error")): self._reset_site_for_file(collection, query, elem, repre_file["_id"], site_name) - reseted_existing = True + reset_existing = True else: msg = "Site {} already present".format(site_name) log.info(msg) - raise ValueError(msg) + raise SiteAlreadyPresentError(msg) - if reseted_existing: + if reset_existing: return if not file_id: diff --git a/openpype/modules/sync_server/utils.py b/openpype/modules/sync_server/utils.py index 85e4e03f77..03f362202f 100644 --- a/openpype/modules/sync_server/utils.py +++ b/openpype/modules/sync_server/utils.py @@ -8,6 +8,11 @@ class ResumableError(Exception): pass +class SiteAlreadyPresentError(Exception): + """Representation has already site skeleton present.""" + pass + + class SyncStatus: DO_NOTHING = 0 DO_UPLOAD = 1 From d340d05bf01a5f8beda6cdae1736cb59219c4a07 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 29 Mar 2022 13:37:12 +0200 Subject: [PATCH 110/357] OP-2951 - implemented synching referenced files in workfile When workfile is synched, it checks for referenced files (added by Loader) and tries to sync them too. 
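Condensed, the flow added below looks roughly like this (illustrative sketch only; project_name, workfile_repre_id and site_name are placeholders, and sync_server stands for the module instance obtained through ModulesManager as in the loader):

    # The workfile itself is always (re)queued for the target site...
    sync_server.add_site(project_name, workfile_repre_id, site_name,
                         force=True)

    # ...while its referenced representations are queued too, but only
    # reset when their previous sync attempt ended with an error.
    linked_ids = get_linked_ids_for_representations(
        project_name, [workfile_repre_id], link_type="reference")
    for repre_id in linked_ids:
        try:
            sync_server.add_site(project_name, repre_id, site_name,
                                 force_only_broken=True)
        except SiteAlreadyPresentError:
            pass  # already present and healthy on that site

The loader below implements the same idea per loaded representation, driving force/force_only_broken from an is_main flag.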
--- openpype/plugins/load/add_site.py | 72 ++++++++++++++++++++++++++----- 1 file changed, 61 insertions(+), 11 deletions(-) diff --git a/openpype/plugins/load/add_site.py b/openpype/plugins/load/add_site.py index 95001691e2..0ddce6e160 100644 --- a/openpype/plugins/load/add_site.py +++ b/openpype/plugins/load/add_site.py @@ -1,9 +1,19 @@ from openpype.modules import ModulesManager from openpype.pipeline import load +:from openpype.lib.avalon_context import get_linked_ids_for_representations +from openpype.modules.sync_server.utils import SiteAlreadyPresentError class AddSyncSite(load.LoaderPlugin): - """Add sync site to representation""" + """Add sync site to representation + + If family of synced representation is 'workfile', it looks for all + representations which are referenced (loaded) in workfile with content of + 'inputLinks'. + It doesn't do any checks for site, most common use case is when artist is + downloading workfile to his local site, but it might be helpful when + artist is re-uploading broken representation on remote site also. + """ representations = ["*"] families = ["*"] @@ -12,21 +22,61 @@ class AddSyncSite(load.LoaderPlugin): icon = "download" color = "#999999" + _sync_server = None + + @property + def sync_server(self): + if not self._sync_server: + manager = ModulesManager() + self._sync_server = manager.modules_by_name["sync_server"] + + return self._sync_server + def load(self, context, name=None, namespace=None, data=None): self.log.info("Adding {} to representation: {}".format( data["site_name"], data["_id"])) - self.add_site_to_representation(data["project_name"], - data["_id"], - data["site_name"]) + family = context["representation"]["context"]["family"] + project_name = data["project_name"] + repre_id = data["_id"] + + add_ids = [repre_id] + if family == "workfile": + links = get_linked_ids_for_representations(project_name, + add_ids, + link_type="reference") + add_ids.extend(links) + + add_ids = set(add_ids) + self.log.info("Add to repre_ids {}".format(add_ids)) + is_main = True + for add_repre_id in add_ids: + self.add_site_to_representation(project_name, + add_repre_id, + data["site_name"], + is_main) + is_main = False + self.log.debug("Site added.") - @staticmethod - def add_site_to_representation(project_name, representation_id, site_name): - """Adds new site to representation_id, resets if exists""" - manager = ModulesManager() - sync_server = manager.modules_by_name["sync_server"] - sync_server.add_site(project_name, representation_id, site_name, - force=True) + def add_site_to_representation(self, project_name, representation_id, + site_name, is_main): + """Adds new site to representation_id, resets if exists + + Args: + project_name (str) + representation_id (ObjectId): + site_name (str) + is_main (bool): true for really downloaded, false for references, + force redownload main file always, for references only if + broken + """ + try: + self.sync_server.add_site(project_name, representation_id, + site_name, + force=is_main, + force_only_broken=not is_main) + except SiteAlreadyPresentError: + self.log.debug("Site present", exc_info=True) def filepath_from_context(self, context): """No real file loading""" From a197334a251404d06f89ec3de6940db68c4b1dde Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 29 Mar 2022 13:57:21 +0200 Subject: [PATCH 111/357] OP-2951 - added function to collect referenced representation ids --- openpype/lib/avalon_context.py | 120 +++++++++++++++++++++++++++++++++ 1 file changed, 120 insertions(+) diff --git 
a/openpype/lib/avalon_context.py b/openpype/lib/avalon_context.py index b4e6abb72d..e8a365ec39 100644 --- a/openpype/lib/avalon_context.py +++ b/openpype/lib/avalon_context.py @@ -1971,3 +1971,123 @@ def get_last_workfile( return os.path.normpath(os.path.join(workdir, filename)) return filename + + +@with_avalon +def get_linked_ids_for_representations(project, repre_ids, dbcon=None, + link_type=None, max_depth=0): + """Returns list of linked ids of particular type (if provided). + + Goes from representations to version, back to representations + Args: + project (str) + repre_ids (list) or (ObjectId) + dbcon (avalon.mongodb.AvalonMongoDB, optional): Avalon Mongo connection + with Session. + link_type (str): ['reference', '..] + max_depth (int): limit how many levels of recursion + Returns: + (list) of ObjectId - linked representations + """ + if not dbcon: + log.debug("Using `avalon.io` for query.") + dbcon = avalon.io + # Make sure is installed + dbcon.install() + + if dbcon.Session["AVALON_PROJECT"] != project: + dbcon.Session["AVALON_PROJECT"] = project + + if not isinstance(repre_ids, list): + repre_ids = [repre_ids] + + versions = avalon.io.find( + { + "_id": {"$in": repre_ids}, + "type": "representation" + }, + projection={"parent": True} + ) + version_ids = [version["parent"] for version in versions] + + graph_lookup = { + "from": project, + "startWith": "$data.inputLinks.id", + "connectFromField": "data.inputLinks.id", + "connectToField": "_id", + "as": "outputs_recursive", + "depthField": "depth" + } + if max_depth != 0: + # We offset by -1 since 0 basically means no recursion + # but the recursion only happens after the initial lookup + # for outputs. + graph_lookup["maxDepth"] = max_depth - 1 + + match = { + "_id": {"$in": version_ids}, + "type": "version" + } + + pipeline_ = [ + # Match + {"$match": match}, + # Recursive graph lookup for inputs + {"$graphLookup": graph_lookup} + ] + + result = dbcon.aggregate(pipeline_) + referenced_version_ids = _process_referenced_pipeline_result(result, + link_type) + + representations = avalon.io.find( + { + "parent": {"$in": list(referenced_version_ids)}, + "type": "representation" + }, + projection={"_id": True} + ) + ref_ids = {representation["_id"] for representation in representations} + return list(ref_ids) + + +def _process_referenced_pipeline_result(result, link_type): + """Filters result from pipeline for particular link_type. + + Pipeline cannot use link_type directly in a query. 
+ Returns: + (list) + """ + referenced_version_ids = set() + correctly_linked_ids = set() + for item in result: + correctly_linked_ids = _filter_input_links(item["data"]["inputLinks"], + link_type, + correctly_linked_ids) + + # outputs_recursive in random order, sort by _id + outputs_recursive = sorted(item.get("outputs_recursive", []), + key=lambda d: d["_id"]) + # go from oldest to newest + # only older _id can reference another newer _id + for output in outputs_recursive[::-1]: + if output["_id"] not in correctly_linked_ids: # leaf + continue + + correctly_linked_ids = _filter_input_links( + output["data"].get("inputLinks", []), + link_type, + correctly_linked_ids) + + referenced_version_ids.add(output["_id"]) + + return referenced_version_ids + + +def _filter_input_links(input_links, link_type, correctly_linked_ids): + for input_link in input_links: + if not link_type or input_link["type"] == link_type: + correctly_linked_ids.add(input_link.get("id") or + input_link.get("_id")) # legacy + + return correctly_linked_ids From a0a2e2678e55f449201981b419d9a6a13f8b4a49 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 29 Mar 2022 14:07:57 +0200 Subject: [PATCH 112/357] OP-2951 - fixed typo --- openpype/plugins/load/add_site.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/plugins/load/add_site.py b/openpype/plugins/load/add_site.py index 0ddce6e160..59720eb5b6 100644 --- a/openpype/plugins/load/add_site.py +++ b/openpype/plugins/load/add_site.py @@ -1,6 +1,6 @@ from openpype.modules import ModulesManager from openpype.pipeline import load -:from openpype.lib.avalon_context import get_linked_ids_for_representations +from openpype.lib.avalon_context import get_linked_ids_for_representations from openpype.modules.sync_server.utils import SiteAlreadyPresentError From af092348e50e1bda0ac6b3a13a58f1908cf5b939 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 29 Mar 2022 16:32:32 +0200 Subject: [PATCH 113/357] OP-2766 - Fix creation of subset names in PS review and workfile --- .../hosts/photoshop/plugins/publish/collect_review.py | 10 +++++++++- .../photoshop/plugins/publish/collect_workfile.py | 10 +++++++++- 2 files changed, 18 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/photoshop/plugins/publish/collect_review.py b/openpype/hosts/photoshop/plugins/publish/collect_review.py index 4b6f855a6a..dafeb95d0e 100644 --- a/openpype/hosts/photoshop/plugins/publish/collect_review.py +++ b/openpype/hosts/photoshop/plugins/publish/collect_review.py @@ -10,6 +10,8 @@ import os import pyblish.api +from openpype.lib import get_subset_name + class CollectReview(pyblish.api.ContextPlugin): """Gather the active document as review instance. 
@@ -25,7 +27,13 @@ class CollectReview(pyblish.api.ContextPlugin): def process(self, context): family = "review" task = os.getenv("AVALON_TASK", None) - subset = family + task.capitalize() + subset = get_subset_name( + family, + "", + task, + context.data["assetEntity"]["_id"], + host_name="photoshop" + ) instance = context.create_instance(subset) instance.data.update({ diff --git a/openpype/hosts/photoshop/plugins/publish/collect_workfile.py b/openpype/hosts/photoshop/plugins/publish/collect_workfile.py index bdbd379a33..1a826c3f2a 100644 --- a/openpype/hosts/photoshop/plugins/publish/collect_workfile.py +++ b/openpype/hosts/photoshop/plugins/publish/collect_workfile.py @@ -1,6 +1,8 @@ import os import pyblish.api +from openpype.lib import get_subset_name + class CollectWorkfile(pyblish.api.ContextPlugin): """Collect current script for publish.""" @@ -19,7 +21,13 @@ class CollectWorkfile(pyblish.api.ContextPlugin): family = "workfile" task = os.getenv("AVALON_TASK", None) - subset = family + task.capitalize() + subset = get_subset_name( + family, + "", + task, + context.data["assetEntity"]["_id"], + host_name="photoshop" + ) file_path = context.data["currentFile"] staging_dir = os.path.dirname(file_path) From 0f08f3e31df5a6ec54c025776d490343a587ab5b Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 29 Mar 2022 17:25:19 +0200 Subject: [PATCH 114/357] OP-2766 - Fix pulling task and project from context --- openpype/hosts/photoshop/plugins/publish/collect_review.py | 5 +++-- openpype/hosts/photoshop/plugins/publish/collect_workfile.py | 4 ++-- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/photoshop/plugins/publish/collect_review.py b/openpype/hosts/photoshop/plugins/publish/collect_review.py index dafeb95d0e..09fed2df78 100644 --- a/openpype/hosts/photoshop/plugins/publish/collect_review.py +++ b/openpype/hosts/photoshop/plugins/publish/collect_review.py @@ -23,15 +23,16 @@ class CollectReview(pyblish.api.ContextPlugin): label = "Collect Review" order = pyblish.api.CollectorOrder hosts = ["photoshop"] + order = pyblish.api.CollectorOrder + 0.1 def process(self, context): family = "review" - task = os.getenv("AVALON_TASK", None) subset = get_subset_name( family, "", - task, + context.data["anatomyData"]["task"]["name"], context.data["assetEntity"]["_id"], + context.data["anatomyData"]["project"]["name"], host_name="photoshop" ) diff --git a/openpype/hosts/photoshop/plugins/publish/collect_workfile.py b/openpype/hosts/photoshop/plugins/publish/collect_workfile.py index 1a826c3f2a..71022a86fd 100644 --- a/openpype/hosts/photoshop/plugins/publish/collect_workfile.py +++ b/openpype/hosts/photoshop/plugins/publish/collect_workfile.py @@ -20,12 +20,12 @@ class CollectWorkfile(pyblish.api.ContextPlugin): break family = "workfile" - task = os.getenv("AVALON_TASK", None) subset = get_subset_name( family, "", - task, + context.data["anatomyData"]["task"]["name"], context.data["assetEntity"]["_id"], + context.data["anatomyData"]["project"]["name"], host_name="photoshop" ) From 9e4e6d4b85a1273d1eeab0f473c88cb8b7f62f30 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 30 Mar 2022 13:30:53 +0200 Subject: [PATCH 115/357] OP-2766 Switched subset function according to review comments --- .../hosts/photoshop/plugins/publish/collect_review.py | 8 ++++---- .../hosts/photoshop/plugins/publish/collect_workfile.py | 8 ++++---- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/openpype/hosts/photoshop/plugins/publish/collect_review.py 
b/openpype/hosts/photoshop/plugins/publish/collect_review.py index 09fed2df78..d825950b9e 100644 --- a/openpype/hosts/photoshop/plugins/publish/collect_review.py +++ b/openpype/hosts/photoshop/plugins/publish/collect_review.py @@ -10,7 +10,7 @@ import os import pyblish.api -from openpype.lib import get_subset_name +from openpype.lib import get_subset_name_with_asset_doc class CollectReview(pyblish.api.ContextPlugin): @@ -27,13 +27,13 @@ class CollectReview(pyblish.api.ContextPlugin): def process(self, context): family = "review" - subset = get_subset_name( + subset = get_subset_name_with_asset_doc( family, "", context.data["anatomyData"]["task"]["name"], - context.data["assetEntity"]["_id"], + context.data["assetEntity"], context.data["anatomyData"]["project"]["name"], - host_name="photoshop" + host_name=context.data["hostName"] ) instance = context.create_instance(subset) diff --git a/openpype/hosts/photoshop/plugins/publish/collect_workfile.py b/openpype/hosts/photoshop/plugins/publish/collect_workfile.py index 71022a86fd..e4f0a07b34 100644 --- a/openpype/hosts/photoshop/plugins/publish/collect_workfile.py +++ b/openpype/hosts/photoshop/plugins/publish/collect_workfile.py @@ -1,7 +1,7 @@ import os import pyblish.api -from openpype.lib import get_subset_name +from openpype.lib import get_subset_name_with_asset_doc class CollectWorkfile(pyblish.api.ContextPlugin): @@ -20,13 +20,13 @@ class CollectWorkfile(pyblish.api.ContextPlugin): break family = "workfile" - subset = get_subset_name( + subset = get_subset_name_with_asset_doc( family, "", context.data["anatomyData"]["task"]["name"], - context.data["assetEntity"]["_id"], + context.data["assetEntity"], context.data["anatomyData"]["project"]["name"], - host_name="photoshop" + host_name=context.data["hostName"] ) file_path = context.data["currentFile"] From f6fb60bb49bed7a0c26825ef85b5d0f65c4aa6bb Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 31 Mar 2022 11:53:58 +0200 Subject: [PATCH 116/357] Update openpype/plugins/load/add_site.py Co-authored-by: Roy Nieterau --- openpype/plugins/load/add_site.py | 21 +++++++++------------ 1 file changed, 9 insertions(+), 12 deletions(-) diff --git a/openpype/plugins/load/add_site.py b/openpype/plugins/load/add_site.py index 59720eb5b6..e26ef586e0 100644 --- a/openpype/plugins/load/add_site.py +++ b/openpype/plugins/load/add_site.py @@ -38,23 +38,20 @@ class AddSyncSite(load.LoaderPlugin): family = context["representation"]["context"]["family"] project_name = data["project_name"] repre_id = data["_id"] + self.add_site_to_representation(project_name, + repre_id, + data["site_name"], + is_main=True) - add_ids = [repre_id] if family == "workfile": links = get_linked_ids_for_representations(project_name, add_ids, link_type="reference") - add_ids.extend(links) - - add_ids = set(add_ids) - self.log.info("Add to repre_ids {}".format(add_ids)) - is_main = True - for add_repre_id in add_ids: - self.add_site_to_representation(project_name, - add_repre_id, - data["site_name"], - is_main) - is_main = False + for link_repre_id in links: + self.add_site_to_representation(project_name, + link_repre_id, + data["site_name"], + is_main=False) self.log.debug("Site added.") From 6b6c466d8b6ca5b587c8ccf1a8e1dac5e9326bfe Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 31 Mar 2022 12:00:48 +0200 Subject: [PATCH 117/357] OP-2951 - fix wrong variable --- openpype/plugins/load/add_site.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/plugins/load/add_site.py 
b/openpype/plugins/load/add_site.py index e26ef586e0..22d3ebf24b 100644 --- a/openpype/plugins/load/add_site.py +++ b/openpype/plugins/load/add_site.py @@ -45,7 +45,7 @@ class AddSyncSite(load.LoaderPlugin): if family == "workfile": links = get_linked_ids_for_representations(project_name, - add_ids, + [repre_id], link_type="reference") for link_repre_id in links: self.add_site_to_representation(project_name, From af079897a884538a5af90bbf2301a004fef7233c Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 31 Mar 2022 12:07:43 +0200 Subject: [PATCH 118/357] OP-2951 - refactor use better function --- openpype/lib/avalon_context.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/openpype/lib/avalon_context.py b/openpype/lib/avalon_context.py index e8a365ec39..496b55a6f2 100644 --- a/openpype/lib/avalon_context.py +++ b/openpype/lib/avalon_context.py @@ -2040,14 +2040,14 @@ def get_linked_ids_for_representations(project, repre_ids, dbcon=None, referenced_version_ids = _process_referenced_pipeline_result(result, link_type) - representations = avalon.io.find( - { + ref_ids = avalon.io.distinct( + "_id", + filter={ "parent": {"$in": list(referenced_version_ids)}, "type": "representation" - }, - projection={"_id": True} + } ) - ref_ids = {representation["_id"] for representation in representations} + return list(ref_ids) From b826cfac4115f51d8387daab30e3475445256e0f Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 31 Mar 2022 12:14:17 +0200 Subject: [PATCH 119/357] OP-2951 - change sort by depth Previous sorting by _id might not be deterministic, not reliable. The main logic is to have outputs sorted by how they were traversed, which should be denoted by 'depth' field. --- openpype/lib/avalon_context.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/lib/avalon_context.py b/openpype/lib/avalon_context.py index 496b55a6f2..9a5d382c98 100644 --- a/openpype/lib/avalon_context.py +++ b/openpype/lib/avalon_context.py @@ -2067,7 +2067,7 @@ def _process_referenced_pipeline_result(result, link_type): # outputs_recursive in random order, sort by _id outputs_recursive = sorted(item.get("outputs_recursive", []), - key=lambda d: d["_id"]) + key=lambda d: d["depth"]) # go from oldest to newest # only older _id can reference another newer _id for output in outputs_recursive[::-1]: From d8c56f0a67cacfc2e05b726efc0e3d8e392c0f78 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 1 Apr 2022 10:39:52 +0200 Subject: [PATCH 120/357] Update openpype/lib/avalon_context.py Co-authored-by: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> --- openpype/lib/avalon_context.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/lib/avalon_context.py b/openpype/lib/avalon_context.py index 9a5d382c98..5ea472f11e 100644 --- a/openpype/lib/avalon_context.py +++ b/openpype/lib/avalon_context.py @@ -2001,7 +2001,7 @@ def get_linked_ids_for_representations(project, repre_ids, dbcon=None, if not isinstance(repre_ids, list): repre_ids = [repre_ids] - versions = avalon.io.find( + versions = dbcon.find( { "_id": {"$in": repre_ids}, "type": "representation" From 6f86f78860c795f027ac481b1f6494ddd5b6979c Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 1 Apr 2022 10:40:00 +0200 Subject: [PATCH 121/357] Update openpype/lib/avalon_context.py Co-authored-by: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> --- openpype/lib/avalon_context.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/lib/avalon_context.py 
b/openpype/lib/avalon_context.py index 5ea472f11e..68d38acf35 100644 --- a/openpype/lib/avalon_context.py +++ b/openpype/lib/avalon_context.py @@ -2040,7 +2040,7 @@ def get_linked_ids_for_representations(project, repre_ids, dbcon=None, referenced_version_ids = _process_referenced_pipeline_result(result, link_type) - ref_ids = avalon.io.distinct( + ref_ids = dbcon.distinct( "_id", filter={ "parent": {"$in": list(referenced_version_ids)}, From d14d739e1cfd312390d9ab880da0a589b3c6d567 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 1 Apr 2022 10:40:08 +0200 Subject: [PATCH 122/357] Update openpype/lib/avalon_context.py Co-authored-by: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> --- openpype/lib/avalon_context.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/lib/avalon_context.py b/openpype/lib/avalon_context.py index 68d38acf35..7d562733fc 100644 --- a/openpype/lib/avalon_context.py +++ b/openpype/lib/avalon_context.py @@ -1974,7 +1974,7 @@ def get_last_workfile( @with_avalon -def get_linked_ids_for_representations(project, repre_ids, dbcon=None, +def get_linked_ids_for_representations(project_name, repre_ids, dbcon=None, link_type=None, max_depth=0): """Returns list of linked ids of particular type (if provided). From 44afe82d5a21f8ac4bf393fa35b2357df0c583a5 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 1 Apr 2022 11:07:54 +0200 Subject: [PATCH 123/357] OP-2951 - refactored distinct version ids Fixed ordering of referenced versions --- openpype/lib/avalon_context.py | 37 +++++++++++++++------------------- 1 file changed, 16 insertions(+), 21 deletions(-) diff --git a/openpype/lib/avalon_context.py b/openpype/lib/avalon_context.py index 7d562733fc..65575493e0 100644 --- a/openpype/lib/avalon_context.py +++ b/openpype/lib/avalon_context.py @@ -1980,7 +1980,7 @@ def get_linked_ids_for_representations(project_name, repre_ids, dbcon=None, Goes from representations to version, back to representations Args: - project (str) + project_name (str) repre_ids (list) or (ObjectId) dbcon (avalon.mongodb.AvalonMongoDB, optional): Avalon Mongo connection with Session. @@ -1995,23 +1995,24 @@ def get_linked_ids_for_representations(project_name, repre_ids, dbcon=None, # Make sure is installed dbcon.install() - if dbcon.Session["AVALON_PROJECT"] != project: - dbcon.Session["AVALON_PROJECT"] = project + if dbcon.Session["AVALON_PROJECT"] != project_name: + dbcon.Session["AVALON_PROJECT"] = project_name if not isinstance(repre_ids, list): repre_ids = [repre_ids] - versions = dbcon.find( - { - "_id": {"$in": repre_ids}, - "type": "representation" - }, - projection={"parent": True} - ) - version_ids = [version["parent"] for version in versions] + version_ids = dbcon.distinct("parent", { + "_id": {"$in": repre_ids}, + "type": "representation" + }) + + match = { + "_id": {"$in": version_ids}, + "type": "version" + } graph_lookup = { - "from": project, + "from": project_name, "startWith": "$data.inputLinks.id", "connectFromField": "data.inputLinks.id", "connectToField": "_id", @@ -2024,11 +2025,6 @@ def get_linked_ids_for_representations(project_name, repre_ids, dbcon=None, # for outputs. 
graph_lookup["maxDepth"] = max_depth - 1 - match = { - "_id": {"$in": version_ids}, - "type": "version" - } - pipeline_ = [ # Match {"$match": match}, @@ -2065,12 +2061,11 @@ def _process_referenced_pipeline_result(result, link_type): link_type, correctly_linked_ids) - # outputs_recursive in random order, sort by _id + # outputs_recursive in random order, sort by depth outputs_recursive = sorted(item.get("outputs_recursive", []), key=lambda d: d["depth"]) - # go from oldest to newest - # only older _id can reference another newer _id - for output in outputs_recursive[::-1]: + + for output in outputs_recursive: if output["_id"] not in correctly_linked_ids: # leaf continue From 2694d9d557633e06ed51f684e30056c443a4a401 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 9 Mar 2022 10:20:58 +0100 Subject: [PATCH 124/357] OP-2868 - added configuration for default variant value to Settings --- .../plugins/create/create_render.py | 12 +++++++++- .../project_settings/aftereffects.json | 7 ++++++ .../schema_project_aftereffects.json | 23 +++++++++++++++++++ 3 files changed, 41 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/aftereffects/plugins/create/create_render.py b/openpype/hosts/aftereffects/plugins/create/create_render.py index c43ada84b5..aee660673b 100644 --- a/openpype/hosts/aftereffects/plugins/create/create_render.py +++ b/openpype/hosts/aftereffects/plugins/create/create_render.py @@ -18,6 +18,16 @@ class RenderCreator(Creator): create_allow_context_change = False + def __init__( + self, create_context, system_settings, project_settings, headless=False + ): + super(RenderCreator, self).__init__(create_context, system_settings, + project_settings, headless) + self._default_variants = (project_settings["aftereffects"] + ["create"] + ["RenderCreator"] + ["defaults"]) + def get_icon(self): return resources.get_openpype_splash_filepath() @@ -79,7 +89,7 @@ class RenderCreator(Creator): self._add_instance_to_context(new_instance) def get_default_variants(self): - return ["Main"] + return self._default_variants def get_instance_attr_defs(self): return [BoolDef("farm", label="Render on farm")] diff --git a/openpype/settings/defaults/project_settings/aftereffects.json b/openpype/settings/defaults/project_settings/aftereffects.json index 6a9a399069..8083aa0972 100644 --- a/openpype/settings/defaults/project_settings/aftereffects.json +++ b/openpype/settings/defaults/project_settings/aftereffects.json @@ -1,4 +1,11 @@ { + "create": { + "RenderCreator": { + "defaults": [ + "Main" + ] + } + }, "publish": { "ValidateSceneSettings": { "enabled": true, diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_aftereffects.json b/openpype/settings/entities/schemas/projects_schema/schema_project_aftereffects.json index 4c4cd225ab..1a3eaef540 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_aftereffects.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_aftereffects.json @@ -5,6 +5,29 @@ "label": "AfterEffects", "is_file": true, "children": [ + { + "type": "dict", + "collapsible": true, + "key": "create", + "label": "Creator plugins", + "children": [ + { + "type": "dict", + "collapsible": true, + "key": "RenderCreator", + "label": "Create render", + "children": [ + { + "type": "list", + "key": "defaults", + "label": "Default Variants", + "object_type": "text", + "docstring": "Fill default variant(s) (like 'Main' or 'Default') used in subset name creation." 
+ } + ] + } + ] + }, { "type": "dict", "collapsible": true, From 55246ce4a77e25b6d8f7479f741b64839213f5a2 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 1 Apr 2022 17:30:28 +0200 Subject: [PATCH 125/357] Update openpype/lib/avalon_context.py Co-authored-by: Roy Nieterau --- openpype/lib/avalon_context.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/openpype/lib/avalon_context.py b/openpype/lib/avalon_context.py index 65575493e0..224d8129a7 100644 --- a/openpype/lib/avalon_context.py +++ b/openpype/lib/avalon_context.py @@ -1995,8 +1995,7 @@ def get_linked_ids_for_representations(project_name, repre_ids, dbcon=None, # Make sure is installed dbcon.install() - if dbcon.Session["AVALON_PROJECT"] != project_name: - dbcon.Session["AVALON_PROJECT"] = project_name + dbcon.Session["AVALON_PROJECT"] = project_name if not isinstance(repre_ids, list): repre_ids = [repre_ids] From bd61eb99d4b88d640785ea77c10b4a1a5657b279 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Fri, 1 Apr 2022 17:56:28 +0200 Subject: [PATCH 126/357] fix support for renderman in maya --- openpype/hosts/maya/api/lib_renderproducts.py | 8 ++--- .../maya/plugins/create/create_render.py | 8 +++-- .../publish/validate_rendersettings.py | 3 +- .../plugins/publish/submit_maya_deadline.py | 18 +++++++++++ .../defaults/system_settings/tools.json | 31 ++++++++++++++++++- 5 files changed, 60 insertions(+), 8 deletions(-) diff --git a/openpype/hosts/maya/api/lib_renderproducts.py b/openpype/hosts/maya/api/lib_renderproducts.py index 0c34998874..8b282094db 100644 --- a/openpype/hosts/maya/api/lib_renderproducts.py +++ b/openpype/hosts/maya/api/lib_renderproducts.py @@ -1069,7 +1069,7 @@ class RenderProductsRenderman(ARenderProducts): default_ext = "exr" displays = cmds.listConnections("rmanGlobals.displays") for aov in displays: - enabled = self._get_attr(aov, "enabled") + enabled = self._get_attr(aov, "enable") if not enabled: continue @@ -1085,7 +1085,7 @@ class RenderProductsRenderman(ARenderProducts): return products - def get_files(self, product, camera): + def get_files(self, product): """Get expected files. In renderman we hack it with prepending path. This path would @@ -1094,13 +1094,13 @@ class RenderProductsRenderman(ARenderProducts): to mess around with this settings anyway and it is enforced in render settings validator. 
""" - files = super(RenderProductsRenderman, self).get_files(product, camera) + files = super(RenderProductsRenderman, self).get_files(product) layer_data = self.layer_data new_files = [] for file in files: new_file = "{}/{}/{}".format( - layer_data["sceneName"], layer_data["layerName"], file + layer_data.sceneName, layer_data.layerName, file ) new_files.append(new_file) diff --git a/openpype/hosts/maya/plugins/create/create_render.py b/openpype/hosts/maya/plugins/create/create_render.py index 9002ae3876..4d3e6dc9f5 100644 --- a/openpype/hosts/maya/plugins/create/create_render.py +++ b/openpype/hosts/maya/plugins/create/create_render.py @@ -76,7 +76,7 @@ class CreateRender(plugin.Creator): 'mentalray': 'defaultRenderGlobals.imageFilePrefix', 'vray': 'vraySettings.fileNamePrefix', 'arnold': 'defaultRenderGlobals.imageFilePrefix', - 'renderman': 'defaultRenderGlobals.imageFilePrefix', + 'renderman': 'rmanGlobals.imageFileFormat', 'redshift': 'defaultRenderGlobals.imageFilePrefix' } @@ -84,7 +84,7 @@ class CreateRender(plugin.Creator): 'mentalray': 'maya///{aov_separator}', # noqa 'vray': 'maya///', 'arnold': 'maya///{aov_separator}', # noqa - 'renderman': 'maya///{aov_separator}', + 'renderman': '_..', # this needs `imageOutputDir` set separately 'redshift': 'maya///' # noqa } @@ -463,6 +463,10 @@ class CreateRender(plugin.Creator): self._set_global_output_settings() + if renderer == "renderman": + cmds.setAttr("rmanGlobals.imageOutputDir", + "/maya//", type="string") + def _set_vray_settings(self, asset): # type: (dict) -> None """Sets important settings for Vray.""" diff --git a/openpype/hosts/maya/plugins/publish/validate_rendersettings.py b/openpype/hosts/maya/plugins/publish/validate_rendersettings.py index e24e88cab7..966ebac95a 100644 --- a/openpype/hosts/maya/plugins/publish/validate_rendersettings.py +++ b/openpype/hosts/maya/plugins/publish/validate_rendersettings.py @@ -121,7 +121,8 @@ class ValidateRenderSettings(pyblish.api.InstancePlugin): cls.log.error("Animation needs to be enabled. Use the same " "frame for start and end to render single frame") - if not prefix.lower().startswith("maya/"): + if not prefix.lower().startswith("maya/") and \ + renderer != "renderman": invalid = True cls.log.error("Wrong image prefix [ {} ] - " "doesn't start with: 'maya/'".format(prefix)) diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index 15a6f8d828..498397b81b 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -215,6 +215,24 @@ def get_renderer_variables(renderlayer, root): filename_0 = os.path.normpath(os.path.join(root, filename_0)) elif renderer == "renderman": prefix_attr = "rmanGlobals.imageFileFormat" + # NOTE: This is guessing extensions from renderman display types. + # Some of them are just framebuffers, d_texture format can be + # set in display setting. We set those now to None, but it + # should be handled more gracefully. 
+ display_types = { + "d_deepexr": "exr", + "d_it": None, + "d_null": None, + "d_openexr": "exr", + "d_png": "png", + "d_pointcloud": "ptc", + "d_targa": "tga", + "d_texture": None, + "d_tiff": "tif" + } + extension = display_types.get( + cmds.listConnections("rmanDefaultDisplay.displayType")[0] + ) elif renderer == "redshift": # mapping redshift extension dropdown values to strings ext_mapping = ["iff", "exr", "tif", "png", "tga", "jpg"] diff --git a/openpype/settings/defaults/system_settings/tools.json b/openpype/settings/defaults/system_settings/tools.json index 9e08465195..49c00bec7d 100644 --- a/openpype/settings/defaults/system_settings/tools.json +++ b/openpype/settings/defaults/system_settings/tools.json @@ -52,10 +52,39 @@ "environment": {}, "variants": {} }, + "renderman": { + "environment": {}, + "variants": { + "24-3-maya": { + "host_names": [ + "maya" + ], + "app_variants": [ + "maya/2022" + ], + "environment": { + "RFMTREE": { + "windows": "C:\\Program Files\\Pixar\\RenderManForMaya-24.3", + "darwin": "/Applications/Pixar/RenderManForMaya-24.3", + "linux": "/opt/pixar/RenderManForMaya-24.3" + }, + "RMANTREE": { + "windows": "C:\\Program Files\\Pixar\\RenderManProServer-24.3", + "darwin": "/Applications/Pixar/RenderManProServer-24.3", + "linux": "/opt/pixar/RenderManProServer-24.3" + } + } + }, + "__dynamic_keys_labels__": { + "24-3-maya": "24.3 RFM" + } + } + }, "__dynamic_keys_labels__": { "mtoa": "Autodesk Arnold", "vray": "Chaos Group Vray", - "yeti": "Pergrine Labs Yeti" + "yeti": "Pergrine Labs Yeti", + "renderman": "Pixar Renderman" } } } \ No newline at end of file From 80ee8c523ad20df67ddfd763933b47fc4e6a3b0d Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 1 Apr 2022 18:07:47 +0200 Subject: [PATCH 127/357] OP-2766 - clean up logging --- openpype/hosts/photoshop/plugins/create/workfile_creator.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/openpype/hosts/photoshop/plugins/create/workfile_creator.py b/openpype/hosts/photoshop/plugins/create/workfile_creator.py index 2a2fda3cc4..d66a05cad7 100644 --- a/openpype/hosts/photoshop/plugins/create/workfile_creator.py +++ b/openpype/hosts/photoshop/plugins/create/workfile_creator.py @@ -15,7 +15,6 @@ class PSWorkfileCreator(AutoCreator): return [] def collect_instances(self): - print("coll::{}".format(api.list_instances())) for instance_data in api.list_instances(): creator_id = instance_data.get("creator_identifier") if creator_id == self.identifier: @@ -30,7 +29,6 @@ class PSWorkfileCreator(AutoCreator): pass def create(self, options=None): - print("create") existing_instance = None for instance in self.create_context.instances: if instance.family == self.family: From 9efa30d7569f0025cdd8a2d1f9a970edfdbb1aad Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 1 Apr 2022 18:09:38 +0200 Subject: [PATCH 128/357] OP-2766 - revert unwanted commit --- .../aftereffects/plugins/publish/collect_workfile.py | 8 +------- 1 file changed, 1 insertion(+), 7 deletions(-) diff --git a/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py b/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py index 1983851028..c1c2be4855 100644 --- a/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py +++ b/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py @@ -38,13 +38,7 @@ class CollectWorkfile(pyblish.api.ContextPlugin): # workfile instance family = "workfile" - subset = get_subset_name( - family, - "", - task, - context.data["assetEntity"]["_id"], - host_name="photoshop" - ) + subset = family + 
task.capitalize()
 
         # Create instance
         instance = context.create_instance(subset)

From d92ccf8c2ee97e38d236cd20764f9a3432a3e1a3 Mon Sep 17 00:00:00 2001
From: Petr Kalis
Date: Fri, 1 Apr 2022 18:11:26 +0200
Subject: [PATCH 129/357] OP-2766 - cleanup logging

---
 openpype/hosts/photoshop/plugins/publish/extract_image.py | 2 --
 1 file changed, 2 deletions(-)

diff --git a/openpype/hosts/photoshop/plugins/publish/extract_image.py b/openpype/hosts/photoshop/plugins/publish/extract_image.py
index 75e6323da7..a133e33409 100644
--- a/openpype/hosts/photoshop/plugins/publish/extract_image.py
+++ b/openpype/hosts/photoshop/plugins/publish/extract_image.py
@@ -16,8 +16,6 @@ class ExtractImage(openpype.api.Extractor):
     formats = ["png", "jpg"]
 
     def process(self, instance):
-        print("PPPPPP")
-        self.log.info("fdfdsfdfs")
         staging_dir = self.staging_dir(instance)
         self.log.info("Outputting image to {}".format(staging_dir))
 
From b16b1ee5c48df8438cbe716561df437f941e24c1 Mon Sep 17 00:00:00 2001
From: Petr Kalis
Date: Tue, 5 Apr 2022 16:39:51 +0200
Subject: [PATCH 130/357] OP-2766 - fix broken merge

---
 openpype/hosts/photoshop/api/pipeline.py | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/openpype/hosts/photoshop/api/pipeline.py b/openpype/hosts/photoshop/api/pipeline.py
index 54db09be2d..2e2717d420 100644
--- a/openpype/hosts/photoshop/api/pipeline.py
+++ b/openpype/hosts/photoshop/api/pipeline.py
@@ -78,8 +78,7 @@ def install():
     pyblish.api.register_plugin_path(PUBLISH_PATH)
     register_loader_plugin_path(LOAD_PATH)
-    avalon.api.register_plugin_path(LegacyCreator, CREATE_PATH)
-    avalon.api.register_plugin_path(BaseCreator, CREATE_PATH)
+    register_creator_plugin_path(CREATE_PATH)
     log.info(PUBLISH_PATH)
 
     pyblish.api.register_callback(

From 43a6863dc534ab514a91a9ade561c9c82e87f277 Mon Sep 17 00:00:00 2001
From: Petr Kalis
Date: Tue, 5 Apr 2022 16:40:16 +0200
Subject: [PATCH 131/357] OP-2766 - added documentation and resources for New Publisher

---
 website/docs/artist_hosts_photoshop.md        | 64 ++++++++++++++++++
 ...rtist_photoshop_new_publisher_instance.png | Bin 0 -> 21366 bytes
 ...otoshop_new_publisher_instance_created.png | Bin 0 -> 27811 bytes
 ...photoshop_new_publisher_publish_failed.png | Bin 0 -> 27081 bytes
 ...rtist_photoshop_new_publisher_workfile.png | Bin 0 -> 22231 bytes
 .../docs/assets/experimental_tools_menu.png   | Bin 0 -> 9307 bytes
 .../assets/experimental_tools_settings.png    | Bin 0 -> 8543 bytes
 7 files changed, 64 insertions(+)
 create mode 100644 website/docs/assets/artist_photoshop_new_publisher_instance.png
 create mode 100644 website/docs/assets/artist_photoshop_new_publisher_instance_created.png
 create mode 100644 website/docs/assets/artist_photoshop_new_publisher_publish_failed.png
 create mode 100644 website/docs/assets/artist_photoshop_new_publisher_workfile.png
 create mode 100644 website/docs/assets/experimental_tools_menu.png
 create mode 100644 website/docs/assets/experimental_tools_settings.png

diff --git a/website/docs/artist_hosts_photoshop.md b/website/docs/artist_hosts_photoshop.md
index a140170c49..36670054ee 100644
--- a/website/docs/artist_hosts_photoshop.md
+++ b/website/docs/artist_hosts_photoshop.md
@@ -111,3 +111,67 @@ You can switch to a previous version of the image or update to the latest.
 
 ![Loader](assets/photoshop_manage_switch.gif)
 ![Loader](assets/photoshop_manage_update.gif)
+
+
+### New Publisher
+
+All previous screenshots came from the regular [pyblish](https://pyblish.com/) process, but there is also a different UI available. This process extends the existing implementation and adds new functionality.
+
+To test this in Photoshop, the artist first needs to enable the experimental `New publisher` in Settings (Tray > Settings > Experimental tools).
+![Settings](assets/experimental_tools_settings.png)
+
+A new dialog opens after clicking the `Experimental tools` button in the OpenPype extension menu.
+![Menu](assets/experimental_tools_menu.png)
+
+After you click this button, the following dialog will show up.
+
+![Menu](assets/artist_photoshop_new_publisher_workfile.png)
+
+You can see the first instance, called `workfileYourTaskName`. (The name depends on the studio naming convention for Photoshop workfiles.) This instance is so called "automatic":
+it was created without any action by the artist. You shouldn't delete this instance as it might hold necessary values for future publishing, but you can choose to skip it
+from publishing (by toggling the pill button inside the rectangle denoting the instance).
+
+The New publisher allows publishing into a different context: just click on the workfile instance, update `Variant`, `Asset` or `Task` in the form in the middle and don't forget to click the 'Confirm' button.
+
+Similarly to the old publishing approach, you need to create instances for everything you want to publish. Start by clicking the '+' sign in the bottom left corner.
+
+![Instance creator](assets/artist_photoshop_new_publisher_instance.png)
+
+In this dialog you can select the family for the published layer or group. Currently only 'image' is implemented.
+
+On the right-hand side you can see the creator attributes:
+- `Create only for selected` - mimics the `Use selected` option of the regular publish
+- `Create separate instance for each selected` - creates a separate instance for each selected layer when multiple layers are selected
+
+![Instance created](assets/artist_photoshop_new_publisher_instance_created.png)
+
+Here you can see a newly created instance of the image family. (The name depends on the studio naming convention for the image family.) You can disable the instance from publishing in the same fashion as the workfile instance.
+You can also delete an instance by selecting it and clicking the trashcan icon (next to the plus button in the bottom left).
+
+Buttons on the bottom right are for:
+- `Refresh publishing` - resets the publishing process to its starting position - useful if a previous publish failed or you changed the publish configuration
+- `Stop/pause publishing` - pauses the publishing process at any time
+- `Validate` - runs only the collecting and validating phases (nothing will be published yet)
+- `Publish` - the standard way to kick off the full publishing process
+
+In the unfortunate case of an error during publishing, you will receive this kind of error dialog.
+
+![Publish failed](assets/artist_photoshop_new_publisher_publish_failed.png)
+
+In this case the issue is that two or more instances are being published with the same subset name ('imageMaing'). If the error is recoverable by the artist, you should
+see helpful information in the `How to repair?` section or fix it automatically by clicking the 'Wrench' button on the right, if present.
+
+If you would like to ask an admin or support for help, you can use any of the three buttons on the bottom left:
+- `Copy report` - copies the full publishing log to the clipboard
+- `Export and save report` - saves the log into a file for sending via mail or any communication tool
+- `Show details` - switches to a more detailed list of published instances and plugins. Similar to the old pyblish list.
+
+If you are able to fix the workfile yourself, use the first button on the right to reset the UI to its initial state before publishing. (Click the `Publish` button to start again.)
+
+The new publishing process should be backward compatible, e.g. if you have a workfile with instances created with the previous publishing approach, they will be translated automatically and
+can be used right away.
+
+If you create instances in the New publisher, you cannot use them in the old approach though!
+
+If you hit unexpected behaviour with old instances, contact support first; then you can try some steps to recover your publish: delete the instances in the New publisher UI, or try the `Subset manager` in the extension menu.
+The nuclear option is to purge the workfile metadata in `File > File Info > Origin > Headline`. This is only for the most determined daredevils though!

diff --git a/website/docs/assets/artist_photoshop_new_publisher_instance.png b/website/docs/assets/artist_photoshop_new_publisher_instance.png
new file mode 100644
index 0000000000000000000000000000000000000000..723a032c94488abf6ee60e11ab86b178e1cd5ade
GIT binary patch
literal 21366
[base85-encoded binary patch data for the new PNG screenshot assets omitted]
z{Ur*}K|ncS`1V^O{IYUqqy1WI_eaC96OMNI(&48K4hg4eI=MRaE{^sqYTPNgSJyz`1SEHx2xD^sQ`q45(gclU|C5KkZ zMh>k$*~Lk*xsL4pC7Q+{&5e!A0K*7-z3pTD_Q9g$#$GiW>y=RB>f%~IdH~sBdbRJ> zO7_VUj3@I+7Iv98WLI3}Xd}h&%!U6L62rlz1diELlGl` za~5Q~sw|S5vH=-0G_qlORM@cR9H~>BZ!V~P?Ye5Wjm;n|UQklH7 zbvu*LF(Puy9_z1)br0}$3MQoqv)vQRYV)DQdm$o2Cn$2mu9a@5+}A39nDGZG!6_B` z>CG|enQVGaS@n#YXLowx_&IaYo}Sv>!v!Z9tGNiJ6{8}5FJ4D~FFlRdtsn#D+5xCY z_ji^-Bz67>9ep33SiX2r<%4(nE~0i>gj~+;c!etS*@tsliAD>d$$_um?g|*@>zG=7 z5U1u*#WG6&hE4(-IEHg;n(1P;N*ZzwUVDTm?kXMc|y|ZmH9FOfcLXD0qT5u7n`?##}2_Z zd+Oicg_t)oJ+-_8;jCG??dnY4Hk;nhkC(sQUUPM+cb8c}j(U~irJ{F*)E?bhAR}`@ zoe8)7#`*Y~lwMgYVDpd*oNo_$$z<1W^St0Ft4KUA(uI3MieOS!S(mE>1R}| z7QXzTI?kVGFtLcFzV{>uwOltvMnSkC(rtaOn9hP`SebBBsVSS_SnB=2`>=yWILnDk zZ;1VEwEO{m=AG;6NWb8l{;XF^vFt!=y^AX*fDi~~qR&yCH4;MQLq|p~%FFc*XMf%= z5Lsr+)qRrM#F|I-6m#1*&aI+)gEz+tV)KeZG%KsiU+I1WaVjG`z|o}>*$XTEh1?$c zpYNH=#c~TDA0Gg*Okd$p8P)V+pG50=1G*|WF8&9?zA2)xn)&MIQ6e|5@SR5RPh&oe zwLrVZ=TQJZo$@LFDcA6q;=mH%Q2RH+BOpuQ=3jUOB zz<$dzQm-^TY@XNz8Fl7#yHf|Hf?+SltEd{TXF7oNi3Z_T?y4VYdIkjCl`A1(n9dp%QK0QoyLcBRdBbSDj+m31tBtvDJ{io!ysI5(hNx(vV8rjT5YAcmiIlef+kv;mHer8L@<7zV=JUceB zXeCPN?}!^EV;YY=0|Q&+5$b$Q(As1yD)jvM^KM#p_tn(Us2MY25JARWD*%fU_5^L? zYiBrVQuu5v?zXlD@Vip>AK42ye?9gs%`tH1hDT-ninIL?dMp@{m){+bokRmxUU(Q=%6Dj0`D18A^rGMl}0jZ-cO};2@NI*VE9y@9wvA1^Nt$_(0OsAj9^tF`lEDIX!LHGh*jkS zs%}{lDHrNckqvjPV+yLux=>gRb&%G2fX!a*8{P66`%4GRs{k6+2zfP1z|{tXU|nSR z#ZVI@lUEV2SLnlMSEm!q66iWtKCn8XJiH<8Pagdm+IPVOnVs(Y0{g_>q8-uBZ0JMe zaHL1RTLA8{FaAk1UV5Nn>|>q!p-!N$>d284=O?h;JxGW0E_5kt`z64e3$L`a=7h$ z(PmClEVzG)sPe!UGd*j=e6CQl7#9FtiS%IgNJHjddil>Ju>0cARNjQvOz7fH3%GQD zwJ);fcH~eQrhEX>6a3lhMnzcQQge{KgI$H>;#ge>tONKe^T3u#sFaf4A6u%s%D!BL4=vDN(La5Wm3M%Lt0G6ul-VO4 zIqy!_`wla|UYElBEyi@tMP2r)}dnh1i}Hk-m}YQ_qPbDhHkBcgrl(XM+KQ z7od%bzm?Jv0R02M;97WG5(cj&IW`(J#*a}3t9*6TkmEj_zrbrt!d5~x~v z{p-sP=nYgL1;4+)KO^D)v~|$dQA5?PVKXzk#Y?Ai2jd1a2FnIDkN*4Mqz61e?*u9y zP_7Yw)jYtd@voTJ?>znJNimgUvh|ovYP6=N$E=!qunmI#r#gT>u(2qa_S`MZhQOiGJO?i$Q5nA&Rn`>Bg*7?_HQ2%BZ@BMW*1%h;Ku}tu=K0e zFsQsTRdEv;C3$Ex28e z=H}<0k^d8Y{4BG-`-A3QFk>#R|0JesQHaEm{u4j}rG&p-krVE0#3U8O=zQ|1_^HzprVE)ObkPSI(utDAqm_`RIdf@dFrpme9Y3%`k; zksi7dT>NO$mhBars%u!$K==-Z9eD43PtLCCny@nbW1L3Z#8&$KkkAu~gq<`kmr_^r z&5CkzPVAcMC8hDALiTrRZRYjwRpLjp#fWkqP~R~_c@Fjp8iFTeYE$T|P@PDg)w1-` z(~|pli<)IUZR>?+^Di(;0|=O``w`4c1Y6~nCRtiPEjsx{anhfSO?CBh)!uqMAs84YvgaoGM(%aTEa5k%hD(cN{IMv0B#|XcvgfVLU zY1Qo&WRu!1u-oIj>2Xfrj&PoM<&yl&0G~V}3%hk+e^+gq*r9-LMTh!i`t#3xq`X09|tflQ@u?}*IRFF~=(r4|*aU*@4fbcFPk+ z_Z((1cy4Tf-UTGVx(`KQ8P8H)2-@@Sk6hRL1@7n$04!mFm=KeoLEc|=pZ5n~t*@pyb*bx^vJVSLvEYMG8o;3+#JbNG203V&%U(mqis zsz(j|qiDsDxjXx;8#WFlQBl?9&o09C-4i*g!aW%nrja5#yBPWsm6G~AsZ(j!y$&BO zjFoCXqOGcEMv#@XD)iP??|>KyQ=Q{Yj=CqJUDJ>}cCif6xQ9`MtYXfM)uv_C!@rjo z)s4HB2GxWI|A)vHS3a@7tHIS)<3LvEy6U^&cHuU1_;hCimVcL870fm-5VX4{Cnh2a#8epCgq| zL-P?L?2b}Lsw(a(W29L1%^S?=z5uF+SHUARt(xZ3hpWkfo?#SQ9eb}qL$`Hl&h#1{ zH9?9hc&4f@%S?#Uwzkwc(cJ#>C%loGXAg%E3q=? 
zot#vsp7cDm`jt%&x2`@DgLbKREpV=%tlr!Dw1R>d`UAf?gqPQf?fPKAbRsMc8H$GS zK57f(-+#@*ZGBj`N&3RbP8|fSd}^3i(A|d8N*Yx(Z9FVgQ?OvWo{8w^>HjJzki()B zq6FZGbp-}e#3?9uJSs@!HI^FPv^?%tbroAi9m%XJB#-1l>^y7MC?oT2kw{3<1@eR^ zV}RvcHfa2?F-S*~fC=;N7q!g9)f1wC<5t0_-Sg!-#B}od>%Tdgqxk0O_P5{`MifUaMXEqpnWYi zb3+#f%^&wpc{Rnx$IvaohIu5Xp+O>2>a-{1f{@a^htiJ(`ICHnTIHv0;zX_&n$c(V zm^X&!egH?4uGwvTa!SVD>qFQfGtW@`sp1~@lA+ODA*}sBjO;QEvn?)H1gI4AnPQX{ z0flGLl>dUW-NM+9*>mbTeA8O<3T$1?aPr&oX4b{YGcisD;6lIYZtSRYcDKTlzs`hk$HcTq>c@kR`sqGQRZ)LHD|vKrWA`)i2jWR zsM#Fq2bwN5Qp94>#Atcv474~ZWg9Yx>3PsK3}Y=%4*L*!pdk9- z2D*<+N!`3J9GQ3X(YDHxH~qR-?ZN4wyGGvf#N@AO+|={C)**T;t1ivF$04B9gLIoe z^ikYJ$pfW*`E5JlX_nt%-~~)Xgn1MgzIKONmY}Yj5H(OX4w-e6Wg8X8?|n-7r1W^@ z@IzO5Xjfx^Qzc@ql`vM1T|QK!6E=MA&VZSRr&CbPRep^?hr7FK`$;Gm-$>BP>YFJ+ zR(;4aRqCGT3GiU2Ue~HHllt=m%t#e?F_UPr(7gPNpWnM&%Ne0VCEf&0L=J7e-WF|O zYx^dM{^rpw^D=0_Wc22ufh=lHhfl*5$#YD%XjjmEkHQB)Yx#uEjF{cNQT5s>5oO{%b=iN8RQbiTY4$<|$N4Z#!{xNyZUF%jZXe9-!w)G0I$?eFg zMJV$I2UiL5qp^F&c}WFb;zYw^hu+m!T-#@+us5jI2D;IFw|Zk{)_mm+b+&V3cy? zX5<&eBtF+L=N*`=E=e~47!9J2r&*8iRc}}Mzo)yu>LW}vW`5rvtsyj~tPi1CQ-jm# zo7(w&Z+E4Xnb?P7I-M1eV;8nE|6!XD{Vd<}koKm3%;4bOz<&zd>EuVznEkSZq~Xy? zMWoF!nm`@XFoJ`irAq$7&==%>)!c$_Y~|Nd=%AJ;!dp0si>EUJRe>wV&`-yv$2JH= ze$2_&$ff?y&f8#nFA}3OLk7?@G?L*LWe;p@dh|RnB`M<&P`HAB*h9=d8EsrCMw6_! z8Qiozc(=@bZ7&Z)j{;_j0i*v;^87Z{0+*t^&`|Z0AK0AlQVJe*ohrPnsC}XZmDxsV zo!Ko6Keb)_FK}sH(bdSmeEY$F0mey`9smFU literal 0 HcmV?d00001 diff --git a/website/docs/assets/artist_photoshop_new_publisher_publish_failed.png b/website/docs/assets/artist_photoshop_new_publisher_publish_failed.png new file mode 100644 index 0000000000000000000000000000000000000000..e34497b77da37b31ebbf26882242ed73040eba64 GIT binary patch literal 27081 zcmcG#cU05cwl<90z4dq$P}x|hTR~7-M0$5`6r`*4j#TL)osh%|N{dPf9TfqokrrA) zL`tMa0wjb0kzPVT5=clw!W;LwcYNoL@xJ5U@qXX;2P6D4ves{{HP@Q+na_OI%iGqb zg8vlzr+|QfpxLb(cLf9vFAE6#0{q7j{x@M;sY3jZLy>n){}QMhkyztD{2K75)t>?a z_0Z${_kQC)AA4}iIZ{ACsQc&NAx!v3UjczY53?J8+QoTLX9No+qd@@PmYja6zCyIt zOXv6y(U3K{v%mDnfBFse>!&Mg3N1mD-@@u{J$?{s&TRoX=RZ95)+_SytVhX5*Q`O~ zR(sd3if0vXCLBK6|9;D|A@%af*8yKQA^szUGmRZ95z^O)i`+idl|r2WFq$=7Xs{9q zM&||UZG7rJBBQFE%RS5=W60sq(9kjnnT>BTU*tdEeuenced*^vx#wAE{^#{Wzw+lG zaQT(j&-wm#dgzdVz}xFjbodXyf1GnBLRd7#ZKyC4BVvO-2SBNx0|<}2=sMqV>2;^_ z5z-MK=FF7$Ca(h%0Vm0Ow5)8hs`)J+`KzB;ZaG-2h**KL7tY$~OQZK$^!n&v+V?c~ zi~e$5iT*v`>rNf+5H=yr9g#Mt`|mnt4oaF>N~{nDYwHG1!W?K`v{+`=!sXtr#7|PbbocGw*ICb?t9Sv(3&7l{ld< ziFXzsp?MO+x3uRzF`N!k14@|{HcMUl^Vy&y^GjUMm%`CfXSQZm1X-tp;&w(_dBMUf zu@{AM4?61iR=IRwCUvxx&W7K-8K`;?dlr)=s&ygI9k}qQYPUM~EIGZPbLWA?-AIFV z<)Z=uUZh)x!%BZl4MifF)#Jzsq|gXP9mSMKhJ%tbVkan|MBhc1^k*z+v9L*($k?xri*a z*utewB>BmACJQLpE6DQN*=h2AHIU zddPlaA_QBf-RhRtq;)Xj+0zxCcwsZFO(=W4*4S|C`$Lez)=G4NJvBO;a)*^)G2fvD zG7R%9z?W$uBwp3opq6CUo90SgJEdc%N2T5SefZl_$-7 zXfyspFIu*?6^Yb*A?-h^Fbw&Her4A1B7=)MC0}klW8s4>RmT||gA(f}SXcLA<-{ZI z|09u>FE2bIZ6sS&p#B6JSlx{QkkXFBp;u9a=8E~}n& z&Ga&S^^DdfTPVEo+R{vjto!N%O(|@rcT?GJ*Hc*teMTpnZ4Pt9OSYV-Hx5~bm7%vo@>%C)E~i} zQ3t&3kzQ8A*owja*V|s8#Y)y4kDk5t+m(G3@u2N!&Lt}22cb%$lt1JF#{F+e$x;4r zz7-H^OCzl&BAFEr_Y#&qG5W__F^PTGPi*L(d zyjUl5tkPJ%Ku0lYRSLZ#4H{zk1_oW`;N6eS$0_aZ^eVNZJ-!^HjI*k#msWz_v$?kPH?@#0mZ^TIIfNX~PH@%;CwWgh2} zMO}bHULAf=dKR-a zV)RFGP}X9_+rP&r+ibm~DLRL&9gQA>gBQD67b>{he+^lB@2iYz`VeFvZArtVc-1Vm zi=FgIwTQ89qtrGyNy9MeM@_EtLDR;i(mEgTS3N~&y0y(;_=1@AzHVWoJcDcA;Dq@G z*{`r=S)urV56L&6$gtSJ^JSdumPW_4?bRXzhH%l{y6x5W_`7QcT5+Hyw)DeWd{<{H zv+lx;*1!i^__>xF%&on6G_X<*g|?IJ#Q6cm(o(mo<^0;MS*n@K78AK*pkcF*Uw`~R?EAkqn##~;w|4C_TAPM*Pf60D=`#$V6_Q4~j%77J@{5&X z&=rrkd%?&Znio?4stlq}MO8bpMJZQa$Kd$#Q!^9k!n_{3`x^nHA0hiRy;#hcC#T|E 
z+%ggOh2NK#yxN=>c)g-7{j7%szb~K;nAJny-p7`tK877NR~x%-D8oWW+!9lb;5{jH3)mltI5w`rT>6-MQ|e&KC36B%^8~X8=>Ij`CIn0wcnSn#0Oq~9AC!r zHi&nHkW<^L zeg-hKbPpS=9-NU^6G0aox`H%BzbjaBC zVzTwYjGxIy`*%Wv-?icBk(7*8=&(4QV<|-CqjJ3u>gG0IuP5loU%Q#$-J+j`QRy~V z1|96TO~z3UQcR5?&inI537Hs^BwE+I2Zl2K5~Ch2mqic{h=QYy8_)J<5fg1*9!<>J zR3Lsu`0ACv2bU{%Avd5ResHAKu+@V0`L?!AcML6@)w=z-b~_c)q1dq*uk%9qIo;jg z-0JI)jf3b>!zuzcIbXE7$r|O=Md4@PF@F+0s3qch^ zv3KmH1B808`0y(Rua0aq!Sq>LR7uDN9^3ob0)PoSD)l?3WF-LKwPSC3SRuQ$D4rlJU3u9wP^a#Lt2T8tM@=eEQS*)Pi@J8?lFr|&=buF&`2Or_p1;(%;R=>cTDZ2d zNX~$@2=&mh$a7DL#K|#_AHMm{?I>npsW)0n`YceIx18IyQtZ4HHUxcu&!`fX zI}qx*ehsw)H%fzTasTXG+~$~(jkotA!_!!%`oCA_99T^!t`HVFo5e43*8VtFi9I$Jo;ZZbBr901nf5)Q0U?s6T2gn*nqu*nBbacz7 z*_Y}9DiGJG{cFddC7zP8vuG7|NxYmUm*ZZ)%tizMj6S3o=_18zmCq zWB0m6vCfe`H5>iFnoLe}4%rD()45z#)6&$i!HCCXAPu8U9iwVGWL4bq#_tVJ)kp;j z2QuT!*q%j(#n~v$1QP-}r5vy4&RcUoMWo|&<(A}ZZEV>2zA?0XHpN|6s2wtc>V^{H zt?EhM#yf6N5s1PKjsEdQ+SF<~BNy!zwss|bAdJ5N=|ACRe-_w((=c(W>=*xp8$1oVHF$fpl%P|i1GFS+OUvZpdjC)YPPwt zMJ{|+)i&y^+Pln?MMm`$?gUb{(m9+r-<8yi-7pPX9S z=~MPe`W=>`CMEtYNV&A8f48o2zdTAxoSsQwh6edx-eG)F7Q1g3)7`E?+7xUWuWPm= zmGFZ(wQF%F7kdjhjqf(`h*QCDGSImK%-*p7Cr^Vl|q#XC2^L_TEvvba4V47`ZrXw;wbWIG-{B?Nk z1w&R~wnYw2em0M|nUKi%En-R3?Bk|cPM+0DhVChs(uRU+(lZjuzeBU}XAZ~+%#7FE>*}{E=G5gr8#K3HyF@K45_%y;%A8HXU0V;1L1BXL zO&Pc?B_}7O5YI%Ga#GYKVtcqPV1+slk2>S_je4={nftsOBve8N0`d3Yg69pE%`TH& zHdSqm1UHju5lNm@~OUrsdnh37P9capXXz+gLntgUh;OZsvkduY|Y zFBU9Z`P5V^z)YE%Iy>g_(G{)f;1LPjk$*?YeJM}RHF$5Tfi{e`eXI#TSX~$uiS#cU zbUMql6J!q8G5aP9V|WhEV4p~zn7hxHle^BgD%0N0p{iA+bn)n&xV7Bve6ef;f1WlW z;+Ys#cQ%?2Fc5kohKig&4_MUgf$v_(+*vbhpGR?wGMCeH)=(UKu-6sGdkulEfOov~ znF|o;St-zE4{#0Oh$mRbwa;#{;ox)rSM;r6sA5o~9m}Cx6vjLHd`;c!!E+!72LBX;PYD|uX`MkYght$L-(SgG!rm&5 z9N6mF=8(nLY=i1zSNgY)21uL&#ex&|PHeSN!(2m%Mj0;pfOh|3tEo>$f!)4srbQCc z2?v!Cd(|?3z}IlQ7?J4Ux*5!9-Ef`p`&piPG4~uFGrNg&X0TopcsG%)(N~LS zrp`PKE%W(G!a*`CYeP+TlsQf(4Cx`f4L4dL_h3xVhp8nY0np#h-0RgspLq(f0>fD^ zD0$FUp}k|HsJ$j9yLiEsPTnf2Igz{K%M7F)=i^;U&WDPQ+f7 ztKNK?2|?9!{A_iQV6+s7x^bq?#{+_j4E!zulB5SG!AsnuQH_)k(_!84aFX|iu3Tu` z@Nj+Lc3DKlOM{=AvIS<-zvs?L4w+-e0N}`G3Uh#hHwI$uR`iVVYrtw z1Ml=DFO?{<>W}l3jzDy-jVi-cp%kN2HLr~G?5(cnbd zme+|9C94i8P~6bu)&xtWyt-|)?Z-1Gr)cx#CG3>y66`n9^gtlF9GB6cjA)7BQ?lh> zo;_s^9tL3W?r$D+0#6T(J?JD|KQnNYzq`fZ$TvofO2?>x6=)e~w|_iGU)jKz6V>uP zmla_-u{ODX-nKR>ePd+glJi@O(=(cD%2;b{JVBaCwpEqr^y&>yHTxuq-~-&NEIIuw zt_CFT;RY|Gbm6%{OlntNhwOz8WwVvgz~ngO4#4O|1>hi&H*}994Sa}P{?N-$4A`xG=!zLQXc^XNDkNtx^THROO#d67<+ndhX~MVCRz6s3xUZwRLdl zMc3WSWcFT8+!}?P-t};6?=nzwzi-lK^WF9rEaWuS;754Bj7arQ*4olkMEGYi_PNz+ z@7zyX*K7a(D#QA38#PNFWZZnp3jO&C@i8C19~Ag4Nbe&kL+Wob`Nh9J{&$@6|7|kp z)e%GU1SxLrPq#(f0Hbp@;vv!IV0iR!j+$tnBU5I+5Nj1ubm$TFsh7~_Mcw$D)2wva zs}9yB9T!v#0!Sva7oRe2h~8nYO?wDjo#CgfmXA9YXpz{AGU5(lyVvRzhBv!QYKmo! 
z3X1!TxszW=FEcCn=azFNVol4Tr=Nvt;5W5}Clnm|?0ahHP_CHK{yiExif!)3ln>~| zK_<`De^(|cP4E9`Z=B~6tgyK{Wzj|~<%ke)BQ7coe#NnLSiq~{AIkzEB1Y4%ex>J% zMAGYV6VU3q$myMa#ra0#?uGCd7B+Q8s|iCisP-ilE0}wI8di>uQSyt@(MbMIsH1$n zH`h<+DO?G@pGU6p2zwqB8rhS(iw#J-jjoXyX}hqwzZ%hByD&dbi=;iQ3WLgwND}=^ zTu+r27=UI1GBw3)(MOV1e51_de%yqfQwt&21liQqAD5?rMAq)G(WDXFz=z<>--Qg8Z0Y z@WQ4bG)cZ4;3yM7EqCOxgx9u|clJ`LDI3 zV!I!-F=dxBW&tiv(4gJGO1?!3`J27G@06+Rfd%lcO@BfJz`A?gC{eC69~(B@w>Mul zxKPcXGeH^f{1a0%zM(;8xo^Ywd~C&^)yOoshd6-`K0vWM`(Qtvl9vFvyp72gyR8e{ zIW0n8xwgugjDI{?`?T_*COijLB(#K%iOcikT22=!MKY$5JN?>^?;WYLsS>@O`5SlF?kUj-)af4Rv9(q(MpL_V~ZyBkn zaFn4#*_i2pc1A(buav=&;?Jr>KESE5%Su)EQ|$?4mff5cEHFO15BOrL}< zYgvHLiEA!tRrlxUU3=s&b6B%3{hgbk$y2|KhzCfpbx*kd1dvf01$n@#ezQYjc6-qDB4h^n4dJLNb=rYlnk3`k`BAIK9y#6xmpPj8I$d4U`{oB zPHWggg~UlFRW!aIeVclUbMrOjkbv^Zp+lEPHriy8v!%Bg+I2x5Vk9+QlDboROR}1Q z2?tj!+oVBS@W48HTP3*BU9GPO`k+@x-I&o8OaWvYx1+tfU+>=^<~7ws6kh^o!NHtOSuP6D zKBf!mm`|I5mKz`<=UZEfuDNY`f@%hcI;Eu+F0o0|)is@0!y zTAj4VH_I@l<^z_H)?0&?fb*K(@n(B~5(ALAT}}8+;@Z;K$oWX{BfbOMc=hZd*JSH# zW~0<_^Lc@^5-*|a7jT2O-HVF?I}#II*9<*xZj>NQD3-*Arvh za3Qe)Z*bsioM&?)xJ$Ooji0U_W@8Nv)GY*!6w0nbb>k<6;pu2|fHMOKDTHzAom%G> zSW7J~KPHy?gB4)(&N%K_L&JvJd}T#v{1P)uLZ#72` zgLPN4&(=>^TUt6nzX%|F{&#e#PT$`|=?BdJj4%B-w}h+P!wb_~FTs@K1Hi}mA^HB| zHb!%~rRqNd^na=C{@W0(la$=|()!v8<2UV;=|TpM}eYd+}ii+rqS~bLR`a-j(JKx%Q9ctGzg% z0aqQ~ul)D%%1|q6u|y5Dmg>)jJ2%3U;D-f1XZU$Nxa!~3yu_3<`4^Q<{k1DGEXp&& zSjjb)d@15DB}&^L>c{v@^>(?efMMW(e_HO+_|*BN ziY3%*woSIqZR%7ttVDEXW@(L2T$Nw}pTGFD(_C$Rf?)jIF2++dmGeQgVdok%ABZg^A>2pc*|QU#p( zFgQ7RmMdA&H$Q6DK%dMdkBYbKgj}A%LL{`ypZDmz9|GbpE&U`=MppggO~wP?j4PpW zx$Ux{px|(D$Fa!VhEBVm2-GhSp6Gavkgh$4bOX3B`eBDI5d2$7$3yqC*w^b5gZ z853roiqUUP_6EOr8$^0Lm_kCN%uCn$zu~??%%+XDaCfzBfVi=R&PGgf#c{=iHjFZ* z42}aDoDw9oC2fzFHPlpw$n)2s=(D^PtA1qxk#IMM(uVaG{=y0DT0sZP>O+;0eQQZ_fkKK_zjFyb3xngn7o}q@xNog|P~qwLgm)&|ocO$x_EM zn{28eyO$#FVUspA$4mp5ziq$1xn}5rm*%2cdM6YKeeC_$Yu+mm9>4(Y*qvK6f*d3R_3b#zrdzv^`WprWbf*r~kE z9}ab~J09XQW=`*(cp28FPofnq387bV58rO;|L1a2L-$DC!nhOiD%z?B9pJtb(8BiF z1^vZ}0dU_{t|YClwf(Wtmp+vK8(o3j4-ALeSJ$Q6SBFDgH@6QT7x>-m>e2VXmABP= zx#$Y=ALUe7RR&fI*JF_m9*&vL0U-#}(R>m+Y99;7iGv*eqz#hw_w-_KcYg z9L+|PP0vMq?TecwnJMQ_L;6SzpIKmFQ5e^5I(Dcmsp0VL%f`gV$0!}Uh($_-8RxyQ zX>0(vqHggxO|&02w==kNTl$4+6CEYMlPkYhH@v4vt5j+W6- zu5VV+uLZ>xihso`9J}w{uM5lU`f!WHObW@9nQC!!5HUuaRjF(_n|J6g+KhWwEIbfl8aJxs1r+BuD`#1j76Qo zU2nR-Y5|1o)LBFCP}6yx`|lQ0cTL+@mSYtzd43oJ;6bQ*UA;Pucq>Es_lg;nFa^1g zdB4~g`7SxMLDOt?7<9IC?vho~DJlsFRPqFyY1X>9t5hiFs8qKUS-cc#D ziq}!GW&+OOQTx$MNkW3XoBOF7y8br!vpg|5JI2>ce?QnA3j%OBy`J{o#{OB+%PJ-L zW7caAmUqP)0rNVa1g$TgQQgRJi+(xEa1Krg)cl`9+B}9j_U=Sa0P3#hqQB!L!=Yzgg zVifcb_s+DtYSuqc22xhdTtvdwq3iX(Moz9}C;T$lZVlIRi?$vTTfaj|waV_g&kySd z-(ZUwx$dDCxCQdHEeJmmg^q_P;6mrAxftVV+fyTQlB^#U>gfPsk-(2|i1^imdXBM?VYo(>k zwP!<1>#CW&kJEmdVN$c6Y{Crb28;AQRBn%Ni1Mj_8N%s_&a2&Dg?7i6y6p+!RXEWJ z9YvN)vAEH)C)OqAb;A=bK}hgiE%Xu_>Xch`jGWpniET@U#HpRF}lpnzqW9b5#^A%ma*b zn`u*Aq`$^!a9dHO%gO-5Dp@H27Fw<8uJQh8OpWXbI8wWP%;#0+b$tn`F)0dL{+Ob; zV*&oeE35GU93G@lcrrCPp^sA}okYORIBo*^RqViY7Q8N!0g2wL1UryDLG#ekWr$pDfmY8`k`_~_sWGBdszX+Ws7LDqGbKHEROpV~!Fqu_ z$}zI%8@n!YVc=n#qvhN-#@Iw^aH_cpIa}tF5$xVjxvT4VC17rPbwkOjjNnnMR58-O zj-!mrWBb=(=sGn)XZwMIxB@xO=lFQ%nX^6FqJpYX8&PAsd8_TQz2^sWd`Ysk7LhPs zFz&_dVtoBuH~5Fb@(k6IPiB(A$#{o${%6jA`%hy!##(wr5(1absZ@wTGPiAw3XYXdd z$AR9AT%fy2)OTwqKOM9C$^^wL0j?XsN;4ur>^=*HN}tVaCIhKuppybI0#9_V;ywaM z6E)894oWtiUBd2U?d-%qt>J(|4|ZUXNqzW>;@~<-(I;cq053OF!>YB9#8%a$XRk%S zRUF)+q}va|Q>@IKK87VmTPFiyb#w!Ts1Nrln2gq?VqK%e8njlJ*?@R`=-nl1re|Zj z@5Rt5X{Ki;zLCZr>n~l?KqxXF=d6#h+u_d6x?!lhVt6?Y(mPP6lvIhLji|E%V*<{c6Fz8KC&ZS;pUEk0(OJjAC!`Ff%M 
zD`56NZt)|5#jm7=#8s~&BLoezN*-C@Upnv2Ys6KlG)n}b`H?=}@xz9Nj8Ytn}W5#5ZI zgHZk!c*A_ygxT-yHfm7TFPDjjF#D=f!-k{-M|$w^2+kn^nP$mx!+@)3!KT%>gyH`1 z2?Vp6@iL$!UKj0%d_JfGCHA*0vl=&{$H%Vz%sKS|^IzTpHX}=c#AKg4j=&gzAhku7 zj+bp2lVXUbPL-ltwBA=Mz7%-5VR2wj+&3A3Z@EW1)L44FVhku#%?J1y+L>SSUBFX^ zb2~5J0ngkD#NNAmkg+qtHRG!{lnk`m-|RYdhI!1;)QaSL8t89UVzV|G%vmW%I8qf4 z+tj>g$0;*<)&w7%Y8ho$w8LBV=kd|1lpR}(=v#z?d|z&|!fhZcFJafFWjDpvg6i_l zt;8gsWN6{-+_o9Wfp;o9Fg_TuKcGIY#jK{!u9`2)!(BxqOmG6!?7{n!JVCsXn${Hx z?n9Pu+srVvptKByO=ZI+IZy3}x$yqoYNDZE|HHbk1S8N)yD!syT!WuSP&>5ed5nz7 ztQ^=3xp2C~v%SQy;tnf%*0UCQ5R8x(*zo$-9u}v&POPaW7mXS?_9{oXOk5ZX{Fq*# zlCinj*s%5`SQDe_sPuBDGEkU9o8XQXC(5Ax;sbeAybSHq{tNIW0;~-6aHjeVfYnx> zgI=!M*dAE28(4=}wed<)I2k(1$CqZz9t7KgtAN}4t!wY^64#>UtO|U#8xYm=(xU$wc-!z#>xm3m?OiMJb*gvC1fxEepkvr33L`~<`IFFqf zEroMhm(jDF>Pl)!H4!Xu-JXw-KH#UDRF+E||2b`}3+vhEk=|D{0OPn*t#Ak*V8wZF z)jA69ey=loGU_YNq>ZsL?J4Z=ulUH2o{twmAV+6POXv9(G!8Eg8QXG*l(c;`!PBRvB- zHRP=6O_Y)ANZ)Xo^YA#jjWKglvGKbOLcYa$4Jwz7>b0tF0aqqaZD%2b#ics(t7jW2 z(qZ{Coxx$&Q>!>%BBhVFio4HIf0xHxY2}~wn7jN5*5JwK9N`6LQ_n6k&8B=Kcgj!{ z=a1EeF2~}6ef-)?BUzWs#7&h9!mrspu@7Kn-aUA!&DX-^YE3T`SUSRk*#WQRC`J=h zy==ry&Ie83Q1yp#kf0|^hXrg+_^!lhigS*xkfCG!h0UdwCbh;EpTso5Q-L(8hW>A= z9qK#Hk#lcauNf6b-~F%)jThYo2(C!ZTyg5y$E`k6?7%>h*gNO8{f^V$p(t86LQ+<@ z;2YOMn|Rdv%aZ-C;KuAi=ny59LtC_mu!iwy8Gjgz4EocJ7}@ppwa5Y=N$IlL^Tp|9 z#ki);)Rku3B~mMIxG8+?9KFdjPEW7jy_9htbr3U>zBwizaZvb!!KW@$GSOYXTDD}O zvu!kU#B{?n42WZEAfoEzrBds<2`$EAc;&{kd!ws2LhS8|kwN4AwRIW7T28z|+%ZIh zsX`gX5(X5^WJ3*Eq_swGzj*F?mS+{@^m68|XFeRVvn$`Vg=kG!nku&zfE8~iIzUXL z1W|@pN@W=Z`BH0MjLEdy_H5Otq1JE;^hD7lfS8P1>V1=v$<3bf_8F#T z=6Hs4O*&(yw;%$OU6%NM@~@G(kJjQG7)3T>Q3`yzQRaB&)kKp-^VQ!TQM0^+NQb%- zISx91{ekm|d>&c;c;l`QacRgLHC5|gCoC|T!^h`BKO^KPdVY=9=FiCa|9hl?l9(J>G&-5an)5Q{=Y=Zm zvV3_`d3G=zvwo!+lOnp(_x`5)=)^?!J}|6iwN1ahEK=Z+V)h&TikjJ)y74zQr`05eHOq3Gw|&BftF@lN=229bHMkmq@@%%z z=M0UpQ1fNRL`S6{n
    `u+FfieK(csR09(%#9Hb075(8VbCH z++AKZ`{EbGr-!@)9Uqp?M;kB6vK>87pJj}LH(!lhs^37`mIm8h{X2NO*|yJoRPo6s zR2w+}(At~MuaMw7H1WMz!fuY1BP?Mm^VKw3H=atfm|w3s3W>`LgeSV-pU)6- zl2YWUjd4T20DbHoHl6E{ZrBuQ8E-yG7TvHB1xD1Qj)>KfRfh%jq+K!pEV7s%5M420 zkW6q#e148x`PlOHP|-gVcCCWx7f9mX!=U&nrK8)s%sy@)|K3jG*&)dU_k;F=g-#`O zPBDQmV8)9`fGG3D%R--|@FDK&W?wd*8}tVgRxq)`%4+eus%2`$09Q@OUOlnD-EJLa zL_4)BYGe}Ct7_bz7&hd*KiArj7{l=$NUCY^j*?6sO5dbgEAM`QTYoticqq=7v+&gr zpBl}fs#qYzV(e1A?s01hAjuaqdJ*~Z z8?_8C;$QAAu;Qp%nrRBo&hVZ)@kc$)Ua8Pi%)3~bGVes>D?XhYH8t&PceEF~7hwQ& z&_{w4|1k5#Ama#dWY&Dl8qTfk^?anyJ=Te^{%NbFx-AXn3dSFs$_l)zBJ2nNl!81A zZy@J2cB7(tUYwgYch9%4i)!X#IUB2`N_(bP6X39w?iqQ~B&+IMhZnhBKt*hEVxr>)|qsHD%_Zx8w=yU6=1xePW z9)*mtI{%L=2JPN~;6g_BOyIfDGL>N$ajhl!h+vx>;#g$7TYT@5bDn|XtQsx(vE|(g z(K6j^z;9YjhN2fBcW8wnI!hwWR!Ps#_N0`p^x?{}KZFE6M_tH0yj$C*kOOZ=MaAqk z>`cv7tDJ8~#1X&wjh)WMm^&JJP_G5b$S&2c*Ge4BNP`M!1`Ebq>+By~&wT z`j=QMr_|`!bg5}yMSumo#ubtfv-ghj0tVK;j2#}SD<1I#bH4dx`dl>TV}ePkf6rAk zv`J3hTaHu_tRX!L3|$N0SJPg5nyVa}S|ff-%@l%5gAja@J_Yx;Z>c6XzQ;&-O`RM# zBoK4sUzbk*qcF)Yf69GU{W}gmO#AyHiopMny#Ie0!t@X$mm>VIf}@$1jxktCAmm!v zV5hfPlz;5M?g4z+VEE=&EnSO8ENSNRGsWqXgXL7Adk~coU#i}kCQ`rM0s9l$t*CVL@Y2bj_ZbeIt8C=ZRx3|*kyzy1z}+@|_XCgcmR)N| z+{b-u-6F{SVr_o^e{rh;{e}XKf%g1zVZG0R7zrKk1FrVn;a1lFXZkCA}e}KiA44-TPT3D zts%Ag%9{L$mC#_by~As}!xNePyNU_A_@P_MNFsvLo{QcYzYiErx1V8RRP9Sj#M)|B zI}CC>qkL$77u@k>st|WS19LPDp4*}Vv93)>b3muvlw87~&y!!Y^pcV{VI_nCHUqQ% zF2?+Wfuo|ra;*|YJBS97!?)uF~Xb@OIDSUPHn32UYfeg z0VpV6nz-wl7KGWsl;9Ax6P?W;uqDz=<*igT9HQtMb$lwJ8DZn-S+b*37TOdTv5Qa< zM}d-7ph^L85Xn#^rKB_L+U(PAif$9YcPllc^%CbA@gum@)6?0XhRolp+&ES`V6I`i zLX9Xne`N-cK}-Zj$wEFS5tOr4g25pkGpADhXEd7t>EYsJ@#DB?XTsUkGugpf4U+lQ zoUhbtERJhn5r$`VnZ!e^tYV29pjnSQe_xp3qjBy>=azbiv^iZ29?s}=Lh7hkGN4ls zp8=CF^w3A9{cggCmi4BaGgwRm4y?MbxI&rP!HEZ;`J5w!>yc7KX<=XnaV@AjRs>y6}HX-ExE_;$k3J$V8VZziiWh zezYdN5LsF}UD~P7@@{Qv&_WrUDU+@t0t#R~>z#ZC8RW&M3nIrG-UPDRT2HI%d?DzJ3S)HZ64|<%4pVeH?l9WTINiV zV?B7KNKDm^QE|~40C{9T=H)DdFCA@Am~alG9fE<*53Q8hvM;py``y(00A2CK;hApw zJID}M1nx{J!g%d)`yCGc&4%O3lAl$aUUP$o^5&RTjZG~=rP>e@0uR0XQi2Z`BPs@x@~{^kVWUOEAU!Q zXIR6gZ6i9JS8#QT(QKd1o>g!?4cP2&^)f>nA)fro!iX7w8&?)5dRKozGQdgnA4HQ_T zO9BIVL+}nG*rj)dW}DrRk{ClidVV%_)?GJB3Hu=Y=KKGsh-U%S@nmtPy!3sHUEZNf^#QU?b|N+6!>iR3~vbwBG!ICS09RNH#HrS;R@{IZ2 zH-9z3#=EGc03>(3QS?upfzkq#s+RO{bKZ#KOrL#d8XreKk_ec=*u#?i(%A*y-iLM? 
z5W+J$_0!5w3n*soFxa;rZ*Uv{s=q?c(4gSw$y--!tEG0QE*5FcVw-~Cl0M#?ZiM}I zan<-n=XeU0rH1}<1~+;=(1tT2F<{HzOg(sJqJo2IFCcgAXg~teq4#!aRF%Hmr$8dS zbQkj4BXW!y5C1FhBl)(??Z(~v@6+1l8agKiMhvt1?p=dvn6P>jU_D&AN-g(K)Q=}l z^&bHag3aB{K41fQB|awi7RMp~>qEH)k5Vf?aQzOg4gkd)rwyOB#FM5jn0|7sS6Z_th91w{AGwZiz=!m%pOs@m43K z=x3Gs@oK@?9n+j0RL*AOlC-QCzP2At&I}y_wwm)d%1zyHWJ%zJM@MZsGu+8ubgRa=w3j}@-pt_ord6fxj(t@gG=G_ z=XSO|SM=Q-)*MG^4GkiYH0?VUyzc9S)l87OE7X7BW57j90g{nVl2Py|D83n=NG|Hb z`|J-^U0pG#zv$_qTNWA_&Fk$Pw@nBO^k}D!asOalS2jA3iEwT3^tB27GRcO4kHN<} ziyGgTuFbs=nY0#hfsgh%{;&4FJgli}Z&!-WCz!W@fs8k&{?>c$zospTbgMMDNj6H3%Rn&rE4@Po5{ zbAzbidH!&Omp36U=5jI5A{>th*;Dh4;)@a`Eb^0N-E}uf^ZOb{4$K$c@T@iXeXHr$ zCmTpr$`Zs=cE`%`Gqk1$k5${Rbl>HDRc<&DT$=lf*9_7R9}}lSwb!ZW53iIicjF_b z$^&4T0imY2aLzRo4nNRycZ2%dELB-`FV_cOzg|}7h2Izm|Lf1~zRj*oca3-T($Sp< zFWUzk%?b~CCn8=sJsB@r556|-aS%G2?Yhr9t3%}XU0e0D=M@hOI4pYT{Igix^Qf>z zfEwe_kn*j+ddbK-^ZM9o%>+{O9BG1{t9oMt*ZeLKS-0c4_@ue!wAPVJ){cFq4!PlT z#>24g1+HaI+53~2kIq#-l^j(gC(zM7Cf>7U%O5>SJ)UQD@cEAbV4qIfHMGg{ci8d| z#?3#k-~RuDU?c3W1G6DMr zfWiDcd(TET4n@U%w#lh(glRsG4uzq#fwe+sA)Z}Pvn&FqC^u~`4$Q^gB_f~+um;@wKu>N8hvE7keT zx9CILHMa}xbSL9~RFn+*e!=}sD|x;|=^YzSPsJ2|Qv&S4Kg~1tB(3K+3f4@)ZC?(7 z&{4pahkcLX!G~`)jm@|zyFZR~nE}Ym|L`{ZZ~54EwU~K zd(g+oKssfMWy?xT7e%FMoJX&3{MpS35h4YSfVs4+2W+i>GzR z2LhEiBy$!<0+QSO8&qqq9j;SSFWC?(<1daYuenC0vhr;uA{&ZY4l)_27tfmEfJJ+Y z*IB@Q!&&uK%F!L~e+E)oZr9Wd#yCla*xu9+i*U{{RxL!)8%#iNzP5a5mz;%lEqYd# z)_a=&XHHLA1%s3RM|T1>{?l{yWefgN{KcoG!g(p;4^1Doah%`UG;{3% z;ed0}SmLt90`*vD8=q%RN}VTtd?Mqrmic$Bz2)Hfa<`b5WgUm`<5lZ|!Qs_A9t(s6 z=o^DiyLle@cWY?kIpZy_0kzY)mxuUBn4#pk4RZ~-ra83gtA`eyfTl1~OJy2{Q!_f0 zZstzJRTTNEX8>+Shfe0*lrlWl6h-TZF+@V2&l=L9Dw>c@qp7^YBUZe-Q`_0%Ef1Z0 z3@<+p1sS1}{rc8%wcOg-FL25c4V`KuUAD=)%p4c&k&LbaE|zY(&{j8oACLc*Sq7r4 z%SMf+toI6s&qPTG|a1}jWkMHl~!m4mpN1gDEbXBL-2zw(ebY|1{*aQ{w(Un zNa_y++7X&F_5jaXR8kRBPJ43R!KTY-XxWjxVBgk)&Pq^N35^ZBk@T?wC(9$m3~S>` zzxfBg)u%#Q=SXg<(eMa#OdMqE!|-42o?p>_6McBge$4;p0{qGQ=Y}CGV#z7VleccWh^`ra5RC?t*mU68HlOY|c zW@08BJw0~dyBum=Lb^#QsEM-R<<>x=LL;oNf5%zLgz+e-lzxBC{xB&;^6{}kcTnkj zYuDb%cfGvW{_~a8oww^w@#Yre^)IMpX82$$-Dy-~HzkKB%&ur^-$;D@bU0aH43M5I|G4?7jC~|$;YNf{3iJukyzD9%jL>cOv+)|VHHbZ(I zMYhr$cvp`4z3T>)gFU;U+ii$P0z=?VXXKyQEf9L zi?iyWkz?SvL^QiODlEbrmPWB^TnLs`NN8g#H)@jocg@^*){P! 
zA--!5FwJfX-csQiz1dxdv++pBUvih9#`!Z-=dG=)vTUL=@D-H(K86ZK6KyT22$(L* z@r93h-qs(}@G9gHU!3-SPU5#U1!>K=jhp4^P?cs_skEo<)7w?Q6KL)P^b~5tLT?;u+y}dL;e9*CkvwF zLm_I*r5_NL!FR~Ju+>=bP+7b4m`rNJTuo6)oj&IerIi&7la3keh1p$cZ*5U9FwOzZ zL<7oEeAD$AY-+;$^_k-;XS2u>C#lPjOKxtPJLdLiLANuo3ZUF1q9R*6dMUAn>DbBT?$?SY~I6Fmrn7 zqBcJ4a2f5Kfz9hhdn;e>SjEe}+2BlXiS8D^y+C_R37G6IlC;fs?LLh0!~#<4#!p^+ zE?v{Z0y`#`lI<$MIJ%BDszp@NJ4OUEG%8&~++LiA-vg!YVStB2s<$rQ)gFlZ$^;Wx z$z_hI-@ag_UgNGUpPiCrVI05Y4+Y}f(Xtok&mc=Io6Cr=f6Zk;*(Tq;&tO(#HJVz-T+H}Q(9$|`;}Vs8cW&Jr61@8uoyt5g-+`?^Q-SBUQZ@;v&n@@- zpLwTaLje?~2Rv(qpJGpHf7hS5R@5jf*NuH%A39LXUNvSDgK>#Gl7$djPn#Lm+;ZdW zNfhLwcro9}Kg(p;66qb^LrlYM1Vxr?v|}jXK>Zh>qD%ON;8T~>f!cMQ-zA6v(ci-h z_vs$_PF5ivK8;s)VO@(Ku8>rsyww|=mdWE0Gyhqyb^dbeOyZ|YRuwK*Res&aR=_SM+~D^4Gk@yWQo(qq4@@I0|lT2%hyi zLWT{LHy3sek92$}N0&h+N(6am)dVC{mreq_@K$i2JT{Z@$KXv_^*$T^suw@C4sEt| z6a_)?BDFH<%B=Dr^@cdhq%7rdGFj1`xK?W>n;WQ0g*5tzb0kDlEyz=g+1c4OXq|zf zVP-mt=o~|o4a@cgxBA6fnH+gPr1D34z{PNa-yuaS-foZTZ38`nVysS5P8vnFpc77w zUfC5ELO->jDqu2n)j}}iE?Gr5HrWZ1gJp!O1!E-F7|4Kzq2LjfkCaa>Ao|rfnPyzI z1&XiDtXS;a5*(f3Q5Ji#B-R6!aG_!tKn*jQ$&x|#(~n2I$MSIf}TZ~`)EjV@u#d>ksT+t`uk^GX0~ip zTCNtW5?2I5!lCX`=c#k^epX@LNG?ecb1cCfs-$X{?7FuQ7etKUcdvEQCRB*m0L_F6 z$XzJ!Gnri;S><9=Ix3Rrj3gGE8ZNfaH69^fo4GiH|ESlq2lPTRr$eDnE5TP9A5hm< z1XQ1t!$;*zvc82;{QDjmA^u3XN0R&={XSg&=6#8SLqP1?t%A08Lz|%GtLl7xef_J` z^;+Dw)~PnxAVJYDZ}L=b3r%hUw$AghBRg?u)ZlHzELz2rE>lT5>=MZ<+u%V^--HlL zu_49zl*klh zCR_eS$`2!hvmtHuEYOJn@`nan0&>yo2lJz|d7?s4f=ZL{h(EKrDni2+bOHz2(UE~N zv4*U1FkhTkP3|+24FqNp)NdQO@%Cm8>N(pK&7F(oqh)W5@u{PcLG;uBBQol>0rZ@v z@e{P_$mQz%ZLiua$h)8c_tD0Slm4zPxW)u~&-J8jC;H$x(Ufhd3O*2rPH?#tH%cGq z#WY|70T&%d5fjR2F;yRQmPi7JNMthWw_u?U*Y;~=bveOYc^f=$j8St3r9Cc~E1t;? zsYgn&&S7;t5-kS7fMK#KLCe{JPQ+G6k%+7gPHASRO z(eNtNw~Aj~nB2s@#=DoTJknctN=iP{?io(sv*0S82qOctZG#d$tRfTmQ= zIpmHgx`vzXCd_}0keERHlT{%rx$OKjRelU6P#!tQCja88#JH-CD>OcIlF5gPRkAU7 z|KkL;x2nO2=A)JwAkcg=(9W1U9;#8b@iC_le8>yj$J~+}aw{%rkK&Q|*~c=M;Ol(` zNs~zm>vKjoOMALJoDWUGr@vH7 zrqCmR`*j++!ww~vj_0sT3n48`sRud9D}`qeNziGmrQ0r3TMp$r7mT^$2#pmKF)nG~ zkU%9V)4@Bux002cO@HW?VyY!ssbV+2m)6N~o@52FcIO)o`A)SYHS#0|+4v*mnr}IB zIUUK#Wu5dpRnBs2I;6b>Aqhwws+wNrJYf>a(Uh7AE_XsV?!oVAg6otYO#<;;(Vg+7 z?!yp!$l+ufwA;E=ot@5jqVrC9Gm%OWQUepNE8juEcfdIBSGUx!M1|QKjCub1I|Tai zW~M`%)@OaekPTn)c97kM<80_T)#v#UW6P@24)OAE5YC5ekKW%x;{>ZKkqFg`*Jm&w zh@6Va?8M`YK=KokI~;1ZttI;FC!Vu^prUMBp)?tH<)Uf{O+0QpB`4}DlFY00oOixR zf~T$<%kSg#(x*)MWeyB~4ABgghs}3yMMu@F=0)J3Zbv0A54ff+X$K&@EV?g6am3sZQJy>z6NtB~;3X8#>FZXtnwFA2~d8 z(OS(!Z!Fw$d3XJ3J9)h$Y5{MIx*ETiX><}`%XLc@EVQP{B46zGwIp)V73=)oBzthG zyR70D+Sz2K45U)=2e{dRgP`zu93^MY^l6t0{tF%yg@VH6NGvJY>hUKup; zH;Se2rmPB|zl@yVG(B3&VHxd0z)gpXw|RaslAgRRhw}svGKLJ1MEhI75zu~I0eSx@ z%B3XfH4shogXb#2+YY23wF~WNPZdS#&L}mF@1h%|A*8kS)(ViS5mh^DyrE=!>_`8$ zG{8cyR}1QVcwYhT*J1GvsoXFKPHwTUDy%a_jvep22PPG6oIY>oBvI~|iCtA-`Jw8M z9LhDAAGpw&*;C3P-^{lvA3F)4S-beaa8SUVKDcpv!e|dh0^~p1XjD53q9Q z4FIqmTK@9XZegdd>Qa=bvj`AdDwVS8>S%AgQ(cX0#=kZ{#Jef1F+QR>`}r7;g$>N0 zZVY{Q=ES^dfMOOQVjQm&Crx&xBk>ql7eu7Xm<>b3ubQOzDGblo}5u_cH-c^`UzyC9>(fmf-&J6_C@mTT@cR-VcK z=1$QDBMZ9TFPSuLtHe7kKeuhLEKkc$4BjBTgzF+#o1tq1=;)D>W33urkmRlKfr~gf z%~rZIHh7Hd`%3jJa4@DCQz@*PO;GcCxYAO~J8Ck1?!Fs602!zpD5igjmcJ4ba-$l4 zN^(<0U1ob8D6y(vU;~ViTR^yK0nb=5KpAH~2*eX-pcMZEoDSlA_(DbL^Tt6|wjram z3Fg$85)(hbsXlp0%}e=dgO0XXb>_XaJ<#omciLT7<5IbObHcXHhJPQoE#8uoFhNfg z-na6&Cg!zD2HL0Qa*YyDZ>-kUIqoK$1FyhnfmKMo^SCs>I7!13As}$40a2@ z=6HP&PBD4R@L3i4@XJqWL0zk7O+K3af{xy!Ud!7EDw9-1u_qo`s#)Flm~Co_hisoG zdpNM$Vh4lsvgG<%cr_+^2RV)b3toVe?CjZN8R~ekf&kK-a6KP2I|owJDLw-*ciV$q zY5Sk(4kd#Xn^A(r{x=F@jUwr#4o(`hHs|f(IQv0ZYtb}pthihvTlb)9v3-AvI(dt= z0Br1t2Q`?(E^+q3=PsP7&21-GxD|P`Si2v1v9@m 
z)jys5ZTS=KRh6JV;fl`=dFdp*6mon@O}uuoMEFj9KTAG$Upbs$d?$gl@!KQNfr%3V zS}aa5hlu8E1jM6Zi4ntKhHE>@%2{z6R<$HwQdlo{yMHfpK4YssiE6JBUHOHhy;ZGW z%qDhy_{F)OQi#p|*}&fY$j0M>ZR--M{QKMvl|?Tl6JzV_1bQsSeudu{3TgK;BnQyu zSD*!0)nUj*lRIbV+S(|!CPH>mZAudGrbuO3KD+zCT?}{&#F}yTsnF+YD zo$hEsa#}h%Wjd$yLsOX!4r+yUIC~PGAnq)rA1!{#4po)F zzzV@M#ULfXr_MM1Y*vA9q*G+8=RII@Myan;+FSS!*~)jmh@K?1)ylWDRLMMGrwt~F zE;y*1rw2LfZ2jc`yuUl13YL$dcT8#S4WMAD0->3Q|l_2s-ZS^ z;ISt|sa8FD#BqZ=?p=^ZrehM1q>gzNzOvpII#Q@ZN4`}E-pg@)<0$bD@z!z?l|myc zTe-XMb1IoC;USa$To2w)xn}T&W1Hgz`)1=ax8K8LNqkIj z&iLz;fTbGgn&!c^<&XjjbDPnx{(3E8UTq=lKAHDcyuH$vpaXqnk$l_SZz?;O^rF z&B<#HbYrT2tnZw--y;kwDUE1#xAk!|9gp^YHevu*U2}Z)vnMe@P%Ch_hcYdm0M>jC zaqwvSH3X+cy?ztaN57<&AgJBlGum5o*|mvpn>6>PWq*fG;1UEk^^3}F2D-IZ^~m|4 zaup1ISBmLM!|326=r-C;kkSd=o_R%gP^{~wrOt9pB}L(?nNiu>TjiY`|7Ov3RMQVa z<9XR~z&7mmswpG1y(!u1Lirk=$Q@^nLJP*8{xmExR$~5$@aXk1jx6*Q4k$DXQ2HCb z03AxYzA@187OnIpJ`%1Aw`WO32NMTSbLozSH5LhQa< z(PzW^%EQs&a}NmQ^Kr7J9aZI;?r0c0T_(`Gr+tUI3PVbJ|Ci^Pv~xKTj=oD^)#?J5 zzCR&MJ=J3LXCfrt@K0ZoNx)7F$cy~f({%pII9YwRWD%O@uAv4pH~>Teimm5ch%$?^ zz>QPpg@83i%nTVVK%b=sZYFD%O~c;%CzypSCx@2c-2{A1b$!#;E8gh)NR%iC8(b2Yi&n?yVT36VoXsNH)p=7@-M%k2W80&L87cu zZ!?Lv<;1?O)ixv-{q{Rq_w0keE!AQJQoboeuNEcBSlFP;D^tqzK&gjEoT?m+NF;Uu z{_cD{eINQMsp{4yE8AQKuvLKT|1bsgu&tf6-uMp#$w&w3+gpUo&bGFNZa`gt@?>i2 u>*#dDYo~K4t~W3JVdoNE-PmZRLc6=)@!X#*rUJ#boH^4G2snVjNB2pC*q)YFJlmMaFK$NC*X_1cfPyz-* zKuV+wfdmK;CA1JA^gu$AZ{pf#zh|HGp1sd^UGMw-_^$IqlQ5H+XO20>^W69S+#_#| z4753q@E>7gW8>7hd+Pxk8#|ee?RW4Whk(Bn_2H*?AN#VgakcIL`wi_~?7+tM$V2DWjfa8O%acc+TRDcxF0r!RZ$3Zp z;`PVpZ=-&QJx|NOd%oPf>HgtQ=Sb&BYi8b=4{x8}j2f+6zE%*}eS1)eW6QlfS!u+) z_4~9HneWu4kCJ@&cf~hsBM*oip1Jz5t2e}b8R_IS;IKU*>+Y1TZR0eF8+X6zP9cO5 zwkmFd{{#Yi?z;7rulb6qs%oy$#akvyoWS3{9eyGRJUsi;b1(b!Ur+CtaRBelJbRzR zcOc+gOgPikeJNz_dTl*eTN`ZRXF4KwA9Z{(Vx_oJs=YdI(Z0_|yVGZbTvNCI6(8=Y zrKvsras8o*Nvw1kVK;httq)q;xspXEj@hil_xC&4ndsm0KZz|BLAZ+`78~=bK1pGb zpDs*oe@#q$95K`loTa5I_$w_6@``{PJj;<-P3AMyM$6Ol+zHHuDU>pTx*Y3*PfcQ+ z;26)XS-u|f1V1p%gJ+6Z@Bp7ZQ+^_1tD>U91&=n*G@|eLv&eUHuEE zh5gN%d`32=9S79lr^cfT#1Zc9x$=kCEye?q9tm})5+*5pzS}bK4)Wq{sl-V~y5y<; zIGxGKhJ3U+X{UQm_JGQkp{;R;IA= zq}|y)(W$*%KD}qQHkd{fG(=&V;f)k?@{7|Tt?zhI!Nm2%Y9HXc#e2JxrHI*-snEN& zl*5apCclqwhc`MpzPV#d6L=So6tq~I@=lHvm{4nlW1P9fH{LzbxTIqws}wCj7hlG6s)B1c`1ERxzZc2f8Wf_>0KyuJ2n6pc zA8;*ajk%W7&Ej$M_A1AO>_X=Z6E zG%BH1Og;11OxEwVsXzEu=*eL+ro}oc)QZC%U%B1hyea23qGM#JYw}qYSy3Wsea&0P zry*BSHjdBX6(dxZ$&eVcy}HK)z1`6p*J~d3(l}bE^C_ftp4g|Mzd(7nlQ?xey=;TR z=Y%2f5=xDXS>I&BPeLs>=61>x4c_&DHY;{!1VeR0cjF8@8#Vock|Y`EXUAqG%R!m< zE2y&q(+sHs!$yCF=x4rH!Jg}~h2F*}YG$IH&Q_C0*-Huovgx!QhHa0A81JR; zEI3zx4;h+W%KDK+|f6&xM*J_5DYu zP=Bt_yx_}zUks-P1?(t+3weVzZ()L3UyhKu-IK`3A04o{#CgsK>^AVM^$oe+Yatun zyf|PIJ}WGG>DBPH{gOA$dP+3xYHXPkvP25u)|d;izJQoB6$2$!SzAQ)$HgrepkO*o z8w;x_V?M#NxU9`wGw?$#xYNOoxfN(kb@x(&-Np{D&-$lpYaar4ay+UQtb9y&L7mMd zGZ#c#>*}={4Q~}<4(zBtaVNme6G@Wrm7>SU+6)vkuz_45HhOS6tos*hQ~9vYU6r=F246PAtQtGrYy!&(>Y!giaOd%H+jF(-Grv~lfPp?!xB zDIfk7nbPOsj}s3MY<~cPz4tZP%JfnhpRUQnKB@MXh;fxNxau74VF!0TC6+skI=}W7 zs%afEjIXk0y`2snnlI3=ik@wXYD5ql8QEHJ@ke`*&xH-|5A5=Sw}vB8nws~U;^_y+ z5ISccn>=%LHQwNb#u!%Ye1EQ;6AQ#xPS}XJ4POy-)c}9FM>a9q?xoefWxa_E&Blvn zY5}SLNU*PVM1pqdqjFWzV(P--CDPTzobOriaG_3Z;n4HV@ES#~;bAcenE1yByrq2S zcyZ93>FGT`9&dV)_B3e`?gA>231f8(M4Mh#FI4ef8nW(UiCXU^eul*Dq!O&6)GkX? 
zGCiBnSGFpF-2MC`DMB{nNo`#nFRJOx{XSX~qGTZx$D4KZPt|$wZPQR^=`_>eTNaX& zMxI!nko>I9RDUI9d-Z3!dpfANSzspG2FF}9VWrv|zQdiHJmV~@s*05kHfW+jkzSW=QZ)}IGZ^}7oAPd=n45Gk)Up{dERL^1&XVrlx zSNvc50}tQM{zEN2fdv9S@B9yk3ah4L6tio%fOlic>2K)K*`k5E`4W8M--Z5Q{w>ar24YSDLPDsJryzphru1e6~k}{J0t-FRxTm zHBTH-N(Smi*Gfm#RMyP)%A#mBPBf?4-CLdRO^9}GW{xb3*VNa_0cT8mC9YluWzE;$ z!M~^&t*;cxp8NvZW<9g<^Y=@3D!hP{+%+^WISS<8PWL|%Zi(Nmae`J`lY?VlB*Jzh zWbe=igk>sz-w#bc$PAhW!IqZhl{TtvgD|Hd=Gkxiw_goTPV&0uQCDsbfPW(mtT+^P z#pmaPqT6($WY59QXKz#}Yxf;S7aRpn(vqokVZZQ~p21Qz>*^mSS|9uAs`E1NQJ&C9 z*X!C-?qj-blNv51%$4_>v&9XVKzQ{BMu4XGCNj(gJ993ae$eM5? z*~m?W%+yqc;?%Iy!M}$*DuFhtESf?sC&c9^ufcxO$kWqxV;6bagATGi|8hQdgoqtU z-eyis@(tI#pR(F}%*&BICA6$VT2wJQGG{U}HlUF2+(sHmFDcd_-|q0d+9qSeA3Mlz zsGbrlFZU||o*@?vFDnv*r1Q3Rqd~e@Ti>QHxCh4+7iuK3ItM`Xa64+af&_-fv9&VQ zV$Ew(q>!Z4-B3RH)WT!@sezJGT?({LlHVNOse!LPtoO$}FB{ujCDk8(z~>#(`|`J#+sWx zQ~7KpWU^|Kg~O2Mg+#XZmM4AY3h2DAAuR)oN+df zX3hwP^H?nl-Un|^t1M>XZujLz44R}K*ri4F?@kT|nVG8+e>@Q!Y$m=93}AE)1#~`H zjufzCbTjoAEtG8pOuf3ZXPfsjM^~qg+Bw?8^>lqbnu*acSKYCispuZd6zRm+BE^wD zn|JY(J7$?xKQ*+xQ7>y;8XtA!sk5c;$UM!>PJ10;{Kf;N_;_W2xF)Z?iojW) z$iOOh&be2_>Elzx&~m-1n9at!?j<-Sq}*lmte}Hvtt{Df}budb3YkNSMFR)E}fnz zC|_G1VBX%_;!9i>+H&`4m6DCBA*T~dxtA|ru3s3&wSOr(cXj}>jBxQl%4r5kod}|NdF)bu+ECYP2&d+SV2VuI1}O|YzYYG7IjchVM4gQ&4;K(-k@3Wg zu3&l`;=FZGYjj@4E_uz!hh1w~O{3xGoJ3@3Y+GiY9&2OVUbHf22vzhZkuZ;nTblq% z2v2#befT-`z^XG?Ou#%5YjXExzy;2Uh$y?A71tv+M`o=ivpVLB_$Vf_N};lf5ZO5D z#6Y$yYx~vMcNI^FLalv6H(qYc`+<_@J)Dk{1o;>Pp-VNX_hN(* zHZNd5P7yltV5E}LX#Ay$#3X&?DgM5}cH6=&J+nL3}uHYQ#n@L(bL)*gF@sxp^Wr$&giNQy160z-mibSIG}}xhlp0_eKo@s*I@3!aTg}Hb0YE7ryFR< z6bGm>4+4l1ExmGbiew!l%F^e@hOdGjyh9?LUxX3{gkqe9cD$Bo_2F-=EKq`tC|3FC zR(a#}XB6G9cCYn~9ge`V_p7gSw#e`RqmeoZu4dHE!GlOoFgjKkC2yX1K45MK6MT+P zIj0B-+m5>F6|l!n@ixq?Qkd+-HULx{G=yWAsegEMlDnGmQzn|2sC6cYhvvFx<>g2VZv$CJAl6146#Af!^V#e|~C>rU6{?`9#M1sULi(pUFN|jcQ!^BCfKl8U^r8 z-+4l)_%>+WXo)ozZ$~f`dKVhMcX2$7`P5H+?Bt}NV27t(q?LAgRJ3Vj43Qkr9#1`G zH<)`H=59K^5#X5_x*|1g;+}GQl9@hgoZPFnHLS9=q})L_EPq>BhzyiPi7GYO-OFHKoh;tO}@d0qbGIznK7KC`hr#l)GT7SAebWv@JCpG)oErsyy zaJ1E9j_HD_MrbUZ39GGq)@_^q_go4LG<;Zq-j1Vo7D&;(?QOkRMS>?`YC_{E z>C0gwQqvQDbb8{lsaQ`FQ{QL%J|W)k!@EkzE_oU(t14#Ryi01%o|XSXw^~w_G(ZNL zlvyIAue~Ynx~2z*ERv@My){4vnt$32#G_0WKZZAi#{U?-6EKB-YOPJ<~X?=t6cc_4qy3cer#TV;t~@b3F)UHQbx@_f81-!3Kv|nG9l05?OWya~MxwLQ z>uNhgsY!|jP$vwlF^%n-dL^*vXENS9t)iP4{`XlW^0}VYR(_^9(bmTIqT4skeT0@- zH0W9{C$ukR$#=NNh7X+y@$u^io#EgnM>(%BQtc{NYbIxu^+6I!o9fPu1=p0U6?ZiD z#l)GO*Mu4&lZZ4HS@!MMA^W_>Le<>HQ^RWDrFYxjjPnSH+aQO^Qac~I$x-4qUN-y$ zU8o84@hVg67s+&)Zc_!$a^*75RO`9@CqDnc;{MzG>tBBA~IJ$aE!(p;~*ShZX+*T}XH(fXH^XDQj5^92U&j9W=^( z#5C`OT<*qEQ01*Q_@`+07R#h&XUom@I-l7>?V3v@dWS!3Gj0IifROd_9WXrm;d99=gsWpNfan_2t1vUi~0{=T;uzB^{yqWX^ zdMT}h8mT7jiukju)+Xlgb;z>_&#h)|?rlw*uqOM7uz|f3Vc5GtsDt7|w?f1P4FGPQ zg7Ch1DFLO&ogBVnHS%>oxMOJ3&! zd|;WFxxhetG{=-~{oX7L0lVf!396agW;9efI%Ym5WjEf#pY3m9h|&ZJ=edGp2G5}5 zr$ZhBmvL-koaRZ%F7K+2^|`5}`P~-fTkuS$Qe*I(iKS9kH2E{CwGe> zR^6K)Q z7~_hM=S2M?VkpCgw(K7){FuTK`+($*RZ6=7m(QH8pGkJleWScuckIfd@BOYb)&e|> zEiV`y70dqQ^dVM!g24HoU=B0rsMtRY2~Skm8fD5kh`)a798@bcxQjnNuKR+w|`cE z`&hA3QCwc$Q+Tdh@$k3$b12i;RgItqdooqi#_Jemmk|g$^vq5as)M%w`ZZYJzkIy3 zE8wdDk!EMSS!Nj?R_n1uN}C%T-CR=>38ut0UQY2s%F|Hcis()?en`C!0}<6HJg~wc zP_?|p3GPI{tD7f1@ngwi|f+3LgeRa?r&>>YYG=sDH?ymlmA zPgRHqE3miM#w9z&eI&7RSl5Fzs5~Y*mtTu%*v!spZ4m!W?=+>2Z8nmor)r)!BoJbX zYxVfU)-6B2zqnoTFx&IbSB9O3*3zn~vNe6VY$g4<*PiR*MA_I1zZ(4-i1b$6$OvC! z4M&<}U6+4R-dBJ0-#<2(4ylCR=j6!vPiYlZ$y<@c5?Sc@szj8K~2nY1pa?lG^vY66_WbT<`*? 
z5b*Hq|6FzcTfO@w+D?wGHVlu9XhY~PJUkrYG>OX9IUiSc*D44gxv*7dn|ONlrIv-| zy;g8_&WF$X6>5UK6>9ch#N`@jw4nP|#yk-(Dd6AM^Gi$RRn^ppw%U-Z6Uq7q3&(-c z*Dkdu^t-sb>uQ2|=WNYD0VaE``2HxdF_*;`Y9LKfyFhoiQXL$ulPWqL`rA(rd%4SMM)Y+ zU4B2h`)X&^#w!*bEu!TOQhX{5s3dRB-s=kv%hxAxiwm=1K5<=Vjy%NOGSSzs6?W+< zWHY|qM++2$*ky)oRs^o6js<>U?L1aL9-aGh)IrKYxLIOa(Hr7lW-vDa zrWZ}igWisY4S-VI=Db<1uF2M}XTnXSNgtsVtksQac5fSAw6pFp1jB!9j}NIjMavYO zaNZdDvzXmj2B|&Ch-Z`6Ht(^vGR}zot)}%HS;ob*)~OEn#{3joFp*PSXvf-6Kd_x> z!SuYICVX>E_l4(L@)UinW-C6-x~a^A5r$_CK+)TI@$Vx20LUlN=G-2X=aRyGWpJo} zMj@0V?HU5nm{(%-t@HVqZCu1w&hOv|b82o8u`7$w7IGVXj#oE8Q?Pt&p;)6%kx7My zN=*ZI5Xxez5xKFN5*fhCXt9(7f2A3NqUllxSNO8d6mdQ#hdn%J`uQ;9dPn+NSxQLn z5Epq!e3f;E3f&Wb0k{Ed<34IBl2Pya2>8=QSu7hntDxmz+Qs^fR5Uwyo6^UqRSc!3 z38S_hea8EtXjF2%H{==H)S@1uz^dB^$gR&aa_7kTFpKt ztca=`JHy6SLzr+o#1giNr-snxeQ5Z5`n}O&kR%J1!9+j@PDoQy^S#W*i<&>1;a@01 z8}t?C4Uh2F(tBi^%{>pdc$alYSNx z6SdL;j6bDk{gN2C%c;dOyLKLN8$?>!B|3dt7k16cOSH4b%dUrSMLO(- zk;lzS@r^5qz%Y7Z7ke2k}1kxA$O^F2%+gW3xA7x6C+x9OqEux)nP4)**XVvzX#K!Ud#xs7d39Dc4ycTMozK54wO|Z8vOQVZv9U4Q_4={< z^+c?Sea0X-K6t*he?cYiy3rd~g~#z;8Q3c8;PH=x$1i&tHt=7&>B1eV!LU<7)dFy=K}SA^Vv5})Q(Q|+|x5mHI2 z+HY1~6Db1<@b&%%XTdbsM2)86kaoQZZunNt54zQJ^-G$F@)~)rfLe(*E1>3==wmg= zUsbQzA0{W3mLMeO`YeufjBoRS4g`Sd=l^&wVPZ0JzgY5CDC0>NWQr+T3wH2*{yG2E zHMYXxy`VSR^cRf5u^=NOj&C`3eYDnBgYa;};gPMGp^p_N{j?Nh2JUc}1FN-(1RqUG%E-WWMCzNik59+=Cj7jB1up!#j*_!G z7H1t2rl3IJ*Vj+&OwI@csyA?iq+FEITCcfgrsM~n7~T9Znh?96fc?_3Rm}lf;|D?? zD?Q>T0O@ED|9j{%uOrcQWAj1Hmfaa)oX2+vSaJ?)&{DI}4x59y`aL ztg%2zq%%X~1PT7<;YA!&SZ1I8)+}x`dluoPmg#TZhZrf`Mfi(&Z~mQ1?-;!H{iCSW zV*Ba~wknNKrlXR=)xV)eXK7si;Z$q+Df}6LnHf6Z>bkzOf|cY>p9^zXn%vM+I@F@~ zW1JIXUp$bXHv;o^2AcV|bBxla(ziUR9sya z21>Sq=-+a!qwuZzzo%fgwhYR`;l(PqP{%tttX#GrtbY`&Y{LxrEZH?JQcqX`IJcINKb9`{;d)DB!_V@lKr%?l&- z(sM~ib?I&}%-kv;JZU7oNOC07xh7hXjO##j`ZXvjQ8SdQ#7n-y4eoAdgdGw3EYQ=}A?!}qzcK%Uu8O-a|gD1~$-csqg&5$aPqLJGqaZs~U^ zb-(+>bsXIKxiZ?MYEL;>y~ z>_&tI<`&3ZMm%*?AY@GSWPj64vmUOLM{EjhW&20MW<_AIAjdLc&yDX(?Avs|i32f1 zfb?=lw3SY?tJ!)W1xmW2TaKL*px1SOsT+2?Xpy`5?VDdPX&j5opr6OotphpB zw{SPx|2eFOtU+c#{RWGuQ|pifN4Ul~-H}*@;RyQk&KJKgTR!9qbOCOv#_zrA6_Zyq z#L3zIPk^Mh&X3LTLdc{{$$jIw7_9ofF;A{SmeN!`4@)HNwHESe5B1B+MmK+P6m&kV zmZ=8fAL{Dk;TaG8!Twh*`Gd^HKBHJRa7c6Yx4)fPs?3n30oX{{u7mzBRG5$>>_3iM zSy>HEHG}5*Y>tm8*4;NT>fw5??24g|kEg3FRf!~ini86{;#q$$ab7|l!I=hAM|ipo z;|8K7UkIKD4?%|2xC8E>TiIW#>g7GAXKpPnE~@N(yWK_eSCxNxO`P$5aB4~j5kp7--l%6Tp)pgOQn(S;r-{Amr(Mx{3yP(9j{uxpD$cQ8D!ch8F zCbsGOEUF$ zd~1QWeDzwXCLg}q5dhg2BY2(5Nd1xMf{F^waqZpne_ikO>|to2*)WJwReY^l{77Ad z$6`*LMU@Ixp(4gn`*LiX+Z?+272O_h9N$q7_jn7Pq+>ijf51=C0{=1bz6Be2jQZVB z_vssfUD74>0CFrDrY-vYcSwV29jUkT(BLR@Vh0??6|D3yxYPdE(r`rf>H?J?K zgp7PG)__c&Q!?-qgzFQ_aB|E;iiARYVsZAo68$7IrrHlKp zpU`l9Q|G!Bow3n#QaGCv?wGUgl~McJilW9w!Vu3i#PJ4F?C@IR+2P;5#lLvbq+LL2 zS-Ma*|3IthC%b8f0DsNk3w|2`;ilcTwZZ(5)h5$+yf-t;%p`Md$RRLl%l3?R=9i0Z z@#Q_8l4|oi@gW_UmK3dEqzbL-aj8XD3jID8E}zP+Sf zxFGcWv8U}7zTqcc$Fa_zOe!Ic(qRh}BEvN7Jae_F*^11*_DJl_xcr>VW(I_ri`#Mk5qciLF*j*X_r#@-{k z6BVPwPjO#n_MI5%tW;%)jUBJK9~@7+n@=q^Ex9jTY}VEcw=-FrsM&tk5N4(8vE;d! 
z9dO-rWu*b8!WeJx9`Sd4p+U~~ve!L1UgNaE)_V5{7XVDUWsTysMTd@U~X|%V6LXOu|9yR9)>Y z!`Yqn!lN`dY^tmjqzs?87U-^I=GCKZXTQ;Hh611v(4VSFuF||yZm7u{9dD5ki&QYM zS$nt?#p`qDbUMa-L6BShxZx=Zh_!6C9&7Qg4~u!$#x6Cz7?!gq=0ggP-qBNKwb|%n zFC`wDYm6Ls&OE&qTe){7 zd*?}YE5<-zRqO`Kp4MrTysVtHZ3^m5v4@{2!BL;NR#Nih#%G=DWNc#ke8McDim$?L zLM|!QJ8T#P+EX@b^RxOwJ>DD4{H@qYD;-_}E+-wh9M_gf*!1v&9jhR>Oxky4qPtpL ztXRj9#Yh#eHRH7wZi~dKQ1eXqj`mtzBdw$>yMdgs_G7x)78<-tesz$N+KI3E#7t%2 zySae=goRKPM?jOP0p)Y2MUYpf0CxxX(5LtP(Waf`L&HtnVS2rfCIv+x@9b%T@WC_7 z>ovOLWk?Fl(=|4TAU}!`nxq(Xf=+fYr{N*_;9%IMW}=3b^Lx*UK(G=QTZ@NiS-eTl z_NY)au|f1{QQZRf6sv7Cm=!-gjIl{SI4ij3!2ep{Gd)@Y`m9Vxi4}PB+@nHe!yGNGlqv(4kev-SPGzqK&h7|o}> zEBI4n<6#J~le?YH3eY||-jh)oZ-gKS65<4g#L14chS7@4t~Ctd4o}7$Ba}3%>n_hB zxU<{r;o?GYca5t!^H?SXF;QP1*bj&oF)3A=T(0`GY0RVc1i{C?>!U(=uyh=*3GonP zoumi~*GKg)gsB-UTRETUgT~^8f^{ra$GIk}3zxkGXYRY}4$%8l>gIc1Ta!|`8S%bX zLa8Kq{JEX6=7qo#4S~|7E~J`W50RA~%eib5wujsqLxTD`0fO=2Bo>`#<{sv83nubq z`t?pLyUr&o%c<8*iok5GZ)Y!ZL>5cT4s`%m6}fb-0#zRFuNz{&<{e+nvF(Qn7sf+^ zy=7BsaXYp4U+n44dKZmp>rZU8eM8}4vv`>O?Pupm@XbRYspVZ}#fgL4jlwA9=8YYy z6t=1egW4ieI<4wM#WKF?`BvqLxVgF4$g!{aG;3}zhbm?K-r`{KZyL<#w}KnUiZJWiWddJT zmI(RPp)?h(P;tTXxKOpc@^YCY5ihH_X5Lg~3wP1A)>|o6qP6i$yL2`^XdK`yUtEfM z#Q|heE`Xbo4+FrgS0RxXJNe6v`)>pnfN_9>NB-}4?cZ_Ve|-qUk8MP#)?BcyATKR; zN1plOYu5&NcswbqoP)JZD!;R}98)fTf=c0rILUCbdm6BBA6&$(M3dvMr>cH3o_lmtt%dZmH;Bm3r$h9Ke%45^kwCtn_jtIq5{)_3<&}+*?mYF9J>&QzPKKzl5c$tJz!G=G)A^AF|QO z9iMqfD?4B3ux3F{pS2q6_@Kg^ebg`Yf^4>j0PI&dG%hv$OQKS4Cj{T^qum1=O$D&wf$fv0T7m z08e@14ewR9n6JP+d+J#0#mxQ0#dd`qn6owaXnI?gg zTldkq!mE8h`L&yW?dJODX!bvu)Bkznd{u49cX^BE;GCatk9<+L+yS-T#;vFjyCtLZE zg4!GBoK@VO)t|$73ae^Zg@q=J2w<&D)9Zg`4jltJJ-UDx=<-VpEQ{H)=jz~ud70>1 zCXRI5)LgThgvh?Nv$lcUF7s}v0b84UO(gW+rTC+y`IT)f(!=_)1{<0;54NNLI!A2Z ziE@n%uO>CC-`Jj?+YkL$i(2$N+VQarb%A2)-AwcyG?qhoVp4Oz_?ToSdl6wNE}rV3 z%R-~SP5y1lg#y@lZRzMMd5+5*Y%MW>>ZF%Yd#BMX5efU%9_E_+t36CQ-1P&PD#`lw zcgkcH{VB@|E3Z3UqLN{n?!Tj4F~FUb%Uui1>i&(b_ZBcj4*2zs(ahJ&BhDs&^mlyH zNaC?`wRcws1SfvL;s(?|TxV!$h19|A+D>Fa_{L7H<(M%BD9|x~0s+*{_S~bUQZFjq z_l~8n=)Cg%oYExQARAqHlzaJe=xoH8#^T#qeZKbYlyV=#@ypEwxNt1mqNcTW43a5K z5tv@{;Or;~0tDcfU*E}N{hZr~4@^ykmzO!J<(_w$W>|t%B`v{)Tj@!f_8QA!NWG2D z?6@B%T`3eClwgqo@WXx=OWc`yRaU@W_O zucqE6OjK`mQ^!I|4dwv5)VF`v7>mA{0}Q9Usn@Z~L{m(%C|0_p*{Ewv=_Mx<*BCajoGO5gl59qCp4X=?F7xm%_G`5$f zc(b}zU1rzPY!S0-lw1JA`#o$}iP=g&a3xhneKBp6u_+5sx;u9Nxf`=M!_FE+OTP`; zm@^!G!+MA)_sh^0Zp?3pAN)p?YxUaz1I5(X{cmb-j@vrdg~@mGo*7VGpnvj? 
zNId!grjG#h%=F3ucMs24COl#BKK9Vuer&sqz&-jri`m~GxZjA~r(N>p@m>}~PeYUx zngC17PINre)n|t~#cEwxT$JRBZ{NnEYjS?Z&ZSwi7H#4qw1w`U3!59$@WJ7ljn4_a z9X!XQSD(YYT!Gk|Ttk?ms&@k4=J%H1{?&a_e!&OF zksgCs6caC-BL47`Q`Sd^VpOy#Y7s|F?r6!@FaD&qo430SZWgd%VOgpA*OJoxpOXrl z1Z_9POP#FDgl!w_&DV1I+AE<*)v!%r(A$cNm5UFPPu?4s1}*o|+UGTth4u#H?aACi zk_MHx%$&u~_7N5X0S=1+Q1cn&H9jl`GTf8VC>+#&Vr>kyXXdj5o?N z@sUk$LjPiw^TCMM1!4}z7hOTS9^KyqCzBfNzoAU>`S5%8(6*6aB511E; z)vz3m!F7kGt*lgM`!~S;*}_!7b`T7JBn^aT3|B33qSyyw7RbVzQi8 z*<brb-YPTWANE>C&h z4U6I4c9nRhQJ|h#)Hb^jd1Nv10utEjjZj@A10p3k;b&ajE7zXDG~=OFMy$8@+se=l zHLVoCk2(8&_3zr=!1vCig>CRUcgPC4zfCl-JdFx$s#EcFZJTajwX$OB+#hxDHE)gp z*IE1+xK)dkuX%X3zrKGyd+Xd7igo=VZ%M-W??N10=b`{HqwEp#IDNa`QMYwsl}T+9 z3G*D$wcqN_(fu_Ocg2d?XV5W+$)r#cAV<3DyeY!KwT0tr`qlFSD_439!gZQ9hxs~S zXmtr|$jbQ67#v+)VC%Jm1vlgA4`1ix_vH5g8f#mIT;C70eQrjp;eoy%A9dDg;GF#( z&lvq*8=%uf{=(lMf%R-m(>7ebiVJucV*xe|2^7AhbW|u<3{(GK0(HMb; z+YAFvHY-MxgU2(kYL2<;n=p720wD>=IL;VJ-~BkJV^`!!X1tFwVNPEyMsZl|ZGC-U zTUeN>sk0xr5pwEGW9^&1>^L=5)p2dca>0HSM8&Cre38Mw+UmSi#Y?}oq@T%JuhPu< z#hIS30sQvAqW&LOr+#42e~th?N6!Z(CHE%BfpI-HJQj52lGR{Pl}6VIz~7P9D84-C zIXQUtjUfC=_86m7C5_qYAK(Q1{0$qew7fjJ1n43@`#I?IOM_sl2; z@M_!EpBB!SuS9gq|42XpQ1Sxn?uQmctXD|1zZ@3xiI$#schUhna%4S-Vi zw-RQw6h2J_8OP2w#dhYu-l`p=HzNn`Gy<{+Tg(GS)2^PKqyRDJ`Bxz|+!FNecL7~X zeK#Y$`mB|_lXU0=W8h(1Ry{BqYO;j#A;W++0K4lhUK;twD_0D;D+PYKqMa!b&XON7 zn)g%>Q3Te@%+$jP5i5=M)Pvigddhv_?)@YGz)_vP2EVe|motf7=xU(aJs*4=GI98q z!OSdNmp^VGGBRLEP8;209qe|l&gIF{-Uw!J?-j?isUf9%j>{WDCXN|}Yb~?(lb8gw zwrTVg)dd?Gyo~srq`2 zrw_S^DIT_tfjaxnrQDnMtQ9X%-q}m%LuWk;mR6G#h`2AXLinC*7C^h?GqIQbbG9q| z%``!jt>qSQdt<4V+wg|vN=+eCn>7%ytBYQ_2CAeXcN$SsXDGDE`cu#onvd_C95yCh zB~>SiEq|i)d7JZPB;fYc^t~lh{8X}(9?pjn*An=$P98md?sO59Ha!~HvjzLfV2Nb= z4$o}HY1T#twsB~l^S1dID>PlOww+P=PFZ357|WmxsAdig%lgfixXIBeP@y6j;h9wM z@Vd%uL(te=rz%&y?4SMh1V9D5mK6B|gxKzG_`z8jTN%Q@udPs-0}TragxIZA8y*6) z9~}K;i(OzUQAT482p1CLG&Xp*ZAUoVMGI!qsu;dg1VF$ef4+9ZjX)) zo@~u{w?j4zRS2V_EY0?QcT{-9Sb0OtQ>Myd&ZZRMha1b*OUq+GeEDF z-k#DLv%9cb74Q3}vd7MjtthRP zPpI~%ZC@LoHD*vjn{Zlxn=R_qW(c)dJ-ir)7n!8$+PQiHsN+&Lxu9-rUCUD_ZqGe5&YKYg zTWD}#_WL!l;6ob$>(xt&Vg;M@Z*b+W;fa9e)bX;7?+)Nh^C&~L#j9KuP#vB{QR7A% z!Qpag+;gqM4Cf~C!<%TCYf8;)Qc2Ty9Zcg?GVcEL{{l(B?aiq_O|-^6FZ!<(1GEt{6= zBs!-k1w=-DGIoK*q~!Ocb`RMU(tF_48WG}jDdDWQssn_7ISqa!e{-`EFG8` z!ss}7#M%TVwf)7Lp6xGzWj)-Z;p;c&x73*3^EpGiD{?z~ADX5fTBv`q2M?z*`Z4}| zGMRjhVD*o^f~0`Rgv``=r^;nCNr{7hrVgq;ft;O905(l^w9S%Dca(?8#j|S@v}GhY ze|E5_Cg_#mz*9Uso2}7+;s4XfxrZf{r*XV#w%poLi_%(^<=Gm?%3EcMnTlmfUP+_y zQev7~78z!clF>+NY08>+O=wcn>7tn$ninQb&HJV%2@0c*7X&;Z26CTc?LNEn*Y4lv z_s2QU@0{m%KJWYazDM}_u2K9Ruo^>WuQS@x#wmB@J<628rd-i@N&PiylXlw3Kj6<4i(DN!zP<2YFgkAWtff0ru_VMIw5IR!%j;m3X;2c_S zd<0W+X=38Sb!a~!<+z5uei*2);7}C>V6Kijzmmo*?$HjIErVR>;s%B4OgEUOr$c{& zl7#!l&`amVjrp>PT20;^j{DeY^_yz4E$v+j{c8)`0N;7Me?FM_HnXGp9_x8ur_Ma1 z1?HS%(PVG*QrvA9akk|;^D5-Dud{mDUbK8741eF#(8?sfC6ZdlU%t?RD~kyHKG@PD z$u|*_nX>WXPCvYIW{>D%Qs&vdA$5gqUrQI(VNkMQwtH>n!so>_5r>mNo=eZOxT~0G z@{|!!@8Z3Z#Fcjcu%ZizeHD>^5&-1=H*aW5vs6dYnEEA>Z`OEcf@)ko?bV~OIh>X^ z!J`6~8Y`~Dlt^by_oW44mNN~qL%M!4Y>#s=ql``jZax09xHTkof z$1x6+x$B~y$^{ZeFnfbTbMsPe{W%V09b59saVc%@@R2JO{-%gFw;?%`w zD#`)EtqpcPtl(!(Aq51h0uAZBITzwrv@b=HXMv^bX8IV_4NGz}^bQE6sew!kd-Ak` zw1Qs()LX$oKqy2d!L)Af&4^xh za_s_KQ~vd&1f>1gfjpYgbJS%j;`QR=hJ|m*CJ42>bZlh9$P0VJC<-MQ;$?_1-~`vc zPaD#k8?x59d{@V}$r1izqGy|;c{=ZH8!c!gq$y0cm|Rn4(L7@I&4q`qk4@iHnZXPg z#k#Tsaw{IU0R82Ojm{|^&Gd$y4IWjIUq&#tSr~U6+%6;@ui~lRw{q2}PU(asZ1QTh zB&fCT1;@h`|B~BAlS`%=Iw+dR6f}36i6Y~N`xB{iB$1n*b}O$9in6>Oa2uH z%Mw}gS%>`MT}mlH)yk^j9Y6@-WDQbr*YAlbLR9W_`1RkppBD1ZwI})}UgJ5gqgervQ5|-DQ}uAo84jcC$K+nmen#!w3)p@U 
z-z*#narg4!qo@(x)|Zem{gJ_r@@Zr#&I4;B^<#zUHL7ECrMbS=A0A4*tqFVW&ZkCY zPyTb-v$#$FhIfc5MXlEZbs#TS!zbK>E>37QOOvm%`?PK^LjZw;=>pNL%l`q~A6FAD zYv%Gq-CjEgs;~nb9+dgf?}B>cER@xBBWQ@VJb$h}7DT)DA@(%5>$59Eh(z#MS-^Cf zIcvyMUUY4o2{SG_`k48(OYnmC7Ke;Lby1v3jty6F^kMe|+7$|31Td?=zC+*_^C*}3 zV7WeVMxcdg)g=IxsZBkUhTY`-V05K|DJL8SoJ9Fu_jOuJeq+QU(m1WH_EiIm@l3_| zofwr5&uNYlZOPWBe0yDvORw4@P{3 zi?it($6^}>IDhir9<+hha)ZG8C;#&y4a%-;B24`dgoj&&p*z|X*L~0de|y^4sx6G( z-=-LGdYy65Lbtbkb|zil(V^q;YJz8W^Jw8(z7szXSmr#g5|ijdnh#GvfU^Oud5}#) zKy0)-5buyWf2;pL+yYWCkw9?AfxHTiaoihgYkSS(apm}5Pm@Tb)zxfsM9jPYTE6!g z(G84W{cEvD-ZAqv=3})aOdQHzH*(Kv0nk%Ojs;Oom&olbO~AKrAm`{(QACV|Jvg_G z0nl^iWFUXmvit@E{rez(lYuAs;e%``V18Y$STvwH&9{VDjzA;wd-DCS?GeQ-FCue-P{O{JyJE%o%?-vf(_1spIQU|VmCU)el( zHIPiBszLd#jpge(+JQ&DbdmId^3;7@}IeYKVIcx8=Kj*dg=l%JptF1ytLQet!0GXPqqCNoN zBx0Yx5fNa^uQUFc*c*@mbAXwG;J15v56XK-+mDgEe*elDFhSexKhQk}3VH^=ob+fCB> zO2l$rZ%q{KH|%6wHzq!Z$vCR2h(Dj7s6L8>2_CK{YZu9&>m9n zN{t+1RXNs6{Ia3TagG4I%7S&9_v8*${8g;ZFW(5;Ddpt*?HU_AHUJswDn*S;J$TjX zMUdZ3;w9N1uKM)Mn@j_~mIK=p&S8zK&vEp3aTk2Mej@dbW4YcB-c@=e3D-Jki~D%TJz!r2xs(-}Q0jK_T2WxyDAGJa!{jvO=S zZQJ;D$(f2(uc;TT3iffE+o`c`=8HZ}mPkJtk$L6cbr$V17Yz;$lu_ah`{reGcS|^j zcY#$nOW&gF_)6j1_=hX;oZ=w=@4cu1pO_g`z>)g=U9QX-j3S|eM;fjw|9_$ZpArCw z@Nw|1o69Am#zOAQ65`n6C%UlxZ?5naZc)Nxx7Yd;U=y&Un&~NXh zFgd`gNyIAt?P)|Dose=oUc?E#Q$vV3-k_Jg98ScE5UJN0Upg6rAbFSYN!CxDm!Q`; zwz=k&1;vJV&xM`@6JpT}@gT#Oe-!*PN}?tbxLa-LLerwlaCDcIh;g=`H)0_KJ>pCY zOgOL;P^{9+>60-NiDY&&%MFL&Q$`e|yG9J0$x+>x(v|ssuC_i0 zdL;1cE~O<-uZz@OX>@C%PF?FxX;xW%y36s8kdbV0WSiIi(w=ZwtRbYj$H?UYE5d2gw;}BG2l+DE6F3{(;m|oU~)V_ z&U;V3i&@dTTH~ioG%GGZ8J9T-#caZSLPY%4Fl=l9z>XX?z?k_h951>4NLX`bolOY< zA~bE%M%yoa%7-)_;M~&$fS!$ElRELdc+D&kUe426p|hI&eAe>TFEX7$@mS2dU+(>S z_VGOLLw)00(GEe|zrSlt-cE7Q zH=l5jg`9GXZ@`;d6@51ZPc1rLw9enCdemdI`H2BQ+dfhT5=||)J_t|m+$*_}S#mPn zeR87$HpdKXeU^PZVP)+b95kw{!cRy83`n@Sx%po0H&yD_`kpWPv*gHzTk4EFOYTTZ zqn3W4Q(Dqm@8PlN1tIJ1(F(w^&9?%7JBlRvJ4E}i;FU(-go1DrYO!hg4L;RWd|~8U zy~+OWjj;`%zMhYa0X-6GC0VVQ`rJfv!foRD#z#plDbpj|qi)LEAMt|p5!p-HJ|AqC z_1XH_a-7u9FIA|CSrcR~pPZhYjO9uPZ>M)&b6{2oWycJJhK|WhbeF-o}U$Pe4Yb;#<}JE{^tk zLSp7vGpBiOG<2BTti#0_8a2S^5)5m+Cw;Z;ryibrf3eip1ot1l42+tIbG5I+e(lXa4X#;@-iNa|?`+oQ(rOKfl3HL7IwUAur7hc_jlX~ljZ z6K5{@iIx7Jxed%Nubf%`$jBajGndweEAUcx%FH5B^L9&<(=6edcGr*Q)Kqq6%cU$? 
zy6|)|hE)1ne5_IoNhyE&gRA<^ zh4q5b6u!3Nm4lx|6gm|iKX-FZda3TdGqSb9W|6D6|73Wm=t@~f7n8ebIBvMeu=^xO zsw9}hXIAy3Q8Bn^gd6%?dr@gde}C~HO$C>E4lC3eq-+{iR3S}45wiEU^yn!g>b-5|DU-v1z7$ zgj3A=BGd#v0XR^9yQNuIakTfk*ACj)HMIJL&UK$MY=F2%hebKhLFG~A=pNL`2jj)c zs^W8BZUeB|L*Ye1pRbA2ZZ1X~?&X>Fd{(Zu&0amK3Ow(Ff}_kkLC@7R4Ttgy-F!~| z7JQ4`Ao93%t8q|cRk&~L+4=W7e_3sVvMYy`kOTGzmc}2xe4xOl(+rU}bV&SSW9ZLPH(D{s@yYgmA0O zmwodFB@S9E)=Q2nJM(yTKM^4#V8lPPXdMfkU>CIE+Ry&l?M1q0jk0#3{^fpfhF6~x zyHm}hOD~+KG_Bq zwK)B57h$tMZyHsh+@g0!FvH?It;-m`cFfs)&=aAl$;oebC?Ir-w#=$Ir_Y`_efA9d zVUuqCZt$aG6Cg82qTdsXJ^HT!s=jxw;DsLL6RObcD?E6#tZj%pQu2 zj=Gt15?2!lA_NjHm3?^EgKFaAR-psnP+yGWnx6$5VWHe|q>^`C+Wy<1=|s+?jMy#g z;n2jf<*(JEU^I0ce^JZ#H7D&OE#F(Mrm^KEvjQ0X4Kn+q@1dLM_@`qMsDUqpIg9LM znM@TD6WBxrbP>^*-KPeCd}2}n=%T?T!b$~i0bBrx#72RD94j_f0Vs+7OEdeaKkFTv z{z@+ZNc=ekm+4)O#L5Zm|JQf^hffu6=y6_LT=**JeKix|=eJ6@0}PO3Frkz4^BPv> zVmhGZcaapRe#3A*us+T?YM6x-7PF*57-5e=w*pL<`u; z4i2&F;Q1G9olLmi0TQ2*kib!)D-VJI`MQU|dU8m8SwK(HzoU0-#E(xoaE}AzbJPoJ zdO^`wUfdF|;aG}b=D~!kL#6`w!*=lie99Q1mV?)32Wj2L*WA4D%1JyxV1g|o;)6GW z%OOgMfywDcRHEnuz?yN5-QPRQuGR*uiiWIGw`k&~Y_1nQZITY6T!m|X@qMaiE=3+@ zt3YILdBNsm_dh+|8TF#sUK83nC2}IU&T2red3r3x9xfC~KYF1BxnAzoG2y*(bii5xA@w0#9ObS10%WqQ&zAz9Yc zki$1d3l(s~wP?{-Kff`}QP&J88mB$D@WbjeX9Ugko(TsQ@)Ug`P_9GqcneYVRKyPu2JuwA) z+E4T84Yv%Tp3_C#l>vfU&wl4aI`^NyN9Glb#%>r|vb8&G z|0*VVEL2^V|ItCV5JJ=MrJICZ?V4>|aPIh!xNoDf&^)>m6C+U`qko~W(!3E{7&$Z* zjrRTO$ho-V^;OC~wAM=Ld-kI=>szfeI~?zOxJ2x}n`5tkXM^}}+}TH`VvAE2pU^Cn zywbGV%0BJ$;>gal%twOz_#5I!h1`5M@6OnB`mpNL9V!hrLAr!#p8mm4`gXjfGbSs_ z!U?t6>&Nr=x`ZS--acGvoIN2-ETWX_`k-Qvma@R#CxTHuM^-2dD4aXR9=Efk8sawkUcEZyohG@8G6sZnmy9Ocv+?(Oj zD)EgVd=cYO>vrm zXcYK7fOxs2z{Ip}@VHs7bN5NB619-tw5JV&{7@R}=!c?`~d{6PG}IS>$-uYdFbCs?S>U0l{9_q60WjwoMnS zcXko}3r18yK$1T(Ua@u!cm4&5ppI2|=K&sU3@@fFh8bWrG2d@5yz1gU0gk^%qd^fd zWPrHbvEbmpfKd$ZOpv)c)Y&=S&?vjakuy$i(>NCRH#(O4NCAQ5RS`=p)L8x-{Qd`! z{eNptazd4osUo`~E@LkC4i8~HJui4@B`xDM`kIroz$}2Ad-E7t7J*#Gg1TrZo!@|+ z>f^4aedoNGBsO5i7yd`rU_YTJ0=iV+V@n`{;(tjU|5fPx7b^c#0r_vuszoHu3zv_b zD^O8hn|cOld*`ajsK^U9t`VM2C4SM{7-&my%hwkP!^X!sRRyaODTx-8a=1c*ybka6 zW}7piO<}PFIb3g>5W;k+dNaJQUX<{{4Q$qWPt1K5%JMGt99Xsg`C$S}^(^KOgZbK3Y zktXzo>jCgu@+Xkw+qBQ@XH&t8qhMv&MTyGUAx8y2Dr)+Cw>^92tM@X$HzNzR&*uR@ z=bf3|R>vLAr9<|{!*)2Q>GXnq#TF%zl;*u;KgT9h((#6bMf*RAEXxqXKDu_0n`+Zd z0GS)fWZT0|zPj?_>cAY~8ZQ-6BY16-Rl0sC-gsruWwo{mGk-?DUZ3`ZR9>?%5GemOu0*H@$nS+-~UD!+$n1~pS7rqh^H1Gy|m zcV7k`rsp}EHJBYXEuTrB3n$qk*=Rpf$`MrV_t(Rs!@ueOa=FN?E)JlWN^2Nnew7vQ z3XLo)sd{e@yWI6LO|Mkwk3r|t@&=F4Q+V8B)+Q~Cl{bQZ{xT1rC-kkVgx-8hd&Swn zyqZn`w@Fl_zYZvUK8}z?1axrJ54Ysx@2J^{r`tSy7ut!pUzt$c8BFU%Dfa?9gWc?V zo>NN|p!7b~nY<;qU6x?4L5pjWpWFrVLz`u)9?EJEn1Gr7YZ8oSYbw&#@>0 zjc4MK36c?Wn2FK=xh^3&j;YT_5HU<3X_=wusR!*N}QsRacWOJs83rauF0R26(XfhW|^Wq=- z3QdoT6Lt)u;-%?3-vEEkc)RyhPL7Cmr!b2=Z7wVytY!XdPeS{!@(d_qx0IaEwv>-2 zW8Tn92b;PILw@uTMx{F1Vd~%Il0!5978fAc=Im=qU-fr!eg~!W8NH@btv~od@+vq? 
zGG~9fT|VLCvbRh>k6)+0n2P4VM_->rTBXd5bgk8;5vXqcGCUtKvco_AZS86hBg(rQ zl!K@W1fzQk@+M?PpNZ^vVZ~)ft8`0tj-y=avwO|p2DCp62o|8S$)C0VC)<_7UO{X?*?f&WX2JVq+MEo%SXGcya0sLwVM zqvx*IWE;)(_>`v;*+x@_W_>Up6Viry-}|Yl`ll7E!2z%QW2Kpv>5d^^y#2I(eOMt& zMR@4h2;vxqRb{2=HiUfe-a{ZRuGS;j1NY%q0fEQ75_2#f+26R#bg`$D~-xo6RKsLa9-SOKM?syyxPTWs!|= zrA?`+Qwl7zX1~}jO5G}DvM)C+qT)mJk#e`U3(K_s^aScuDk13p;%jX#eghl)*WbOn(KcP1~xe(a(HsFTI>v~{xauMQeI;klttj=W+dMX5rYG@V<5LKm;4 z2Nm#lD@WG!<-uCZ#S9Qpk;SRp(xs0X%iG@hi)nZ`Y;W2?09BoE$f~Y=G;V^?0H#rIOZy~hPJP|%w zI7kKt?3rJHhb#Fq_iumQd>Fxi!HcP=HAeoKa?2MQ8m)%=5kt+5b2#B&vvLi&yv}80 zOwSv1{Y{0d=hhu7tHP3vr-sDYN^F_a0@FCq5$(5y=LthLJB2!u!Wv?W{jxFkJyBYD z+hV2XIXcRj{{6f4TcPvBgI8bF1aWfRWuCzItFDJ+U30_pD#VWvQ&}r>5RImE1-R~7 zkfnn^9qg3DdFjUvxH3MIU-DOd`vM4gL?DqDdOl~>xwew>r*#ALvb!i6q3$9UKx>eK z_zSrDqF8t^_{PXqK>rr1D zW-J*4~{22FpBG zu}jHbGRj)t=@`uq&S4U^;Pbz0KYvPTyoJA(H;1pB)O(X9&Ht=p=mZuvs^q&IrC#H(k*dDo2p#nzo-u`xZY=m`i`F!!edBN!|J)ap z0bV$fR2x1YdH1JPf2Zp!*O~q(O2r4~} z`Vy@9(6nMatNK(#gIHbXIeVr%`KEFobyf@agyvUGD0g#bWfo!-0a|LpWQ5C6pn`6` zms1HkynjyfQDOF6v8{9dhgz~{ZZEW3Pi{5N`9q3`_~L1098_@u4DV)UQG$LMrV*XZ zvLHvf3)R^#UWGyD0y#nw;OMra2_*-nNQs~Ysdp!>u^axDFuoh9m1kurv4dl3<<1?i zWO0u!sxaWW4(C>D`beC1&WUqexAEnZylwA7@4@aHd*Pj`T^_YM%s|PB&DC=zeevh% z*TxJZa=e{>DeLdqL&@!hp)+o6rhGRe;Eo;Y2}YX|4F~rL=4bvCYC=j*H2V;$XX&-S z<^ziOS)3vIMS8BWnDG*uWyqx8^gwMPzx5~ne8yxm#Whmp_qxVfr=z7K@=Po&=B~HSB-8eP4*Ryy7qg&4FQb~Ln59KltJ|*8n5L^f<{WtdM z29Z0630!$Lmk}FNSQ-5poUSLFuDSjuf8oe_l5#!m8_i^NRxID6i_(jwqYWwUqZBNB_MkXlWO~y;+G$w{&U}r6n}3+j&Gg|NIf{C!zRzj>G-cfDagl)9Af$ij2%-;PC~1&c1>^E!=Xk%J zKwnFLticMDzBV;o1xIej{!V)>d%pOt&TZyUyc|83b=A_l6S@zU=}5Mu;}48~01R&` zcJ7eNBeU)~(=MxuA<=2=1@=pRbUyQ51SQDWV1x<}+IV&t(@`jRyYP0!li`p^&y>r5 zRq=2(p=Dnz%FYGR%Q3?X#TwmP1N#=zhy(;a}8=d2C&lw~CP`k~g#f6HBR(9DK-^UC;tLc8KYYg`2p{L~EK zjn0=72Ny$pfL8nL8pX?UI`pcJ}2X()qI#K6DraF(edtALP|8sEj!1D*4x0!D(^A{GQS5XOI#gx?KR!Xw8?1w zEB$!C9C##s#=7!QAUlt9OJJQ7o68M2TaKJPJmqe?+{fleq3>`Mo9~c0ba|31xW52! 
zVjOOnnoonZ{++&TA)s7+C_pKtT;noRQx2J5!in%t^t@5Nejh#GJgj(fi=)c6n}=`V SEsPCz0W~FU#hMq^Z~p^3&eD4T literal 0 HcmV?d00001 diff --git a/website/docs/assets/experimental_tools_settings.png b/website/docs/assets/experimental_tools_settings.png new file mode 100644 index 0000000000000000000000000000000000000000..4d514e8a8fc63292579bd7f4a8b63b20c7c8e3d9 GIT binary patch literal 8543 zcmbVyWl&tr)-Hhrf?Ehdf`%mM1a}DTE(spoEx1lX@E~Dea0$)~1cKWz1PJaHf-|@c zkbz-vF7JD)Zk_Yhcg{U^f9$<`RqfT?)w|d8JZtri(biNVCZHw2!ong}QI^-m!n$X1 z*WP}Jdv{kR-a5N`-1E{^lEtbVq2IYP?%T^~$Y5brClX#+KDe{-J(P{Su&_wF{<`k< zx|i8tVKJSk$jj*an9cG1K^K9+)TKXQIzo+!P-@g%yN2v7W3ttP#1xe^ zpCSm!m`wEc0r`92VbBu4mmObn#9Sn7A4+6z&BCJka^rhfDK!8=n9(A&|FgR`4}mJp%fGCZ{(3nn zjkP5W-r-;A3@{Y~-fK0H`f-|Sk;NSo{W2p35p<5S&A#p0Y6?~%utD#auLDn>*&aU0 zShtVNWZY8^5|@_W+oBkfmrv2(Jwn1OUMM$MxkQG$;#Q8rY5h_jucU@mi7;1`mXArE za+reNNd?|Cj`XwzxIX3M<73gA{e>Bh?Kqthp(utH=%qck){e4Lj76NOhvXw&x zlE?F~cVTq>gn4Ye)xN79C4fr@Q!Y20kCrzzdo6f)6mQaA?5z06y?x-}jIp=dd!~QH zc<*`*7AqB17nwTHwMZiz(jd4 zf9pq28cnRkL3zv*`&uojTWtIGFFsi1JTwO8lgyJ`)FNS(zE&hyl!n5aPdRa2^XM zG%Mb!Z(rm1qtA@>d4v+pCey5@7ZhQoIOF;jRS>9+>=9{jwq`IquT+O_V6!&rQpScy<=S}nSbdb>Z_Rv5V zeLNGVBbQV~x}EnO+gd1zy>{zjtf7h}^MEk<3Pc{llS znc8o4^G!eLboq~Vp_3zG84X#aknZ{6V_SP?KNr4Ht<`$&-*3HNRmRJQD|!|Kxp9t(g%yZ8GmHr&v|&ydjHsyW1O}DR|nmV1U|@uNsy`E>p4%vUm^#6Z0=# z9gY?{UiKcj6YrW`tKk5Z((n zSkCL%#ZYyDv-&ZSYl*|R^dnQ1q4-@r3;&lB3>4)AH3 zBy^w_RB&s4kDXNo@4>7msO4&FzN22F?p=_0T=Xg{0R-npov)%=@)(Chk_w+>uz^l$H~GIL#59gO%$uX|oTn!oDp&dIB5`ftQ zSjz(w^9P39XvIUOlMWZU^P{A9i*;SDGw1Ji#}9V&L=H%%w?ns!*)^86KTm?D;-Lah z!)Lf7oo>$*U3hzo|5~Q#^t+k8heSOsLbVBK{MwVxS#CVxlM zIL(@sYNtR^=-?=0;%bquC;b<&Ctzm{X-X;0v^t=6NST=Ha)iOSWmNdx_g&Pzag8dK zJ6y?=cI%BO@9EFl;xGcmh|btpAJ=6Bw#{7~00S9wtVh<4_m9syhOoIF1{+oFNkhW< zg)BFB@-IUn7eX=y#TcuxF$^NMAX3#XyVIIiPC|ZN)#zOFNm`5=Wzj{OB6KB2_g-Rq zxbt%n3wAwyLH=LCllOMj$a6otvqp-c6Pw2ranN(A@#FavEW6CMx;Hab>b-epX`gE< z_Ff5d;sG;{sTP7l#wvWSx)ot@JFHv(D~;XjN{jBpjs)DjKxDzfLdX|uH@bldj){(Z!ugl| z1THWC_|=`w_3qy~vIG0AjAbe_a7OC<(nVQVXdfE>7hZo4_y6eL;-+Z*Ge9Eha3Lfw zU_CFp^ZkMpC+WuzMr0(cC#SshPP+nxk+MRnho-Ssjij;Y(7M&W5n6$rjaYlLCh{N* zEFOXBm0pzUK}*^{HZ*+9Ot#|}HbZ9_E*I~A?NKYrgAuh9&^IVd0PdtHGA(mhQ2huU zU^j}S+R!|T?E3aFqEnkRY(uTO=GnZ2vqOa3Qs|JJV}cRyI!asH!490wgtG439aw=c zS>7jmm}=SGiCr7%uukW7odW9}4wjvroyinkS$L6`(fk$+9%Pj+KSjE{2! 
z=9QnB_D8FpIe6@fK{Jw1=WKe9JY9m^UMq5pebmHz6xb1#t)87Dgy+X~Cgzs$^rw$K zFp5-cR9s`@(O2*OU@o}Ew^o`)^}(Lm<-Uds?FHm0!eu0w%k;`;;n$idWs>;ubM;)Z zo_5A_dwOv+gKUOOuqDZ}?u=@ZW&_CJOkY5}2O>B=O z(TonaR&}zOicHcEOOHl+1Po49mrYKr(r#b@?MwHk$S0U~tsx*V>+> zO)1>I5RGvvhR7~<`RlcyvkSh{Ebh__J0ZgtVy#0JY2hb(K~5gq9)jlXN8y%;_%S1L zX!pT|oxt==I@%M#Eh{)((^mIKvG9ITA3?f@mr7RcVD7-C$>rLrLfC8553c))!gBVg*T~Ge>A-fk`QpjIv*`fhXot87hu`o&ZYA&RLjr=zW{Ni7c2um@w4jQ#+ZQ~^-?Dm3t9uI2oQ$G4)BGTAV9A-e=9``@ zsB%l>r;u2p$Dzc=IVf(Id-Xf0f&3DYb|ZD6^k#DN_F;ab^VrXtv`Qv;L{-(YuZJ9y zcDMY_2R3Hn=daEG^u z$^P*RgL<>_QLeC!K`!%^XQ(^?V()flDDw}eS128q#V4?zSm`AOTJ@~I#-mHz(=s_~ zc2|@z&9Xs?)Hm|}*hzv{CKq^PAfi_L?b{C%p!bVQA4X%_hC;ES9Gcn@B#O7*5$w|q zwC+gqQ&si(5jf`woDvPdH?#b#Om}S6j{U1N#Jk|Y70t}1g}KQ}-P*CmPn2=)$*0}w z`sUwJ@l#GFaGB5O;xUm9@JwoRFD>E1X&)?X_BJXrsD)9nY!k~js87z=z67dLn9hZsKp&*Wk3mtNl??vG;}Vi%d7r&I6CAZw1)z-QVh#TNG1|e#YU3`A`k(ysp9vu>rQKXzUjD}V z-?|S1sX{SM%xL&V`?u4yaaW{<9W`;~{@k9(5`n!IJmV+~$&A`IyR3UM`BvP$B$xf8sp zEPbm94$iis1?GRx`>?tJM&w>^=1DKaWrN-?Xe z@R`)EiG)|tHXY^InjF?tnFw6`-?)%CoPe^jI68ac)ck@c+n@o_N2$HLqP2wCJ|Pa) z5d`Lt76VFErL>D>LxDbDVyI>PA~s13nNDCZsf7i{vWs3Zttead8oZbTLEt_G<}IRF zG+T}NmK?l0`8ij@ZSv+sK5Y3JS~u#q8S4@I9ztYt zT;BNAkTch-%SxD3J!9F;sY2|He)+T=XP9oBrvJ@pXhtHA*#JT}#2MR`gd`{&k<`%z{z*3lUHUN|8X2ie%a-Gazd<%QiLyC5z z+DJ)01=j-Zq$n>R!tZh?w+c9E{)Zg<=l%FE<%j<@Zi=qT?d0XxrT@MBIz%Ym#oT|b z?EZsB%WA+*S!%|`Ea@f4*4o&x|0oEGY`GOWRT1DLnmig7lA#vouDKvN0I|n;h{S2y zf0Puh{f^4DeL2jVc>U^$D}|137aT<#WlYYemo`1MC%>5Mw7+yLe+XuQ zTd2XJ>h>C_e%(!-%kL!_vUPAdWC|=B@MDj~`CGS*)XryvY~Si|@shj!G=Ca?;AN}x zS4541^cMN<%f*X?N$B4DHYj>@-${Dbxf{pVt2) zvQo7{=Ub#*MkE0TngNwt0ZQ67r-f2nPJb@?vD}hQbIckGWabj+8m(5h!nXOsrmTL8z*C7eh@8-)yj)ZAZ?6bP z_gCNY$QSl~;8?b~C;oi}O~3^76@ARDKgm_bzj{Xz^!N}1g{HP=CfN45B<9NRcD{Q& zV1w0+sCLt*Hu__ApaM2ijIr};)T1n#WMnJvq|k?OZ`*|?oA2tf<_|z~aB38cppDT{ zi2--Y7p%~?3vx08^vV6fEe05l?*Eb^}Z$Q`PWjyP#pn9B|3~IY1cCJES$I! z`(%uDHxH^<$%f>#weCT(=ry98^N$<-A$kwW2X;Mb&8eB)k7h7aD`N- z*r|%0s_Y7ti&BigSZnr-B&XVlm~O?Pj1+x>NU=ohVA|@E_+Y@tL`Cjpo~Cm~SX*C4 z#Jpra)k}EumSbh<>R>9g8V~X-TRTxRDiTUS7Z=i6dNOfpzE}cj!TVacS8+qrH!vYh zg{j{MX+`PpQ96Ql&(3r3vdK~jntngkWTv%^Ja2povxd8YQ@#P-LK^+cHf{F>OOaH5 zJ&CTHmJRxbpNnX-zp4FrWjrNfHz_`rrHpQVkKlUr`0>}eB6`k^=C&O6%g&h)HFVPt z7*9kl2mhX;x*n2;YgcK5ZpKa>m@`u&D-0bWSXY9gJ*T>L(Xfe5hqsjRhFucDY$Pn! 
z@BnT5nk7OPsPTT8^yd8!%~;grZVGBM&m;{icrqd7yCcFRQ*U}mwh47Vz9Z)t7}Pe+ zE){wbrY3!}->MS7wQKEbo)!$Y<#@R0q&qjPR4DOFOG~ZuE|Tta&deM142|E+vR&Ex zs1Lf|5(_zO19Erspc7sBiu*_(iJa{L!LL=B#9DtW-0!l`(8!QGnc&U#cj4CK0f{&n zIWe%}Kc}+)8{mCG&3ulD?)(37{b2zp(Xo{2mZ)ew05d1;yAUoX=!Mr&})g?%u%1I0alaA(%tSrOi*oVpM@L+96g__5Ym6muGu=h(+UOU^oLYJvw2lTV*(!@6)4;enNnvhKYZkO|*TWSiCe zdyebvN0N4qr2Q9#&!lepzDFM9RU}&%-(-n+V#6=;?9!EV)_N0r*Q9Ump-PrM45k6r zN=xeH%q8L6$eGed)m#TE<`LFoP-MT56Xi3``#ox9V{-!GD!)-$&navgh9=up|E zDrAes6Fj_zvGWVxTkG#5eQskPZ{_zs;;OoztAp%6=?i^#zG-u*CEoyW?P6U*GDef0 zi=bq_hpPJ6&|<5}+x~t*hSr_R!Fif-={kxTOUBd1B4Q$#l z%!Dn(SaT}B%_f0T{&?$aQ7GeSgVx97UK#bAiG)VYbmZ#Pn>Gskja0Ks?L<>`q@biV z`nQxY?FD{)L%yURjp1{Zv-Hy`&y7u4MnypHA<9kdIyochBfc)e&dzSNBYyAC3k$&b zpzu|qPr9C%$>{A`Zb;c2PCKTxJjeooq9+eCBf z4p5H9n_+8_ixU0uM%C!OF{dsi$6{U1k-FymMMg)LWfTFt{le&7n-~2O0%VT%y)qUb zsmnClZ9JytSs8C%DC+ezsK1nh_%dF9KC~Q1E|2ZqE+c4RNA12l0RPV+GLxZjRPjdU*+%1kYh~R9$+3Vs@>uo44@YqjlkI+?K z&UU-&%{Bs$|L~#Z%%{-;*K^9vIungtcbx5NeTW(>^)Fcw9X$1&dJ5Oxn-)_SA*8E0 z$^LD#xEsgiWq=^r>@sCjex-545wy1fYI8TRpVIodlP>jgu1a~JhUe9K>sS;c0>LPi zkOR1OzdEZDTI-A;v?CrLYm&Wf_;hjl;l2|XGmk5f`G<6G%%kfLdh*+Wy1K*~F2Dn} z8xK@MoT$Q>#0gsQ2fcw%${HcUCb&G!sp($0P7WnHE>l?zUXPQVPH zm2?TEne>f|$sa1eI^YDu zg_o&Q^T}Yy^#+=SmOZizrSX1k=phvshIQs!4+BK8GQe#w!&g0KfzLk|9%Cn7(CG2T zz$Iw?R5r319a!LWxvl5SWENu>swKL->@th?FlF0D?){BoynBqZq+Gxs#ELtYID;RQ z+0HSqH)_(eJWrPzI1dE1FCm=Wo9S4Z;hKQA2dH!!O7 zMHRJt17V)$kGI!Ld|3klxA2A7YX;*;23n4rU-1j;3v+F!I42Y`F zE|Y6*(W^F=0+OiiVl3TTO^P4z`lyO8iID^I1bnmv@QoJoia@F|i%(~VT6W6|Fz%Bf z1H%yqvcb#K@bcBhQ{}OSUxiPp<}S7K=&rx&OdLdQ393KSJui2Z{QfPm3y1ewd2E#7 zB;+jB(iU_>wB7`pr-*+fM-7)<7U3U1`UH1vy;gp)-!KP<#IR-Re%iV zW9D`~FPV4Ux9SPTli=A~AQpQc22vj6JiFPo|A^MSoq(@opqeI{bELKDrEQiSDTYa{73r^A2x+TV3m8|FgN_s(amhnb6+) z9^kED4;4rwA!1!@psb&yZi7eStV`yGqderQ{6M}CLJz2gbf-zNRuT1&PXVU#kBL3c z2>i}u%+T*4dMaF9e#tNzgJSXGx<}2fJ|g+$rUs51Rtm*>_?Rz@aZiC$RPXnXJ1a_&Yv{-WQXWUa~E?VK)E8XF&~{h->|e9NB=JkDz# zq7J#BId-S$RI0}8XpJ2 zW;1QL^z=8qqf*K6;8+9qg+Xm$Yx%gjRnMpCCw0)c4dMg0W3&Ba)!6_yrwL5n8=PGD za-iazu=Aw5NgJ-F!gfO;KwC2Xz~3%*=9TN8-sAgjWykc1hKa5Q%}LgCF#6xO$B(Y# zE`@b^Pt}d5%Po2p1Yk$*;$vk$q$}jQh`Z7hBW`X~`fImFEHAwPAA!1{tDe6O`+p3i z1IWi6djIdi|G&w+a}VpnO!@J$9S>i5$;%6n{kK9>j^yk%)B%sTf8*DSyPqPkR1`Gj JD`nq>{U56@0@(ln literal 0 HcmV?d00001 From e150db350f972ac23a337fad14b6bcf8889be12c Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 8 Apr 2022 12:38:27 +0200 Subject: [PATCH 132/357] Docs - changed docstring --- openpype/plugins/load/add_site.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/openpype/plugins/load/add_site.py b/openpype/plugins/load/add_site.py index 22d3ebf24b..cfb0ede328 100644 --- a/openpype/plugins/load/add_site.py +++ b/openpype/plugins/load/add_site.py @@ -63,9 +63,11 @@ class AddSyncSite(load.LoaderPlugin): project_name (str) representation_id (ObjectId): site_name (str) - is_main (bool): true for really downloaded, false for references, - force redownload main file always, for references only if - broken + is_main (bool): true for main representation, false for referenced + loaded repres. Drives if site state should be reset. + (it should be for main, not for referenced as they might be + shared from multiple workfiles). In necessary cases, referenced + repres should be reset (re-downloaded) manually. 
""" try: self.sync_server.add_site(project_name, representation_id, From 4b3405056982e7dea251e6a1d9b4d30d5a979bf6 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 8 Apr 2022 20:33:02 +0200 Subject: [PATCH 133/357] OP-2951 - clean up of unnecessary argument force and force_only_broken could be replaced only by force and explicitly catching SiteAlreadyPresentError --- .../modules/sync_server/sync_server_module.py | 48 ++++++++++--------- openpype/plugins/load/add_site.py | 41 +++++----------- 2 files changed, 36 insertions(+), 53 deletions(-) diff --git a/openpype/modules/sync_server/sync_server_module.py b/openpype/modules/sync_server/sync_server_module.py index 9895a6d430..af69e645d5 100644 --- a/openpype/modules/sync_server/sync_server_module.py +++ b/openpype/modules/sync_server/sync_server_module.py @@ -129,24 +129,27 @@ class SyncServerModule(OpenPypeModule, ITrayModule): """ Start of Public API """ def add_site(self, collection, representation_id, site_name=None, - force=False, force_only_broken=False): + force=False): """ - Adds new site to representation to be synced. + Adds new site to representation to be synced. - 'collection' must have synchronization enabled (globally or - project only) + 'collection' must have synchronization enabled (globally or + project only) - Used as a API endpoint from outside applications (Loader etc) + Used as a API endpoint from outside applications (Loader etc). - Args: - collection (string): project name (must match DB) - representation_id (string): MongoDB _id value - site_name (string): name of configured and active site - force (bool): reset site if exists - force_only_broken (bool): reset only if "error" present + Use 'force' to reset existing site. - Returns: - throws ValueError if any issue + Args: + collection (string): project name (must match DB) + representation_id (string): MongoDB _id value + site_name (string): name of configured and active site + force (bool): reset site if exists + + Throws: + SiteAlreadyPresentError - if adding already existing site and + not 'force' + ValueError - other errors (repre not found, misconfiguration) """ if not self.get_sync_project_setting(collection): raise ValueError("Project not configured") @@ -157,8 +160,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): self.reset_site_on_representation(collection, representation_id, site_name=site_name, - force=force, - force_only_broken=force_only_broken) + force=force) # public facing API def remove_site(self, collection, representation_id, site_name, @@ -1397,8 +1399,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): def reset_site_on_representation(self, collection, representation_id, side=None, file_id=None, site_name=None, - remove=False, pause=None, force=False, - force_only_broken=False): + remove=False, pause=None, force=False): """ Reset information about synchronization for particular 'file_id' and provider. 
@@ -1421,10 +1422,11 @@ class SyncServerModule(OpenPypeModule, ITrayModule): remove (bool): if True remove site altogether pause (bool or None): if True - pause, False - unpause force (bool): hard reset - currently only for add_site - force_only_broken(bool): reset site only if there is "error" field - Returns: - throws ValueError + Throws: + SiteAlreadyPresentError - if adding already existing site and + not 'force' + ValueError - other errors (repre not found, misconfiguration) """ query = { "_id": ObjectId(representation_id) @@ -1461,7 +1463,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): representation, site_name, pause) else: # add new site to all files for representation self._add_site(collection, query, representation, elem, site_name, - force=force, force_only_broken=force_only_broken) + force=force) def _update_site(self, collection, query, update, arr_filter): """ @@ -1569,7 +1571,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): self._update_site(collection, query, update, arr_filter) def _add_site(self, collection, query, representation, elem, site_name, - force=False, file_id=None, force_only_broken=False): + force=False, file_id=None): """ Adds 'site_name' to 'representation' on 'collection' @@ -1591,7 +1593,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): for site in repre_file.get("sites"): if site["name"] == site_name: - if force or (force_only_broken and site.get("error")): + if force or site.get("error"): self._reset_site_for_file(collection, query, elem, repre_file["_id"], site_name) diff --git a/openpype/plugins/load/add_site.py b/openpype/plugins/load/add_site.py index cfb0ede328..ae765c81b7 100644 --- a/openpype/plugins/load/add_site.py +++ b/openpype/plugins/load/add_site.py @@ -38,45 +38,26 @@ class AddSyncSite(load.LoaderPlugin): family = context["representation"]["context"]["family"] project_name = data["project_name"] repre_id = data["_id"] - self.add_site_to_representation(project_name, - repre_id, - data["site_name"], - is_main=True) + site_name = data["site_name"] + + self.sync_server.add_site(project_name, repre_id, site_name, + force=True) if family == "workfile": links = get_linked_ids_for_representations(project_name, [repre_id], link_type="reference") for link_repre_id in links: - self.add_site_to_representation(project_name, - link_repre_id, - data["site_name"], - is_main=False) + try: + self.sync_server.add_site(project_name, link_repre_id, + site_name, + force=False) + except SiteAlreadyPresentError: + # do not add/reset working site for references + self.log.debug("Site present", exc_info=True) self.log.debug("Site added.") - def add_site_to_representation(self, project_name, representation_id, - site_name, is_main): - """Adds new site to representation_id, resets if exists - - Args: - project_name (str) - representation_id (ObjectId): - site_name (str) - is_main (bool): true for main representation, false for referenced - loaded repres. Drives if site state should be reset. - (it should be for main, not for referenced as they might be - shared from multiple workfiles). In necessary cases, referenced - repres should be reset (re-downloaded) manually. 
- """ - try: - self.sync_server.add_site(project_name, representation_id, - site_name, - force=is_main, - force_only_broken=not is_main) - except SiteAlreadyPresentError: - self.log.debug("Site present", exc_info=True) - def filepath_from_context(self, context): """No real file loading""" return "" From 7df6c29b4e08f78ad25ac57a65427540c54b5106 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= <33513211+antirotor@users.noreply.github.com> Date: Mon, 11 Apr 2022 11:28:24 +0200 Subject: [PATCH 134/357] fixing unrelated typo Co-authored-by: Roy Nieterau --- openpype/settings/defaults/system_settings/tools.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/settings/defaults/system_settings/tools.json b/openpype/settings/defaults/system_settings/tools.json index 49c00bec7d..243cde40cc 100644 --- a/openpype/settings/defaults/system_settings/tools.json +++ b/openpype/settings/defaults/system_settings/tools.json @@ -83,7 +83,7 @@ "__dynamic_keys_labels__": { "mtoa": "Autodesk Arnold", "vray": "Chaos Group Vray", - "yeti": "Pergrine Labs Yeti", + "yeti": "Peregrine Labs Yeti", "renderman": "Pixar Renderman" } } From ad3dfa872e8fd5b88b1581e0a32eccfe6c2e335c Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 11 Apr 2022 12:30:33 +0200 Subject: [PATCH 135/357] OP-2951 - refactor sync loaders Changed structure of loaders bit, cannot use isinstance check though. --- openpype/plugins/load/add_site.py | 1 + openpype/plugins/load/remove_site.py | 26 +++++++++++++++----------- openpype/tools/utils/lib.py | 4 ++-- 3 files changed, 18 insertions(+), 13 deletions(-) diff --git a/openpype/plugins/load/add_site.py b/openpype/plugins/load/add_site.py index ae765c81b7..55fda55d17 100644 --- a/openpype/plugins/load/add_site.py +++ b/openpype/plugins/load/add_site.py @@ -23,6 +23,7 @@ class AddSyncSite(load.LoaderPlugin): color = "#999999" _sync_server = None + is_add_site_loader = True @property def sync_server(self): diff --git a/openpype/plugins/load/remove_site.py b/openpype/plugins/load/remove_site.py index adffec9986..c5f442b2f5 100644 --- a/openpype/plugins/load/remove_site.py +++ b/openpype/plugins/load/remove_site.py @@ -12,22 +12,26 @@ class RemoveSyncSite(load.LoaderPlugin): icon = "download" color = "#999999" + _sync_server = None + is_remove_site_loader = True + + @property + def sync_server(self): + if not self._sync_server: + manager = ModulesManager() + self._sync_server = manager.modules_by_name["sync_server"] + + return self._sync_server + def load(self, context, name=None, namespace=None, data=None): self.log.info("Removing {} on representation: {}".format( data["site_name"], data["_id"])) - self.remove_site_on_representation(data["project_name"], - data["_id"], - data["site_name"]) + self.sync_server.remove_site(data["project_name"], + data["_id"], + data["site_name"], + True) self.log.debug("Site added.") - @staticmethod - def remove_site_on_representation(project_name, representation_id, - site_name): - manager = ModulesManager() - sync_server = manager.modules_by_name["sync_server"] - sync_server.remove_site(project_name, representation_id, - site_name, True) - def filepath_from_context(self, context): """No real file loading""" return "" diff --git a/openpype/tools/utils/lib.py b/openpype/tools/utils/lib.py index 422d0f5389..e5d978c3b2 100644 --- a/openpype/tools/utils/lib.py +++ b/openpype/tools/utils/lib.py @@ -719,11 +719,11 @@ def is_sync_loader(loader): def is_remove_site_loader(loader): - return hasattr(loader, 
"remove_site_on_representation") + return hasattr(loader, "is_remove_site_loader") def is_add_site_loader(loader): - return hasattr(loader, "add_site_to_representation") + return hasattr(loader, "is_add_site_loader") class WrappedCallbackItem: From b33667255813f009f32d63182a202412fe2a465f Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 11 Apr 2022 12:31:08 +0200 Subject: [PATCH 136/357] OP-2951 - safer pulling of inputLinks from legacy records --- openpype/lib/avalon_context.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/lib/avalon_context.py b/openpype/lib/avalon_context.py index d26011e522..7f35694e58 100644 --- a/openpype/lib/avalon_context.py +++ b/openpype/lib/avalon_context.py @@ -2057,7 +2057,8 @@ def _process_referenced_pipeline_result(result, link_type): referenced_version_ids = set() correctly_linked_ids = set() for item in result: - correctly_linked_ids = _filter_input_links(item["data"]["inputLinks"], + input_links = item["data"].get("inputLinks", []) + correctly_linked_ids = _filter_input_links(input_links, link_type, correctly_linked_ids) From b90f54943b527fd98b808a3b4ca8be405a2ff367 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Tue, 12 Apr 2022 13:11:43 +0200 Subject: [PATCH 137/357] =?UTF-8?q?fixes=20=F0=9F=90=A9=20and=20optimize?= =?UTF-8?q?=20renderman=20prefix=20condition?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- openpype/hosts/maya/plugins/create/create_render.py | 4 +++- .../maya/plugins/publish/validate_rendersettings.py | 13 ++++++++++--- 2 files changed, 13 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/maya/plugins/create/create_render.py b/openpype/hosts/maya/plugins/create/create_render.py index 4d3e6dc9f5..13bfe1bf37 100644 --- a/openpype/hosts/maya/plugins/create/create_render.py +++ b/openpype/hosts/maya/plugins/create/create_render.py @@ -84,7 +84,9 @@ class CreateRender(plugin.Creator): 'mentalray': 'maya///{aov_separator}', # noqa 'vray': 'maya///', 'arnold': 'maya///{aov_separator}', # noqa - 'renderman': '_..', # this needs `imageOutputDir` set separately + # this needs `imageOutputDir` + # (/renders/maya/) set separately + 'renderman': '_..', 'redshift': 'maya///' # noqa } diff --git a/openpype/hosts/maya/plugins/publish/validate_rendersettings.py b/openpype/hosts/maya/plugins/publish/validate_rendersettings.py index 966ebac95a..92aa3af05a 100644 --- a/openpype/hosts/maya/plugins/publish/validate_rendersettings.py +++ b/openpype/hosts/maya/plugins/publish/validate_rendersettings.py @@ -116,16 +116,23 @@ class ValidateRenderSettings(pyblish.api.InstancePlugin): prefix = prefix.replace( "{aov_separator}", instance.data.get("aovSeparator", "_")) + + required_prefix = "maya/" + + if renderer == "renderman": + # renderman has prefix set differently + required_prefix = "/renders/{}".format(required_prefix) + if not anim_override: invalid = True cls.log.error("Animation needs to be enabled. 
Use the same " "frame for start and end to render single frame") - if not prefix.lower().startswith("maya/") and \ - renderer != "renderman": + if not prefix.lower().startswith(required_prefix): invalid = True cls.log.error("Wrong image prefix [ {} ] - " - "doesn't start with: 'maya/'".format(prefix)) + "doesn't start with: '{}'".format( + prefix, required_prefix)) if not re.search(cls.R_LAYER_TOKEN, prefix): invalid = True From b895efac5ba8d9430a54b282aebd8552c3171114 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Tue, 12 Apr 2022 13:19:09 +0200 Subject: [PATCH 138/357] fix ident --- .../hosts/maya/plugins/publish/validate_rendersettings.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/validate_rendersettings.py b/openpype/hosts/maya/plugins/publish/validate_rendersettings.py index 92aa3af05a..28fe2d317c 100644 --- a/openpype/hosts/maya/plugins/publish/validate_rendersettings.py +++ b/openpype/hosts/maya/plugins/publish/validate_rendersettings.py @@ -130,9 +130,10 @@ class ValidateRenderSettings(pyblish.api.InstancePlugin): if not prefix.lower().startswith(required_prefix): invalid = True - cls.log.error("Wrong image prefix [ {} ] - " - "doesn't start with: '{}'".format( - prefix, required_prefix)) + cls.log.error( + "Wrong image prefix [ {} ] - doesn't start with: '{}'".format( + prefix, required_prefix) + ) if not re.search(cls.R_LAYER_TOKEN, prefix): invalid = True From 50dc946fa1ba5e34d33a7c34cfd65021178c28f3 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 13 Apr 2022 17:51:12 +0200 Subject: [PATCH 139/357] copied schema and mongodb to openpype pipeline --- openpype/pipeline/__init__.py | 6 + openpype/pipeline/mongodb.py | 407 ++++++++++++++++++++++++++++++++++ openpype/pipeline/schema.py | 140 ++++++++++++ 3 files changed, 553 insertions(+) create mode 100644 openpype/pipeline/mongodb.py create mode 100644 openpype/pipeline/schema.py diff --git a/openpype/pipeline/__init__.py b/openpype/pipeline/__init__.py index 308be6da64..2c35ea2d57 100644 --- a/openpype/pipeline/__init__.py +++ b/openpype/pipeline/__init__.py @@ -85,6 +85,10 @@ from .context_tools import ( install = install_host uninstall = uninstall_host +from .mongodb import ( + AvalonMongoDB, +) + __all__ = ( "AVALON_CONTAINER_ID", @@ -170,4 +174,6 @@ __all__ = ( # Backwards compatible function names "install", "uninstall", + + "AvalonMongoDB", ) diff --git a/openpype/pipeline/mongodb.py b/openpype/pipeline/mongodb.py new file mode 100644 index 0000000000..436f6994db --- /dev/null +++ b/openpype/pipeline/mongodb.py @@ -0,0 +1,407 @@ +import os +import time +import functools +import logging +import pymongo +import ctypes +from uuid import uuid4 + +from . import schema + + +def requires_install(func): + func_obj = getattr(func, "__self__", None) + + @functools.wraps(func) + def decorated(*args, **kwargs): + if func_obj is not None: + _obj = func_obj + else: + _obj = args[0] + if not _obj.is_installed(): + if _obj.auto_install: + _obj.install() + else: + raise IOError( + "'{}.{}()' requires to run install() first".format( + _obj.__class__.__name__, func.__name__ + ) + ) + return func(*args, **kwargs) + return decorated + + +def auto_reconnect(func): + """Handling auto reconnect in 3 retry times""" + retry_times = 3 + reconnect_msg = "Reconnecting..." 
+ func_obj = getattr(func, "__self__", None) + + @functools.wraps(func) + def decorated(*args, **kwargs): + if func_obj is not None: + _obj = func_obj + else: + _obj = args[0] + + for retry in range(1, retry_times + 1): + try: + return func(*args, **kwargs) + except pymongo.errors.AutoReconnect: + if hasattr(_obj, "log"): + _obj.log.warning(reconnect_msg) + else: + print(reconnect_msg) + + if retry >= retry_times: + raise + time.sleep(0.1) + return decorated + + +SESSION_CONTEXT_KEYS = ( + # Root directory of projects on disk + "AVALON_PROJECTS", + # Name of current Project + "AVALON_PROJECT", + # Name of current Asset + "AVALON_ASSET", + # Name of current silo + "AVALON_SILO", + # Name of current task + "AVALON_TASK", + # Name of current app + "AVALON_APP", + # Path to working directory + "AVALON_WORKDIR", + # Optional path to scenes directory (see Work Files API) + "AVALON_SCENEDIR" +) + + +def session_data_from_environment(context_keys=False): + session_data = {} + if context_keys: + for key in SESSION_CONTEXT_KEYS: + value = os.environ.get(key) + session_data[key] = value or "" + else: + for key in SESSION_CONTEXT_KEYS: + session_data[key] = None + + for key, default_value in ( + # Name of current Config + # TODO(marcus): Establish a suitable default config + ("AVALON_CONFIG", "no_config"), + + # Name of Avalon in graphical user interfaces + # Use this to customise the visual appearance of Avalon + # to better integrate with your surrounding pipeline + ("AVALON_LABEL", "Avalon"), + + # Used during any connections to the outside world + ("AVALON_TIMEOUT", "1000"), + + # Address to Asset Database + ("AVALON_MONGO", "mongodb://localhost:27017"), + + # Name of database used in MongoDB + ("AVALON_DB", "avalon"), + + # Address to Sentry + ("AVALON_SENTRY", None), + + # Address to Deadline Web Service + # E.g. 
http://192.167.0.1:8082 + ("AVALON_DEADLINE", None), + + # Enable features not necessarily stable, at the user's own risk + ("AVALON_EARLY_ADOPTER", None), + + # Address of central asset repository, contains + # the following interface: + # /upload + # /download + # /manager (optional) + ("AVALON_LOCATION", "http://127.0.0.1"), + + # Boolean of whether to upload published material + # to central asset repository + ("AVALON_UPLOAD", None), + + # Generic username and password + ("AVALON_USERNAME", "avalon"), + ("AVALON_PASSWORD", "secret"), + + # Unique identifier for instances in working files + ("AVALON_INSTANCE_ID", "avalon.instance"), + + # Enable debugging + ("AVALON_DEBUG", None) + ): + value = os.environ.get(key) or default_value + if value is not None: + session_data[key] = value + + return session_data + + +class AvalonMongoConnection: + _mongo_client = None + _is_installed = False + _databases = {} + log = logging.getLogger("AvalonMongoConnection") + + @classmethod + def register_database(cls, dbcon): + if dbcon.id in cls._databases: + return + + cls._databases[dbcon.id] = { + "object": dbcon, + "installed": False + } + + @classmethod + def database(cls): + return cls._mongo_client[str(os.environ["AVALON_DB"])] + + @classmethod + def mongo_client(cls): + return cls._mongo_client + + @classmethod + def install(cls, dbcon): + if not cls._is_installed or cls._mongo_client is None: + cls._mongo_client = cls.create_connection() + cls._is_installed = True + + cls.register_database(dbcon) + cls._databases[dbcon.id]["installed"] = True + + cls.check_db_existence() + + @classmethod + def is_installed(cls, dbcon): + info = cls._databases.get(dbcon.id) + if not info: + return False + return cls._databases[dbcon.id]["installed"] + + @classmethod + def _uninstall(cls): + try: + cls._mongo_client.close() + except AttributeError: + pass + cls._is_installed = False + cls._mongo_client = None + + @classmethod + def uninstall(cls, dbcon, force=False): + if force: + for key in cls._databases: + cls._databases[key]["object"].uninstall() + cls._uninstall() + return + + cls._databases[dbcon.id]["installed"] = False + + cls.check_db_existence() + + any_is_installed = False + for key in cls._databases: + if cls._databases[key]["installed"]: + any_is_installed = True + break + + if not any_is_installed: + cls._uninstall() + + @classmethod + def check_db_existence(cls): + items_to_pop = set() + for db_id, info in cls._databases.items(): + obj = info["object"] + # TODO check if should check for 1 or more + cls.log.info(ctypes.c_long.from_address(id(obj)).value) + if ctypes.c_long.from_address(id(obj)).value == 1: + items_to_pop.add(db_id) + + for db_id in items_to_pop: + cls._databases.pop(db_id, None) + + @classmethod + def create_connection(cls): + from openpype.lib import OpenPypeMongoConnection + + mongo_url = os.environ["AVALON_MONGO"] + + mongo_client = OpenPypeMongoConnection.create_connection(mongo_url) + + return mongo_client + + +class AvalonMongoDB: + def __init__(self, session=None, auto_install=True): + self._id = uuid4() + self._database = None + self.auto_install = auto_install + + if session is None: + session = session_data_from_environment(context_keys=False) + + self.Session = session + + self.log = logging.getLogger(self.__class__.__name__) + + def __getattr__(self, attr_name): + attr = None + if not self.is_installed() and self.auto_install: + self.install() + + if not self.is_installed(): + raise IOError( + "'{}.{}()' requires to run install() first".format( + self.__class__.__name__, 
attr_name + ) + ) + + project_name = self.active_project() + if project_name is None: + raise ValueError( + "Value of 'Session[\"AVALON_PROJECT\"]' is not set." + ) + + collection = self._database[project_name] + not_set = object() + attr = getattr(collection, attr_name, not_set) + + if attr is not_set: + # Raise attribute error + raise AttributeError( + "{} has no attribute '{}'.".format( + collection.__class__.__name__, attr_name + ) + ) + + # Decorate function + if callable(attr): + attr = auto_reconnect(attr) + return attr + + @property + def mongo_client(self): + AvalonMongoConnection.mongo_client() + + @property + def id(self): + return self._id + + @property + def database(self): + if not self.is_installed() and self.auto_install: + self.install() + + if self.is_installed(): + return self._database + + raise IOError( + "'{}.database' requires to run install() first".format( + self.__class__.__name__ + ) + ) + + def is_installed(self): + return AvalonMongoConnection.is_installed(self) + + def install(self): + """Establish a persistent connection to the database""" + if self.is_installed(): + return + + AvalonMongoConnection.install(self) + + self._database = AvalonMongoConnection.database() + + def uninstall(self): + """Close any connection to the database""" + AvalonMongoConnection.uninstall(self) + self._database = None + + @requires_install + def active_project(self): + """Return the name of the active project""" + return self.Session["AVALON_PROJECT"] + + @requires_install + @auto_reconnect + def projects(self, projection=None, only_active=True): + """Iter project documents + + Args: + projection (optional): MongoDB query projection operation + only_active (optional): Skip inactive projects, default True. + + Returns: + Project documents iterator + + """ + query_filter = {"type": "project"} + if only_active: + query_filter.update({ + "$or": [ + {"data.active": {"$exists": 0}}, + {"data.active": True}, + ] + }) + + for project_name in self._database.collection_names(): + if project_name in ("system.indexes",): + continue + + # Each collection will have exactly one project document + + doc = self._database[project_name].find_one( + query_filter, projection=projection + ) + if doc is not None: + yield doc + + @auto_reconnect + def insert_one(self, item, *args, **kwargs): + assert isinstance(item, dict), "item must be of type " + schema.validate(item) + return self._database[self.active_project()].insert_one( + item, *args, **kwargs + ) + + @auto_reconnect + def insert_many(self, items, *args, **kwargs): + # check if all items are valid + assert isinstance(items, list), "`items` must be of type " + for item in items: + assert isinstance(item, dict), "`item` must be of type " + schema.validate(item) + + return self._database[self.active_project()].insert_many( + items, *args, **kwargs + ) + + def parenthood(self, document): + assert document is not None, "This is a bug" + + parents = list() + + while document.get("parent") is not None: + document = self.find_one({"_id": document["parent"]}) + if document is None: + break + + if document.get("type") == "hero_version": + _document = self.find_one({"_id": document["version_id"]}) + document["data"] = _document["data"] + + parents.append(document) + + return parents diff --git a/openpype/pipeline/schema.py b/openpype/pipeline/schema.py new file mode 100644 index 0000000000..26d987b8f3 --- /dev/null +++ b/openpype/pipeline/schema.py @@ -0,0 +1,140 @@ +"""Wrapper around :mod:`jsonschema` + +Schemas are implicitly loaded from the /schema 
directory of this project. + +Attributes: + _cache: Cache of previously loaded schemas + +Resources: + http://json-schema.org/ + http://json-schema.org/latest/json-schema-core.html + http://spacetelescope.github.io/understanding-json-schema/index.html + +""" + +import os +import re +import json +import logging + +import jsonschema +import six + +log_ = logging.getLogger(__name__) + +ValidationError = jsonschema.ValidationError +SchemaError = jsonschema.SchemaError + +_CACHED = False + + +def get_schema_version(schema_name): + """Extract version form schema name. + + It is expected that schema name contain only major and minor version. + + Expected name should match to: + "{name}:{type}-{major version}.{minor version}" + - `name` - must not contain colon + - `type` - must not contain dash + - major and minor versions must be numbers separated by dot + + Args: + schema_name(str): Name of schema that should be parsed. + + Returns: + tuple: Contain two values major version as first and minor version as + second. When schema does not match parsing regex then `(0, 0)` is + returned. + """ + schema_regex = re.compile(r"[^:]+:[^-]+-(\d.\d)") + groups = schema_regex.findall(schema_name) + if not groups: + return 0, 0 + + maj_version, min_version = groups[0].split(".") + return int(maj_version), int(min_version) + + +def validate(data, schema=None): + """Validate `data` with `schema` + + Arguments: + data (dict): JSON-compatible data + schema (str): DEPRECATED Name of schema. Now included in the data. + + Raises: + ValidationError on invalid schema + + """ + if not _CACHED: + _precache() + + root, schema = data["schema"].rsplit(":", 1) + # assert root in ( + # "mindbender-core", # Backwards compatiblity + # "avalon-core", + # "pype" + # ) + + if isinstance(schema, six.string_types): + schema = _cache[schema + ".json"] + + resolver = jsonschema.RefResolver( + "", + None, + store=_cache, + cache_remote=True + ) + + jsonschema.validate(data, + schema, + types={"array": (list, tuple)}, + resolver=resolver) + + +_cache = { + # A mock schema for docstring tests + "_doctest.json": { + "$schema": "http://json-schema.org/schema#", + + "title": "_doctest", + "description": "A test schema", + + "type": "object", + + "additionalProperties": False, + + "required": ["key"], + + "properties": { + "key": { + "description": "A test key", + "type": "string" + } + } + } +} + + +def _precache(): + global _CACHED + + if os.environ.get('AVALON_SCHEMA'): + schema_dir = os.environ['AVALON_SCHEMA'] + else: + current_dir = os.path.dirname(os.path.abspath(__file__)) + schema_dir = os.path.join(current_dir, "schema") + + """Store available schemas in-memory for reduced disk access""" + for schema in os.listdir(schema_dir): + if schema.startswith(("_", ".")): + continue + if not schema.endswith(".json"): + continue + if not os.path.isfile(os.path.join(schema_dir, schema)): + continue + with open(os.path.join(schema_dir, schema)) as f: + log_.debug("Installing schema '%s'.." 
% schema) + _cache[schema] = json.load(f) + _CACHED = True From e953c8602f66e33e9d1ca54e5d7c0b12c25897a4 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 13 Apr 2022 17:54:49 +0200 Subject: [PATCH 140/357] replaced AvalonMongoConnection with using OpenPypeMongoConnection --- openpype/pipeline/mongodb.py | 111 +++-------------------------------- 1 file changed, 8 insertions(+), 103 deletions(-) diff --git a/openpype/pipeline/mongodb.py b/openpype/pipeline/mongodb.py index 436f6994db..1a1b6f7ce9 100644 --- a/openpype/pipeline/mongodb.py +++ b/openpype/pipeline/mongodb.py @@ -3,7 +3,6 @@ import time import functools import logging import pymongo -import ctypes from uuid import uuid4 from . import schema @@ -146,107 +145,12 @@ def session_data_from_environment(context_keys=False): return session_data -class AvalonMongoConnection: - _mongo_client = None - _is_installed = False - _databases = {} - log = logging.getLogger("AvalonMongoConnection") - - @classmethod - def register_database(cls, dbcon): - if dbcon.id in cls._databases: - return - - cls._databases[dbcon.id] = { - "object": dbcon, - "installed": False - } - - @classmethod - def database(cls): - return cls._mongo_client[str(os.environ["AVALON_DB"])] - - @classmethod - def mongo_client(cls): - return cls._mongo_client - - @classmethod - def install(cls, dbcon): - if not cls._is_installed or cls._mongo_client is None: - cls._mongo_client = cls.create_connection() - cls._is_installed = True - - cls.register_database(dbcon) - cls._databases[dbcon.id]["installed"] = True - - cls.check_db_existence() - - @classmethod - def is_installed(cls, dbcon): - info = cls._databases.get(dbcon.id) - if not info: - return False - return cls._databases[dbcon.id]["installed"] - - @classmethod - def _uninstall(cls): - try: - cls._mongo_client.close() - except AttributeError: - pass - cls._is_installed = False - cls._mongo_client = None - - @classmethod - def uninstall(cls, dbcon, force=False): - if force: - for key in cls._databases: - cls._databases[key]["object"].uninstall() - cls._uninstall() - return - - cls._databases[dbcon.id]["installed"] = False - - cls.check_db_existence() - - any_is_installed = False - for key in cls._databases: - if cls._databases[key]["installed"]: - any_is_installed = True - break - - if not any_is_installed: - cls._uninstall() - - @classmethod - def check_db_existence(cls): - items_to_pop = set() - for db_id, info in cls._databases.items(): - obj = info["object"] - # TODO check if should check for 1 or more - cls.log.info(ctypes.c_long.from_address(id(obj)).value) - if ctypes.c_long.from_address(id(obj)).value == 1: - items_to_pop.add(db_id) - - for db_id in items_to_pop: - cls._databases.pop(db_id, None) - - @classmethod - def create_connection(cls): - from openpype.lib import OpenPypeMongoConnection - - mongo_url = os.environ["AVALON_MONGO"] - - mongo_client = OpenPypeMongoConnection.create_connection(mongo_url) - - return mongo_client - - class AvalonMongoDB: def __init__(self, session=None, auto_install=True): self._id = uuid4() self._database = None self.auto_install = auto_install + self._installed = False if session is None: session = session_data_from_environment(context_keys=False) @@ -292,7 +196,9 @@ class AvalonMongoDB: @property def mongo_client(self): - AvalonMongoConnection.mongo_client() + from openpype.lib import OpenPypeMongoConnection + + return OpenPypeMongoConnection.get_mongo_client() @property def id(self): @@ -313,20 +219,19 @@ class AvalonMongoDB: ) def is_installed(self): - return 
AvalonMongoConnection.is_installed(self) + return self._installed def install(self): """Establish a persistent connection to the database""" if self.is_installed(): return - AvalonMongoConnection.install(self) - - self._database = AvalonMongoConnection.database() + self._installed = True + self._database = self.mongo_client[str(os.environ["AVALON_DB"])] def uninstall(self): """Close any connection to the database""" - AvalonMongoConnection.uninstall(self) + self._installed = False self._database = None @requires_install From b2c4210920963bc6f85d973b54a5c271e7a391f3 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 13 Apr 2022 17:55:18 +0200 Subject: [PATCH 141/357] removed unused environments --- openpype/pipeline/mongodb.py | 37 ------------------------------------ 1 file changed, 37 deletions(-) diff --git a/openpype/pipeline/mongodb.py b/openpype/pipeline/mongodb.py index 1a1b6f7ce9..9efd231bb2 100644 --- a/openpype/pipeline/mongodb.py +++ b/openpype/pipeline/mongodb.py @@ -65,8 +65,6 @@ SESSION_CONTEXT_KEYS = ( "AVALON_PROJECT", # Name of current Asset "AVALON_ASSET", - # Name of current silo - "AVALON_SILO", # Name of current task "AVALON_TASK", # Name of current app @@ -89,10 +87,6 @@ def session_data_from_environment(context_keys=False): session_data[key] = None for key, default_value in ( - # Name of current Config - # TODO(marcus): Establish a suitable default config - ("AVALON_CONFIG", "no_config"), - # Name of Avalon in graphical user interfaces # Use this to customise the visual appearance of Avalon # to better integrate with your surrounding pipeline @@ -106,37 +100,6 @@ def session_data_from_environment(context_keys=False): # Name of database used in MongoDB ("AVALON_DB", "avalon"), - - # Address to Sentry - ("AVALON_SENTRY", None), - - # Address to Deadline Web Service - # E.g. 
http://192.167.0.1:8082 - ("AVALON_DEADLINE", None), - - # Enable features not necessarily stable, at the user's own risk - ("AVALON_EARLY_ADOPTER", None), - - # Address of central asset repository, contains - # the following interface: - # /upload - # /download - # /manager (optional) - ("AVALON_LOCATION", "http://127.0.0.1"), - - # Boolean of whether to upload published material - # to central asset repository - ("AVALON_UPLOAD", None), - - # Generic username and password - ("AVALON_USERNAME", "avalon"), - ("AVALON_PASSWORD", "secret"), - - # Unique identifier for instances in working files - ("AVALON_INSTANCE_ID", "avalon.instance"), - - # Enable debugging - ("AVALON_DEBUG", None) ): value = os.environ.get(key) or default_value if value is not None: From e91d84546e9b3cd69790876bae4d953f06250033 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 13 Apr 2022 17:59:01 +0200 Subject: [PATCH 142/357] use AvalonMongoDB from openpype pipeline --- openpype/hooks/pre_global_host_data.py | 5 ++--- openpype/hosts/hiero/api/lib.py | 3 +-- openpype/hosts/hiero/api/pipeline.py | 15 ++++----------- .../hosts/maya/plugins/publish/collect_ass.py | 11 ++--------- openpype/hosts/nuke/api/pipeline.py | 11 +++++------ .../hosts/nuke/plugins/create/create_read.py | 2 -- openpype/hosts/testhost/run_publish.py | 2 -- openpype/hosts/tvpaint/api/pipeline.py | 1 - .../webserver_service/webpublish_routes.py | 16 +++++++++------- openpype/lib/applications.py | 2 +- openpype/lib/avalon_context.py | 11 +++++------ openpype/lib/plugin_tools.py | 2 +- openpype/lib/project_backpack.py | 2 +- openpype/modules/avalon_apps/rest_api.py | 3 +-- .../action_prepare_project.py | 2 +- .../event_handlers_server/event_sync_links.py | 2 +- .../event_sync_to_avalon.py | 3 +-- .../event_user_assigment.py | 3 +-- .../event_handlers_user/action_applications.py | 3 +-- .../event_handlers_user/action_delete_asset.py | 3 ++- .../action_delete_old_versions.py | 2 +- .../event_handlers_user/action_delivery.py | 2 +- .../action_fill_workfile_attr.py | 2 +- .../action_prepare_project.py | 2 +- .../action_store_thumbnails_to_avalon.py | 3 ++- openpype/modules/ftrack/ftrack_server/lib.py | 2 +- openpype/modules/ftrack/lib/avalon_sync.py | 8 ++------ .../modules/ftrack/scripts/sub_event_storer.py | 2 +- .../modules/sync_server/sync_server_module.py | 8 +++++--- .../modules/timers_manager/timers_manager.py | 3 ++- openpype/pipeline/create/context.py | 11 +++++++---- openpype/plugin.py | 1 - openpype/plugins/load/delete_old_versions.py | 3 +-- openpype/plugins/load/delivery.py | 4 +--- openpype/settings/handlers.py | 2 +- openpype/tools/context_dialog/window.py | 2 +- openpype/tools/launcher/window.py | 3 +-- openpype/tools/libraryloader/app.py | 2 +- openpype/tools/loader/__main__.py | 1 - .../project_manager/project_manager/widgets.py | 2 +- .../project_manager/project_manager/window.py | 2 +- openpype/tools/settings/settings/widgets.py | 15 +-------------- openpype/tools/standalonepublish/app.py | 2 +- openpype/tools/traypublisher/window.py | 6 ++++-- 44 files changed, 77 insertions(+), 115 deletions(-) diff --git a/openpype/hooks/pre_global_host_data.py b/openpype/hooks/pre_global_host_data.py index 4c85a511ed..ea5e290d6f 100644 --- a/openpype/hooks/pre_global_host_data.py +++ b/openpype/hooks/pre_global_host_data.py @@ -5,8 +5,7 @@ from openpype.lib import ( prepare_app_environments, prepare_context_environments ) - -import avalon.api +from openpype.pipeline import AvalonMongoDB class GlobalHostDataHook(PreLaunchHook): @@ -64,7 +63,7 @@ 
class GlobalHostDataHook(PreLaunchHook): self.data["anatomy"] = Anatomy(project_name) # Mongo connection - dbcon = avalon.api.AvalonMongoDB() + dbcon = AvalonMongoDB() dbcon.Session["AVALON_PROJECT"] = project_name dbcon.install() diff --git a/openpype/hosts/hiero/api/lib.py b/openpype/hosts/hiero/api/lib.py index df3b24ff2c..00c30538fc 100644 --- a/openpype/hosts/hiero/api/lib.py +++ b/openpype/hosts/hiero/api/lib.py @@ -38,8 +38,6 @@ self.pype_tag_name = "openpypeData" self.default_sequence_name = "openpypeSequence" self.default_bin_name = "openpypeBin" -AVALON_CONFIG = os.getenv("AVALON_CONFIG", "pype") - def flatten(_list): for item in _list: @@ -49,6 +47,7 @@ def flatten(_list): else: yield item + def get_current_project(remove_untitled=False): projects = flatten(hiero.core.projects()) if not remove_untitled: diff --git a/openpype/hosts/hiero/api/pipeline.py b/openpype/hosts/hiero/api/pipeline.py index 616ff53fd8..5001043a74 100644 --- a/openpype/hosts/hiero/api/pipeline.py +++ b/openpype/hosts/hiero/api/pipeline.py @@ -20,8 +20,6 @@ from . import lib, menu, events log = Logger().get_logger(__name__) -AVALON_CONFIG = os.getenv("AVALON_CONFIG", "pype") - # plugin paths API_DIR = os.path.dirname(os.path.abspath(__file__)) HOST_DIR = os.path.dirname(API_DIR) @@ -247,15 +245,10 @@ def reload_config(): import importlib for module in ( - "avalon", - "avalon.lib", - "avalon.pipeline", - "pyblish", - "pypeapp", - "{}.api".format(AVALON_CONFIG), - "{}.hosts.hiero.lib".format(AVALON_CONFIG), - "{}.hosts.hiero.menu".format(AVALON_CONFIG), - "{}.hosts.hiero.tags".format(AVALON_CONFIG) + "openpype.api", + "openpype.hosts.hiero.lib", + "openpype.hosts.hiero.menu", + "openpype.hosts.hiero.tags" ): log.info("Reloading module: {}...".format(module)) try: diff --git a/openpype/hosts/maya/plugins/publish/collect_ass.py b/openpype/hosts/maya/plugins/publish/collect_ass.py index 8e6691120a..7c9a1b76fb 100644 --- a/openpype/hosts/maya/plugins/publish/collect_ass.py +++ b/openpype/hosts/maya/plugins/publish/collect_ass.py @@ -1,23 +1,16 @@ from maya import cmds -import pymel.core as pm import pyblish.api -import avalon.api + class CollectAssData(pyblish.api.InstancePlugin): - """Collect Ass data - - """ + """Collect Ass data.""" order = pyblish.api.CollectorOrder + 0.2 label = 'Collect Ass' families = ["ass"] def process(self, instance): - - - context = instance.context - objsets = instance.data['setMembers'] for objset in objsets: diff --git a/openpype/hosts/nuke/api/pipeline.py b/openpype/hosts/nuke/api/pipeline.py index 6ee3d2ce05..0194acd196 100644 --- a/openpype/hosts/nuke/api/pipeline.py +++ b/openpype/hosts/nuke/api/pipeline.py @@ -38,7 +38,6 @@ from .lib import ( log = Logger.get_logger(__name__) -AVALON_CONFIG = os.getenv("AVALON_CONFIG", "pype") HOST_DIR = os.path.dirname(os.path.abspath(openpype.hosts.nuke.__file__)) PLUGINS_DIR = os.path.join(HOST_DIR, "plugins") PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish") @@ -79,11 +78,11 @@ def reload_config(): """ for module in ( - "{}.api".format(AVALON_CONFIG), - "{}.hosts.nuke.api.actions".format(AVALON_CONFIG), - "{}.hosts.nuke.api.menu".format(AVALON_CONFIG), - "{}.hosts.nuke.api.plugin".format(AVALON_CONFIG), - "{}.hosts.nuke.api.lib".format(AVALON_CONFIG), + "openpype.api", + "openpype.hosts.nuke.api.actions", + "openpype.hosts.nuke.api.menu", + "openpype.hosts.nuke.api.plugin", + "openpype.hosts.nuke.api.lib", ): log.info("Reloading module: {}...".format(module)) diff --git a/openpype/hosts/nuke/plugins/create/create_read.py 
b/openpype/hosts/nuke/plugins/create/create_read.py index bdc67add42..87a9dff0f8 100644 --- a/openpype/hosts/nuke/plugins/create/create_read.py +++ b/openpype/hosts/nuke/plugins/create/create_read.py @@ -2,8 +2,6 @@ from collections import OrderedDict import nuke -import avalon.api -from openpype import api as pype from openpype.hosts.nuke.api import plugin from openpype.hosts.nuke.api.lib import ( set_avalon_knob_data diff --git a/openpype/hosts/testhost/run_publish.py b/openpype/hosts/testhost/run_publish.py index cc80bdc604..c7ad63aafd 100644 --- a/openpype/hosts/testhost/run_publish.py +++ b/openpype/hosts/testhost/run_publish.py @@ -22,13 +22,11 @@ openpype_dir = multi_dirname(current_file, 4) os.environ["OPENPYPE_MONGO"] = mongo_url os.environ["OPENPYPE_ROOT"] = openpype_dir -os.environ["AVALON_MONGO"] = mongo_url os.environ["AVALON_PROJECT"] = project_name os.environ["AVALON_ASSET"] = asset_name os.environ["AVALON_TASK"] = task_name os.environ["AVALON_APP"] = host_name os.environ["OPENPYPE_DATABASE_NAME"] = "openpype" -os.environ["AVALON_CONFIG"] = "openpype" os.environ["AVALON_TIMEOUT"] = "1000" os.environ["AVALON_DB"] = "avalon" os.environ["FTRACK_SERVER"] = ftrack_url diff --git a/openpype/hosts/tvpaint/api/pipeline.py b/openpype/hosts/tvpaint/api/pipeline.py index 78c10c3dae..d57ec3178a 100644 --- a/openpype/hosts/tvpaint/api/pipeline.py +++ b/openpype/hosts/tvpaint/api/pipeline.py @@ -7,7 +7,6 @@ import logging import requests import pyblish.api -import avalon.api from avalon import io diff --git a/openpype/hosts/webpublisher/webserver_service/webpublish_routes.py b/openpype/hosts/webpublisher/webserver_service/webpublish_routes.py index 1f9089aa27..e82ba7f2b8 100644 --- a/openpype/hosts/webpublisher/webserver_service/webpublish_routes.py +++ b/openpype/hosts/webpublisher/webserver_service/webpublish_routes.py @@ -7,18 +7,20 @@ import collections from aiohttp.web_response import Response import subprocess -from avalon.api import AvalonMongoDB - -from openpype.lib import OpenPypeMongoConnection -from openpype_modules.avalon_apps.rest_api import _RestApiEndpoint -from openpype.settings import get_project_settings - -from openpype.lib import PypeLogger +from openpype.lib import ( + OpenPypeMongoConnection, + PypeLogger, +) from openpype.lib.remote_publish import ( get_task_data, ERROR_STATUS, REPROCESS_STATUS ) +from openpype.pipeline import AvalonMongoDB +from openpype_modules.avalon_apps.rest_api import _RestApiEndpoint +from openpype.settings import get_project_settings + + log = PypeLogger.get_logger("WebServer") diff --git a/openpype/lib/applications.py b/openpype/lib/applications.py index 07b91dda03..b52da52dc9 100644 --- a/openpype/lib/applications.py +++ b/openpype/lib/applications.py @@ -1295,7 +1295,7 @@ def get_app_environments_for_context( Returns: dict: Environments for passed context and application. 
""" - from avalon.api import AvalonMongoDB + from openpype.pipeline import AvalonMongoDB # Avalon database connection dbcon = AvalonMongoDB() diff --git a/openpype/lib/avalon_context.py b/openpype/lib/avalon_context.py index e82dcc558f..d95d1b983f 100644 --- a/openpype/lib/avalon_context.py +++ b/openpype/lib/avalon_context.py @@ -64,8 +64,8 @@ def create_project( """ from openpype.settings import ProjectSettings, SaveWarningExc - from avalon.api import AvalonMongoDB - from avalon.schema import validate + from openpype.pipeline import AvalonMongoDB + from openpype.pipeline.schema import validate if dbcon is None: dbcon = AvalonMongoDB() @@ -333,8 +333,7 @@ def get_latest_version(asset_name, subset_name, dbcon=None, project_name=None): Args: asset_name (str): Name of asset. subset_name (str): Name of subset. - dbcon (avalon.mongodb.AvalonMongoDB, optional): Avalon Mongo connection - with Session. + dbcon (AvalonMongoDB, optional): Avalon Mongo connection with Session. project_name (str, optional): Find latest version in specific project. Returns: @@ -429,7 +428,7 @@ def get_workfile_template_key_from_context( "`get_workfile_template_key_from_context` requires to pass" " one of 'dbcon' or 'project_name' arguments." )) - from avalon.api import AvalonMongoDB + from openpype.pipeline import AvalonMongoDB dbcon = AvalonMongoDB() dbcon.Session["AVALON_PROJECT"] = project_name @@ -1794,7 +1793,7 @@ def get_custom_workfile_template_by_string_context( """ if dbcon is None: - from avalon.api import AvalonMongoDB + from openpype.pipeline import AvalonMongoDB dbcon = AvalonMongoDB() diff --git a/openpype/lib/plugin_tools.py b/openpype/lib/plugin_tools.py index f11ba56865..3f78407931 100644 --- a/openpype/lib/plugin_tools.py +++ b/openpype/lib/plugin_tools.py @@ -136,7 +136,7 @@ def get_subset_name( `get_subset_name_with_asset_doc` where asset document is expected. 
""" if dbcon is None: - from avalon.api import AvalonMongoDB + from openpype.pipeline import AvalonMongoDB dbcon = AvalonMongoDB() dbcon.Session["AVALON_PROJECT"] = project_name diff --git a/openpype/lib/project_backpack.py b/openpype/lib/project_backpack.py index 11fd0c0c3e..396479c725 100644 --- a/openpype/lib/project_backpack.py +++ b/openpype/lib/project_backpack.py @@ -25,7 +25,7 @@ from bson.json_util import ( CANONICAL_JSON_OPTIONS ) -from avalon.api import AvalonMongoDB +from openpype.pipeline import AvalonMongoDB DOCUMENTS_FILE_NAME = "database" METADATA_FILE_NAME = "metadata" diff --git a/openpype/modules/avalon_apps/rest_api.py b/openpype/modules/avalon_apps/rest_api.py index 533050fc0c..b35f5bf357 100644 --- a/openpype/modules/avalon_apps/rest_api.py +++ b/openpype/modules/avalon_apps/rest_api.py @@ -1,4 +1,3 @@ -import os import json import datetime @@ -6,7 +5,7 @@ from bson.objectid import ObjectId from aiohttp.web_response import Response -from avalon.api import AvalonMongoDB +from openpype.pipeline import AvalonMongoDB from openpype_modules.webserver.base_routes import RestApiEndpoint diff --git a/openpype/modules/ftrack/event_handlers_server/action_prepare_project.py b/openpype/modules/ftrack/event_handlers_server/action_prepare_project.py index 2e55be2743..975e49cb28 100644 --- a/openpype/modules/ftrack/event_handlers_server/action_prepare_project.py +++ b/openpype/modules/ftrack/event_handlers_server/action_prepare_project.py @@ -1,8 +1,8 @@ import json -from avalon.api import AvalonMongoDB from openpype.api import ProjectSettings from openpype.lib import create_project +from openpype.pipeline import AvalonMongoDB from openpype.settings import SaveWarningExc from openpype_modules.ftrack.lib import ( diff --git a/openpype/modules/ftrack/event_handlers_server/event_sync_links.py b/openpype/modules/ftrack/event_handlers_server/event_sync_links.py index 9610e7f5de..ae70c6756f 100644 --- a/openpype/modules/ftrack/event_handlers_server/event_sync_links.py +++ b/openpype/modules/ftrack/event_handlers_server/event_sync_links.py @@ -1,7 +1,7 @@ from pymongo import UpdateOne from bson.objectid import ObjectId -from avalon.api import AvalonMongoDB +from openpype.pipeline import AvalonMongoDB from openpype_modules.ftrack.lib import ( CUST_ATTR_ID_KEY, diff --git a/openpype/modules/ftrack/event_handlers_server/event_sync_to_avalon.py b/openpype/modules/ftrack/event_handlers_server/event_sync_to_avalon.py index 46c333c4c4..b5f199b3e4 100644 --- a/openpype/modules/ftrack/event_handlers_server/event_sync_to_avalon.py +++ b/openpype/modules/ftrack/event_handlers_server/event_sync_to_avalon.py @@ -12,8 +12,7 @@ from pymongo import UpdateOne import arrow import ftrack_api -from avalon import schema -from avalon.api import AvalonMongoDB +from openpype.pipeline import AvalonMongoDB, schema from openpype_modules.ftrack.lib import ( get_openpype_attr, diff --git a/openpype/modules/ftrack/event_handlers_server/event_user_assigment.py b/openpype/modules/ftrack/event_handlers_server/event_user_assigment.py index 96243c8c36..593fc5e596 100644 --- a/openpype/modules/ftrack/event_handlers_server/event_user_assigment.py +++ b/openpype/modules/ftrack/event_handlers_server/event_user_assigment.py @@ -1,10 +1,9 @@ -import os import re import subprocess from openpype_modules.ftrack.lib import BaseEvent from openpype_modules.ftrack.lib.avalon_sync import CUST_ATTR_ID_KEY -from avalon.api import AvalonMongoDB +from openpype.pipeline import AvalonMongoDB from bson.objectid import ObjectId diff --git 
a/openpype/modules/ftrack/event_handlers_user/action_applications.py b/openpype/modules/ftrack/event_handlers_user/action_applications.py index 48a0dea006..b25bc1b5cb 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_applications.py +++ b/openpype/modules/ftrack/event_handlers_user/action_applications.py @@ -1,5 +1,4 @@ import os -from uuid import uuid4 from openpype_modules.ftrack.lib import BaseAction from openpype.lib.applications import ( @@ -8,7 +7,7 @@ from openpype.lib.applications import ( ApplictionExecutableNotFound, CUSTOM_LAUNCH_APP_GROUPS ) -from avalon.api import AvalonMongoDB +from openpype.pipeline import AvalonMongoDB class AppplicationsAction(BaseAction): diff --git a/openpype/modules/ftrack/event_handlers_user/action_delete_asset.py b/openpype/modules/ftrack/event_handlers_user/action_delete_asset.py index 94385a36c5..ee5c3d0d97 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_delete_asset.py +++ b/openpype/modules/ftrack/event_handlers_user/action_delete_asset.py @@ -3,7 +3,8 @@ import uuid from datetime import datetime from bson.objectid import ObjectId -from avalon.api import AvalonMongoDB + +from openpype.pipeline import AvalonMongoDB from openpype_modules.ftrack.lib import BaseAction, statics_icon from openpype_modules.ftrack.lib.avalon_sync import create_chunks diff --git a/openpype/modules/ftrack/event_handlers_user/action_delete_old_versions.py b/openpype/modules/ftrack/event_handlers_user/action_delete_old_versions.py index 5871646b20..f5addde8ae 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_delete_old_versions.py +++ b/openpype/modules/ftrack/event_handlers_user/action_delete_old_versions.py @@ -5,10 +5,10 @@ import uuid import clique from pymongo import UpdateOne -from avalon.api import AvalonMongoDB from openpype.api import Anatomy from openpype.lib import StringTemplate, TemplateUnsolved +from openpype.pipeline import AvalonMongoDB from openpype_modules.ftrack.lib import BaseAction, statics_icon diff --git a/openpype/modules/ftrack/event_handlers_user/action_delivery.py b/openpype/modules/ftrack/event_handlers_user/action_delivery.py index 1f28b18900..9ef2a1668e 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_delivery.py +++ b/openpype/modules/ftrack/event_handlers_user/action_delivery.py @@ -15,7 +15,7 @@ from openpype.lib.delivery import ( process_single_file, process_sequence ) -from avalon.api import AvalonMongoDB +from openpype.pipeline import AvalonMongoDB class Delivery(BaseAction): diff --git a/openpype/modules/ftrack/event_handlers_user/action_fill_workfile_attr.py b/openpype/modules/ftrack/event_handlers_user/action_fill_workfile_attr.py index 3888379e04..c7237a1150 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_fill_workfile_attr.py +++ b/openpype/modules/ftrack/event_handlers_user/action_fill_workfile_attr.py @@ -7,7 +7,6 @@ import datetime import ftrack_api -from avalon.api import AvalonMongoDB from openpype.api import get_project_settings from openpype.lib import ( get_workfile_template_key, @@ -15,6 +14,7 @@ from openpype.lib import ( Anatomy, StringTemplate, ) +from openpype.pipeline import AvalonMongoDB from openpype_modules.ftrack.lib import BaseAction, statics_icon from openpype_modules.ftrack.lib.avalon_sync import create_chunks diff --git a/openpype/modules/ftrack/event_handlers_user/action_prepare_project.py b/openpype/modules/ftrack/event_handlers_user/action_prepare_project.py index 3759bc81ac..0b14e7aa2b 100644 --- 
a/openpype/modules/ftrack/event_handlers_user/action_prepare_project.py +++ b/openpype/modules/ftrack/event_handlers_user/action_prepare_project.py @@ -1,8 +1,8 @@ import json -from avalon.api import AvalonMongoDB from openpype.api import ProjectSettings from openpype.lib import create_project +from openpype.pipeline import AvalonMongoDB from openpype.settings import SaveWarningExc from openpype_modules.ftrack.lib import ( diff --git a/openpype/modules/ftrack/event_handlers_user/action_store_thumbnails_to_avalon.py b/openpype/modules/ftrack/event_handlers_user/action_store_thumbnails_to_avalon.py index 4820925844..62fdfa2bdd 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_store_thumbnails_to_avalon.py +++ b/openpype/modules/ftrack/event_handlers_user/action_store_thumbnails_to_avalon.py @@ -4,9 +4,10 @@ import json import requests from bson.objectid import ObjectId + from openpype_modules.ftrack.lib import BaseAction, statics_icon from openpype.api import Anatomy -from avalon.api import AvalonMongoDB +from openpype.pipeline import AvalonMongoDB from openpype_modules.ftrack.lib.avalon_sync import CUST_ATTR_ID_KEY diff --git a/openpype/modules/ftrack/ftrack_server/lib.py b/openpype/modules/ftrack/ftrack_server/lib.py index f8319b67d4..e89113a86c 100644 --- a/openpype/modules/ftrack/ftrack_server/lib.py +++ b/openpype/modules/ftrack/ftrack_server/lib.py @@ -192,7 +192,7 @@ class ProcessEventHub(SocketBaseEventHub): except pymongo.errors.AutoReconnect: self.pypelog.error(( "Mongo server \"{}\" is not responding, exiting." - ).format(os.environ["AVALON_MONGO"])) + ).format(os.environ["OPENPYPE_MONGO"])) sys.exit(0) # Additional special processing of events. if event['topic'] == 'ftrack.meta.disconnected': diff --git a/openpype/modules/ftrack/lib/avalon_sync.py b/openpype/modules/ftrack/lib/avalon_sync.py index c5b58ca94d..124787e467 100644 --- a/openpype/modules/ftrack/lib/avalon_sync.py +++ b/openpype/modules/ftrack/lib/avalon_sync.py @@ -6,16 +6,12 @@ import numbers import six -from avalon.api import AvalonMongoDB - -import avalon - from openpype.api import ( Logger, - Anatomy, get_anatomy_settings ) from openpype.lib import ApplicationManager +from openpype.pipeline import AvalonMongoDB, schema from .constants import CUST_ATTR_ID_KEY, FPS_KEYS from .custom_attributes import get_openpype_attr, query_custom_attributes @@ -175,7 +171,7 @@ def check_regex(name, entity_type, in_schema=None, schema_patterns=None): if not name_pattern: default_pattern = "^[a-zA-Z0-9_.]*$" - schema_obj = avalon.schema._cache.get(schema_name + ".json") + schema_obj = schema._cache.get(schema_name + ".json") if not schema_obj: name_pattern = default_pattern else: diff --git a/openpype/modules/ftrack/scripts/sub_event_storer.py b/openpype/modules/ftrack/scripts/sub_event_storer.py index 5543ed74e2..946ecbff79 100644 --- a/openpype/modules/ftrack/scripts/sub_event_storer.py +++ b/openpype/modules/ftrack/scripts/sub_event_storer.py @@ -67,7 +67,7 @@ def launch(event): except pymongo.errors.AutoReconnect: log.error("Mongo server \"{}\" is not responding, exiting.".format( - os.environ["AVALON_MONGO"] + os.environ["OPENPYPE_MONGO"] )) sys.exit(0) diff --git a/openpype/modules/sync_server/sync_server_module.py b/openpype/modules/sync_server/sync_server_module.py index 2c27571f9f..7d4e3ccc96 100644 --- a/openpype/modules/sync_server/sync_server_module.py +++ b/openpype/modules/sync_server/sync_server_module.py @@ -6,7 +6,6 @@ import platform import copy from collections import deque -from avalon.api import 
AvalonMongoDB from openpype.modules import OpenPypeModule from openpype_interfaces import ITrayModule @@ -14,11 +13,14 @@ from openpype.api import ( Anatomy, get_project_settings, get_system_settings, - get_local_site_id) + get_local_site_id +) from openpype.lib import PypeLogger +from openpype.pipeline import AvalonMongoDB from openpype.settings.lib import ( get_default_anatomy_settings, - get_anatomy_settings) + get_anatomy_settings +) from .providers.local_drive import LocalDriveHandler from .providers import lib diff --git a/openpype/modules/timers_manager/timers_manager.py b/openpype/modules/timers_manager/timers_manager.py index 47d020104b..3f77a2b7dc 100644 --- a/openpype/modules/timers_manager/timers_manager.py +++ b/openpype/modules/timers_manager/timers_manager.py @@ -1,13 +1,14 @@ import os import platform -from avalon.api import AvalonMongoDB from openpype.modules import OpenPypeModule from openpype_interfaces import ( ITrayService, ILaunchHookPaths ) +from openpype.pipeline import AvalonMongoDB + from .exceptions import InvalidContextError diff --git a/openpype/pipeline/create/context.py b/openpype/pipeline/create/context.py index 0cc2819172..584752e38a 100644 --- a/openpype/pipeline/create/context.py +++ b/openpype/pipeline/create/context.py @@ -6,6 +6,11 @@ import inspect from uuid import uuid4 from contextlib import contextmanager +from openpype.pipeline.mongodb import ( + AvalonMongoDB, + session_data_from_environment, +) + from .creator_plugins import ( BaseCreator, Creator, @@ -659,10 +664,8 @@ class CreateContext: ): # Create conncetion if is not passed if dbcon is None: - import avalon.api - - session = avalon.api.session_data_from_environment(True) - dbcon = avalon.api.AvalonMongoDB(session) + session = session_data_from_environment(True) + dbcon = AvalonMongoDB(session) dbcon.install() self.dbcon = dbcon diff --git a/openpype/plugin.py b/openpype/plugin.py index 3569936dac..bb9bc2ff85 100644 --- a/openpype/plugin.py +++ b/openpype/plugin.py @@ -1,7 +1,6 @@ import tempfile import os import pyblish.api -import avalon.api ValidatePipelineOrder = pyblish.api.ValidatorOrder + 0.05 ValidateContentsOrder = pyblish.api.ValidatorOrder + 0.1 diff --git a/openpype/plugins/load/delete_old_versions.py b/openpype/plugins/load/delete_old_versions.py index 2789f4ea23..c3e9e9fa0a 100644 --- a/openpype/plugins/load/delete_old_versions.py +++ b/openpype/plugins/load/delete_old_versions.py @@ -8,9 +8,8 @@ import ftrack_api import qargparse from Qt import QtWidgets, QtCore -from avalon.api import AvalonMongoDB from openpype import style -from openpype.pipeline import load +from openpype.pipeline import load, AvalonMongoDB from openpype.lib import StringTemplate from openpype.api import Anatomy diff --git a/openpype/plugins/load/delivery.py b/openpype/plugins/load/delivery.py index 04080053e3..7df07e3f64 100644 --- a/openpype/plugins/load/delivery.py +++ b/openpype/plugins/load/delivery.py @@ -3,9 +3,7 @@ from collections import defaultdict from Qt import QtWidgets, QtCore, QtGui -from avalon.api import AvalonMongoDB - -from openpype.pipeline import load +from openpype.pipeline import load, AvalonMongoDB from openpype.api import Anatomy, config from openpype import resources, style diff --git a/openpype/settings/handlers.py b/openpype/settings/handlers.py index 2109b53b09..0c94623a64 100644 --- a/openpype/settings/handlers.py +++ b/openpype/settings/handlers.py @@ -337,7 +337,7 @@ class MongoSettingsHandler(SettingsHandler): def __init__(self): # Get mongo connection from openpype.lib 
import OpenPypeMongoConnection - from avalon.api import AvalonMongoDB + from openpype.pipeline import AvalonMongoDB settings_collection = OpenPypeMongoConnection.get_mongo_client() diff --git a/openpype/tools/context_dialog/window.py b/openpype/tools/context_dialog/window.py index 9e030853bf..3b544bd375 100644 --- a/openpype/tools/context_dialog/window.py +++ b/openpype/tools/context_dialog/window.py @@ -2,9 +2,9 @@ import os import json from Qt import QtWidgets, QtCore, QtGui -from avalon.api import AvalonMongoDB from openpype import style +from openpype.pipeline import AvalonMongoDB from openpype.tools.utils.lib import center_window from openpype.tools.utils.assets_widget import SingleSelectAssetsWidget from openpype.tools.utils.constants import ( diff --git a/openpype/tools/launcher/window.py b/openpype/tools/launcher/window.py index d80b3eabf0..dab6949613 100644 --- a/openpype/tools/launcher/window.py +++ b/openpype/tools/launcher/window.py @@ -3,10 +3,9 @@ import logging from Qt import QtWidgets, QtCore, QtGui -from avalon.api import AvalonMongoDB - from openpype import style from openpype.api import resources +from openpype.pipeline import AvalonMongoDB import qtawesome from .models import ( diff --git a/openpype/tools/libraryloader/app.py b/openpype/tools/libraryloader/app.py index 328e16205c..7fda6bd6f9 100644 --- a/openpype/tools/libraryloader/app.py +++ b/openpype/tools/libraryloader/app.py @@ -2,8 +2,8 @@ import sys from Qt import QtWidgets, QtCore, QtGui -from avalon.api import AvalonMongoDB from openpype import style +from openpype.pipeline import AvalonMongoDB from openpype.tools.utils import lib as tools_lib from openpype.tools.loader.widgets import ( ThumbnailWidget, diff --git a/openpype/tools/loader/__main__.py b/openpype/tools/loader/__main__.py index 146ba7fd10..400a034a76 100644 --- a/openpype/tools/loader/__main__.py +++ b/openpype/tools/loader/__main__.py @@ -24,7 +24,6 @@ if __name__ == '__main__': os.environ["AVALON_DB"] = "avalon" os.environ["AVALON_TIMEOUT"] = "1000" os.environ["OPENPYPE_DEBUG"] = "1" - os.environ["AVALON_CONFIG"] = "pype" os.environ["AVALON_ASSET"] = "Jungle" # Set the exception hook to our wrapping function diff --git a/openpype/tools/project_manager/project_manager/widgets.py b/openpype/tools/project_manager/project_manager/widgets.py index 39ea833961..dc75b30bd7 100644 --- a/openpype/tools/project_manager/project_manager/widgets.py +++ b/openpype/tools/project_manager/project_manager/widgets.py @@ -10,11 +10,11 @@ from openpype.lib import ( PROJECT_NAME_REGEX ) from openpype.style import load_stylesheet +from openpype.pipeline import AvalonMongoDB from openpype.tools.utils import ( PlaceholderLineEdit, get_warning_pixmap ) -from avalon.api import AvalonMongoDB from Qt import QtWidgets, QtCore, QtGui diff --git a/openpype/tools/project_manager/project_manager/window.py b/openpype/tools/project_manager/project_manager/window.py index bdf32c7415..c281479d4f 100644 --- a/openpype/tools/project_manager/project_manager/window.py +++ b/openpype/tools/project_manager/project_manager/window.py @@ -16,6 +16,7 @@ from .style import ResourceCache from openpype.style import load_stylesheet from openpype.lib import is_admin_password_required from openpype.widgets import PasswordDialog +from openpype.pipeline import AvalonMongoDB from openpype import resources from openpype.api import ( @@ -23,7 +24,6 @@ from openpype.api import ( create_project_folders, Logger ) -from avalon.api import AvalonMongoDB class ProjectManagerWindow(QtWidgets.QWidget): diff --git 
a/openpype/tools/settings/settings/widgets.py b/openpype/tools/settings/settings/widgets.py index 6db001f2f6..45c21d5685 100644 --- a/openpype/tools/settings/settings/widgets.py +++ b/openpype/tools/settings/settings/widgets.py @@ -1,13 +1,9 @@ -import os import copy import uuid from Qt import QtWidgets, QtCore, QtGui import qtawesome -from avalon.mongodb import ( - AvalonMongoConnection, - AvalonMongoDB -) +from openpype.pipeline import AvalonMongoDB from openpype.style import get_objected_colors from openpype.tools.utils.widgets import ImageButton from openpype.tools.utils.lib import paint_image_with_color @@ -1209,15 +1205,6 @@ class ProjectListWidget(QtWidgets.QWidget): selected_project = index.data(PROJECT_NAME_ROLE) break - mongo_url = os.environ["OPENPYPE_MONGO"] - - # Force uninstall of whole avalon connection if url does not match - # to current environment and set it as environment - if mongo_url != os.environ["AVALON_MONGO"]: - AvalonMongoConnection.uninstall(self.dbcon, force=True) - os.environ["AVALON_MONGO"] = mongo_url - self.dbcon = None - if not self.dbcon: try: self.dbcon = AvalonMongoDB() diff --git a/openpype/tools/standalonepublish/app.py b/openpype/tools/standalonepublish/app.py index 3630d92c83..1ad5cd119e 100644 --- a/openpype/tools/standalonepublish/app.py +++ b/openpype/tools/standalonepublish/app.py @@ -12,7 +12,7 @@ from .widgets import ( from .widgets.constants import HOST_NAME from openpype import style from openpype.api import resources -from avalon.api import AvalonMongoDB +from openpype.pipeline import AvalonMongoDB from openpype.modules import ModulesManager diff --git a/openpype/tools/traypublisher/window.py b/openpype/tools/traypublisher/window.py index a550c88ead..972e89a3ae 100644 --- a/openpype/tools/traypublisher/window.py +++ b/openpype/tools/traypublisher/window.py @@ -8,8 +8,10 @@ publishing plugins. from Qt import QtWidgets, QtCore -from avalon.api import AvalonMongoDB -from openpype.pipeline import install_host +from openpype.pipeline import ( + install_host, + AvalonMongoDB, +) from openpype.hosts.traypublisher import ( api as traypublisher ) From a05755e8327c279ea373914a68595d731df187bd Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 13 Apr 2022 18:10:40 +0200 Subject: [PATCH 143/357] use schema from openpype --- openpype/hosts/blender/api/pipeline.py | 3 ++- openpype/hosts/hiero/api/pipeline.py | 2 +- openpype/hosts/maya/api/setdress.py | 4 ++-- openpype/hosts/resolve/api/pipeline.py | 3 +-- openpype/pipeline/load/utils.py | 3 ++- openpype/plugins/publish/integrate_hero_version.py | 3 ++- openpype/tools/loader/model.py | 6 ++++-- openpype/tools/sceneinventory/model.py | 3 ++- 8 files changed, 16 insertions(+), 11 deletions(-) diff --git a/openpype/hosts/blender/api/pipeline.py b/openpype/hosts/blender/api/pipeline.py index 0ea579970e..9420a10228 100644 --- a/openpype/hosts/blender/api/pipeline.py +++ b/openpype/hosts/blender/api/pipeline.py @@ -11,9 +11,10 @@ from . 
import ops import pyblish.api import avalon.api -from avalon import io, schema +from avalon import io from openpype.pipeline import ( + schema, register_loader_plugin_path, register_creator_plugin_path, deregister_loader_plugin_path, diff --git a/openpype/hosts/hiero/api/pipeline.py b/openpype/hosts/hiero/api/pipeline.py index 5001043a74..8025ebff05 100644 --- a/openpype/hosts/hiero/api/pipeline.py +++ b/openpype/hosts/hiero/api/pipeline.py @@ -5,10 +5,10 @@ import os import contextlib from collections import OrderedDict -from avalon import schema from pyblish import api as pyblish from openpype.api import Logger from openpype.pipeline import ( + schema, register_creator_plugin_path, register_loader_plugin_path, deregister_creator_plugin_path, diff --git a/openpype/hosts/maya/api/setdress.py b/openpype/hosts/maya/api/setdress.py index 0b60564e5e..018ea4558c 100644 --- a/openpype/hosts/maya/api/setdress.py +++ b/openpype/hosts/maya/api/setdress.py @@ -12,6 +12,7 @@ from maya import cmds from avalon import io from openpype.pipeline import ( + schema, discover_loader_plugins, loaders_from_representation, load_container, @@ -253,7 +254,6 @@ def get_contained_containers(container): """ - import avalon.schema from .pipeline import parse_container # Get avalon containers in this package setdress container @@ -263,7 +263,7 @@ def get_contained_containers(container): try: member_container = parse_container(node) containers.append(member_container) - except avalon.schema.ValidationError: + except schema.ValidationError: pass return containers diff --git a/openpype/hosts/resolve/api/pipeline.py b/openpype/hosts/resolve/api/pipeline.py index 636c826a11..4a7d1c5bea 100644 --- a/openpype/hosts/resolve/api/pipeline.py +++ b/openpype/hosts/resolve/api/pipeline.py @@ -7,10 +7,9 @@ from collections import OrderedDict from pyblish import api as pyblish -from avalon import schema - from openpype.api import Logger from openpype.pipeline import ( + schema, register_loader_plugin_path, register_creator_plugin_path, deregister_loader_plugin_path, diff --git a/openpype/pipeline/load/utils.py b/openpype/pipeline/load/utils.py index cb7c76f133..ca04f79ae6 100644 --- a/openpype/pipeline/load/utils.py +++ b/openpype/pipeline/load/utils.py @@ -9,10 +9,11 @@ import numbers import six from bson.objectid import ObjectId -from avalon import io, schema +from avalon import io from avalon.api import Session from openpype.lib import Anatomy +from openpype.pipeline import schema log = logging.getLogger(__name__) diff --git a/openpype/plugins/publish/integrate_hero_version.py b/openpype/plugins/publish/integrate_hero_version.py index ded149bdd0..76720fc9a3 100644 --- a/openpype/plugins/publish/integrate_hero_version.py +++ b/openpype/plugins/publish/integrate_hero_version.py @@ -8,11 +8,12 @@ from bson.objectid import ObjectId from pymongo import InsertOne, ReplaceOne import pyblish.api -from avalon import api, io, schema +from avalon import api, io from openpype.lib import ( create_hard_link, filter_profiles ) +from openpype.pipeline import schema class IntegrateHeroVersion(pyblish.api.InstancePlugin): diff --git a/openpype/tools/loader/model.py b/openpype/tools/loader/model.py index 6cc6fae1fb..8cb8f30013 100644 --- a/openpype/tools/loader/model.py +++ b/openpype/tools/loader/model.py @@ -6,8 +6,10 @@ from uuid import uuid4 from Qt import QtCore, QtGui import qtawesome -from avalon import schema -from openpype.pipeline import HeroVersionType +from openpype.pipeline import ( + HeroVersionType, + schema, +) from openpype.style 
import get_default_entity_icon_color from openpype.tools.utils.models import TreeModel, Item diff --git a/openpype/tools/sceneinventory/model.py b/openpype/tools/sceneinventory/model.py index f8fd8a911a..2c47381751 100644 --- a/openpype/tools/sceneinventory/model.py +++ b/openpype/tools/sceneinventory/model.py @@ -7,8 +7,9 @@ from Qt import QtCore, QtGui import qtawesome from bson.objectid import ObjectId -from avalon import io, schema +from avalon import io from openpype.pipeline import ( + schema, HeroVersionType, registered_host, ) From 1c153ebb6089664e9c841f0cafb70cba1192149b Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 13 Apr 2022 21:33:02 +0200 Subject: [PATCH 144/357] flame: remove clip in reels dependency --- .../publish/collect_timeline_instances.py | 13 ---------- .../plugins/publish/validate_source_clip.py | 26 ------------------- 2 files changed, 39 deletions(-) delete mode 100644 openpype/hosts/flame/plugins/publish/validate_source_clip.py diff --git a/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py b/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py index 95c2002bd9..bc849a4742 100644 --- a/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py +++ b/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py @@ -31,7 +31,6 @@ class CollectTimelineInstances(pyblish.api.ContextPlugin): self.log.debug("__ selected_segments: {}".format(selected_segments)) self.otio_timeline = context.data["otioTimeline"] - self.clips_in_reels = opfapi.get_clips_in_reels(project) self.fps = context.data["fps"] # process all sellected @@ -63,9 +62,6 @@ class CollectTimelineInstances(pyblish.api.ContextPlugin): # get file path file_path = clip_data["fpath"] - # get source clip - source_clip = self._get_reel_clip(file_path) - first_frame = opfapi.get_frame_from_filename(file_path) or 0 head, tail = self._get_head_tail(clip_data, first_frame) @@ -103,7 +99,6 @@ class CollectTimelineInstances(pyblish.api.ContextPlugin): "families": families, "publish": marker_data["publish"], "fps": self.fps, - "flameSourceClip": source_clip, "sourceFirstFrame": int(first_frame), "path": file_path, "flameAddTasks": self.add_tasks, @@ -258,14 +253,6 @@ class CollectTimelineInstances(pyblish.api.ContextPlugin): ) return head, tail - def _get_reel_clip(self, path): - match_reel_clip = [ - clip for clip in self.clips_in_reels - if clip["fpath"] == path - ] - if match_reel_clip: - return match_reel_clip.pop() - def _get_resolution_to_data(self, data, context): assert data.get("otioClip"), "Missing `otioClip` data" diff --git a/openpype/hosts/flame/plugins/publish/validate_source_clip.py b/openpype/hosts/flame/plugins/publish/validate_source_clip.py deleted file mode 100644 index 345c00e05a..0000000000 --- a/openpype/hosts/flame/plugins/publish/validate_source_clip.py +++ /dev/null @@ -1,26 +0,0 @@ -import pyblish - - -@pyblish.api.log -class ValidateSourceClip(pyblish.api.InstancePlugin): - """Validate instance is not having empty `flameSourceClip`""" - - order = pyblish.api.ValidatorOrder - label = "Validate Source Clip" - hosts = ["flame"] - families = ["clip"] - optional = True - active = False - - def process(self, instance): - flame_source_clip = instance.data["flameSourceClip"] - - self.log.debug("_ flame_source_clip: {}".format(flame_source_clip)) - - if flame_source_clip is None: - raise AttributeError(( - "Timeline segment `{}` is not having " - "relative clip in reels. 
Please make sure " - "you push `Save Sources` button in Conform Tab").format( - instance.data["asset"] - )) From f245ca5073a68fcdae21045b45db6ad390c751ca Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 13 Apr 2022 21:34:23 +0200 Subject: [PATCH 145/357] flame: refectory of extractor settings --- .../defaults/project_settings/flame.json | 16 +- .../projects_schema/schema_project_flame.json | 147 ++++++++++++------ 2 files changed, 106 insertions(+), 57 deletions(-) diff --git a/openpype/settings/defaults/project_settings/flame.json b/openpype/settings/defaults/project_settings/flame.json index ef7a2a4467..028fda2e66 100644 --- a/openpype/settings/defaults/project_settings/flame.json +++ b/openpype/settings/defaults/project_settings/flame.json @@ -55,16 +55,18 @@ "keep_original_representation": false, "export_presets_mapping": { "exr16fpdwaa": { + "active": true, + "export_type": "File Sequence", "ext": "exr", "xml_preset_file": "OpenEXR (16-bit fp DWAA).xml", - "xml_preset_dir": "", - "export_type": "File Sequence", - "ignore_comment_attrs": false, "colorspace_out": "ACES - ACEScg", + "xml_preset_dir": "", + "parsed_comment_attrs": true, "representation_add_range": true, "representation_tags": [], "load_to_batch_group": true, - "batch_group_loader_name": "LoadClip" + "batch_group_loader_name": "LoadClipBatch", + "filter_path_regex": ".*" } } } @@ -87,7 +89,8 @@ "png", "h264", "mov", - "mp4" + "mp4", + "exr16fpdwaa" ], "reel_group_name": "OpenPype_Reels", "reel_name": "Loaded", @@ -110,7 +113,8 @@ "png", "h264", "mov", - "mp4" + "mp4", + "exr16fpdwaa" ], "reel_name": "OP_LoadedReel", "clip_name_template": "{asset}_{subset}_{output}" diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_flame.json b/openpype/settings/entities/schemas/projects_schema/schema_project_flame.json index fe11d63ac2..fcbbddbe29 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_flame.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_flame.json @@ -238,25 +238,19 @@ "type": "dict", "children": [ { - "key": "ext", - "label": "Output extension", - "type": "text" + "type": "boolean", + "key": "active", + "label": "Is active", + "default": true }, { - "key": "xml_preset_file", - "label": "XML preset file (with ext)", - "type": "text" - }, - { - "key": "xml_preset_dir", - "label": "XML preset folder (optional)", - "type": "text" + "type": "separator" }, { "key": "export_type", "label": "Eport clip type", "type": "enum", - "default": "File Sequence", + "default": "Sequence Publish", "enum_items": [ { "Movie": "Movie" @@ -268,54 +262,105 @@ "Sequence Publish": "Sequence Publish" } ] - }, { - "type": "separator" + "key": "ext", + "label": "Output extension", + "type": "text", + "default": "exr" }, { - "type": "boolean", - "key": "ignore_comment_attrs", - "label": "Ignore attributes parsed from a segment comments" - }, - { - "type": "separator" + "key": "xml_preset_file", + "label": "XML preset file (with ext)", + "type": "text" }, { "key": "colorspace_out", "label": "Output color (imageio)", - "type": "text" - }, - { - "type": "separator" - }, - { - "type": "boolean", - "key": "representation_add_range", - "label": "Add frame range to representation" - }, - { - "type": "list", - "key": "representation_tags", - "label": "Add representation tags", - "object_type": { - "type": "text", - "multiline": false - } - }, - { - "type": "separator" - }, - { - "type": "boolean", - "key": "load_to_batch_group", - "label": "Load to batch group reel", - 
"default": false - }, - { "type": "text", - "key": "batch_group_loader_name", - "label": "Use loader name" + "default": "linear" + }, + { + "type": "collapsible-wrap", + "label": "Other parameters", + "collapsible": true, + "collapsed": true, + "children": [ + { + "key": "xml_preset_dir", + "label": "XML preset folder (optional)", + "type": "text" + }, + { + "type": "separator" + }, + { + "type": "boolean", + "key": "parsed_comment_attrs", + "label": "Include parsed attributes from comments", + "default": false + + }, + { + "type": "separator" + }, + { + "type": "collapsible-wrap", + "label": "Representation", + "collapsible": true, + "collapsed": true, + "children": [ + { + "type": "boolean", + "key": "representation_add_range", + "label": "Add frame range to representation" + }, + { + "type": "list", + "key": "representation_tags", + "label": "Add representation tags", + "object_type": { + "type": "text", + "multiline": false + } + } + ] + }, + { + "type": "collapsible-wrap", + "label": "Loading during publish", + "collapsible": true, + "collapsed": true, + "children": [ + { + "type": "boolean", + "key": "load_to_batch_group", + "label": "Load to batch group reel", + "default": false + }, + { + "type": "text", + "key": "batch_group_loader_name", + "label": "Use loader name" + } + ] + } + + ] + }, + { + "type": "collapsible-wrap", + "label": "Filtering", + "collapsible": true, + "collapsed": true, + "children": [ + { + "key": "filter_path_regex", + "label": "Regex in clip path", + "type": "text", + "default": ".*" + } + ] } ] } From 9174e437a6d4f5cff2df2d0de9cd19ece954ec89 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 13 Apr 2022 21:38:47 +0200 Subject: [PATCH 146/357] flame: add new settings with filter - removing reel clip dependency --- .../publish/extract_subset_resources.py | 278 ++++++++++-------- 1 file changed, 154 insertions(+), 124 deletions(-) diff --git a/openpype/hosts/flame/plugins/publish/extract_subset_resources.py b/openpype/hosts/flame/plugins/publish/extract_subset_resources.py index a780f8c9e5..f1eca9a67d 100644 --- a/openpype/hosts/flame/plugins/publish/extract_subset_resources.py +++ b/openpype/hosts/flame/plugins/publish/extract_subset_resources.py @@ -1,4 +1,5 @@ import os +import re from pprint import pformat from copy import deepcopy @@ -6,6 +7,8 @@ import pyblish.api import openpype.api from openpype.hosts.flame import api as opfapi +import flame + class ExtractSubsetResources(openpype.api.Extractor): """ @@ -20,27 +23,31 @@ class ExtractSubsetResources(openpype.api.Extractor): # plugin defaults default_presets = { "thumbnail": { + "active": True, "ext": "jpg", "xml_preset_file": "Jpeg (8-bit).xml", "xml_preset_dir": "", "export_type": "File Sequence", - "ignore_comment_attrs": True, + "parsed_comment_attrs": False, "colorspace_out": "Output - sRGB", "representation_add_range": False, - "representation_tags": ["thumbnail"] + "representation_tags": ["thumbnail"], + "path_regex": ".*" }, "ftrackpreview": { + "active": True, "ext": "mov", "xml_preset_file": "Apple iPad (1920x1080).xml", "xml_preset_dir": "", "export_type": "Movie", - "ignore_comment_attrs": True, + "parsed_comment_attrs": False, "colorspace_out": "Output - Rec.709", "representation_add_range": True, "representation_tags": [ "review", "delete" - ] + ], + "path_regex": ".*" } } keep_original_representation = False @@ -62,12 +69,8 @@ class ExtractSubsetResources(openpype.api.Extractor): # flame objects segment = instance.data["item"] segment_name = segment.name.get_value() + clip_path = 
instance.data["path"] sequence_clip = instance.context.data["flameSequence"] - clip_data = instance.data["flameSourceClip"] - - reel_clip = None - if clip_data: - reel_clip = clip_data["PyClip"] # segment's parent track name s_track_name = segment.parent.name.get_value() @@ -104,14 +107,41 @@ class ExtractSubsetResources(openpype.api.Extractor): for unique_name, preset_config in export_presets.items(): modify_xml_data = {} + # get activating attributes + activated_preset = preset_config["active"] + filter_path_regex = preset_config["filter_path_regex"] + + self.log.info( + "Preset `{}` is active `{}` with filter `{}`".format( + unique_name, activated_preset, filter_path_regex + ) + ) + self.log.debug( + "__ clip_path: `{}`".format(clip_path)) + + # skip if not activated presete + if not activated_preset: + continue + + # exclude by regex filter + if not re.search(filter_path_regex, clip_path): + continue + # get all presets attributes + extension = preset_config["ext"] preset_file = preset_config["xml_preset_file"] preset_dir = preset_config["xml_preset_dir"] export_type = preset_config["export_type"] repre_tags = preset_config["representation_tags"] - ignore_comment_attrs = preset_config["ignore_comment_attrs"] + parsed_comment_attrs = preset_config["parsed_comment_attrs"] color_out = preset_config["colorspace_out"] + self.log.info( + "Processing `{}` as `{}` to `{}` type...".format( + preset_file, export_type, extension + ) + ) + # get attribures related loading in integrate_batch_group load_to_batch_group = preset_config.get( "load_to_batch_group") @@ -131,24 +161,14 @@ class ExtractSubsetResources(openpype.api.Extractor): in_mark = (source_start_handles - source_first_frame) + 1 out_mark = in_mark + source_duration_handles - # make test for type of preset and available reel_clip - if ( - not reel_clip - and export_type != "Sequence Publish" - ): - self.log.warning(( - "Skipping preset {}. 
Not available " - "reel clip for {}").format( - preset_file, segment_name - )) - continue - - # by default export source clips - exporting_clip = reel_clip - + exporting_clip = None if export_type == "Sequence Publish": # change export clip to sequence - exporting_clip = sequence_clip + exporting_clip = flame.duplicate(sequence_clip) + + # only keep visible layer where instance segment is child + self.hide_others( + exporting_clip, segment_name, s_track_name) # change in/out marks to timeline in/out in_mark = clip_in @@ -161,131 +181,126 @@ class ExtractSubsetResources(openpype.api.Extractor): "startFrame": frame_start }) - if not ignore_comment_attrs: + if parsed_comment_attrs: # add any xml overrides collected form segment.comment modify_xml_data.update(instance.data["xml_overrides"]) self.log.debug("__ modify_xml_data: {}".format(pformat( modify_xml_data ))) + else: + exporting_clip = self.import_clip(clip_path) - # with maintained duplication loop all presets - with opfapi.maintained_object_duplication( - exporting_clip) as duplclip: - kwargs = {} + export_kwargs = {} + # validate xml preset file is filled + if preset_file == "": + raise ValueError( + ("Check Settings for {} preset: " + "`XML preset file` is not filled").format( + unique_name) + ) - if export_type == "Sequence Publish": - # only keep visible layer where instance segment is child - self.hide_others(duplclip, segment_name, s_track_name) + # resolve xml preset dir if not filled + if preset_dir == "": + preset_dir = opfapi.get_preset_path_by_xml_name( + preset_file) - # validate xml preset file is filled - if preset_file == "": + if not preset_dir: raise ValueError( ("Check Settings for {} preset: " - "`XML preset file` is not filled").format( - unique_name) + "`XML preset file` {} is not found").format( + unique_name, preset_file) ) - # resolve xml preset dir if not filled - if preset_dir == "": - preset_dir = opfapi.get_preset_path_by_xml_name( - preset_file) + # create preset path + preset_orig_xml_path = str(os.path.join( + preset_dir, preset_file + )) - if not preset_dir: - raise ValueError( - ("Check Settings for {} preset: " - "`XML preset file` {} is not found").format( - unique_name, preset_file) - ) + preset_path = opfapi.modify_preset_file( + preset_orig_xml_path, staging_dir, modify_xml_data) - # create preset path - preset_orig_xml_path = str(os.path.join( - preset_dir, preset_file - )) + # define kwargs based on preset type + if "thumbnail" in unique_name: + export_kwargs["thumb_frame_number"] = in_mark + ( + source_duration_handles / 2) + else: + export_kwargs.update({ + "in_mark": in_mark, + "out_mark": out_mark + }) - preset_path = opfapi.modify_preset_file( - preset_orig_xml_path, staging_dir, modify_xml_data) + # get and make export dir paths + export_dir_path = str(os.path.join( + staging_dir, unique_name + )) + os.makedirs(export_dir_path) - # define kwargs based on preset type - if "thumbnail" in unique_name: - kwargs["thumb_frame_number"] = in_mark + ( - source_duration_handles / 2) - else: - kwargs.update({ - "in_mark": in_mark, - "out_mark": out_mark - }) + # export + opfapi.export_clip( + export_dir_path, exporting_clip, preset_path, **export_kwargs) - # get and make export dir paths - export_dir_path = str(os.path.join( - staging_dir, unique_name - )) - os.makedirs(export_dir_path) + # create representation data + representation_data = { + "name": unique_name, + "outputName": unique_name, + "ext": extension, + "stagingDir": export_dir_path, + "tags": repre_tags, + "data": { + "colorspace": 
color_out + }, + "load_to_batch_group": load_to_batch_group, + "batch_group_loader_name": batch_group_loader_name + } - # export - opfapi.export_clip( - export_dir_path, duplclip, preset_path, **kwargs) + # collect all available content of export dir + files = os.listdir(export_dir_path) - extension = preset_config["ext"] + # make sure no nested folders inside + n_stage_dir, n_files = self._unfolds_nested_folders( + export_dir_path, files, extension) - # create representation data - representation_data = { - "name": unique_name, - "outputName": unique_name, - "ext": extension, - "stagingDir": export_dir_path, - "tags": repre_tags, - "data": { - "colorspace": color_out - }, - "load_to_batch_group": load_to_batch_group, - "batch_group_loader_name": batch_group_loader_name - } + # fix representation in case of nested folders + if n_stage_dir: + representation_data["stagingDir"] = n_stage_dir + files = n_files - # collect all available content of export dir - files = os.listdir(export_dir_path) + # add files to represetation but add + # imagesequence as list + if ( + # first check if path in files is not mov extension + [ + f for f in files + if os.path.splitext(f)[-1] == ".mov" + ] + # then try if thumbnail is not in unique name + or unique_name == "thumbnail" + ): + representation_data["files"] = files.pop() + else: + representation_data["files"] = files - # make sure no nested folders inside - n_stage_dir, n_files = self._unfolds_nested_folders( - export_dir_path, files, extension) + # add frame range + if preset_config["representation_add_range"]: + representation_data.update({ + "frameStart": frame_start_handle, + "frameEnd": ( + frame_start_handle + source_duration_handles), + "fps": instance.data["fps"] + }) - # fix representation in case of nested folders - if n_stage_dir: - representation_data["stagingDir"] = n_stage_dir - files = n_files + instance.data["representations"].append(representation_data) - # add files to represetation but add - # imagesequence as list - if ( - # first check if path in files is not mov extension - [ - f for f in files - if os.path.splitext(f)[-1] == ".mov" - ] - # then try if thumbnail is not in unique name - or unique_name == "thumbnail" - ): - representation_data["files"] = files.pop() - else: - representation_data["files"] = files + # add review family if found in tags + if "review" in repre_tags: + instance.data["families"].append("review") - # add frame range - if preset_config["representation_add_range"]: - representation_data.update({ - "frameStart": frame_start_handle, - "frameEnd": ( - frame_start_handle + source_duration_handles), - "fps": instance.data["fps"] - }) + self.log.info("Added representation: {}".format( + representation_data)) - instance.data["representations"].append(representation_data) - - # add review family if found in tags - if "review" in repre_tags: - instance.data["families"].append("review") - - self.log.info("Added representation: {}".format( - representation_data)) + # at the end remove the duplicated clip + flame.delete(exporting_clip) self.log.debug("All representations: {}".format( pformat(instance.data["representations"]))) @@ -373,3 +388,18 @@ class ExtractSubsetResources(openpype.api.Extractor): for segment in track.segments: if segment.name.get_value() != segment_name: segment.hidden = True + + def import_clip(self, path): + """ + Import clip from path + """ + clips = flame.import_clips(path) + self.log.info("Clips [{}] imported from `{}`".format(clips, path)) + if not clips: + self.log.warning("Path `{}` is not 
having any clips".format(path)) + return None + elif len(clips) > 1: + self.log.warning( + "Path `{}` is containing more that one clip".format(path) + ) + return clips.pop() From b086680289c2a897164d9db5f868c1c5d78690e6 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 14 Apr 2022 08:55:30 +0200 Subject: [PATCH 147/357] flame: improving work with presets --- .../publish/extract_subset_resources.py | 31 ++++++++++--------- 1 file changed, 17 insertions(+), 14 deletions(-) diff --git a/openpype/hosts/flame/plugins/publish/extract_subset_resources.py b/openpype/hosts/flame/plugins/publish/extract_subset_resources.py index f1eca9a67d..ba4a8c41ad 100644 --- a/openpype/hosts/flame/plugins/publish/extract_subset_resources.py +++ b/openpype/hosts/flame/plugins/publish/extract_subset_resources.py @@ -169,27 +169,30 @@ class ExtractSubsetResources(openpype.api.Extractor): # only keep visible layer where instance segment is child self.hide_others( exporting_clip, segment_name, s_track_name) + else: + exporting_clip = self.import_clip(clip_path) - # change in/out marks to timeline in/out - in_mark = clip_in - out_mark = clip_out + # change in/out marks to timeline in/out + in_mark = clip_in + out_mark = clip_out - # add xml tags modifications - modify_xml_data.update({ - "exportHandles": True, - "nbHandles": handles, - "startFrame": frame_start - }) + # add xml tags modifications + modify_xml_data.update({ + "exportHandles": True, + "nbHandles": handles, + "startFrame": frame_start, + "namePattern": ( + "<segment name>_<shot name>_{}.").format( + unique_name) + }) - if parsed_comment_attrs: - # add any xml overrides collected form segment.comment - modify_xml_data.update(instance.data["xml_overrides"]) + if parsed_comment_attrs: + # add any xml overrides collected form segment.comment + modify_xml_data.update(instance.data["xml_overrides"]) self.log.debug("__ modify_xml_data: {}".format(pformat( modify_xml_data ))) - else: - exporting_clip = self.import_clip(clip_path) export_kwargs = {} # validate xml preset file is filled From b3b1938a43d2832be5f72c2439f1701ec229c977 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 14 Apr 2022 11:44:13 +0200 Subject: [PATCH 148/357] flame: IntegrateBatchGroup disable from settings --- .../settings/defaults/project_settings/flame.json | 3 +++ .../projects_schema/schema_project_flame.json | 15 +++++++++++++++ 2 files changed, 18 insertions(+) diff --git a/openpype/settings/defaults/project_settings/flame.json b/openpype/settings/defaults/project_settings/flame.json index 028fda2e66..dd8c05d460 100644 --- a/openpype/settings/defaults/project_settings/flame.json +++ b/openpype/settings/defaults/project_settings/flame.json @@ -69,6 +69,9 @@ "filter_path_regex": ".*" } } + }, + "IntegrateBatchGroup": { + "enabled": false } }, "load": { diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_flame.json b/openpype/settings/entities/schemas/projects_schema/schema_project_flame.json index fcbbddbe29..ace404b47a 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_flame.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_flame.json @@ -366,6 +366,21 @@ } } ] + }, + { + "type": "dict", + "collapsible": true, + "key": "IntegrateBatchGroup", + "label": "IntegrateBatchGroup", + "is_group": true, + "checkbox_key": "enabled", + "children": [ + { + "type": "boolean", + "key": "enabled", + "label": "Enabled" + } + ] } ] }, From 891ba74d6c6e2d7370a9c8eec1b13e1a311b8094 Mon Sep 17 00:00:00 2001 From: 
Jakub Jezek Date: Thu, 14 Apr 2022 11:46:16 +0200 Subject: [PATCH 149/357] flame: no need to assign project object anymore --- .../hosts/flame/plugins/publish/collect_timeline_instances.py | 1 - 1 file changed, 1 deletion(-) diff --git a/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py b/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py index bc849a4742..5174f9db48 100644 --- a/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py +++ b/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py @@ -26,7 +26,6 @@ class CollectTimelineInstances(pyblish.api.ContextPlugin): add_tasks = [] def process(self, context): - project = context.data["flameProject"] selected_segments = context.data["flameSelectedSegments"] self.log.debug("__ selected_segments: {}".format(selected_segments)) From 30a959f429072449dbadf7d7425a181eb9a6cb7b Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 14 Apr 2022 11:50:35 +0200 Subject: [PATCH 150/357] flame: improving extractor's preset filtering --- .../publish/extract_subset_resources.py | 30 +++++++++++++------ 1 file changed, 21 insertions(+), 9 deletions(-) diff --git a/openpype/hosts/flame/plugins/publish/extract_subset_resources.py b/openpype/hosts/flame/plugins/publish/extract_subset_resources.py index ba4a8c41ad..4598405923 100644 --- a/openpype/hosts/flame/plugins/publish/extract_subset_resources.py +++ b/openpype/hosts/flame/plugins/publish/extract_subset_resources.py @@ -68,6 +68,7 @@ class ExtractSubsetResources(openpype.api.Extractor): # flame objects segment = instance.data["item"] + asset_name = instance.data["asset"] segment_name = segment.name.get_value() clip_path = instance.data["path"] sequence_clip = instance.context.data["flameSequence"] @@ -109,7 +110,7 @@ class ExtractSubsetResources(openpype.api.Extractor): # get activating attributes activated_preset = preset_config["active"] - filter_path_regex = preset_config["filter_path_regex"] + filter_path_regex = preset_config.get("filter_path_regex") self.log.info( "Preset `{}` is active `{}` with filter `{}`".format( @@ -123,8 +124,11 @@ class ExtractSubsetResources(openpype.api.Extractor): if not activated_preset: continue - # exclude by regex filter - if not re.search(filter_path_regex, clip_path): + # exclude by regex filter if any + if ( + filter_path_regex + and not re.search(filter_path_regex, clip_path) + ): continue # get all presets attributes @@ -162,6 +166,8 @@ class ExtractSubsetResources(openpype.api.Extractor): out_mark = in_mark + source_duration_handles exporting_clip = None + name_patern_xml = "_{}.".format( + unique_name) if export_type == "Sequence Publish": # change export clip to sequence exporting_clip = flame.duplicate(sequence_clip) @@ -169,8 +175,15 @@ class ExtractSubsetResources(openpype.api.Extractor): # only keep visible layer where instance segment is child self.hide_others( exporting_clip, segment_name, s_track_name) + + # change name patern + name_patern_xml = ( + "__{}.").format( + unique_name) else: exporting_clip = self.import_clip(clip_path) + exporting_clip.name.set_value("{}_{}".format( + asset_name, segment_name)) # change in/out marks to timeline in/out in_mark = clip_in @@ -181,9 +194,7 @@ class ExtractSubsetResources(openpype.api.Extractor): "exportHandles": True, "nbHandles": handles, "startFrame": frame_start, - "namePattern": ( - "<segment name>_<shot name>_{}.").format( - unique_name) + "namePattern": name_patern_xml }) if parsed_comment_attrs: @@ -302,8 +313,9 @@ class 
ExtractSubsetResources(openpype.api.Extractor): self.log.info("Added representation: {}".format( representation_data)) - # at the end remove the duplicated clip - flame.delete(exporting_clip) + if export_type == "Sequence Publish": + # at the end remove the duplicated clip + flame.delete(exporting_clip) self.log.debug("All representations: {}".format( pformat(instance.data["representations"]))) @@ -405,4 +417,4 @@ class ExtractSubsetResources(openpype.api.Extractor): self.log.warning( "Path `{}` is containing more that one clip".format(path) ) - return clips.pop() + return clips[0] From 2591e81877985b86ab1afd15d58d0f39acf112e5 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 14 Apr 2022 11:56:41 +0200 Subject: [PATCH 151/357] replaced avalon imports in blender --- openpype/hosts/blender/api/ops.py | 14 ++++----- openpype/hosts/blender/api/pipeline.py | 31 +++---------------- .../blender/plugins/create/create_action.py | 4 +-- .../plugins/create/create_animation.py | 4 +-- .../blender/plugins/create/create_camera.py | 4 +-- .../blender/plugins/create/create_layout.py | 4 +-- .../blender/plugins/create/create_model.py | 4 +-- .../plugins/create/create_pointcache.py | 4 +-- .../blender/plugins/create/create_rig.py | 4 +-- .../blender/plugins/publish/extract_layout.py | 8 ++--- .../plugins/publish/integrate_animation.py | 1 - 11 files changed, 29 insertions(+), 53 deletions(-) diff --git a/openpype/hosts/blender/api/ops.py b/openpype/hosts/blender/api/ops.py index 29d6d356c8..c1b5add518 100644 --- a/openpype/hosts/blender/api/ops.py +++ b/openpype/hosts/blender/api/ops.py @@ -15,9 +15,9 @@ from Qt import QtWidgets, QtCore import bpy import bpy.utils.previews -import avalon.api -from openpype.tools.utils import host_tools from openpype import style +from openpype.pipeline import legacy_io +from openpype.tools.utils import host_tools from .workio import OpenFileCacher @@ -279,7 +279,7 @@ class LaunchLoader(LaunchQtApp): def before_window_show(self): self._window.set_context( - {"asset": avalon.api.Session["AVALON_ASSET"]}, + {"asset": legacy_io.Session["AVALON_ASSET"]}, refresh=True ) @@ -327,8 +327,8 @@ class LaunchWorkFiles(LaunchQtApp): def execute(self, context): result = super().execute(context) self._window.set_context({ - "asset": avalon.api.Session["AVALON_ASSET"], - "task": avalon.api.Session["AVALON_TASK"] + "asset": legacy_io.Session["AVALON_ASSET"], + "task": legacy_io.Session["AVALON_TASK"] }) return result @@ -358,8 +358,8 @@ class TOPBAR_MT_avalon(bpy.types.Menu): else: pyblish_menu_icon_id = 0 - asset = avalon.api.Session['AVALON_ASSET'] - task = avalon.api.Session['AVALON_TASK'] + asset = legacy_io.Session['AVALON_ASSET'] + task = legacy_io.Session['AVALON_TASK'] context_label = f"{asset}, {task}" context_label_item = layout.row() context_label_item.operator( diff --git a/openpype/hosts/blender/api/pipeline.py b/openpype/hosts/blender/api/pipeline.py index 9420a10228..5b81764644 100644 --- a/openpype/hosts/blender/api/pipeline.py +++ b/openpype/hosts/blender/api/pipeline.py @@ -1,6 +1,5 @@ import os import sys -import importlib import traceback from typing import Callable, Dict, Iterator, List, Optional @@ -10,17 +9,15 @@ from . import lib from . 
import ops import pyblish.api -import avalon.api -from avalon import io from openpype.pipeline import ( schema, + legacy_io, register_loader_plugin_path, register_creator_plugin_path, deregister_loader_plugin_path, deregister_creator_plugin_path, AVALON_CONTAINER_ID, - uninstall_host, ) from openpype.api import Logger from openpype.lib import ( @@ -86,8 +83,8 @@ def uninstall(): def set_start_end_frames(): - asset_name = io.Session["AVALON_ASSET"] - asset_doc = io.find_one({ + asset_name = legacy_io.Session["AVALON_ASSET"] + asset_doc = legacy_io.find_one({ "type": "asset", "name": asset_name }) @@ -191,7 +188,7 @@ def _on_task_changed(): # `directory` attribute, so it opens in that directory (does it?). # https://docs.blender.org/api/blender2.8/bpy.types.Operator.html#calling-a-file-selector # https://docs.blender.org/api/blender2.8/bpy.types.WindowManager.html#bpy.types.WindowManager.fileselect_add - workdir = avalon.api.Session["AVALON_WORKDIR"] + workdir = legacy_io.Session["AVALON_WORKDIR"] log.debug("New working directory: %s", workdir) @@ -202,26 +199,6 @@ def _register_events(): log.info("Installed event callback for 'taskChanged'...") -def reload_pipeline(*args): - """Attempt to reload pipeline at run-time. - - Warning: - This is primarily for development and debugging purposes and not well - tested. - - """ - - uninstall_host() - - for module in ( - "avalon.io", - "avalon.pipeline", - "avalon.api", - ): - module = importlib.import_module(module) - importlib.reload(module) - - def _discover_gui() -> Optional[Callable]: """Return the most desirable of the currently registered GUIs""" diff --git a/openpype/hosts/blender/plugins/create/create_action.py b/openpype/hosts/blender/plugins/create/create_action.py index 5f66f5da6e..54b3a501a7 100644 --- a/openpype/hosts/blender/plugins/create/create_action.py +++ b/openpype/hosts/blender/plugins/create/create_action.py @@ -2,7 +2,7 @@ import bpy -from avalon import api +from openpype.pipeline import legacy_io import openpype.hosts.blender.api.plugin from openpype.hosts.blender.api import lib @@ -22,7 +22,7 @@ class CreateAction(openpype.hosts.blender.api.plugin.Creator): name = openpype.hosts.blender.api.plugin.asset_name(asset, subset) collection = bpy.data.collections.new(name=name) bpy.context.scene.collection.children.link(collection) - self.data['task'] = api.Session.get('AVALON_TASK') + self.data['task'] = legacy_io.Session.get('AVALON_TASK') lib.imprint(collection, self.data) if (self.options or {}).get("useSelection"): diff --git a/openpype/hosts/blender/plugins/create/create_animation.py b/openpype/hosts/blender/plugins/create/create_animation.py index b88010ae90..a0e9e5e399 100644 --- a/openpype/hosts/blender/plugins/create/create_animation.py +++ b/openpype/hosts/blender/plugins/create/create_animation.py @@ -2,7 +2,7 @@ import bpy -from avalon import api +from openpype.pipeline import legacy_io from openpype.hosts.blender.api import plugin, lib, ops from openpype.hosts.blender.api.pipeline import AVALON_INSTANCES @@ -37,7 +37,7 @@ class CreateAnimation(plugin.Creator): # asset_group.empty_display_type = 'SINGLE_ARROW' asset_group = bpy.data.collections.new(name=name) instances.children.link(asset_group) - self.data['task'] = api.Session.get('AVALON_TASK') + self.data['task'] = legacy_io.Session.get('AVALON_TASK') lib.imprint(asset_group, self.data) if (self.options or {}).get("useSelection"): diff --git a/openpype/hosts/blender/plugins/create/create_camera.py b/openpype/hosts/blender/plugins/create/create_camera.py index 
cc796d464d..1a3c008069 100644 --- a/openpype/hosts/blender/plugins/create/create_camera.py +++ b/openpype/hosts/blender/plugins/create/create_camera.py @@ -2,7 +2,7 @@ import bpy -from avalon import api +from openpype.pipeline import legacy_io from openpype.hosts.blender.api import plugin, lib, ops from openpype.hosts.blender.api.pipeline import AVALON_INSTANCES @@ -40,7 +40,7 @@ class CreateCamera(plugin.Creator): asset_group = bpy.data.objects.new(name=name, object_data=None) asset_group.empty_display_type = 'SINGLE_ARROW' instances.objects.link(asset_group) - self.data['task'] = api.Session.get('AVALON_TASK') + self.data['task'] = legacy_io.Session.get('AVALON_TASK') print(f"self.data: {self.data}") lib.imprint(asset_group, self.data) diff --git a/openpype/hosts/blender/plugins/create/create_layout.py b/openpype/hosts/blender/plugins/create/create_layout.py index f62cbc52ba..5949a4b86e 100644 --- a/openpype/hosts/blender/plugins/create/create_layout.py +++ b/openpype/hosts/blender/plugins/create/create_layout.py @@ -2,7 +2,7 @@ import bpy -from avalon import api +from openpype.pipeline import legacy_io from openpype.hosts.blender.api import plugin, lib, ops from openpype.hosts.blender.api.pipeline import AVALON_INSTANCES @@ -34,7 +34,7 @@ class CreateLayout(plugin.Creator): asset_group = bpy.data.objects.new(name=name, object_data=None) asset_group.empty_display_type = 'SINGLE_ARROW' instances.objects.link(asset_group) - self.data['task'] = api.Session.get('AVALON_TASK') + self.data['task'] = legacy_io.Session.get('AVALON_TASK') lib.imprint(asset_group, self.data) # Add selected objects to instance diff --git a/openpype/hosts/blender/plugins/create/create_model.py b/openpype/hosts/blender/plugins/create/create_model.py index 75c90f9bb1..fedc708943 100644 --- a/openpype/hosts/blender/plugins/create/create_model.py +++ b/openpype/hosts/blender/plugins/create/create_model.py @@ -2,7 +2,7 @@ import bpy -from avalon import api +from openpype.pipeline import legacy_io from openpype.hosts.blender.api import plugin, lib, ops from openpype.hosts.blender.api.pipeline import AVALON_INSTANCES @@ -34,7 +34,7 @@ class CreateModel(plugin.Creator): asset_group = bpy.data.objects.new(name=name, object_data=None) asset_group.empty_display_type = 'SINGLE_ARROW' instances.objects.link(asset_group) - self.data['task'] = api.Session.get('AVALON_TASK') + self.data['task'] = legacy_io.Session.get('AVALON_TASK') lib.imprint(asset_group, self.data) # Add selected objects to instance diff --git a/openpype/hosts/blender/plugins/create/create_pointcache.py b/openpype/hosts/blender/plugins/create/create_pointcache.py index bf5a84048f..38707fd3b1 100644 --- a/openpype/hosts/blender/plugins/create/create_pointcache.py +++ b/openpype/hosts/blender/plugins/create/create_pointcache.py @@ -2,7 +2,7 @@ import bpy -from avalon import api +from openpype.pipeline import legacy_io import openpype.hosts.blender.api.plugin from openpype.hosts.blender.api import lib @@ -22,7 +22,7 @@ class CreatePointcache(openpype.hosts.blender.api.plugin.Creator): name = openpype.hosts.blender.api.plugin.asset_name(asset, subset) collection = bpy.data.collections.new(name=name) bpy.context.scene.collection.children.link(collection) - self.data['task'] = api.Session.get('AVALON_TASK') + self.data['task'] = legacy_io.Session.get('AVALON_TASK') lib.imprint(collection, self.data) if (self.options or {}).get("useSelection"): diff --git a/openpype/hosts/blender/plugins/create/create_rig.py b/openpype/hosts/blender/plugins/create/create_rig.py index 
65f5061924..0abd306c6b 100644 --- a/openpype/hosts/blender/plugins/create/create_rig.py +++ b/openpype/hosts/blender/plugins/create/create_rig.py @@ -2,7 +2,7 @@ import bpy -from avalon import api +from openpype.pipeline import legacy_io from openpype.hosts.blender.api import plugin, lib, ops from openpype.hosts.blender.api.pipeline import AVALON_INSTANCES @@ -34,7 +34,7 @@ class CreateRig(plugin.Creator): asset_group = bpy.data.objects.new(name=name, object_data=None) asset_group.empty_display_type = 'SINGLE_ARROW' instances.objects.link(asset_group) - self.data['task'] = api.Session.get('AVALON_TASK') + self.data['task'] = legacy_io.Session.get('AVALON_TASK') lib.imprint(asset_group, self.data) # Add selected objects to instance diff --git a/openpype/hosts/blender/plugins/publish/extract_layout.py b/openpype/hosts/blender/plugins/publish/extract_layout.py index b78a193d81..8ecc78a2c6 100644 --- a/openpype/hosts/blender/plugins/publish/extract_layout.py +++ b/openpype/hosts/blender/plugins/publish/extract_layout.py @@ -7,7 +7,7 @@ import bpy import bpy_extras import bpy_extras.anim_utils -from avalon import io +from openpype.pipeline import legacy_io from openpype.hosts.blender.api import plugin from openpype.hosts.blender.api.pipeline import AVALON_PROPERTY import openpype.api @@ -139,7 +139,7 @@ class ExtractLayout(openpype.api.Extractor): self.log.debug("Parent: {}".format(parent)) # Get blend reference - blend = io.find_one( + blend = legacy_io.find_one( { "type": "representation", "parent": ObjectId(parent), @@ -150,7 +150,7 @@ class ExtractLayout(openpype.api.Extractor): if blend: blend_id = blend["_id"] # Get fbx reference - fbx = io.find_one( + fbx = legacy_io.find_one( { "type": "representation", "parent": ObjectId(parent), @@ -161,7 +161,7 @@ class ExtractLayout(openpype.api.Extractor): if fbx: fbx_id = fbx["_id"] # Get abc reference - abc = io.find_one( + abc = legacy_io.find_one( { "type": "representation", "parent": ObjectId(parent), diff --git a/openpype/hosts/blender/plugins/publish/integrate_animation.py b/openpype/hosts/blender/plugins/publish/integrate_animation.py index 90e94a4aac..d9a85bc79b 100644 --- a/openpype/hosts/blender/plugins/publish/integrate_animation.py +++ b/openpype/hosts/blender/plugins/publish/integrate_animation.py @@ -1,6 +1,5 @@ import json -from avalon import io import pyblish.api From 3edce9456ed4bd6adf1fdb0db3368ed28d110b9e Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 14 Apr 2022 11:56:54 +0200 Subject: [PATCH 152/357] replced avalon imports in aftereffects --- openpype/hosts/aftereffects/api/launch_logic.py | 9 ++++----- openpype/hosts/aftereffects/api/pipeline.py | 3 --- .../aftereffects/plugins/publish/collect_workfile.py | 3 +-- .../plugins/publish/validate_instance_asset.py | 11 +++++++---- 4 files changed, 12 insertions(+), 14 deletions(-) diff --git a/openpype/hosts/aftereffects/api/launch_logic.py b/openpype/hosts/aftereffects/api/launch_logic.py index c549268978..30a3e1f1c3 100644 --- a/openpype/hosts/aftereffects/api/launch_logic.py +++ b/openpype/hosts/aftereffects/api/launch_logic.py @@ -12,9 +12,8 @@ from wsrpc_aiohttp import ( from Qt import QtCore +from openpype.pipeline import legacy_io from openpype.tools.utils import host_tools - -from avalon import api from openpype.tools.adobe_webserver.app import WebServerTool from .ws_stub import AfterEffectsServerStub @@ -271,13 +270,13 @@ class AfterEffectsRoute(WebSocketRoute): log.info("Setting context change") log.info("project {} asset {} ".format(project, asset)) if project: - 
api.Session["AVALON_PROJECT"] = project + legacy_io.Session["AVALON_PROJECT"] = project os.environ["AVALON_PROJECT"] = project if asset: - api.Session["AVALON_ASSET"] = asset + legacy_io.Session["AVALON_ASSET"] = asset os.environ["AVALON_ASSET"] = asset if task: - api.Session["AVALON_TASK"] = task + legacy_io.Session["AVALON_TASK"] = task os.environ["AVALON_TASK"] = task async def read(self): diff --git a/openpype/hosts/aftereffects/api/pipeline.py b/openpype/hosts/aftereffects/api/pipeline.py index 3ed2de0e9d..73aea2da11 100644 --- a/openpype/hosts/aftereffects/api/pipeline.py +++ b/openpype/hosts/aftereffects/api/pipeline.py @@ -2,10 +2,8 @@ import os import sys from Qt import QtWidgets -from bson.objectid import ObjectId import pyblish.api -from avalon import io from openpype import lib from openpype.api import Logger @@ -15,7 +13,6 @@ from openpype.pipeline import ( deregister_loader_plugin_path, deregister_creator_plugin_path, AVALON_CONTAINER_ID, - registered_host, ) import openpype.hosts.aftereffects from openpype.lib import register_event_callback diff --git a/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py b/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py index cb5a2bad4f..21a0cd7a1b 100644 --- a/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py +++ b/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py @@ -1,5 +1,5 @@ import os -from avalon import api + import pyblish.api from openpype.lib import get_subset_name_with_asset_doc @@ -11,7 +11,6 @@ class CollectWorkfile(pyblish.api.ContextPlugin): order = pyblish.api.CollectorOrder + 0.1 def process(self, context): - task = api.Session["AVALON_TASK"] current_file = context.data["currentFile"] staging_dir = os.path.dirname(current_file) scene_file = os.path.basename(current_file) diff --git a/openpype/hosts/aftereffects/plugins/publish/validate_instance_asset.py b/openpype/hosts/aftereffects/plugins/publish/validate_instance_asset.py index 37cecfbcc4..1a303f5da4 100644 --- a/openpype/hosts/aftereffects/plugins/publish/validate_instance_asset.py +++ b/openpype/hosts/aftereffects/plugins/publish/validate_instance_asset.py @@ -1,7 +1,10 @@ -from avalon import api import pyblish.api + import openpype.api -from openpype.pipeline import PublishXmlValidationError +from openpype.pipeline import ( + PublishXmlValidationError, + legacy_io, +) from openpype.hosts.aftereffects.api import get_stub @@ -27,7 +30,7 @@ class ValidateInstanceAssetRepair(pyblish.api.Action): for instance in instances: data = stub.read(instance[0]) - data["asset"] = api.Session["AVALON_ASSET"] + data["asset"] = legacy_io.Session["AVALON_ASSET"] stub.imprint(instance[0], data) @@ -51,7 +54,7 @@ class ValidateInstanceAsset(pyblish.api.InstancePlugin): def process(self, instance): instance_asset = instance.data["asset"] - current_asset = api.Session["AVALON_ASSET"] + current_asset = legacy_io.Session["AVALON_ASSET"] msg = ( f"Instance asset {instance_asset} is not the same " f"as current context {current_asset}." 
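The commits in this part of the series repeat one migration pattern: imports of avalon.api / avalon.io are swapped for openpype.pipeline.legacy_io, which exposes the same Session mapping and the same query helpers such as find_one. A minimal sketch of a pyblish collector after that migration, using only calls that appear in these diffs; the class name and the context keys it fills are illustrative examples, not code taken from the patches:

import pyblish.api

from openpype.pipeline import legacy_io


class CollectExampleContext(pyblish.api.ContextPlugin):
    """Illustrative collector showing legacy_io in place of avalon.api/io."""

    label = "Collect Example Context"
    order = pyblish.api.CollectorOrder

    def process(self, context):
        # legacy_io.Session replaces avalon.api.Session and holds the same keys.
        asset_name = legacy_io.Session["AVALON_ASSET"]
        task_name = legacy_io.Session.get("AVALON_TASK")

        # legacy_io.find_one replaces avalon.io.find_one with the same query API.
        asset_doc = legacy_io.find_one({
            "type": "asset",
            "name": asset_name,
        })

        # Illustrative keys; the real collectors in these patches store similar data.
        context.data["exampleAssetEntity"] = asset_doc
        context.data["exampleTask"] = task_name
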
From d5c52df5ce35cc4bcae79a175b91c5384fa02622 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 14 Apr 2022 11:57:13 +0200 Subject: [PATCH 153/357] replaced avalon imports in celaction --- .../hosts/celaction/plugins/publish/collect_audio.py | 10 +++++----- .../plugins/publish/collect_celaction_instances.py | 4 ++-- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/openpype/hosts/celaction/plugins/publish/collect_audio.py b/openpype/hosts/celaction/plugins/publish/collect_audio.py index 80c1c37d7e..8acda5fc7c 100644 --- a/openpype/hosts/celaction/plugins/publish/collect_audio.py +++ b/openpype/hosts/celaction/plugins/publish/collect_audio.py @@ -1,10 +1,10 @@ import os import collections +from pprint import pformat import pyblish.api -from avalon import io -from pprint import pformat +from openpype.pipeline import legacy_io class AppendCelactionAudio(pyblish.api.ContextPlugin): @@ -60,7 +60,7 @@ class AppendCelactionAudio(pyblish.api.ContextPlugin): """ # Query all subsets for asset - subset_docs = io.find({ + subset_docs = legacy_io.find({ "type": "subset", "parent": asset_doc["_id"] }) @@ -93,7 +93,7 @@ class AppendCelactionAudio(pyblish.api.ContextPlugin): }} ] last_versions_by_subset_id = dict() - for doc in io.aggregate(pipeline): + for doc in legacy_io.aggregate(pipeline): doc["parent"] = doc["_id"] doc["_id"] = doc.pop("_version_id") last_versions_by_subset_id[doc["parent"]] = doc @@ -102,7 +102,7 @@ class AppendCelactionAudio(pyblish.api.ContextPlugin): for version_doc in last_versions_by_subset_id.values(): version_docs_by_id[version_doc["_id"]] = version_doc - repre_docs = io.find({ + repre_docs = legacy_io.find({ "type": "representation", "parent": {"$in": list(version_docs_by_id.keys())}, "name": {"$in": representations} diff --git a/openpype/hosts/celaction/plugins/publish/collect_celaction_instances.py b/openpype/hosts/celaction/plugins/publish/collect_celaction_instances.py index f393e471c4..1d2d9da1af 100644 --- a/openpype/hosts/celaction/plugins/publish/collect_celaction_instances.py +++ b/openpype/hosts/celaction/plugins/publish/collect_celaction_instances.py @@ -1,6 +1,6 @@ import os -from avalon import api import pyblish.api +from openpype.pipeline import legacy_io class CollectCelactionInstances(pyblish.api.ContextPlugin): @@ -10,7 +10,7 @@ class CollectCelactionInstances(pyblish.api.ContextPlugin): order = pyblish.api.CollectorOrder + 0.1 def process(self, context): - task = api.Session["AVALON_TASK"] + task = legacy_io.Session["AVALON_TASK"] current_file = context.data["currentFile"] staging_dir = os.path.dirname(current_file) scene_file = os.path.basename(current_file) From 480029f6828867124cb0eb650b8600bf976d8c8f Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 14 Apr 2022 11:57:25 +0200 Subject: [PATCH 154/357] replaced avalon imports in flame --- .../hosts/flame/plugins/publish/collect_timeline_otio.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/flame/plugins/publish/collect_timeline_otio.py b/openpype/hosts/flame/plugins/publish/collect_timeline_otio.py index c6aeae7730..f2ae1f62a9 100644 --- a/openpype/hosts/flame/plugins/publish/collect_timeline_otio.py +++ b/openpype/hosts/flame/plugins/publish/collect_timeline_otio.py @@ -1,6 +1,7 @@ import pyblish.api -import avalon.api as avalon + import openpype.lib as oplib +from openpype.pipeline import legacy_io import openpype.hosts.flame.api as opfapi from openpype.hosts.flame.otio import flame_export @@ -18,7 +19,7 @@ class 
CollecTimelineOTIO(pyblish.api.ContextPlugin): # main asset_doc = context.data["assetEntity"] - task_name = avalon.Session["AVALON_TASK"] + task_name = legacy_io.Session["AVALON_TASK"] project = opfapi.get_current_project() sequence = opfapi.get_current_sequence(opfapi.CTX.selection) From 93fa04e1da15c4819051dc86a5e495f8c5ba5270 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 14 Apr 2022 11:58:19 +0200 Subject: [PATCH 155/357] replaced avalon imports in fusion --- openpype/hosts/fusion/api/lib.py | 20 +++++++++++-------- .../fusion/plugins/load/load_sequence.py | 9 +++++---- .../fusion/plugins/publish/submit_deadline.py | 8 ++++---- .../fusion/scripts/fusion_switch_shot.py | 13 +++++------- .../hosts/fusion/utility_scripts/switch_ui.py | 8 +++++--- 5 files changed, 31 insertions(+), 27 deletions(-) diff --git a/openpype/hosts/fusion/api/lib.py b/openpype/hosts/fusion/api/lib.py index f7a2360bfa..29f3a3a3eb 100644 --- a/openpype/hosts/fusion/api/lib.py +++ b/openpype/hosts/fusion/api/lib.py @@ -6,8 +6,10 @@ import contextlib from bson.objectid import ObjectId from Qt import QtGui -from avalon import io -from openpype.pipeline import switch_container +from openpype.pipeline import ( + switch_container, + legacy_io, +) from .pipeline import get_current_comp, comp_lock_and_undo_chunk self = sys.modules[__name__] @@ -94,8 +96,10 @@ def switch_item(container, # so we can use the original name from those. if any(not x for x in [asset_name, subset_name, representation_name]): _id = ObjectId(container["representation"]) - representation = io.find_one({"type": "representation", "_id": _id}) - version, subset, asset, project = io.parenthood(representation) + representation = legacy_io.find_one({ + "type": "representation", "_id": _id + }) + version, subset, asset, project = legacy_io.parenthood(representation) if asset_name is None: asset_name = asset["name"] @@ -107,14 +111,14 @@ def switch_item(container, representation_name = representation["name"] # Find the new one - asset = io.find_one({ + asset = legacy_io.find_one({ "name": asset_name, "type": "asset" }) assert asset, ("Could not find asset in the database with the name " "'%s'" % asset_name) - subset = io.find_one({ + subset = legacy_io.find_one({ "name": subset_name, "type": "subset", "parent": asset["_id"] @@ -122,7 +126,7 @@ def switch_item(container, assert subset, ("Could not find subset in the database with the name " "'%s'" % subset_name) - version = io.find_one( + version = legacy_io.find_one( { "type": "version", "parent": subset["_id"] @@ -134,7 +138,7 @@ def switch_item(container, asset_name, subset_name ) - representation = io.find_one({ + representation = legacy_io.find_one({ "name": representation_name, "type": "representation", "parent": version["_id"]} diff --git a/openpype/hosts/fusion/plugins/load/load_sequence.py b/openpype/hosts/fusion/plugins/load/load_sequence.py index 075820de35..b860abd88b 100644 --- a/openpype/hosts/fusion/plugins/load/load_sequence.py +++ b/openpype/hosts/fusion/plugins/load/load_sequence.py @@ -1,10 +1,9 @@ import os import contextlib -from avalon import io - from openpype.pipeline import ( load, + legacy_io, get_representation_path, ) from openpype.hosts.fusion.api import ( @@ -212,8 +211,10 @@ class FusionLoadSequence(load.LoaderPlugin): path = self._get_first_image(root) # Get start frame from version data - version = io.find_one({"type": "version", - "_id": representation["parent"]}) + version = legacy_io.find_one({ + "type": "version", + "_id": representation["parent"] + }) start = 
version["data"].get("frameStart") if start is None: self.log.warning("Missing start frame for updated version" diff --git a/openpype/hosts/fusion/plugins/publish/submit_deadline.py b/openpype/hosts/fusion/plugins/publish/submit_deadline.py index 9da99dd9e2..8570c759bc 100644 --- a/openpype/hosts/fusion/plugins/publish/submit_deadline.py +++ b/openpype/hosts/fusion/plugins/publish/submit_deadline.py @@ -4,10 +4,10 @@ import getpass import requests -from avalon import api - import pyblish.api +from openpype.pipeline import legacy_io + class FusionSubmitDeadline(pyblish.api.InstancePlugin): """Submit current Comp to Deadline @@ -133,7 +133,7 @@ class FusionSubmitDeadline(pyblish.api.InstancePlugin): "FUSION9_MasterPrefs" ] environment = dict({key: os.environ[key] for key in keys - if key in os.environ}, **api.Session) + if key in os.environ}, **legacy_io.Session) payload["JobInfo"].update({ "EnvironmentKeyValue%d" % index: "{key}={value}".format( @@ -146,7 +146,7 @@ class FusionSubmitDeadline(pyblish.api.InstancePlugin): self.log.info(json.dumps(payload, indent=4, sort_keys=True)) # E.g. http://192.168.0.1:8082/api/jobs - url = "{}/api/jobs".format(DEADLINE_REST_URL) + url = "{}/api/jobs".format(deadline_url) response = requests.post(url, json=payload) if not response.ok: raise Exception(response.text) diff --git a/openpype/hosts/fusion/scripts/fusion_switch_shot.py b/openpype/hosts/fusion/scripts/fusion_switch_shot.py index ca8e5c9e37..704f420796 100644 --- a/openpype/hosts/fusion/scripts/fusion_switch_shot.py +++ b/openpype/hosts/fusion/scripts/fusion_switch_shot.py @@ -4,10 +4,8 @@ import sys import logging # Pipeline imports -import avalon.api -from avalon import io - from openpype.pipeline import ( + legacy_io, install_host, registered_host, ) @@ -167,7 +165,7 @@ def update_frame_range(comp, representations): """ version_ids = [r["parent"] for r in representations] - versions = io.find({"type": "version", "_id": {"$in": version_ids}}) + versions = legacy_io.find({"type": "version", "_id": {"$in": version_ids}}) versions = list(versions) versions = [v for v in versions @@ -205,12 +203,11 @@ def switch(asset_name, filepath=None, new=True): # Assert asset name exists # It is better to do this here then to wait till switch_shot does it - asset = io.find_one({"type": "asset", "name": asset_name}) + asset = legacy_io.find_one({"type": "asset", "name": asset_name}) assert asset, "Could not find '%s' in the database" % asset_name # Get current project - self._project = io.find_one({"type": "project", - "name": avalon.api.Session["AVALON_PROJECT"]}) + self._project = legacy_io.find_one({"type": "project"}) # Go to comp if not filepath: @@ -241,7 +238,7 @@ def switch(asset_name, filepath=None, new=True): current_comp.Print(message) # Build the session to switch to - switch_to_session = avalon.api.Session.copy() + switch_to_session = legacy_io.Session.copy() switch_to_session["AVALON_ASSET"] = asset['name'] if new: diff --git a/openpype/hosts/fusion/utility_scripts/switch_ui.py b/openpype/hosts/fusion/utility_scripts/switch_ui.py index 37306c7a2a..70eb3d0a19 100644 --- a/openpype/hosts/fusion/utility_scripts/switch_ui.py +++ b/openpype/hosts/fusion/utility_scripts/switch_ui.py @@ -5,11 +5,13 @@ import logging from Qt import QtWidgets, QtCore -from avalon import io import qtawesome as qta from openpype import style -from openpype.pipeline import install_host +from openpype.pipeline import ( + install_host, + legacy_io, +) from openpype.hosts.fusion import api from openpype.lib.avalon_context import 
get_workdir_from_session @@ -164,7 +166,7 @@ class App(QtWidgets.QWidget): return items def collect_assets(self): - return list(io.find({"type": "asset"}, {"name": True})) + return list(legacy_io.find({"type": "asset"}, {"name": True})) def populate_comp_box(self, files): """Ensure we display the filename only but the path is stored as well From 5bded18fbd709b1f61e8c2f40e400845bbe9cf99 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 14 Apr 2022 11:59:13 +0200 Subject: [PATCH 156/357] replaced avalon imports in harmony --- openpype/hosts/harmony/api/README.md | 3 +-- openpype/hosts/harmony/api/pipeline.py | 5 ++--- .../harmony/plugins/publish/collect_farm_render.py | 12 ++++++------ 3 files changed, 9 insertions(+), 11 deletions(-) diff --git a/openpype/hosts/harmony/api/README.md b/openpype/hosts/harmony/api/README.md index e8d354e1e6..dd45eb14dd 100644 --- a/openpype/hosts/harmony/api/README.md +++ b/openpype/hosts/harmony/api/README.md @@ -419,7 +419,6 @@ class ExtractImage(pyblish.api.InstancePlugin): ```python import os -from avalon import api, io import openpype.hosts.harmony.api as harmony signature = str(uuid4()).replace("-", "_") @@ -611,7 +610,7 @@ class ImageSequenceLoader(load.LoaderPlugin): def update(self, container, representation): node = container.pop("node") - version = io.find_one({"_id": representation["parent"]}) + version = legacy_io.find_one({"_id": representation["parent"]}) files = [] for f in version["data"]["files"]: files.append( diff --git a/openpype/hosts/harmony/api/pipeline.py b/openpype/hosts/harmony/api/pipeline.py index 88f11dd16f..b953d0e984 100644 --- a/openpype/hosts/harmony/api/pipeline.py +++ b/openpype/hosts/harmony/api/pipeline.py @@ -5,11 +5,10 @@ import logging from bson.objectid import ObjectId import pyblish.api -from avalon import io - from openpype import lib from openpype.lib import register_event_callback from openpype.pipeline import ( + legacy_io, register_loader_plugin_path, register_creator_plugin_path, deregister_loader_plugin_path, @@ -111,7 +110,7 @@ def check_inventory(): outdated_containers = [] for container in ls(): representation = container['representation'] - representation_doc = io.find_one( + representation_doc = legacy_io.find_one( { "_id": ObjectId(representation), "type": "representation" diff --git a/openpype/hosts/harmony/plugins/publish/collect_farm_render.py b/openpype/hosts/harmony/plugins/publish/collect_farm_render.py index 35b123f97d..f5bf051243 100644 --- a/openpype/hosts/harmony/plugins/publish/collect_farm_render.py +++ b/openpype/hosts/harmony/plugins/publish/collect_farm_render.py @@ -3,13 +3,13 @@ from pathlib import Path import attr -from avalon import api -from openpype.lib import get_formatted_current_time -import openpype.lib.abstract_collect_render -import openpype.hosts.harmony.api as harmony -from openpype.lib.abstract_collect_render import RenderInstance import openpype.lib +import openpype.lib.abstract_collect_render +from openpype.lib.abstract_collect_render import RenderInstance +from openpype.lib import get_formatted_current_time +from openpype.pipeline import legacy_io +import openpype.hosts.harmony.api as harmony @attr.s @@ -143,7 +143,7 @@ class CollectFarmRender(openpype.lib.abstract_collect_render. 
source=context.data["currentFile"], label=node.split("/")[1], subset=subset_name, - asset=api.Session["AVALON_ASSET"], + asset=legacy_io.Session["AVALON_ASSET"], attachTo=False, setMembers=[node], publish=info[4], From cea55ccc715e2aeb10cd6890f8c09377cbb1fef6 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 14 Apr 2022 11:59:43 +0200 Subject: [PATCH 157/357] replaced avalon imports in hiero --- openpype/hosts/hiero/api/lib.py | 14 ++++++-------- openpype/hosts/hiero/api/menu.py | 19 ++++++++++++------- openpype/hosts/hiero/api/tags.py | 8 ++++---- .../hosts/hiero/plugins/load/load_clip.py | 10 ++++++---- .../plugins/publish/precollect_workfile.py | 19 +++++++++++-------- .../collect_assetbuilds.py | 4 ++-- .../precollect_workfile.py | 4 ++-- 7 files changed, 43 insertions(+), 35 deletions(-) diff --git a/openpype/hosts/hiero/api/lib.py b/openpype/hosts/hiero/api/lib.py index 00c30538fc..0e64ddcaf5 100644 --- a/openpype/hosts/hiero/api/lib.py +++ b/openpype/hosts/hiero/api/lib.py @@ -12,8 +12,7 @@ import hiero from Qt import QtWidgets from bson.objectid import ObjectId -import avalon.api as avalon -import avalon.io +from openpype.pipeline import legacy_io from openpype.api import (Logger, Anatomy, get_anatomy_settings) from . import tags @@ -383,7 +382,7 @@ def get_publish_attribute(tag): def sync_avalon_data_to_workfile(): # import session to get project dir - project_name = avalon.Session["AVALON_PROJECT"] + project_name = legacy_io.Session["AVALON_PROJECT"] anatomy = Anatomy(project_name) work_template = anatomy.templates["work"]["path"] @@ -408,7 +407,7 @@ def sync_avalon_data_to_workfile(): project.setProjectRoot(active_project_root) # get project data from avalon db - project_doc = avalon.io.find_one({"type": "project"}) + project_doc = legacy_io.find_one({"type": "project"}) project_data = project_doc["data"] log.debug("project_data: {}".format(project_data)) @@ -994,7 +993,6 @@ def check_inventory_versions(): it to red. """ from . import parse_container - from avalon import io # presets clip_color_last = "green" @@ -1006,19 +1004,19 @@ def check_inventory_versions(): if container: # get representation from io - representation = io.find_one({ + representation = legacy_io.find_one({ "type": "representation", "_id": ObjectId(container["representation"]) }) # Get start frame from version data - version = io.find_one({ + version = legacy_io.find_one({ "type": "version", "_id": representation["parent"] }) # get all versions in list - versions = io.find({ + versions = legacy_io.find({ "type": "version", "parent": version["parent"] }).distinct('name') diff --git a/openpype/hosts/hiero/api/menu.py b/openpype/hosts/hiero/api/menu.py index de20b86f30..e262abec00 100644 --- a/openpype/hosts/hiero/api/menu.py +++ b/openpype/hosts/hiero/api/menu.py @@ -1,14 +1,16 @@ import os import sys + import hiero.core -from openpype.api import Logger -from openpype.tools.utils import host_tools -from avalon.api import Session from hiero.ui import findMenuAction +from openpype.api import Logger +from openpype.pipeline import legacy_io +from openpype.tools.utils import host_tools + from . 
import tags -log = Logger().get_logger(__name__) +log = Logger.get_logger(__name__) self = sys.modules[__name__] self._change_context_menu = None @@ -24,8 +26,10 @@ def update_menu_task_label(): log.warning("Can't find menuItem: {}".format(object_name)) return - label = "{}, {}".format(Session["AVALON_ASSET"], - Session["AVALON_TASK"]) + label = "{}, {}".format( + legacy_io.Session["AVALON_ASSET"], + legacy_io.Session["AVALON_TASK"] + ) menu = found_menu.menu() self._change_context_menu = label @@ -51,7 +55,8 @@ def menu_install(): menu_name = os.environ['AVALON_LABEL'] context_label = "{0}, {1}".format( - Session["AVALON_ASSET"], Session["AVALON_TASK"] + legacy_io.Session["AVALON_ASSET"], + legacy_io.Session["AVALON_TASK"] ) self._change_context_menu = context_label diff --git a/openpype/hosts/hiero/api/tags.py b/openpype/hosts/hiero/api/tags.py index fe5c0d5257..e15e3119a6 100644 --- a/openpype/hosts/hiero/api/tags.py +++ b/openpype/hosts/hiero/api/tags.py @@ -3,9 +3,9 @@ import os import hiero from openpype.api import Logger -from avalon import io +from openpype.pipeline import legacy_io -log = Logger().get_logger(__name__) +log = Logger.get_logger(__name__) def tag_data(): @@ -141,7 +141,7 @@ def add_tags_to_workfile(): nks_pres_tags = tag_data() # Get project task types. - tasks = io.find_one({"type": "project"})["config"]["tasks"] + tasks = legacy_io.find_one({"type": "project"})["config"]["tasks"] nks_pres_tags["[Tasks]"] = {} log.debug("__ tasks: {}".format(tasks)) for task_type in tasks.keys(): @@ -159,7 +159,7 @@ def add_tags_to_workfile(): # asset builds and shots. if int(os.getenv("TAG_ASSETBUILD_STARTUP", 0)) == 1: nks_pres_tags["[AssetBuilds]"] = {} - for asset in io.find({"type": "asset"}): + for asset in legacy_io.find({"type": "asset"}): if asset["data"]["entityType"] == "AssetBuild": nks_pres_tags["[AssetBuilds]"][asset["name"]] = { "editable": "1", diff --git a/openpype/hosts/hiero/plugins/load/load_clip.py b/openpype/hosts/hiero/plugins/load/load_clip.py index d3908695a2..da4326c8c1 100644 --- a/openpype/hosts/hiero/plugins/load/load_clip.py +++ b/openpype/hosts/hiero/plugins/load/load_clip.py @@ -1,5 +1,7 @@ -from avalon import io -from openpype.pipeline import get_representation_path +from openpype.pipeline import ( + legacy_io, + get_representation_path, +) import openpype.hosts.hiero.api as phiero # from openpype.hosts.hiero.api import plugin, lib # reload(lib) @@ -105,7 +107,7 @@ class LoadClip(phiero.SequenceLoader): namespace = container['namespace'] track_item = phiero.get_track_items( track_item_name=namespace) - version = io.find_one({ + version = legacy_io.find_one({ "type": "version", "_id": representation["parent"] }) @@ -174,7 +176,7 @@ class LoadClip(phiero.SequenceLoader): # define version name version_name = version.get("name", None) # get all versions in list - versions = io.find({ + versions = legacy_io.find({ "type": "version", "parent": version["parent"] }).distinct('name') diff --git a/openpype/hosts/hiero/plugins/publish/precollect_workfile.py b/openpype/hosts/hiero/plugins/publish/precollect_workfile.py index d48d6949bd..29c0397f79 100644 --- a/openpype/hosts/hiero/plugins/publish/precollect_workfile.py +++ b/openpype/hosts/hiero/plugins/publish/precollect_workfile.py @@ -1,12 +1,15 @@ import os -import pyblish.api -import hiero.ui -from openpype.hosts.hiero import api as phiero -from avalon import api as avalon -from pprint import pformat -from openpype.hosts.hiero.api.otio import hiero_export -from Qt.QtGui import QPixmap import tempfile +from 
pprint import pformat + +import pyblish.api +from Qt.QtGui import QPixmap + +import hiero.ui + +from openpype.pipeline import legacy_io +from openpype.hosts.hiero import api as phiero +from openpype.hosts.hiero.api.otio import hiero_export class PrecollectWorkfile(pyblish.api.ContextPlugin): @@ -17,7 +20,7 @@ class PrecollectWorkfile(pyblish.api.ContextPlugin): def process(self, context): - asset = avalon.Session["AVALON_ASSET"] + asset = legacy_io.Session["AVALON_ASSET"] subset = "workfile" project = phiero.get_current_project() active_timeline = hiero.ui.activeSequence() diff --git a/openpype/hosts/hiero/plugins/publish_old_workflow/collect_assetbuilds.py b/openpype/hosts/hiero/plugins/publish_old_workflow/collect_assetbuilds.py index a90856c6fd..10baf25803 100644 --- a/openpype/hosts/hiero/plugins/publish_old_workflow/collect_assetbuilds.py +++ b/openpype/hosts/hiero/plugins/publish_old_workflow/collect_assetbuilds.py @@ -1,5 +1,5 @@ from pyblish import api -from avalon import io +from openpype.pipeline import legacy_io class CollectAssetBuilds(api.ContextPlugin): @@ -18,7 +18,7 @@ class CollectAssetBuilds(api.ContextPlugin): def process(self, context): asset_builds = {} - for asset in io.find({"type": "asset"}): + for asset in legacy_io.find({"type": "asset"}): if asset["data"]["entityType"] == "AssetBuild": self.log.debug("Found \"{}\" in database.".format(asset)) asset_builds[asset["name"]] = asset diff --git a/openpype/hosts/hiero/plugins/publish_old_workflow/precollect_workfile.py b/openpype/hosts/hiero/plugins/publish_old_workflow/precollect_workfile.py index ef7d07421b..693e151f6f 100644 --- a/openpype/hosts/hiero/plugins/publish_old_workflow/precollect_workfile.py +++ b/openpype/hosts/hiero/plugins/publish_old_workflow/precollect_workfile.py @@ -1,7 +1,7 @@ import os import pyblish.api from openpype.hosts.hiero import api as phiero -from avalon import api as avalon +from openpype.pipeline import legacy_io class PreCollectWorkfile(pyblish.api.ContextPlugin): @@ -11,7 +11,7 @@ class PreCollectWorkfile(pyblish.api.ContextPlugin): order = pyblish.api.CollectorOrder - 0.51 def process(self, context): - asset = avalon.Session["AVALON_ASSET"] + asset = legacy_io.Session["AVALON_ASSET"] subset = "workfile" project = phiero.get_current_project() From 785bdb09c21ba9987d2258d5195f44354c2dc250 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 14 Apr 2022 12:00:59 +0200 Subject: [PATCH 158/357] replaced avalon imports in houdini --- openpype/hosts/houdini/api/lib.py | 16 ++++++----- openpype/hosts/houdini/api/usd.py | 10 ++++--- .../houdini/plugins/create/create_hda.py | 14 ++++++---- .../plugins/publish/collect_usd_bootstrap.py | 12 ++++++--- .../plugins/publish/extract_usd_layered.py | 15 ++++++----- .../validate_usd_shade_model_exists.py | 14 ++++++---- .../avalon_uri_processor.py | 27 ++++++++++--------- 7 files changed, 65 insertions(+), 43 deletions(-) diff --git a/openpype/hosts/houdini/api/lib.py b/openpype/hosts/houdini/api/lib.py index bd41618856..7b8a3dc46c 100644 --- a/openpype/hosts/houdini/api/lib.py +++ b/openpype/hosts/houdini/api/lib.py @@ -4,8 +4,8 @@ from contextlib import contextmanager import six -from avalon import api, io from openpype.api import get_asset +from openpype.pipeline import legacy_io import hou @@ -75,9 +75,13 @@ def generate_ids(nodes, asset_id=None): if asset_id is None: # Get the asset ID from the database for the asset of current context - asset_data = io.find_one({"type": "asset", - "name": api.Session["AVALON_ASSET"]}, - projection={"_id": True}) 
+ asset_data = legacy_io.find_one( + { + "type": "asset", + "name": legacy_io.Session["AVALON_ASSET"] + }, + projection={"_id": True} + ) assert asset_data, "No current asset found in Session" asset_id = asset_data['_id'] @@ -424,8 +428,8 @@ def maintained_selection(): def reset_framerange(): """Set frame range to current asset""" - asset_name = api.Session["AVALON_ASSET"] - asset = io.find_one({"name": asset_name, "type": "asset"}) + asset_name = legacy_io.Session["AVALON_ASSET"] + asset = legacy_io.find_one({"name": asset_name, "type": "asset"}) frame_start = asset["data"].get("frameStart") frame_end = asset["data"].get("frameEnd") diff --git a/openpype/hosts/houdini/api/usd.py b/openpype/hosts/houdini/api/usd.py index a992f1d082..e9991e38ec 100644 --- a/openpype/hosts/houdini/api/usd.py +++ b/openpype/hosts/houdini/api/usd.py @@ -1,11 +1,12 @@ """Houdini-specific USD Library functions.""" import contextlib - import logging + from Qt import QtWidgets, QtCore, QtGui -from avalon import io + from openpype import style +from openpype.pipeline import legacy_io from openpype.tools.utils.assets_widget import SingleSelectAssetsWidget from pxr import Sdf @@ -20,11 +21,12 @@ class SelectAssetDialog(QtWidgets.QWidget): Args: parm: Parameter where selected asset name is set. """ + def __init__(self, parm): self.setWindowTitle("Pick Asset") self.setWindowFlags(QtCore.Qt.FramelessWindowHint | QtCore.Qt.Popup) - assets_widget = SingleSelectAssetsWidget(io, parent=self) + assets_widget = SingleSelectAssetsWidget(legacy_io, parent=self) layout = QtWidgets.QHBoxLayout(self) layout.addWidget(assets_widget) @@ -44,7 +46,7 @@ class SelectAssetDialog(QtWidgets.QWidget): select_id = None name = self._parm.eval() if name: - db_asset = io.find_one( + db_asset = legacy_io.find_one( {"name": name, "type": "asset"}, {"_id": True} ) diff --git a/openpype/hosts/houdini/plugins/create/create_hda.py b/openpype/hosts/houdini/plugins/create/create_hda.py index 0a9c1bad1e..5fc78c7539 100644 --- a/openpype/hosts/houdini/plugins/create/create_hda.py +++ b/openpype/hosts/houdini/plugins/create/create_hda.py @@ -1,6 +1,7 @@ # -*- coding: utf-8 -*- import hou -from avalon import io + +from openpype.pipeline import legacy_io from openpype.hosts.houdini.api import lib from openpype.hosts.houdini.api import plugin @@ -22,13 +23,16 @@ class CreateHDA(plugin.Creator): # type: (str) -> bool """Check if existing subset name versions already exists.""" # Get all subsets of the current asset - asset_id = io.find_one({"name": self.data["asset"], "type": "asset"}, - projection={"_id": True})['_id'] - subset_docs = io.find( + asset_id = legacy_io.find_one( + {"name": self.data["asset"], "type": "asset"}, + projection={"_id": True} + )['_id'] + subset_docs = legacy_io.find( { "type": "subset", "parent": asset_id - }, {"name": 1} + }, + {"name": 1} ) existing_subset_names = set(subset_docs.distinct("name")) existing_subset_names_low = { diff --git a/openpype/hosts/houdini/plugins/publish/collect_usd_bootstrap.py b/openpype/hosts/houdini/plugins/publish/collect_usd_bootstrap.py index 66dfba64df..3f0d10e0ba 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_usd_bootstrap.py +++ b/openpype/hosts/houdini/plugins/publish/collect_usd_bootstrap.py @@ -1,6 +1,6 @@ import pyblish.api -from avalon import io +from openpype.pipeline import legacy_io import openpype.lib.usdlib as usdlib @@ -50,7 +50,10 @@ class CollectUsdBootstrap(pyblish.api.InstancePlugin): self.log.debug("Add bootstrap for: %s" % bootstrap) - asset = io.find_one({"name": 
instance.data["asset"], "type": "asset"}) + asset = legacy_io.find_one({ + "name": instance.data["asset"], + "type": "asset" + }) assert asset, "Asset must exist: %s" % asset # Check which are not about to be created and don't exist yet @@ -104,7 +107,8 @@ class CollectUsdBootstrap(pyblish.api.InstancePlugin): # Or, if they already exist in the database we can # skip them too. return bool( - io.find_one( - {"name": subset, "type": "subset", "parent": asset["_id"]} + legacy_io.find_one( + {"name": subset, "type": "subset", "parent": asset["_id"]}, + {"_id": True} ) ) diff --git a/openpype/hosts/houdini/plugins/publish/extract_usd_layered.py b/openpype/hosts/houdini/plugins/publish/extract_usd_layered.py index 3e842ae766..bfcd93c1cb 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_usd_layered.py +++ b/openpype/hosts/houdini/plugins/publish/extract_usd_layered.py @@ -7,7 +7,10 @@ from collections import deque import pyblish.api import openpype.api -from openpype.pipeline import get_representation_path +from openpype.pipeline import ( + get_representation_path, + legacy_io, +) import openpype.hosts.houdini.api.usd as hou_usdlib from openpype.hosts.houdini.api.lib import render_rop @@ -266,8 +269,6 @@ class ExtractUSDLayered(openpype.api.Extractor): instance.data["files"].append(fname) def _compare_with_latest_publish(self, dependency, new_file): - - from avalon import api, io import filecmp _, ext = os.path.splitext(new_file) @@ -275,10 +276,10 @@ class ExtractUSDLayered(openpype.api.Extractor): # Compare this dependency with the latest published version # to detect whether we should make this into a new publish # version. If not, skip it. - asset = io.find_one( + asset = legacy_io.find_one( {"name": dependency.data["asset"], "type": "asset"} ) - subset = io.find_one( + subset = legacy_io.find_one( { "name": dependency.data["subset"], "type": "subset", @@ -290,7 +291,7 @@ class ExtractUSDLayered(openpype.api.Extractor): self.log.debug("No existing subset..") return False - version = io.find_one( + version = legacy_io.find_one( {"type": "version", "parent": subset["_id"], }, sort=[("name", -1)] ) @@ -298,7 +299,7 @@ class ExtractUSDLayered(openpype.api.Extractor): self.log.debug("No existing version..") return False - representation = io.find_one( + representation = legacy_io.find_one( { "name": ext.lstrip("."), "type": "representation", diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_shade_model_exists.py b/openpype/hosts/houdini/plugins/publish/validate_usd_shade_model_exists.py index fcfbf6b22d..44719ae488 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_usd_shade_model_exists.py +++ b/openpype/hosts/houdini/plugins/publish/validate_usd_shade_model_exists.py @@ -1,9 +1,9 @@ import re import pyblish.api -import openpype.api -from avalon import io +import openpype.api +from openpype.pipeline import legacy_io class ValidateUSDShadeModelExists(pyblish.api.InstancePlugin): @@ -23,16 +23,20 @@ class ValidateUSDShadeModelExists(pyblish.api.InstancePlugin): shade_subset = subset.split(".", 1)[0] model_subset = re.sub("^usdShade", "usdModel", shade_subset) - asset_doc = io.find_one({"name": asset, "type": "asset"}) + asset_doc = legacy_io.find_one( + {"name": asset, "type": "asset"}, + {"_id": True} + ) if not asset_doc: raise RuntimeError("Asset does not exist: %s" % asset) - subset_doc = io.find_one( + subset_doc = legacy_io.find_one( { "name": model_subset, "type": "subset", "parent": asset_doc["_id"], - } + }, + {"_id": True} ) if not subset_doc: raise 
RuntimeError( diff --git a/openpype/hosts/houdini/vendor/husdoutputprocessors/avalon_uri_processor.py b/openpype/hosts/houdini/vendor/husdoutputprocessors/avalon_uri_processor.py index 8cd51e6641..01a29472e7 100644 --- a/openpype/hosts/houdini/vendor/husdoutputprocessors/avalon_uri_processor.py +++ b/openpype/hosts/houdini/vendor/husdoutputprocessors/avalon_uri_processor.py @@ -1,17 +1,21 @@ +import os import hou import husdoutputprocessors.base as base -import os -import re -import logging import colorbleed.usdlib as usdlib +from openpype.pipeline import ( + legacy_io, + registered_root, +) + def _get_project_publish_template(): """Return publish template from database for current project""" - from avalon import io - project = io.find_one({"type": "project"}, - projection={"config.template.publish": True}) + project = legacy_io.find_one( + {"type": "project"}, + projection={"config.template.publish": True} + ) return project["config"]["template"]["publish"] @@ -133,12 +137,11 @@ class AvalonURIOutputProcessor(base.OutputProcessorBase): """ - from avalon import api, io - from openpype.pipeline import registered_root - - PROJECT = api.Session["AVALON_PROJECT"] - asset_doc = io.find_one({"name": asset, - "type": "asset"}) + PROJECT = legacy_io.Session["AVALON_PROJECT"] + asset_doc = legacy_io.find_one({ + "name": asset, + "type": "asset" + }) if not asset_doc: raise RuntimeError("Invalid asset name: '%s'" % asset) From 4a5f4c16f4bcdf1f2de341615fc8badce8ed6237 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 14 Apr 2022 12:12:41 +0200 Subject: [PATCH 159/357] replace avalon import in maya --- openpype/hosts/maya/api/action.py | 8 ++- openpype/hosts/maya/api/commands.py | 15 ++-- openpype/hosts/maya/api/lib.py | 72 +++++++++++-------- openpype/hosts/maya/api/menu.py | 13 ++-- openpype/hosts/maya/api/pipeline.py | 12 ++-- openpype/hosts/maya/api/setdress.py | 20 +++--- .../maya/plugins/create/create_render.py | 9 +-- .../create/create_unreal_skeletalmesh.py | 4 +- .../create/create_unreal_staticmesh.py | 6 +- .../maya/plugins/create/create_vrayscene.py | 9 +-- .../plugins/inventory/import_modelrender.py | 9 +-- .../hosts/maya/plugins/load/load_audio.py | 11 +-- .../maya/plugins/load/load_image_plane.py | 8 +-- openpype/hosts/maya/plugins/load/load_look.py | 8 ++- .../hosts/maya/plugins/load/load_reference.py | 8 ++- .../hosts/maya/plugins/load/load_vrayproxy.py | 13 ++-- .../maya/plugins/load/load_yeti_cache.py | 6 +- .../maya/plugins/publish/collect_render.py | 4 +- .../maya/plugins/publish/collect_review.py | 5 +- .../maya/plugins/publish/collect_vrayscene.py | 5 +- .../maya/plugins/publish/collect_workfile.py | 7 +- .../maya/plugins/publish/extract_look.py | 4 +- .../plugins/publish/submit_maya_muster.py | 5 +- .../plugins/publish/validate_model_name.py | 11 +-- .../publish/validate_node_ids_in_database.py | 5 +- .../publish/validate_node_ids_related.py | 5 +- .../publish/validate_renderlayer_aovs.py | 6 +- .../validate_unreal_staticmesh_naming.py | 10 +-- 28 files changed, 164 insertions(+), 134 deletions(-) diff --git a/openpype/hosts/maya/api/action.py b/openpype/hosts/maya/api/action.py index ab26748c8a..ca1006b6aa 100644 --- a/openpype/hosts/maya/api/action.py +++ b/openpype/hosts/maya/api/action.py @@ -2,8 +2,8 @@ from __future__ import absolute_import import pyblish.api -from avalon import io +from openpype.pipeline import legacy_io from openpype.api import get_errored_instances_from_context @@ -75,8 +75,10 @@ class GenerateUUIDsOnInvalidAction(pyblish.api.Action): from . 
import lib asset = instance.data['asset'] - asset_id = io.find_one({"name": asset, "type": "asset"}, - projection={"_id": True})['_id'] + asset_id = legacy_io.find_one( + {"name": asset, "type": "asset"}, + projection={"_id": True} + )['_id'] for node, _id in lib.generate_ids(nodes, asset_id=asset_id): lib.set_id(node, _id, overwrite=True) diff --git a/openpype/hosts/maya/api/commands.py b/openpype/hosts/maya/api/commands.py index a1e0be2cfe..dd616b6dd6 100644 --- a/openpype/hosts/maya/api/commands.py +++ b/openpype/hosts/maya/api/commands.py @@ -1,7 +1,8 @@ # -*- coding: utf-8 -*- """OpenPype script commands to be used directly in Maya.""" from maya import cmds -from avalon import api, io + +from openpype.pipeline import legacy_io class ToolWindows: @@ -73,13 +74,13 @@ def reset_frame_range(): 59.94: '59.94fps', 44100: '44100fps', 48000: '48000fps' - }.get(float(api.Session.get("AVALON_FPS", 25)), "pal") + }.get(float(legacy_io.Session.get("AVALON_FPS", 25)), "pal") cmds.currentUnit(time=fps) # Set frame start/end - asset_name = api.Session["AVALON_ASSET"] - asset = io.find_one({"name": asset_name, "type": "asset"}) + asset_name = legacy_io.Session["AVALON_ASSET"] + asset = legacy_io.find_one({"name": asset_name, "type": "asset"}) frame_start = asset["data"].get("frameStart") frame_end = asset["data"].get("frameEnd") @@ -144,8 +145,8 @@ def reset_resolution(): resolution_height = 1080 # Get resolution from asset - asset_name = api.Session["AVALON_ASSET"] - asset_doc = io.find_one({"name": asset_name, "type": "asset"}) + asset_name = legacy_io.Session["AVALON_ASSET"] + asset_doc = legacy_io.find_one({"name": asset_name, "type": "asset"}) resolution = _resolution_from_document(asset_doc) # Try get resolution from project if resolution is None: @@ -154,7 +155,7 @@ def reset_resolution(): "Asset \"{}\" does not have set resolution." 
" Trying to get resolution from project" ).format(asset_name)) - project_doc = io.find_one({"type": "project"}) + project_doc = legacy_io.find_one({"type": "project"}) resolution = _resolution_from_document(project_doc) if resolution is None: diff --git a/openpype/hosts/maya/api/lib.py b/openpype/hosts/maya/api/lib.py index 9e99b96477..cf09c39b21 100644 --- a/openpype/hosts/maya/api/lib.py +++ b/openpype/hosts/maya/api/lib.py @@ -17,11 +17,10 @@ import bson from maya import cmds, mel import maya.api.OpenMaya as om -from avalon import api, io - from openpype import lib from openpype.api import get_anatomy_settings from openpype.pipeline import ( + legacy_io, discover_loader_plugins, loaders_from_representation, get_representation_path, @@ -1388,9 +1387,13 @@ def generate_ids(nodes, asset_id=None): if asset_id is None: # Get the asset ID from the database for the asset of current context - asset_data = io.find_one({"type": "asset", - "name": api.Session["AVALON_ASSET"]}, - projection={"_id": True}) + asset_data = legacy_io.find_one( + { + "type": "asset", + "name": legacy_io.Session["AVALON_ASSET"] + }, + projection={"_id": True} + ) assert asset_data, "No current asset found in Session" asset_id = asset_data['_id'] @@ -1545,9 +1548,11 @@ def list_looks(asset_id): # # get all subsets with look leading in # the name associated with the asset - subset = io.find({"parent": bson.ObjectId(asset_id), - "type": "subset", - "name": {"$regex": "look*"}}) + subset = legacy_io.find({ + "parent": bson.ObjectId(asset_id), + "type": "subset", + "name": {"$regex": "look*"} + }) return list(subset) @@ -1566,13 +1571,17 @@ def assign_look_by_version(nodes, version_id): """ # Get representations of shader file and relationships - look_representation = io.find_one({"type": "representation", - "parent": version_id, - "name": "ma"}) + look_representation = legacy_io.find_one({ + "type": "representation", + "parent": version_id, + "name": "ma" + }) - json_representation = io.find_one({"type": "representation", - "parent": version_id, - "name": "json"}) + json_representation = legacy_io.find_one({ + "type": "representation", + "parent": version_id, + "name": "json" + }) # See if representation is already loaded, if so reuse it. 
host = registered_host() @@ -1637,9 +1646,11 @@ def assign_look(nodes, subset="lookDefault"): except bson.errors.InvalidId: log.warning("Asset ID is not compatible with bson") continue - subset_data = io.find_one({"type": "subset", - "name": subset, - "parent": asset_id}) + subset_data = legacy_io.find_one({ + "type": "subset", + "name": subset, + "parent": asset_id + }) if not subset_data: log.warning("No subset '{}' found for {}".format(subset, asset_id)) @@ -1647,13 +1658,18 @@ def assign_look(nodes, subset="lookDefault"): # get last version # with backwards compatibility - version = io.find_one({"parent": subset_data['_id'], - "type": "version", - "data.families": - {"$in": ["look"]} - }, - sort=[("name", -1)], - projection={"_id": True, "name": True}) + version = legacy_io.find_one( + { + "parent": subset_data['_id'], + "type": "version", + "data.families": {"$in": ["look"]} + }, + sort=[("name", -1)], + projection={ + "_id": True, + "name": True + } + ) log.debug("Assigning look '{}' ".format(subset, version["name"])) @@ -2136,7 +2152,7 @@ def reset_scene_resolution(): None """ - project_doc = io.find_one({"type": "project"}) + project_doc = legacy_io.find_one({"type": "project"}) project_data = project_doc["data"] asset_data = lib.get_asset()["data"] @@ -2169,13 +2185,13 @@ def set_context_settings(): """ # Todo (Wijnand): apply renderer and resolution of project - project_doc = io.find_one({"type": "project"}) + project_doc = legacy_io.find_one({"type": "project"}) project_data = project_doc["data"] asset_data = lib.get_asset()["data"] # Set project fps fps = asset_data.get("fps", project_data.get("fps", 25)) - api.Session["AVALON_FPS"] = str(fps) + legacy_io.Session["AVALON_FPS"] = str(fps) set_scene_fps(fps) reset_scene_resolution() @@ -2935,7 +2951,7 @@ def update_content_on_context_change(): This will update scene content to match new asset on context change """ scene_sets = cmds.listSets(allSets=True) - new_asset = api.Session["AVALON_ASSET"] + new_asset = legacy_io.Session["AVALON_ASSET"] new_data = lib.get_asset()["data"] for s in scene_sets: try: diff --git a/openpype/hosts/maya/api/menu.py b/openpype/hosts/maya/api/menu.py index 5f0fc39bf3..97f06c43af 100644 --- a/openpype/hosts/maya/api/menu.py +++ b/openpype/hosts/maya/api/menu.py @@ -6,10 +6,9 @@ from Qt import QtWidgets, QtGui import maya.utils import maya.cmds as cmds -import avalon.api - from openpype.api import BuildWorkfile from openpype.settings import get_project_settings +from openpype.pipeline import legacy_io from openpype.tools.utils import host_tools from openpype.hosts.maya.api import lib from .lib import get_main_window, IS_HEADLESS @@ -40,15 +39,15 @@ def install(): parent_widget = get_main_window() cmds.menu( MENU_NAME, - label=avalon.api.Session["AVALON_LABEL"], + label=legacy_io.Session["AVALON_LABEL"], tearOff=True, parent="MayaWindow" ) # Create context menu context_label = "{}, {}".format( - avalon.api.Session["AVALON_ASSET"], - avalon.api.Session["AVALON_TASK"] + legacy_io.Session["AVALON_ASSET"], + legacy_io.Session["AVALON_TASK"] ) cmds.menuItem( "currentContext", @@ -211,7 +210,7 @@ def update_menu_task_label(): return label = "{}, {}".format( - avalon.api.Session["AVALON_ASSET"], - avalon.api.Session["AVALON_TASK"] + legacy_io.Session["AVALON_ASSET"], + legacy_io.Session["AVALON_TASK"] ) cmds.menuItem(object_name, edit=True, label=label) diff --git a/openpype/hosts/maya/api/pipeline.py b/openpype/hosts/maya/api/pipeline.py index f6f3472eef..dd05bfbb21 100644 --- 
a/openpype/hosts/maya/api/pipeline.py +++ b/openpype/hosts/maya/api/pipeline.py @@ -7,7 +7,6 @@ from maya import utils, cmds, OpenMaya import maya.api.OpenMaya as om import pyblish.api -import avalon.api import openpype.hosts.maya from openpype.tools.utils import host_tools @@ -18,6 +17,7 @@ from openpype.lib import ( ) from openpype.lib.path_tools import HostDirmap from openpype.pipeline import ( + legacy_io, register_loader_plugin_path, register_inventory_action_path, register_creator_plugin_path, @@ -93,7 +93,7 @@ def _set_project(): None """ - workdir = avalon.api.Session["AVALON_WORKDIR"] + workdir = legacy_io.Session["AVALON_WORKDIR"] try: os.makedirs(workdir) @@ -473,7 +473,7 @@ def on_task_changed(): # Run menu.update_menu_task_label() - workdir = avalon.api.Session["AVALON_WORKDIR"] + workdir = legacy_io.Session["AVALON_WORKDIR"] if os.path.exists(workdir): log.info("Updating Maya workspace for task change to %s", workdir) @@ -494,9 +494,9 @@ def on_task_changed(): lib.update_content_on_context_change() msg = " project: {}\n asset: {}\n task:{}".format( - avalon.api.Session["AVALON_PROJECT"], - avalon.api.Session["AVALON_ASSET"], - avalon.api.Session["AVALON_TASK"] + legacy_io.Session["AVALON_PROJECT"], + legacy_io.Session["AVALON_ASSET"], + legacy_io.Session["AVALON_TASK"] ) lib.show_message( diff --git a/openpype/hosts/maya/api/setdress.py b/openpype/hosts/maya/api/setdress.py index 018ea4558c..f8d3ed79b8 100644 --- a/openpype/hosts/maya/api/setdress.py +++ b/openpype/hosts/maya/api/setdress.py @@ -10,9 +10,9 @@ from bson.objectid import ObjectId from maya import cmds -from avalon import io from openpype.pipeline import ( schema, + legacy_io, discover_loader_plugins, loaders_from_representation, load_container, @@ -283,21 +283,23 @@ def update_package_version(container, version): """ # Versioning (from `core.maya.pipeline`) - current_representation = io.find_one({ + current_representation = legacy_io.find_one({ "_id": ObjectId(container["representation"]) }) assert current_representation is not None, "This is a bug" - version_, subset, asset, project = io.parenthood(current_representation) + version_, subset, asset, project = legacy_io.parenthood( + current_representation + ) if version == -1: - new_version = io.find_one({ + new_version = legacy_io.find_one({ "type": "version", "parent": subset["_id"] }, sort=[("name", -1)]) else: - new_version = io.find_one({ + new_version = legacy_io.find_one({ "type": "version", "parent": subset["_id"], "name": version, @@ -306,7 +308,7 @@ def update_package_version(container, version): assert new_version is not None, "This is a bug" # Get the new representation (new file) - new_representation = io.find_one({ + new_representation = legacy_io.find_one({ "type": "representation", "parent": new_version["_id"], "name": current_representation["name"] @@ -328,7 +330,7 @@ def update_package(set_container, representation): """ # Load the original package data - current_representation = io.find_one({ + current_representation = legacy_io.find_one({ "_id": ObjectId(set_container['representation']), "type": "representation" }) @@ -479,10 +481,10 @@ def update_scene(set_container, containers, current_data, new_data, new_file): # Check whether the conversion can be done by the Loader. # They *must* use the same asset, subset and Loader for # `update_container` to make sense. 
- old = io.find_one({ + old = legacy_io.find_one({ "_id": ObjectId(representation_current) }) - new = io.find_one({ + new = legacy_io.find_one({ "_id": ObjectId(representation_new) }) is_valid = compare_representations(old=old, new=new) diff --git a/openpype/hosts/maya/plugins/create/create_render.py b/openpype/hosts/maya/plugins/create/create_render.py index 4f0a394f85..1e3fc3f0ae 100644 --- a/openpype/hosts/maya/plugins/create/create_render.py +++ b/openpype/hosts/maya/plugins/create/create_render.py @@ -19,9 +19,10 @@ from openpype.api import ( get_project_settings, get_asset) from openpype.modules import ModulesManager -from openpype.pipeline import CreatorError - -from avalon.api import Session +from openpype.pipeline import ( + CreatorError, + legacy_io, +) class CreateRender(plugin.Creator): @@ -104,7 +105,7 @@ class CreateRender(plugin.Creator): self.deadline_servers = {} return self._project_settings = get_project_settings( - Session["AVALON_PROJECT"]) + legacy_io.Session["AVALON_PROJECT"]) # project_settings/maya/create/CreateRender/aov_separator try: diff --git a/openpype/hosts/maya/plugins/create/create_unreal_skeletalmesh.py b/openpype/hosts/maya/plugins/create/create_unreal_skeletalmesh.py index a6deeeee2e..1a8e84c80d 100644 --- a/openpype/hosts/maya/plugins/create/create_unreal_skeletalmesh.py +++ b/openpype/hosts/maya/plugins/create/create_unreal_skeletalmesh.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- """Creator for Unreal Skeletal Meshes.""" from openpype.hosts.maya.api import plugin, lib -from avalon.api import Session +from openpype.pipeline import legacy_io from maya import cmds # noqa @@ -26,7 +26,7 @@ class CreateUnrealSkeletalMesh(plugin.Creator): dynamic_data = super(CreateUnrealSkeletalMesh, cls).get_dynamic_data( variant, task_name, asset_id, project_name, host_name ) - dynamic_data["asset"] = Session.get("AVALON_ASSET") + dynamic_data["asset"] = legacy_io.Session.get("AVALON_ASSET") return dynamic_data def process(self): diff --git a/openpype/hosts/maya/plugins/create/create_unreal_staticmesh.py b/openpype/hosts/maya/plugins/create/create_unreal_staticmesh.py index f62d15fe62..4e4417ff34 100644 --- a/openpype/hosts/maya/plugins/create/create_unreal_staticmesh.py +++ b/openpype/hosts/maya/plugins/create/create_unreal_staticmesh.py @@ -1,8 +1,8 @@ # -*- coding: utf-8 -*- """Creator for Unreal Static Meshes.""" from openpype.hosts.maya.api import plugin, lib -from avalon.api import Session from openpype.api import get_project_settings +from openpype.pipeline import legacy_io from maya import cmds # noqa @@ -18,7 +18,7 @@ class CreateUnrealStaticMesh(plugin.Creator): """Constructor.""" super(CreateUnrealStaticMesh, self).__init__(*args, **kwargs) self._project_settings = get_project_settings( - Session["AVALON_PROJECT"]) + legacy_io.Session["AVALON_PROJECT"]) @classmethod def get_dynamic_data( @@ -27,7 +27,7 @@ class CreateUnrealStaticMesh(plugin.Creator): dynamic_data = super(CreateUnrealStaticMesh, cls).get_dynamic_data( variant, task_name, asset_id, project_name, host_name ) - dynamic_data["asset"] = Session.get("AVALON_ASSET") + dynamic_data["asset"] = legacy_io.Session.get("AVALON_ASSET") return dynamic_data def process(self): diff --git a/openpype/hosts/maya/plugins/create/create_vrayscene.py b/openpype/hosts/maya/plugins/create/create_vrayscene.py index fa9c59e016..38cf5818a6 100644 --- a/openpype/hosts/maya/plugins/create/create_vrayscene.py +++ b/openpype/hosts/maya/plugins/create/create_vrayscene.py @@ -19,11 +19,12 @@ from openpype.api import ( 
get_project_settings ) -from openpype.pipeline import CreatorError +from openpype.pipeline import ( + CreatorError, + legacy_io, +) from openpype.modules import ModulesManager -from avalon.api import Session - class CreateVRayScene(plugin.Creator): """Create Vray Scene.""" @@ -44,7 +45,7 @@ class CreateVRayScene(plugin.Creator): self.deadline_servers = {} return self._project_settings = get_project_settings( - Session["AVALON_PROJECT"]) + legacy_io.Session["AVALON_PROJECT"]) try: default_servers = deadline_settings["deadline_urls"] diff --git a/openpype/hosts/maya/plugins/inventory/import_modelrender.py b/openpype/hosts/maya/plugins/inventory/import_modelrender.py index c2e43f196f..a5367f16e5 100644 --- a/openpype/hosts/maya/plugins/inventory/import_modelrender.py +++ b/openpype/hosts/maya/plugins/inventory/import_modelrender.py @@ -1,9 +1,10 @@ import json -from avalon import io from bson.objectid import ObjectId + from openpype.pipeline import ( InventoryAction, get_representation_context, + legacy_io, ) from openpype.hosts.maya.api.lib import ( maintained_selection, @@ -39,7 +40,7 @@ class ImportModelRender(InventoryAction): else: nodes.append(n) - repr_doc = io.find_one({ + repr_doc = legacy_io.find_one({ "_id": ObjectId(container["representation"]), }) version_id = repr_doc["parent"] @@ -63,7 +64,7 @@ class ImportModelRender(InventoryAction): from maya import cmds # Get representations of shader file and relationships - look_repr = io.find_one({ + look_repr = legacy_io.find_one({ "type": "representation", "parent": version_id, "name": {"$regex": self.scene_type_regex}, @@ -72,7 +73,7 @@ class ImportModelRender(InventoryAction): print("No model render sets for this model version..") return - json_repr = io.find_one({ + json_repr = legacy_io.find_one({ "type": "representation", "parent": version_id, "name": self.look_data_type, diff --git a/openpype/hosts/maya/plugins/load/load_audio.py b/openpype/hosts/maya/plugins/load/load_audio.py index d8844ffea6..ce814e1299 100644 --- a/openpype/hosts/maya/plugins/load/load_audio.py +++ b/openpype/hosts/maya/plugins/load/load_audio.py @@ -1,8 +1,9 @@ from maya import cmds, mel -from avalon import io + from openpype.pipeline import ( + legacy_io, load, - get_representation_path + get_representation_path, ) from openpype.hosts.maya.api.pipeline import containerise from openpype.hosts.maya.api.lib import unique_namespace @@ -64,9 +65,9 @@ class AudioLoader(load.LoaderPlugin): ) # Set frame range. - version = io.find_one({"_id": representation["parent"]}) - subset = io.find_one({"_id": version["parent"]}) - asset = io.find_one({"_id": subset["parent"]}) + version = legacy_io.find_one({"_id": representation["parent"]}) + subset = legacy_io.find_one({"_id": version["parent"]}) + asset = legacy_io.find_one({"_id": subset["parent"]}) audio_node.sourceStart.set(1 - asset["data"]["frameStart"]) audio_node.sourceEnd.set(asset["data"]["frameEnd"]) diff --git a/openpype/hosts/maya/plugins/load/load_image_plane.py b/openpype/hosts/maya/plugins/load/load_image_plane.py index b250986489..b67c2cb209 100644 --- a/openpype/hosts/maya/plugins/load/load_image_plane.py +++ b/openpype/hosts/maya/plugins/load/load_image_plane.py @@ -1,7 +1,7 @@ from Qt import QtWidgets, QtCore -from avalon import io from openpype.pipeline import ( + legacy_io, load, get_representation_path ) @@ -216,9 +216,9 @@ class ImagePlaneLoader(load.LoaderPlugin): ) # Set frame range. 
- version = io.find_one({"_id": representation["parent"]}) - subset = io.find_one({"_id": version["parent"]}) - asset = io.find_one({"_id": subset["parent"]}) + version = legacy_io.find_one({"_id": representation["parent"]}) + subset = legacy_io.find_one({"_id": version["parent"]}) + asset = legacy_io.find_one({"_id": subset["parent"]}) start_frame = asset["data"]["frameStart"] end_frame = asset["data"]["frameEnd"] image_plane_shape.frameOffset.set(1 - start_frame) diff --git a/openpype/hosts/maya/plugins/load/load_look.py b/openpype/hosts/maya/plugins/load/load_look.py index 8f02ed59b8..80eac8e0b5 100644 --- a/openpype/hosts/maya/plugins/load/load_look.py +++ b/openpype/hosts/maya/plugins/load/load_look.py @@ -5,8 +5,10 @@ from collections import defaultdict from Qt import QtWidgets -from avalon import io -from openpype.pipeline import get_representation_path +from openpype.pipeline import ( + legacy_io, + get_representation_path, +) import openpype.hosts.maya.api.plugin from openpype.hosts.maya.api import lib from openpype.widgets.message_window import ScrollMessageBox @@ -71,7 +73,7 @@ class LookLoader(openpype.hosts.maya.api.plugin.ReferenceLoader): shader_nodes = cmds.ls(members, type='shadingEngine') nodes = set(self._get_nodes_with_shader(shader_nodes)) - json_representation = io.find_one({ + json_representation = legacy_io.find_one({ "type": "representation", "parent": representation['parent'], "name": "json" diff --git a/openpype/hosts/maya/plugins/load/load_reference.py b/openpype/hosts/maya/plugins/load/load_reference.py index a7222edfd4..a8875cf216 100644 --- a/openpype/hosts/maya/plugins/load/load_reference.py +++ b/openpype/hosts/maya/plugins/load/load_reference.py @@ -1,10 +1,12 @@ import os from maya import cmds -from avalon import api from openpype.api import get_project_settings from openpype.lib import get_creator_by_name -from openpype.pipeline import legacy_create +from openpype.pipeline import ( + legacy_io, + legacy_create, +) import openpype.hosts.maya.api.plugin from openpype.hosts.maya.api.lib import maintained_selection @@ -143,7 +145,7 @@ class ReferenceLoader(openpype.hosts.maya.api.plugin.ReferenceLoader): roots = cmds.ls(self[:], assemblies=True, long=True) assert roots, "No root nodes in rig, this is a bug." 
- asset = api.Session["AVALON_ASSET"] + asset = legacy_io.Session["AVALON_ASSET"] dependency = str(context["representation"]["_id"]) self.log.info("Creating subset: {}".format(namespace)) diff --git a/openpype/hosts/maya/plugins/load/load_vrayproxy.py b/openpype/hosts/maya/plugins/load/load_vrayproxy.py index 69d54df62b..22d56139f6 100644 --- a/openpype/hosts/maya/plugins/load/load_vrayproxy.py +++ b/openpype/hosts/maya/plugins/load/load_vrayproxy.py @@ -11,9 +11,9 @@ from bson.objectid import ObjectId import maya.cmds as cmds -from avalon import io from openpype.api import get_project_settings from openpype.pipeline import ( + legacy_io, load, get_representation_path ) @@ -185,12 +185,11 @@ class VRayProxyLoader(load.LoaderPlugin): """ self.log.debug( "Looking for abc in published representations of this version.") - abc_rep = io.find_one( - { - "type": "representation", - "parent": ObjectId(version_id), - "name": "abc" - }) + abc_rep = legacy_io.find_one({ + "type": "representation", + "parent": ObjectId(version_id), + "name": "abc" + }) if abc_rep: self.log.debug("Found, we'll link alembic to vray proxy.") diff --git a/openpype/hosts/maya/plugins/load/load_yeti_cache.py b/openpype/hosts/maya/plugins/load/load_yeti_cache.py index c64e1c540b..fb903785ae 100644 --- a/openpype/hosts/maya/plugins/load/load_yeti_cache.py +++ b/openpype/hosts/maya/plugins/load/load_yeti_cache.py @@ -7,9 +7,9 @@ from pprint import pprint from maya import cmds -from avalon import io from openpype.api import get_project_settings from openpype.pipeline import ( + legacy_io, load, get_representation_path ) @@ -111,11 +111,11 @@ class YetiCacheLoader(load.LoaderPlugin): def update(self, container, representation): - io.install() + legacy_io.install() namespace = container["namespace"] container_node = container["objectName"] - fur_settings = io.find_one( + fur_settings = legacy_io.find_one( {"parent": representation["parent"], "name": "fursettings"} ) diff --git a/openpype/hosts/maya/plugins/publish/collect_render.py b/openpype/hosts/maya/plugins/publish/collect_render.py index a525b562f3..2ce7c02737 100644 --- a/openpype/hosts/maya/plugins/publish/collect_render.py +++ b/openpype/hosts/maya/plugins/publish/collect_render.py @@ -49,8 +49,8 @@ import maya.app.renderSetup.model.renderSetup as renderSetup import pyblish.api -from avalon import api from openpype.lib import get_formatted_current_time +from openpype.pipeline import legacy_io from openpype.hosts.maya.api.lib_renderproducts import get as get_layer_render_products # noqa: E501 from openpype.hosts.maya.api import lib @@ -93,7 +93,7 @@ class CollectMayaRender(pyblish.api.ContextPlugin): render_globals = render_instance collected_render_layers = render_instance.data["setMembers"] filepath = context.data["currentFile"].replace("\\", "/") - asset = api.Session["AVALON_ASSET"] + asset = legacy_io.Session["AVALON_ASSET"] workspace = context.data["workspaceDir"] deadline_settings = ( diff --git a/openpype/hosts/maya/plugins/publish/collect_review.py b/openpype/hosts/maya/plugins/publish/collect_review.py index 60183341f9..1af92c3bfc 100644 --- a/openpype/hosts/maya/plugins/publish/collect_review.py +++ b/openpype/hosts/maya/plugins/publish/collect_review.py @@ -2,7 +2,8 @@ from maya import cmds, mel import pymel.core as pm import pyblish.api -import avalon.api + +from openpype.pipeline import legacy_io class CollectReview(pyblish.api.InstancePlugin): @@ -19,7 +20,7 @@ class CollectReview(pyblish.api.InstancePlugin): self.log.debug('instance: {}'.format(instance)) - 
task = avalon.api.Session["AVALON_TASK"] + task = legacy_io.Session["AVALON_TASK"] # get cameras members = instance.data['setMembers'] diff --git a/openpype/hosts/maya/plugins/publish/collect_vrayscene.py b/openpype/hosts/maya/plugins/publish/collect_vrayscene.py index 327fc836dc..afdb570cbc 100644 --- a/openpype/hosts/maya/plugins/publish/collect_vrayscene.py +++ b/openpype/hosts/maya/plugins/publish/collect_vrayscene.py @@ -6,7 +6,8 @@ import maya.app.renderSetup.model.renderSetup as renderSetup from maya import cmds import pyblish.api -from avalon import api + +from openpype.pipeline import legacy_io from openpype.lib import get_formatted_current_time from openpype.hosts.maya.api import lib @@ -117,7 +118,7 @@ class CollectVrayScene(pyblish.api.InstancePlugin): # instance subset "family": "vrayscene_layer", "families": ["vrayscene_layer"], - "asset": api.Session["AVALON_ASSET"], + "asset": legacy_io.Session["AVALON_ASSET"], "time": get_formatted_current_time(), "author": context.data["user"], # Add source to allow tracing back to the scene from diff --git a/openpype/hosts/maya/plugins/publish/collect_workfile.py b/openpype/hosts/maya/plugins/publish/collect_workfile.py index ee676f50d0..12d86869ea 100644 --- a/openpype/hosts/maya/plugins/publish/collect_workfile.py +++ b/openpype/hosts/maya/plugins/publish/collect_workfile.py @@ -1,7 +1,8 @@ -import pyblish.api -import avalon.api import os +import pyblish.api + from maya import cmds +from openpype.pipeline import legacy_io class CollectWorkfile(pyblish.api.ContextPlugin): @@ -19,7 +20,7 @@ class CollectWorkfile(pyblish.api.ContextPlugin): folder, file = os.path.split(current_file) filename, ext = os.path.splitext(file) - task = avalon.api.Session["AVALON_TASK"] + task = legacy_io.Session["AVALON_TASK"] data = {} diff --git a/openpype/hosts/maya/plugins/publish/extract_look.py b/openpype/hosts/maya/plugins/publish/extract_look.py index 6fcc308f78..881705b92c 100644 --- a/openpype/hosts/maya/plugins/publish/extract_look.py +++ b/openpype/hosts/maya/plugins/publish/extract_look.py @@ -12,9 +12,9 @@ from collections import OrderedDict from maya import cmds # noqa import pyblish.api -from avalon import io import openpype.api +from openpype.pipeline import legacy_io from openpype.hosts.maya.api import lib # Modes for transfer @@ -40,7 +40,7 @@ def find_paths_by_hash(texture_hash): """ key = "data.sourceHashes.{0}".format(texture_hash) - return io.distinct(key, {"type": "version"}) + return legacy_io.distinct(key, {"type": "version"}) def maketx(source, destination, *args): diff --git a/openpype/hosts/maya/plugins/publish/submit_maya_muster.py b/openpype/hosts/maya/plugins/publish/submit_maya_muster.py index f852904580..3ce9ec714c 100644 --- a/openpype/hosts/maya/plugins/publish/submit_maya_muster.py +++ b/openpype/hosts/maya/plugins/publish/submit_maya_muster.py @@ -8,10 +8,9 @@ import requests from maya import cmds -from avalon import api - import pyblish.api from openpype.hosts.maya.api import lib +from openpype.pipeline import legacy_io from openpype.api import get_system_settings @@ -503,7 +502,7 @@ class MayaSubmitMuster(pyblish.api.InstancePlugin): "TOOL_ENV" ] environment = dict({key: os.environ[key] for key in keys - if key in os.environ}, **api.Session) + if key in os.environ}, **legacy_io.Session) # self.log.debug("enviro: {}".format(pprint(environment))) for path in os.environ: if path.lower().startswith('pype_'): diff --git a/openpype/hosts/maya/plugins/publish/validate_model_name.py 
b/openpype/hosts/maya/plugins/publish/validate_model_name.py index 3757e13a9b..50acf2b8b7 100644 --- a/openpype/hosts/maya/plugins/publish/validate_model_name.py +++ b/openpype/hosts/maya/plugins/publish/validate_model_name.py @@ -1,16 +1,17 @@ # -*- coding: utf-8 -*- """Validate model nodes names.""" +import os +import re from maya import cmds import pyblish.api + import openpype.api -import avalon.api +from openpype.pipeline import legacy_io import openpype.hosts.maya.api.action from openpype.hosts.maya.api.shader_definition_editor import ( DEFINITION_FILENAME) from openpype.lib.mongo import OpenPypeMongoConnection import gridfs -import re -import os class ValidateModelName(pyblish.api.InstancePlugin): @@ -68,7 +69,7 @@ class ValidateModelName(pyblish.api.InstancePlugin): invalid.append(top_group) else: if "asset" in r.groupindex: - if m.group("asset") != avalon.api.Session["AVALON_ASSET"]: + if m.group("asset") != legacy_io.Session["AVALON_ASSET"]: cls.log.error("Invalid asset name in top level group.") return top_group if "subset" in r.groupindex: @@ -76,7 +77,7 @@ class ValidateModelName(pyblish.api.InstancePlugin): cls.log.error("Invalid subset name in top level group.") return top_group if "project" in r.groupindex: - if m.group("project") != avalon.api.Session["AVALON_PROJECT"]: + if m.group("project") != legacy_io.Session["AVALON_PROJECT"]: cls.log.error("Invalid project name in top level group.") return top_group diff --git a/openpype/hosts/maya/plugins/publish/validate_node_ids_in_database.py b/openpype/hosts/maya/plugins/publish/validate_node_ids_in_database.py index c5f675c8ca..068d6b38a1 100644 --- a/openpype/hosts/maya/plugins/publish/validate_node_ids_in_database.py +++ b/openpype/hosts/maya/plugins/publish/validate_node_ids_in_database.py @@ -1,8 +1,7 @@ import pyblish.api -from avalon import io - import openpype.api +from openpype.pipeline import legacy_io import openpype.hosts.maya.api.action from openpype.hosts.maya.api import lib @@ -43,7 +42,7 @@ class ValidateNodeIdsInDatabase(pyblish.api.InstancePlugin): nodes=instance[:]) # check ids against database ids - db_asset_ids = io.find({"type": "asset"}).distinct("_id") + db_asset_ids = legacy_io.find({"type": "asset"}).distinct("_id") db_asset_ids = set(str(i) for i in db_asset_ids) # Get all asset IDs diff --git a/openpype/hosts/maya/plugins/publish/validate_node_ids_related.py b/openpype/hosts/maya/plugins/publish/validate_node_ids_related.py index 276b6713f4..38407e4176 100644 --- a/openpype/hosts/maya/plugins/publish/validate_node_ids_related.py +++ b/openpype/hosts/maya/plugins/publish/validate_node_ids_related.py @@ -1,9 +1,8 @@ import pyblish.api import openpype.api -from avalon import io +from openpype.pipeline import legacy_io import openpype.hosts.maya.api.action - from openpype.hosts.maya.api import lib @@ -38,7 +37,7 @@ class ValidateNodeIDsRelated(pyblish.api.InstancePlugin): invalid = list() asset = instance.data['asset'] - asset_data = io.find_one( + asset_data = legacy_io.find_one( { "name": asset, "type": "asset" diff --git a/openpype/hosts/maya/plugins/publish/validate_renderlayer_aovs.py b/openpype/hosts/maya/plugins/publish/validate_renderlayer_aovs.py index 4eb445ac68..e65150eb0f 100644 --- a/openpype/hosts/maya/plugins/publish/validate_renderlayer_aovs.py +++ b/openpype/hosts/maya/plugins/publish/validate_renderlayer_aovs.py @@ -1,7 +1,7 @@ import pyblish.api import openpype.hosts.maya.api.action -from avalon import io +from openpype.pipeline import legacy_io import openpype.api @@ -48,8 +48,8 @@ 
class ValidateRenderLayerAOVs(pyblish.api.InstancePlugin): def validate_subset_registered(self, asset_name, subset_name): """Check if subset is registered in the database under the asset""" - asset = io.find_one({"type": "asset", "name": asset_name}) - is_valid = io.find_one({ + asset = legacy_io.find_one({"type": "asset", "name": asset_name}) + is_valid = legacy_io.find_one({ "type": "subset", "name": subset_name, "parent": asset["_id"] diff --git a/openpype/hosts/maya/plugins/publish/validate_unreal_staticmesh_naming.py b/openpype/hosts/maya/plugins/publish/validate_unreal_staticmesh_naming.py index 43f6c85827..33788d1835 100644 --- a/openpype/hosts/maya/plugins/publish/validate_unreal_staticmesh_naming.py +++ b/openpype/hosts/maya/plugins/publish/validate_unreal_staticmesh_naming.py @@ -1,12 +1,12 @@ # -*- coding: utf-8 -*- """Validator for correct naming of Static Meshes.""" -from maya import cmds # noqa +import re + import pyblish.api import openpype.api import openpype.hosts.maya.api.action -from avalon.api import Session +from openpype.pipeline import legacy_io from openpype.api import get_project_settings -import re class ValidateUnrealStaticMeshName(pyblish.api.InstancePlugin): @@ -63,7 +63,9 @@ class ValidateUnrealStaticMeshName(pyblish.api.InstancePlugin): invalid = [] - project_settings = get_project_settings(Session["AVALON_PROJECT"]) + project_settings = get_project_settings( + legacy_io.Session["AVALON_PROJECT"] + ) collision_prefixes = ( project_settings ["maya"] From dc0c46dff9e121b333f98b0511dd45bb1920a344 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 14 Apr 2022 12:17:36 +0200 Subject: [PATCH 160/357] replaced avalon imports in nuke --- openpype/hosts/nuke/api/command.py | 15 ++++----- openpype/hosts/nuke/api/lib.py | 32 +++++++++++-------- .../hosts/nuke/plugins/load/load_backdrop.py | 6 ++-- .../nuke/plugins/load/load_camera_abc.py | 6 ++-- openpype/hosts/nuke/plugins/load/load_clip.py | 10 +++--- .../hosts/nuke/plugins/load/load_effects.py | 7 ++-- .../nuke/plugins/load/load_effects_ip.py | 7 ++-- .../hosts/nuke/plugins/load/load_gizmo.py | 7 ++-- .../hosts/nuke/plugins/load/load_gizmo_ip.py | 6 ++-- .../hosts/nuke/plugins/load/load_image.py | 6 ++-- .../hosts/nuke/plugins/load/load_model.py | 7 ++-- .../nuke/plugins/load/load_script_precomp.py | 7 ++-- .../nuke/plugins/publish/collect_reads.py | 9 ++++-- .../plugins/publish/precollect_instances.py | 7 ++-- .../nuke/plugins/publish/precollect_writes.py | 9 ++++-- .../nuke/plugins/publish/validate_script.py | 5 +-- 16 files changed, 78 insertions(+), 68 deletions(-) diff --git a/openpype/hosts/nuke/api/command.py b/openpype/hosts/nuke/api/command.py index 6f74c08e97..c756c48a12 100644 --- a/openpype/hosts/nuke/api/command.py +++ b/openpype/hosts/nuke/api/command.py @@ -3,8 +3,7 @@ import contextlib import nuke from bson.objectid import ObjectId -from avalon import api, io - +from openpype.pipeline import legacy_io log = logging.getLogger(__name__) @@ -15,11 +14,11 @@ def reset_frame_range(): displayed handles """ - fps = float(api.Session.get("AVALON_FPS", 25)) + fps = float(legacy_io.Session.get("AVALON_FPS", 25)) nuke.root()["fps"].setValue(fps) - name = api.Session["AVALON_ASSET"] - asset = io.find_one({"name": name, "type": "asset"}) + name = legacy_io.Session["AVALON_ASSET"] + asset = legacy_io.find_one({"name": name, "type": "asset"}) asset_data = asset["data"] handles = get_handles(asset) @@ -71,10 +70,10 @@ def get_handles(asset): if "visualParent" in data: vp = data["visualParent"] if vp is not None: - 
parent_asset = io.find_one({"_id": ObjectId(vp)}) + parent_asset = legacy_io.find_one({"_id": ObjectId(vp)}) if parent_asset is None: - parent_asset = io.find_one({"_id": ObjectId(asset["parent"])}) + parent_asset = legacy_io.find_one({"_id": ObjectId(asset["parent"])}) if parent_asset is not None: return get_handles(parent_asset) @@ -84,7 +83,7 @@ def get_handles(asset): def reset_resolution(): """Set resolution to project resolution.""" - project = io.find_one({"type": "project"}) + project = legacy_io.find_one({"type": "project"}) p_data = project["data"] width = p_data.get("resolution_width", diff --git a/openpype/hosts/nuke/api/lib.py b/openpype/hosts/nuke/api/lib.py index e05c6aecbd..eafb707249 100644 --- a/openpype/hosts/nuke/api/lib.py +++ b/openpype/hosts/nuke/api/lib.py @@ -10,8 +10,6 @@ from bson.objectid import ObjectId import nuke -from avalon import api, io - from openpype.api import ( Logger, Anatomy, @@ -26,7 +24,10 @@ from openpype.tools.utils import host_tools from openpype.lib.path_tools import HostDirmap from openpype.settings import get_project_settings from openpype.modules import ModulesManager -from openpype.pipeline import discover_legacy_creator_plugins +from openpype.pipeline import ( + discover_legacy_creator_plugins, + legacy_io, +) from .workio import ( save_file, @@ -569,7 +570,7 @@ def check_inventory_versions(): avalon_knob_data = read(node) # get representation from io - representation = io.find_one({ + representation = legacy_io.find_one({ "type": "representation", "_id": ObjectId(avalon_knob_data["representation"]) }) @@ -583,13 +584,13 @@ def check_inventory_versions(): continue # Get start frame from version data - version = io.find_one({ + version = legacy_io.find_one({ "type": "version", "_id": representation["parent"] }) # get all versions in list - versions = io.find({ + versions = legacy_io.find({ "type": "version", "parent": version["parent"] }).distinct('name') @@ -726,8 +727,8 @@ def format_anatomy(data): file = script_name() data["version"] = get_version_from_path(file) - project_doc = io.find_one({"type": "project"}) - asset_doc = io.find_one({ + project_doc = legacy_io.find_one({"type": "project"}) + asset_doc = legacy_io.find_one({ "type": "asset", "name": data["avalon"]["asset"] }) @@ -1138,8 +1139,11 @@ class WorkfileSettings(object): nodes=None, **kwargs): Context._project_doc = kwargs.get( - "project") or io.find_one({"type": "project"}) - self._asset = kwargs.get("asset_name") or api.Session["AVALON_ASSET"] + "project") or legacy_io.find_one({"type": "project"}) + self._asset = ( + kwargs.get("asset_name") + or legacy_io.Session["AVALON_ASSET"] + ) self._asset_entity = get_asset(self._asset) self._root_node = root_node or nuke.root() self._nodes = self.get_nodes(nodes=nodes) @@ -1486,9 +1490,9 @@ class WorkfileSettings(object): def reset_resolution(self): """Set resolution to project resolution.""" log.info("Resetting resolution") - project = io.find_one({"type": "project"}) - asset = api.Session["AVALON_ASSET"] - asset = io.find_one({"name": asset, "type": "asset"}) + project = legacy_io.find_one({"type": "project"}) + asset = legacy_io.Session["AVALON_ASSET"] + asset = legacy_io.find_one({"name": asset, "type": "asset"}) asset_data = asset.get('data', {}) data = { @@ -1608,7 +1612,7 @@ def get_hierarchical_attr(entity, attr, default=None): ): parent_id = entity['data']['visualParent'] - parent = io.find_one({'_id': parent_id}) + parent = legacy_io.find_one({'_id': parent_id}) return get_hierarchical_attr(parent, attr) diff --git 
a/openpype/hosts/nuke/plugins/load/load_backdrop.py b/openpype/hosts/nuke/plugins/load/load_backdrop.py index 36cec6f4c5..91f1c80b2a 100644 --- a/openpype/hosts/nuke/plugins/load/load_backdrop.py +++ b/openpype/hosts/nuke/plugins/load/load_backdrop.py @@ -1,8 +1,8 @@ -from avalon import io import nuke import nukescripts from openpype.pipeline import ( + legacy_io, load, get_representation_path, ) @@ -188,7 +188,7 @@ class LoadBackdropNodes(load.LoaderPlugin): # get main variables # Get version from io - version = io.find_one({ + version = legacy_io.find_one({ "type": "version", "_id": representation["parent"] }) @@ -237,7 +237,7 @@ class LoadBackdropNodes(load.LoaderPlugin): GN["name"].setValue(object_name) # get all versions in list - versions = io.find({ + versions = legacy_io.find({ "type": "version", "parent": version["parent"] }).distinct('name') diff --git a/openpype/hosts/nuke/plugins/load/load_camera_abc.py b/openpype/hosts/nuke/plugins/load/load_camera_abc.py index fb5f7f8ede..964ca5ec90 100644 --- a/openpype/hosts/nuke/plugins/load/load_camera_abc.py +++ b/openpype/hosts/nuke/plugins/load/load_camera_abc.py @@ -1,7 +1,7 @@ import nuke -from avalon import io from openpype.pipeline import ( + legacy_io, load, get_representation_path, ) @@ -102,7 +102,7 @@ class AlembicCameraLoader(load.LoaderPlugin): None """ # Get version from io - version = io.find_one({ + version = legacy_io.find_one({ "type": "version", "_id": representation["parent"] }) @@ -175,7 +175,7 @@ class AlembicCameraLoader(load.LoaderPlugin): """ Coloring a node by correct color by actual version """ # get all versions in list - versions = io.find({ + versions = legacy_io.find({ "type": "version", "parent": version["parent"] }).distinct('name') diff --git a/openpype/hosts/nuke/plugins/load/load_clip.py b/openpype/hosts/nuke/plugins/load/load_clip.py index 9b0588feac..681561e303 100644 --- a/openpype/hosts/nuke/plugins/load/load_clip.py +++ b/openpype/hosts/nuke/plugins/load/load_clip.py @@ -1,8 +1,10 @@ import nuke import qargparse -from avalon import io -from openpype.pipeline import get_representation_path +from openpype.pipeline import ( + legacy_io, + get_representation_path, +) from openpype.hosts.nuke.api.lib import ( get_imageio_input_colorspace, maintained_selection @@ -194,7 +196,7 @@ class LoadClip(plugin.NukeLoader): start_at_workfile = bool("start at" in read_node['frame_mode'].value()) - version = io.find_one({ + version = legacy_io.find_one({ "type": "version", "_id": representation["parent"] }) @@ -264,7 +266,7 @@ class LoadClip(plugin.NukeLoader): # change color of read_node # get all versions in list - versions = io.find({ + versions = legacy_io.find({ "type": "version", "parent": version["parent"] }).distinct('name') diff --git a/openpype/hosts/nuke/plugins/load/load_effects.py b/openpype/hosts/nuke/plugins/load/load_effects.py index 56c5acbb0a..6a30330ed0 100644 --- a/openpype/hosts/nuke/plugins/load/load_effects.py +++ b/openpype/hosts/nuke/plugins/load/load_effects.py @@ -3,9 +3,8 @@ from collections import OrderedDict import nuke import six -from avalon import io - from openpype.pipeline import ( + legacy_io, load, get_representation_path, ) @@ -149,7 +148,7 @@ class LoadEffects(load.LoaderPlugin): """ # get main variables # Get version from io - version = io.find_one({ + version = legacy_io.find_one({ "type": "version", "_id": representation["parent"] }) @@ -245,7 +244,7 @@ class LoadEffects(load.LoaderPlugin): self.connect_read_node(GN, namespace, json_f["assignTo"]) # get all versions in list 
- versions = io.find({ + versions = legacy_io.find({ "type": "version", "parent": version["parent"] }).distinct('name') diff --git a/openpype/hosts/nuke/plugins/load/load_effects_ip.py b/openpype/hosts/nuke/plugins/load/load_effects_ip.py index 0bc5f5a514..eaf151b3b8 100644 --- a/openpype/hosts/nuke/plugins/load/load_effects_ip.py +++ b/openpype/hosts/nuke/plugins/load/load_effects_ip.py @@ -3,9 +3,8 @@ from collections import OrderedDict import six import nuke -from avalon import io - from openpype.pipeline import ( + legacy_io, load, get_representation_path, ) @@ -154,7 +153,7 @@ class LoadEffectsInputProcess(load.LoaderPlugin): # get main variables # Get version from io - version = io.find_one({ + version = legacy_io.find_one({ "type": "version", "_id": representation["parent"] }) @@ -252,7 +251,7 @@ class LoadEffectsInputProcess(load.LoaderPlugin): # return # get all versions in list - versions = io.find({ + versions = legacy_io.find({ "type": "version", "parent": version["parent"] }).distinct('name') diff --git a/openpype/hosts/nuke/plugins/load/load_gizmo.py b/openpype/hosts/nuke/plugins/load/load_gizmo.py index 6f2b191be9..4ea9d64d7d 100644 --- a/openpype/hosts/nuke/plugins/load/load_gizmo.py +++ b/openpype/hosts/nuke/plugins/load/load_gizmo.py @@ -1,8 +1,7 @@ import nuke -from avalon import io - from openpype.pipeline import ( + legacy_io, load, get_representation_path, ) @@ -102,7 +101,7 @@ class LoadGizmo(load.LoaderPlugin): # get main variables # Get version from io - version = io.find_one({ + version = legacy_io.find_one({ "type": "version", "_id": representation["parent"] }) @@ -150,7 +149,7 @@ class LoadGizmo(load.LoaderPlugin): GN["name"].setValue(object_name) # get all versions in list - versions = io.find({ + versions = legacy_io.find({ "type": "version", "parent": version["parent"] }).distinct('name') diff --git a/openpype/hosts/nuke/plugins/load/load_gizmo_ip.py b/openpype/hosts/nuke/plugins/load/load_gizmo_ip.py index 46134afcf0..38dd70935e 100644 --- a/openpype/hosts/nuke/plugins/load/load_gizmo_ip.py +++ b/openpype/hosts/nuke/plugins/load/load_gizmo_ip.py @@ -1,8 +1,8 @@ import nuke import six -from avalon import io from openpype.pipeline import ( + legacy_io, load, get_representation_path, ) @@ -108,7 +108,7 @@ class LoadGizmoInputProcess(load.LoaderPlugin): # get main variables # Get version from io - version = io.find_one({ + version = legacy_io.find_one({ "type": "version", "_id": representation["parent"] }) @@ -156,7 +156,7 @@ class LoadGizmoInputProcess(load.LoaderPlugin): GN["name"].setValue(object_name) # get all versions in list - versions = io.find({ + versions = legacy_io.find({ "type": "version", "parent": version["parent"] }).distinct('name') diff --git a/openpype/hosts/nuke/plugins/load/load_image.py b/openpype/hosts/nuke/plugins/load/load_image.py index 9a175a0cba..6df286a4f7 100644 --- a/openpype/hosts/nuke/plugins/load/load_image.py +++ b/openpype/hosts/nuke/plugins/load/load_image.py @@ -1,9 +1,9 @@ import nuke import qargparse -from avalon import io from openpype.pipeline import ( + legacy_io, load, get_representation_path, ) @@ -186,13 +186,13 @@ class LoadImage(load.LoaderPlugin): format(frame_number, "0{}".format(padding))) # Get start frame from version data - version = io.find_one({ + version = legacy_io.find_one({ "type": "version", "_id": representation["parent"] }) # get all versions in list - versions = io.find({ + versions = legacy_io.find({ "type": "version", "parent": version["parent"] }).distinct('name') diff --git 
a/openpype/hosts/nuke/plugins/load/load_model.py b/openpype/hosts/nuke/plugins/load/load_model.py index e445beca05..9788bb25d2 100644 --- a/openpype/hosts/nuke/plugins/load/load_model.py +++ b/openpype/hosts/nuke/plugins/load/load_model.py @@ -1,6 +1,7 @@ import nuke -from avalon import io + from openpype.pipeline import ( + legacy_io, load, get_representation_path, ) @@ -99,7 +100,7 @@ class AlembicModelLoader(load.LoaderPlugin): None """ # Get version from io - version = io.find_one({ + version = legacy_io.find_one({ "type": "version", "_id": representation["parent"] }) @@ -172,7 +173,7 @@ class AlembicModelLoader(load.LoaderPlugin): """ Coloring a node by correct color by actual version """ # get all versions in list - versions = io.find({ + versions = legacy_io.find({ "type": "version", "parent": version["parent"] }).distinct('name') diff --git a/openpype/hosts/nuke/plugins/load/load_script_precomp.py b/openpype/hosts/nuke/plugins/load/load_script_precomp.py index 779f101682..bd351ad785 100644 --- a/openpype/hosts/nuke/plugins/load/load_script_precomp.py +++ b/openpype/hosts/nuke/plugins/load/load_script_precomp.py @@ -1,8 +1,7 @@ import nuke -from avalon import io - from openpype.pipeline import ( + legacy_io, load, get_representation_path, ) @@ -117,13 +116,13 @@ class LinkAsGroup(load.LoaderPlugin): root = get_representation_path(representation).replace("\\", "/") # Get start frame from version data - version = io.find_one({ + version = legacy_io.find_one({ "type": "version", "_id": representation["parent"] }) # get all versions in list - versions = io.find({ + versions = legacy_io.find({ "type": "version", "parent": version["parent"] }).distinct('name') diff --git a/openpype/hosts/nuke/plugins/publish/collect_reads.py b/openpype/hosts/nuke/plugins/publish/collect_reads.py index 45e9969eb9..4d6944f523 100644 --- a/openpype/hosts/nuke/plugins/publish/collect_reads.py +++ b/openpype/hosts/nuke/plugins/publish/collect_reads.py @@ -2,7 +2,8 @@ import os import re import nuke import pyblish.api -from avalon import io, api + +from openpype.pipeline import legacy_io @pyblish.api.log @@ -15,8 +16,10 @@ class CollectNukeReads(pyblish.api.InstancePlugin): families = ["source"] def process(self, instance): - asset_data = io.find_one({"type": "asset", - "name": api.Session["AVALON_ASSET"]}) + asset_data = legacy_io.find_one({ + "type": "asset", + "name": legacy_io.Session["AVALON_ASSET"] + }) self.log.debug("asset_data: {}".format(asset_data["data"])) diff --git a/openpype/hosts/nuke/plugins/publish/precollect_instances.py b/openpype/hosts/nuke/plugins/publish/precollect_instances.py index 29c706f302..d778421bde 100644 --- a/openpype/hosts/nuke/plugins/publish/precollect_instances.py +++ b/openpype/hosts/nuke/plugins/publish/precollect_instances.py @@ -1,6 +1,7 @@ import nuke import pyblish.api -from avalon import io, api + +from openpype.pipeline import legacy_io from openpype.hosts.nuke.api.lib import ( add_publish_knob, get_avalon_knob_data @@ -19,9 +20,9 @@ class PreCollectNukeInstances(pyblish.api.ContextPlugin): sync_workfile_version_on_families = [] def process(self, context): - asset_data = io.find_one({ + asset_data = legacy_io.find_one({ "type": "asset", - "name": api.Session["AVALON_ASSET"] + "name": legacy_io.Session["AVALON_ASSET"] }) self.log.debug("asset_data: {}".format(asset_data["data"])) diff --git a/openpype/hosts/nuke/plugins/publish/precollect_writes.py b/openpype/hosts/nuke/plugins/publish/precollect_writes.py index 4826b2788f..8669f4f485 100644 --- 
a/openpype/hosts/nuke/plugins/publish/precollect_writes.py +++ b/openpype/hosts/nuke/plugins/publish/precollect_writes.py @@ -3,9 +3,12 @@ import re from pprint import pformat import nuke import pyblish.api -from avalon import io + import openpype.api as pype -from openpype.pipeline import get_representation_path +from openpype.pipeline import ( + legacy_io, + get_representation_path, +) @pyblish.api.log @@ -180,7 +183,7 @@ class CollectNukeWrites(pyblish.api.InstancePlugin): repre_doc = None if version_doc: # Try to find it's representation (Expected there is only one) - repre_doc = io.find_one( + repre_doc = legacy_io.find_one( {"type": "representation", "parent": version_doc["_id"]} ) diff --git a/openpype/hosts/nuke/plugins/publish/validate_script.py b/openpype/hosts/nuke/plugins/publish/validate_script.py index c35d09dcde..10c9e93f8b 100644 --- a/openpype/hosts/nuke/plugins/publish/validate_script.py +++ b/openpype/hosts/nuke/plugins/publish/validate_script.py @@ -1,6 +1,7 @@ import pyblish.api -from avalon import io + from openpype import lib +from openpype.pipeline import legacy_io @pyblish.api.log @@ -115,7 +116,7 @@ class ValidateScript(pyblish.api.InstancePlugin): def check_parent_hierarchical(self, entityId, attr): if entityId is None: return None - entity = io.find_one({"_id": entityId}) + entity = legacy_io.find_one({"_id": entityId}) if attr in entity['data']: self.log.info(attr) return entity['data'][attr] From f13c2d287f49688e31701f67104c4c6516fcb9a4 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 14 Apr 2022 12:18:08 +0200 Subject: [PATCH 161/357] replaced avalon imports in photoshop --- openpype/hosts/photoshop/api/launch_logic.py | 9 ++++----- openpype/hosts/photoshop/api/pipeline.py | 4 ++-- .../hosts/photoshop/plugins/publish/collect_instances.py | 4 ++-- .../photoshop/plugins/publish/validate_instance_asset.py | 7 ++++--- 4 files changed, 12 insertions(+), 12 deletions(-) diff --git a/openpype/hosts/photoshop/api/launch_logic.py b/openpype/hosts/photoshop/api/launch_logic.py index 0021905cb5..0bbb19523d 100644 --- a/openpype/hosts/photoshop/api/launch_logic.py +++ b/openpype/hosts/photoshop/api/launch_logic.py @@ -11,9 +11,8 @@ from wsrpc_aiohttp import ( from Qt import QtCore from openpype.api import Logger +from openpype.pipeline import legacy_io from openpype.tools.utils import host_tools - -from avalon import api from openpype.tools.adobe_webserver.app import WebServerTool from .ws_stub import PhotoshopServerStub @@ -320,13 +319,13 @@ class PhotoshopRoute(WebSocketRoute): log.info("Setting context change") log.info("project {} asset {} ".format(project, asset)) if project: - api.Session["AVALON_PROJECT"] = project + legacy_io.Session["AVALON_PROJECT"] = project os.environ["AVALON_PROJECT"] = project if asset: - api.Session["AVALON_ASSET"] = asset + legacy_io.Session["AVALON_ASSET"] = asset os.environ["AVALON_ASSET"] = asset if task: - api.Session["AVALON_TASK"] = task + legacy_io.Session["AVALON_TASK"] = task os.environ["AVALON_TASK"] = task async def read(self): diff --git a/openpype/hosts/photoshop/api/pipeline.py b/openpype/hosts/photoshop/api/pipeline.py index 1f069c2636..906418aced 100644 --- a/openpype/hosts/photoshop/api/pipeline.py +++ b/openpype/hosts/photoshop/api/pipeline.py @@ -3,11 +3,11 @@ from Qt import QtWidgets from bson.objectid import ObjectId import pyblish.api -from avalon import io from openpype.api import Logger from openpype.lib import register_event_callback from openpype.pipeline import ( + legacy_io, register_loader_plugin_path, 
register_creator_plugin_path, deregister_loader_plugin_path, @@ -37,7 +37,7 @@ def check_inventory(): outdated_containers = [] for container in host.ls(): representation = container['representation'] - representation_doc = io.find_one( + representation_doc = legacy_io.find_one( { "_id": ObjectId(representation), "type": "representation" diff --git a/openpype/hosts/photoshop/plugins/publish/collect_instances.py b/openpype/hosts/photoshop/plugins/publish/collect_instances.py index 6198ed0156..50b50f86d9 100644 --- a/openpype/hosts/photoshop/plugins/publish/collect_instances.py +++ b/openpype/hosts/photoshop/plugins/publish/collect_instances.py @@ -1,9 +1,9 @@ -from avalon import api import pyblish.api from openpype.settings import get_project_settings from openpype.hosts.photoshop import api as photoshop from openpype.lib import prepare_template_data +from openpype.pipeline import legacy_io class CollectInstances(pyblish.api.ContextPlugin): @@ -79,7 +79,7 @@ class CollectInstances(pyblish.api.ContextPlugin): "CreateImage", {}).get( "defaults", ['']) family = "image" - task_name = api.Session["AVALON_TASK"] + task_name = legacy_io.Session["AVALON_TASK"] asset_name = context.data["assetEntity"]["name"] fill_pairs = { diff --git a/openpype/hosts/photoshop/plugins/publish/validate_instance_asset.py b/openpype/hosts/photoshop/plugins/publish/validate_instance_asset.py index ebe9cc21ea..b65f9d259f 100644 --- a/openpype/hosts/photoshop/plugins/publish/validate_instance_asset.py +++ b/openpype/hosts/photoshop/plugins/publish/validate_instance_asset.py @@ -1,6 +1,7 @@ -from avalon import api import pyblish.api + import openpype.api +from openpype.pipeline import legacy_io from openpype.hosts.photoshop import api as photoshop @@ -26,7 +27,7 @@ class ValidateInstanceAssetRepair(pyblish.api.Action): for instance in instances: data = stub.read(instance[0]) - data["asset"] = api.Session["AVALON_ASSET"] + data["asset"] = legacy_io.Session["AVALON_ASSET"] stub.imprint(instance[0], data) @@ -48,7 +49,7 @@ class ValidateInstanceAsset(pyblish.api.InstancePlugin): def process(self, instance): instance_asset = instance.data["asset"] - current_asset = api.Session["AVALON_ASSET"] + current_asset = legacy_io.Session["AVALON_ASSET"] msg = ( f"Instance asset {instance_asset} is not the same " f"as current context {current_asset}. 
PLEASE DO:\n" From c13a4cd7c4d22263dfabbfc496084b32f66be0fe Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 14 Apr 2022 12:18:49 +0200 Subject: [PATCH 162/357] replacead avalon imports in resolve --- openpype/hosts/resolve/plugins/load/load_clip.py | 10 ++++++---- .../resolve/plugins/publish/precollect_workfile.py | 9 ++++----- 2 files changed, 10 insertions(+), 9 deletions(-) diff --git a/openpype/hosts/resolve/plugins/load/load_clip.py b/openpype/hosts/resolve/plugins/load/load_clip.py index 71850d95f6..cf88b14e81 100644 --- a/openpype/hosts/resolve/plugins/load/load_clip.py +++ b/openpype/hosts/resolve/plugins/load/load_clip.py @@ -1,9 +1,11 @@ from copy import deepcopy from importlib import reload -from avalon import io from openpype.hosts import resolve -from openpype.pipeline import get_representation_path +from openpype.pipeline import ( + get_representation_path, + legacy_io, +) from openpype.hosts.resolve.api import lib, plugin reload(plugin) reload(lib) @@ -94,7 +96,7 @@ class LoadClip(resolve.TimelineItemLoader): namespace = container['namespace'] timeline_item_data = resolve.get_pype_timeline_item_by_name(namespace) timeline_item = timeline_item_data["clip"]["item"] - version = io.find_one({ + version = legacy_io.find_one({ "type": "version", "_id": representation["parent"] }) @@ -140,7 +142,7 @@ class LoadClip(resolve.TimelineItemLoader): # define version name version_name = version.get("name", None) # get all versions in list - versions = io.find({ + versions = legacy_io.find({ "type": "version", "parent": version["parent"] }).distinct('name') diff --git a/openpype/hosts/resolve/plugins/publish/precollect_workfile.py b/openpype/hosts/resolve/plugins/publish/precollect_workfile.py index 1333516177..a58f288770 100644 --- a/openpype/hosts/resolve/plugins/publish/precollect_workfile.py +++ b/openpype/hosts/resolve/plugins/publish/precollect_workfile.py @@ -1,10 +1,9 @@ import pyblish.api -from openpype.hosts import resolve -from avalon import api as avalon from pprint import pformat - -# dev from importlib import reload + +from openpype.hosts import resolve +from openpype.pipeline import legacy_io from openpype.hosts.resolve.otio import davinci_export reload(davinci_export) @@ -17,7 +16,7 @@ class PrecollectWorkfile(pyblish.api.ContextPlugin): def process(self, context): - asset = avalon.Session["AVALON_ASSET"] + asset = legacy_io.Session["AVALON_ASSET"] subset = "workfile" project = resolve.get_current_project() fps = project.GetSetting("timelineFrameRate") From 4eb6f09b8d46500349613e5f827b075f2a716679 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 14 Apr 2022 12:19:56 +0200 Subject: [PATCH 163/357] replace avalon imports in standalone publisher --- .../plugins/publish/collect_bulk_mov_instances.py | 6 +++--- .../plugins/publish/collect_context.py | 5 +++-- .../plugins/publish/collect_hierarchy.py | 13 ++++++++----- .../plugins/publish/collect_matching_asset.py | 5 +++-- .../plugins/publish/extract_bg_for_compositing.py | 7 ++++--- .../plugins/publish/extract_bg_main_groups.py | 10 ++++++---- .../plugins/publish/extract_images_from_psd.py | 9 +++++---- .../plugins/publish/validate_task_existence.py | 8 +++++--- 8 files changed, 37 insertions(+), 26 deletions(-) diff --git a/openpype/hosts/standalonepublisher/plugins/publish/collect_bulk_mov_instances.py b/openpype/hosts/standalonepublisher/plugins/publish/collect_bulk_mov_instances.py index 9f075d66cf..3e7fb19c00 100644 --- a/openpype/hosts/standalonepublisher/plugins/publish/collect_bulk_mov_instances.py +++ 
b/openpype/hosts/standalonepublisher/plugins/publish/collect_bulk_mov_instances.py @@ -2,8 +2,8 @@ import copy import json import pyblish.api -from avalon import io from openpype.lib import get_subset_name_with_asset_doc +from openpype.pipeline import legacy_io class CollectBulkMovInstances(pyblish.api.InstancePlugin): @@ -26,7 +26,7 @@ class CollectBulkMovInstances(pyblish.api.InstancePlugin): context = instance.context asset_name = instance.data["asset"] - asset_doc = io.find_one({ + asset_doc = legacy_io.find_one({ "type": "asset", "name": asset_name }) @@ -52,7 +52,7 @@ class CollectBulkMovInstances(pyblish.api.InstancePlugin): self.subset_name_variant, task_name, asset_doc, - io.Session["AVALON_PROJECT"] + legacy_io.Session["AVALON_PROJECT"] ) instance_name = f"{asset_name}_{subset_name}" diff --git a/openpype/hosts/standalonepublisher/plugins/publish/collect_context.py b/openpype/hosts/standalonepublisher/plugins/publish/collect_context.py index 6913e0836d..bfa9dcf73a 100644 --- a/openpype/hosts/standalonepublisher/plugins/publish/collect_context.py +++ b/openpype/hosts/standalonepublisher/plugins/publish/collect_context.py @@ -19,7 +19,8 @@ import copy from pprint import pformat import clique import pyblish.api -from avalon import io + +from openpype.pipeline import legacy_io class CollectContextDataSAPublish(pyblish.api.ContextPlugin): @@ -37,7 +38,7 @@ class CollectContextDataSAPublish(pyblish.api.ContextPlugin): def process(self, context): # get json paths from os and load them - io.install() + legacy_io.install() # get json file context input_json_path = os.environ.get("SAPUBLISH_INPATH") diff --git a/openpype/hosts/standalonepublisher/plugins/publish/collect_hierarchy.py b/openpype/hosts/standalonepublisher/plugins/publish/collect_hierarchy.py index b2735f3428..77163651c4 100644 --- a/openpype/hosts/standalonepublisher/plugins/publish/collect_hierarchy.py +++ b/openpype/hosts/standalonepublisher/plugins/publish/collect_hierarchy.py @@ -1,8 +1,10 @@ -import pyblish.api -import re import os -from avalon import io +import re from copy import deepcopy +import pyblish.api + +from openpype.pipeline import legacy_io + class CollectHierarchyInstance(pyblish.api.ContextPlugin): """Collecting hierarchy context from `parents` and `hierarchy` data @@ -63,7 +65,7 @@ class CollectHierarchyInstance(pyblish.api.ContextPlugin): hierarchy = list() visual_hierarchy = [instance.context.data["assetEntity"]] while True: - visual_parent = io.find_one( + visual_parent = legacy_io.find_one( {"_id": visual_hierarchy[-1]["data"]["visualParent"]} ) if visual_parent: @@ -129,7 +131,8 @@ class CollectHierarchyInstance(pyblish.api.ContextPlugin): if self.shot_add_tasks: tasks_to_add = dict() - project_tasks = io.find_one({"type": "project"})["config"]["tasks"] + project_doc = legacy_io.find_one({"type": "project"}) + project_tasks = project_doc["config"]["tasks"] for task_name, task_data in self.shot_add_tasks.items(): _task_data = deepcopy(task_data) diff --git a/openpype/hosts/standalonepublisher/plugins/publish/collect_matching_asset.py b/openpype/hosts/standalonepublisher/plugins/publish/collect_matching_asset.py index 0d629b1b44..9d94bfdc91 100644 --- a/openpype/hosts/standalonepublisher/plugins/publish/collect_matching_asset.py +++ b/openpype/hosts/standalonepublisher/plugins/publish/collect_matching_asset.py @@ -2,9 +2,10 @@ import os import re import collections import pyblish.api -from avalon import io from pprint import pformat +from openpype.pipeline import legacy_io + class 
CollectMatchingAssetToInstance(pyblish.api.InstancePlugin): """ @@ -119,7 +120,7 @@ class CollectMatchingAssetToInstance(pyblish.api.InstancePlugin): def _asset_docs_by_parent_id(self, instance): # Query all assets for project and store them by parent's id to list asset_docs_by_parent_id = collections.defaultdict(list) - for asset_doc in io.find({"type": "asset"}): + for asset_doc in legacy_io.find({"type": "asset"}): parent_id = asset_doc["data"]["visualParent"] asset_docs_by_parent_id[parent_id].append(asset_doc) return asset_docs_by_parent_id diff --git a/openpype/hosts/standalonepublisher/plugins/publish/extract_bg_for_compositing.py b/openpype/hosts/standalonepublisher/plugins/publish/extract_bg_for_compositing.py index f07499c15d..9621d70739 100644 --- a/openpype/hosts/standalonepublisher/plugins/publish/extract_bg_for_compositing.py +++ b/openpype/hosts/standalonepublisher/plugins/publish/extract_bg_for_compositing.py @@ -1,8 +1,9 @@ import os import json import copy + import openpype.api -from avalon import io +from openpype.pipeline import legacy_io PSDImage = None @@ -221,7 +222,7 @@ class ExtractBGForComp(openpype.api.Extractor): self.log.debug("resourcesDir: \"{}\"".format(resources_folder)) def find_last_version(self, subset_name, asset_doc): - subset_doc = io.find_one({ + subset_doc = legacy_io.find_one({ "type": "subset", "name": subset_name, "parent": asset_doc["_id"] @@ -230,7 +231,7 @@ class ExtractBGForComp(openpype.api.Extractor): if subset_doc is None: self.log.debug("Subset entity does not exist yet.") else: - version_doc = io.find_one( + version_doc = legacy_io.find_one( { "type": "version", "parent": subset_doc["_id"] diff --git a/openpype/hosts/standalonepublisher/plugins/publish/extract_bg_main_groups.py b/openpype/hosts/standalonepublisher/plugins/publish/extract_bg_main_groups.py index 2c92366ae9..b45f04e574 100644 --- a/openpype/hosts/standalonepublisher/plugins/publish/extract_bg_main_groups.py +++ b/openpype/hosts/standalonepublisher/plugins/publish/extract_bg_main_groups.py @@ -1,9 +1,11 @@ import os import copy import json -import openpype.api + import pyblish.api -from avalon import io + +import openpype.api +from openpype.pipeline import legacy_io PSDImage = None @@ -225,7 +227,7 @@ class ExtractBGMainGroups(openpype.api.Extractor): self.log.debug("resourcesDir: \"{}\"".format(resources_folder)) def find_last_version(self, subset_name, asset_doc): - subset_doc = io.find_one({ + subset_doc = legacy_io.find_one({ "type": "subset", "name": subset_name, "parent": asset_doc["_id"] @@ -234,7 +236,7 @@ class ExtractBGMainGroups(openpype.api.Extractor): if subset_doc is None: self.log.debug("Subset entity does not exist yet.") else: - version_doc = io.find_one( + version_doc = legacy_io.find_one( { "type": "version", "parent": subset_doc["_id"] diff --git a/openpype/hosts/standalonepublisher/plugins/publish/extract_images_from_psd.py b/openpype/hosts/standalonepublisher/plugins/publish/extract_images_from_psd.py index e3094b2e3f..8485fa0915 100644 --- a/openpype/hosts/standalonepublisher/plugins/publish/extract_images_from_psd.py +++ b/openpype/hosts/standalonepublisher/plugins/publish/extract_images_from_psd.py @@ -1,8 +1,9 @@ import os import copy -import openpype.api import pyblish.api -from avalon import io + +import openpype.api +from openpype.pipeline import legacy_io PSDImage = None @@ -149,7 +150,7 @@ class ExtractImagesFromPSD(openpype.api.Extractor): new_instance.data["representations"] = [new_repre] def find_last_version(self, subset_name, asset_doc): 
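Taken together, the extractor hunks around this point reduce to one small lookup. Below is a hedged, self-contained sketch of that pattern, not part of any patch in this series: it assumes legacy_io is installed and exposes the Mongo-style find_one interface (including the sort keyword used later in this series), and the bare-function name is illustrative only.

from openpype.pipeline import legacy_io


def find_last_version_sketch(subset_name, asset_doc):
    """Return the highest published version number for a subset, or None."""
    subset_doc = legacy_io.find_one({
        "type": "subset",
        "name": subset_name,
        "parent": asset_doc["_id"]
    })
    if subset_doc is None:
        # Subset entity does not exist yet, so nothing has been published.
        return None

    # Highest version is the one with the largest "name" value.
    version_doc = legacy_io.find_one(
        {"type": "version", "parent": subset_doc["_id"]},
        sort=[("name", -1)]
    )
    if version_doc:
        return int(version_doc["name"])
    return None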
- subset_doc = io.find_one({ + subset_doc = legacy_io.find_one({ "type": "subset", "name": subset_name, "parent": asset_doc["_id"] @@ -158,7 +159,7 @@ class ExtractImagesFromPSD(openpype.api.Extractor): if subset_doc is None: self.log.debug("Subset entity does not exist yet.") else: - version_doc = io.find_one( + version_doc = legacy_io.find_one( { "type": "version", "parent": subset_doc["_id"] diff --git a/openpype/hosts/standalonepublisher/plugins/publish/validate_task_existence.py b/openpype/hosts/standalonepublisher/plugins/publish/validate_task_existence.py index 825092c81b..4c761c7a4c 100644 --- a/openpype/hosts/standalonepublisher/plugins/publish/validate_task_existence.py +++ b/openpype/hosts/standalonepublisher/plugins/publish/validate_task_existence.py @@ -1,7 +1,9 @@ import pyblish.api -from avalon import io -from openpype.pipeline import PublishXmlValidationError +from openpype.pipeline import ( + PublishXmlValidationError, + legacy_io, +) class ValidateTaskExistence(pyblish.api.ContextPlugin): @@ -18,7 +20,7 @@ class ValidateTaskExistence(pyblish.api.ContextPlugin): for instance in context: asset_names.add(instance.data["asset"]) - asset_docs = io.find( + asset_docs = legacy_io.find( { "type": "asset", "name": {"$in": list(asset_names)} From 32e02701a17ab058a9efeb9d66017a5673922531 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 14 Apr 2022 12:22:43 +0200 Subject: [PATCH 164/357] replaced avalon imports in testhost --- openpype/hosts/testhost/api/__init__.py | 6 +++--- openpype/hosts/testhost/api/pipeline.py | 5 ++--- .../testhost/plugins/create/auto_creator.py | 20 ++++++++++++------- openpype/hosts/traypublisher/api/pipeline.py | 11 +++++----- 4 files changed, 24 insertions(+), 18 deletions(-) diff --git a/openpype/hosts/testhost/api/__init__.py b/openpype/hosts/testhost/api/__init__.py index 7840b25892..a929a891aa 100644 --- a/openpype/hosts/testhost/api/__init__.py +++ b/openpype/hosts/testhost/api/__init__.py @@ -1,8 +1,8 @@ import os import logging import pyblish.api -import avalon.api -from openpype.pipeline import BaseCreator + +from openpype.pipeline import register_creator_plugin_path from .pipeline import ( ls, @@ -27,7 +27,7 @@ def install(): log.info("OpenPype - Installing TestHost integration") pyblish.api.register_host("testhost") pyblish.api.register_plugin_path(PUBLISH_PATH) - avalon.api.register_plugin_path(BaseCreator, CREATE_PATH) + register_creator_plugin_path(CREATE_PATH) __all__ = ( diff --git a/openpype/hosts/testhost/api/pipeline.py b/openpype/hosts/testhost/api/pipeline.py index 1f5d680705..285fe8f8d6 100644 --- a/openpype/hosts/testhost/api/pipeline.py +++ b/openpype/hosts/testhost/api/pipeline.py @@ -1,5 +1,6 @@ import os import json +from openpype.pipeline import legacy_io class HostContext: @@ -16,9 +17,7 @@ class HostContext: if not asset_name: return project_name - from avalon import io - - asset_doc = io.find_one( + asset_doc = legacy_io.find_one( {"type": "asset", "name": asset_name}, {"data.parents": 1} ) diff --git a/openpype/hosts/testhost/plugins/create/auto_creator.py b/openpype/hosts/testhost/plugins/create/auto_creator.py index 4c22eea9dd..06b95375b1 100644 --- a/openpype/hosts/testhost/plugins/create/auto_creator.py +++ b/openpype/hosts/testhost/plugins/create/auto_creator.py @@ -1,7 +1,7 @@ -from avalon import io from openpype.lib import NumberDef from openpype.hosts.testhost.api import pipeline from openpype.pipeline import ( + legacy_io, AutoCreator, CreatedInstance, ) @@ -38,13 +38,16 @@ class MyAutoCreator(AutoCreator): 
break variant = "Main" - project_name = io.Session["AVALON_PROJECT"] - asset_name = io.Session["AVALON_ASSET"] - task_name = io.Session["AVALON_TASK"] - host_name = io.Session["AVALON_APP"] + project_name = legacy_io.Session["AVALON_PROJECT"] + asset_name = legacy_io.Session["AVALON_ASSET"] + task_name = legacy_io.Session["AVALON_TASK"] + host_name = legacy_io.Session["AVALON_APP"] if existing_instance is None: - asset_doc = io.find_one({"type": "asset", "name": asset_name}) + asset_doc = legacy_io.find_one({ + "type": "asset", + "name": asset_name + }) subset_name = self.get_subset_name( variant, task_name, asset_doc, project_name, host_name ) @@ -66,7 +69,10 @@ class MyAutoCreator(AutoCreator): existing_instance["asset"] != asset_name or existing_instance["task"] != task_name ): - asset_doc = io.find_one({"type": "asset", "name": asset_name}) + asset_doc = legacy_io.find_one({ + "type": "asset", + "name": asset_name + }) subset_name = self.get_subset_name( variant, task_name, asset_doc, project_name, host_name ) diff --git a/openpype/hosts/traypublisher/api/pipeline.py b/openpype/hosts/traypublisher/api/pipeline.py index 24175883d9..954a0bae47 100644 --- a/openpype/hosts/traypublisher/api/pipeline.py +++ b/openpype/hosts/traypublisher/api/pipeline.py @@ -3,11 +3,12 @@ import json import tempfile import atexit -from avalon import io -import avalon.api import pyblish.api -from openpype.pipeline import register_creator_plugin_path +from openpype.pipeline import ( + register_creator_plugin_path, + legacy_io, +) ROOT_DIR = os.path.dirname(os.path.dirname( os.path.abspath(__file__) @@ -175,6 +176,6 @@ def install(): def set_project_name(project_name): # TODO Deregister project specific plugins and register new project plugins os.environ["AVALON_PROJECT"] = project_name - avalon.api.Session["AVALON_PROJECT"] = project_name - io.install() + legacy_io.Session["AVALON_PROJECT"] = project_name + legacy_io.install() HostContext.set_project_name(project_name) From 98ba730a9932685e93825f515e489867b127b5cf Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 14 Apr 2022 12:26:02 +0200 Subject: [PATCH 165/357] replace avalon imports in tvpaint --- openpype/hosts/tvpaint/api/lib.py | 2 -- openpype/hosts/tvpaint/api/pipeline.py | 13 ++++++------- openpype/hosts/tvpaint/api/workio.py | 12 +++++++----- openpype/hosts/tvpaint/hooks/pre_launch_args.py | 6 ------ .../hosts/tvpaint/plugins/load/load_workfile.py | 16 +++++++++------- .../tvpaint/plugins/publish/collect_instances.py | 11 +++++------ .../plugins/publish/collect_scene_render.py | 4 ++-- .../tvpaint/plugins/publish/collect_workfile.py | 6 +++--- .../plugins/publish/collect_workfile_data.py | 11 ++++++----- 9 files changed, 38 insertions(+), 43 deletions(-) diff --git a/openpype/hosts/tvpaint/api/lib.py b/openpype/hosts/tvpaint/api/lib.py index 9e6404e72f..0c63dbe5be 100644 --- a/openpype/hosts/tvpaint/api/lib.py +++ b/openpype/hosts/tvpaint/api/lib.py @@ -2,8 +2,6 @@ import os import logging import tempfile -import avalon.io - from . 
import CommunicationWrapper log = logging.getLogger(__name__) diff --git a/openpype/hosts/tvpaint/api/pipeline.py b/openpype/hosts/tvpaint/api/pipeline.py index d57ec3178a..f473f51457 100644 --- a/openpype/hosts/tvpaint/api/pipeline.py +++ b/openpype/hosts/tvpaint/api/pipeline.py @@ -8,12 +8,11 @@ import requests import pyblish.api -from avalon import io - from openpype.hosts import tvpaint from openpype.api import get_current_project_settings from openpype.lib import register_event_callback from openpype.pipeline import ( + legacy_io, register_loader_plugin_path, register_creator_plugin_path, deregister_loader_plugin_path, @@ -69,10 +68,10 @@ def install(): """Install TVPaint-specific functionality.""" log.info("OpenPype - Installing TVPaint integration") - io.install() + legacy_io.install() # Create workdir folder if does not exist yet - workdir = io.Session["AVALON_WORKDIR"] + workdir = legacy_io.Session["AVALON_WORKDIR"] if not os.path.exists(workdir): os.makedirs(workdir) @@ -445,12 +444,12 @@ def set_context_settings(asset_doc=None): """ if asset_doc is None: # Use current session asset if not passed - asset_doc = avalon.io.find_one({ + asset_doc = legacy_io.find_one({ "type": "asset", - "name": avalon.io.Session["AVALON_ASSET"] + "name": legacy_io.Session["AVALON_ASSET"] }) - project_doc = avalon.io.find_one({"type": "project"}) + project_doc = legacy_io.find_one({"type": "project"}) framerate = asset_doc["data"].get("fps") if framerate is None: diff --git a/openpype/hosts/tvpaint/api/workio.py b/openpype/hosts/tvpaint/api/workio.py index 88bdd7117e..1a5ad00ca8 100644 --- a/openpype/hosts/tvpaint/api/workio.py +++ b/openpype/hosts/tvpaint/api/workio.py @@ -3,8 +3,10 @@ has_unsaved_changes """ -from avalon import api -from openpype.pipeline import HOST_WORKFILE_EXTENSIONS +from openpype.pipeline import ( + HOST_WORKFILE_EXTENSIONS, + legacy_io, +) from .lib import ( execute_george, execute_george_through_file @@ -24,9 +26,9 @@ def save_file(filepath): """Save the open scene file.""" # Store context to workfile before save context = { - "project": api.Session["AVALON_PROJECT"], - "asset": api.Session["AVALON_ASSET"], - "task": api.Session["AVALON_TASK"] + "project": legacy_io.Session["AVALON_PROJECT"], + "asset": legacy_io.Session["AVALON_ASSET"], + "task": legacy_io.Session["AVALON_TASK"] } save_current_workfile_context(context) diff --git a/openpype/hosts/tvpaint/hooks/pre_launch_args.py b/openpype/hosts/tvpaint/hooks/pre_launch_args.py index 2a8f49d5b0..c31403437a 100644 --- a/openpype/hosts/tvpaint/hooks/pre_launch_args.py +++ b/openpype/hosts/tvpaint/hooks/pre_launch_args.py @@ -1,14 +1,8 @@ -import os -import shutil - -from openpype.hosts import tvpaint from openpype.lib import ( PreLaunchHook, get_openpype_execute_args ) -import avalon - class TvpaintPrelaunchHook(PreLaunchHook): """Launch arguments preparation. 
diff --git a/openpype/hosts/tvpaint/plugins/load/load_workfile.py b/openpype/hosts/tvpaint/plugins/load/load_workfile.py index 1ce5449065..0eab083c22 100644 --- a/openpype/hosts/tvpaint/plugins/load/load_workfile.py +++ b/openpype/hosts/tvpaint/plugins/load/load_workfile.py @@ -1,13 +1,15 @@ import os -from avalon import io from openpype.lib import ( StringTemplate, get_workfile_template_key_from_context, get_workdir_data, get_last_workfile_with_version, ) -from openpype.pipeline import registered_host +from openpype.pipeline import ( + registered_host, + legacy_io, +) from openpype.api import Anatomy from openpype.hosts.tvpaint.api import lib, pipeline, plugin @@ -46,13 +48,13 @@ class LoadWorkfile(plugin.Loader): task_name = context.get("task") # Far cases when there is workfile without context if not asset_name: - asset_name = io.Session["AVALON_ASSET"] - task_name = io.Session["AVALON_TASK"] + asset_name = legacy_io.Session["AVALON_ASSET"] + task_name = legacy_io.Session["AVALON_TASK"] - project_doc = io.find_one({ + project_doc = legacy_io.find_one({ "type": "project" }) - asset_doc = io.find_one({ + asset_doc = legacy_io.find_one({ "type": "asset", "name": asset_name }) @@ -63,7 +65,7 @@ class LoadWorkfile(plugin.Loader): task_name, host_name, project_name=project_name, - dbcon=io + dbcon=legacy_io ) anatomy = Anatomy(project_name) diff --git a/openpype/hosts/tvpaint/plugins/publish/collect_instances.py b/openpype/hosts/tvpaint/plugins/publish/collect_instances.py index 5e8d13592c..188aa8c41a 100644 --- a/openpype/hosts/tvpaint/plugins/publish/collect_instances.py +++ b/openpype/hosts/tvpaint/plugins/publish/collect_instances.py @@ -1,10 +1,9 @@ -import os import json import copy import pyblish.api -from avalon import io from openpype.lib import get_subset_name_with_asset_doc +from openpype.pipeline import legacy_io class CollectInstances(pyblish.api.ContextPlugin): @@ -82,7 +81,7 @@ class CollectInstances(pyblish.api.ContextPlugin): # - not sure if it's good idea to require asset id in # get_subset_name? 
asset_name = context.data["workfile_context"]["asset"] - asset_doc = io.find_one({ + asset_doc = legacy_io.find_one({ "type": "asset", "name": asset_name }) @@ -93,7 +92,7 @@ class CollectInstances(pyblish.api.ContextPlugin): host_name = context.data["hostName"] # Use empty variant value variant = "" - task_name = io.Session["AVALON_TASK"] + task_name = legacy_io.Session["AVALON_TASK"] new_subset_name = get_subset_name_with_asset_doc( family, variant, @@ -157,7 +156,7 @@ class CollectInstances(pyblish.api.ContextPlugin): # Change subset name # Final family of an instance will be `render` new_family = "render" - task_name = io.Session["AVALON_TASK"] + task_name = legacy_io.Session["AVALON_TASK"] new_subset_name = "{}{}_{}_Beauty".format( new_family, task_name.capitalize(), name ) @@ -202,7 +201,7 @@ class CollectInstances(pyblish.api.ContextPlugin): # Final family of an instance will be `render` new_family = "render" old_subset_name = instance_data["subset"] - task_name = io.Session["AVALON_TASK"] + task_name = legacy_io.Session["AVALON_TASK"] new_subset_name = "{}{}_{}_{}".format( new_family, task_name.capitalize(), render_layer, pass_name ) diff --git a/openpype/hosts/tvpaint/plugins/publish/collect_scene_render.py b/openpype/hosts/tvpaint/plugins/publish/collect_scene_render.py index 0af9a9a400..1c042a62fb 100644 --- a/openpype/hosts/tvpaint/plugins/publish/collect_scene_render.py +++ b/openpype/hosts/tvpaint/plugins/publish/collect_scene_render.py @@ -1,9 +1,9 @@ import json import copy import pyblish.api -from avalon import io from openpype.lib import get_subset_name_with_asset_doc +from openpype.pipeline import legacy_io class CollectRenderScene(pyblish.api.ContextPlugin): @@ -57,7 +57,7 @@ class CollectRenderScene(pyblish.api.ContextPlugin): # get_subset_name? workfile_context = context.data["workfile_context"] asset_name = workfile_context["asset"] - asset_doc = io.find_one({ + asset_doc = legacy_io.find_one({ "type": "asset", "name": asset_name }) diff --git a/openpype/hosts/tvpaint/plugins/publish/collect_workfile.py b/openpype/hosts/tvpaint/plugins/publish/collect_workfile.py index 89348037d3..70d92f82e9 100644 --- a/openpype/hosts/tvpaint/plugins/publish/collect_workfile.py +++ b/openpype/hosts/tvpaint/plugins/publish/collect_workfile.py @@ -1,9 +1,9 @@ import os import json import pyblish.api -from avalon import io from openpype.lib import get_subset_name_with_asset_doc +from openpype.pipeline import legacy_io class CollectWorkfile(pyblish.api.ContextPlugin): @@ -28,7 +28,7 @@ class CollectWorkfile(pyblish.api.ContextPlugin): # get_subset_name? 
family = "workfile" asset_name = context.data["workfile_context"]["asset"] - asset_doc = io.find_one({ + asset_doc = legacy_io.find_one({ "type": "asset", "name": asset_name }) @@ -39,7 +39,7 @@ class CollectWorkfile(pyblish.api.ContextPlugin): host_name = os.environ["AVALON_APP"] # Use empty variant value variant = "" - task_name = io.Session["AVALON_TASK"] + task_name = legacy_io.Session["AVALON_TASK"] subset_name = get_subset_name_with_asset_doc( family, variant, diff --git a/openpype/hosts/tvpaint/plugins/publish/collect_workfile_data.py b/openpype/hosts/tvpaint/plugins/publish/collect_workfile_data.py index f5c86c613b..c59ef82f85 100644 --- a/openpype/hosts/tvpaint/plugins/publish/collect_workfile_data.py +++ b/openpype/hosts/tvpaint/plugins/publish/collect_workfile_data.py @@ -3,7 +3,8 @@ import json import tempfile import pyblish.api -import avalon.api + +from openpype.pipeline import legacy_io from openpype.hosts.tvpaint.api import pipeline, lib @@ -49,9 +50,9 @@ class CollectWorkfileData(pyblish.api.ContextPlugin): # Collect and store current context to have reference current_context = { - "project": avalon.api.Session["AVALON_PROJECT"], - "asset": avalon.api.Session["AVALON_ASSET"], - "task": avalon.api.Session["AVALON_TASK"] + "project": legacy_io.Session["AVALON_PROJECT"], + "asset": legacy_io.Session["AVALON_ASSET"], + "task": legacy_io.Session["AVALON_TASK"] } context.data["previous_context"] = current_context self.log.debug("Current context is: {}".format(current_context)) @@ -69,7 +70,7 @@ class CollectWorkfileData(pyblish.api.ContextPlugin): ("AVALON_TASK", "task") ) for env_key, key in key_map: - avalon.api.Session[env_key] = workfile_context[key] + legacy_io.Session[env_key] = workfile_context[key] os.environ[env_key] = workfile_context[key] self.log.info("Context changed to: {}".format(workfile_context)) From ee93213cf113dea428e9a4dc6a26ac2364e37105 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 14 Apr 2022 12:27:06 +0200 Subject: [PATCH 166/357] replace avalon imports in unreal --- openpype/hosts/unreal/plugins/load/load_camera.py | 14 ++++++++------ .../hosts/unreal/plugins/publish/extract_layout.py | 4 ++-- 2 files changed, 10 insertions(+), 8 deletions(-) diff --git a/openpype/hosts/unreal/plugins/load/load_camera.py b/openpype/hosts/unreal/plugins/load/load_camera.py index 40bca0b0c7..63c0845ec2 100644 --- a/openpype/hosts/unreal/plugins/load/load_camera.py +++ b/openpype/hosts/unreal/plugins/load/load_camera.py @@ -2,8 +2,10 @@ """Load camera from FBX.""" import os -from avalon import io -from openpype.pipeline import AVALON_CONTAINER_ID +from openpype.pipeline import ( + AVALON_CONTAINER_ID, + legacy_io, +) from openpype.hosts.unreal.api import plugin from openpype.hosts.unreal.api import pipeline as unreal_pipeline import unreal # noqa @@ -87,8 +89,8 @@ class CameraLoader(plugin.Loader): factory=unreal.LevelSequenceFactoryNew() ) - io_asset = io.Session["AVALON_ASSET"] - asset_doc = io.find_one({ + io_asset = legacy_io.Session["AVALON_ASSET"] + asset_doc = legacy_io.find_one({ "type": "asset", "name": io_asset }) @@ -172,8 +174,8 @@ class CameraLoader(plugin.Loader): factory=unreal.LevelSequenceFactoryNew() ) - io_asset = io.Session["AVALON_ASSET"] - asset_doc = io.find_one({ + io_asset = legacy_io.Session["AVALON_ASSET"] + asset_doc = legacy_io.find_one({ "type": "asset", "name": io_asset }) diff --git a/openpype/hosts/unreal/plugins/publish/extract_layout.py b/openpype/hosts/unreal/plugins/publish/extract_layout.py index f34a47b89f..87e6693a97 100644 
--- a/openpype/hosts/unreal/plugins/publish/extract_layout.py +++ b/openpype/hosts/unreal/plugins/publish/extract_layout.py @@ -10,7 +10,7 @@ from unreal import EditorLevelLibrary as ell from unreal import EditorAssetLibrary as eal import openpype.api -from avalon import io +from openpype.pipeline import legacy_io class ExtractLayout(openpype.api.Extractor): @@ -61,7 +61,7 @@ class ExtractLayout(openpype.api.Extractor): family = eal.get_metadata_tag(asset_container, "family") self.log.info("Parent: {}".format(parent)) - blend = io.find_one( + blend = legacy_io.find_one( { "type": "representation", "parent": ObjectId(parent), From c1246f5349e61891d70dd84135308e0ab303bad0 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 14 Apr 2022 12:27:19 +0200 Subject: [PATCH 167/357] replace avalon imports in webpublisher --- openpype/hosts/webpublisher/api/__init__.py | 4 ++-- .../webpublisher/plugins/publish/collect_batch_data.py | 7 ++++--- .../plugins/publish/collect_published_files.py | 4 ++-- 3 files changed, 8 insertions(+), 7 deletions(-) diff --git a/openpype/hosts/webpublisher/api/__init__.py b/openpype/hosts/webpublisher/api/__init__.py index 72bbffd099..18e3a16cf5 100644 --- a/openpype/hosts/webpublisher/api/__init__.py +++ b/openpype/hosts/webpublisher/api/__init__.py @@ -1,9 +1,9 @@ import os import logging -from avalon import io from pyblish import api as pyblish import openpype.hosts.webpublisher +from openpype.pipeline import legacy_io log = logging.getLogger("openpype.hosts.webpublisher") @@ -19,7 +19,7 @@ def install(): pyblish.register_plugin_path(PUBLISH_PATH) log.info(PUBLISH_PATH) - io.install() + legacy_io.install() def uninstall(): diff --git a/openpype/hosts/webpublisher/plugins/publish/collect_batch_data.py b/openpype/hosts/webpublisher/plugins/publish/collect_batch_data.py index ca14538d7d..d954c04c60 100644 --- a/openpype/hosts/webpublisher/plugins/publish/collect_batch_data.py +++ b/openpype/hosts/webpublisher/plugins/publish/collect_batch_data.py @@ -7,12 +7,13 @@ Provides: import os import pyblish.api -from avalon import io + from openpype.lib.plugin_tools import ( parse_json, get_batch_asset_task_info ) from openpype.lib.remote_publish import get_webpublish_conn, IN_PROGRESS_STATUS +from openpype.pipeline import legacy_io class CollectBatchData(pyblish.api.ContextPlugin): @@ -52,9 +53,9 @@ class CollectBatchData(pyblish.api.ContextPlugin): ) os.environ["AVALON_ASSET"] = asset_name - io.Session["AVALON_ASSET"] = asset_name + legacy_io.Session["AVALON_ASSET"] = asset_name os.environ["AVALON_TASK"] = task_name - io.Session["AVALON_TASK"] = task_name + legacy_io.Session["AVALON_TASK"] = task_name context.data["asset"] = asset_name context.data["task"] = task_name diff --git a/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py b/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py index 8edaf4f67b..84a1f63418 100644 --- a/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py +++ b/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py @@ -12,7 +12,6 @@ import clique import tempfile import math -from avalon import io import pyblish.api from openpype.lib import ( prepare_template_data, @@ -24,6 +23,7 @@ from openpype.lib.plugin_tools import ( parse_json, get_subset_name_with_asset_doc ) +from openpype.pipeline import legacy_io class CollectPublishedFiles(pyblish.api.ContextPlugin): @@ -261,7 +261,7 @@ class CollectPublishedFiles(pyblish.api.ContextPlugin): } } ] - version = list(io.aggregate(query)) + 
version = list(legacy_io.aggregate(query)) if version: return version[0].get("version") or 0 From 066d6123d6763b6a90737d9e69cda7c2151c69cc Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 14 Apr 2022 12:29:41 +0200 Subject: [PATCH 168/357] replace avalon imports in openpype.lib --- openpype/lib/abstract_collect_render.py | 5 +- openpype/lib/avalon_context.py | 149 ++++++++++++------------ openpype/lib/plugin_tools.py | 4 +- openpype/lib/usdlib.py | 18 +-- 4 files changed, 87 insertions(+), 89 deletions(-) diff --git a/openpype/lib/abstract_collect_render.py b/openpype/lib/abstract_collect_render.py index 7c768e280c..fe202824a7 100644 --- a/openpype/lib/abstract_collect_render.py +++ b/openpype/lib/abstract_collect_render.py @@ -9,9 +9,10 @@ from abc import abstractmethod import attr import six -from avalon import api import pyblish.api +from openpype.pipeline import legacy_io + from .abstract_metaplugins import AbstractMetaContextPlugin @@ -127,7 +128,7 @@ class AbstractCollectRender(pyblish.api.ContextPlugin): """Constructor.""" super(AbstractCollectRender, self).__init__(*args, **kwargs) self._file_path = None - self._asset = api.Session["AVALON_ASSET"] + self._asset = legacy_io.Session["AVALON_ASSET"] self._context = None def process(self, context): diff --git a/openpype/lib/avalon_context.py b/openpype/lib/avalon_context.py index d95d1b983f..139fb7edde 100644 --- a/openpype/lib/avalon_context.py +++ b/openpype/lib/avalon_context.py @@ -20,9 +20,7 @@ from .profiles_filtering import filter_profiles from .events import emit_event from .path_templates import StringTemplate -# avalon module is not imported at the top -# - may not be in path at the time of pype.lib initialization -avalon = None +legacy_io = None log = logging.getLogger("AvalonContext") @@ -120,17 +118,17 @@ def create_project( return project_doc -def with_avalon(func): +def with_pipeline_io(func): @functools.wraps(func) - def wrap_avalon(*args, **kwargs): - global avalon - if avalon is None: - import avalon + def wrapped(*args, **kwargs): + global legacy_io + if legacy_io is None: + from openpype.pipeline import legacy_io return func(*args, **kwargs) - return wrap_avalon + return wrapped -@with_avalon +@with_pipeline_io def is_latest(representation): """Return whether the representation is from latest version @@ -142,12 +140,12 @@ def is_latest(representation): """ - version = avalon.io.find_one({"_id": representation['parent']}) + version = legacy_io.find_one({"_id": representation['parent']}) if version["type"] == "hero_version": return True # Get highest version under the parent - highest_version = avalon.io.find_one({ + highest_version = legacy_io.find_one({ "type": "version", "parent": version["parent"] }, sort=[("name", -1)], projection={"name": True}) @@ -158,7 +156,7 @@ def is_latest(representation): return False -@with_avalon +@with_pipeline_io def any_outdated(): """Return whether the current scene has any outdated content""" from openpype.pipeline import registered_host @@ -170,7 +168,7 @@ def any_outdated(): if representation in checked: continue - representation_doc = avalon.io.find_one( + representation_doc = legacy_io.find_one( { "_id": ObjectId(representation), "type": "representation" @@ -189,7 +187,7 @@ def any_outdated(): return False -@with_avalon +@with_pipeline_io def get_asset(asset_name=None): """ Returning asset document from database by its name. 
@@ -202,9 +200,9 @@ def get_asset(asset_name=None): (MongoDB document) """ if not asset_name: - asset_name = avalon.api.Session["AVALON_ASSET"] + asset_name = legacy_io.Session["AVALON_ASSET"] - asset_document = avalon.io.find_one({ + asset_document = legacy_io.find_one({ "name": asset_name, "type": "asset" }) @@ -215,7 +213,7 @@ def get_asset(asset_name=None): return asset_document -@with_avalon +@with_pipeline_io def get_hierarchy(asset_name=None): """ Obtain asset hierarchy path string from mongo db @@ -228,12 +226,12 @@ def get_hierarchy(asset_name=None): """ if not asset_name: - asset_name = avalon.io.Session.get( + asset_name = legacy_io.Session.get( "AVALON_ASSET", os.environ["AVALON_ASSET"] ) - asset_entity = avalon.io.find_one({ + asset_entity = legacy_io.find_one({ "type": 'asset', "name": asset_name }) @@ -252,13 +250,13 @@ def get_hierarchy(asset_name=None): parent_id = entity.get("data", {}).get("visualParent") if not parent_id: break - entity = avalon.io.find_one({"_id": parent_id}) + entity = legacy_io.find_one({"_id": parent_id}) hierarchy_items.append(entity["name"]) # Add parents to entity data for next query entity_data = asset_entity.get("data", {}) entity_data["parents"] = hierarchy_items - avalon.io.update_many( + legacy_io.update_many( {"_id": asset_entity["_id"]}, {"$set": {"data": entity_data}} ) @@ -305,7 +303,7 @@ def get_linked_asset_ids(asset_doc): return output -@with_avalon +@with_pipeline_io def get_linked_assets(asset_doc): """Return linked assets for `asset_doc` from DB @@ -319,10 +317,10 @@ def get_linked_assets(asset_doc): if not link_ids: return [] - return list(avalon.io.find({"_id": {"$in": link_ids}})) + return list(legacy_io.find({"_id": {"$in": link_ids}})) -@with_avalon +@with_pipeline_io def get_latest_version(asset_name, subset_name, dbcon=None, project_name=None): """Retrieve latest version from `asset_name`, and `subset_name`. @@ -342,13 +340,13 @@ def get_latest_version(asset_name, subset_name, dbcon=None, project_name=None): """ if not dbcon: - log.debug("Using `avalon.io` for query.") - dbcon = avalon.io + log.debug("Using `legacy_io` for query.") + dbcon = legacy_io # Make sure is installed dbcon.install() if project_name and project_name != dbcon.Session.get("AVALON_PROJECT"): - # `avalon.io` has only `_database` attribute + # `legacy_io` has only `_database` attribute # but `AvalonMongoDB` has `database` database = getattr(dbcon, "database", dbcon._database) collection = database[project_name] @@ -648,6 +646,7 @@ def get_workdir( ) +@with_pipeline_io def template_data_from_session(session=None): """ Return dictionary with template from session keys. @@ -657,15 +656,15 @@ def template_data_from_session(session=None): Returns: dict: All available data from session. 
""" - from avalon import io - import avalon.api if session is None: - session = avalon.api.Session + session = legacy_io.Session project_name = session["AVALON_PROJECT"] - project_doc = io._database[project_name].find_one({"type": "project"}) - asset_doc = io._database[project_name].find_one({ + project_doc = legacy_io.database[project_name].find_one({ + "type": "project" + }) + asset_doc = legacy_io.database[project_name].find_one({ "type": "asset", "name": session["AVALON_ASSET"] }) @@ -674,6 +673,7 @@ def template_data_from_session(session=None): return get_workdir_data(project_doc, asset_doc, task_name, host_name) +@with_pipeline_io def compute_session_changes( session, task=None, asset=None, app=None, template_key=None ): @@ -712,10 +712,8 @@ def compute_session_changes( asset = asset["name"] if not asset_document or not asset_tasks: - from avalon import io - # Assume asset name - asset_document = io.find_one( + asset_document = legacy_io.find_one( { "name": asset, "type": "asset" @@ -747,11 +745,10 @@ def compute_session_changes( return changes +@with_pipeline_io def get_workdir_from_session(session=None, template_key=None): - import avalon.api - if session is None: - session = avalon.api.Session + session = legacy_io.Session project_name = session["AVALON_PROJECT"] host_name = session["AVALON_APP"] anatomy = Anatomy(project_name) @@ -768,6 +765,7 @@ def get_workdir_from_session(session=None, template_key=None): return anatomy_filled[template_key]["folder"] +@with_pipeline_io def update_current_task(task=None, asset=None, app=None, template_key=None): """Update active Session to a new task work area. @@ -782,10 +780,8 @@ def update_current_task(task=None, asset=None, app=None, template_key=None): dict: The changed key, values in the current Session. """ - import avalon.api - changes = compute_session_changes( - avalon.api.Session, + legacy_io.Session, task=task, asset=asset, app=app, @@ -795,7 +791,7 @@ def update_current_task(task=None, asset=None, app=None, template_key=None): # Update the Session and environments. Pop from environments all keys with # value set to None. for key, value in changes.items(): - avalon.api.Session[key] = value + legacy_io.Session[key] = value if value is None: os.environ.pop(key, None) else: @@ -807,7 +803,7 @@ def update_current_task(task=None, asset=None, app=None, template_key=None): return changes -@with_avalon +@with_pipeline_io def get_workfile_doc(asset_id, task_name, filename, dbcon=None): """Return workfile document for entered context. @@ -819,14 +815,14 @@ def get_workfile_doc(asset_id, task_name, filename, dbcon=None): task_name (str): Name of task under which the workfile belongs. filename (str): Name of a workfile. dbcon (AvalonMongoDB): Optionally enter avalon AvalonMongoDB object and - `avalon.io` is used if not entered. + `legacy_io` is used if not entered. Returns: dict: Workfile document or None. """ - # Use avalon.io if dbcon is not entered + # Use legacy_io if dbcon is not entered if not dbcon: - dbcon = avalon.io + dbcon = legacy_io return dbcon.find_one({ "type": "workfile", @@ -836,7 +832,7 @@ def get_workfile_doc(asset_id, task_name, filename, dbcon=None): }) -@with_avalon +@with_pipeline_io def create_workfile_doc(asset_doc, task_name, filename, workdir, dbcon=None): """Creates or replace workfile document in mongo. @@ -849,11 +845,11 @@ def create_workfile_doc(asset_doc, task_name, filename, workdir, dbcon=None): filename (str): Filename of workfile. workdir (str): Path to directory where `filename` is located. 
dbcon (AvalonMongoDB): Optionally enter avalon AvalonMongoDB object and - `avalon.io` is used if not entered. + `legacy_io` is used if not entered. """ - # Use avalon.io if dbcon is not entered + # Use legacy_io if dbcon is not entered if not dbcon: - dbcon = avalon.io + dbcon = legacy_io # Filter of workfile document doc_filter = { @@ -898,7 +894,7 @@ def create_workfile_doc(asset_doc, task_name, filename, workdir, dbcon=None): ) -@with_avalon +@with_pipeline_io def save_workfile_data_to_doc(workfile_doc, data, dbcon=None): if not workfile_doc: # TODO add log message @@ -907,9 +903,9 @@ def save_workfile_data_to_doc(workfile_doc, data, dbcon=None): if not data: return - # Use avalon.io if dbcon is not entered + # Use legacy_io if dbcon is not entered if not dbcon: - dbcon = avalon.io + dbcon = legacy_io # Convert data to mongo modification keys/values # - this is naive implementation which does not expect nested @@ -959,7 +955,7 @@ class BuildWorkfile: return containers - @with_avalon + @with_pipeline_io def build_workfile(self): """Prepares and load containers into workfile. @@ -986,8 +982,8 @@ class BuildWorkfile: from openpype.pipeline import discover_loader_plugins # Get current asset name and entity - current_asset_name = avalon.io.Session["AVALON_ASSET"] - current_asset_entity = avalon.io.find_one({ + current_asset_name = legacy_io.Session["AVALON_ASSET"] + current_asset_entity = legacy_io.find_one({ "type": "asset", "name": current_asset_name }) @@ -1015,7 +1011,7 @@ class BuildWorkfile: return # Get current task name - current_task_name = avalon.io.Session["AVALON_TASK"] + current_task_name = legacy_io.Session["AVALON_TASK"] # Load workfile presets for task self.build_presets = self.get_build_presets( @@ -1103,7 +1099,7 @@ class BuildWorkfile: # Return list of loaded containers return loaded_containers - @with_avalon + @with_pipeline_io def get_build_presets(self, task_name, asset_doc): """ Returns presets to build workfile for task name. @@ -1119,7 +1115,7 @@ class BuildWorkfile: """ host_name = os.environ["AVALON_APP"] project_settings = get_project_settings( - avalon.io.Session["AVALON_PROJECT"] + legacy_io.Session["AVALON_PROJECT"] ) host_settings = project_settings.get(host_name) or {} @@ -1369,7 +1365,7 @@ class BuildWorkfile: "containers": containers } - @with_avalon + @with_pipeline_io def _load_containers( self, repres_by_subset_id, subsets_by_id, profiles_per_subset_id, loaders_by_name @@ -1495,7 +1491,7 @@ class BuildWorkfile: return loaded_containers - @with_avalon + @with_pipeline_io def _collect_last_version_repres(self, asset_entities): """Collect subsets, versions and representations for asset_entities. 
@@ -1534,13 +1530,13 @@ class BuildWorkfile: asset_entity_by_ids = {asset["_id"]: asset for asset in asset_entities} - subsets = list(avalon.io.find({ + subsets = list(legacy_io.find({ "type": "subset", "parent": {"$in": asset_entity_by_ids.keys()} })) subset_entity_by_ids = {subset["_id"]: subset for subset in subsets} - sorted_versions = list(avalon.io.find({ + sorted_versions = list(legacy_io.find({ "type": "version", "parent": {"$in": subset_entity_by_ids.keys()} }).sort("name", -1)) @@ -1554,7 +1550,7 @@ class BuildWorkfile: subset_id_with_latest_version.append(subset_id) last_versions_by_id[version["_id"]] = version - repres = avalon.io.find({ + repres = legacy_io.find({ "type": "representation", "parent": {"$in": last_versions_by_id.keys()} }) @@ -1592,7 +1588,7 @@ class BuildWorkfile: return output -@with_avalon +@with_pipeline_io def get_creator_by_name(creator_name, case_sensitive=False): """Find creator plugin by name. @@ -1622,7 +1618,7 @@ def get_creator_by_name(creator_name, case_sensitive=False): return None -@with_avalon +@with_pipeline_io def change_timer_to_current_context(): """Called after context change to change timers. @@ -1641,9 +1637,9 @@ def change_timer_to_current_context(): log.warning("Couldn't start timer") return data = { - "project_name": avalon.io.Session["AVALON_PROJECT"], - "asset_name": avalon.io.Session["AVALON_ASSET"], - "task_name": avalon.io.Session["AVALON_TASK"] + "project_name": legacy_io.Session["AVALON_PROJECT"], + "asset_name": legacy_io.Session["AVALON_ASSET"], + "task_name": legacy_io.Session["AVALON_TASK"] } requests.post(rest_api_url, json=data) @@ -1827,10 +1823,11 @@ def get_custom_workfile_template_by_string_context( ) +@with_pipeline_io def get_custom_workfile_template(template_profiles): """Filter and fill workfile template profiles by current context. - Current context is defined by `avalon.api.Session`. That's why this + Current context is defined by `legacy_io.Session`. That's why this function should be used only inside host where context is set and stable. Args: @@ -1840,15 +1837,13 @@ def get_custom_workfile_template(template_profiles): str: Path to template or None if none of profiles match current context. (Existence of formatted path is not validated.) 
""" - # Use `avalon.io` as Mongo connection - from avalon import io return get_custom_workfile_template_by_string_context( template_profiles, - io.Session["AVALON_PROJECT"], - io.Session["AVALON_ASSET"], - io.Session["AVALON_TASK"], - io + legacy_io.Session["AVALON_PROJECT"], + legacy_io.Session["AVALON_ASSET"], + legacy_io.Session["AVALON_TASK"], + legacy_io ) diff --git a/openpype/lib/plugin_tools.py b/openpype/lib/plugin_tools.py index 3f78407931..bcbf06a0e8 100644 --- a/openpype/lib/plugin_tools.py +++ b/openpype/lib/plugin_tools.py @@ -72,9 +72,9 @@ def get_subset_name_with_asset_doc( family = family.rsplit(".", 1)[-1] if project_name is None: - import avalon.api + from openpype.pipeline import legacy_io - project_name = avalon.api.Session["AVALON_PROJECT"] + project_name = legacy_io.Session["AVALON_PROJECT"] asset_tasks = asset_doc.get("data", {}).get("tasks") or {} task_info = asset_tasks.get(task_name) or {} diff --git a/openpype/lib/usdlib.py b/openpype/lib/usdlib.py index 7b3b7112de..86de19b4be 100644 --- a/openpype/lib/usdlib.py +++ b/openpype/lib/usdlib.py @@ -8,8 +8,10 @@ except ImportError: # Allow to fall back on Multiverse 6.3.0+ pxr usd library from mvpxr import Usd, UsdGeom, Sdf, Kind -from avalon import io, api -from openpype.pipeline import registered_root +from openpype.pipeline import ( + registered_root, + legacy_io, +) log = logging.getLogger(__name__) @@ -126,7 +128,7 @@ def create_model(filename, asset, variant_subsets): """ - asset_doc = io.find_one({"name": asset, "type": "asset"}) + asset_doc = legacy_io.find_one({"name": asset, "type": "asset"}) assert asset_doc, "Asset not found: %s" % asset variants = [] @@ -176,7 +178,7 @@ def create_shade(filename, asset, variant_subsets): """ - asset_doc = io.find_one({"name": asset, "type": "asset"}) + asset_doc = legacy_io.find_one({"name": asset, "type": "asset"}) assert asset_doc, "Asset not found: %s" % asset variants = [] @@ -211,7 +213,7 @@ def create_shade_variation(filename, asset, model_variant, shade_variants): """ - asset_doc = io.find_one({"name": asset, "type": "asset"}) + asset_doc = legacy_io.find_one({"name": asset, "type": "asset"}) assert asset_doc, "Asset not found: %s" % asset variants = [] @@ -311,7 +313,7 @@ def get_usd_master_path(asset, subset, representation): """ - project = io.find_one( + project = legacy_io.find_one( {"type": "project"}, projection={"config.template.publish": True} ) template = project["config"]["template"]["publish"] @@ -320,12 +322,12 @@ def get_usd_master_path(asset, subset, representation): # Allow explicitly passing asset document asset_doc = asset else: - asset_doc = io.find_one({"name": asset, "type": "asset"}) + asset_doc = legacy_io.find_one({"name": asset, "type": "asset"}) path = template.format( **{ "root": registered_root(), - "project": api.Session["AVALON_PROJECT"], + "project": legacy_io.Session["AVALON_PROJECT"], "asset": asset_doc["name"], "subset": subset, "representation": representation, From 213ab8a811bb800d62b514cbe4258f0813d1dac6 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 14 Apr 2022 12:32:24 +0200 Subject: [PATCH 169/357] replaced avalon imports in tools --- openpype/tools/adobe_webserver/app.py | 8 +- openpype/tools/creator/window.py | 36 ++---- openpype/tools/loader/app.py | 48 ++++---- openpype/tools/mayalookassigner/app.py | 12 +- openpype/tools/mayalookassigner/commands.py | 9 +- .../tools/mayalookassigner/vray_proxies.py | 37 ++++--- openpype/tools/sceneinventory/model.py | 16 +-- .../tools/sceneinventory/switch_dialog.py | 104 
+++++++++--------- openpype/tools/sceneinventory/view.py | 27 +++-- openpype/tools/sceneinventory/window.py | 15 +-- .../widgets/widget_components.py | 24 ++-- openpype/tools/texture_copy/app.py | 16 ++- openpype/tools/utils/host_tools.py | 15 ++- openpype/tools/workfiles/app.py | 15 +-- openpype/tools/workfiles/files_widget.py | 22 ++-- openpype/tools/workfiles/save_as_dialog.py | 15 +-- openpype/tools/workfiles/window.py | 23 ++-- 17 files changed, 232 insertions(+), 210 deletions(-) diff --git a/openpype/tools/adobe_webserver/app.py b/openpype/tools/adobe_webserver/app.py index b79d6c6c60..3911baf7ac 100644 --- a/openpype/tools/adobe_webserver/app.py +++ b/openpype/tools/adobe_webserver/app.py @@ -16,7 +16,7 @@ from wsrpc_aiohttp import ( WSRPCClient ) -from avalon import api +from openpype.pipeline import legacy_io log = logging.getLogger(__name__) @@ -80,9 +80,9 @@ class WebServerTool: loop=asyncio.get_event_loop()) await client.connect() - project = api.Session["AVALON_PROJECT"] - asset = api.Session["AVALON_ASSET"] - task = api.Session["AVALON_TASK"] + project = legacy_io.Session["AVALON_PROJECT"] + asset = legacy_io.Session["AVALON_ASSET"] + task = legacy_io.Session["AVALON_TASK"] log.info("Sending context change to {}-{}-{}".format(project, asset, task)) diff --git a/openpype/tools/creator/window.py b/openpype/tools/creator/window.py index 51cc66e715..e0c329fb78 100644 --- a/openpype/tools/creator/window.py +++ b/openpype/tools/creator/window.py @@ -4,16 +4,14 @@ import re from Qt import QtWidgets, QtCore -from avalon import api, io - from openpype import style from openpype.api import get_current_project_settings from openpype.tools.utils.lib import qt_app_context +from openpype.pipeline import legacy_io from openpype.pipeline.create import ( SUBSET_NAME_ALLOWED_SYMBOLS, legacy_create, CreatorError, - LegacyCreator, ) from .model import CreatorsModel @@ -220,7 +218,7 @@ class CreatorWindow(QtWidgets.QDialog): asset_doc = None if creator_plugin: # Get the asset from the database which match with the name - asset_doc = io.find_one( + asset_doc = legacy_io.find_one( {"name": asset_name, "type": "asset"}, projection={"_id": 1} ) @@ -237,9 +235,9 @@ class CreatorWindow(QtWidgets.QDialog): self._set_valid_state(False) return - project_name = io.Session["AVALON_PROJECT"] + project_name = legacy_io.Session["AVALON_PROJECT"] asset_id = asset_doc["_id"] - task_name = io.Session["AVALON_TASK"] + task_name = legacy_io.Session["AVALON_TASK"] # Calculate subset name with Creator plugin subset_name = creator_plugin.get_subset_name( @@ -271,7 +269,7 @@ class CreatorWindow(QtWidgets.QDialog): self._subset_name_input.setText(subset_name) # Get all subsets of the current asset - subset_docs = io.find( + subset_docs = legacy_io.find( { "type": "subset", "parent": asset_id @@ -372,7 +370,7 @@ class CreatorWindow(QtWidgets.QDialog): self.setStyleSheet(style.load_stylesheet()) def refresh(self): - self._asset_name_input.setText(io.Session["AVALON_ASSET"]) + self._asset_name_input.setText(legacy_io.Session["AVALON_ASSET"]) self._creators_model.reset() @@ -385,7 +383,7 @@ class CreatorWindow(QtWidgets.QDialog): ) current_index = None family = None - task_name = io.Session.get("AVALON_TASK", None) + task_name = legacy_io.Session.get("AVALON_TASK", None) lowered_task_name = task_name.lower() if task_name: for _family, _task_names in pype_project_setting.items(): @@ -471,7 +469,7 @@ class CreatorWindow(QtWidgets.QDialog): self._msg_timer.start() -def show(debug=False, parent=None): +def show(parent=None): 
"""Display asset creator GUI Arguments: @@ -488,24 +486,6 @@ def show(debug=False, parent=None): except (AttributeError, RuntimeError): pass - if debug: - from avalon import mock - for creator in mock.creators: - api.register_plugin(LegacyCreator, creator) - - import traceback - sys.excepthook = lambda typ, val, tb: traceback.print_last() - - io.install() - - any_project = next( - project for project in io.projects() - if project.get("active", True) is not False - ) - - api.Session["AVALON_PROJECT"] = any_project["name"] - module.project = any_project["name"] - with qt_app_context(): window = CreatorWindow(parent) window.refresh() diff --git a/openpype/tools/loader/app.py b/openpype/tools/loader/app.py index fad284d82b..bb589c199d 100644 --- a/openpype/tools/loader/app.py +++ b/openpype/tools/loader/app.py @@ -1,11 +1,14 @@ import sys +import traceback from Qt import QtWidgets, QtCore -from avalon import api, io from openpype import style from openpype.lib import register_event_callback -from openpype.pipeline import install_openpype_plugins +from openpype.pipeline import ( + install_openpype_plugins, + legacy_io, +) from openpype.tools.utils import ( lib, PlaceholderLineEdit @@ -36,14 +39,14 @@ class LoaderWindow(QtWidgets.QDialog): def __init__(self, parent=None): super(LoaderWindow, self).__init__(parent) title = "Asset Loader 2.1" - project_name = api.Session.get("AVALON_PROJECT") + project_name = legacy_io.Session.get("AVALON_PROJECT") if project_name: title += " - {}".format(project_name) self.setWindowTitle(title) # Groups config - self.groups_config = lib.GroupsConfig(io) - self.family_config_cache = lib.FamilyConfigCache(io) + self.groups_config = lib.GroupsConfig(legacy_io) + self.family_config_cache = lib.FamilyConfigCache(legacy_io) # Enable minimize and maximize for app window_flags = QtCore.Qt.Window @@ -60,13 +63,13 @@ class LoaderWindow(QtWidgets.QDialog): # Assets widget assets_widget = MultiSelectAssetsWidget( - io, parent=left_side_splitter + legacy_io, parent=left_side_splitter ) assets_widget.set_current_asset_btn_visibility(True) # Families widget families_filter_view = FamilyListView( - io, self.family_config_cache, left_side_splitter + legacy_io, self.family_config_cache, left_side_splitter ) left_side_splitter.addWidget(assets_widget) left_side_splitter.addWidget(families_filter_view) @@ -76,7 +79,7 @@ class LoaderWindow(QtWidgets.QDialog): # --- Middle part --- # Subsets widget subsets_widget = SubsetWidget( - io, + legacy_io, self.groups_config, self.family_config_cache, tool_name=self.tool_name, @@ -87,8 +90,12 @@ class LoaderWindow(QtWidgets.QDialog): thumb_ver_splitter = QtWidgets.QSplitter(main_splitter) thumb_ver_splitter.setOrientation(QtCore.Qt.Vertical) - thumbnail_widget = ThumbnailWidget(io, parent=thumb_ver_splitter) - version_info_widget = VersionWidget(io, parent=thumb_ver_splitter) + thumbnail_widget = ThumbnailWidget( + legacy_io, parent=thumb_ver_splitter + ) + version_info_widget = VersionWidget( + legacy_io, parent=thumb_ver_splitter + ) thumb_ver_splitter.addWidget(thumbnail_widget) thumb_ver_splitter.addWidget(version_info_widget) @@ -105,7 +112,7 @@ class LoaderWindow(QtWidgets.QDialog): repres_widget = None if sync_server_enabled: repres_widget = RepresentationWidget( - io, self.tool_name, parent=thumb_ver_splitter + legacy_io, self.tool_name, parent=thumb_ver_splitter ) thumb_ver_splitter.addWidget(repres_widget) @@ -259,13 +266,15 @@ class LoaderWindow(QtWidgets.QDialog): # Refresh families config self._families_filter_view.refresh() # 
Change to context asset on context change - self._assets_widget.select_asset_by_name(io.Session["AVALON_ASSET"]) + self._assets_widget.select_asset_by_name( + legacy_io.Session["AVALON_ASSET"] + ) def _refresh(self): """Load assets from database""" # Ensure a project is loaded - project = io.find_one({"type": "project"}, {"type": 1}) + project = legacy_io.find_one({"type": "project"}, {"type": 1}) assert project, "Project was not found! This is a bug" self._assets_widget.refresh() @@ -562,17 +571,16 @@ def show(debug=False, parent=None, use_context=False): module.window = None if debug: - import traceback sys.excepthook = lambda typ, val, tb: traceback.print_last() - io.install() + legacy_io.install() any_project = next( - project for project in io.projects() + project for project in legacy_io.projects() if project.get("active", True) is not False ) - api.Session["AVALON_PROJECT"] = any_project["name"] + legacy_io.Session["AVALON_PROJECT"] = any_project["name"] module.project = any_project["name"] with lib.qt_app_context(): @@ -580,7 +588,7 @@ def show(debug=False, parent=None, use_context=False): window.show() if use_context: - context = {"asset": api.Session["AVALON_ASSET"]} + context = {"asset": legacy_io.Session["AVALON_ASSET"]} window.set_context(context, refresh=True) else: window.refresh() @@ -604,10 +612,10 @@ def cli(args): print("Entering Project: %s" % project) - io.install() + legacy_io.install() # Store settings - api.Session["AVALON_PROJECT"] = project + legacy_io.Session["AVALON_PROJECT"] = project install_openpype_plugins(project) diff --git a/openpype/tools/mayalookassigner/app.py b/openpype/tools/mayalookassigner/app.py index 0e633a21e3..1b6cad77a8 100644 --- a/openpype/tools/mayalookassigner/app.py +++ b/openpype/tools/mayalookassigner/app.py @@ -4,8 +4,8 @@ import logging from Qt import QtWidgets, QtCore -from avalon import io from openpype import style +from openpype.pipeline import legacy_io from openpype.tools.utils.lib import qt_app_context from openpype.hosts.maya.api.lib import assign_look_by_version @@ -227,9 +227,13 @@ class MayaLookAssignerWindow(QtWidgets.QWidget): continue # Get the latest version of this asset's look subset - version = io.find_one({"type": "version", - "parent": assign_look["_id"]}, - sort=[("name", -1)]) + version = legacy_io.find_one( + { + "type": "version", + "parent": assign_look["_id"] + }, + sort=[("name", -1)] + ) subset_name = assign_look["name"] self.echo("{} Assigning {} to {}\t".format(prefix, diff --git a/openpype/tools/mayalookassigner/commands.py b/openpype/tools/mayalookassigner/commands.py index 8fd592d347..d41d8ca5a2 100644 --- a/openpype/tools/mayalookassigner/commands.py +++ b/openpype/tools/mayalookassigner/commands.py @@ -5,9 +5,8 @@ import os from bson.objectid import ObjectId import maya.cmds as cmds -from avalon import io - from openpype.pipeline import ( + legacy_io, remove_container, registered_host, ) @@ -161,8 +160,10 @@ def create_items_from_nodes(nodes): return asset_view_items for _id, id_nodes in id_hashes.items(): - asset = io.find_one({"_id": ObjectId(_id)}, - projection={"name": True}) + asset = legacy_io.find_one( + {"_id": ObjectId(_id)}, + projection={"name": True} + ) # Skip if asset id is not found if not asset: diff --git a/openpype/tools/mayalookassigner/vray_proxies.py b/openpype/tools/mayalookassigner/vray_proxies.py index c97664f3cb..3523b24bf3 100644 --- a/openpype/tools/mayalookassigner/vray_proxies.py +++ b/openpype/tools/mayalookassigner/vray_proxies.py @@ -11,9 +11,8 @@ from bson.objectid 
import ObjectId import alembic.Abc from maya import cmds -from avalon import io - from openpype.pipeline import ( + legacy_io, load_container, loaders_from_representation, discover_loader_plugins, @@ -158,9 +157,11 @@ def get_look_relationships(version_id): dict: Dictionary of relations. """ - json_representation = io.find_one({"type": "representation", - "parent": version_id, - "name": "json"}) + json_representation = legacy_io.find_one({ + "type": "representation", + "parent": version_id, + "name": "json" + }) # Load relationships shader_relation = get_representation_path(json_representation) @@ -184,9 +185,11 @@ def load_look(version_id): """ # Get representations of shader file and relationships - look_representation = io.find_one({"type": "representation", - "parent": version_id, - "name": "ma"}) + look_representation = legacy_io.find_one({ + "type": "representation", + "parent": version_id, + "name": "ma" + }) # See if representation is already loaded, if so reuse it. host = registered_host() @@ -232,15 +235,21 @@ def get_latest_version(asset_id, subset): RuntimeError: When subset or version doesn't exist. """ - subset = io.find_one({"name": subset, - "parent": ObjectId(asset_id), - "type": "subset"}) + subset = legacy_io.find_one({ + "name": subset, + "parent": ObjectId(asset_id), + "type": "subset" + }) if not subset: raise RuntimeError("Subset does not exist: %s" % subset) - version = io.find_one({"type": "version", - "parent": subset["_id"]}, - sort=[("name", -1)]) + version = legacy_io.find_one( + { + "type": "version", + "parent": subset["_id"] + }, + sort=[("name", -1)] + ) if not version: raise RuntimeError("Version does not exist.") diff --git a/openpype/tools/sceneinventory/model.py b/openpype/tools/sceneinventory/model.py index 2c47381751..8d72020c98 100644 --- a/openpype/tools/sceneinventory/model.py +++ b/openpype/tools/sceneinventory/model.py @@ -7,8 +7,8 @@ from Qt import QtCore, QtGui import qtawesome from bson.objectid import ObjectId -from avalon import io from openpype.pipeline import ( + legacy_io, schema, HeroVersionType, registered_host, @@ -55,7 +55,7 @@ class InventoryModel(TreeModel): if not self.sync_enabled: return - project_name = io.Session["AVALON_PROJECT"] + project_name = legacy_io.Session["AVALON_PROJECT"] active_site = sync_server.get_active_site(project_name) remote_site = sync_server.get_remote_site(project_name) @@ -304,32 +304,32 @@ class InventoryModel(TreeModel): for repre_id, group_dict in sorted(grouped.items()): group_items = group_dict["items"] # Get parenthood per group - representation = io.find_one({"_id": ObjectId(repre_id)}) + representation = legacy_io.find_one({"_id": ObjectId(repre_id)}) if not representation: not_found["representation"].append(group_items) not_found_ids.append(repre_id) continue - version = io.find_one({"_id": representation["parent"]}) + version = legacy_io.find_one({"_id": representation["parent"]}) if not version: not_found["version"].append(group_items) not_found_ids.append(repre_id) continue elif version["type"] == "hero_version": - _version = io.find_one({ + _version = legacy_io.find_one({ "_id": version["version_id"] }) version["name"] = HeroVersionType(_version["name"]) version["data"] = _version["data"] - subset = io.find_one({"_id": version["parent"]}) + subset = legacy_io.find_one({"_id": version["parent"]}) if not subset: not_found["subset"].append(group_items) not_found_ids.append(repre_id) continue - asset = io.find_one({"_id": subset["parent"]}) + asset = legacy_io.find_one({"_id": subset["parent"]}) 
if not asset: not_found["asset"].append(group_items) not_found_ids.append(repre_id) @@ -390,7 +390,7 @@ class InventoryModel(TreeModel): # Store the highest available version so the model can know # whether current version is currently up-to-date. - highest_version = io.find_one({ + highest_version = legacy_io.find_one({ "type": "version", "parent": version["parent"] }, sort=[("name", -1)]) diff --git a/openpype/tools/sceneinventory/switch_dialog.py b/openpype/tools/sceneinventory/switch_dialog.py index bb3e2615ac..b2d770330f 100644 --- a/openpype/tools/sceneinventory/switch_dialog.py +++ b/openpype/tools/sceneinventory/switch_dialog.py @@ -4,7 +4,7 @@ from Qt import QtWidgets, QtCore import qtawesome from bson.objectid import ObjectId -from avalon import io +from openpype.pipeline import legacy_io from openpype.pipeline.load import ( discover_loader_plugins, switch_container, @@ -151,7 +151,7 @@ class SwitchAssetDialog(QtWidgets.QDialog): repre_ids.add(ObjectId(item["representation"])) content_loaders.add(item["loader"]) - repres = list(io.find({ + repres = list(legacy_io.find({ "type": {"$in": ["representation", "archived_representation"]}, "_id": {"$in": list(repre_ids)} })) @@ -179,7 +179,7 @@ class SwitchAssetDialog(QtWidgets.QDialog): content_repres[repre_id] = repres_by_id[repre_id] version_ids.append(repre["parent"]) - versions = io.find({ + versions = legacy_io.find({ "type": {"$in": ["version", "hero_version"]}, "_id": {"$in": list(set(version_ids))} }) @@ -198,7 +198,7 @@ class SwitchAssetDialog(QtWidgets.QDialog): else: subset_ids.append(content_versions[version_id]["parent"]) - subsets = io.find({ + subsets = legacy_io.find({ "type": {"$in": ["subset", "archived_subset"]}, "_id": {"$in": subset_ids} }) @@ -220,7 +220,7 @@ class SwitchAssetDialog(QtWidgets.QDialog): asset_ids.append(subset["parent"]) content_subsets[subset_id] = subset - assets = io.find({ + assets = legacy_io.find({ "type": {"$in": ["asset", "archived_asset"]}, "_id": {"$in": list(asset_ids)} }) @@ -472,7 +472,7 @@ class SwitchAssetDialog(QtWidgets.QDialog): # Prepare asset document if asset is selected asset_doc = None if selected_asset: - asset_doc = io.find_one( + asset_doc = legacy_io.find_one( {"type": "asset", "name": selected_asset}, {"_id": True} ) @@ -523,7 +523,7 @@ class SwitchAssetDialog(QtWidgets.QDialog): def _get_current_output_repre_ids_xxx( self, asset_doc, selected_subset, selected_repre ): - subset_doc = io.find_one( + subset_doc = legacy_io.find_one( { "type": "subset", "name": selected_subset, @@ -537,7 +537,7 @@ class SwitchAssetDialog(QtWidgets.QDialog): if not version_doc: return [] - repre_docs = io.find( + repre_docs = legacy_io.find( { "type": "representation", "parent": version_doc["_id"], @@ -548,7 +548,7 @@ class SwitchAssetDialog(QtWidgets.QDialog): return [repre_doc["_id"] for repre_doc in repre_docs] def _get_current_output_repre_ids_xxo(self, asset_doc, selected_subset): - subset_doc = io.find_one( + subset_doc = legacy_io.find_one( { "type": "subset", "parent": asset_doc["_id"], @@ -563,7 +563,7 @@ class SwitchAssetDialog(QtWidgets.QDialog): for repre_doc in self.content_repres.values(): repre_names.add(repre_doc["name"]) - repre_docs = io.find( + repre_docs = legacy_io.find( { "type": "representation", "parent": subset_doc["_id"], @@ -578,7 +578,7 @@ class SwitchAssetDialog(QtWidgets.QDialog): for subset_doc in self.content_subsets.values(): susbet_names.add(subset_doc["name"]) - subset_docs = io.find( + subset_docs = legacy_io.find( { "type": "subset", "name": {"$in": 
list(susbet_names)}, @@ -587,7 +587,7 @@ class SwitchAssetDialog(QtWidgets.QDialog): {"_id": True} ) subset_ids = [subset_doc["_id"] for subset_doc in subset_docs] - repre_docs = io.find( + repre_docs = legacy_io.find( { "type": "representation", "parent": {"$in": subset_ids}, @@ -606,7 +606,7 @@ class SwitchAssetDialog(QtWidgets.QDialog): subset_name = subset_doc["name"] repres_by_subset_name[subset_name].add(repre_name) - subset_docs = list(io.find( + subset_docs = list(legacy_io.find( { "type": "subset", "parent": asset_doc["_id"], @@ -637,7 +637,7 @@ class SwitchAssetDialog(QtWidgets.QDialog): "parent": version_id, "name": {"$in": list(repre_names)} }) - repre_docs = io.find( + repre_docs = legacy_io.find( {"$or": repre_or_query}, {"_id": True} ) @@ -646,7 +646,7 @@ class SwitchAssetDialog(QtWidgets.QDialog): def _get_current_output_repre_ids_oxx( self, selected_subset, selected_repre ): - subset_docs = list(io.find({ + subset_docs = list(legacy_io.find({ "type": "subset", "parent": {"$in": list(self.content_assets.keys())}, "name": selected_subset @@ -657,7 +657,7 @@ class SwitchAssetDialog(QtWidgets.QDialog): last_version["_id"] for last_version in last_versions_by_subset_id.values() ] - repre_docs = io.find({ + repre_docs = legacy_io.find({ "type": "representation", "parent": {"$in": last_version_ids}, "name": selected_repre @@ -666,7 +666,7 @@ class SwitchAssetDialog(QtWidgets.QDialog): return [repre_doc["_id"] for repre_doc in repre_docs] def _get_current_output_repre_ids_oxo(self, selected_subset): - subset_docs = list(io.find( + subset_docs = list(legacy_io.find( { "type": "subset", "parent": {"$in": list(self.content_assets.keys())}, @@ -713,7 +713,7 @@ class SwitchAssetDialog(QtWidgets.QDialog): "parent": last_version_id, "name": {"$in": list(repre_names)} }) - repre_docs = io.find( + repre_docs = legacy_io.find( { "type": "representation", "$or": repre_or_query @@ -724,7 +724,7 @@ class SwitchAssetDialog(QtWidgets.QDialog): return [repre_doc["_id"] for repre_doc in repre_docs] def _get_current_output_repre_ids_oox(self, selected_repre): - repre_docs = io.find( + repre_docs = legacy_io.find( { "name": selected_repre, "parent": {"$in": list(self.content_versions.keys())} @@ -734,7 +734,7 @@ class SwitchAssetDialog(QtWidgets.QDialog): return [repre_doc["_id"] for repre_doc in repre_docs] def _get_asset_box_values(self): - asset_docs = io.find( + asset_docs = legacy_io.find( {"type": "asset"}, {"_id": 1, "name": 1} ) @@ -742,7 +742,7 @@ class SwitchAssetDialog(QtWidgets.QDialog): asset_doc["_id"]: asset_doc["name"] for asset_doc in asset_docs } - subsets = io.find( + subsets = legacy_io.find( { "type": "subset", "parent": {"$in": list(asset_names_by_id.keys())} @@ -762,12 +762,15 @@ class SwitchAssetDialog(QtWidgets.QDialog): def _get_subset_box_values(self): selected_asset = self._assets_box.get_valid_value() if selected_asset: - asset_doc = io.find_one({"type": "asset", "name": selected_asset}) + asset_doc = legacy_io.find_one({ + "type": "asset", + "name": selected_asset + }) asset_ids = [asset_doc["_id"]] else: asset_ids = list(self.content_assets.keys()) - subsets = io.find( + subsets = legacy_io.find( { "type": "subset", "parent": {"$in": asset_ids} @@ -804,7 +807,7 @@ class SwitchAssetDialog(QtWidgets.QDialog): # [ ] [ ] [?] 
if not selected_asset and not selected_subset: # Find all representations of selection's subsets - possible_repres = list(io.find( + possible_repres = list(legacy_io.find( { "type": "representation", "parent": {"$in": list(self.content_versions.keys())} @@ -833,11 +836,11 @@ class SwitchAssetDialog(QtWidgets.QDialog): # [x] [x] [?] if selected_asset and selected_subset: - asset_doc = io.find_one( + asset_doc = legacy_io.find_one( {"type": "asset", "name": selected_asset}, {"_id": 1} ) - subset_doc = io.find_one( + subset_doc = legacy_io.find_one( { "type": "subset", "name": selected_subset, @@ -848,7 +851,7 @@ class SwitchAssetDialog(QtWidgets.QDialog): subset_id = subset_doc["_id"] last_versions_by_subset_id = self.find_last_versions([subset_id]) version_doc = last_versions_by_subset_id.get(subset_id) - repre_docs = io.find( + repre_docs = legacy_io.find( { "type": "representation", "parent": version_doc["_id"] @@ -865,7 +868,7 @@ class SwitchAssetDialog(QtWidgets.QDialog): # [x] [ ] [?] # If asset only is selected if selected_asset: - asset_doc = io.find_one( + asset_doc = legacy_io.find_one( {"type": "asset", "name": selected_asset}, {"_id": 1} ) @@ -876,7 +879,7 @@ class SwitchAssetDialog(QtWidgets.QDialog): subset_names = set() for subset_doc in self.content_subsets.values(): subset_names.add(subset_doc["name"]) - subset_docs = io.find( + subset_docs = legacy_io.find( { "type": "subset", "parent": asset_doc["_id"], @@ -900,7 +903,7 @@ class SwitchAssetDialog(QtWidgets.QDialog): if not subset_id_by_version_id: return list() - repre_docs = list(io.find( + repre_docs = list(legacy_io.find( { "type": "representation", "parent": {"$in": list(subset_id_by_version_id.keys())} @@ -930,7 +933,7 @@ class SwitchAssetDialog(QtWidgets.QDialog): return list(available_repres) # [ ] [x] [?] - subset_docs = list(io.find( + subset_docs = list(legacy_io.find( { "type": "subset", "parent": {"$in": list(self.content_assets.keys())}, @@ -957,7 +960,7 @@ class SwitchAssetDialog(QtWidgets.QDialog): if not subset_id_by_version_id: return list() - repre_docs = list(io.find( + repre_docs = list(legacy_io.find( { "type": "representation", "parent": {"$in": list(subset_id_by_version_id.keys())} @@ -1013,11 +1016,11 @@ class SwitchAssetDialog(QtWidgets.QDialog): return # [x] [ ] [?] 
- asset_doc = io.find_one( + asset_doc = legacy_io.find_one( {"type": "asset", "name": selected_asset}, {"_id": 1} ) - subset_docs = io.find( + subset_docs = legacy_io.find( {"type": "subset", "parent": asset_doc["_id"]}, {"name": 1} ) @@ -1048,7 +1051,7 @@ class SwitchAssetDialog(QtWidgets.QDialog): }} ] last_versions_by_subset_id = dict() - for doc in io.aggregate(_pipeline): + for doc in legacy_io.aggregate(_pipeline): doc["parent"] = doc["_id"] doc["_id"] = doc.pop("_version_id") last_versions_by_subset_id[doc["parent"]] = doc @@ -1076,11 +1079,11 @@ class SwitchAssetDialog(QtWidgets.QDialog): # [x] [x] [ ] if selected_asset is not None and selected_subset is not None: - asset_doc = io.find_one( + asset_doc = legacy_io.find_one( {"type": "asset", "name": selected_asset}, {"_id": 1} ) - subset_doc = io.find_one( + subset_doc = legacy_io.find_one( { "type": "subset", "parent": asset_doc["_id"], @@ -1096,7 +1099,7 @@ class SwitchAssetDialog(QtWidgets.QDialog): validation_state.repre_ok = False return - repre_docs = io.find( + repre_docs = legacy_io.find( { "type": "representation", "parent": last_version["_id"] @@ -1116,11 +1119,11 @@ class SwitchAssetDialog(QtWidgets.QDialog): # [x] [ ] [ ] if selected_asset is not None: - asset_doc = io.find_one( + asset_doc = legacy_io.find_one( {"type": "asset", "name": selected_asset}, {"_id": 1} ) - subset_docs = list(io.find( + subset_docs = list(legacy_io.find( { "type": "subset", "parent": asset_doc["_id"] @@ -1142,7 +1145,7 @@ class SwitchAssetDialog(QtWidgets.QDialog): version_id = last_version["_id"] subset_id_by_version_id[version_id] = subset_id - repre_docs = io.find( + repre_docs = legacy_io.find( { "type": "representation", "parent": {"$in": list(subset_id_by_version_id.keys())} @@ -1173,7 +1176,7 @@ class SwitchAssetDialog(QtWidgets.QDialog): # [ ] [x] [ ] # Subset documents - subset_docs = io.find( + subset_docs = legacy_io.find( { "type": "subset", "parent": {"$in": list(self.content_assets.keys())}, @@ -1194,7 +1197,7 @@ class SwitchAssetDialog(QtWidgets.QDialog): version_id = last_version["_id"] subset_id_by_version_id[version_id] = subset_id - repre_docs = io.find( + repre_docs = legacy_io.find( { "type": "representation", "parent": {"$in": list(subset_id_by_version_id.keys())} @@ -1225,7 +1228,7 @@ class SwitchAssetDialog(QtWidgets.QDialog): def _on_current_asset(self): # Set initial asset as current. 
- asset_name = io.Session["AVALON_ASSET"] + asset_name = legacy_io.Session["AVALON_ASSET"] index = self._assets_box.findText( asset_name, QtCore.Qt.MatchFixedString ) @@ -1243,7 +1246,10 @@ class SwitchAssetDialog(QtWidgets.QDialog): selected_representation = self._representations_box.get_valid_value() if selected_asset: - asset_doc = io.find_one({"type": "asset", "name": selected_asset}) + asset_doc = legacy_io.find_one({ + "type": "asset", + "name": selected_asset + }) asset_docs_by_id = {asset_doc["_id"]: asset_doc} else: asset_docs_by_id = self.content_assets @@ -1262,7 +1268,7 @@ class SwitchAssetDialog(QtWidgets.QDialog): if selected_subset: subset_query["name"] = selected_subset - subset_docs = list(io.find(subset_query)) + subset_docs = list(legacy_io.find(subset_query)) subset_ids = [] subset_docs_by_parent_and_name = collections.defaultdict(dict) for subset in subset_docs: @@ -1272,12 +1278,12 @@ class SwitchAssetDialog(QtWidgets.QDialog): subset_docs_by_parent_and_name[parent_id][name] = subset # versions - version_docs = list(io.find({ + version_docs = list(legacy_io.find({ "type": "version", "parent": {"$in": subset_ids} }, sort=[("name", -1)])) - hero_version_docs = list(io.find({ + hero_version_docs = list(legacy_io.find({ "type": "hero_version", "parent": {"$in": subset_ids} })) @@ -1297,7 +1303,7 @@ class SwitchAssetDialog(QtWidgets.QDialog): parent_id = hero_version_doc["parent"] hero_version_docs_by_parent_id[parent_id] = hero_version_doc - repre_docs = io.find({ + repre_docs = legacy_io.find({ "type": "representation", "parent": {"$in": version_ids} }) diff --git a/openpype/tools/sceneinventory/view.py b/openpype/tools/sceneinventory/view.py index 2df6d00406..448e3f4e6f 100644 --- a/openpype/tools/sceneinventory/view.py +++ b/openpype/tools/sceneinventory/view.py @@ -6,10 +6,9 @@ from Qt import QtWidgets, QtCore import qtawesome from bson.objectid import ObjectId -from avalon import io - from openpype import style from openpype.pipeline import ( + legacy_io, HeroVersionType, update_container, remove_container, @@ -84,7 +83,7 @@ class SceneInventoryView(QtWidgets.QTreeView): if item_id not in repre_ids: repre_ids.append(item_id) - repre_docs = io.find( + repre_docs = legacy_io.find( { "type": "representation", "_id": {"$in": repre_ids} @@ -98,7 +97,7 @@ class SceneInventoryView(QtWidgets.QTreeView): if version_id not in version_ids: version_ids.append(version_id) - loaded_versions = io.find({ + loaded_versions = legacy_io.find({ "_id": {"$in": version_ids}, "type": {"$in": ["version", "hero_version"]} }) @@ -115,7 +114,7 @@ class SceneInventoryView(QtWidgets.QTreeView): if parent_id not in version_parents: version_parents.append(parent_id) - all_versions = io.find({ + all_versions = legacy_io.find({ "type": {"$in": ["hero_version", "version"]}, "parent": {"$in": version_parents} }) @@ -151,7 +150,7 @@ class SceneInventoryView(QtWidgets.QTreeView): if item_id not in repre_ids: repre_ids.append(item_id) - repre_docs = io.find( + repre_docs = legacy_io.find( { "type": "representation", "_id": {"$in": repre_ids} @@ -166,7 +165,7 @@ class SceneInventoryView(QtWidgets.QTreeView): version_id_by_repre_id[repre_doc["_id"]] = version_id if version_id not in version_ids: version_ids.append(version_id) - hero_versions = io.find( + hero_versions = legacy_io.find( { "_id": {"$in": version_ids}, "type": "hero_version" @@ -184,7 +183,7 @@ class SceneInventoryView(QtWidgets.QTreeView): if current_version_id == hero_version_id: version_id_by_repre_id[_repre_id] = version_id - version_docs 
= io.find( + version_docs = legacy_io.find( { "_id": {"$in": list(version_ids)}, "type": "version" @@ -367,11 +366,11 @@ class SceneInventoryView(QtWidgets.QTreeView): repre_ids (list) side (str): 'active_site'|'remote_site' """ - project_name = io.Session["AVALON_PROJECT"] + project_name = legacy_io.Session["AVALON_PROJECT"] active_site = self.sync_server.get_active_site(project_name) remote_site = self.sync_server.get_remote_site(project_name) - repre_docs = io.find({ + repre_docs = legacy_io.find({ "type": "representation", "_id": {"$in": repre_ids} }) @@ -661,12 +660,12 @@ class SceneInventoryView(QtWidgets.QTreeView): # Get available versions for active representation representation_id = ObjectId(active["representation"]) - representation = io.find_one({"_id": representation_id}) - version = io.find_one({ + representation = legacy_io.find_one({"_id": representation_id}) + version = legacy_io.find_one({ "_id": representation["parent"] }) - versions = list(io.find( + versions = list(legacy_io.find( { "parent": version["parent"], "type": "version" @@ -674,7 +673,7 @@ class SceneInventoryView(QtWidgets.QTreeView): sort=[("name", 1)] )) - hero_version = io.find_one({ + hero_version = legacy_io.find_one({ "parent": version["parent"], "type": "hero_version" }) diff --git a/openpype/tools/sceneinventory/window.py b/openpype/tools/sceneinventory/window.py index b40fbb69e4..054c2a2daa 100644 --- a/openpype/tools/sceneinventory/window.py +++ b/openpype/tools/sceneinventory/window.py @@ -3,8 +3,8 @@ import sys from Qt import QtWidgets, QtCore import qtawesome -from avalon import io, api +from openpype.pipeline import legacy_io from openpype import style from openpype.tools.utils.delegates import VersionDelegate from openpype.tools.utils.lib import ( @@ -72,7 +72,7 @@ class SceneInventoryWindow(QtWidgets.QDialog): control_layout.addWidget(refresh_button) # endregion control - family_config_cache = FamilyConfigCache(io) + family_config_cache = FamilyConfigCache(legacy_io) model = InventoryModel(family_config_cache) proxy = FilterProxyModel() @@ -91,7 +91,7 @@ class SceneInventoryWindow(QtWidgets.QDialog): view.setColumnWidth(4, 100) # namespace # apply delegates - version_delegate = VersionDelegate(io, self) + version_delegate = VersionDelegate(legacy_io, self) column = model.Columns.index("version") view.setItemDelegateForColumn(column, version_delegate) @@ -191,17 +191,18 @@ def show(root=None, debug=False, parent=None, items=None): pass if debug is True: - io.install() + legacy_io.install() if not os.environ.get("AVALON_PROJECT"): any_project = next( - project for project in io.projects() + project for project in legacy_io.projects() if project.get("active", True) is not False ) - api.Session["AVALON_PROJECT"] = any_project["name"] + project_name = any_project["name"] else: - api.Session["AVALON_PROJECT"] = os.environ.get("AVALON_PROJECT") + project_name = os.environ.get("AVALON_PROJECT") + legacy_io.Session["AVALON_PROJECT"] = project_name with qt_app_context(): window = SceneInventoryWindow(parent) diff --git a/openpype/tools/standalonepublish/widgets/widget_components.py b/openpype/tools/standalonepublish/widgets/widget_components.py index 4d7f94f825..fbafc7142a 100644 --- a/openpype/tools/standalonepublish/widgets/widget_components.py +++ b/openpype/tools/standalonepublish/widgets/widget_components.py @@ -5,16 +5,18 @@ import random import string from Qt import QtWidgets, QtCore -from . 
import DropDataFrame -from .constants import HOST_NAME -from avalon import io + from openpype.api import execute, Logger +from openpype.pipeline import legacy_io from openpype.lib import ( get_openpype_execute_args, apply_project_environments_value ) -log = Logger().get_logger("standalonepublisher") +from . import DropDataFrame +from .constants import HOST_NAME + +log = Logger.get_logger("standalonepublisher") class ComponentsWidget(QtWidgets.QWidget): @@ -152,18 +154,18 @@ def set_context(project, asset, task): :type asset: str ''' os.environ["AVALON_PROJECT"] = project - io.Session["AVALON_PROJECT"] = project + legacy_io.Session["AVALON_PROJECT"] = project os.environ["AVALON_ASSET"] = asset - io.Session["AVALON_ASSET"] = asset + legacy_io.Session["AVALON_ASSET"] = asset if not task: task = '' os.environ["AVALON_TASK"] = task - io.Session["AVALON_TASK"] = task + legacy_io.Session["AVALON_TASK"] = task - io.Session["current_dir"] = os.path.normpath(os.getcwd()) + legacy_io.Session["current_dir"] = os.path.normpath(os.getcwd()) os.environ["AVALON_APP"] = HOST_NAME - io.Session["AVALON_APP"] = HOST_NAME + legacy_io.Session["AVALON_APP"] = HOST_NAME def cli_publish(data, publish_paths, gui=True): @@ -171,7 +173,7 @@ def cli_publish(data, publish_paths, gui=True): os.path.dirname(os.path.dirname(__file__)), "publish.py" ) - io.install() + legacy_io.install() # Create hash name folder in temp chars = "".join([random.choice(string.ascii_letters) for i in range(15)]) @@ -203,6 +205,6 @@ def cli_publish(data, publish_paths, gui=True): log.info(f"Publish result: {result}") - io.uninstall() + legacy_io.uninstall() return False diff --git a/openpype/tools/texture_copy/app.py b/openpype/tools/texture_copy/app.py index 0c3c260e51..fd8d6dc02e 100644 --- a/openpype/tools/texture_copy/app.py +++ b/openpype/tools/texture_copy/app.py @@ -1,14 +1,12 @@ import os import re import click -from avalon import io, api -from pprint import pprint + +import speedcopy from openpype.lib import Terminal from openpype.api import Anatomy - -import shutil -import speedcopy +from openpype.pipeline import legacy_io t = Terminal() @@ -20,8 +18,8 @@ texture_extensions = ['.tif', '.tiff', '.jpg', '.jpeg', '.tx', '.png', '.tga', class TextureCopy: def __init__(self): - if not io.Session: - io.install() + if not legacy_io.Session: + legacy_io.install() def _get_textures(self, path): textures = [] @@ -32,14 +30,14 @@ class TextureCopy: return textures def _get_project(self, project_name): - project = io.find_one({ + project = legacy_io.find_one({ 'type': 'project', 'name': project_name }) return project def _get_asset(self, asset_name): - asset = io.find_one({ + asset = legacy_io.find_one({ 'type': 'asset', 'name': asset_name }) diff --git a/openpype/tools/utils/host_tools.py b/openpype/tools/utils/host_tools.py index b0c30f6dfb..d8f4570120 100644 --- a/openpype/tools/utils/host_tools.py +++ b/openpype/tools/utils/host_tools.py @@ -4,9 +4,14 @@ It is possible to create `HostToolsHelper` in host implementation or use singleton approach with global functions (using helper anyway). 
""" import os -import avalon.api + import pyblish.api -from openpype.pipeline import registered_host + +from openpype.pipeline import ( + registered_host, + legacy_io, +) + from .lib import qt_app_context @@ -73,8 +78,8 @@ class HostToolsHelper: if use_context: context = { - "asset": avalon.api.Session["AVALON_ASSET"], - "task": avalon.api.Session["AVALON_TASK"] + "asset": legacy_io.Session["AVALON_ASSET"], + "task": legacy_io.Session["AVALON_TASK"] } workfiles_tool.set_context(context) @@ -105,7 +110,7 @@ class HostToolsHelper: use_context = False if use_context: - context = {"asset": avalon.api.Session["AVALON_ASSET"]} + context = {"asset": legacy_io.Session["AVALON_ASSET"]} loader_tool.set_context(context, refresh=True) else: loader_tool.refresh() diff --git a/openpype/tools/workfiles/app.py b/openpype/tools/workfiles/app.py index 38e1911060..352847ede8 100644 --- a/openpype/tools/workfiles/app.py +++ b/openpype/tools/workfiles/app.py @@ -1,9 +1,10 @@ import sys import logging -from avalon import api - -from openpype.pipeline import registered_host +from openpype.pipeline import ( + registered_host, + legacy_io, +) from openpype.tools.utils import qt_app_context from .window import Window @@ -52,8 +53,8 @@ def show(root=None, debug=False, parent=None, use_context=True, save=True): validate_host_requirements(host) if debug: - api.Session["AVALON_ASSET"] = "Mock" - api.Session["AVALON_TASK"] = "Testing" + legacy_io.Session["AVALON_ASSET"] = "Mock" + legacy_io.Session["AVALON_TASK"] = "Testing" with qt_app_context(): window = Window(parent=parent) @@ -61,8 +62,8 @@ def show(root=None, debug=False, parent=None, use_context=True, save=True): if use_context: context = { - "asset": api.Session["AVALON_ASSET"], - "task": api.Session["AVALON_TASK"] + "asset": legacy_io.Session["AVALON_ASSET"], + "task": legacy_io.Session["AVALON_TASK"] } window.set_context(context) diff --git a/openpype/tools/workfiles/files_widget.py b/openpype/tools/workfiles/files_widget.py index bb2ded3b94..977111b71b 100644 --- a/openpype/tools/workfiles/files_widget.py +++ b/openpype/tools/workfiles/files_widget.py @@ -4,7 +4,6 @@ import shutil import Qt from Qt import QtWidgets, QtCore -from avalon import io, api from openpype.tools.utils import PlaceholderLineEdit from openpype.tools.utils.delegates import PrettyTimeDelegate @@ -18,7 +17,10 @@ from openpype.lib.avalon_context import ( update_current_task, compute_session_changes ) -from openpype.pipeline import registered_host +from openpype.pipeline import ( + registered_host, + legacy_io, +) from .model import ( WorkAreaFilesModel, PublishFilesModel, @@ -87,7 +89,7 @@ class FilesWidget(QtWidgets.QWidget): self._task_type = None # Pype's anatomy object for current project - self.anatomy = Anatomy(io.Session["AVALON_PROJECT"]) + self.anatomy = Anatomy(legacy_io.Session["AVALON_PROJECT"]) # Template key used to get work template from anatomy templates self.template_key = "work" @@ -147,7 +149,9 @@ class FilesWidget(QtWidgets.QWidget): workarea_files_view.setColumnWidth(0, 330) # --- Publish files view --- - publish_files_model = PublishFilesModel(extensions, io, self.anatomy) + publish_files_model = PublishFilesModel( + extensions, legacy_io, self.anatomy + ) publish_proxy_model = QtCore.QSortFilterProxyModel() publish_proxy_model.setSourceModel(publish_files_model) @@ -380,13 +384,13 @@ class FilesWidget(QtWidgets.QWidget): return None if self._asset_doc is None: - self._asset_doc = io.find_one({"_id": self._asset_id}) + self._asset_doc = legacy_io.find_one({"_id": 
self._asset_id}) return self._asset_doc def _get_session(self): """Return a modified session for the current asset and task""" - session = api.Session.copy() + session = legacy_io.Session.copy() self.template_key = get_workfile_template_key( self._task_type, session["AVALON_APP"], @@ -405,7 +409,7 @@ class FilesWidget(QtWidgets.QWidget): def _enter_session(self): """Enter the asset and task session currently selected""" - session = api.Session.copy() + session = legacy_io.Session.copy() changes = compute_session_changes( session, asset=self._get_asset_doc(), @@ -595,10 +599,10 @@ class FilesWidget(QtWidgets.QWidget): # Create extra folders create_workdir_extra_folders( self._workdir_path, - api.Session["AVALON_APP"], + legacy_io.Session["AVALON_APP"], self._task_type, self._task_name, - api.Session["AVALON_PROJECT"] + legacy_io.Session["AVALON_PROJECT"] ) # Trigger after save events emit_event( diff --git a/openpype/tools/workfiles/save_as_dialog.py b/openpype/tools/workfiles/save_as_dialog.py index 0a7c7821ba..3e97d6c938 100644 --- a/openpype/tools/workfiles/save_as_dialog.py +++ b/openpype/tools/workfiles/save_as_dialog.py @@ -5,13 +5,14 @@ import logging from Qt import QtWidgets, QtCore -from avalon import api, io - from openpype.lib import ( get_last_workfile_with_version, get_workdir_data, ) -from openpype.pipeline import registered_host +from openpype.pipeline import ( + registered_host, + legacy_io, +) from openpype.tools.utils import PlaceholderLineEdit log = logging.getLogger(__name__) @@ -24,7 +25,7 @@ def build_workfile_data(session): asset_name = session["AVALON_ASSET"] task_name = session["AVALON_TASK"] host_name = session["AVALON_APP"] - project_doc = io.find_one( + project_doc = legacy_io.find_one( {"type": "project"}, { "name": True, @@ -33,7 +34,7 @@ def build_workfile_data(session): } ) - asset_doc = io.find_one( + asset_doc = legacy_io.find_one( { "type": "asset", "name": asset_name @@ -208,7 +209,7 @@ class SaveAsDialog(QtWidgets.QDialog): if not session: # Fallback to active session - session = api.Session + session = legacy_io.Session self.data = build_workfile_data(session) @@ -283,7 +284,7 @@ class SaveAsDialog(QtWidgets.QDialog): if current_filepath: # We match the current filename against the current session # instead of the session where the user is saving to. 
- current_data = build_workfile_data(api.Session) + current_data = build_workfile_data(legacy_io.Session) matcher = CommentMatcher(anatomy, template_key, current_data) comment = matcher.parse_comment(current_filepath) if comment: diff --git a/openpype/tools/workfiles/window.py b/openpype/tools/workfiles/window.py index 73e63d30b5..02a22af26c 100644 --- a/openpype/tools/workfiles/window.py +++ b/openpype/tools/workfiles/window.py @@ -2,14 +2,13 @@ import os import datetime from Qt import QtCore, QtWidgets -from avalon import io - from openpype import style from openpype.lib import ( get_workfile_doc, create_workfile_doc, save_workfile_data_to_doc, ) +from openpype.pipeline import legacy_io from openpype.tools.utils.assets_widget import SingleSelectAssetsWidget from openpype.tools.utils.tasks_widget import TasksWidget @@ -158,10 +157,12 @@ class Window(QtWidgets.QMainWindow): home_page_widget = QtWidgets.QWidget(pages_widget) home_body_widget = QtWidgets.QWidget(home_page_widget) - assets_widget = SingleSelectAssetsWidget(io, parent=home_body_widget) + assets_widget = SingleSelectAssetsWidget( + legacy_io, parent=home_body_widget + ) assets_widget.set_current_asset_btn_visibility(True) - tasks_widget = TasksWidget(io, home_body_widget) + tasks_widget = TasksWidget(legacy_io, home_body_widget) files_widget = FilesWidget(home_body_widget) side_panel = SidePanelWidget(home_body_widget) @@ -250,7 +251,7 @@ class Window(QtWidgets.QMainWindow): if asset_id and task_name and filepath: filename = os.path.split(filepath)[1] workfile_doc = get_workfile_doc( - asset_id, task_name, filename, io + asset_id, task_name, filename, legacy_io ) self.side_panel.set_context( asset_id, task_name, filepath, workfile_doc @@ -272,7 +273,7 @@ class Window(QtWidgets.QMainWindow): self._create_workfile_doc(filepath, force=True) workfile_doc = self._get_current_workfile_doc() - save_workfile_data_to_doc(workfile_doc, data, io) + save_workfile_data_to_doc(workfile_doc, data, legacy_io) def _get_current_workfile_doc(self, filepath=None): if filepath is None: @@ -284,7 +285,7 @@ class Window(QtWidgets.QMainWindow): filename = os.path.split(filepath)[1] return get_workfile_doc( - asset_id, task_name, filename, io + asset_id, task_name, filename, legacy_io ) def _create_workfile_doc(self, filepath, force=False): @@ -295,9 +296,11 @@ class Window(QtWidgets.QMainWindow): if not workfile_doc: workdir, filename = os.path.split(filepath) asset_id = self.assets_widget.get_selected_asset_id() - asset_doc = io.find_one({"_id": asset_id}) + asset_doc = legacy_io.find_one({"_id": asset_id}) task_name = self.tasks_widget.get_selected_task_name() - create_workfile_doc(asset_doc, task_name, filename, workdir, io) + create_workfile_doc( + asset_doc, task_name, filename, workdir, legacy_io + ) def refresh(self): # Refresh asset widget @@ -319,7 +322,7 @@ class Window(QtWidgets.QMainWindow): self._context_to_set, context = None, self._context_to_set if "asset" in context: - asset_doc = io.find_one( + asset_doc = legacy_io.find_one( { "name": context["asset"], "type": "asset" From 2f02e0399589e35dd3434450f8e068016daedad6 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 14 Apr 2022 12:34:03 +0200 Subject: [PATCH 170/357] replace avalon imports in global plugins --- openpype/plugins/publish/cleanup_farm.py | 5 ++- .../publish/collect_anatomy_context_data.py | 7 ++-- .../publish/collect_anatomy_instance_data.py | 9 +++-- .../publish/collect_avalon_entities.py | 15 +++---- .../publish/collect_from_create_context.py | 5 ++- 
openpype/plugins/publish/collect_hierarchy.py | 5 ++- .../plugins/publish/collect_rendered_files.py | 5 ++- .../plugins/publish/collect_resources_path.py | 5 ++- .../publish/collect_scene_loaded_versions.py | 12 ++++-- .../publish/extract_hierarchy_avalon.py | 25 ++++++------ .../plugins/publish/integrate_hero_version.py | 21 +++++----- .../plugins/publish/integrate_inputlinks.py | 8 ++-- openpype/plugins/publish/integrate_new.py | 39 ++++++++++--------- .../plugins/publish/integrate_thumbnail.py | 14 +++---- .../publish/validate_editorial_asset_name.py | 12 +++--- 15 files changed, 106 insertions(+), 81 deletions(-) diff --git a/openpype/plugins/publish/cleanup_farm.py b/openpype/plugins/publish/cleanup_farm.py index ab0c6e469e..2c6c1625bb 100644 --- a/openpype/plugins/publish/cleanup_farm.py +++ b/openpype/plugins/publish/cleanup_farm.py @@ -3,7 +3,8 @@ import os import shutil import pyblish.api -import avalon.api + +from openpype.pipeline import legacy_io class CleanUpFarm(pyblish.api.ContextPlugin): @@ -22,7 +23,7 @@ class CleanUpFarm(pyblish.api.ContextPlugin): def process(self, context): # Get source host from which farm publishing was started - src_host_name = avalon.api.Session.get("AVALON_APP") + src_host_name = legacy_io.Session.get("AVALON_APP") self.log.debug("Host name from session is {}".format(src_host_name)) # Skip process if is not in list of source hosts in which this # plugin should run diff --git a/openpype/plugins/publish/collect_anatomy_context_data.py b/openpype/plugins/publish/collect_anatomy_context_data.py index bd8d9e50c4..0794adfb67 100644 --- a/openpype/plugins/publish/collect_anatomy_context_data.py +++ b/openpype/plugins/publish/collect_anatomy_context_data.py @@ -13,11 +13,12 @@ Provides: """ import json +import pyblish.api + from openpype.lib import ( get_system_general_anatomy_data ) -from avalon import api -import pyblish.api +from openpype.pipeline import legacy_io class CollectAnatomyContextData(pyblish.api.ContextPlugin): @@ -65,7 +66,7 @@ class CollectAnatomyContextData(pyblish.api.ContextPlugin): asset_entity = context.data.get("assetEntity") if asset_entity: - task_name = api.Session["AVALON_TASK"] + task_name = legacy_io.Session["AVALON_TASK"] asset_tasks = asset_entity["data"]["tasks"] task_type = asset_tasks.get(task_name, {}).get("type") diff --git a/openpype/plugins/publish/collect_anatomy_instance_data.py b/openpype/plugins/publish/collect_anatomy_instance_data.py index 42836e796b..6a6ea170b5 100644 --- a/openpype/plugins/publish/collect_anatomy_instance_data.py +++ b/openpype/plugins/publish/collect_anatomy_instance_data.py @@ -25,9 +25,10 @@ import copy import json import collections -from avalon import io import pyblish.api +from openpype.pipeline import legacy_io + class CollectAnatomyInstanceData(pyblish.api.ContextPlugin): """Collect Instance specific Anatomy data. 
@@ -83,7 +84,7 @@ class CollectAnatomyInstanceData(pyblish.api.ContextPlugin): self.log.debug("Querying asset documents with names: {}".format( ", ".join(["\"{}\"".format(name) for name in asset_names]) )) - asset_docs = io.find({ + asset_docs = legacy_io.find({ "type": "asset", "name": {"$in": asset_names} }) @@ -153,7 +154,7 @@ class CollectAnatomyInstanceData(pyblish.api.ContextPlugin): subset_docs = [] if subset_filters: - subset_docs = list(io.find({ + subset_docs = list(legacy_io.find({ "type": "subset", "$or": subset_filters })) @@ -202,7 +203,7 @@ class CollectAnatomyInstanceData(pyblish.api.ContextPlugin): ] last_version_by_subset_id = {} - for doc in io.aggregate(_pipeline): + for doc in legacy_io.aggregate(_pipeline): subset_id = doc["_id"] last_version_by_subset_id[subset_id] = doc["name"] diff --git a/openpype/plugins/publish/collect_avalon_entities.py b/openpype/plugins/publish/collect_avalon_entities.py index c099a2cf75..3e7843407f 100644 --- a/openpype/plugins/publish/collect_avalon_entities.py +++ b/openpype/plugins/publish/collect_avalon_entities.py @@ -8,9 +8,10 @@ Provides: context -> assetEntity - asset entity from database """ -from avalon import io, api import pyblish.api +from openpype.pipeline import legacy_io + class CollectAvalonEntities(pyblish.api.ContextPlugin): """Collect Anatomy into Context""" @@ -19,12 +20,12 @@ class CollectAvalonEntities(pyblish.api.ContextPlugin): label = "Collect Avalon Entities" def process(self, context): - io.install() - project_name = api.Session["AVALON_PROJECT"] - asset_name = api.Session["AVALON_ASSET"] - task_name = api.Session["AVALON_TASK"] + legacy_io.install() + project_name = legacy_io.Session["AVALON_PROJECT"] + asset_name = legacy_io.Session["AVALON_ASSET"] + task_name = legacy_io.Session["AVALON_TASK"] - project_entity = io.find_one({ + project_entity = legacy_io.find_one({ "type": "project", "name": project_name }) @@ -38,7 +39,7 @@ class CollectAvalonEntities(pyblish.api.ContextPlugin): if not asset_name: self.log.info("Context is not set. 
Can't collect global data.") return - asset_entity = io.find_one({ + asset_entity = legacy_io.find_one({ "type": "asset", "name": asset_name, "parent": project_entity["_id"] diff --git a/openpype/plugins/publish/collect_from_create_context.py b/openpype/plugins/publish/collect_from_create_context.py index 16e3f669c3..b2f757f108 100644 --- a/openpype/plugins/publish/collect_from_create_context.py +++ b/openpype/plugins/publish/collect_from_create_context.py @@ -3,7 +3,8 @@ """ import os import pyblish.api -import avalon.api + +from openpype.pipeline import legacy_io class CollectFromCreateContext(pyblish.api.ContextPlugin): @@ -30,7 +31,7 @@ class CollectFromCreateContext(pyblish.api.ContextPlugin): for key in ("AVALON_PROJECT", "AVALON_ASSET", "AVALON_TASK"): value = create_context.dbcon.Session.get(key) if value is not None: - avalon.api.Session[key] = value + legacy_io.Session[key] = value os.environ[key] = value def create_instance(self, context, in_data): diff --git a/openpype/plugins/publish/collect_hierarchy.py b/openpype/plugins/publish/collect_hierarchy.py index efb40407d9..4e94acce4a 100644 --- a/openpype/plugins/publish/collect_hierarchy.py +++ b/openpype/plugins/publish/collect_hierarchy.py @@ -1,5 +1,6 @@ import pyblish.api -import avalon.api as avalon + +from openpype.pipeline import legacy_io class CollectHierarchy(pyblish.api.ContextPlugin): @@ -19,7 +20,7 @@ class CollectHierarchy(pyblish.api.ContextPlugin): def process(self, context): temp_context = {} - project_name = avalon.Session["AVALON_PROJECT"] + project_name = legacy_io.Session["AVALON_PROJECT"] final_context = {} final_context[project_name] = {} final_context[project_name]['entity_type'] = 'Project' diff --git a/openpype/plugins/publish/collect_rendered_files.py b/openpype/plugins/publish/collect_rendered_files.py index 1005c38b9d..670e57ed10 100644 --- a/openpype/plugins/publish/collect_rendered_files.py +++ b/openpype/plugins/publish/collect_rendered_files.py @@ -11,7 +11,8 @@ import os import json import pyblish.api -from avalon import api + +from openpype.pipeline import legacy_io class CollectRenderedFiles(pyblish.api.ContextPlugin): @@ -150,7 +151,7 @@ class CollectRenderedFiles(pyblish.api.ContextPlugin): session_data["AVALON_WORKDIR"] = remapped self.log.info("Setting session using data from file") - api.Session.update(session_data) + legacy_io.Session.update(session_data) os.environ.update(session_data) session_is_set = True self._process_path(data, anatomy) diff --git a/openpype/plugins/publish/collect_resources_path.py b/openpype/plugins/publish/collect_resources_path.py index 1f509365c7..89df031fb0 100644 --- a/openpype/plugins/publish/collect_resources_path.py +++ b/openpype/plugins/publish/collect_resources_path.py @@ -12,7 +12,8 @@ import os import copy import pyblish.api -from avalon import api + +from openpype.pipeline import legacy_io class CollectResourcesPath(pyblish.api.InstancePlugin): @@ -84,7 +85,7 @@ class CollectResourcesPath(pyblish.api.InstancePlugin): else: # solve deprecated situation when `folder` key is not underneath # `publish` anatomy - project_name = api.Session["AVALON_PROJECT"] + project_name = legacy_io.Session["AVALON_PROJECT"] self.log.warning(( "Deprecation warning: Anatomy does not have set `folder`" " key underneath `publish` (in global of for project `{}`)." 
diff --git a/openpype/plugins/publish/collect_scene_loaded_versions.py b/openpype/plugins/publish/collect_scene_loaded_versions.py index e54592abb8..f2ade1ac28 100644 --- a/openpype/plugins/publish/collect_scene_loaded_versions.py +++ b/openpype/plugins/publish/collect_scene_loaded_versions.py @@ -1,8 +1,11 @@ from bson.objectid import ObjectId import pyblish.api -from avalon import io -from openpype.pipeline import registered_host + +from openpype.pipeline import ( + registered_host, + legacy_io, +) class CollectSceneLoadedVersions(pyblish.api.ContextPlugin): @@ -40,7 +43,10 @@ class CollectSceneLoadedVersions(pyblish.api.ContextPlugin): _repr_ids = [ObjectId(c["representation"]) for c in _containers] version_by_repr = { str(doc["_id"]): doc["parent"] for doc in - io.find({"_id": {"$in": _repr_ids}}, projection={"parent": 1}) + legacy_io.find( + {"_id": {"$in": _repr_ids}}, + projection={"parent": 1} + ) } for con in _containers: diff --git a/openpype/plugins/publish/extract_hierarchy_avalon.py b/openpype/plugins/publish/extract_hierarchy_avalon.py index b062a9c4b5..2f528d4469 100644 --- a/openpype/plugins/publish/extract_hierarchy_avalon.py +++ b/openpype/plugins/publish/extract_hierarchy_avalon.py @@ -1,7 +1,10 @@ -import pyblish.api -from avalon import io from copy import deepcopy +import pyblish.api + +from openpype.pipeline import legacy_io + + class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin): """Create entities in Avalon based on collected data.""" @@ -16,8 +19,8 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin): return hierarchy_context = deepcopy(context.data["hierarchyContext"]) - if not io.Session: - io.install() + if not legacy_io.Session: + legacy_io.install() active_assets = [] # filter only the active publishing insatnces @@ -78,7 +81,7 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin): update_data = True # Process project if entity_type.lower() == "project": - entity = io.find_one({"type": "project"}) + entity = legacy_io.find_one({"type": "project"}) # TODO: should be in validator? assert (entity is not None), "Did not find project in DB" @@ -95,7 +98,7 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin): ) # Else process assset else: - entity = io.find_one({"type": "asset", "name": name}) + entity = legacy_io.find_one({"type": "asset", "name": name}) if entity: # Do not override data, only update cur_entity_data = entity.get("data") or {} @@ -119,7 +122,7 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin): # Skip updating data update_data = False - archived_entities = io.find({ + archived_entities = legacy_io.find({ "type": "archived_asset", "name": name }) @@ -143,7 +146,7 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin): if update_data: # Update entity data with input data - io.update_many( + legacy_io.update_many( {"_id": entity["_id"]}, {"$set": {"data": data}} ) @@ -161,7 +164,7 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin): "type": "asset", "data": data } - io.replace_one( + legacy_io.replace_one( {"_id": entity["_id"]}, new_entity ) @@ -176,9 +179,9 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin): "data": data } self.log.debug("Creating asset: {}".format(item)) - entity_id = io.insert_one(item).inserted_id + entity_id = legacy_io.insert_one(item).inserted_id - return io.find_one({"_id": entity_id}) + return legacy_io.find_one({"_id": entity_id}) def _get_assets(self, input_dict): """ Returns only asset dictionary. 
diff --git a/openpype/plugins/publish/integrate_hero_version.py b/openpype/plugins/publish/integrate_hero_version.py index 76720fc9a3..a706b653c4 100644 --- a/openpype/plugins/publish/integrate_hero_version.py +++ b/openpype/plugins/publish/integrate_hero_version.py @@ -8,12 +8,14 @@ from bson.objectid import ObjectId from pymongo import InsertOne, ReplaceOne import pyblish.api -from avalon import api, io from openpype.lib import ( create_hard_link, filter_profiles ) -from openpype.pipeline import schema +from openpype.pipeline import ( + schema, + legacy_io, +) class IntegrateHeroVersion(pyblish.api.InstancePlugin): @@ -63,7 +65,7 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin): template_key = self._get_template_key(instance) anatomy = instance.context.data["anatomy"] - project_name = api.Session["AVALON_PROJECT"] + project_name = legacy_io.Session["AVALON_PROJECT"] if template_key not in anatomy.templates: self.log.warning(( "!!! Anatomy of project \"{}\" does not have set" @@ -221,7 +223,7 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin): if old_repres_by_name: old_repres_to_delete = old_repres_by_name - archived_repres = list(io.find({ + archived_repres = list(legacy_io.find({ # Check what is type of archived representation "type": "archived_repsentation", "parent": new_version_id @@ -442,7 +444,8 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin): ) if bulk_writes: - io._database[io.Session["AVALON_PROJECT"]].bulk_write( + project_name = legacy_io.Session["AVALON_PROJECT"] + legacy_io.database[project_name].bulk_write( bulk_writes ) @@ -504,7 +507,7 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin): anatomy_filled = anatomy.format(template_data) # solve deprecated situation when `folder` key is not underneath # `publish` anatomy - project_name = api.Session["AVALON_PROJECT"] + project_name = legacy_io.Session["AVALON_PROJECT"] self.log.warning(( "Deprecation warning: Anatomy does not have set `folder`" " key underneath `publish` (in global of for project `{}`)." 
@@ -585,12 +588,12 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin): def version_from_representations(self, repres): for repre in repres: - version = io.find_one({"_id": repre["parent"]}) + version = legacy_io.find_one({"_id": repre["parent"]}) if version: return version def current_hero_ents(self, version): - hero_version = io.find_one({ + hero_version = legacy_io.find_one({ "parent": version["parent"], "type": "hero_version" }) @@ -598,7 +601,7 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin): if not hero_version: return (None, []) - hero_repres = list(io.find({ + hero_repres = list(legacy_io.find({ "parent": hero_version["_id"], "type": "representation" })) diff --git a/openpype/plugins/publish/integrate_inputlinks.py b/openpype/plugins/publish/integrate_inputlinks.py index 11cffc4638..6964f2d938 100644 --- a/openpype/plugins/publish/integrate_inputlinks.py +++ b/openpype/plugins/publish/integrate_inputlinks.py @@ -3,7 +3,7 @@ from collections import OrderedDict from bson.objectid import ObjectId import pyblish.api -from avalon import io +from openpype.pipeline import legacy_io class IntegrateInputLinks(pyblish.api.ContextPlugin): @@ -129,5 +129,7 @@ class IntegrateInputLinks(pyblish.api.ContextPlugin): if input_links is None: continue - io.update_one({"_id": version_doc["_id"]}, - {"$set": {"data.inputLinks": input_links}}) + legacy_io.update_one( + {"_id": version_doc["_id"]}, + {"$set": {"data.inputLinks": input_links}} + ) diff --git a/openpype/plugins/publish/integrate_new.py b/openpype/plugins/publish/integrate_new.py index 5dcbb8fabd..891d47f471 100644 --- a/openpype/plugins/publish/integrate_new.py +++ b/openpype/plugins/publish/integrate_new.py @@ -9,14 +9,13 @@ import six import re import shutil from collections import deque, defaultdict +from datetime import datetime from bson.objectid import ObjectId from pymongo import DeleteOne, InsertOne import pyblish.api -from avalon import io + import openpype.api -from datetime import datetime -# from pype.modules import ModulesManager from openpype.lib.profiles_filtering import filter_profiles from openpype.lib import ( prepare_template_data, @@ -24,6 +23,7 @@ from openpype.lib import ( StringTemplate, TemplateUnsolved ) +from openpype.pipeline import legacy_io # this is needed until speedcopy for linux is fixed if sys.platform == "win32": @@ -152,7 +152,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): # Required environment variables anatomy_data = instance.data["anatomyData"] - io.install() + legacy_io.install() context = instance.context @@ -166,7 +166,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): asset_name = instance.data["asset"] asset_entity = instance.data.get("assetEntity") if not asset_entity or asset_entity["name"] != context_asset_name: - asset_entity = io.find_one({ + asset_entity = legacy_io.find_one({ "type": "asset", "name": asset_name, "parent": project_entity["_id"] @@ -259,14 +259,14 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): new_repre_names_low = [_repre["name"].lower() for _repre in repres] - existing_version = io.find_one({ + existing_version = legacy_io.find_one({ 'type': 'version', 'parent': subset["_id"], 'name': version_number }) if existing_version is None: - version_id = io.insert_one(version).inserted_id + version_id = legacy_io.insert_one(version).inserted_id else: # Check if instance have set `append` mode which cause that # only replicated representations are set to archive @@ -274,7 +274,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): # 
Update version data # TODO query by _id and - io.update_many({ + legacy_io.update_many({ 'type': 'version', 'parent': subset["_id"], 'name': version_number @@ -284,7 +284,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): version_id = existing_version['_id'] # Find representations of existing version and archive them - current_repres = list(io.find({ + current_repres = list(legacy_io.find({ "type": "representation", "parent": version_id })) @@ -307,14 +307,15 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): # bulk updates if bulk_writes: - io._database[io.Session["AVALON_PROJECT"]].bulk_write( + project_name = legacy_io.Session["AVALON_PROJECT"] + legacy_io.database[project_name].bulk_write( bulk_writes ) - version = io.find_one({"_id": version_id}) + version = legacy_io.find_one({"_id": version_id}) instance.data["versionEntity"] = version - existing_repres = list(io.find({ + existing_repres = list(legacy_io.find({ "parent": version_id, "type": "archived_representation" })) @@ -654,12 +655,12 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): repre_ids_to_remove = [] for repre in existing_repres: repre_ids_to_remove.append(repre["_id"]) - io.delete_many({"_id": {"$in": repre_ids_to_remove}}) + legacy_io.delete_many({"_id": {"$in": repre_ids_to_remove}}) for rep in instance.data["representations"]: self.log.debug("__ rep: {}".format(rep)) - io.insert_many(representations) + legacy_io.insert_many(representations) instance.data["published_representations"] = ( published_representations ) @@ -761,7 +762,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): def get_subset(self, asset, instance): subset_name = instance.data["subset"] - subset = io.find_one({ + subset = legacy_io.find_one({ "type": "subset", "parent": asset["_id"], "name": subset_name @@ -782,7 +783,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): if _family not in families: families.append(_family) - _id = io.insert_one({ + _id = legacy_io.insert_one({ "schema": "openpype:subset-3.0", "type": "subset", "name": subset_name, @@ -792,7 +793,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): "parent": asset["_id"] }).inserted_id - subset = io.find_one({"_id": _id}) + subset = legacy_io.find_one({"_id": _id}) # QUESTION Why is changing of group and updating it's # families in 'get_subset'? @@ -801,7 +802,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): # Update families on subset. 
families = [instance.data["family"]] families.extend(instance.data.get("families", [])) - io.update_many( + legacy_io.update_many( {"type": "subset", "_id": ObjectId(subset["_id"])}, {"$set": {"data.families": families}} ) @@ -825,7 +826,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): subset_group = self._get_subset_group(instance) if subset_group: - io.update_many({ + legacy_io.update_many({ 'type': 'subset', '_id': ObjectId(subset_id) }, {'$set': {'data.subsetGroup': subset_group}}) diff --git a/openpype/plugins/publish/integrate_thumbnail.py b/openpype/plugins/publish/integrate_thumbnail.py index 28a93efb9a..5d6fc561ea 100644 --- a/openpype/plugins/publish/integrate_thumbnail.py +++ b/openpype/plugins/publish/integrate_thumbnail.py @@ -8,7 +8,7 @@ import six import pyblish.api from bson.objectid import ObjectId -from avalon import api, io +from openpype.pipeline import legacy_io class IntegrateThumbnails(pyblish.api.InstancePlugin): @@ -38,7 +38,7 @@ class IntegrateThumbnails(pyblish.api.InstancePlugin): ) return - project_name = api.Session["AVALON_PROJECT"] + project_name = legacy_io.Session["AVALON_PROJECT"] anatomy = instance.context.data["anatomy"] if "publish" not in anatomy.templates: @@ -66,11 +66,11 @@ class IntegrateThumbnails(pyblish.api.InstancePlugin): ) return - io.install() + legacy_io.install() thumbnail_template = anatomy.templates["publish"]["thumbnail"] - version = io.find_one({"_id": thumb_repre["parent"]}) + version = legacy_io.find_one({"_id": thumb_repre["parent"]}) if not version: raise AssertionError( "There does not exist version with id {}".format( @@ -137,12 +137,12 @@ class IntegrateThumbnails(pyblish.api.InstancePlugin): } } # Create thumbnail entity - io.insert_one(thumbnail_entity) + legacy_io.insert_one(thumbnail_entity) self.log.debug( "Creating entity in database {}".format(str(thumbnail_entity)) ) # Set thumbnail id for version - io.update_many( + legacy_io.update_many( {"_id": version["_id"]}, {"$set": {"data.thumbnail_id": thumbnail_id}} ) @@ -151,7 +151,7 @@ class IntegrateThumbnails(pyblish.api.InstancePlugin): )) asset_entity = instance.data["assetEntity"] - io.update_many( + legacy_io.update_many( {"_id": asset_entity["_id"]}, {"$set": {"data.thumbnail_id": thumbnail_id}} ) diff --git a/openpype/plugins/publish/validate_editorial_asset_name.py b/openpype/plugins/publish/validate_editorial_asset_name.py index 4a65f3c64a..f9cdaebf0c 100644 --- a/openpype/plugins/publish/validate_editorial_asset_name.py +++ b/openpype/plugins/publish/validate_editorial_asset_name.py @@ -1,7 +1,9 @@ -import pyblish.api -from avalon import io from pprint import pformat +import pyblish.api + +from openpype.pipeline import legacy_io + class ValidateEditorialAssetName(pyblish.api.ContextPlugin): """ Validating if editorial's asset names are not already created in db. 
@@ -24,10 +26,10 @@ class ValidateEditorialAssetName(pyblish.api.ContextPlugin): asset_and_parents = self.get_parents(context) self.log.debug("__ asset_and_parents: {}".format(asset_and_parents)) - if not io.Session: - io.install() + if not legacy_io.Session: + legacy_io.install() - db_assets = list(io.find( + db_assets = list(legacy_io.find( {"type": "asset"}, {"name": 1, "data.parents": 1})) self.log.debug("__ db_assets: {}".format(db_assets)) From e75170d5c6fe99ccd4a58a8dafd5602bf73c3f9e Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 14 Apr 2022 12:35:27 +0200 Subject: [PATCH 171/357] replace avalon imports in modules --- .../publish/submit_aftereffects_deadline.py | 5 ++-- .../publish/submit_harmony_deadline.py | 4 ++-- .../publish/submit_houdini_remote_publish.py | 10 ++++---- .../publish/submit_houdini_render_deadline.py | 8 +++---- .../plugins/publish/submit_maya_deadline.py | 4 ++-- .../plugins/publish/submit_nuke_deadline.py | 6 ++--- .../plugins/publish/submit_publish_job.py | 23 ++++++++++--------- .../ftrack/event_handlers_user/action_rv.py | 21 +++++++++-------- .../plugins/publish/collect_ftrack_api.py | 9 ++++---- .../plugins/publish/collect_ftrack_family.py | 6 ++--- .../publish/integrate_hierarchy_ftrack.py | 9 ++++---- .../publish/collect_sequences_from_job.py | 9 +++++--- .../plugins/publish/collect_slack_family.py | 4 ++-- 13 files changed, 63 insertions(+), 55 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_aftereffects_deadline.py b/openpype/modules/deadline/plugins/publish/submit_aftereffects_deadline.py index c499c14d40..ba79e1ed4d 100644 --- a/openpype/modules/deadline/plugins/publish/submit_aftereffects_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_aftereffects_deadline.py @@ -3,10 +3,9 @@ import attr import getpass import pyblish.api -from avalon import api - from openpype.lib import env_value_to_bool from openpype.lib.delivery import collect_frames +from openpype.pipeline import legacy_io from openpype_modules.deadline import abstract_submit_deadline from openpype_modules.deadline.abstract_submit_deadline import DeadlineJobInfo @@ -89,7 +88,7 @@ class AfterEffectsSubmitDeadline( keys.append("OPENPYPE_MONGO") environment = dict({key: os.environ[key] for key in keys - if key in os.environ}, **api.Session) + if key in os.environ}, **legacy_io.Session) for key in keys: val = environment.get(key) if val: diff --git a/openpype/modules/deadline/plugins/publish/submit_harmony_deadline.py b/openpype/modules/deadline/plugins/publish/submit_harmony_deadline.py index 918efb6630..dda7f7f3aa 100644 --- a/openpype/modules/deadline/plugins/publish/submit_harmony_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_harmony_deadline.py @@ -8,8 +8,8 @@ import re import attr import pyblish.api -from avalon import api +from openpype.pipeline import legacy_io from openpype_modules.deadline import abstract_submit_deadline from openpype_modules.deadline.abstract_submit_deadline import DeadlineJobInfo @@ -282,7 +282,7 @@ class HarmonySubmitDeadline( keys.append("OPENPYPE_MONGO") environment = dict({key: os.environ[key] for key in keys - if key in os.environ}, **api.Session) + if key in os.environ}, **legacy_io.Session) for key in keys: val = environment.get(key) if val: diff --git a/openpype/modules/deadline/plugins/publish/submit_houdini_remote_publish.py b/openpype/modules/deadline/plugins/publish/submit_houdini_remote_publish.py index c683eb68a8..f834ae7e92 100644 --- 
a/openpype/modules/deadline/plugins/publish/submit_houdini_remote_publish.py +++ b/openpype/modules/deadline/plugins/publish/submit_houdini_remote_publish.py @@ -4,10 +4,10 @@ import json import requests import hou -from avalon import api, io - import pyblish.api +from openpype.pipeline import legacy_io + class HoudiniSubmitPublishDeadline(pyblish.api.ContextPlugin): """Submit Houdini scene to perform a local publish in Deadline. @@ -35,7 +35,7 @@ class HoudiniSubmitPublishDeadline(pyblish.api.ContextPlugin): ), "Errors found, aborting integration.." # Deadline connection - AVALON_DEADLINE = api.Session.get( + AVALON_DEADLINE = legacy_io.Session.get( "AVALON_DEADLINE", "http://localhost:8082" ) assert AVALON_DEADLINE, "Requires AVALON_DEADLINE" @@ -55,7 +55,7 @@ class HoudiniSubmitPublishDeadline(pyblish.api.ContextPlugin): scenename = os.path.basename(scene) # Get project code - project = io.find_one({"type": "project"}) + project = legacy_io.find_one({"type": "project"}) code = project["data"].get("code", project["name"]) job_name = "{scene} [PUBLISH]".format(scene=scenename) @@ -137,7 +137,7 @@ class HoudiniSubmitPublishDeadline(pyblish.api.ContextPlugin): environment = dict( {key: os.environ[key] for key in keys if key in os.environ}, - **api.Session + **legacy_io.Session ) environment["PYBLISH_ACTIVE_INSTANCES"] = ",".join(instances) diff --git a/openpype/modules/deadline/plugins/publish/submit_houdini_render_deadline.py b/openpype/modules/deadline/plugins/publish/submit_houdini_render_deadline.py index 59aeb68b79..b94ad24397 100644 --- a/openpype/modules/deadline/plugins/publish/submit_houdini_render_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_houdini_render_deadline.py @@ -3,12 +3,12 @@ import json import getpass import requests -from avalon import api - import pyblish.api import hou +from openpype.pipeline import legacy_io + class HoudiniSubmitRenderDeadline(pyblish.api.InstancePlugin): """Submit Solaris USD Render ROPs to Deadline. 
@@ -106,7 +106,7 @@ class HoudiniSubmitRenderDeadline(pyblish.api.InstancePlugin): keys.append("OPENPYPE_MONGO") environment = dict({key: os.environ[key] for key in keys - if key in os.environ}, **api.Session) + if key in os.environ}, **legacy_io.Session) payload["JobInfo"].update({ "EnvironmentKeyValue%d" % index: "{key}={value}".format( @@ -140,7 +140,7 @@ class HoudiniSubmitRenderDeadline(pyblish.api.InstancePlugin): def submit(self, instance, payload): - AVALON_DEADLINE = api.Session.get("AVALON_DEADLINE", + AVALON_DEADLINE = legacy_io.Session.get("AVALON_DEADLINE", "http://localhost:8082") assert AVALON_DEADLINE, "Requires AVALON_DEADLINE" diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index 34147712bc..37bdaede1c 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -32,10 +32,10 @@ import requests from maya import cmds -from avalon import api import pyblish.api from openpype.hosts.maya.api import lib +from openpype.pipeline import legacy_io # Documentation for keys available at: # https://docs.thinkboxsoftware.com @@ -488,7 +488,7 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin): keys.append("OPENPYPE_MONGO") environment = dict({key: os.environ[key] for key in keys - if key in os.environ}, **api.Session) + if key in os.environ}, **legacy_io.Session) environment["OPENPYPE_LOG_NO_COLORS"] = "1" environment["OPENPYPE_MAYA_VERSION"] = cmds.about(v=True) # to recognize job from PYPE for turning Event On/Off diff --git a/openpype/modules/deadline/plugins/publish/submit_nuke_deadline.py b/openpype/modules/deadline/plugins/publish/submit_nuke_deadline.py index 9b5800c33f..942d442c25 100644 --- a/openpype/modules/deadline/plugins/publish/submit_nuke_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_nuke_deadline.py @@ -4,10 +4,10 @@ import json import getpass import requests - -from avalon import api import pyblish.api + import nuke +from openpype.pipeline import legacy_io class NukeSubmitDeadline(pyblish.api.InstancePlugin): @@ -266,7 +266,7 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin): keys += self.env_allowed_keys environment = dict({key: os.environ[key] for key in keys - if key in os.environ}, **api.Session) + if key in os.environ}, **legacy_io.Session) for _path in os.environ: if _path.lower().startswith('openpype_'): diff --git a/openpype/modules/deadline/plugins/publish/submit_publish_job.py b/openpype/modules/deadline/plugins/publish/submit_publish_job.py index 3c4e0d2913..78e05d80fc 100644 --- a/openpype/modules/deadline/plugins/publish/submit_publish_job.py +++ b/openpype/modules/deadline/plugins/publish/submit_publish_job.py @@ -7,13 +7,14 @@ import re from copy import copy, deepcopy import requests import clique -import openpype.api - -from avalon import api, io import pyblish.api -from openpype.pipeline import get_representation_path +import openpype.api +from openpype.pipeline import ( + get_representation_path, + legacy_io, +) def get_resources(version, extension=None): @@ -22,7 +23,7 @@ def get_resources(version, extension=None): if extension: query["name"] = extension - representation = io.find_one(query) + representation = legacy_io.find_one(query) assert representation, "This is a bug" directory = get_representation_path(representation) @@ -221,9 +222,9 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): 
self._create_metadata_path(instance) environment = job["Props"].get("Env", {}) - environment["AVALON_PROJECT"] = io.Session["AVALON_PROJECT"] - environment["AVALON_ASSET"] = io.Session["AVALON_ASSET"] - environment["AVALON_TASK"] = io.Session["AVALON_TASK"] + environment["AVALON_PROJECT"] = legacy_io.Session["AVALON_PROJECT"] + environment["AVALON_ASSET"] = legacy_io.Session["AVALON_ASSET"] + environment["AVALON_TASK"] = legacy_io.Session["AVALON_TASK"] environment["AVALON_APP_NAME"] = os.environ.get("AVALON_APP_NAME") environment["OPENPYPE_LOG_NO_COLORS"] = "1" environment["OPENPYPE_USERNAME"] = instance.context.data["user"] @@ -663,7 +664,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): if hasattr(instance, "_log"): data['_log'] = instance._log - asset = data.get("asset") or api.Session["AVALON_ASSET"] + asset = data.get("asset") or legacy_io.Session["AVALON_ASSET"] subset = data.get("subset") start = instance.data.get("frameStart") @@ -955,7 +956,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): "intent": context.data.get("intent"), "comment": context.data.get("comment"), "job": render_job or None, - "session": api.Session.copy(), + "session": legacy_io.Session.copy(), "instances": instances } @@ -1063,7 +1064,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): else: # solve deprecated situation when `folder` key is not underneath # `publish` anatomy - project_name = api.Session["AVALON_PROJECT"] + project_name = legacy_io.Session["AVALON_PROJECT"] self.log.warning(( "Deprecation warning: Anatomy does not have set `folder`" " key underneath `publish` (in global of for project `{}`)." diff --git a/openpype/modules/ftrack/event_handlers_user/action_rv.py b/openpype/modules/ftrack/event_handlers_user/action_rv.py index bdb0eaf250..040ca75582 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_rv.py +++ b/openpype/modules/ftrack/event_handlers_user/action_rv.py @@ -4,8 +4,11 @@ import traceback import json import ftrack_api -from avalon import io, api -from openpype.pipeline import get_representation_path + +from openpype.pipeline import ( + get_representation_path, + legacy_io, +) from openpype_modules.ftrack.lib import BaseAction, statics_icon @@ -253,8 +256,8 @@ class RVAction(BaseAction): )["version"]["asset"]["parent"]["link"][0] project = session.get(link["type"], link["id"]) os.environ["AVALON_PROJECT"] = project["name"] - api.Session["AVALON_PROJECT"] = project["name"] - io.install() + legacy_io.Session["AVALON_PROJECT"] = project["name"] + legacy_io.install() location = ftrack_api.Session().pick_location() @@ -278,22 +281,22 @@ class RVAction(BaseAction): if online_source: continue - asset = io.find_one({"type": "asset", "name": parent_name}) - subset = io.find_one( + asset = legacy_io.find_one({"type": "asset", "name": parent_name}) + subset = legacy_io.find_one( { "type": "subset", "name": component["version"]["asset"]["name"], "parent": asset["_id"] } ) - version = io.find_one( + version = legacy_io.find_one( { "type": "version", "name": component["version"]["version"], "parent": subset["_id"] } ) - representation = io.find_one( + representation = legacy_io.find_one( { "type": "representation", "parent": version["_id"], @@ -301,7 +304,7 @@ class RVAction(BaseAction): } ) if representation is None: - representation = io.find_one( + representation = legacy_io.find_one( { "type": "representation", "parent": version["_id"], diff --git a/openpype/modules/ftrack/plugins/publish/collect_ftrack_api.py 
b/openpype/modules/ftrack/plugins/publish/collect_ftrack_api.py index 436a61cc18..14da188150 100644 --- a/openpype/modules/ftrack/plugins/publish/collect_ftrack_api.py +++ b/openpype/modules/ftrack/plugins/publish/collect_ftrack_api.py @@ -1,6 +1,7 @@ import logging import pyblish.api -import avalon.api + +from openpype.pipeline import legacy_io class CollectFtrackApi(pyblish.api.ContextPlugin): @@ -23,9 +24,9 @@ class CollectFtrackApi(pyblish.api.ContextPlugin): self.log.debug("Ftrack user: \"{0}\"".format(session.api_user)) # Collect task - project_name = avalon.api.Session["AVALON_PROJECT"] - asset_name = avalon.api.Session["AVALON_ASSET"] - task_name = avalon.api.Session["AVALON_TASK"] + project_name = legacy_io.Session["AVALON_PROJECT"] + asset_name = legacy_io.Session["AVALON_ASSET"] + task_name = legacy_io.Session["AVALON_TASK"] # Find project entity project_query = 'Project where full_name is "{0}"'.format(project_name) diff --git a/openpype/modules/ftrack/plugins/publish/collect_ftrack_family.py b/openpype/modules/ftrack/plugins/publish/collect_ftrack_family.py index 95987fe42e..820390b1f0 100644 --- a/openpype/modules/ftrack/plugins/publish/collect_ftrack_family.py +++ b/openpype/modules/ftrack/plugins/publish/collect_ftrack_family.py @@ -6,8 +6,8 @@ Provides: instance -> families ([]) """ import pyblish.api -import avalon.api +from openpype.pipeline import legacy_io from openpype.lib.plugin_tools import filter_profiles @@ -35,8 +35,8 @@ class CollectFtrackFamily(pyblish.api.InstancePlugin): return task_name = instance.data.get("task", - avalon.api.Session["AVALON_TASK"]) - host_name = avalon.api.Session["AVALON_APP"] + legacy_io.Session["AVALON_TASK"]) + host_name = legacy_io.Session["AVALON_APP"] family = instance.data["family"] filtering_criteria = { diff --git a/openpype/modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py b/openpype/modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py index 61892240d7..cf90c11b65 100644 --- a/openpype/modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py +++ b/openpype/modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py @@ -2,7 +2,8 @@ import sys import collections import six import pyblish.api -from avalon import io + +from openpype.pipeline import legacy_io # Copy of constant `openpype_modules.ftrack.lib.avalon_sync.CUST_ATTR_AUTO_SYNC` CUST_ATTR_AUTO_SYNC = "avalon_auto_sync" @@ -80,8 +81,8 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin): auto_sync_state = project[ "custom_attributes"][CUST_ATTR_AUTO_SYNC] - if not io.Session: - io.install() + if not legacy_io.Session: + legacy_io.install() self.ft_project = None @@ -271,7 +272,7 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin): # Create new links. 
for input in entity_data.get("inputs", []): - input_id = io.find_one({"_id": input})["data"]["ftrackId"] + input_id = legacy_io.find_one({"_id": input})["data"]["ftrackId"] assetbuild = self.session.get("AssetBuild", input_id) self.log.debug( "Creating link from {0} to {1}".format( diff --git a/openpype/modules/royalrender/plugins/publish/collect_sequences_from_job.py b/openpype/modules/royalrender/plugins/publish/collect_sequences_from_job.py index 4d216c1c0a..65af90e8a6 100644 --- a/openpype/modules/royalrender/plugins/publish/collect_sequences_from_job.py +++ b/openpype/modules/royalrender/plugins/publish/collect_sequences_from_job.py @@ -7,7 +7,8 @@ import json from pprint import pformat import pyblish.api -from avalon import api + +from openpype.pipeline import legacy_io def collect(root, @@ -127,7 +128,7 @@ class CollectSequencesFromJob(pyblish.api.ContextPlugin): session = metadata.get("session") if session: self.log.info("setting session using metadata") - api.Session.update(session) + legacy_io.Session.update(session) os.environ.update(session) else: @@ -187,7 +188,9 @@ class CollectSequencesFromJob(pyblish.api.ContextPlugin): "family": families[0], # backwards compatibility / pyblish "families": list(families), "subset": subset, - "asset": data.get("asset", api.Session["AVALON_ASSET"]), + "asset": data.get( + "asset", legacy_io.Session["AVALON_ASSET"] + ), "stagingDir": root, "frameStart": start, "frameEnd": end, diff --git a/openpype/modules/slack/plugins/publish/collect_slack_family.py b/openpype/modules/slack/plugins/publish/collect_slack_family.py index 7475bdc89e..39b05937dc 100644 --- a/openpype/modules/slack/plugins/publish/collect_slack_family.py +++ b/openpype/modules/slack/plugins/publish/collect_slack_family.py @@ -1,7 +1,7 @@ -from avalon import io import pyblish.api from openpype.lib.profiles_filtering import filter_profiles +from openpype.pipeline import legacy_io class CollectSlackFamilies(pyblish.api.InstancePlugin): @@ -18,7 +18,7 @@ class CollectSlackFamilies(pyblish.api.InstancePlugin): profiles = None def process(self, instance): - task_name = io.Session.get("AVALON_TASK") + task_name = legacy_io.Session.get("AVALON_TASK") family = self.main_family_from_instance(instance) key_values = { "families": family, From 3772e1d68cec4399f94d01a148ba5177bbb7b021 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 14 Apr 2022 12:36:53 +0200 Subject: [PATCH 172/357] replace avalon imports in pipeline --- openpype/pipeline/__init__.py | 13 ++++++------ openpype/pipeline/context_tools.py | 15 +++++++------- openpype/pipeline/create/context.py | 12 +++++------ openpype/pipeline/load/utils.py | 32 ++++++++++++++--------------- openpype/pipeline/thumbnail.py | 4 ++-- 5 files changed, 36 insertions(+), 40 deletions(-) diff --git a/openpype/pipeline/__init__.py b/openpype/pipeline/__init__.py index 2c35ea2d57..e67b21105c 100644 --- a/openpype/pipeline/__init__.py +++ b/openpype/pipeline/__init__.py @@ -3,6 +3,10 @@ from .constants import ( HOST_WORKFILE_EXTENSIONS, ) +from .mongodb import ( + AvalonMongoDB, +) + from .create import ( BaseCreator, Creator, @@ -85,16 +89,13 @@ from .context_tools import ( install = install_host uninstall = uninstall_host -from .mongodb import ( - AvalonMongoDB, -) - __all__ = ( "AVALON_CONTAINER_ID", "HOST_WORKFILE_EXTENSIONS", - "attribute_definitions", + # --- MongoDB --- + "AvalonMongoDB", # --- Create --- "BaseCreator", @@ -174,6 +175,4 @@ __all__ = ( # Backwards compatible function names "install", "uninstall", - - "AvalonMongoDB", ) diff 
--git a/openpype/pipeline/context_tools.py b/openpype/pipeline/context_tools.py index 1bef260ec9..06bd639776 100644 --- a/openpype/pipeline/context_tools.py +++ b/openpype/pipeline/context_tools.py @@ -11,8 +11,6 @@ import platform import pyblish.api from pyblish.lib import MessageHandler -from avalon import io, Session - import openpype from openpype.modules import load_modules from openpype.settings import get_project_settings @@ -24,6 +22,7 @@ from openpype.lib import ( ) from . import ( + legacy_io, register_loader_plugin_path, register_inventory_action, register_creator_plugin_path, @@ -57,7 +56,7 @@ def registered_root(): if root: return root - root = Session.get("AVALON_PROJECTS") + root = legacy_io.Session.get("AVALON_PROJECTS") if root: return os.path.normpath(root) return "" @@ -74,20 +73,20 @@ def install_host(host): _is_installed = True - io.install() + legacy_io.install() missing = list() for key in ("AVALON_PROJECT", "AVALON_ASSET"): - if key not in Session: + if key not in legacy_io.Session: missing.append(key) assert not missing, ( "%s missing from environment, %s" % ( ", ".join(missing), - json.dumps(Session, indent=4, sort_keys=True) + json.dumps(legacy_io.Session, indent=4, sort_keys=True) )) - project_name = Session["AVALON_PROJECT"] + project_name = legacy_io.Session["AVALON_PROJECT"] log.info("Activating %s.." % project_name) # Optional host install function @@ -170,7 +169,7 @@ def uninstall_host(): deregister_host() - io.uninstall() + legacy_io.uninstall() log.info("Successfully uninstalled Avalon!") diff --git a/openpype/pipeline/create/context.py b/openpype/pipeline/create/context.py index 584752e38a..6f862e0588 100644 --- a/openpype/pipeline/create/context.py +++ b/openpype/pipeline/create/context.py @@ -6,13 +6,13 @@ import inspect from uuid import uuid4 from contextlib import contextmanager +from openpype.pipeline import legacy_io from openpype.pipeline.mongodb import ( AvalonMongoDB, session_data_from_environment, ) from .creator_plugins import ( - BaseCreator, Creator, AutoCreator, discover_creator_plugins, @@ -773,12 +773,11 @@ class CreateContext: """Give ability to reset avalon context. Reset is based on optional host implementation of `get_current_context` - function or using `avalon.api.Session`. + function or using `legacy_io.Session`. Some hosts have ability to change context file without using workfiles tool but that change is not propagated to """ - import avalon.api project_name = asset_name = task_name = None if hasattr(self.host, "get_current_context"): @@ -789,11 +788,11 @@ class CreateContext: task_name = host_context.get("task_name") if not project_name: - project_name = avalon.api.Session.get("AVALON_PROJECT") + project_name = legacy_io.Session.get("AVALON_PROJECT") if not asset_name: - asset_name = avalon.api.Session.get("AVALON_ASSET") + asset_name = legacy_io.Session.get("AVALON_ASSET") if not task_name: - task_name = avalon.api.Session.get("AVALON_TASK") + task_name = legacy_io.Session.get("AVALON_TASK") if project_name: self.dbcon.Session["AVALON_PROJECT"] = project_name @@ -808,7 +807,6 @@ class CreateContext: Reloads creators from preregistered paths and can load publish plugins if it's enabled on context. 
""" - import avalon.api import pyblish.logic from openpype.pipeline import OpenPypePyblishPluginMixin diff --git a/openpype/pipeline/load/utils.py b/openpype/pipeline/load/utils.py index ca04f79ae6..99e5d11f82 100644 --- a/openpype/pipeline/load/utils.py +++ b/openpype/pipeline/load/utils.py @@ -9,11 +9,11 @@ import numbers import six from bson.objectid import ObjectId -from avalon import io -from avalon.api import Session - from openpype.lib import Anatomy -from openpype.pipeline import schema +from openpype.pipeline import ( + schema, + legacy_io, +) log = logging.getLogger(__name__) @@ -60,7 +60,7 @@ def get_repres_contexts(representation_ids, dbcon=None): """ if not dbcon: - dbcon = io + dbcon = legacy_io contexts = {} if not representation_ids: @@ -167,7 +167,7 @@ def get_subset_contexts(subset_ids, dbcon=None): dict: The full representation context by representation id. """ if not dbcon: - dbcon = io + dbcon = legacy_io contexts = {} if not subset_ids: @@ -230,10 +230,10 @@ def get_representation_context(representation): assert representation is not None, "This is a bug" if isinstance(representation, (six.string_types, ObjectId)): - representation = io.find_one( + representation = legacy_io.find_one( {"_id": ObjectId(str(representation))}) - version, subset, asset, project = io.parenthood(representation) + version, subset, asset, project = legacy_io.parenthood(representation) assert all([representation, version, subset, asset, project]), ( "This is a bug" @@ -405,17 +405,17 @@ def update_container(container, version=-1): """Update a container""" # Compute the different version from 'representation' - current_representation = io.find_one({ + current_representation = legacy_io.find_one({ "_id": ObjectId(container["representation"]) }) assert current_representation is not None, "This is a bug" - current_version, subset, asset, project = io.parenthood( + current_version, subset, asset, project = legacy_io.parenthood( current_representation) if version == -1: - new_version = io.find_one({ + new_version = legacy_io.find_one({ "type": "version", "parent": subset["_id"] }, sort=[("name", -1)]) @@ -431,11 +431,11 @@ def update_container(container, version=-1): "type": "version", "name": version } - new_version = io.find_one(version_query) + new_version = legacy_io.find_one(version_query) assert new_version is not None, "This is a bug" - new_representation = io.find_one({ + new_representation = legacy_io.find_one({ "type": "representation", "parent": new_version["_id"], "name": current_representation["name"] @@ -482,7 +482,7 @@ def switch_container(container, representation, loader_plugin=None): )) # Get the new representation to switch to - new_representation = io.find_one({ + new_representation = legacy_io.find_one({ "type": "representation", "_id": representation["_id"], }) @@ -501,7 +501,7 @@ def get_representation_path_from_context(context): representation = context['representation'] project_doc = context.get("project") root = None - session_project = Session.get("AVALON_PROJECT") + session_project = legacy_io.Session.get("AVALON_PROJECT") if project_doc and project_doc["name"] != session_project: anatomy = Anatomy(project_doc["name"]) root = anatomy.roots @@ -530,7 +530,7 @@ def get_representation_path(representation, root=None, dbcon=None): from openpype.lib import StringTemplate, TemplateUnsolved if dbcon is None: - dbcon = io + dbcon = legacy_io if root is None: from openpype.pipeline import registered_root diff --git a/openpype/pipeline/thumbnail.py b/openpype/pipeline/thumbnail.py 
index c09dab70eb..ec97b36954 100644 --- a/openpype/pipeline/thumbnail.py +++ b/openpype/pipeline/thumbnail.py @@ -2,6 +2,7 @@ import os import copy import logging +from . import legacy_io from .plugin_discover import ( discover, register_plugin, @@ -17,8 +18,7 @@ def get_thumbnail_binary(thumbnail_entity, thumbnail_type, dbcon=None): resolvers = discover_thumbnail_resolvers() resolvers = sorted(resolvers, key=lambda cls: cls.priority) if dbcon is None: - from avalon import io - dbcon = io + dbcon = legacy_io for Resolver in resolvers: available_types = Resolver.thumbnail_types From eacfaa7f11d0d96bd1c037c2bdbc16a1d0e62dae Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 14 Apr 2022 12:37:03 +0200 Subject: [PATCH 173/357] added missing legacy_io --- openpype/pipeline/legacy_io.py | 146 +++++++++++++++++++++++++++++++++ 1 file changed, 146 insertions(+) create mode 100644 openpype/pipeline/legacy_io.py diff --git a/openpype/pipeline/legacy_io.py b/openpype/pipeline/legacy_io.py new file mode 100644 index 0000000000..c41406b208 --- /dev/null +++ b/openpype/pipeline/legacy_io.py @@ -0,0 +1,146 @@ +"""Wrapper around interactions with the database""" + +import sys +import logging +import functools + +from . import schema +from .mongodb import AvalonMongoDB, session_data_from_environment + +module = sys.modules[__name__] + +Session = {} +_is_installed = False +_connection_object = AvalonMongoDB(Session) +_mongo_client = None +_database = database = None + +log = logging.getLogger(__name__) + + +def install(): + """Establish a persistent connection to the database""" + if module._is_installed: + return + + session = session_data_from_environment(context_keys=True) + + session["schema"] = "openpype:session-2.0" + try: + schema.validate(session) + except schema.ValidationError as e: + # TODO(marcus): Make this mandatory + log.warning(e) + + _connection_object.Session.update(session) + _connection_object.install() + + module._mongo_client = _connection_object.mongo_client + module._database = module.database = _connection_object.database + + module._is_installed = True + + +def uninstall(): + """Close any connection to the database""" + module._mongo_client = None + module._database = module.database = None + module._is_installed = False + try: + module._connection_object.uninstall() + except AttributeError: + pass + + +def requires_install(func): + @functools.wraps(func) + def decorated(*args, **kwargs): + if not module._is_installed: + install() + return func(*args, **kwargs) + return decorated + + +@requires_install +def projects(*args, **kwargs): + return _connection_object.projects(*args, **kwargs) + + +@requires_install +def insert_one(doc, *args, **kwargs): + return _connection_object.insert_one(doc, *args, **kwargs) + + +@requires_install +def insert_many(docs, *args, **kwargs): + return _connection_object.insert_many(docs, *args, **kwargs) + + +@requires_install +def update_one(*args, **kwargs): + return _connection_object.update_one(*args, **kwargs) + + +@requires_install +def update_many(*args, **kwargs): + return _connection_object.update_many(*args, **kwargs) + + +@requires_install +def replace_one(*args, **kwargs): + return _connection_object.replace_one(*args, **kwargs) + + +@requires_install +def replace_many(*args, **kwargs): + return _connection_object.replace_many(*args, **kwargs) + + +@requires_install +def delete_one(*args, **kwargs): + return _connection_object.delete_one(*args, **kwargs) + + +@requires_install +def delete_many(*args, **kwargs): + return 
_connection_object.delete_many(*args, **kwargs) + + +@requires_install +def find(*args, **kwargs): + return _connection_object.find(*args, **kwargs) + + +@requires_install +def find_one(*args, **kwargs): + return _connection_object.find_one(*args, **kwargs) + + +@requires_install +def distinct(*args, **kwargs): + return _connection_object.distinct(*args, **kwargs) + + +@requires_install +def aggregate(*args, **kwargs): + return _connection_object.aggregate(*args, **kwargs) + + +@requires_install +def save(*args, **kwargs): + return _connection_object.save(*args, **kwargs) + + +@requires_install +def drop(*args, **kwargs): + return _connection_object.drop(*args, **kwargs) + + +@requires_install +def parenthood(*args, **kwargs): + return _connection_object.parenthood(*args, **kwargs) + + +@requires_install +def bulk_write(*args, **kwargs): + return _connection_object.bulk_write(*args, **kwargs) From b334a4251b36f986b16c49b52629831c8a81747d Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 14 Apr 2022 12:53:18 +0200 Subject: [PATCH 174/357] replace avalon imports on remaining places --- .../clockify/launcher_actions/ClockifyStart.py | 9 +++++---- .../clockify/launcher_actions/ClockifySync.py | 11 ++++++----- openpype/scripts/fusion_switch_shot.py | 12 ++++++------ 3 files changed, 17 insertions(+), 15 deletions(-) diff --git a/openpype/modules/clockify/launcher_actions/ClockifyStart.py b/openpype/modules/clockify/launcher_actions/ClockifyStart.py index 6428d5e7aa..4669f98b01 100644 --- a/openpype/modules/clockify/launcher_actions/ClockifyStart.py +++ b/openpype/modules/clockify/launcher_actions/ClockifyStart.py @@ -1,7 +1,8 @@ -from avalon import io - from openpype.api import Logger -from openpype.pipeline import LauncherAction +from openpype.pipeline import ( + legacy_io, + LauncherAction, +) from openpype_modules.clockify.clockify_api import ClockifyAPI @@ -28,7 +29,7 @@ class ClockifyStart(LauncherAction): task_name = session['AVALON_TASK'] description = asset_name - asset = io.find_one({ + asset = legacy_io.find_one({ 'type': 'asset', 'name': asset_name }) diff --git a/openpype/modules/clockify/launcher_actions/ClockifySync.py b/openpype/modules/clockify/launcher_actions/ClockifySync.py index 3c81e2766c..356bbd0306 100644 --- a/openpype/modules/clockify/launcher_actions/ClockifySync.py +++ b/openpype/modules/clockify/launcher_actions/ClockifySync.py @@ -1,8 +1,9 @@ -from avalon import io - from openpype_modules.clockify.clockify_api import ClockifyAPI from openpype.api import Logger -from openpype.pipeline import LauncherAction +from openpype.pipeline import ( + legacy_io, + LauncherAction, +) log = Logger.get_logger(__name__) @@ -25,10 +26,10 @@ class ClockifySync(LauncherAction): projects_to_sync = [] if project_name.strip() == '' or project_name is None: - for project in io.projects(): + for project in legacy_io.projects(): projects_to_sync.append(project) else: - project = io.find_one({'type': 'project'}) + project = legacy_io.find_one({'type': 'project'}) projects_to_sync.append(project) projects_info = {} diff --git a/openpype/scripts/fusion_switch_shot.py b/openpype/scripts/fusion_switch_shot.py index 3ba150902e..245fc665f0 100644 --- a/openpype/scripts/fusion_switch_shot.py +++ b/openpype/scripts/fusion_switch_shot.py @@ -4,7 +4,6 @@ import sys import logging # Pipeline imports -from avalon import io from openpype.hosts.fusion import api import openpype.hosts.fusion.api.lib as fusion_lib @@ -13,6 +12,7 @@ from openpype.lib import version_up from openpype.pipeline import ( 
install_host, registered_host, + legacy_io, ) from openpype.lib.avalon_context import get_workdir_from_session @@ -131,7 +131,7 @@ def update_frame_range(comp, representations): """ version_ids = [r["parent"] for r in representations] - versions = io.find({"type": "version", "_id": {"$in": version_ids}}) + versions = legacy_io.find({"type": "version", "_id": {"$in": version_ids}}) versions = list(versions) start = min(v["data"]["frameStart"] for v in versions) @@ -162,13 +162,13 @@ def switch(asset_name, filepath=None, new=True): # Assert asset name exists # It is better to do this here then to wait till switch_shot does it - asset = io.find_one({"type": "asset", "name": asset_name}) + asset = legacy_io.find_one({"type": "asset", "name": asset_name}) assert asset, "Could not find '%s' in the database" % asset_name # Get current project - self._project = io.find_one({ + self._project = legacy_io.find_one({ "type": "project", - "name": io.Session["AVALON_PROJECT"] + "name": legacy_io.Session["AVALON_PROJECT"] }) # Go to comp @@ -198,7 +198,7 @@ def switch(asset_name, filepath=None, new=True): current_comp.Print(message) # Build the session to switch to - switch_to_session = io.Session.copy() + switch_to_session = legacy_io.Session.copy() switch_to_session["AVALON_ASSET"] = asset['name'] if new: From 14fbabd4771508d5088bfe30f9cd4cc46f64fd1d Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 14 Apr 2022 13:37:15 +0200 Subject: [PATCH 175/357] OP-3021 - refactor CreateWriteRender as a base for other write creators There are 3 creators which contained almost same code. CreateWriteRender was chosen as a base implementation, which other 2 could inherit and modify necessary parts --- .../plugins/create/create_write_render.py | 35 +++++++++++++------ 1 file changed, 24 insertions(+), 11 deletions(-) diff --git a/openpype/hosts/nuke/plugins/create/create_write_render.py b/openpype/hosts/nuke/plugins/create/create_write_render.py index a9c4b5341e..52edd85e41 100644 --- a/openpype/hosts/nuke/plugins/create/create_write_render.py +++ b/openpype/hosts/nuke/plugins/create/create_write_render.py @@ -97,9 +97,28 @@ class CreateWriteRender(plugin.OpenPypeCreator): else: self.log.info("Adding template path from plugin") write_data.update({ - "fpath_template": ("{work}/renders/nuke/{subset}" - "/{subset}.{frame}.{ext}")}) + "fpath_template": + ("{work}/{}s/nuke/{subset}".format(self.family) + + "/{subset}.{frame}.{ext}")}) + write_node = self._create_write_node(selected_node, + inputs, outputs, + write_data) + + # relinking to collected connections + for i, input in enumerate(inputs): + write_node.setInput(i, input) + + write_node.autoplace() + + for output in outputs: + output.setInput(0, write_node) + + write_node = self._modify_write_node(write_node) + + return write_node + + def _create_write_node(self, selected_node, inputs, outputs, write_data): # add reformat node to cut off all outside of format bounding box # get width and height try: @@ -126,13 +145,7 @@ class CreateWriteRender(plugin.OpenPypeCreator): input=selected_node, prenodes=_prenodes) - # relinking to collected connections - for i, input in enumerate(inputs): - write_node.setInput(i, input) - - write_node.autoplace() - - for output in outputs: - output.setInput(0, write_node) - return write_node + + def _modify_write_node(self, write_node): + return write_node \ No newline at end of file From 0b423fc6b5964a4ab4f749564ebbea7b3706516c Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 14 Apr 2022 13:57:28 +0200 Subject: [PATCH 176/357] 
OP-3021 - refactor CreateWriteRender as a base for other write creators There are 3 creators which contained almost same code. CreateWriteRender was chosen as a base implementation, which other 2 could inherit and modify necessary parts --- openpype/hosts/nuke/plugins/create/create_write_render.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/nuke/plugins/create/create_write_render.py b/openpype/hosts/nuke/plugins/create/create_write_render.py index 52edd85e41..4c494a58be 100644 --- a/openpype/hosts/nuke/plugins/create/create_write_render.py +++ b/openpype/hosts/nuke/plugins/create/create_write_render.py @@ -17,7 +17,7 @@ class CreateWriteRender(plugin.OpenPypeCreator): defaults = ["Main", "Mask"] def __init__(self, *args, **kwargs): - super(CreateWriteRender, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) data = OrderedDict() From 87a99d6d029b1c5ac88063616abfc74629395bc9 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 14 Apr 2022 16:36:21 +0200 Subject: [PATCH 177/357] removed AVALON_MONGO --- openpype/pipeline/mongodb.py | 3 --- openpype/tools/loader/__main__.py | 1 - 2 files changed, 4 deletions(-) diff --git a/openpype/pipeline/mongodb.py b/openpype/pipeline/mongodb.py index 9efd231bb2..565e26b966 100644 --- a/openpype/pipeline/mongodb.py +++ b/openpype/pipeline/mongodb.py @@ -95,9 +95,6 @@ def session_data_from_environment(context_keys=False): # Used during any connections to the outside world ("AVALON_TIMEOUT", "1000"), - # Address to Asset Database - ("AVALON_MONGO", "mongodb://localhost:27017"), - # Name of database used in MongoDB ("AVALON_DB", "avalon"), ): diff --git a/openpype/tools/loader/__main__.py b/openpype/tools/loader/__main__.py index 400a034a76..acf357aa97 100644 --- a/openpype/tools/loader/__main__.py +++ b/openpype/tools/loader/__main__.py @@ -19,7 +19,6 @@ def my_exception_hook(exctype, value, traceback): if __name__ == '__main__': - os.environ["AVALON_MONGO"] = "mongodb://localhost:27017" os.environ["OPENPYPE_MONGO"] = "mongodb://localhost:27017" os.environ["AVALON_DB"] = "avalon" os.environ["AVALON_TIMEOUT"] = "1000" From b0da2a07f80110d92dde21eb8e7e3da667f7d0c8 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 14 Apr 2022 16:39:44 +0200 Subject: [PATCH 178/357] removed avalon-core submodule --- .gitmodules | 3 --- repos/avalon-core | 1 - 2 files changed, 4 deletions(-) delete mode 160000 repos/avalon-core diff --git a/.gitmodules b/.gitmodules index 9920ceaad6..e69de29bb2 100644 --- a/.gitmodules +++ b/.gitmodules @@ -1,3 +0,0 @@ -[submodule "repos/avalon-core"] - path = repos/avalon-core - url = https://github.com/pypeclub/avalon-core.git \ No newline at end of file diff --git a/repos/avalon-core b/repos/avalon-core deleted file mode 160000 index 2fa14cea6f..0000000000 --- a/repos/avalon-core +++ /dev/null @@ -1 +0,0 @@ -Subproject commit 2fa14cea6f6a9d86eec70bbb96860cbe4c75c8eb From 6bf92ef0f6dee5d6475e169df73294d41c62befa Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 14 Apr 2022 17:20:50 +0200 Subject: [PATCH 179/357] OP-3021 - refactor CreateWritePrerender --- openpype/hosts/nuke/plugins/__init__.py | 0 .../hosts/nuke/plugins/create/__init__.py | 0 .../plugins/create/create_write_prerender.py | 96 +------------------ .../plugins/create/create_write_render.py | 2 +- 4 files changed, 6 insertions(+), 92 deletions(-) create mode 100644 openpype/hosts/nuke/plugins/__init__.py create mode 100644 openpype/hosts/nuke/plugins/create/__init__.py diff --git 
a/openpype/hosts/nuke/plugins/__init__.py b/openpype/hosts/nuke/plugins/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/openpype/hosts/nuke/plugins/create/__init__.py b/openpype/hosts/nuke/plugins/create/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/openpype/hosts/nuke/plugins/create/create_write_prerender.py b/openpype/hosts/nuke/plugins/create/create_write_prerender.py index 761439fdb2..e9309d8170 100644 --- a/openpype/hosts/nuke/plugins/create/create_write_prerender.py +++ b/openpype/hosts/nuke/plugins/create/create_write_prerender.py @@ -1,12 +1,10 @@ -from collections import OrderedDict - import nuke -from openpype.hosts.nuke.api import plugin from openpype.hosts.nuke.api.lib import create_write_node +from openpype.hosts.nuke.plugins.create import create_write_render -class CreateWritePrerender(plugin.OpenPypeCreator): +class CreateWritePrerender(create_write_render.CreateWriteRender): # change this to template preset name = "WritePrerender" label = "Create Write Prerender" @@ -19,85 +17,7 @@ class CreateWritePrerender(plugin.OpenPypeCreator): def __init__(self, *args, **kwargs): super(CreateWritePrerender, self).__init__(*args, **kwargs) - data = OrderedDict() - - data["family"] = self.family - data["families"] = self.n_class - - for k, v in self.data.items(): - if k not in data.keys(): - data.update({k: v}) - - self.data = data - self.nodes = nuke.selectedNodes() - self.log.debug("_ self.data: '{}'".format(self.data)) - - def process(self): - inputs = [] - outputs = [] - instance = nuke.toNode(self.data["subset"]) - selected_node = None - - # use selection - if (self.options or {}).get("useSelection"): - nodes = self.nodes - - if not (len(nodes) < 2): - msg = ("Select only one node. The node " - "you want to connect to, " - "or tick off `Use selection`") - self.log.error(msg) - nuke.message(msg) - - if len(nodes) == 0: - msg = ( - "No nodes selected. 
Please select a single node to connect" - " to or tick off `Use selection`" - ) - self.log.error(msg) - nuke.message(msg) - - selected_node = nodes[0] - inputs = [selected_node] - outputs = selected_node.dependent() - - if instance: - if (instance.name() in selected_node.name()): - selected_node = instance.dependencies()[0] - - # if node already exist - if instance: - # collect input / outputs - inputs = instance.dependencies() - outputs = instance.dependent() - selected_node = inputs[0] - # remove old one - nuke.delete(instance) - - # recreate new - write_data = { - "nodeclass": self.n_class, - "families": [self.family], - "avalon": self.data - } - - # add creator data - creator_data = {"creator": self.__class__.__name__} - self.data.update(creator_data) - write_data.update(creator_data) - - if self.presets.get('fpath_template'): - self.log.info("Adding template path from preset") - write_data.update( - {"fpath_template": self.presets["fpath_template"]} - ) - else: - self.log.info("Adding template path from plugin") - write_data.update({ - "fpath_template": ("{work}/prerenders/nuke/{subset}" - "/{subset}.{frame}.{ext}")}) - - self.log.info("write_data: {}".format(write_data)) + def _create_write_node(self, selected_node, inputs, outputs, write_data): reviewable = self.presets.get("reviewable") write_node = create_write_node( self.data["subset"], @@ -107,15 +27,9 @@ class CreateWritePrerender(plugin.OpenPypeCreator): review=reviewable, linked_knobs=["channels", "___", "first", "last", "use_limit"]) - # relinking to collected connections - for i, input in enumerate(inputs): - write_node.setInput(i, input) - - write_node.autoplace() - - for output in outputs: - output.setInput(0, write_node) + return write_node + def _modify_write_node(self, write_node): # open group node write_node.begin() for n in nuke.allNodes(): diff --git a/openpype/hosts/nuke/plugins/create/create_write_render.py b/openpype/hosts/nuke/plugins/create/create_write_render.py index 4c494a58be..52edd85e41 100644 --- a/openpype/hosts/nuke/plugins/create/create_write_render.py +++ b/openpype/hosts/nuke/plugins/create/create_write_render.py @@ -17,7 +17,7 @@ class CreateWriteRender(plugin.OpenPypeCreator): defaults = ["Main", "Mask"] def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) + super(CreateWriteRender, self).__init__(*args, **kwargs) data = OrderedDict() From 4d1345f2e4790b876eed9e4a55f547ff67c450c6 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 14 Apr 2022 17:39:10 +0200 Subject: [PATCH 180/357] OP-3021 - fix template value Cannot use format function --- openpype/hosts/nuke/plugins/create/create_write_render.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/nuke/plugins/create/create_write_render.py b/openpype/hosts/nuke/plugins/create/create_write_render.py index 52edd85e41..8204c6420d 100644 --- a/openpype/hosts/nuke/plugins/create/create_write_render.py +++ b/openpype/hosts/nuke/plugins/create/create_write_render.py @@ -98,7 +98,7 @@ class CreateWriteRender(plugin.OpenPypeCreator): self.log.info("Adding template path from plugin") write_data.update({ "fpath_template": - ("{work}/{}s/nuke/{subset}".format(self.family) + + ("{work}/" + self.family + "s/nuke/{subset}" "/{subset}.{frame}.{ext}")}) write_node = self._create_write_node(selected_node, From 31020f6a9c9764649040c874110eeeaec2b50269 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 15 Apr 2022 11:53:53 +0200 Subject: [PATCH 181/357] flame: fixing flair to flare --- 
.../schemas/projects_schema/schemas/schema_anatomy_imageio.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_anatomy_imageio.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_anatomy_imageio.json index 9f142bad09..1d6c428fe0 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_anatomy_imageio.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_anatomy_imageio.json @@ -446,7 +446,7 @@ { "key": "flame", "type": "dict", - "label": "Flame/Flair", + "label": "Flame & Flare", "children": [ { "key": "project", From dc88fb91679521c581bc4e8e16a5c6a23a81868e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 15 Apr 2022 10:22:14 +0000 Subject: [PATCH 182/357] Bump async from 2.6.3 to 2.6.4 in /website Bumps [async](https://github.com/caolan/async) from 2.6.3 to 2.6.4. - [Release notes](https://github.com/caolan/async/releases) - [Changelog](https://github.com/caolan/async/blob/v2.6.4/CHANGELOG.md) - [Commits](https://github.com/caolan/async/compare/v2.6.3...v2.6.4) --- updated-dependencies: - dependency-name: async dependency-type: indirect ... Signed-off-by: dependabot[bot] --- website/yarn.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/website/yarn.lock b/website/yarn.lock index e01f0c4ef2..04b9dd658b 100644 --- a/website/yarn.lock +++ b/website/yarn.lock @@ -2311,9 +2311,9 @@ asap@~2.0.3: integrity sha1-5QNHYR1+aQlDIIu9r+vLwvuGbUY= async@^2.6.2: - version "2.6.3" - resolved "https://registry.yarnpkg.com/async/-/async-2.6.3.tgz#d72625e2344a3656e3a3ad4fa749fa83299d82ff" - integrity sha512-zflvls11DCy+dQWzTW2dzuilv8Z5X/pjfmZOWba6TNIVDm+2UDaJmXSOXlasHKfNBs8oo3M0aT50fDEWfKZjXg== + version "2.6.4" + resolved "https://registry.yarnpkg.com/async/-/async-2.6.4.tgz#706b7ff6084664cd7eae713f6f965433b5504221" + integrity sha512-mzo5dfJYwAn29PeiJ0zvwTo04zj8HDJj0Mn8TD7sno7q12prdbnasKJHhkm2c1LgrhlJ0teaea8860oxi51mGA== dependencies: lodash "^4.17.14" From eae3934aa82751988f282381203343db21b60a9f Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 15 Apr 2022 15:21:26 +0200 Subject: [PATCH 183/357] flame: fixing loading --- openpype/hosts/flame/api/plugin.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/openpype/hosts/flame/api/plugin.py b/openpype/hosts/flame/api/plugin.py index c87445fdd3..11108ba49f 100644 --- a/openpype/hosts/flame/api/plugin.py +++ b/openpype/hosts/flame/api/plugin.py @@ -873,6 +873,5 @@ class OpenClipSolver(flib.MediaInfoFile): if feed_clr_obj is not None: feed_clr_obj = ET.Element( "colourSpace", {"type": "string"}) + feed_clr_obj.text = profile_name feed_storage_obj.append(feed_clr_obj) - - feed_clr_obj.text = profile_name From 094729c3f86db58dd6f28cff75173933f7603f07 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 19 Apr 2022 17:28:08 +0200 Subject: [PATCH 184/357] OP-3021 - refactored still image creator Not tested yet as it is not working in regular develop either. 
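The pattern shared by these OP-3021 creator refactors is a plain template method: the render creator keeps the common selection handling, relinking and autoplacement in process() and exposes two hooks, _create_write_node() and _modify_write_node(), which the prerender and still creators override. A minimal, standalone sketch of that shape (class names and hook bodies below are simplified stand-ins, not the actual OpenPype plugin code):

    class BaseWriteCreator(object):
        """Shared wiring lives in the base; subclasses override two hooks."""

        def process(self, write_data):
            write_node = self._create_write_node(write_data)
            # shared relinking / autoplacement of the node would happen here
            return self._modify_write_node(write_node)

        def _create_write_node(self, write_data):
            # stand-in for building the actual Nuke Write group
            return {"class": "Write", "data": dict(write_data)}

        def _modify_write_node(self, write_node):
            return write_node

    class StillWriteCreator(BaseWriteCreator):
        # only the hooks differ between render / prerender / still creators
        def _create_write_node(self, write_data):
            write_data["fpath_template"] = (
                "{work}/renders/nuke/{subset}/{subset}.{ext}")
            return super(StillWriteCreator, self)._create_write_node(write_data)

    print(StillWriteCreator().process({"subset": "imageMain"}))

Each creator then only describes how its Write node is built and post-processed, and the shared wiring stays in one place.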
--- .../nuke/plugins/create/create_write_still.py | 90 ++----------------- 1 file changed, 6 insertions(+), 84 deletions(-) diff --git a/openpype/hosts/nuke/plugins/create/create_write_still.py b/openpype/hosts/nuke/plugins/create/create_write_still.py index 0037b64ce3..3361bc2602 100644 --- a/openpype/hosts/nuke/plugins/create/create_write_still.py +++ b/openpype/hosts/nuke/plugins/create/create_write_still.py @@ -1,12 +1,10 @@ -from collections import OrderedDict - import nuke -from openpype.hosts.nuke.api import plugin from openpype.hosts.nuke.api.lib import create_write_node +from openpype.hosts.nuke.plugins.create import create_write_render -class CreateWriteStill(plugin.OpenPypeCreator): +class CreateWriteStill(create_write_render.CreateWriteRender): # change this to template preset name = "WriteStillFrame" label = "Create Write Still Image" @@ -23,77 +21,8 @@ class CreateWriteStill(plugin.OpenPypeCreator): def __init__(self, *args, **kwargs): super(CreateWriteStill, self).__init__(*args, **kwargs) - data = OrderedDict() - - data["family"] = self.family - data["families"] = self.n_class - - for k, v in self.data.items(): - if k not in data.keys(): - data.update({k: v}) - - self.data = data - self.nodes = nuke.selectedNodes() - self.log.debug("_ self.data: '{}'".format(self.data)) - - def process(self): - - inputs = [] - outputs = [] - instance = nuke.toNode(self.data["subset"]) - selected_node = None - - # use selection - if (self.options or {}).get("useSelection"): - nodes = self.nodes - - if not (len(nodes) < 2): - msg = ("Select only one node. " - "The node you want to connect to, " - "or tick off `Use selection`") - self.log.error(msg) - nuke.message(msg) - return - - if len(nodes) == 0: - msg = ( - "No nodes selected. Please select a single node to connect" - " to or tick off `Use selection`" - ) - self.log.error(msg) - nuke.message(msg) - return - - selected_node = nodes[0] - inputs = [selected_node] - outputs = selected_node.dependent() - - if instance: - if (instance.name() in selected_node.name()): - selected_node = instance.dependencies()[0] - - # if node already exist - if instance: - # collect input / outputs - inputs = instance.dependencies() - outputs = instance.dependent() - selected_node = inputs[0] - # remove old one - nuke.delete(instance) - - # recreate new - write_data = { - "nodeclass": self.n_class, - "families": [self.family], - "avalon": self.data - } - - # add creator data - creator_data = {"creator": self.__class__.__name__} - self.data.update(creator_data) - write_data.update(creator_data) - - self.log.info("Adding template path from plugin") + def _create_write_node(self, selected_node, inputs, outputs, write_data): + # explicitly reset template to 'renders', not same as other 2 writes write_data.update({ "fpath_template": ( "{work}/renders/nuke/{subset}/{subset}.{ext}")}) @@ -118,16 +47,9 @@ class CreateWriteStill(plugin.OpenPypeCreator): farm=False, linked_knobs=["channels", "___", "first", "last", "use_limit"]) - # relinking to collected connections - for i, input in enumerate(inputs): - write_node.setInput(i, input) + return write_node - write_node.autoplace() - - for output in outputs: - output.setInput(0, write_node) - - # link frame hold to group node + def _modify_write_node(self, write_node): write_node.begin() for n in nuke.allNodes(): # get write node From 8680e841787aa3f51392a9a57859fe482923219b Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 19 Apr 2022 18:05:48 +0200 Subject: [PATCH 185/357] removed usage of AVALON_SCHEMA --- 
.../hosts/maya/plugins/publish/submit_maya_muster.py | 2 -- .../deadline/plugins/publish/submit_nuke_deadline.py | 1 - openpype/pipeline/schema.py | 9 +++------ 3 files changed, 3 insertions(+), 9 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/submit_maya_muster.py b/openpype/hosts/maya/plugins/publish/submit_maya_muster.py index 43a01fe542..c4250a20bd 100644 --- a/openpype/hosts/maya/plugins/publish/submit_maya_muster.py +++ b/openpype/hosts/maya/plugins/publish/submit_maya_muster.py @@ -488,7 +488,6 @@ class MayaSubmitMuster(pyblish.api.InstancePlugin): "MAYA_RENDER_DESC_PATH", "MAYA_MODULE_PATH", "ARNOLD_PLUGIN_PATH", - "AVALON_SCHEMA", "FTRACK_API_KEY", "FTRACK_API_USER", "FTRACK_SERVER", @@ -547,4 +546,3 @@ class MayaSubmitMuster(pyblish.api.InstancePlugin): "%f=%d was rounded off to nearest integer" % (value, int(value)) ) - diff --git a/openpype/modules/deadline/plugins/publish/submit_nuke_deadline.py b/openpype/modules/deadline/plugins/publish/submit_nuke_deadline.py index ed0041b153..94c703d66d 100644 --- a/openpype/modules/deadline/plugins/publish/submit_nuke_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_nuke_deadline.py @@ -242,7 +242,6 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin): keys = [ "PYTHONPATH", "PATH", - "AVALON_SCHEMA", "AVALON_PROJECT", "AVALON_ASSET", "AVALON_TASK", diff --git a/openpype/pipeline/schema.py b/openpype/pipeline/schema.py index 26d987b8f3..7e96bfe1b1 100644 --- a/openpype/pipeline/schema.py +++ b/openpype/pipeline/schema.py @@ -118,15 +118,12 @@ _cache = { def _precache(): + """Store available schemas in-memory for reduced disk access""" global _CACHED - if os.environ.get('AVALON_SCHEMA'): - schema_dir = os.environ['AVALON_SCHEMA'] - else: - current_dir = os.path.dirname(os.path.abspath(__file__)) - schema_dir = os.path.join(current_dir, "schema") + repos_root = os.environ["OPENPYPE_REPOS_ROOT"] + schema_dir = os.path.join(repos_root, "schema") - """Store available schemas in-memory for reduced disk access""" for schema in os.listdir(schema_dir): if schema.startswith(("_", ".")): continue From 3babf06542241dadf38b3fe32e3a1b457e781b56 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 19 Apr 2022 18:17:05 +0200 Subject: [PATCH 186/357] changed how and if are repos added to sys path --- start.py | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/start.py b/start.py index f8a01dd9ab..c7dc251f5f 100644 --- a/start.py +++ b/start.py @@ -320,6 +320,7 @@ def run_disk_mapping_commands(settings): destination)) raise + def set_avalon_environments(): """Set avalon specific environments. 
@@ -838,8 +839,14 @@ def _bootstrap_from_code(use_version, use_staging): version_path = Path(_openpype_root) os.environ["OPENPYPE_REPOS_ROOT"] = _openpype_root - repos = os.listdir(os.path.join(_openpype_root, "repos")) - repos = [os.path.join(_openpype_root, "repos", repo) for repo in repos] + repos = [] + # Check for "openpype/repos" directory for sumodules + # NOTE: Is not used at this moment but can be re-used in future + repos_dir = os.path.join(_openpype_root, "repos") + if os.path.exists(repos_dir): + for name in os.listdir(repos_dir): + repos.append(os.path.join(repos_dir, name)) + # add self to python paths repos.insert(0, _openpype_root) for repo in repos: From 89eeb4b31b5c7ad661237ad68d68f0c1fd92be76 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 19 Apr 2022 18:17:24 +0200 Subject: [PATCH 187/357] don't set all environments --- start.py | 16 ---------------- 1 file changed, 16 deletions(-) diff --git a/start.py b/start.py index c7dc251f5f..35a14a059e 100644 --- a/start.py +++ b/start.py @@ -328,28 +328,12 @@ def set_avalon_environments(): before avalon module is imported because avalon works with globals set with environment variables. """ - from openpype import PACKAGE_DIR - # Path to OpenPype's schema - schema_path = os.path.join( - os.path.dirname(PACKAGE_DIR), - "schema" - ) - # Avalon mongo URL - avalon_mongo_url = ( - os.environ.get("AVALON_MONGO") - or os.environ["OPENPYPE_MONGO"] - ) avalon_db = os.environ.get("AVALON_DB") or "avalon" # for tests os.environ.update({ - # Mongo url (use same as OpenPype has) - "AVALON_MONGO": avalon_mongo_url, - - "AVALON_SCHEMA": schema_path, # Mongo DB name where avalon docs are stored "AVALON_DB": avalon_db, # Name of config - "AVALON_CONFIG": "openpype", "AVALON_LABEL": "OpenPype" }) From 15d59e1f00fdf30aa2f7fd292a2a723f838ae9e3 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 19 Apr 2022 18:45:55 +0200 Subject: [PATCH 188/357] use 'OPENPYPE_LOG_LEVEL' if available instead of 'OPENPYPE_DEBUG' --- openpype/lib/log.py | 21 +++++++++++++-------- 1 file changed, 13 insertions(+), 8 deletions(-) diff --git a/openpype/lib/log.py b/openpype/lib/log.py index f33385e0ba..51afac6d8d 100644 --- a/openpype/lib/log.py +++ b/openpype/lib/log.py @@ -216,8 +216,8 @@ class PypeLogger: # Collection name under database in Mongo log_collection_name = "logs" - # OPENPYPE_DEBUG - pype_debug = 0 + # Logging level - OPENPYPE_LOG_LEVEL + log_level = None # Data same for all record documents process_data = None @@ -231,10 +231,7 @@ class PypeLogger: logger = logging.getLogger(name or "__main__") - if cls.pype_debug > 0: - logger.setLevel(logging.DEBUG) - else: - logger.setLevel(logging.INFO) + logger.setLevel(cls.log_level) add_mongo_handler = cls.use_mongo_logging add_console_handler = True @@ -357,8 +354,16 @@ class PypeLogger: # Store result to class definition cls.use_mongo_logging = use_mongo_logging - # Define if is in OPENPYPE_DEBUG mode - cls.pype_debug = int(os.getenv("OPENPYPE_DEBUG") or "0") + # Define what is logging level + log_level = os.getenv("OPENPYPE_LOG_LEVEL") + if not log_level: + # Check OPENPYPE_DEBUG for backwards compatibility + op_debug = os.getenv("OPENPYPE_DEBUG") + if op_debug and int(op_debug) > 0: + log_level = 10 + else: + log_level = 20 + cls.log_level = int(log_level) if not os.environ.get("OPENPYPE_MONGO"): cls.use_mongo_logging = False From 9ff8f3011b6f6e8eae0336fdb0e829b5d3d0a5ce Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 19 Apr 2022 18:49:54 +0200 Subject: [PATCH 189/357] added global verbose argument 
which can change log level --- start.py | 40 ++++++++++++++++++++++++++++++++++++++++ 1 file changed, 40 insertions(+) diff --git a/start.py b/start.py index f8a01dd9ab..c61d72dabf 100644 --- a/start.py +++ b/start.py @@ -191,6 +191,46 @@ else: if os.getenv("OPENPYPE_HEADLESS_MODE") != "1": os.environ.pop("OPENPYPE_HEADLESS_MODE", None) +# Enabled logging debug mode when "--debug" is passed +if "--verbose" in sys.argv: + expected_values = ( + "Expected: notset, debug, info, warning, error, critical" + " or integer [0-50]." + ) + idx = sys.argv.index("--verbose") + sys.argv.pop(idx) + if idx < len(sys.argv): + value = sys.argv.pop(idx) + else: + raise RuntimeError(( + "Expect value after \"--verbose\" argument. {}" + ).format(expected_values)) + + log_level = None + low_value = value.lower() + if low_value.isdigit(): + log_level = int(low_value) + elif low_value == "notset": + log_level = 0 + elif low_value == "debug": + log_level = 10 + elif low_value == "info": + log_level = 20 + elif low_value == "warning": + log_level = 30 + elif low_value == "error": + log_level = 40 + elif low_value == "critical": + log_level = 50 + + if log_level is None: + raise RuntimeError(( + "Unexpected value after \"--verbose\" argument \"{}\". {}" + ).format(value, expected_values)) + + os.environ["OPENPYPE_LOG_LEVEL"] = str(log_level) + + import igniter # noqa: E402 from igniter import BootstrapRepos # noqa: E402 from igniter.tools import ( From d71baa839006c68360401c40b5573a6f65a93d25 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 19 Apr 2022 18:50:11 +0200 Subject: [PATCH 190/357] added debug argument to global sys argv handling --- start.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/start.py b/start.py index c61d72dabf..541a28f6c9 100644 --- a/start.py +++ b/start.py @@ -230,6 +230,11 @@ if "--verbose" in sys.argv: os.environ["OPENPYPE_LOG_LEVEL"] = str(log_level) +# Enable debug mode, may affect log level if log level is not defined +if "--debug" in sys.argv: + sys.argv.remove("--debug") + os.environ["OPENPYPE_DEBUG"] = "1" + import igniter # noqa: E402 from igniter import BootstrapRepos # noqa: E402 From 4c495d0aa6b256a8d81ed00bdd62455d7840d54d Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 19 Apr 2022 18:50:40 +0200 Subject: [PATCH 191/357] added settings which defines if logs are added to mongo --- openpype/settings/defaults/system_settings/general.json | 1 + .../entities/schemas/system_schema/schema_general.json | 5 +++++ openpype/settings/handlers.py | 1 + 3 files changed, 7 insertions(+) diff --git a/openpype/settings/defaults/system_settings/general.json b/openpype/settings/defaults/system_settings/general.json index e1785f8709..a06947ba77 100644 --- a/openpype/settings/defaults/system_settings/general.json +++ b/openpype/settings/defaults/system_settings/general.json @@ -7,6 +7,7 @@ "global": [] } }, + "log_to_server": true, "disk_mapping": { "windows": [], "linux": [], diff --git a/openpype/settings/entities/schemas/system_schema/schema_general.json b/openpype/settings/entities/schemas/system_schema/schema_general.json index fcab4cd5d8..0090c54386 100644 --- a/openpype/settings/entities/schemas/system_schema/schema_general.json +++ b/openpype/settings/entities/schemas/system_schema/schema_general.json @@ -40,6 +40,11 @@ { "type": "splitter" }, + { + "type": "boolean", + "key": "log_to_server", + "label": "Log to mongo" + }, { "type": "dict", "key": "disk_mapping", diff --git a/openpype/settings/handlers.py b/openpype/settings/handlers.py index 2109b53b09..af54946d5e 
100644 --- a/openpype/settings/handlers.py +++ b/openpype/settings/handlers.py @@ -324,6 +324,7 @@ class MongoSettingsHandler(SettingsHandler): global_general_keys = ( "openpype_path", "admin_password", + "log_to_server", "disk_mapping", "production_version", "staging_version" From a118c1e98014477bc008dd73b5c58a858bb33aca Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 19 Apr 2022 18:51:05 +0200 Subject: [PATCH 192/357] check global settings on start if logs should be send to mongo --- openpype/lib/log.py | 3 +++ start.py | 10 ++++++++++ 2 files changed, 13 insertions(+) diff --git a/openpype/lib/log.py b/openpype/lib/log.py index 51afac6d8d..2cdb7ec8e4 100644 --- a/openpype/lib/log.py +++ b/openpype/lib/log.py @@ -330,6 +330,9 @@ class PypeLogger: # Define if should logging to mongo be used use_mongo_logging = bool(log4mongo is not None) + if use_mongo_logging: + use_mongo_logging = os.environ.get("OPENPYPE_LOG_TO_SERVER") == "1" + # Set mongo id for process (ONLY ONCE) if use_mongo_logging and cls.mongo_process_id is None: try: diff --git a/start.py b/start.py index 541a28f6c9..0c4cfb90ea 100644 --- a/start.py +++ b/start.py @@ -972,6 +972,16 @@ def boot(): _print(">>> run disk mapping command ...") run_disk_mapping_commands(global_settings) + # Logging to server enabled/disabled + log_to_server = global_settings.get("log_to_server", True) + if log_to_server: + os.environ["OPENPYPE_LOG_TO_SERVER"] = "1" + log_to_server_msg = "ON" + else: + os.environ.pop("OPENPYPE_LOG_TO_SERVER", None) + log_to_server_msg = "OFF" + _print(f">>> Logging to server is turned {log_to_server_msg}") + # Get openpype path from database and set it to environment so openpype can # find its versions there and bootstrap them. openpype_path = get_openpype_path_from_settings(global_settings) From fc4f7ed5d93b5bdf8c8738bdc7264bb1e7ae627a Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 20 Apr 2022 12:09:22 +0200 Subject: [PATCH 193/357] Update openpype/modules/sync_server/sync_server_module.py Co-authored-by: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> --- openpype/modules/sync_server/sync_server_module.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/modules/sync_server/sync_server_module.py b/openpype/modules/sync_server/sync_server_module.py index af69e645d5..fb81791da2 100644 --- a/openpype/modules/sync_server/sync_server_module.py +++ b/openpype/modules/sync_server/sync_server_module.py @@ -1423,7 +1423,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): pause (bool or None): if True - pause, False - unpause force (bool): hard reset - currently only for add_site - Throws: + Raises: SiteAlreadyPresentError - if adding already existing site and not 'force' ValueError - other errors (repre not found, misconfiguration) From eb1f72fccd192d5e6d6b326d4fac18206e454aa9 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 20 Apr 2022 12:29:02 +0200 Subject: [PATCH 194/357] OP-2951 - refactored to use AvalonMongoDB --- openpype/lib/avalon_context.py | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/openpype/lib/avalon_context.py b/openpype/lib/avalon_context.py index 7f35694e58..35ccb1b68d 100644 --- a/openpype/lib/avalon_context.py +++ b/openpype/lib/avalon_context.py @@ -1990,13 +1990,14 @@ def get_linked_ids_for_representations(project_name, repre_ids, dbcon=None, Returns: (list) of ObjectId - linked representations """ + # Create new dbcon if not passed and use passed project name if not dbcon: - log.debug("Using `avalon.io` for 
query.") - dbcon = avalon.io - # Make sure is installed - dbcon.install() - - dbcon.Session["AVALON_PROJECT"] = project_name + from avalon.api import AvalonMongoDB + dbcon = AvalonMongoDB() + dbcon.Session["AVALON_PROJECT"] = project_name + # Validate that passed dbcon has same project + elif dbcon.Session["AVALON_PROJECT"] != project_name: + raise ValueError("Passed connection does not have right project") if not isinstance(repre_ids, list): repre_ids = [repre_ids] From c86f62e2d2afc7302b2765c4c750c6c94bbed941 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 20 Apr 2022 13:25:34 +0200 Subject: [PATCH 195/357] ignore missing repos folder --- igniter/bootstrap_repos.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/igniter/bootstrap_repos.py b/igniter/bootstrap_repos.py index ad49f868d5..2e47f549d7 100644 --- a/igniter/bootstrap_repos.py +++ b/igniter/bootstrap_repos.py @@ -1097,7 +1097,7 @@ class BootstrapRepos: sys.path.insert(0, directory.as_posix()) directory /= "repos" if not directory.exists() and not directory.is_dir(): - raise ValueError("directory is invalid") + return roots = [] for item in directory.iterdir(): From b9b199c61de0ef07d1b9fb340ea28db56f152545 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 20 Apr 2022 13:26:28 +0200 Subject: [PATCH 196/357] ignore repos dir in include files if not available --- setup.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/setup.py b/setup.py index bf42602b52..dc6c003ed6 100644 --- a/setup.py +++ b/setup.py @@ -123,12 +123,15 @@ bin_includes = [ include_files = [ "igniter", "openpype", - "repos", "schema", "LICENSE", "README.md" ] +repos_path = openpype_root / "repos" +if repos_path.exists(): + include_files.append("repos") + if IS_WINDOWS: install_requires.extend([ # `pywin32` packages From 884e1a409ed6d0b5b47af640a2f82b38aab7b5bd Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 20 Apr 2022 14:18:00 +0200 Subject: [PATCH 197/357] removed env_group_key from schemas --- openpype/settings/entities/schemas/README.md | 3 +-- .../schemas/system_schema/example_schema.json | 13 ------------- .../schemas/system_schema/example_template.json | 3 +-- .../schemas/system_schema/schema_general.json | 1 - openpype/tools/settings/settings/README.md | 3 +-- 5 files changed, 3 insertions(+), 20 deletions(-) diff --git a/openpype/settings/entities/schemas/README.md b/openpype/settings/entities/schemas/README.md index b4bfef2972..b4c878fe0f 100644 --- a/openpype/settings/entities/schemas/README.md +++ b/openpype/settings/entities/schemas/README.md @@ -46,8 +46,7 @@ }, { "type": "raw-json", "label": "{host_label} Environments", - "key": "{host_name}_environments", - "env_group_key": "{host_name}" + "key": "{host_name}_environments" }, { "type": "path", "key": "{host_name}_executables", diff --git a/openpype/settings/entities/schemas/system_schema/example_schema.json b/openpype/settings/entities/schemas/system_schema/example_schema.json index 6a86dae259..b9747b5f4f 100644 --- a/openpype/settings/entities/schemas/system_schema/example_schema.json +++ b/openpype/settings/entities/schemas/system_schema/example_schema.json @@ -117,19 +117,6 @@ } ] }, - { - "key": "env_group_test", - "label": "EnvGroup Test", - "type": "dict", - "children": [ - { - "key": "key_to_store_in_system_settings", - "label": "Testing environment group", - "type": "raw-json", - "env_group_key": "test_group" - } - ] - }, { "key": "dict_wrapper", "type": "dict", diff --git 
a/openpype/settings/entities/schemas/system_schema/example_template.json b/openpype/settings/entities/schemas/system_schema/example_template.json index ff78c78e8f..9955cf5651 100644 --- a/openpype/settings/entities/schemas/system_schema/example_template.json +++ b/openpype/settings/entities/schemas/system_schema/example_template.json @@ -7,8 +7,7 @@ { "type": "raw-json", "label": "{host_label} Environments", - "key": "{host_name}_environments", - "env_group_key": "{host_name}" + "key": "{host_name}_environments" }, { "type": "path", diff --git a/openpype/settings/entities/schemas/system_schema/schema_general.json b/openpype/settings/entities/schemas/system_schema/schema_general.json index fcab4cd5d8..695ab8bceb 100644 --- a/openpype/settings/entities/schemas/system_schema/schema_general.json +++ b/openpype/settings/entities/schemas/system_schema/schema_general.json @@ -34,7 +34,6 @@ "key": "environment", "label": "Environment", "type": "raw-json", - "env_group_key": "global", "require_restart": true }, { diff --git a/openpype/tools/settings/settings/README.md b/openpype/tools/settings/settings/README.md index 1c916ddff2..c29664a907 100644 --- a/openpype/tools/settings/settings/README.md +++ b/openpype/tools/settings/settings/README.md @@ -44,8 +44,7 @@ }, { "type": "raw-json", "label": "{host_label} Environments", - "key": "{host_name}_environments", - "env_group_key": "{host_name}" + "key": "{host_name}_environments" }, { "type": "path-widget", "key": "{host_name}_executables", From ecbf5d859b13332b9afbada6524dff8b25d9b72a Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 20 Apr 2022 14:19:03 +0200 Subject: [PATCH 198/357] removed env group ogic from entities --- openpype/settings/entities/base_entity.py | 21 ----------- .../entities/dict_mutable_keys_entity.py | 33 ++++------------- openpype/settings/entities/input_entities.py | 20 +---------- openpype/settings/entities/root_entities.py | 36 ------------------- .../settings/settings/dict_mutable_widget.py | 4 --- 5 files changed, 7 insertions(+), 107 deletions(-) diff --git a/openpype/settings/entities/base_entity.py b/openpype/settings/entities/base_entity.py index 21ee44ae77..741f13c49b 100644 --- a/openpype/settings/entities/base_entity.py +++ b/openpype/settings/entities/base_entity.py @@ -127,12 +127,6 @@ class BaseItemEntity(BaseEntity): # Entity is in hierarchy of dynamically created entity self.is_in_dynamic_item = False - # Entity will save metadata about environments - # - this is current possible only for RawJsonEnity - self.is_env_group = False - # Key of environment group key must be unique across system settings - self.env_group_key = None - # Roles of an entity self.roles = None @@ -286,16 +280,6 @@ class BaseItemEntity(BaseEntity): ).format(self.group_item.path) raise EntitySchemaError(self, reason) - # Validate that env group entities will be stored into file. - # - env group entities must store metadata which is not possible if - # metadata would be outside of file - if self.file_item is None and self.is_env_group: - reason = ( - "Environment item is not inside file" - " item so can't store metadata for defaults." - ) - raise EntitySchemaError(self, reason) - # Dynamic items must not have defined labels. 
(UI specific) if self.label and self.is_dynamic_item: raise EntitySchemaError( @@ -862,11 +846,6 @@ class ItemEntity(BaseItemEntity): if self.is_dynamic_item: self.require_key = False - # If value should be stored to environments and uder which group key - # - the key may be dynamically changed by it's parent on save - self.env_group_key = self.schema_data.get("env_group_key") - self.is_env_group = bool(self.env_group_key is not None) - # Root item reference self.root_item = self.parent.root_item diff --git a/openpype/settings/entities/dict_mutable_keys_entity.py b/openpype/settings/entities/dict_mutable_keys_entity.py index a0c93b97a7..3dc07524af 100644 --- a/openpype/settings/entities/dict_mutable_keys_entity.py +++ b/openpype/settings/entities/dict_mutable_keys_entity.py @@ -148,11 +148,7 @@ class DictMutableKeysEntity(EndpointEntity): ): raise InvalidKeySymbols(self.path, key) - if self.value_is_env_group: - item_schema = copy.deepcopy(self.item_schema) - item_schema["env_group_key"] = key - else: - item_schema = self.item_schema + item_schema = self.item_schema new_child = self.create_schema_object(item_schema, self, True) self.children_by_key[key] = new_child @@ -216,9 +212,7 @@ class DictMutableKeysEntity(EndpointEntity): self.children_label_by_id = {} self.store_as_list = self.schema_data.get("store_as_list") or False - self.value_is_env_group = ( - self.schema_data.get("value_is_env_group") or False - ) + self.required_keys = self.schema_data.get("required_keys") or [] self.collapsible_key = self.schema_data.get("collapsible_key") or False # GUI attributes @@ -241,9 +235,6 @@ class DictMutableKeysEntity(EndpointEntity): object_type.update(input_modifiers) self.item_schema = object_type - if self.value_is_env_group: - self.item_schema["env_group_key"] = "" - if self.group_item is None: self.is_group = True @@ -259,10 +250,6 @@ class DictMutableKeysEntity(EndpointEntity): if used_temp_label: self.label = None - if self.value_is_env_group and self.store_as_list: - reason = "Item can't store environments metadata to list output." - raise EntitySchemaError(self, reason) - if not self.schema_data.get("object_type"): reason = ( "Modifiable dictionary must have specified `object_type`." @@ -579,18 +566,10 @@ class DictMutableKeysEntity(EndpointEntity): output.append([key, child_value]) return output - output = {} - for key, child_entity in self.children_by_key.items(): - child_value = child_entity.settings_value() - # TODO child should have setter of env group key se child can - # know what env group represents. 
- if self.value_is_env_group: - if key not in child_value[M_ENVIRONMENT_KEY]: - _metadata = child_value[M_ENVIRONMENT_KEY] - _m_keykey = tuple(_metadata.keys())[0] - env_keys = child_value[M_ENVIRONMENT_KEY].pop(_m_keykey) - child_value[M_ENVIRONMENT_KEY][key] = env_keys - output[key] = child_value + output = { + key: child_entity.settings_value() + for key, child_entity in self.children_by_key.items() + } output.update(self.metadata) return output diff --git a/openpype/settings/entities/input_entities.py b/openpype/settings/entities/input_entities.py index 3dcd238672..32eedf3b3e 100644 --- a/openpype/settings/entities/input_entities.py +++ b/openpype/settings/entities/input_entities.py @@ -534,13 +534,7 @@ class RawJsonEntity(InputEntity): @property def metadata(self): - output = {} - if isinstance(self._current_value, dict) and self.is_env_group: - output[M_ENVIRONMENT_KEY] = { - self.env_group_key: list(self._current_value.keys()) - } - - return output + return {} @property def has_unsaved_changes(self): @@ -549,15 +543,6 @@ class RawJsonEntity(InputEntity): result = self.metadata != self._metadata_for_current_state() return result - def schema_validations(self): - if self.store_as_string and self.is_env_group: - reason = ( - "RawJson entity can't store environment group metadata" - " as string." - ) - raise EntitySchemaError(self, reason) - super(RawJsonEntity, self).schema_validations() - def _convert_to_valid_type(self, value): if isinstance(value, STRING_TYPE): try: @@ -583,9 +568,6 @@ class RawJsonEntity(InputEntity): def _settings_value(self): value = super(RawJsonEntity, self)._settings_value() - if self.is_env_group and isinstance(value, dict): - value.update(self.metadata) - if self.store_as_string: return json.dumps(value) return value diff --git a/openpype/settings/entities/root_entities.py b/openpype/settings/entities/root_entities.py index edb4407679..ff76fa5180 100644 --- a/openpype/settings/entities/root_entities.py +++ b/openpype/settings/entities/root_entities.py @@ -52,7 +52,6 @@ from openpype.settings.lib import ( get_available_studio_project_settings_overrides_versions, get_available_studio_project_anatomy_overrides_versions, - find_environments, apply_overrides ) @@ -422,11 +421,6 @@ class RootEntity(BaseItemEntity): """ pass - @abstractmethod - def _validate_defaults_to_save(self, value): - """Validate default values before save.""" - pass - def _save_default_values(self): """Save default values. @@ -435,7 +429,6 @@ class RootEntity(BaseItemEntity): DEFAULTS. """ settings_value = self.settings_value() - self._validate_defaults_to_save(settings_value) defaults_dir = self.defaults_dir() for file_path, value in settings_value.items(): @@ -604,8 +597,6 @@ class SystemSettings(RootEntity): def _save_studio_values(self): settings_value = self.settings_value() - self._validate_duplicated_env_group(settings_value) - self.log.debug("Saving system settings: {}".format( json.dumps(settings_value, indent=4) )) @@ -613,29 +604,6 @@ class SystemSettings(RootEntity): # Reset source version after restart self._source_version = None - def _validate_defaults_to_save(self, value): - """Valiations of default values before save.""" - self._validate_duplicated_env_group(value) - - def _validate_duplicated_env_group(self, value, override_state=None): - """ Validate duplicated environment groups. - - Raises: - DuplicatedEnvGroups: When value contain duplicated env groups. 
- """ - value = copy.deepcopy(value) - if override_state is None: - override_state = self._override_state - - if override_state is OverrideState.STUDIO: - default_values = get_default_settings()[SYSTEM_SETTINGS_KEY] - final_value = apply_overrides(default_values, value) - else: - final_value = value - - # Check if final_value contain duplicated environment groups - find_environments(final_value) - def _save_project_values(self): """System settings can't have project overrides. @@ -911,10 +879,6 @@ class ProjectSettings(RootEntity): if warnings: raise SaveWarningExc(warnings) - def _validate_defaults_to_save(self, value): - """Valiations of default values before save.""" - pass - def _validate_values_to_save(self, value): pass diff --git a/openpype/tools/settings/settings/dict_mutable_widget.py b/openpype/tools/settings/settings/dict_mutable_widget.py index 6489266131..1c704b3cd5 100644 --- a/openpype/tools/settings/settings/dict_mutable_widget.py +++ b/openpype/tools/settings/settings/dict_mutable_widget.py @@ -465,10 +465,6 @@ class ModifiableDictItem(QtWidgets.QWidget): self.entity_widget.change_key(key, self) self.update_style() - @property - def value_is_env_group(self): - return self.entity_widget.value_is_env_group - def update_key_label(self): if not self.collapsible_key: return From 95a8ccb47488dbf0d3c4be9333c3222887bfe017 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 20 Apr 2022 14:20:33 +0200 Subject: [PATCH 199/357] removed logic related to env groups --- openpype/api.py | 8 +- openpype/lib/env_tools.py | 54 ----------- openpype/settings/__init__.py | 2 - openpype/settings/constants.py | 4 - .../entities/dict_mutable_keys_entity.py | 1 - openpype/settings/entities/input_entities.py | 5 +- openpype/settings/lib.py | 95 ------------------- 7 files changed, 4 insertions(+), 165 deletions(-) diff --git a/openpype/api.py b/openpype/api.py index b692b36065..9ce745b653 100644 --- a/openpype/api.py +++ b/openpype/api.py @@ -3,7 +3,6 @@ from .settings import ( get_project_settings, get_current_project_settings, get_anatomy_settings, - get_environments, SystemSettings, ProjectSettings @@ -23,7 +22,6 @@ from .lib import ( get_app_environments_for_context, source_hash, get_latest_version, - get_global_environments, get_local_site_id, change_openpype_mongo_url, create_project_folders, @@ -69,10 +67,10 @@ __all__ = [ "get_project_settings", "get_current_project_settings", "get_anatomy_settings", - "get_environments", "get_project_basic_paths", "SystemSettings", + "ProjectSettings", "PypeLogger", "Logger", @@ -102,8 +100,9 @@ __all__ = [ # get contextual data "version_up", - "get_hierarchy", "get_asset", + "get_hierarchy", + "get_workdir_data", "get_version_from_path", "get_last_version_from_path", "get_app_environments_for_context", @@ -111,7 +110,6 @@ __all__ = [ "run_subprocess", "get_latest_version", - "get_global_environments", "get_local_site_id", "change_openpype_mongo_url", diff --git a/openpype/lib/env_tools.py b/openpype/lib/env_tools.py index 6521d20f1e..25bcbf7c1b 100644 --- a/openpype/lib/env_tools.py +++ b/openpype/lib/env_tools.py @@ -69,57 +69,3 @@ def get_paths_from_environ(env_key=None, env_value=None, return_first=False): return None # Return all existing paths from environment variable return existing_paths - - -def get_global_environments(env=None): - """Load global environments from Pype. - - Return prepared and parsed global environments by pype's settings. Use - combination of "global" environments set in pype's settings and enabled - modules. 
- - Args: - env (dict, optional): Initial environments. Empty dictionary is used - when not entered. - - Returns; - dict of str: Loaded and processed environments. - - """ - import acre - from openpype.modules import ModulesManager - from openpype.settings import get_environments - - if env is None: - env = {} - - # Get global environments from settings - all_settings_env = get_environments() - parsed_global_env = acre.parse(all_settings_env["global"]) - - # Merge with entered environments - merged_env = acre.append(env, parsed_global_env) - - # Get environments from Pype modules - modules_manager = ModulesManager() - - module_envs = modules_manager.collect_global_environments() - publish_plugin_dirs = modules_manager.collect_plugin_paths()["publish"] - - # Set pyblish plugins paths if any module want to register them - if publish_plugin_dirs: - publish_paths_str = os.environ.get("PYBLISHPLUGINPATH") or "" - publish_paths = publish_paths_str.split(os.pathsep) - _publish_paths = { - os.path.normpath(path) for path in publish_paths if path - } - for path in publish_plugin_dirs: - _publish_paths.add(os.path.normpath(path)) - module_envs["PYBLISHPLUGINPATH"] = os.pathsep.join(_publish_paths) - - # Merge environments with current environments and update values - if module_envs: - parsed_envs = acre.parse(module_envs) - merged_env = acre.merge(parsed_envs, merged_env) - - return acre.compute(merged_env, cleanup=True) diff --git a/openpype/settings/__init__.py b/openpype/settings/__init__.py index 14e4678050..ca7157812d 100644 --- a/openpype/settings/__init__.py +++ b/openpype/settings/__init__.py @@ -22,7 +22,6 @@ from .lib import ( get_project_settings, get_current_project_settings, get_anatomy_settings, - get_environments, get_local_settings ) from .entities import ( @@ -54,7 +53,6 @@ __all__ = ( "get_project_settings", "get_current_project_settings", "get_anatomy_settings", - "get_environments", "get_local_settings", "SystemSettings", diff --git a/openpype/settings/constants.py b/openpype/settings/constants.py index 19ff953eb4..cd84d4db1c 100644 --- a/openpype/settings/constants.py +++ b/openpype/settings/constants.py @@ -3,14 +3,11 @@ import re # Metadata keys for work with studio and project overrides M_OVERRIDDEN_KEY = "__overriden_keys__" -# Metadata key for storing information about environments -M_ENVIRONMENT_KEY = "__environment_keys__" # Metadata key for storing dynamic created labels M_DYNAMIC_KEY_LABEL = "__dynamic_keys_labels__" METADATA_KEYS = frozenset([ M_OVERRIDDEN_KEY, - M_ENVIRONMENT_KEY, M_DYNAMIC_KEY_LABEL ]) @@ -35,7 +32,6 @@ KEY_REGEX = re.compile(r"^[{}]+$".format(KEY_ALLOWED_SYMBOLS)) __all__ = ( "M_OVERRIDDEN_KEY", - "M_ENVIRONMENT_KEY", "M_DYNAMIC_KEY_LABEL", "METADATA_KEYS", diff --git a/openpype/settings/entities/dict_mutable_keys_entity.py b/openpype/settings/entities/dict_mutable_keys_entity.py index 3dc07524af..e6d332b9ad 100644 --- a/openpype/settings/entities/dict_mutable_keys_entity.py +++ b/openpype/settings/entities/dict_mutable_keys_entity.py @@ -15,7 +15,6 @@ from .exceptions import ( from openpype.settings.constants import ( METADATA_KEYS, M_DYNAMIC_KEY_LABEL, - M_ENVIRONMENT_KEY, KEY_REGEX, KEY_ALLOWED_SYMBOLS ) diff --git a/openpype/settings/entities/input_entities.py b/openpype/settings/entities/input_entities.py index 32eedf3b3e..89f12afd9b 100644 --- a/openpype/settings/entities/input_entities.py +++ b/openpype/settings/entities/input_entities.py @@ -15,10 +15,7 @@ from .exceptions import ( EntitySchemaError ) -from openpype.settings.constants import 
( - METADATA_KEYS, - M_ENVIRONMENT_KEY -) +from openpype.settings.constants import METADATA_KEYS class EndpointEntity(ItemEntity): diff --git a/openpype/settings/lib.py b/openpype/settings/lib.py index 937329b417..f921b9c318 100644 --- a/openpype/settings/lib.py +++ b/openpype/settings/lib.py @@ -9,7 +9,6 @@ from .exceptions import ( ) from .constants import ( M_OVERRIDDEN_KEY, - M_ENVIRONMENT_KEY, METADATA_KEYS, @@ -457,24 +456,6 @@ def get_local_settings(): return _LOCAL_SETTINGS_HANDLER.get_local_settings() -class DuplicatedEnvGroups(Exception): - def __init__(self, duplicated): - self.origin_duplicated = duplicated - self.duplicated = {} - for key, items in duplicated.items(): - self.duplicated[key] = [] - for item in items: - self.duplicated[key].append("/".join(item["parents"])) - - msg = "Duplicated environment group keys. {}".format( - ", ".join([ - "\"{}\"".format(env_key) for env_key in self.duplicated.keys() - ]) - ) - - super(DuplicatedEnvGroups, self).__init__(msg) - - def load_openpype_default_settings(): """Load openpype default settings.""" return load_jsons_from_dir(DEFAULTS_DIR) @@ -624,69 +605,6 @@ def load_jsons_from_dir(path, *args, **kwargs): return output -def find_environments(data, with_items=False, parents=None): - """ Find environemnt values from system settings by it's metadata. - - Args: - data(dict): System settings data or dictionary which may contain - environments metadata. - - Returns: - dict: Key as Environment key and value for `acre` module. - """ - if not data or not isinstance(data, dict): - return {} - - output = {} - if parents is None: - parents = [] - - if M_ENVIRONMENT_KEY in data: - metadata = data.get(M_ENVIRONMENT_KEY) - for env_group_key, env_keys in metadata.items(): - if env_group_key not in output: - output[env_group_key] = [] - - _env_values = {} - for key in env_keys: - _env_values[key] = data[key] - - item = { - "env": _env_values, - "parents": parents[:-1] - } - output[env_group_key].append(item) - - for key, value in data.items(): - _parents = copy.deepcopy(parents) - _parents.append(key) - result = find_environments(value, True, _parents) - if not result: - continue - - for env_group_key, env_values in result.items(): - if env_group_key not in output: - output[env_group_key] = [] - - for env_values_item in env_values: - output[env_group_key].append(env_values_item) - - if with_items: - return output - - duplicated_env_groups = {} - final_output = {} - for key, value_in_list in output.items(): - if len(value_in_list) > 1: - duplicated_env_groups[key] = value_in_list - else: - final_output[key] = value_in_list[0]["env"] - - if duplicated_env_groups: - raise DuplicatedEnvGroups(duplicated_env_groups) - return final_output - - def subkey_merge(_dict, value, keys): key = keys.pop(0) if not keys: @@ -1082,19 +1000,6 @@ def get_current_project_settings(): return get_project_settings(project_name) -def get_environments(): - """Calculated environment based on defaults and system settings. - - Any default environment also found in the system settings will be fully - overridden by the one from the system settings. - - Returns: - dict: Output should be ready for `acre` module. - """ - - return find_environments(get_system_settings(False)) - - def get_general_environments(): """Get general environments. From 9de22092cfd516d8ba4a96eb8fc83d750387064e Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 20 Apr 2022 16:58:24 +0200 Subject: [PATCH 200/357] OP-3021 - added Suspend publish knob to Nuke Added to Deadline tab of Write node. 
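The knob added below is what the collector and the Deadline submitter in the two following commits read back. A standalone sketch of the same idea, guarded so repeated runs do not duplicate the knob; it assumes a running Nuke session and an existing write or group node, and is an illustration rather than the patch itself:

    import nuke

    def add_suspend_publish_knob(node):
        # node.knobs() is a dict of existing knobs keyed by name, so the
        # knob is only created once even if this is called repeatedly
        if "suspend_publish" not in node.knobs():
            knob = nuke.Boolean_Knob("suspend_publish", "Suspend publish")
            knob.setValue(False)
            node.addKnob(knob)
        return node["suspend_publish"]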
--- openpype/hosts/nuke/api/lib.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/openpype/hosts/nuke/api/lib.py b/openpype/hosts/nuke/api/lib.py index e05c6aecbd..b859454e8f 100644 --- a/openpype/hosts/nuke/api/lib.py +++ b/openpype/hosts/nuke/api/lib.py @@ -1062,6 +1062,14 @@ def add_deadline_tab(node): knob.setValue(0) node.addKnob(knob) + knob = nuke.Text_Knob("divd", '') + knob.setValue('') + node.addKnob(knob) + + knob = nuke.Boolean_Knob("suspend_publish", "Suspend publish") + knob.setValue(False) + node.addKnob(knob) + def get_deadline_knob_names(): return [ From cf362dc50266fe9ae656f3aa5c100711fa8e47a2 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 20 Apr 2022 16:58:59 +0200 Subject: [PATCH 201/357] OP-3021 - collect suspend_publish --- openpype/hosts/nuke/plugins/publish/precollect_instances.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/openpype/hosts/nuke/plugins/publish/precollect_instances.py b/openpype/hosts/nuke/plugins/publish/precollect_instances.py index 29c706f302..76d402164c 100644 --- a/openpype/hosts/nuke/plugins/publish/precollect_instances.py +++ b/openpype/hosts/nuke/plugins/publish/precollect_instances.py @@ -69,6 +69,11 @@ class PreCollectNukeInstances(pyblish.api.ContextPlugin): instance = context.create_instance(subset) instance.append(node) + suspend_publish = False + if "suspend_publish" in node.knobs(): + suspend_publish = node["suspend_publish"].value() + instance.data["suspend_publish"] = suspend_publish + # get review knob value review = False if "review" in node.knobs(): From 2c7b1aab50e191f8dda0d2dcf67bd549d5ed34fd Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 20 Apr 2022 16:59:40 +0200 Subject: [PATCH 202/357] OP-3021 - check if suspend_publish exists If exists and set to True, suspend publish job. Artist need to enable it manually. 
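Together with the collector change above, the submit-side check in the next diff completes the flow: precollect_instances.py copies the Write node's "suspend_publish" knob value into instance.data, and the Deadline publish-job submitter turns it into an InitialStatus of "Suspended" on the JobInfo payload. The sketch below is a simplified, self-contained illustration of that last step; apply_suspend_state and the example payload dict are assumptions for demonstration, while the "InitialStatus" / "Suspended" keys match what the patch writes.

def apply_suspend_state(instance_data, payload):
    # The collector defaults the key to False, so anything submitted from a
    # node without the knob keeps the normal (active) initial status.
    if instance_data.get("suspend_publish"):
        # Deadline leaves the job idle until an artist resumes it manually.
        payload["JobInfo"]["InitialStatus"] = "Suspended"
    return payload

# Example: a suspended submission.
payload = {"JobInfo": {"Plugin": "OpenPype", "BatchName": "example_batch"}}
payload = apply_suspend_state({"suspend_publish": True}, payload)
assert payload["JobInfo"]["InitialStatus"] == "Suspended"
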
--- .../modules/deadline/plugins/publish/submit_publish_job.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/openpype/modules/deadline/plugins/publish/submit_publish_job.py b/openpype/modules/deadline/plugins/publish/submit_publish_job.py index 4f781de62d..18d01555e5 100644 --- a/openpype/modules/deadline/plugins/publish/submit_publish_job.py +++ b/openpype/modules/deadline/plugins/publish/submit_publish_job.py @@ -284,6 +284,10 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): else: payload["JobInfo"]["JobDependency0"] = job["_id"] + self.log.info("suspend {}".format(instance.data.get("suspend_publish"))) + if instance.data.get("suspend_publish"): + payload["JobInfo"]["InitialStatus"] = "Suspended" + index = 0 for key in environment: if key.upper() in self.enviro_filter: From acaa3cad9f20aaa6445917f9ea077b63d1d6921b Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 20 Apr 2022 17:54:35 +0200 Subject: [PATCH 203/357] Fix Houdini FPS + outdated content pop-ups --- openpype/hosts/houdini/api/lib.py | 4 +- openpype/hosts/maya/api/lib.py | 4 +- openpype/hosts/maya/api/pipeline.py | 2 +- openpype/widgets/popup.py | 81 +++++++++++++++-------------- 4 files changed, 47 insertions(+), 44 deletions(-) diff --git a/openpype/hosts/houdini/api/lib.py b/openpype/hosts/houdini/api/lib.py index bd41618856..7ee72d0b9f 100644 --- a/openpype/hosts/houdini/api/lib.py +++ b/openpype/hosts/houdini/api/lib.py @@ -155,7 +155,7 @@ def validate_fps(): if parent is None: pass else: - dialog = popup.Popup(parent=parent) + dialog = popup.PopupUpdateKeys(parent=parent) dialog.setModal(True) dialog.setWindowTitle("Houdini scene does not match project FPS") dialog.setMessage("Scene %i FPS does not match project %i FPS" % @@ -163,7 +163,7 @@ def validate_fps(): dialog.setButtonText("Fix") # on_show is the Fix button clicked callback - dialog.on_clicked.connect(lambda: set_scene_fps(fps)) + dialog.on_clicked_state.connect(lambda: set_scene_fps(fps)) dialog.show() diff --git a/openpype/hosts/maya/api/lib.py b/openpype/hosts/maya/api/lib.py index 9e99b96477..b5b4f4b7d1 100644 --- a/openpype/hosts/maya/api/lib.py +++ b/openpype/hosts/maya/api/lib.py @@ -2210,7 +2210,7 @@ def validate_fps(): parent = get_main_window() - dialog = popup.Popup2(parent=parent) + dialog = popup.PopupUpdateKeys(parent=parent) dialog.setModal(True) dialog.setWindowTitle("Maya scene not in line with project") dialog.setMessage("The FPS is out of sync, please fix") @@ -2218,7 +2218,7 @@ def validate_fps(): # Set new text for button (add optional argument for the popup?) 
toggle = dialog.widgets["toggle"] update = toggle.isChecked() - dialog.on_show.connect(lambda: set_scene_fps(fps, update)) + dialog.on_clicked_state.connect(lambda: set_scene_fps(fps, update)) dialog.show() diff --git a/openpype/hosts/maya/api/pipeline.py b/openpype/hosts/maya/api/pipeline.py index f6f3472eef..d5887adb24 100644 --- a/openpype/hosts/maya/api/pipeline.py +++ b/openpype/hosts/maya/api/pipeline.py @@ -448,7 +448,7 @@ def on_open(): dialog.setWindowTitle("Maya scene has outdated content") dialog.setMessage("There are outdated containers in " "your Maya scene.") - dialog.on_show.connect(_on_show_inventory) + dialog.on_clicked.connect(_on_show_inventory) dialog.show() diff --git a/openpype/widgets/popup.py b/openpype/widgets/popup.py index e661d3d293..6c9e4c1b51 100644 --- a/openpype/widgets/popup.py +++ b/openpype/widgets/popup.py @@ -3,14 +3,20 @@ import logging import contextlib -from Qt import QtCore, QtWidgets +from avalon.vendor.Qt import QtCore, QtWidgets, QtGui log = logging.getLogger(__name__) class Popup(QtWidgets.QDialog): + """A Popup that moves itself to bottom right of screen on show event. - on_show = QtCore.Signal() + The UI contains a message label and a red highlighted button to "show" + or perform another custom action from this pop-up. + + """ + + on_clicked = QtCore.Signal() def __init__(self, parent=None, *args, **kwargs): super(Popup, self).__init__(parent=parent, *args, **kwargs) @@ -19,32 +25,34 @@ class Popup(QtWidgets.QDialog): # Layout layout = QtWidgets.QHBoxLayout(self) layout.setContentsMargins(10, 5, 10, 10) + + # Increase spacing slightly for readability + layout.setSpacing(10) + message = QtWidgets.QLabel("") message.setStyleSheet(""" QLabel { font-size: 12px; } """) - show = QtWidgets.QPushButton("Show") - show.setSizePolicy(QtWidgets.QSizePolicy.Maximum, + button = QtWidgets.QPushButton("Show") + button.setSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Maximum) - show.setStyleSheet("""QPushButton { background-color: #BB0000 }""") + button.setStyleSheet("""QPushButton { background-color: #BB0000 }""") layout.addWidget(message) - layout.addWidget(show) + layout.addWidget(button) - # Size + # Default size self.resize(400, 40) - geometry = self.calculate_window_geometry() - self.setGeometry(geometry) self.widgets = { "message": message, - "show": show, + "button": button, } # Signals - show.clicked.connect(self._on_show_clicked) + button.clicked.connect(self._on_clicked) # Set default title self.setWindowTitle("Popup") @@ -52,7 +60,10 @@ class Popup(QtWidgets.QDialog): def setMessage(self, message): self.widgets['message'].setText(message) - def _on_show_clicked(self): + def setButtonText(self, text): + self.widgets["button"].setText(text) + + def _on_clicked(self): """Callback for when the 'show' button is clicked. 
Raises the parent (if any) @@ -63,11 +74,19 @@ class Popup(QtWidgets.QDialog): self.close() # Trigger the signal - self.on_show.emit() + self.on_clicked.emit() if parent: parent.raise_() + def showEvent(self, event): + + # Position popup based on contents on show event + geo = self.calculate_window_geometry() + self.setGeometry(geo) + + return super(Popup, self).showEvent(event) + def calculate_window_geometry(self): """Respond to status changes @@ -104,45 +123,29 @@ class Popup(QtWidgets.QDialog): return QtCore.QRect(x, y, width, height) -class Popup2(Popup): +class PopupUpdateKeys(Popup): + """Popup with Update Keys checkbox (intended for Maya)""" - on_show = QtCore.Signal() + on_clicked_state = QtCore.Signal(bool) def __init__(self, parent=None, *args, **kwargs): Popup.__init__(self, parent=parent, *args, **kwargs) layout = self.layout() - # Add toggle + # Insert toggle for Update keys toggle = QtWidgets.QCheckBox("Update Keys") layout.insertWidget(1, toggle) self.widgets["toggle"] = toggle + self.on_clicked.connect(self.emit_click_with_state) + layout.insertStretch(1, 1) - # Update button text - fix = self.widgets["show"] - fix.setText("Fix") - - def calculate_window_geometry(self): - """Respond to status changes - - On creation, align window with screen bottom right. - - """ - parent_widget = self.parent() - - desktop = QtWidgets.QApplication.desktop() - if parent_widget: - screen = desktop.screenNumber(parent_widget) - else: - screen = desktop.screenNumber(desktop.cursor().pos()) - center_point = desktop.screenGeometry(screen).center() - - frame_geo = self.frameGeometry() - frame_geo.moveCenter(center_point) - - return frame_geo + def emit_click_with_state(self): + """Emit the on_clicked_state signal with the toggled state""" + checked = self.widgets["toggle"].isChecked() + self.on_clicked_state.emit(checked) @contextlib.contextmanager From 08afa8b088f1dfa97710a3cd056cf289d1cd57f7 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 20 Apr 2022 17:55:43 +0200 Subject: [PATCH 204/357] Remove unused import --- openpype/widgets/popup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/widgets/popup.py b/openpype/widgets/popup.py index 6c9e4c1b51..670f6d8d8a 100644 --- a/openpype/widgets/popup.py +++ b/openpype/widgets/popup.py @@ -3,7 +3,7 @@ import logging import contextlib -from avalon.vendor.Qt import QtCore, QtWidgets, QtGui +from avalon.vendor.Qt import QtCore, QtWidgets log = logging.getLogger(__name__) From 98dc1f0a43e2ba7c0eb63190343b312027ac36a8 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 20 Apr 2022 18:08:42 +0200 Subject: [PATCH 205/357] OP-3021 - Hound --- openpype/hosts/nuke/plugins/create/create_write_render.py | 2 +- openpype/modules/deadline/plugins/publish/submit_publish_job.py | 1 - 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/openpype/hosts/nuke/plugins/create/create_write_render.py b/openpype/hosts/nuke/plugins/create/create_write_render.py index 8204c6420d..79766929ac 100644 --- a/openpype/hosts/nuke/plugins/create/create_write_render.py +++ b/openpype/hosts/nuke/plugins/create/create_write_render.py @@ -148,4 +148,4 @@ class CreateWriteRender(plugin.OpenPypeCreator): return write_node def _modify_write_node(self, write_node): - return write_node \ No newline at end of file + return write_node diff --git a/openpype/modules/deadline/plugins/publish/submit_publish_job.py b/openpype/modules/deadline/plugins/publish/submit_publish_job.py index 18d01555e5..715d9a8336 100644 --- 
a/openpype/modules/deadline/plugins/publish/submit_publish_job.py +++ b/openpype/modules/deadline/plugins/publish/submit_publish_job.py @@ -284,7 +284,6 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): else: payload["JobInfo"]["JobDependency0"] = job["_id"] - self.log.info("suspend {}".format(instance.data.get("suspend_publish"))) if instance.data.get("suspend_publish"): payload["JobInfo"]["InitialStatus"] = "Suspended" From fca3645a7afaa3725b81a37a5494f0eb100ec6e4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Wed, 20 Apr 2022 18:10:05 +0200 Subject: [PATCH 206/357] add support for bgeo and vdb add support for standalone publisher to publish bgeo and vdb sequences --- .../plugins/publish/collect_context.py | 3 ++- .../project_settings/standalonepublisher.json | 11 ++++++++++- .../standalonepublish/widgets/widget_drop_frame.py | 8 ++++++-- .../standalonepublish/widgets/widget_family_desc.py | 1 + 4 files changed, 19 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/standalonepublisher/plugins/publish/collect_context.py b/openpype/hosts/standalonepublisher/plugins/publish/collect_context.py index 6913e0836d..aabccc0328 100644 --- a/openpype/hosts/standalonepublisher/plugins/publish/collect_context.py +++ b/openpype/hosts/standalonepublisher/plugins/publish/collect_context.py @@ -247,7 +247,8 @@ class CollectContextDataSAPublish(pyblish.api.ContextPlugin): self.log.debug("collecting sequence: {}".format(collections)) instance.data["frameStart"] = int(component["frameStart"]) instance.data["frameEnd"] = int(component["frameEnd"]) - instance.data["fps"] = int(component["fps"]) + if component.get("fps"): + instance.data["fps"] = int(component["fps"]) ext = component["ext"] if ext.startswith("."): diff --git a/openpype/settings/defaults/project_settings/standalonepublisher.json b/openpype/settings/defaults/project_settings/standalonepublisher.json index bc91a5ea8a..e36232d3f7 100644 --- a/openpype/settings/defaults/project_settings/standalonepublisher.json +++ b/openpype/settings/defaults/project_settings/standalonepublisher.json @@ -141,6 +141,14 @@ "defaults": [], "help": "Texture files with Unreal naming convention" }, + "create_vdb": { + "name": "vdb", + "label": "VDB Volumetric Data", + "family": "vdbcache", + "icon": "cloud", + "defaults": [], + "help": "Hierarchical data structure for the efficient storage and manipulation of sparse volumetric data discretized on three-dimensional grids" + }, "__dynamic_keys_labels__": { "create_workfile": "Workfile", "create_model": "Model", @@ -154,7 +162,8 @@ "create_render": "Render", "create_mov_batch": "Batch Mov", "create_texture_batch": "Batch Texture", - "create_simple_unreal_texture": "Simple Unreal Texture" + "create_simple_unreal_texture": "Simple Unreal Texture", + "create_vdb": "VDB Cache" } }, "publish": { diff --git a/openpype/tools/standalonepublish/widgets/widget_drop_frame.py b/openpype/tools/standalonepublish/widgets/widget_drop_frame.py index c1c59d65b6..e6c7328e88 100644 --- a/openpype/tools/standalonepublish/widgets/widget_drop_frame.py +++ b/openpype/tools/standalonepublish/widgets/widget_drop_frame.py @@ -37,6 +37,10 @@ class DropDataFrame(QtWidgets.QFrame): "video_file": video_extensions } + sequence_types = [ + ".bgeo", ".vdb" + ] + def __init__(self, parent): super().__init__() self.parent_widget = parent @@ -176,7 +180,7 @@ class DropDataFrame(QtWidgets.QFrame): non_collectionable_paths = [] for path in in_paths: ext = os.path.splitext(path)[1] - if ext in self.image_extensions: + 
if ext in self.image_extensions or ext in self.sequence_types: collectionable_paths.append(path) else: non_collectionable_paths.append(path) @@ -289,7 +293,7 @@ class DropDataFrame(QtWidgets.QFrame): def get_file_data(self, data): filepath = data['files'][0] ext = data['ext'].lower() - output = {} + output = {"fps": None} file_info = None if 'file_info' in data: diff --git a/openpype/tools/standalonepublish/widgets/widget_family_desc.py b/openpype/tools/standalonepublish/widgets/widget_family_desc.py index 79681615b9..2095b332bd 100644 --- a/openpype/tools/standalonepublish/widgets/widget_family_desc.py +++ b/openpype/tools/standalonepublish/widgets/widget_family_desc.py @@ -52,6 +52,7 @@ class FamilyDescriptionWidget(QtWidgets.QWidget): family.setAlignment(QtCore.Qt.AlignBottom | QtCore.Qt.AlignLeft) help = QtWidgets.QLabel("help") + help.setWordWrap(True) help.setAlignment(QtCore.Qt.AlignTop | QtCore.Qt.AlignLeft) label_layout.addWidget(family) From c426c8a1566541c99bcafb0c60905373985a0586 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 20 Apr 2022 18:15:11 +0200 Subject: [PATCH 207/357] Remove unused logger --- openpype/widgets/popup.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/openpype/widgets/popup.py b/openpype/widgets/popup.py index 670f6d8d8a..1a975376f8 100644 --- a/openpype/widgets/popup.py +++ b/openpype/widgets/popup.py @@ -1,12 +1,9 @@ import sys -import logging import contextlib from avalon.vendor.Qt import QtCore, QtWidgets -log = logging.getLogger(__name__) - class Popup(QtWidgets.QDialog): """A Popup that moves itself to bottom right of screen on show event. From 5438decc1d1a169e1139d2a6d67eb62eae32e486 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 20 Apr 2022 18:16:05 +0200 Subject: [PATCH 208/357] Refactor Qt import --- openpype/widgets/popup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/widgets/popup.py b/openpype/widgets/popup.py index 1a975376f8..9fc33ccbb8 100644 --- a/openpype/widgets/popup.py +++ b/openpype/widgets/popup.py @@ -2,7 +2,7 @@ import sys import contextlib -from avalon.vendor.Qt import QtCore, QtWidgets +from Qt import QtCore, QtWidgets class Popup(QtWidgets.QDialog): From 2c9a5998b3b1725023d606ab871bdb11b1128920 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 20 Apr 2022 18:26:37 +0200 Subject: [PATCH 209/357] Improve Maya FPS pop-up message similar to Houdini --- openpype/hosts/maya/api/lib.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/maya/api/lib.py b/openpype/hosts/maya/api/lib.py index b5b4f4b7d1..801cdb16f4 100644 --- a/openpype/hosts/maya/api/lib.py +++ b/openpype/hosts/maya/api/lib.py @@ -2212,8 +2212,10 @@ def validate_fps(): dialog = popup.PopupUpdateKeys(parent=parent) dialog.setModal(True) - dialog.setWindowTitle("Maya scene not in line with project") - dialog.setMessage("The FPS is out of sync, please fix") + dialog.setWindowTitle("Maya scene does not match project FPS") + dialog.setMessage("Scene %i FPS does not match project %i FPS" % + (current_fps, fps)) + dialog.setButtonText("Fix") # Set new text for button (add optional argument for the popup?) 
toggle = dialog.widgets["toggle"] From 475654f51f5d98a4230dd46e66910a60959d276e Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 20 Apr 2022 18:33:10 +0200 Subject: [PATCH 210/357] fix report messages --- openpype/pipeline/plugin_discover.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/pipeline/plugin_discover.py b/openpype/pipeline/plugin_discover.py index fb860fe5f2..004e530b1c 100644 --- a/openpype/pipeline/plugin_discover.py +++ b/openpype/pipeline/plugin_discover.py @@ -59,7 +59,7 @@ class DiscoverResult: self.ignored_plugins ))) for cls in self.ignored_plugins: - lines.append("- {}".format(cls.__class__.__name__)) + lines.append("- {}".format(cls.__name__)) # Abstract classes if self.abstract_plugins or full_report: @@ -67,7 +67,7 @@ class DiscoverResult: self.abstract_plugins ))) for cls in self.abstract_plugins: - lines.append("- {}".format(cls.__class__.__name__)) + lines.append("- {}".format(cls.__name__)) # Abstract classes if self.duplicated_plugins or full_report: @@ -75,7 +75,7 @@ class DiscoverResult: self.duplicated_plugins ))) for cls in self.duplicated_plugins: - lines.append("- {}".format(cls.__class__.__name__)) + lines.append("- {}".format(cls.__name__)) if self.crashed_file_paths or full_report: lines.append("*** Failed to load {} files".format(len( From 4e0a3259ed87971481bb33d8fbd1a077350ddfeb Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 21 Apr 2022 10:05:28 +0200 Subject: [PATCH 211/357] query parent and data.parents from asset document --- .../tools/project_manager/project_manager/model.py | 14 +++++++++----- 1 file changed, 9 insertions(+), 5 deletions(-) diff --git a/openpype/tools/project_manager/project_manager/model.py b/openpype/tools/project_manager/project_manager/model.py index 1c3ec089f6..b3fd7fa0c7 100644 --- a/openpype/tools/project_manager/project_manager/model.py +++ b/openpype/tools/project_manager/project_manager/model.py @@ -1819,12 +1819,16 @@ class AssetItem(BaseItem): } query_projection = { "_id": 1, - "data.tasks": 1, - "data.visualParent": 1, - "schema": 1, - "name": 1, + "schema": 1, "type": 1, + "parent": 1, + + "data.visualParent": 1, + "data.parents": 1, + + "data.tasks": 1, + "data.frameStart": 1, "data.frameEnd": 1, "data.fps": 1, @@ -1835,7 +1839,7 @@ class AssetItem(BaseItem): "data.clipIn": 1, "data.clipOut": 1, "data.pixelAspect": 1, - "data.tools_env": 1 + "data.tools_env": 1, } def __init__(self, asset_doc): From 589666682c7a71b6d948c6c765fbfb7b433bcf95 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 21 Apr 2022 10:23:17 +0200 Subject: [PATCH 212/357] added info logs --- .../project_manager/project_manager/model.py | 31 ++++++++++++++++--- 1 file changed, 27 insertions(+), 4 deletions(-) diff --git a/openpype/tools/project_manager/project_manager/model.py b/openpype/tools/project_manager/project_manager/model.py index b3fd7fa0c7..5fd06ef442 100644 --- a/openpype/tools/project_manager/project_manager/model.py +++ b/openpype/tools/project_manager/project_manager/model.py @@ -7,6 +7,11 @@ from pymongo import UpdateOne, DeleteOne from Qt import QtCore, QtGui +from openpype.lib import ( + CURRENT_DOC_SCHEMAS, + PypeLogger, +) + from .constants import ( IDENTIFIER_ROLE, ITEM_TYPE_ROLE, @@ -18,8 +23,6 @@ from .constants import ( ) from .style import ResourceCache -from openpype.lib import CURRENT_DOC_SCHEMAS - class ProjectModel(QtGui.QStandardItemModel): """Load possible projects to modify from MongoDB. 
@@ -185,6 +188,7 @@ class HierarchyModel(QtCore.QAbstractItemModel): for key in self.multiselection_columns } + self._log = None # TODO Reset them on project change self._current_project = None self._root_item = None @@ -194,6 +198,12 @@ class HierarchyModel(QtCore.QAbstractItemModel): self._reset_root_item() + @property + def log(self): + if self._log is None: + self._log = PypeLogger.get_logger("ProjectManagerModel") + return self._log + @property def items_by_id(self): return self._items_by_id @@ -1367,6 +1377,9 @@ class HierarchyModel(QtCore.QAbstractItemModel): to_process = collections.deque() to_process.append(project_item) + updated_count = 0 + created_count = 0 + removed_count = 0 bulk_writes = [] while to_process: parent = to_process.popleft() @@ -1378,9 +1391,11 @@ class HierarchyModel(QtCore.QAbstractItemModel): to_process.append(item) if item.is_new: + created_count += 1 insert_list.append(item) elif item.data(REMOVED_ROLE): + removed_count += 1 if item.data(HIERARCHY_CHANGE_ABLE_ROLE): bulk_writes.append(DeleteOne( {"_id": item.asset_id} @@ -1394,6 +1409,7 @@ class HierarchyModel(QtCore.QAbstractItemModel): else: update_data = item.update_data() if update_data: + updated_count += 1 bulk_writes.append(UpdateOne( {"_id": item.asset_id}, update_data @@ -1408,8 +1424,15 @@ class HierarchyModel(QtCore.QAbstractItemModel): for idx, mongo_id in enumerate(result.inserted_ids): insert_list[idx].mongo_id = mongo_id - if bulk_writes: - project_col.bulk_write(bulk_writes) + if not bulk_writes: + self.log.info("Nothing has changed") + return + + project_col.bulk_write(bulk_writes) + self.log.info(( + "Save finished." + " Created {} | Updated {} | Removed {} asset documents" + ).format(created_count, updated_count, removed_count)) self.refresh_project() From 7d2dc0b0ea0835c3ccde25cb41ba095811581982 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 21 Apr 2022 11:34:53 +0200 Subject: [PATCH 213/357] fixed changes check --- .../tools/project_manager/project_manager/model.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/openpype/tools/project_manager/project_manager/model.py b/openpype/tools/project_manager/project_manager/model.py index 5fd06ef442..871704e13c 100644 --- a/openpype/tools/project_manager/project_manager/model.py +++ b/openpype/tools/project_manager/project_manager/model.py @@ -1377,8 +1377,8 @@ class HierarchyModel(QtCore.QAbstractItemModel): to_process = collections.deque() to_process.append(project_item) - updated_count = 0 created_count = 0 + updated_count = 0 removed_count = 0 bulk_writes = [] while to_process: @@ -1391,7 +1391,6 @@ class HierarchyModel(QtCore.QAbstractItemModel): to_process.append(item) if item.is_new: - created_count += 1 insert_list.append(item) elif item.data(REMOVED_ROLE): @@ -1422,13 +1421,16 @@ class HierarchyModel(QtCore.QAbstractItemModel): result = project_col.insert_many(new_docs) for idx, mongo_id in enumerate(result.inserted_ids): + created_count += 1 insert_list[idx].mongo_id = mongo_id - if not bulk_writes: + if sum([created_count, updated_count, removed_count]) == 0: self.log.info("Nothing has changed") return - project_col.bulk_write(bulk_writes) + if bulk_writes: + project_col.bulk_write(bulk_writes) + self.log.info(( "Save finished." 
" Created {} | Updated {} | Removed {} asset documents" From e50d8ee1ed596064cd0fb1b3d83d3823b4184af3 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 21 Apr 2022 11:58:20 +0200 Subject: [PATCH 214/357] initial settings for tray publisher --- .../project_settings/traypublisher.json | 38 ++++++ .../schemas/projects_schema/schema_main.json | 4 + .../schema_project_traypublisher.json | 117 ++++++++++++++++++ 3 files changed, 159 insertions(+) create mode 100644 openpype/settings/defaults/project_settings/traypublisher.json create mode 100644 openpype/settings/entities/schemas/projects_schema/schema_project_traypublisher.json diff --git a/openpype/settings/defaults/project_settings/traypublisher.json b/openpype/settings/defaults/project_settings/traypublisher.json new file mode 100644 index 0000000000..e6c6747ca2 --- /dev/null +++ b/openpype/settings/defaults/project_settings/traypublisher.json @@ -0,0 +1,38 @@ +{ + "simple_creators": [ + { + "family": "workfile", + "identifier": "", + "label": "Workfile", + "icon": "fa.file", + "default_variants": [ + "Main" + ], + "enable_review": false, + "description": "Publish workfile backup", + "detailed_description": "", + "extensions": [ + ".ma", + ".mb", + ".nk", + ".hrox", + ".hip", + ".hiplc", + ".hipnc", + ".blend", + ".scn", + ".tvpp", + ".comp", + ".zip", + ".prproj", + ".drp", + ".psd", + ".psb", + ".aep" + ], + "allow_sequences": { + "allow": "no" + } + } + ] +} \ No newline at end of file diff --git a/openpype/settings/entities/schemas/projects_schema/schema_main.json b/openpype/settings/entities/schemas/projects_schema/schema_main.json index 8e4eba86ef..dbddd18c80 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_main.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_main.json @@ -126,6 +126,10 @@ "type": "schema", "name": "schema_project_standalonepublisher" }, + { + "type": "schema", + "name": "schema_project_traypublisher" + }, { "type": "schema", "name": "schema_project_webpublisher" diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_traypublisher.json b/openpype/settings/entities/schemas/projects_schema/schema_project_traypublisher.json new file mode 100644 index 0000000000..00deb84172 --- /dev/null +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_traypublisher.json @@ -0,0 +1,117 @@ +{ + "type": "dict", + "collapsible": true, + "key": "traypublisher", + "label": "Tray Publisher", + "is_file": true, + "children": [ + { + "type": "list", + "collapsible": true, + "key": "simple_creators", + "label": "Creator plugins", + "use_label_wrap": true, + "collapsible_key": true, + "object_type": { + "type": "dict", + "children": [ + { + "type": "text", + "key": "family", + "label": "Family" + }, + { + "type": "text", + "key": "identifier", + "label": "Identifier", + "placeholder": "< Use 'Family' >", + "tooltip": "All creators must have unique identifier.\nBy default is used 'family' but if you need to have more creators with same families\nyou have to set identifier too." + }, + { + "type": "text", + "key": "label", + "label": "Label" + }, + { + "type": "text", + "key": "icon", + "label": "Icon" + }, + { + "type": "list", + "key": "default_variants", + "label": "Default variants", + "object_type": { + "type": "text" + } + }, + { + "type": "boolean", + "key": "enable_review", + "label": "Enable review", + "tooltip": "Allow to create review from source file/s.\nFiles must be supported to be able create review." 
+ }, + { + "type": "separator" + }, + { + "type": "text", + "key": "description", + "label": "Description" + }, + { + "type": "text", + "key": "detailed_description", + "label": "Detailed Description", + "multiline": true + }, + { + "type": "separator" + }, + { + "type": "list", + "key": "extensions", + "label": "Extensions", + "use_label_wrap": true, + "collapsible_key": true, + "collapsed": false, + "object_type": "text" + }, + { + "key": "allow_sequences", + "label": "Allow sequences", + "type": "dict-conditional", + "use_label_wrap": true, + "collapsible_key": true, + "enum_key": "allow", + "enum_children": [ + { + "key": "all", + "label": "Yes (all extensions)" + }, + { + "key": "selection", + "label": "Yes (limited extensions)", + "children": [ + { + "type": "list", + "key": "extensions", + "label": "Extensions", + "use_label_wrap": true, + "collapsible_key": true, + "collapsed": false, + "object_type": "text" + } + ] + }, + { + "key": "no", + "label": "No" + } + ] + } + ] + } + } + ] +} From 9780de94c53244426e48f192d59f476da9cbb606 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 21 Apr 2022 11:58:55 +0200 Subject: [PATCH 215/357] added file adding creators from settings --- .../plugins/create/create_from_settings.py | 34 +++++++++++++++++++ 1 file changed, 34 insertions(+) create mode 100644 openpype/hosts/traypublisher/plugins/create/create_from_settings.py diff --git a/openpype/hosts/traypublisher/plugins/create/create_from_settings.py b/openpype/hosts/traypublisher/plugins/create/create_from_settings.py new file mode 100644 index 0000000000..19ade437ab --- /dev/null +++ b/openpype/hosts/traypublisher/plugins/create/create_from_settings.py @@ -0,0 +1,34 @@ +import os +import copy + +from openpype.api import get_project_settings + + +def initialize(): + from openpype.hosts.traypublisher.api.plugin import SettingsCreator + + project_name = os.environ["AVALON_PROJECT"] + project_settings = get_project_settings(project_name) + + simple_creators = project_settings["traypublisher"]["simple_creators"] + + global_variables = globals() + for item in simple_creators: + allow_sequences_value = item["allow_sequences"] + allow_sequences = allow_sequences_value["allow"] + if allow_sequences == "all": + sequence_extensions = copy.deepcopy(item["extensions"]) + + elif allow_sequences == "no": + sequence_extensions = [] + + elif allow_sequences == "selection": + sequence_extensions = allow_sequences_value["extensions"] + + item["sequence_extensions"] = sequence_extensions + item["enable_review"] = False + dynamic_plugin = SettingsCreator.from_settings(item) + global_variables[dynamic_plugin.__name__] = dynamic_plugin + + +initialize() From 20ef8b0c58358992f242c3c286cfca44d102999e Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 21 Apr 2022 11:59:04 +0200 Subject: [PATCH 216/357] removed current workfile creator --- .../plugins/create/create_workfile.py | 97 ------------------- 1 file changed, 97 deletions(-) delete mode 100644 openpype/hosts/traypublisher/plugins/create/create_workfile.py diff --git a/openpype/hosts/traypublisher/plugins/create/create_workfile.py b/openpype/hosts/traypublisher/plugins/create/create_workfile.py deleted file mode 100644 index 5e0af350f0..0000000000 --- a/openpype/hosts/traypublisher/plugins/create/create_workfile.py +++ /dev/null @@ -1,97 +0,0 @@ -from openpype.hosts.traypublisher.api import pipeline -from openpype.lib import FileDef -from openpype.pipeline import ( - Creator, - CreatedInstance -) - - -class WorkfileCreator(Creator): - identifier = 
"workfile" - label = "Workfile" - family = "workfile" - description = "Publish backup of workfile" - - create_allow_context_change = True - - extensions = [ - # Maya - ".ma", ".mb", - # Nuke - ".nk", - # Hiero - ".hrox", - # Houdini - ".hip", ".hiplc", ".hipnc", - # Blender - ".blend", - # Celaction - ".scn", - # TVPaint - ".tvpp", - # Fusion - ".comp", - # Harmony - ".zip", - # Premiere - ".prproj", - # Resolve - ".drp", - # Photoshop - ".psd", ".psb", - # Aftereffects - ".aep" - ] - - def get_icon(self): - return "fa.file" - - def collect_instances(self): - for instance_data in pipeline.list_instances(): - creator_id = instance_data.get("creator_identifier") - if creator_id == self.identifier: - instance = CreatedInstance.from_existing( - instance_data, self - ) - self._add_instance_to_context(instance) - - def update_instances(self, update_list): - pipeline.update_instances(update_list) - - def remove_instances(self, instances): - pipeline.remove_instances(instances) - for instance in instances: - self._remove_instance_from_context(instance) - - def create(self, subset_name, data, pre_create_data): - # Pass precreate data to creator attributes - data["creator_attributes"] = pre_create_data - # Create new instance - new_instance = CreatedInstance(self.family, subset_name, data, self) - # Host implementation of storing metadata about instance - pipeline.HostContext.add_instance(new_instance.data_to_store()) - # Add instance to current context - self._add_instance_to_context(new_instance) - - def get_default_variants(self): - return [ - "Main" - ] - - def get_instance_attr_defs(self): - output = [ - FileDef( - "filepath", - folders=False, - extensions=self.extensions, - label="Filepath" - ) - ] - return output - - def get_pre_create_attr_defs(self): - # Use same attributes as for instance attrobites - return self.get_instance_attr_defs() - - def get_detail_description(self): - return """# Publish workfile backup""" From 1b3026bdc7f0e73ba1cac50a655119e1fa30e86f Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 21 Apr 2022 12:05:24 +0200 Subject: [PATCH 217/357] replaced prints with logs --- openpype/modules/ftrack/ftrack_server/lib.py | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/openpype/modules/ftrack/ftrack_server/lib.py b/openpype/modules/ftrack/ftrack_server/lib.py index f8319b67d4..bc595430fc 100644 --- a/openpype/modules/ftrack/ftrack_server/lib.py +++ b/openpype/modules/ftrack/ftrack_server/lib.py @@ -31,10 +31,13 @@ TOPIC_STATUS_SERVER = "openpype.event.server.status" TOPIC_STATUS_SERVER_RESULT = "openpype.event.server.status.result" -def check_ftrack_url(url, log_errors=True): +def check_ftrack_url(url, log_errors=True, logger=None): """Checks if Ftrack server is responding""" + if logger is None: + logger = Logger.get_logger(__name__) + if not url: - print('ERROR: Ftrack URL is not set!') + logger.error("Ftrack URL is not set!") return None url = url.strip('/ ') @@ -48,15 +51,15 @@ def check_ftrack_url(url, log_errors=True): result = requests.get(url, allow_redirects=False) except requests.exceptions.RequestException: if log_errors: - print('ERROR: Entered Ftrack URL is not accesible!') + logger.error("Entered Ftrack URL is not accesible!") return False if (result.status_code != 200 or 'FTRACK_VERSION' not in result.headers): if log_errors: - print('ERROR: Entered Ftrack URL is not accesible!') + logger.error("Entered Ftrack URL is not accesible!") return False - print('DEBUG: Ftrack server {} is accessible.'.format(url)) + 
logger.debug("Ftrack server {} is accessible.".format(url)) return url @@ -133,7 +136,7 @@ class ProcessEventHub(SocketBaseEventHub): hearbeat_msg = b"processor" is_collection_created = False - pypelog = Logger().get_logger("Session Processor") + pypelog = Logger.get_logger("Session Processor") def __init__(self, *args, **kwargs): self.mongo_url = None From 87878cf9538b07c9c8336ee6fb121e81275a17fd Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 21 Apr 2022 12:06:46 +0200 Subject: [PATCH 218/357] OP-2765 - minor update of validation message --- .../plugins/publish/help/validate_scene_settings.xml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/openpype/hosts/aftereffects/plugins/publish/help/validate_scene_settings.xml b/openpype/hosts/aftereffects/plugins/publish/help/validate_scene_settings.xml index 36fa90456e..0591020ed3 100644 --- a/openpype/hosts/aftereffects/plugins/publish/help/validate_scene_settings.xml +++ b/openpype/hosts/aftereffects/plugins/publish/help/validate_scene_settings.xml @@ -12,6 +12,8 @@ One of the settings in a scene doesn't match to asset settings in database. ### How to repair? Change values for {invalid_keys_str} in the scene OR change them in the asset database if they are wrong there. + + In the scene it is right mouse click on published composition > `Composition Settings`. ### __Detailed Info__ (optional) From cf37cd3e8c25b23691555ba34143da7e35efc47a Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Thu, 21 Apr 2022 17:43:16 +0200 Subject: [PATCH 219/357] fix deadline renderman version handling --- .../plugins/publish/submit_maya_deadline.py | 19 ++++++++++++++++++- 1 file changed, 18 insertions(+), 1 deletion(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index 498397b81b..14e458a401 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -837,6 +837,23 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin): "AssetDependency0": data["filepath"], } + renderer = self._instance.data["renderer"] + + # This hack is here because of how Deadline handles Renderman version. + # it considers everything with `renderman` set as version older than + # Renderman 22, and so if we are using renderman > 21 we need to set + # renderer string on the job to `renderman22`. We will have to change + # this when Deadline releases new version handling this. 
+ if self._instance.data["renderer"] == "renderman": + try: + from rfm2.config import cfg # noqa + except ImportError: + raise Exception("Cannot determine renderman version") + + rman_version = cfg().build_info.version() # type: str + if int(rman_version.split(".")[0]) > 22: + renderer = "renderman22" + plugin_info = { "SceneFile": data["filepath"], # Output directory and filename @@ -850,7 +867,7 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin): "RenderLayer": data["renderlayer"], # Determine which renderer to use from the file itself - "Renderer": self._instance.data["renderer"], + "Renderer": renderer, # Resolve relative references "ProjectPath": data["workspace"], From 0666af82e6ec8f2ec2b8694877c193df598c1dc5 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 21 Apr 2022 18:45:36 +0200 Subject: [PATCH 220/357] variant input has aligned options button --- openpype/style/style.css | 11 +++++- .../tools/publisher/widgets/create_dialog.py | 37 +++++++++++++++---- 2 files changed, 39 insertions(+), 9 deletions(-) diff --git a/openpype/style/style.css b/openpype/style/style.css index b5f6962eee..9df615d953 100644 --- a/openpype/style/style.css +++ b/openpype/style/style.css @@ -852,7 +852,16 @@ QScrollBar::add-page:vertical, QScrollBar::sub-page:vertical { #PublishLogConsole { font-family: "Noto Sans Mono"; } - +VariantInputsWidget QLineEdit { + border-bottom-right-radius: 0px; + border-top-right-radius: 0px; +} +VariantInputsWidget QToolButton { + border-bottom-left-radius: 0px; + border-top-left-radius: 0px; + padding-top: 0.5em; + padding-bottom: 0.5em; +} #VariantInput[state="new"], #VariantInput[state="new"]:focus, #VariantInput[state="new"]:hover { border-color: {color:publisher:success}; } diff --git a/openpype/tools/publisher/widgets/create_dialog.py b/openpype/tools/publisher/widgets/create_dialog.py index 7d98609c2c..21e1bd5cfc 100644 --- a/openpype/tools/publisher/widgets/create_dialog.py +++ b/openpype/tools/publisher/widgets/create_dialog.py @@ -29,6 +29,14 @@ from ..constants import ( SEPARATORS = ("---separator---", "---") +class VariantInputsWidget(QtWidgets.QWidget): + resized = QtCore.Signal() + + def resizeEvent(self, event): + super(VariantInputsWidget, self).resizeEvent(event) + self.resized.emit() + + class CreateErrorMessageBox(ErrorMessageBox): def __init__( self, @@ -247,22 +255,25 @@ class CreateDialog(QtWidgets.QDialog): creators_model = QtGui.QStandardItemModel() creators_view.setModel(creators_model) - variant_input = QtWidgets.QLineEdit(self) + variant_widget = VariantInputsWidget(self) + + variant_input = QtWidgets.QLineEdit(variant_widget) variant_input.setObjectName("VariantInput") variant_input.setToolTip(VARIANT_TOOLTIP) - variant_hints_btn = QtWidgets.QPushButton(self) - variant_hints_btn.setFixedWidth(18) + variant_hints_btn = QtWidgets.QToolButton(variant_widget) + variant_hints_btn.setArrowType(QtCore.Qt.DownArrow) + variant_hints_btn.setIconSize(QtCore.QSize(12, 12)) - variant_hints_menu = QtWidgets.QMenu(variant_hints_btn) + variant_hints_menu = QtWidgets.QMenu(variant_widget) variant_hints_group = QtWidgets.QActionGroup(variant_hints_menu) - variant_hints_btn.setMenu(variant_hints_menu) + # variant_hints_btn.setMenu(variant_hints_menu) - variant_layout = QtWidgets.QHBoxLayout() + variant_layout = QtWidgets.QHBoxLayout(variant_widget) variant_layout.setContentsMargins(0, 0, 0, 0) variant_layout.setSpacing(0) variant_layout.addWidget(variant_input, 1) - variant_layout.addWidget(variant_hints_btn, 0) + 
variant_layout.addWidget(variant_hints_btn, 0, QtCore.Qt.AlignVCenter) subset_name_input = QtWidgets.QLineEdit(self) subset_name_input.setEnabled(False) @@ -271,7 +282,7 @@ class CreateDialog(QtWidgets.QDialog): create_btn.setEnabled(False) form_layout = QtWidgets.QFormLayout() - form_layout.addRow("Variant:", variant_layout) + form_layout.addRow("Variant:", variant_widget) form_layout.addRow("Subset:", subset_name_input) mid_widget = QtWidgets.QWidget(self) @@ -341,11 +352,13 @@ class CreateDialog(QtWidgets.QDialog): help_btn.resized.connect(self._on_help_btn_resize) create_btn.clicked.connect(self._on_create) + variant_widget.resized.connect(self._on_variant_widget_resize) variant_input.returnPressed.connect(self._on_create) variant_input.textChanged.connect(self._on_variant_change) creators_view.selectionModel().currentChanged.connect( self._on_creator_item_change ) + variant_hints_btn.clicked.connect(self._on_variant_btn_click) variant_hints_menu.triggered.connect(self._on_variant_action) assets_widget.selection_changed.connect(self._on_asset_change) assets_widget.current_context_required.connect( @@ -660,6 +673,14 @@ class CreateDialog(QtWidgets.QDialog): self.variant_input.setText(default_variant or "Main") + def _on_variant_widget_resize(self): + self.variant_hints_btn.setFixedHeight(self.variant_input.height()) + + def _on_variant_btn_click(self): + pos = self.variant_hints_btn.rect().bottomLeft() + point = self.variant_hints_btn.mapToGlobal(pos) + self.variant_hints_menu.popup(point) + def _on_variant_action(self, action): value = action.text() if self.variant_input.text() != value: From 7415c857905a8eddb71eff26e2e1c1456330b113 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 22 Apr 2022 10:27:58 +0200 Subject: [PATCH 221/357] use operational patter to recognize op atom mxf format --- openpype/lib/transcoding.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/lib/transcoding.py b/openpype/lib/transcoding.py index fcec5d4216..f20bef3854 100644 --- a/openpype/lib/transcoding.py +++ b/openpype/lib/transcoding.py @@ -727,9 +727,9 @@ def get_ffmpeg_format_args(ffprobe_data, source_ffmpeg_cmd=None): def _ffmpeg_mxf_format_args(ffprobe_data, source_ffmpeg_cmd): input_format = ffprobe_data["format"] format_tags = input_format.get("tags") or {} - product_name = format_tags.get("product_name") or "" + operational_pattern_ul = format_tags.get("operational_pattern_ul") or "" output = [] - if "opatom" in product_name.lower(): + if operational_pattern_ul == "060e2b34.04010102.0d010201.10030000": output.extend(["-f", "mxf_opatom"]) return output From 613e14c012430d7df3f9494cd0acccbed20165ec Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 22 Apr 2022 11:25:40 +0200 Subject: [PATCH 222/357] don't look into repos directory to be added to sys path --- start.py | 19 +++++-------------- 1 file changed, 5 insertions(+), 14 deletions(-) diff --git a/start.py b/start.py index 35a14a059e..c066fa3ab7 100644 --- a/start.py +++ b/start.py @@ -823,23 +823,14 @@ def _bootstrap_from_code(use_version, use_staging): version_path = Path(_openpype_root) os.environ["OPENPYPE_REPOS_ROOT"] = _openpype_root - repos = [] - # Check for "openpype/repos" directory for sumodules - # NOTE: Is not used at this moment but can be re-used in future - repos_dir = os.path.join(_openpype_root, "repos") - if os.path.exists(repos_dir): - for name in os.listdir(repos_dir): - repos.append(os.path.join(repos_dir, name)) - - # add self to python paths - repos.insert(0, _openpype_root) - for repo 
in repos: - sys.path.insert(0, repo) + # add self to sys.path of current process + sys.path.insert(0, _openpype_root) # add venv 'site-packages' to PYTHONPATH python_path = os.getenv("PYTHONPATH", "") split_paths = python_path.split(os.pathsep) - # Add repos as first in list - split_paths = repos + split_paths + # add self to python paths + split_paths.insert(0, _openpype_root) + # last one should be venv site-packages # this is slightly convoluted as we can get here from frozen code too # in case when we are running without any version installed. From bf45122d8cf013418b16d90bdc242ccb5daea33c Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 22 Apr 2022 11:25:51 +0200 Subject: [PATCH 223/357] don't handle repos directory in setup --- setup.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/setup.py b/setup.py index dc6c003ed6..899e9375c0 100644 --- a/setup.py +++ b/setup.py @@ -128,10 +128,6 @@ include_files = [ "README.md" ] -repos_path = openpype_root / "repos" -if repos_path.exists(): - include_files.append("repos") - if IS_WINDOWS: install_requires.extend([ # `pywin32` packages From a616611b6cdffba1a8fab080d22fe27be306b21a Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 22 Apr 2022 11:27:30 +0200 Subject: [PATCH 224/357] don't use repos subdir to create zip --- igniter/bootstrap_repos.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/igniter/bootstrap_repos.py b/igniter/bootstrap_repos.py index 2e47f549d7..0638ee2341 100644 --- a/igniter/bootstrap_repos.py +++ b/igniter/bootstrap_repos.py @@ -668,9 +668,9 @@ class BootstrapRepos: self._progress_callback = progress_callback if getattr(sys, "frozen", False): - self.live_repo_dir = Path(sys.executable).parent / "repos" + self.live_repo_dir = Path(sys.executable).parent else: - self.live_repo_dir = Path(Path(__file__).parent / ".." 
/ "repos") + self.live_repo_dir = Path(Path(__file__).parent / "..") @staticmethod def get_version_path_from_list( @@ -756,7 +756,7 @@ class BootstrapRepos: Path(temp_dir) / f"openpype-v{version}.zip" self._print(f"creating zip: {temp_zip}") - self._create_openpype_zip(temp_zip, repo_dir.parent) + self._create_openpype_zip(temp_zip, repo_dir) if not os.path.exists(temp_zip): self._print("make archive failed.", LOG_ERROR) return None From db10343a292be770cdc20ae73d11659caf11b81d Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 22 Apr 2022 11:27:40 +0200 Subject: [PATCH 225/357] removed repos from filter list --- igniter/bootstrap_repos.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/igniter/bootstrap_repos.py b/igniter/bootstrap_repos.py index 0638ee2341..c882ec6e49 100644 --- a/igniter/bootstrap_repos.py +++ b/igniter/bootstrap_repos.py @@ -654,7 +654,7 @@ class BootstrapRepos: self.registry = OpenPypeSettingsRegistry() self.zip_filter = [".pyc", "__pycache__"] self.openpype_filter = [ - "openpype", "repos", "schema", "LICENSE" + "openpype", "schema", "LICENSE" ] self._message = message From 38d93c1b46b74ea5596543747058841f7d384bf2 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 22 Apr 2022 11:30:40 +0200 Subject: [PATCH 226/357] don't look for repos directory in add_paths_from_directory --- igniter/bootstrap_repos.py | 18 +----------------- start.py | 1 + 2 files changed, 2 insertions(+), 17 deletions(-) diff --git a/igniter/bootstrap_repos.py b/igniter/bootstrap_repos.py index c882ec6e49..50b46c36ab 100644 --- a/igniter/bootstrap_repos.py +++ b/igniter/bootstrap_repos.py @@ -1094,24 +1094,8 @@ class BootstrapRepos: directory (Path): path to directory. """ + sys.path.insert(0, directory.as_posix()) - directory /= "repos" - if not directory.exists() and not directory.is_dir(): - return - - roots = [] - for item in directory.iterdir(): - if item.is_dir(): - root = item.as_posix() - if root not in roots: - roots.append(root) - sys.path.insert(0, root) - - pythonpath = os.getenv("PYTHONPATH", "") - paths = pythonpath.split(os.pathsep) - paths += roots - - os.environ["PYTHONPATH"] = os.pathsep.join(paths) @staticmethod def find_openpype_version(version, staging): diff --git a/start.py b/start.py index c066fa3ab7..8944da4ba0 100644 --- a/start.py +++ b/start.py @@ -824,6 +824,7 @@ def _bootstrap_from_code(use_version, use_staging): os.environ["OPENPYPE_REPOS_ROOT"] = _openpype_root # add self to sys.path of current process + # NOTE: this seems to be duplicate of 'add_paths_from_directory' sys.path.insert(0, _openpype_root) # add venv 'site-packages' to PYTHONPATH python_path = os.getenv("PYTHONPATH", "") From 4bae7484faf8e5777ba2e45a046dae1324409810 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 22 Apr 2022 11:32:48 +0200 Subject: [PATCH 227/357] modified adding paths from archive to sys path --- igniter/bootstrap_repos.py | 22 +++------------------- tests/unit/igniter/test_bootstrap_repos.py | 2 -- 2 files changed, 3 insertions(+), 21 deletions(-) diff --git a/igniter/bootstrap_repos.py b/igniter/bootstrap_repos.py index 50b46c36ab..e9fb6fa0ec 100644 --- a/igniter/bootstrap_repos.py +++ b/igniter/bootstrap_repos.py @@ -1057,27 +1057,11 @@ class BootstrapRepos: if not archive.is_file() and not archive.exists(): raise ValueError("Archive is not file.") - with ZipFile(archive, "r") as zip_file: - name_list = zip_file.namelist() - - roots = [] - paths = [] - for item in name_list: - if not item.startswith("repos/"): - continue - - root = item.split("/")[1] - 
- if root not in roots: - roots.append(root) - paths.append( - f"{archive}{os.path.sep}repos{os.path.sep}{root}") - sys.path.insert(0, paths[-1]) - - sys.path.insert(0, f"{archive}") + archive_path = str(archive) + sys.path.insert(0, archive_path) pythonpath = os.getenv("PYTHONPATH", "") python_paths = pythonpath.split(os.pathsep) - python_paths += paths + python_paths.insert(0, archive_path) os.environ["PYTHONPATH"] = os.pathsep.join(python_paths) diff --git a/tests/unit/igniter/test_bootstrap_repos.py b/tests/unit/igniter/test_bootstrap_repos.py index 65cd5a2399..10278c4928 100644 --- a/tests/unit/igniter/test_bootstrap_repos.py +++ b/tests/unit/igniter/test_bootstrap_repos.py @@ -152,8 +152,6 @@ def test_install_live_repos(fix_bootstrap, printer, monkeypatch, pytestconfig): openpype_version = fix_bootstrap.create_version_from_live_code() sep = os.path.sep expected_paths = [ - f"{openpype_version.path}{sep}repos{sep}avalon-core", - f"{openpype_version.path}{sep}repos{sep}avalon-unreal-integration", f"{openpype_version.path}" ] printer("testing zip creation") From fc73f253b27b6d4d4faa672112bf744a34e0e8c8 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 22 Apr 2022 11:35:50 +0200 Subject: [PATCH 228/357] remove copied zip from version repository after extraction --- igniter/bootstrap_repos.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/igniter/bootstrap_repos.py b/igniter/bootstrap_repos.py index e9fb6fa0ec..6392517cda 100644 --- a/igniter/bootstrap_repos.py +++ b/igniter/bootstrap_repos.py @@ -1405,6 +1405,7 @@ class BootstrapRepos: # create destination parent directories even if they don't exist. destination.mkdir(parents=True) + remove_source_file = False # version is directory if openpype_version.path.is_dir(): # create zip inside temporary directory. 
@@ -1438,6 +1439,8 @@ class BootstrapRepos: self._progress_callback(35) openpype_version.path = self._copy_zip( openpype_version.path, destination) + # Mark zip to be deleted when done + remove_source_file = True # extract zip there self._print("extracting zip to destination ...") @@ -1446,6 +1449,10 @@ class BootstrapRepos: zip_ref.extractall(destination) self._progress_callback(100) + # Remove zip file copied to local app data + if remove_source_file: + os.remove(openpype_version.path) + return destination def _copy_zip(self, source: Path, destination: Path) -> Path: From e5b6105476e293759e07aa85294238669de3b122 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 22 Apr 2022 11:46:40 +0200 Subject: [PATCH 229/357] OP-2765 - bump order to run after precollect --- openpype/hosts/aftereffects/plugins/publish/collect_render.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/aftereffects/plugins/publish/collect_render.py b/openpype/hosts/aftereffects/plugins/publish/collect_render.py index 58aa01ad87..adbbe7eee9 100644 --- a/openpype/hosts/aftereffects/plugins/publish/collect_render.py +++ b/openpype/hosts/aftereffects/plugins/publish/collect_render.py @@ -27,7 +27,7 @@ class AERenderInstance(RenderInstance): class CollectAERender(abstract_collect_render.AbstractCollectRender): - order = pyblish.api.CollectorOrder + 0.400 + order = pyblish.api.CollectorOrder + 0.405 label = "Collect After Effects Render Layers" hosts = ["aftereffects"] From 917013e353ce535d062b060a35d9ee6339fba2db Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Fri, 22 Apr 2022 12:33:55 +0200 Subject: [PATCH 230/357] Fix any render cameras check --- openpype/hosts/maya/plugins/publish/collect_render.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/collect_render.py b/openpype/hosts/maya/plugins/publish/collect_render.py index 14b9157005..dfab8252d0 100644 --- a/openpype/hosts/maya/plugins/publish/collect_render.py +++ b/openpype/hosts/maya/plugins/publish/collect_render.py @@ -194,13 +194,11 @@ class CollectMayaRender(pyblish.api.ContextPlugin): assert render_products, "no render products generated" exp_files = [] multipart = False - render_cameras = [] for product in render_products: if product.multipart: multipart = True product_name = product.productName if product.camera and layer_render_products.has_camera_token(): - render_cameras.append(product.camera) product_name = "{}{}".format( product.camera, "_" + product_name if product_name else "") @@ -210,7 +208,8 @@ class CollectMayaRender(pyblish.api.ContextPlugin): product) }) - assert render_cameras, "No render cameras found." + has_cameras = any(product.camera for product in render_products) + assert has_cameras, "No render cameras found." 
self.log.info("multipart: {}".format( multipart)) From 01e045d9abeae87accaaa19cd14cd5c17d519323 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 22 Apr 2022 12:37:52 +0200 Subject: [PATCH 231/357] OP-2765 - removed obsolete code Replaced lower by get_subset_name_with_asset_doc --- .../hosts/aftereffects/plugins/publish/collect_workfile.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py b/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py index 0f09b5fdf8..e96541e47b 100644 --- a/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py +++ b/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py @@ -42,10 +42,6 @@ class CollectWorkfile(pyblish.api.ContextPlugin): asset_entity = context.data["assetEntity"] project_entity = context.data["projectEntity"] - # workfile instance - family = "workfile" - subset = family + task.capitalize() # TOOD use method - instance_data = { "asset": asset_entity["name"], "task": task, From e1995e8828ffa7178149813905407c2a08718ebf Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 22 Apr 2022 12:52:11 +0200 Subject: [PATCH 232/357] added verbose and debug to global click options and removed debug from each individual callback --- openpype/cli.py | 55 +++++++++++++-------------------------- openpype/pype_commands.py | 2 +- 2 files changed, 19 insertions(+), 38 deletions(-) diff --git a/openpype/cli.py b/openpype/cli.py index cbeb7fef9b..2aa4a46929 100644 --- a/openpype/cli.py +++ b/openpype/cli.py @@ -20,6 +20,10 @@ from .pype_commands import PypeCommands "to list staging versions.")) @click.option("--validate-version", expose_value=False, help="validate given version integrity") +@click.option("--debug", is_flag=True, expose_value=False, + help=("Enable debug")) +@click.option("--verbose", expose_value=False, + help=("Change OpenPype log level (debug - critical or 0-50)")) def main(ctx): """Pype is main command serving as entry point to pipeline system. @@ -49,18 +53,13 @@ def traypublisher(): @main.command() -@click.option("-d", "--debug", - is_flag=True, help=("Run pype tray in debug mode")) -def tray(debug=False): +def tray(): """Launch pype tray. Default action of pype command is to launch tray widget to control basic aspects of pype. See documentation for more information. - - Running pype with `--debug` will result in lot of information useful for - debugging to be shown in console. """ - PypeCommands().launch_tray(debug) + PypeCommands().launch_tray() @PypeCommands.add_modules @@ -75,7 +74,6 @@ def module(ctx): @main.command() -@click.option("-d", "--debug", is_flag=True, help="Print debug messages") @click.option("--ftrack-url", envvar="FTRACK_SERVER", help="Ftrack server url") @click.option("--ftrack-user", envvar="FTRACK_API_USER", @@ -88,8 +86,7 @@ def module(ctx): help="Clockify API key.") @click.option("--clockify-workspace", envvar="CLOCKIFY_WORKSPACE", help="Clockify workspace") -def eventserver(debug, - ftrack_url, +def eventserver(ftrack_url, ftrack_user, ftrack_api_key, legacy, @@ -100,8 +97,6 @@ def eventserver(debug, This should be ideally used by system service (such us systemd or upstart on linux and window service). 
""" - if debug: - os.environ["OPENPYPE_DEBUG"] = "1" PypeCommands().launch_eventservercli( ftrack_url, @@ -114,12 +109,11 @@ def eventserver(debug, @main.command() -@click.option("-d", "--debug", is_flag=True, help="Print debug messages") @click.option("-h", "--host", help="Host", default=None) @click.option("-p", "--port", help="Port", default=None) @click.option("-e", "--executable", help="Executable") @click.option("-u", "--upload_dir", help="Upload dir") -def webpublisherwebserver(debug, executable, upload_dir, host=None, port=None): +def webpublisherwebserver(executable, upload_dir, host=None, port=None): """Starts webserver for communication with Webpublish FR via command line OP must be congigured on a machine, eg. OPENPYPE_MONGO filled AND @@ -127,8 +121,6 @@ def webpublisherwebserver(debug, executable, upload_dir, host=None, port=None): Expect "pype.club" user created on Ftrack. """ - if debug: - os.environ["OPENPYPE_DEBUG"] = "1" PypeCommands().launch_webpublisher_webservercli( upload_dir=upload_dir, @@ -164,38 +156,34 @@ def extractenvironments(output_json_path, project, asset, task, app, envgroup): @main.command() @click.argument("paths", nargs=-1) -@click.option("-d", "--debug", is_flag=True, help="Print debug messages") @click.option("-t", "--targets", help="Targets module", default=None, multiple=True) @click.option("-g", "--gui", is_flag=True, help="Show Publish UI", default=False) -def publish(debug, paths, targets, gui): +def publish(paths, targets, gui): """Start CLI publishing. Publish collects json from paths provided as an argument. More than one path is allowed. """ - if debug: - os.environ["OPENPYPE_DEBUG"] = "1" + PypeCommands.publish(list(paths), targets, gui) @main.command() @click.argument("path") -@click.option("-d", "--debug", is_flag=True, help="Print debug messages") @click.option("-h", "--host", help="Host") @click.option("-u", "--user", help="User email address") @click.option("-p", "--project", help="Project") @click.option("-t", "--targets", help="Targets", default=None, multiple=True) -def remotepublishfromapp(debug, project, path, host, user=None, targets=None): +def remotepublishfromapp(project, path, host, user=None, targets=None): """Start CLI publishing. Publish collects json from paths provided as an argument. More than one path is allowed. """ - if debug: - os.environ["OPENPYPE_DEBUG"] = "1" + PypeCommands.remotepublishfromapp( project, path, host, user, targets=targets ) @@ -203,24 +191,21 @@ def remotepublishfromapp(debug, project, path, host, user=None, targets=None): @main.command() @click.argument("path") -@click.option("-d", "--debug", is_flag=True, help="Print debug messages") @click.option("-u", "--user", help="User email address") @click.option("-p", "--project", help="Project") @click.option("-t", "--targets", help="Targets", default=None, multiple=True) -def remotepublish(debug, project, path, user=None, targets=None): +def remotepublish(project, path, user=None, targets=None): """Start CLI publishing. Publish collects json from paths provided as an argument. More than one path is allowed. 
""" - if debug: - os.environ["OPENPYPE_DEBUG"] = "1" + PypeCommands.remotepublish(project, path, user, targets=targets) @main.command() -@click.option("-d", "--debug", is_flag=True, help="Print debug messages") @click.option("-p", "--project", required=True, help="name of project asset is under") @click.option("-a", "--asset", required=True, @@ -228,7 +213,7 @@ def remotepublish(debug, project, path, user=None, targets=None): @click.option("--path", required=True, help="path where textures are found", type=click.Path(exists=True)) -def texturecopy(debug, project, asset, path): +def texturecopy(project, asset, path): """Copy specified textures to provided asset path. It validates if project and asset exists. Then it will use speedcopy to @@ -239,8 +224,7 @@ def texturecopy(debug, project, asset, path): Result will be copied without directory structure so it will be flat then. Nothing is written to database. """ - if debug: - os.environ["OPENPYPE_DEBUG"] = "1" + PypeCommands().texture_copy(project, asset, path) @@ -389,11 +373,9 @@ def runtests(folder, mark, pyargs, test_data_folder, persist, app_variant, @main.command() -@click.option("-d", "--debug", - is_flag=True, help=("Run process in debug mode")) @click.option("-a", "--active_site", required=True, help="Name of active stie") -def syncserver(debug, active_site): +def syncserver(active_site): """Run sync site server in background. Some Site Sync use cases need to expose site to another one. @@ -408,8 +390,7 @@ def syncserver(debug, active_site): Settings (configured by starting OP Tray with env var OPENPYPE_LOCAL_ID set to 'active_site'. """ - if debug: - os.environ["OPENPYPE_DEBUG"] = "1" + PypeCommands().syncserver(active_site) diff --git a/openpype/pype_commands.py b/openpype/pype_commands.py index e0c8847040..7dcfc001f0 100644 --- a/openpype/pype_commands.py +++ b/openpype/pype_commands.py @@ -25,7 +25,7 @@ class PypeCommands: Most of its methods are called by :mod:`cli` module. 
""" @staticmethod - def launch_tray(debug=False): + def launch_tray(): PypeLogger.set_process_name("Tray") from openpype.tools import tray From 4ad395953e6818142ae764e73066e4eb5baea04f Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 22 Apr 2022 13:10:39 +0200 Subject: [PATCH 233/357] OP-2765 - add publish value Fix wrong recreation of legacy instance --- .../hosts/aftereffects/plugins/publish/collect_workfile.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py b/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py index e96541e47b..450a4540b8 100644 --- a/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py +++ b/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py @@ -36,6 +36,8 @@ class CollectWorkfile(pyblish.api.ContextPlugin): instance.data["representations"].append(representation) + instance.data["publish"] = instance.data["active"] # for DL + def _get_new_instance(self, context, scene_file): task = api.Session["AVALON_TASK"] version = context.data["version"] @@ -83,8 +85,6 @@ class CollectWorkfile(pyblish.api.ContextPlugin): "representations": list() }) - # Create instance - instance = context.create_instance(subset) instance.data.update(instance_data) return instance From 5dd449e31bf9a65b4d07aa7e8a816b30b59ddd9c Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 22 Apr 2022 13:55:20 +0200 Subject: [PATCH 234/357] added args to documentation --- website/docs/admin_openpype_commands.md | 21 ++++++++++++--------- website/docs/admin_use.md | 16 ++++++++++++++++ 2 files changed, 28 insertions(+), 9 deletions(-) diff --git a/website/docs/admin_openpype_commands.md b/website/docs/admin_openpype_commands.md index 74cb895ac9..53b4799d6e 100644 --- a/website/docs/admin_openpype_commands.md +++ b/website/docs/admin_openpype_commands.md @@ -24,7 +24,11 @@ openpype_console --use-version=3.0.0-foo+bar `--list-versions [--use-staging]` - to list available versions. -`--validate-version` to validate integrity of given version +`--validate-version` - to validate integrity of given version + +`--verbose` `` - change log verbose level of OpenPype loggers + +`--debug` - set debug flag affects logging For more information [see here](admin_use.md#run-openpype). @@ -47,13 +51,9 @@ For more information [see here](admin_use.md#run-openpype). --- ### `tray` arguments {#tray-arguments} -| Argument | Description | -| --- | --- | -| `--debug` | print verbose information useful for debugging (works with `openpype_console`) | -To launch Tray with debugging information: ```shell -openpype_console tray --debug +openpype_console tray ``` --- ### `launch` arguments {#eventserver-arguments} @@ -62,7 +62,6 @@ option to specify them. | Argument | Description | | --- | --- | -| `--debug` | print debug info | | `--ftrack-url` | URL to ftrack server (can be set with `FTRACK_SERVER`) | | `--ftrack-user` |user name to log in to ftrack (can be set with `FTRACK_API_USER`) | | `--ftrack-api-key` | ftrack api key (can be set with `FTRACK_API_KEY`) | @@ -98,12 +97,16 @@ pype launch --app python --project my_project --asset my_asset --task my_task --- ### `publish` arguments {#publish-arguments} +Run publishing based on metadata passed in json file e.g. on farm. + | Argument | Description | | --- | --- | -| `--debug` | print more verbose information | +| `--targets` | define publishing targets (e.g. 
"farm") | +| `--gui` (`-g`) | Show publishing | +| Positional argument | Path to metadata json file | ```shell -pype publish +openpype publish --targes farm ``` --- diff --git a/website/docs/admin_use.md b/website/docs/admin_use.md index 178241ad19..f84905c486 100644 --- a/website/docs/admin_use.md +++ b/website/docs/admin_use.md @@ -69,6 +69,22 @@ stored in `checksums` file. Add `--headless` to run OpenPype without graphical UI (useful on server or on automated tasks, etc.) ::: +`--verbose` `` - change log verbose level of OpenPype loggers. + +Level value can be integer in range `0-50` or one of enum strings `"notset" (0)`, `"debug" (10)`, `"info" (20)`, `"warning" (30)`, `"error" (40)`, `"ciritcal" (50)`. Value is stored to `OPENPYPE_LOG_LEVEL` environment variable for next processes. + +```shell +openpype_console --verbose debug +``` + +`--debug` - set debug flag affects logging + +Enable debug flag for OpenPype process. Change value of environment variable `OPENPYPE_DEBUG` to `"1"`. At this moment affects only OpenPype loggers. Argument `--verbose` or environment variable `OPENPYPE_LOG_LEVEL` are used in preference to affect log level. + +```shell +openpype_console --debug +``` + ### Details When you run OpenPype from executable, few check are made: From 91e2ffb8dcdc40254fc751f9683d8dd747d10ff5 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 22 Apr 2022 14:56:06 +0200 Subject: [PATCH 235/357] OP-2765 - fix missing representation for disabled workfile --- .../hosts/aftereffects/plugins/publish/collect_workfile.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py b/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py index 450a4540b8..64a81b58eb 100644 --- a/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py +++ b/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py @@ -34,6 +34,8 @@ class CollectWorkfile(pyblish.api.ContextPlugin): "stagingDir": staging_dir, } + if not instance.data.get("representations"): + instance.data["representations"] = [] instance.data["representations"].append(representation) instance.data["publish"] = instance.data["active"] # for DL @@ -45,6 +47,7 @@ class CollectWorkfile(pyblish.api.ContextPlugin): project_entity = context.data["projectEntity"] instance_data = { + "active": True, "asset": asset_entity["name"], "task": task, "frameStart": asset_entity["data"]["frameStart"], From 3bc4d98c98e7b3054f4d254b3aa42ac61f0cde1b Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 22 Apr 2022 14:59:03 +0200 Subject: [PATCH 236/357] OP-2765 - removed unwanted assetEntity AssetEntity not available after change of order. anatomyData not available after change of order. Added pulling of resolution from workfile. 
--- .../plugins/publish/collect_render.py | 33 +++++++------------ 1 file changed, 12 insertions(+), 21 deletions(-) diff --git a/openpype/hosts/aftereffects/plugins/publish/collect_render.py b/openpype/hosts/aftereffects/plugins/publish/collect_render.py index adbbe7eee9..fa23bf92b0 100644 --- a/openpype/hosts/aftereffects/plugins/publish/collect_render.py +++ b/openpype/hosts/aftereffects/plugins/publish/collect_render.py @@ -2,7 +2,6 @@ import os import re import tempfile import attr -from copy import deepcopy import pyblish.api @@ -23,6 +22,7 @@ class AERenderInstance(RenderInstance): stagingDir = attr.ib(default=None) app_version = attr.ib(default=None) publish_attributes = attr.ib(default=None) + file_name = attr.ib(default=None) class CollectAERender(abstract_collect_render.AbstractCollectRender): @@ -64,8 +64,6 @@ class CollectAERender(abstract_collect_render.AbstractCollectRender): if family not in ["render", "renderLocal"]: # legacy continue - asset_entity = inst.data["assetEntity"] - item_id = inst.data["members"][0] work_area_info = CollectAERender.get_stub().get_work_area( @@ -84,8 +82,11 @@ class CollectAERender(abstract_collect_render.AbstractCollectRender): fps = work_area_info.frameRate # TODO add resolution when supported by extension - task_name = (inst.data.get("task") or - list(asset_entity["data"]["tasks"].keys())[0]) # lega + task_name = inst.data.get("task") # legacy + + render_q = CollectAERender.get_stub().get_render_info() + if not render_q: + raise ValueError("No file extension set in Render Queue") subset_name = inst.data["subset"] instance = AERenderInstance( @@ -103,12 +104,8 @@ class CollectAERender(abstract_collect_render.AbstractCollectRender): publish=True, renderer='aerender', name=subset_name, - resolutionWidth=asset_entity["data"].get( - "resolutionWidth", - project_entity["data"]["resolutionWidth"]), - resolutionHeight=asset_entity["data"].get( - "resolutionHeight", - project_entity["data"]["resolutionHeight"]), + resolutionWidth=render_q.width, + resolutionHeight=render_q.height, pixelAspect=1, tileRendering=False, tilesX=0, @@ -119,8 +116,8 @@ class CollectAERender(abstract_collect_render.AbstractCollectRender): toBeRenderedOn='deadline', fps=fps, app_version=app_version, - anatomyData=deepcopy(inst.data["anatomyData"]), - publish_attributes=inst.data.get("publish_attributes") + publish_attributes=inst.data.get("publish_attributes"), + file_name=render_q.file_name ) comp = compositions_by_id.get(int(item_id)) @@ -165,15 +162,11 @@ class CollectAERender(abstract_collect_render.AbstractCollectRender): start = render_instance.frameStart end = render_instance.frameEnd - # pull file name from Render Queue Output module - render_q = CollectAERender.get_stub().get_render_info() - if not render_q: - raise ValueError("No file extension set in Render Queue") - _, ext = os.path.splitext(os.path.basename(render_q.file_name)) + _, ext = os.path.splitext(os.path.basename(render_instance.file_name)) base_dir = self._get_output_dir(render_instance) expected_files = [] - if "#" not in render_q.file_name: # single frame (mov)W + if "#" not in render_instance.file_name: # single frame (mov)W path = os.path.join(base_dir, "{}_{}_{}.{}".format( render_instance.asset, render_instance.subset, @@ -216,8 +209,6 @@ class CollectAERender(abstract_collect_render.AbstractCollectRender): def _update_for_local(self, instance, project_entity): """Update old saved instances to current publishing format""" - instance.anatomyData["version"] = instance.version - 
instance.anatomyData["subset"] = instance.subset instance.stagingDir = tempfile.mkdtemp() instance.projectEntity = project_entity fam = "render.local" From ac1eeca9060008578472d82a6ec4439b37df090a Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 22 Apr 2022 15:00:12 +0200 Subject: [PATCH 237/357] OP-2765 - added pulling resolution from scene --- openpype/hosts/aftereffects/api/extension.zxp | Bin 100982 -> 101003 bytes .../api/extension/CSXS/manifest.xml | 2 +- .../api/extension/jsx/hostscript.jsx | 4 +++- openpype/hosts/aftereffects/api/ws_stub.py | 6 +++++- 4 files changed, 9 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/aftereffects/api/extension.zxp b/openpype/hosts/aftereffects/api/extension.zxp index 389d74505dfc45ecba6313435056634a64b27c20..0ed799991e9c791565a92d3899f3db67b9fea910 100644 GIT binary patch delta 9400 zcmaKy1yoy0x5pFQT}p9xDDLi7Tw0(McekQ};O+z|?(R~cc#Av5trRWpQ0$d^*Zpq$ zy}q}yPO_5U?Ah}_IkRWh?0uOC7m);qs-gf5g988n5CCCv)^VtAD26bMM%j5rMlz_d zMl~@?KStZy(1^i}KcMmm)`j_1xhVSo01Jex%-Tz)TVFW7{C8Zha zKd$O5v4v5gy`7U2O3BIT+GIpa$$8-gh7c{s4SMNV z7uZ`dA(C7I+A@U=5n3)S^(A^KrcMR(hj=1ckdO8QfZ%=L(syX%kIO!-_S6+8ED*=Q z%n}X70@j4rh-qQJ{Gjkf)7<$&)C4>TG9uuLJB9Jt+kZ6bTx9~{(Ns~$pL@4qR8ZzcsG^3sdKs4uN0+zIDSzNT zFm#!mV}hTzh~X5-E93?(k$>&XTGGbqHWU=(|Dhtjk{3R5!!qxguqfYH7zc8ZU(u}%&kf<^^ zkg713TAMqXms1)YD-f?-b(i*J}_*@N*`)R zUQuZ~w>SkAqxtLisNJkYIerjvkzU>0)?76X^&2Gy?GtY z@ZL1S+&{u8Y@rg33!Pw{>9R{OMJn%3qPh}!yo1HJOAxJ<#ETFu7*4 ziV;t5JUfk+a7p6xs>7?*1HsMRl8lFu(6(6Y)@*JQYvY7j`#gP1fn!28de+c=6yb+!9ok&e*-+!|;IGnH zyt7kQ4q@&1iW^5QwX@e~Fc+JUWKYB!dDmJg59%Sp9Ye2SoE|%_{8LPAjsd2u(=p3! z(x(ZB^fM#?0D%tj2}IQI2%ciWKQZCo>G(8_E2u+%pT=S|A2I)$;6UE+-wA|w;BR6q zAN+riF1^6tiK>3e-!}Vp|7GKAKZyBPh*9wO{5`}>_)|hd_xuc6SAnhsBfmtm-*p8A zdHz4To(2DxztUiQ+P`%5Rz&v>p)rd!I)7 zz;PI8Mb~5qml4p7syg>i5%wq6K<@6tO8B`X2x>ycjDYSazqW*^83UDI-SbW)@|yCE zfzm)wV{Up!H4|fQTzr&Sd1+o3l6}0CX8FLb>&PjLMpp90+v%^>CKr~z1pcLJ{6i^Q zc`c6;C}XMjg1u|BT2p~D)3T1o$2svo$jPf<8DGk`3?QX-W)B?8@Egy^GO593YB3?C zTnh1y#3nSXo9KB~oNUSiU&q@8>DfK1%iVko#8+@KH*;PYd^|c1!F->*PF8lo@(|WEtDNgD#W7 zL(Qt1OKVR9WaNr{YsgVuw%B0wwtP?&&eAnrRwNx&VquLAKH@MXA zK%-md{F|=%tx~jP4b1jY z8zWcR2@y;5`xItPVKKSYd~r`d%IRzD*9oC_kHwQVqM?$Gu&s0TR}&FqMOK=;*?6IA zwS4X#b6-osco%hbyQs4$#i7B*Mpfiy02(E$Hr=gas`VZ|t(8ivsf*uSGrM5eFusmW zkac~51p|W+BKzW8&rp?tSU5y0eski@mCOw?j-hI_wEwXe$|}ozozV&ajPV^6-UvG^ zD3q_h2T>H3OAhCQWu=sK9s*P!hf&*RAXb}U>tQs|aO&AD=|MEGNK;c|VoftA5ro7~ zc2)>=u4Q#_35B(l^f*~QiwiA3uQyu;0{HuqEIJiAE!sph3)!vOMD>7-aYeDPhPc`{GE|`k1RUzC$w;C~p~i z?FtY_V?`8**ofG-`G#tk@4ZvvH1u=>3@a2%TH zP>jm9kN)snY@+9@YHKzUbP->brLg&$5&P!JWL=&F3%uaDZPoc2p#;6h@rl{Eb!au2 zndg42WIbZ&`s;OPQ$C==5e|%g4rS1Bj4~!rq;fYqCI-6KJwU>dxf--Gw;7owmbQau ziRBp$RfZ>iO^DBA>=(9)#-wi(NYcU!+ZZOLN!TWoyc8(;eZHP92tCBCTVX<6Y>Q4m zAVYS9S6Vkx%fpeYyF|0yI-lR%yg$-y-dBfyj-bC`QxI$f81L>{yXfW19B(p7v8jsl}iLSriLK%Y6Ff(Gtu! 
zt8s8DwmFnPNx#f`i))Lt91i3BSZX4QySt39vA6Fq>qeWa%;^iQFnUt-6+T@c^X#}C zNAJ7+RzW^qz?VUlr3`|l z(nL*lL$eO2Slw(=(t`>GP}Ni=e*7aHZ=IW=`sq3SO|uhl}XN@%bCovSgYCQ4sZ=N?} z7bLgW`Aks%2zlcO2eP2X-1t;`jnKX&g=(#zKzq_T8Kc1(W^x_FyhYy&B1Q*FyA-MOLVahQ%zDO`dq##Pd6%#&^XO$#zUTSb!)jHkCUYw+v9|*HesC2!MQ0U=uAZxlL zZSEYY#7hN4c5>XawaFnQ=rwZTZVx46WlTUGt!KWlyAlwA+p~-eXzZ3&*kFLYdM-$j ziNn(_=l4o_8w#co?nK3$SCedqw`+q-T;be$g1#HZF=(-(P?73A40ls!tUQ!)S+7D( z)F+#qNlx@bFi5yTje#EZBK{l^JJEd;sK4!VZ~#&K@DFPe2NgN2T}f;XH^6*+|p zDU$z;}HX_D1bMrT}!|(1` zpLO1=__eUz;A0|o+Pen=yt@n1!&XAV?dXi^3%O|J1wb_Ir1{tzodgw~oFy5Zl)e2_ zW(WQ0?{DwkxYqM9FhVAL?hD!YfPxwV!3ZR9Lf9i@!*;JfxoaxD5sNH-2FDxc$n4}W z_!=pgjimkULF9g2fp}%3A@-x*u>yUI(cXG(23ki{Fr@sUpjenlJ2FkSnUXYfDq)-<*{F=Di8>)OLUT2zl#FE>dy|fpHSx#*u#RCVv zcDq`efbg}n2NEyAemx6Tr@E2J9NTe#sE&<;p@|M{4`#3XNTsCv+n0%c$V;x>OGY^e zl%lN1sFjJ`v|JTd8IlTWXofzNheA~_NC)IzGa4Xqk6<-@FS+I}s1I#$<2i(BgghM; zF(;KVQDJ$3mul!}c+$+a=y+S;V1!kPro?L;!GIz}NlC|xw#uD5=nsk|C*@UdFQV^R z)G*mI$VqMvzKB-1dG7nF=OGHJ%&dZRktc_a`R-syj@v2Sk*U*AE8n`nD59~9W@gCU z!8w8SHKp2#kl)fZPDRp?6GVI1)59YUff3&{rR5n^0)$aXGCJ_S?o(gCuSf`~zQM3`_Et!aX(9*c~vD?E%~Djrvjk_{!(tj<~*YzqfHPH5IPAwslO zgmvkm-Zeu)JKZGb+STd6YGC;_ujix?EJr;EQyy-As_hWCVWdlS_6HYjs3vf5>J^TA@`kBuNxHj1p|k|Y&Oes zo$KI&sT*z5QnMyeqvQw0&m3R zqbsSA1Y zU3XBd!P|`e#K!)6sk&rSv5Zo{a)nCuCEqKq4Lq0~G*byi;<9s+VUfMdlW?b;!}fB1 z)-hV%xW$y3J>Cb7B^7_UTz%YM%F%i(}YKXm-M9=!XeTjF)2W13p&|)myx4 zL>!iJg)(w&TVC;W`*CwFU}MM?0dhc%mTmn;71d=h+|}+lFKT7o(a}hF?9Y&W(t9(= zL9UT}3~j@ljQf4%vJ%9$ zYyBiR>m&3&!QT->C!as8C?|velAKg+9#5+<*EyWV29_J4y?SQ4JkAZg z7&H=M1?YszD6iOpqWD)H<}WmP^RvWEW4R+i*Tu^Ad(HYE%wFR-z6W@u}@CFYlHnQI7*$kHrB*ey>2R4)Gs91uVO7 zsG{qai`U?W4S>N*?@Fi3^e9_8avMESh%SaGR6?Ik5>SW)24!aprJGKam=L#4sewGr z>UO?}^0+r(VdzTTSI0{te;_N~GW8s(HI80P6V9bIK3&dQe^hBb!>x$-h@zLQusI6s@h3ZTE8Rk0M~BmviD0W8&ZOf@DzN}BDwdrYB88G@ z7w}9_>K&OljGd8IN>*N(f3I)_v(jyI_1-DKeO`3PAl;=3sHgc(T*bSq7`r^91WQb0 z!B-wd$LYAyDWNA0+IEJkF*IW^lvT!}*NA5&wwYa|A$OIh?8k|>FR!#Zm=WUck??rG zPi*WXWMae z%bFAU&)AIIe?UvL!AkqKV2ZeXO0ipdbs&mtJjqyy(ZHd^1*&s+g%J%K(2?QZIPZgW zKEtpVwCdE>6ufr5+j(61%tux)whsvxX5#um(o(14`PZP&9mrcZQ6iW-9!@LooIM#- zN9sFapLc{J;9DNAD`vU1-xZ3U9ICKkP)k;N$)w78p_G;hEvT#b&jzOr*~*s=*CavF zUXQv(_^)-L34$cR4Vs5?H3V_N*jk}_vW&fLsd7S6F@0NR(jNC}Qnf(dhAS_qU}fzB z_mhM}%nOOr+|G@4B$_Wyrf=%7;fGImG$e%M#1x%rp;uq_-M47>Gt_=xuP8b^#Z|p4 zAVV%3Owvi3k?R!D^-Q)v_cC38Ul7waW{1u?G1ZozDh6$4L5Gkry3c>HE(($WK)JkO zux!AEqtFuJ$z;GSt>!)7nJ7K7Ce$5Lr#(BYvhwklESg^A^fl&v+1#On1;P4>+g4JeD|lC_DVFz8GE^i0Kd23uU5v{tWnOqTd+ z-B;BGh>Ah@m2f;XTJuW>P0$y%t;?g|3b;jP>8Vr(l#Vby(|5R`=h2Lo>zuEJ|;CmUD!59bwiotb3q!{8(&*D@t9sWQvK9 zfSYk}H6PJAI%hMptg*Dw7Vq+egSD!xqpCDK@yzfgDv^ zQZv`iO7Ot^HEYvN|M=i}%7#r1vtRN1iY97r)@`mkefuGp_9ExI0K#$P7pxw&%>Euy z_{Z*`hKgj4JL5%$ykq!BNCdM%p)MNA9@X-ULQ92M{PWfh#huIe*~L8(md4q1gLwo{ zFH4YVlHfa}A+GWt41|ByOTTh#0ge*dH<3V@6ov8;7qsc(_Ut#4BKMS^ zfiRm};-js&`ET`f&=35TidJVG4GtV$_uG^`M$35R5chnZLWAa0I5uZT3${Yv-OsOR z;uq)EN@#9nR6WY{-j5%mrM<559fk78P6Q^maQQlMJb6j zO9Y(NNBDn_3Wt9lm@T*iz389LKmWP@Jp=uTHBf`5AKKsn02(|103TotISvCphorm% z>Vy7i|MdAK<&|@~B3|&3{^nz>6N@HyYjO#%rj0mR4W;N3xy1}zFNOd49=t#nwm?E! 
zIQzt>C6)7swxTgUm$L$;Z<9ySs1_Y7t3GV2@>#)9$p>c=ldd5ka@o}@WuX%dE;eu; zU3fOK$v#GOfQX1UR?jy42s+3~1a7rYq-_LFye6^~g0%P_TR>_I;`5>y#BF+xZ1SeaLanX-LUz!3`q65d<@gHU{sL!6lRLZ&GnW3 znvs^DCJJ^9YnPsLVFgG~y~$#o8Up>HjFq0<`P&CKkj&R)W)d-@Z>aPvmmV{>gtE*m z3Zwf!5~N?tp|c4}jebhB-+^V7j7(H-ppYtan6GWUZB?HU7??ie_II1Zp{`L-BzLD7 zD)7!Ext+SJr`Ra4CtUFNekeQ-0jnMqyQ7ZUPwFG-JNRyQe5?ay?@n4ohNF2WNyVk= z=_I?kB!A%pSzY;dlJLv~X{GR1B?#i;2@Q^Ww&wE%CtWm}Ul5z`wRB!#xr4S%?xYKy z{XaJfD#S$Ta2*p=aMT!axKP)zNWgYqHuu2WKu!vW$E$pH+aX}w{i7qnguzQjfh2;- zO{N;ihifKXPHK>WlVVL-%}WxKh$GEIFa4?x-HV54xs6%Zd1PiET^8sTd93Bie~dGVX#pmAD;p7@~}aLq1NgtiE#5 zqf07Im?txwPdrq~+QqgaDUk|N`r#VN<)!gmfa+z*iOl>)PWMsv*Sl(cDRa2LI z{vgSeX0HoyG3gHPtkfCBP8mobK-v^iWVwegAixmedg%=+QqdJ@>r|?=mMqx(+-8nt zh-R+!Vx9!#^o50@L7H1|A(xOF#Xa*C>-i{gOZ;%QB~RULYrdVQdC_2~V+m7nvrW4o z0>&(XHf2V`t|9|WaJPvQ4vHQht6L)f@ot=ig#k>xOs69cWm%(fBx}oSO(c%N*QPyJ zMXBpq-bP+6l0s}3JJ2bYV`HrT4|U2NUq0Rlp0eP9rhVYRhnok?QSHjGq)t0A!*^-s z5%qxOlw8vGhoJf^^4)`p)Yl~V{;)w@TSPA%n=}j)m6<{-7uJeHmo<+0$E^QGjSE|y0L$N{zk>QckuC8HC8Yx{KEm#~)lp}w{Q+PO{31mH{V zX>7>GXSA~j`Wn8zU_F9;;#M?e4tUBF93`}+d07coyne=#*6$b^zE{t{+@!0d5*-6s z;q`4%(Y(pq+s^3%GDKHr<4)pr^iX#xvg>xBGW5j*JLhSHa7C&IB;xYjG}#knCXpxr zh_p@bn?=b2qd=7&x_Y9ofG(Lo{#LOuyBr#R`dRdXtYkHA6+k(l)Fuu>%af*05ThaN z&7a5(VN<(HHw`BnN3BEr8#ISJRIicqFB8yUBM+sucJ9xnTQpoqNz309mty!J1$CE5rB*Bl6XiHa&o3s|g zbdgxwK2>%YOS8ilB^QH( zA}WS;ruEGuo}!X`^fgT3EuNv0qu?569I0Q?J{-NTznd23ICwcGzf2?GqVS|JqLE=I z&CRe_oJT~BtDUn{Bzx>TrNwB>HTI9d4&OpM@~%^&oYx#{rV;Y$w z>=xTb=M%SLxsOiMQ`O&3XbZNB=FOZNrj#GLNbsh`vl`>J0eY%7L z_8~Fd)LZ$r4ga%ZX#1(~oC?AgYtT#lm`b(aFi4=$cIot+Gt2vaAI9`;Sf$)aJnH-W zc%EyUkDQIKP)2_cRHha@Q&y@K7%+UpHmfHfec9cCS+kWtIox8AlB5vQwVdwL8mi12 zzk4T&FP}BcLb%m0ZvrxP+Q(AyI}zu7158hkg_h=_G)5r@<6ISqBGQ{U_?Spr`oO|p zyyIrvE0|^$s%>Twgr_Q5s6(#!H6I59m+o|eeBPb9-`_GJNPJ3;0g~1_Tof6!{}C1_Sy37kJ7#R)c|x*iV%d zzxw!DDFHzb0J1%Ms*?B{{O#kP5(!q5C|0gBM=SxgT>PgO9}#V|04hI;XWlnSAu}7*iT8&U*-SI zTSLf#ft-KTVIKSkA5S{kf`KxB92H&&P~eZiBm^k&N6-=i6#OGV4E=-hCmnjBKL!4) z$y0{**MIB3vbd6AKo0DG+x*?b)4n!53`i)q z=?(-aNoj~N%e|ClRhNVT3=qPx)1=r8h!Bt0&Fd9OZ-0)yP}h!ualsWlw=_wUx9&v!Gdw>io_$iBbLDbFfQ}FGk!VN#>oR9 zeqK*p1-RqBTqys4;15-zJ$UdRvie8xA*8=_L#Y0L5;ZjUUlvEv-~UOhFa(hx#B4+p z%Sk+l^nd#K|9$jEuHt*B<< z1likm>1m=4qlo6uW40v|k5_ycgyT~5&5QPY11GVuO?vjya(oKX+axq;AL8iycojL%Joc1KStFzN z1G{v{OzC840dcq}0%lanuANK(S4xXw7iF~(XxJs^ zE2>MyA&M-Zyh@d}t&6R86nb@W@(0h>Y`*T!@L{>ayC0iZX)+PWy`(wO&F{MM-(-Mo zr@I%DH}Xd%ell<}5}iMP%?Pbdmi%!(`|tb7L>5tG0PAf-H(IvIOkP zm+eoQG@i(0I$%-;%wIR+X|FL@7*+R;@o6KQHao)Cf$8f+g&qTv&EfSoQ!M%XM=K&O z{Q*HF^$d1~r4eN*Q&Jj$1^<#C$WRsFjrh+5o3~W~hp>O?X7m6-f7&$B2TVZzrQaHWEb2R)^qjKAzpKd^$U7rCaq9+h%?4#&x6yZ4IdQLHG2t1OG( zSX&pPyb=>NK+woca!tf!4-`pUk8|y>Bh@ZMWT@0aOjb|C8*TITR76iaTaqr?kG>VLc=<5xR z!5RWW^vfzUgRL4J>*Y(2=*K!N`oniOz{N$a@o0-uUz0Zqc3qi)ro&T)+Gg{q=H}7t zrk0e$JtO<-gZ-zZ)baa96tJ+U$ni$IX}$SsyuQel6vG9b-{a1)X?pVs-hm(A;B@KN zS@v}RAW3KN-9PtZAfUOt4Mzxow@#pOp4N+wNhE_M7>+ng*_1gAvuopn4v{xK1Ogit zVKOz;l_i2TLsjT@14c67C#16MdVSJ9MDNI_EQ=Dm_D5Hg;r76hy5;drj;%-UiBHU9 zk+9b)sVp3;R7lZH43QZ)`r3*mduC(mWgIfHGcg=R)8A9(5)c6z=y{BI0|om)`NgvR zV!U&BQfhGE!gB<-sy(Q-Xxf6DFxL5}d@4ULo*1o*K;U(Gz!@!|U1_-2upL5@RzD zc73rMMWe2&vlWfrfIT8E;G9*;l=XCWHSJt`UOK1a_-7hWvm}CUB>x;Mpe50jbOVZ` zH6P#E(+GDQJQrr91t$KIU*W6}C5i|+ZUISM&N2NW{ksb^6_pqfbElW<;C4Xx(-OVF zZDL*$|E(w6ZdODMiG|nUd+`Rih>fw&J44RkY@Mp8?4hx*dIbd~fRGO@sxW@O1SDPf z1W`7DSN%+u4~SFZ&2JJXTwZB4qV}$~7~vAiF(?}CnUG&j^wB!XF_bT?9o248QK*a; zOVHYKH=r9tVZpOX?csfooQWxDmT*YM)+E=%o8b(=3d~fC5w!`tVc79SP-Lz#^l3XJ zQKN;EF;`8?(=I$028Q;AQNXr3t~U$OAyIm#99>0bRlUb zH)nsJ$b{O)lX0E11UbQM8WxXX2?!(_l-fvlYm=0IML$2DCY*$*=pp5A>gzwkyw&C= 
zb*>RfON_F@BL!fbo3LjS2Oehd^KgTGmC;<%5A5yKVsPE!z{}2jbQL6sKlx(1Y6(KM z1}Fg=ov&}8RS`sFkr35~%F9^l40#3_C*tC+XN!QdRyPP=fUoql+iPgO?vi|^x_%VX zJK>}W`Zp6#bCVU^e?AR+JCEQQc!XTkY=9+TMcs<#+MI8thmBF&3cHoIAjpt%hIlN+ zW^Yt7WR+s~V9@Q*6pyGlh@avFee_(UnF#=nB{-g&==wq$UgU>$R((nIT#o?AGtxtRMYi|U!B_qh^Qr**U)=c_N zFTn#UYbMcKWqLZG<07d0-U(55GZeT0z!%}plq|W`NOrk<);L6E8*CPce-+)H;cr0z-p!~Seaa>}c7qh!9Waz3WCCkPLi9oXY@Bn# z*A?K_miw(?gQj2H&M1K<)e@_}a^wJ;1yQVy#*KG*^@cwh#R=YMENiidXXNU3n7N_k z+ce$0fB$uD{e_~|*{$J3;C&UCo9PB~FGj%fu_NF%M3xPs69scw>GrL>4XX?v5UcB? z5M!$wx6;U3g)cxVB3`;;A-yUj;3Z>)Su8kdz9!$9(QNb05Dn(f?E3| z(If-1H}UmoLlv!czVb$-6cQ}mNiO*qz`a@!yRIk;IY%sNyAE;;hH~~o$`#{c=d$}WqrgtKBWAX39L*EKceX9 zPE3C-z4r)f)D_leSAda~Lv5aDu4gZ0=LW;gkBi08B~L)kFs-$g4b{d*`sy@i2j4Mv zKfsZ)bJcbJ^PGm^(%WJ_#LRwwU~1-VXL zA#MGuiDFJDR_t2$8G+0|D`2dVe^>sG1hlUZt}$}=+UcW-gSj*YXik}6)G5?IXv93HZ%XpHgT*03x(G(XzMh5?tJdjiwkhSsm`w(vct#|2rE*p6B=$? zhqAFTsfOM2QX&IJcV98&gwMZ<;x_}xV6$Ydi*DtEg%f;4qogmTch`@S^Yi)kl=eX` zq1;|X7GugU@S+pOkU%H!y68l*G4RLWSqMGO$8PAkGU<4jU<<|rFWx=h8In>rhxa!;R&^=SfrqKY5;7^hB@V1oN@ zUa(s>zbpWWcK|LeYN8zN5DZu|-0;@339dpaxvhqG#onQl_fhzka4b7rqQz74^RQEV zzY#MPxq}=hYT|-J_0bovTslL3r>=7u*smB5V3tAVp>56NCt(@o%)-PyC2p^i#oOT# zEy74aU;G#>Y$suWvv@5%GXa$!^>FU7HZfqY(PLRzueT7f@dn@FWsk1;@^k5ly9rWH zs;3*RdG}3imw!JphR>v|NJWuLu3?;tnzLZUXEH%-F6~2Pdc$UGnY9z(GE7B*m;l{8OJ9M0HDHhC!&X>6hb%vtjdAzA?p&0;;e%=)5ay7OFs0#X1vQAnm9p5= zshd9Zop^&%>O^{_rB=zzCZK{2QG3#CdBO#JKUv*98=?j^&Y+`tY3;0j;>&=7Xx)xh zE0@ikfY+2NaEodEn3Jc42EJ}qnbV$*Y+yelYT9k-A-G5Iq>v|6xU(WtN|88#oXMEF z6TcVV8k}GV2u8&cwfc2J;gT08^Q}?E!8F8^^!3K9iMRZHISZ6!8QoVEaRUKsVaF^T zP*;%L!_pMuX)x5GC>TM&JhNkbXJ5=A-+5^`lSb9I^Ur1?BKC4HS9Eg`$0wzgL=ZgQ z+MRGjVt8U(?KP;c8Z_%Gu#ziGJ+$`~$E~qC@piu-12w=+=&Ok}65f(>L4NI!tJZcO zIn$b!x-^sPx>tQ5-q-5PS7&8Lmqc&MmDjjfj8QY6yzk!wiE(GwI858#THn3P* z$H8BU-H>1s5}})K0?KayreB8a(-{kDA2rTp3q;C%wCQKb`T2&*2mAJYUbY>A^lQFT z!XgEbJ9`52#WX5fa6H4Humu%2~C;LL5&)i^-YPlJxti z+&-;0KPMO4&;e*D5vjYhlNg-Ew~ekdI?L5%80AYecyKhmmfCQ#mym*4nE9Bh7*G&R zM9zg8e3fb7u}yQ3t`2^ovgG(0Q~4p17@>G5OY7@r>25w6 z{bI$#XLN-E2XJalAedjT_PFJm_5y93dM{avjBkuTV(qubFKZ25(=IG5YN|#ogdGvY zR{&7QAQX+@5%zaMUciwv9V}j459gy}1dDKptKylp{#&|?_D7wZF1zN%Gz05Rmn?WN z%%#f&7KP=8jF>w4b+f*;8X9`($#%34MyFeFs zPgTw4;H%R-Pd!MxP^lpP`IPRCe1zNf1&v|cr&_yY<=f!r;l32+X-1eZw*>nG%H>%S z>37S9RIWZ>4vwyfu@?_Ut8GI*3W9o@WQh1fkMP|6o)L?n=Wyv&IgBM#kRwTPULo18i(1pe#_R8d{|~hPl}_j4yf;~yk)}K zQ5N&o!6Puj0#{(u2EJ1R2M0TRSsp$B@S;qfWMcF-|tLshG8HvdXCw!GgU!> znJE@{dzRE&AiZZV#dvA*62&xFkzUQ`WCXEe3O zRWttq9YCy?R(1U_G*@Ix#4QO0E1tb#x-{QQ-=Beq@L9q}9dXXsrZbXF|6*IPIG^;C z9Wv_V`5%*(Ha4Q5a}R(I@k@2!W!miJ_Z8 zSOYgz=N@lPl(ziXx&hZPtgNb)@7E7Uvp?&8~LuPBsUPIrkivtQN7}p+b^V(3Rl!gGQL>a|yJp*f-(gOyQ+07rlE(h?_AMC9t7e8HC z?miA;w>UKUGWuPHIOtw4JeE9NES#U3`&h}>g~rUlTzgw3`a$kzYmU9?81PIY+4fhZ zmDdE?u*og-PFT&MN8{OI_QsmIjPc!gh$s%#cg3*a$Y@$%wEf_2R(2qQW3tI2i0V#x zoML-;+ecbJyBE}J`+CnY|CW^BVQK3rcA%(S5+%A@m+%@kkaIC@Zn|#PH6vX&11xMT z$}xiJHqGy;{IWer`l!$YKgMBdl&8ZabJh#!Qxe=J-%CrkuogXdF>@$7U6?n~(90cd zdgAHYJk4AvE}@NS3D>-D9Ho}d@kObjlwF`wYeZR@y9qn>o{!wzgDlPI=VYptv$gn8 zpuBWwR3W)U3YndXOB|mo7B7j$kTsb^*EQw6!6|3%$B*@mUimzFrNrtDl^dYWeDey59HBxfPTtJBufuXrCv$55Lbvk%K>U zE;~bdA}pX;S@kO{EZg{aS#Oh&RHl~p&@;3l800rURJF zdTMY}U$+7oOKdJ^`nraq?Agqc+B{w~?#5&4VeJ;XW;jVeN~Jn|5l47aw`wk-&BhZ`)&5G28JY@n5i&*M*eo@2F1-1%lj#TC zdUS9>t_-)#D4ETn^kIk$S444MU*QFuRCl(S52hhCUbVP0 zBsubXIo5>V(9JX&*#&Sr#n3o)d`v5+p$&8X^Sr{jkJiX|J+Xu53~ z2Bqerf$Ywk`_b;K-fxTz8gp4o?V5aRWo|(Fl?8J+PvjqUz?HIt?|_`}dMd+&?5`$! 
z1-N^0)8cXRik}!-GrR4DT6{+1sZmbPuQ&ql6-1`EnP4BD74LNK#@re zNc+6|1WcSrYVPTOd_Z-B@4uH4ZyRyttLL}oITWji;-9bGHct%|kb)gKF$!h{@8JRC zs&7N2JOD4Ki4hR}>iba$PxjGMWz+dEi;-0Nmh=ii#^Ck$6K;jo8R`?NB@HEt6>zaL z4AXJN!}z3`$0i`v;+=2p7uLMex9_9}w@SyEFDK;zp7T_0BwQH_Z`GPV*o`-BmJA%@~FibeGz4 zSQ?9U`ZToltzU#l4!d^`UTZakKlc@*S_x{e1aRdOEMLKYk)Neau3l5o@9G z-OgO-4p^W0G^asU<^r7W?Zp<<6kJ1AD{TC5GsXYT6d2)YXZ z7$Lrl5dPjk7J&ecziMdNfdE#FmsJl+vH)-*u>X_}K?some9%)M0P`R8KWivJR6zit ze+8aF0N#HEeL(=Ze}G?(elB(ZQ3L~Cn*U;{R~;8>ocxpuh5$tW6@Z5Vg#Q(&hyE{$@=yThKNP>heW_z!gaVi`Uh0wm z{hT?%0PO!z{0`SQ?0@kw83vI0R|S1IfbU;{S2#fIU%_}dfd5|sQN+Jw{*s4N#LonO z=kYt-wg`aWKTQ43{O60rLiFE?|Ej@iM*`UXVfB~zWsIBi^DP97g4!bixXizXxG!(% zFR%X@ - diff --git a/openpype/hosts/aftereffects/api/extension/jsx/hostscript.jsx b/openpype/hosts/aftereffects/api/extension/jsx/hostscript.jsx index 8f82c9709d..91df433908 100644 --- a/openpype/hosts/aftereffects/api/extension/jsx/hostscript.jsx +++ b/openpype/hosts/aftereffects/api/extension/jsx/hostscript.jsx @@ -417,7 +417,9 @@ function getRenderInfo(){ var file_url = item.file.toString(); return JSON.stringify({ - "file_name": file_url + "file_name": file_url, + "width": render_item.comp.width, + "height": render_item.comp.height }) } diff --git a/openpype/hosts/aftereffects/api/ws_stub.py b/openpype/hosts/aftereffects/api/ws_stub.py index 9a6462fcd4..8719a8f46e 100644 --- a/openpype/hosts/aftereffects/api/ws_stub.py +++ b/openpype/hosts/aftereffects/api/ws_stub.py @@ -29,6 +29,8 @@ class AEItem(object): frameRate = attr.ib(default=None) file_name = attr.ib(default=None) instance_id = attr.ib(default=None) # New Publisher + width = attr.ib(default=None) + height = attr.ib(default=None) class AfterEffectsServerStub(): @@ -609,7 +611,9 @@ class AfterEffectsServerStub(): d.get('workAreaDuration'), d.get('frameRate'), d.get('file_name'), - d.get("instance_id")) + d.get("instance_id"), + d.get("width"), + d.get("height")) ret.append(item) return ret From 1adec078d88952bf0dcbb6705e904c5dbab09182 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 22 Apr 2022 15:47:06 +0200 Subject: [PATCH 238/357] OP-3021 - refactored base class into abstract class Moved to api.plugin to make it clearer. 
--- openpype/hosts/nuke/api/plugin.py | 138 ++++++++++++++++++ .../plugins/create/create_write_prerender.py | 4 +- .../plugins/create/create_write_render.py | 103 +------------ .../nuke/plugins/create/create_write_still.py | 4 +- 4 files changed, 143 insertions(+), 106 deletions(-) diff --git a/openpype/hosts/nuke/api/plugin.py b/openpype/hosts/nuke/api/plugin.py index 3ac750a48f..eaf0ab6911 100644 --- a/openpype/hosts/nuke/api/plugin.py +++ b/openpype/hosts/nuke/api/plugin.py @@ -1,6 +1,8 @@ import os import random import string +from collections import OrderedDict +from abc import abstractmethod import nuke @@ -594,3 +596,139 @@ class ExporterReviewMov(ExporterReview): nuke.scriptSave() return self.data + + +class AbstractWriteRender(OpenPypeCreator): + """Abstract creator to gather similar implementation for Write creators""" + name = "" + label = "" + hosts = ["nuke"] + n_class = "Write" + family = "render" + icon = "sign-out" + defaults = ["Main", "Mask"] + + def __init__(self, *args, **kwargs): + super(AbstractWriteRender, self).__init__(*args, **kwargs) + + data = OrderedDict() + + data["family"] = self.family + data["families"] = self.n_class + + for k, v in self.data.items(): + if k not in data.keys(): + data.update({k: v}) + + self.data = data + self.nodes = nuke.selectedNodes() + self.log.debug("_ self.data: '{}'".format(self.data)) + + def process(self): + + inputs = [] + outputs = [] + instance = nuke.toNode(self.data["subset"]) + selected_node = None + + # use selection + if (self.options or {}).get("useSelection"): + nodes = self.nodes + + if not (len(nodes) < 2): + msg = ("Select only one node. " + "The node you want to connect to, " + "or tick off `Use selection`") + self.log.error(msg) + nuke.message(msg) + return + + if len(nodes) == 0: + msg = ( + "No nodes selected. 
Please select a single node to connect" + " to or tick off `Use selection`" + ) + self.log.error(msg) + nuke.message(msg) + return + + selected_node = nodes[0] + inputs = [selected_node] + outputs = selected_node.dependent() + + if instance: + if (instance.name() in selected_node.name()): + selected_node = instance.dependencies()[0] + + # if node already exist + if instance: + # collect input / outputs + inputs = instance.dependencies() + outputs = instance.dependent() + selected_node = inputs[0] + # remove old one + nuke.delete(instance) + + # recreate new + write_data = { + "nodeclass": self.n_class, + "families": [self.family], + "avalon": self.data + } + + # add creator data + creator_data = {"creator": self.__class__.__name__} + self.data.update(creator_data) + write_data.update(creator_data) + + if self.presets.get('fpath_template'): + self.log.info("Adding template path from preset") + write_data.update( + {"fpath_template": self.presets["fpath_template"]} + ) + else: + self.log.info("Adding template path from plugin") + write_data.update({ + "fpath_template": + ("{work}/" + self.family + "s/nuke/{subset}" + "/{subset}.{frame}.{ext}")}) + + write_node = self._create_write_node(selected_node, + inputs, outputs, + write_data) + + # relinking to collected connections + for i, input in enumerate(inputs): + write_node.setInput(i, input) + + write_node.autoplace() + + for output in outputs: + output.setInput(0, write_node) + + write_node = self._modify_write_node(write_node) + + return write_node + + @abstractmethod + def _create_write_node(self, selected_node, inputs, outputs, write_data): + """Family dependent implementation of Write node creation + + Args: + selected_node (nuke.Node) + inputs (list of nuke.Node) - input dependencies (what is connected) + outputs (list of nuke.Node) - output dependencies + write_data (dict) - values used to fill Knobs + Returns: + node (nuke.Node): group node with data as Knobs + """ + pass + + @abstractmethod + def _modify_write_node(self, write_node): + """Family dependent modification of created 'write_node' + + Returns: + node (nuke.Node): group node with data as Knobs + """ + pass diff --git a/openpype/hosts/nuke/plugins/create/create_write_prerender.py b/openpype/hosts/nuke/plugins/create/create_write_prerender.py index e9309d8170..7297f74c13 100644 --- a/openpype/hosts/nuke/plugins/create/create_write_prerender.py +++ b/openpype/hosts/nuke/plugins/create/create_write_prerender.py @@ -1,10 +1,10 @@ import nuke +from openpype.hosts.nuke.api import plugin from openpype.hosts.nuke.api.lib import create_write_node -from openpype.hosts.nuke.plugins.create import create_write_render -class CreateWritePrerender(create_write_render.CreateWriteRender): +class CreateWritePrerender(plugin.AbstractWriteRender): # change this to template preset name = "WritePrerender" label = "Create Write Prerender" diff --git a/openpype/hosts/nuke/plugins/create/create_write_render.py b/openpype/hosts/nuke/plugins/create/create_write_render.py index 79766929ac..18a101546f 100644 --- a/openpype/hosts/nuke/plugins/create/create_write_render.py +++ b/openpype/hosts/nuke/plugins/create/create_write_render.py @@ -1,12 +1,10 @@ -from collections import OrderedDict - import nuke from openpype.hosts.nuke.api import plugin from openpype.hosts.nuke.api.lib import create_write_node -class CreateWriteRender(plugin.OpenPypeCreator): +class CreateWriteRender(plugin.AbstractWriteRender): # change this to template preset name = "WriteRender" label = "Create Write Render" @@ -19,105 +17,6 @@ 
class CreateWriteRender(plugin.OpenPypeCreator): def __init__(self, *args, **kwargs): super(CreateWriteRender, self).__init__(*args, **kwargs) - data = OrderedDict() - - data["family"] = self.family - data["families"] = self.n_class - - for k, v in self.data.items(): - if k not in data.keys(): - data.update({k: v}) - - self.data = data - self.nodes = nuke.selectedNodes() - self.log.debug("_ self.data: '{}'".format(self.data)) - - def process(self): - - inputs = [] - outputs = [] - instance = nuke.toNode(self.data["subset"]) - selected_node = None - - # use selection - if (self.options or {}).get("useSelection"): - nodes = self.nodes - - if not (len(nodes) < 2): - msg = ("Select only one node. " - "The node you want to connect to, " - "or tick off `Use selection`") - self.log.error(msg) - nuke.message(msg) - return - - if len(nodes) == 0: - msg = ( - "No nodes selected. Please select a single node to connect" - " to or tick off `Use selection`" - ) - self.log.error(msg) - nuke.message(msg) - return - - selected_node = nodes[0] - inputs = [selected_node] - outputs = selected_node.dependent() - - if instance: - if (instance.name() in selected_node.name()): - selected_node = instance.dependencies()[0] - - # if node already exist - if instance: - # collect input / outputs - inputs = instance.dependencies() - outputs = instance.dependent() - selected_node = inputs[0] - # remove old one - nuke.delete(instance) - - # recreate new - write_data = { - "nodeclass": self.n_class, - "families": [self.family], - "avalon": self.data - } - - # add creator data - creator_data = {"creator": self.__class__.__name__} - self.data.update(creator_data) - write_data.update(creator_data) - - if self.presets.get('fpath_template'): - self.log.info("Adding template path from preset") - write_data.update( - {"fpath_template": self.presets["fpath_template"]} - ) - else: - self.log.info("Adding template path from plugin") - write_data.update({ - "fpath_template": - ("{work}/" + self.family + "s/nuke/{subset}" - "/{subset}.{frame}.{ext}")}) - - write_node = self._create_write_node(selected_node, - inputs, outputs, - write_data) - - # relinking to collected connections - for i, input in enumerate(inputs): - write_node.setInput(i, input) - - write_node.autoplace() - - for output in outputs: - output.setInput(0, write_node) - - write_node = self._modify_write_node(write_node) - - return write_node - def _create_write_node(self, selected_node, inputs, outputs, write_data): # add reformat node to cut off all outside of format bounding box # get width and height diff --git a/openpype/hosts/nuke/plugins/create/create_write_still.py b/openpype/hosts/nuke/plugins/create/create_write_still.py index 3361bc2602..d22b5eab3f 100644 --- a/openpype/hosts/nuke/plugins/create/create_write_still.py +++ b/openpype/hosts/nuke/plugins/create/create_write_still.py @@ -1,10 +1,10 @@ import nuke +from openpype.hosts.nuke.api import plugin from openpype.hosts.nuke.api.lib import create_write_node -from openpype.hosts.nuke.plugins.create import create_write_render -class CreateWriteStill(create_write_render.CreateWriteRender): +class CreateWriteStill(plugin.AbstractWriteRender): # change this to template preset name = "WriteStillFrame" label = "Create Write Still Image" From 37e43de5dee0418dc95d5135aeb81d6567fcbd14 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 22 Apr 2022 16:24:35 +0200 Subject: [PATCH 239/357] updated create folders action to use task as dictionary --- .../action_create_folders.py | 180 +++++++++++------- 1 file changed, 
116 insertions(+), 64 deletions(-) diff --git a/openpype/modules/ftrack/event_handlers_user/action_create_folders.py b/openpype/modules/ftrack/event_handlers_user/action_create_folders.py index 0ed12bd03e..8104818195 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_create_folders.py +++ b/openpype/modules/ftrack/event_handlers_user/action_create_folders.py @@ -1,6 +1,8 @@ import os -from openpype_modules.ftrack.lib import BaseAction, statics_icon +import collections +import copy from openpype.api import Anatomy +from openpype_modules.ftrack.lib import BaseAction, statics_icon class CreateFolders(BaseAction): @@ -86,10 +88,19 @@ class CreateFolders(BaseAction): 'message': 'Nothing was created' } - all_entities = [] - all_entities.append(entity) - if with_childrens: - all_entities = self.get_notask_children(entity) + task_entities = [] + other_entities = [] + self.get_all_entities( + session, entities, task_entities, other_entities + ) + hierarchy = self.get_entities_hierarchy( + session, task_entities, other_entities + ) + task_types = session.query("select id, name from Type").all() + task_type_names_by_id = { + task_type["id"]: task_type["name"] + for task_type in task_types + } anatomy = Anatomy(project_name) @@ -97,77 +108,67 @@ class CreateFolders(BaseAction): work_template = anatomy.templates for key in work_keys: work_template = work_template[key] - work_has_apps = "{app" in work_template publish_keys = ["publish", "folder"] publish_template = anatomy.templates for key in publish_keys: publish_template = publish_template[key] - publish_has_apps = "{app" in publish_template + + project_data = { + "project": { + "name": project_name, + "code": project_code + } + } collected_paths = [] - for entity in all_entities: - if entity.entity_type.lower() == "project": - continue - ent_data = { - "project": { - "name": project_name, - "code": project_code - } - } + for item in hierarchy: + parent_entity, task_entities = item - ent_data["asset"] = entity["name"] + parent_data = copy.deepcopy(project_data) - parents = entity["link"][1:-1] + parents = parent_entity["link"][1:-1] hierarchy_names = [p["name"] for p in parents] - hierarchy = "" + hierarchy = "/".join(hierarchy_names) + if hierarchy_names: - hierarchy = os.path.sep.join(hierarchy_names) - ent_data["hierarchy"] = hierarchy + parent_name = hierarchy_names[-1] + else: + parent_name = project_name - tasks_created = False - for child in entity["children"]: - if child["object_type"]["name"].lower() != "task": - continue - tasks_created = True - task_data = ent_data.copy() - task_data["task"] = child["name"] + parent_data.update({ + "asset": parent_entity["name"], + "hierarchy": hierarchy, + "parent": parent_name + }) - apps = [] - - # Template wok - if work_has_apps: - app_data = task_data.copy() - for app in apps: - app_data["app"] = app - collected_paths.append(self.compute_template( - anatomy, app_data, work_keys - )) - else: - collected_paths.append(self.compute_template( - anatomy, task_data, work_keys - )) - - # Template publish - if publish_has_apps: - app_data = task_data.copy() - for app in apps: - app_data["app"] = app - collected_paths.append(self.compute_template( - anatomy, app_data, publish_keys - )) - else: - collected_paths.append(self.compute_template( - anatomy, task_data, publish_keys - )) - - if not tasks_created: + if not task_entities: # create path for entity collected_paths.append(self.compute_template( - anatomy, ent_data, work_keys + anatomy, parent_data, work_keys )) 
collected_paths.append(self.compute_template( - anatomy, ent_data, publish_keys + anatomy, parent_data, publish_keys + )) + continue + + for task_entity in task_entities: + task_type_id = task_entity["type_id"] + task_type_name = task_type_names_by_id[task_type_id] + task_data = copy.deepcopy(parent_data) + task_data["task"] = { + "name": task_entity["name"], + "type": task_type_name + } + + # Template wok + collected_paths.append(self.compute_template( + anatomy, task_data, work_keys + )) + + # Template publish + collected_paths.append(self.compute_template( + anatomy, task_data, publish_keys )) if len(collected_paths) == 0: @@ -188,14 +189,65 @@ class CreateFolders(BaseAction): "message": "Successfully created project folders." } - def get_notask_children(self, entity): + def get_all_entities( + self, session, entities, task_entities, other_entities + ): + if not entities: + return + + no_task_entities = [] + for entity in entities: + if entity.entity_type.lower() == "task": + task_entities.append(entity) + else: + no_task_entities.append(entity) + + if not no_task_entities: + return task_entities + + other_entities.extend(no_task_entities) + + no_task_entity_ids = [entity["id"] for entity in no_task_entities] + next_entities = session.query(( + "select id, object_type_id, parent_id" + " from TypedContext where parent_id in ({})" + ).format(self.join_query_keys(no_task_entity_ids))).all() + + self.get_all_entities( + session, next_entities, task_entities, other_entities + ) + + def get_entities_hierarchy(self, session, task_entities, other_entities): + task_entity_ids = [entity["id"] for entity in task_entities] + full_task_entities = session.query(( + "select id, name, type_id, parent_id" + " from TypedContext where id in ({})" + ).format(self.join_query_keys(task_entity_ids))) + task_entities_by_parent_id = collections.defaultdict(list) + for entity in full_task_entities: + parent_id = entity["parent_id"] + task_entities_by_parent_id[parent_id].append(entity) + output = [] - if entity.entity_type.lower() == "task": + if not task_entities_by_parent_id: return output - output.append(entity) - for child in entity["children"]: - output.extend(self.get_notask_children(child)) + other_ids = set() + for entity in other_entities: + other_ids.add(entity["id"]) + other_ids |= set(task_entities_by_parent_id.keys()) + + parent_entities = session.query(( + "select id, name from TypedContext where id in ({})" + ).format(self.join_query_keys(other_ids))).all() + + for parent_entity in parent_entities: + parent_id = parent_entity["id"] + output.append(( + parent_entity, + task_entities_by_parent_id[parent_id] + )) + return output def compute_template(self, anatomy, data, anatomy_keys): From 730f79a316a41d9b8c0e4b70fc3934c26a3790fa Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 22 Apr 2022 16:32:48 +0200 Subject: [PATCH 240/357] OP-2765 - render creator can change context --- openpype/hosts/aftereffects/plugins/create/create_render.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/aftereffects/plugins/create/create_render.py b/openpype/hosts/aftereffects/plugins/create/create_render.py index c43ada84b5..8bddbb2e99 100644 --- a/openpype/hosts/aftereffects/plugins/create/create_render.py +++ b/openpype/hosts/aftereffects/plugins/create/create_render.py @@ -16,7 +16,7 @@ class RenderCreator(Creator): family = "render" description = "Render creator" - create_allow_context_change = False + create_allow_context_change = True def get_icon(self): return 
resources.get_openpype_splash_filepath() From f20551854f639c758cdce1f187ca3b88c70d4f11 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 22 Apr 2022 16:56:57 +0200 Subject: [PATCH 241/357] change default app name hack --- .../plugins/control_job/perjob/m50__openpype_publish_render.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/modules/royalrender/rr_root/plugins/control_job/perjob/m50__openpype_publish_render.py b/openpype/modules/royalrender/rr_root/plugins/control_job/perjob/m50__openpype_publish_render.py index 82a79daf3b..7f5b514253 100644 --- a/openpype/modules/royalrender/rr_root/plugins/control_job/perjob/m50__openpype_publish_render.py +++ b/openpype/modules/royalrender/rr_root/plugins/control_job/perjob/m50__openpype_publish_render.py @@ -119,7 +119,7 @@ class OpenPypeContextSelector: # app names and versions, but since app_name is not used # currently down the line (but it is required by OP publish command # right now). - self.context["app_name"] = "maya/2020" + self.context["app_name"] = "celaction/local" return True @staticmethod From 6968d2fdfdd293dd757dc0cddaed7aa4ec2bd37d Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 22 Apr 2022 17:04:20 +0200 Subject: [PATCH 242/357] allow multiselection --- .../action_create_folders.py | 106 ++++++++++-------- 1 file changed, 59 insertions(+), 47 deletions(-) diff --git a/openpype/modules/ftrack/event_handlers_user/action_create_folders.py b/openpype/modules/ftrack/event_handlers_user/action_create_folders.py index 8104818195..81f38e0c39 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_create_folders.py +++ b/openpype/modules/ftrack/event_handlers_user/action_create_folders.py @@ -11,55 +11,59 @@ class CreateFolders(BaseAction): icon = statics_icon("ftrack", "action_icons", "CreateFolders.svg") def discover(self, session, entities, event): - if len(entities) != 1: - return False - - not_allowed = ["assetversion", "project"] - if entities[0].entity_type.lower() in not_allowed: - return False - - return True + for entity_item in event["data"]["selection"]: + if entity_item.get("entityType").lower() in ("task", "show"): + return True + return False def interface(self, session, entities, event): if event["data"].get("values", {}): return - entity = entities[0] - without_interface = True - for child in entity["children"]: - if child["object_type"]["name"].lower() != "task": - without_interface = False + + with_interface = False + for entity in entities: + if entity.entity_type.lower() != "task": + with_interface = True break - self.without_interface = without_interface - if without_interface: + + if "values" not in event["data"]: + event["data"]["values"] = {} + + event["data"]["values"]["with_interface"] = with_interface + if not with_interface: return + title = "Create folders" entity_name = entity["name"] msg = ( "

    Do you want create folders also" - " for all children of \"{}\"?

    " + " for all children of your selection?" ) if entity.entity_type.lower() == "project": entity_name = entity["full_name"] msg = msg.replace(" also", "") msg += "

    (Project root won't be created if not checked)

    " - items = [] - item_msg = { - "type": "label", - "value": msg.format(entity_name) - } - item_label = { - "type": "label", - "value": "With all chilren entities" - } - item = { - "name": "children_included", - "type": "boolean", - "value": False - } - items.append(item_msg) - items.append(item_label) - items.append(item) + items = [ + { + "type": "label", + "value": msg.format(entity_name) + }, + { + "type": "label", + "value": "With all chilren entities" + }, + { + "name": "children_included", + "type": "boolean", + "value": False + }, + { + "type": "hidden", + "name": "with_interface", + "value": with_interface + } + ] return { "items": items, @@ -68,26 +72,34 @@ class CreateFolders(BaseAction): def launch(self, session, entities, event): '''Callback method for custom action.''' + + if "values" not in event["data"]: + return + + with_interface = event["data"]["values"]["with_interface"] with_childrens = True - if self.without_interface is False: - if "values" not in event["data"]: - return + if with_interface: with_childrens = event["data"]["values"]["children_included"] - entity = entities[0] - if entity.entity_type.lower() == "project": - proj = entity - else: - proj = entity["project"] - project_name = proj["full_name"] - project_code = proj["name"] + filtered_entities = [] + for entity in entities: + low_context_type = entity["context_type"].lower() + if low_context_type in ("task", "show"): + if not with_childrens and low_context_type == "show": + continue + filtered_entities.append(entity) - if entity.entity_type.lower() == 'project' and with_childrens is False: + if not filtered_entities: return { - 'success': True, - 'message': 'Nothing was created' + "success": True, + "message": 'Nothing was created' } + project_entity = self.get_project_from_entity(filtered_entities[0]) + + project_name = project_entity["full_name"] + project_code = project_entity["name"] + task_entities = [] other_entities = [] self.get_all_entities( @@ -209,7 +221,7 @@ class CreateFolders(BaseAction): no_task_entity_ids = [entity["id"] for entity in no_task_entities] next_entities = session.query(( - "select id, object_type_id, parent_id" + "select id, parent_id" " from TypedContext where parent_id in ({})" ).format(self.join_query_keys(no_task_entity_ids))).all() From 015d0b3e15d4144f79414c77631db85d271ed0d0 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 22 Apr 2022 17:06:38 +0200 Subject: [PATCH 243/357] removed unused imports --- .../event_handlers_user/action_create_project_structure.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/openpype/modules/ftrack/event_handlers_user/action_create_project_structure.py b/openpype/modules/ftrack/event_handlers_user/action_create_project_structure.py index 94f359c317..ebea8872f9 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_create_project_structure.py +++ b/openpype/modules/ftrack/event_handlers_user/action_create_project_structure.py @@ -1,6 +1,4 @@ -import os import re -import json from openpype_modules.ftrack.lib import BaseAction, statics_icon from openpype.api import get_project_basic_paths, create_project_folders From d4bc73cad4e9428e8e817d2f59bffe620316caeb Mon Sep 17 00:00:00 2001 From: Milan Date: Fri, 22 Apr 2022 17:10:51 +0200 Subject: [PATCH 244/357] Add @mkolar as a contributor --- .all-contributorsrc | 26 ++++++++++++++++++++++++++ README.md | 23 +++++++++++++++++++++++ 2 files changed, 49 insertions(+) create mode 100644 .all-contributorsrc diff --git a/.all-contributorsrc b/.all-contributorsrc new file mode 100644 
index 0000000000..c5e3d06746 --- /dev/null +++ b/.all-contributorsrc @@ -0,0 +1,26 @@ +{ + "projectName": "OpenPype", + "projectOwner": "pypeclub", + "repoType": "github", + "repoHost": "https://github.com", + "files": [ + "README.md" + ], + "imageSize": 80, + "commit": true, + "commitConvention": "none", + "contributors": [ + { + "login": "mkolar", + "name": "Milan Kolar", + "avatar_url": "https://avatars.githubusercontent.com/u/3333008?v=4", + "profile": "http://pype.club/", + "contributions": [ + "code", + "doc", + "infra" + ] + } + ], + "contributorsPerLine": 7 +} diff --git a/README.md b/README.md index 0e450fc48d..5f3b98a339 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,7 @@ + +[![All Contributors](https://img.shields.io/badge/all_contributors-1-orange.svg?style=flat-square)](#contributors-) + OpenPype ==== @@ -283,3 +286,23 @@ Running tests To run tests, execute `.\tools\run_tests(.ps1|.sh)`. **Note that it needs existing virtual environment.** + +## Contributors ✨ + +Thanks goes to these wonderful people ([emoji key](https://allcontributors.org/docs/en/emoji-key)): + + + + + + + + +

    Milan Kolar

    💻 📖 🚇
    + + + + + + +This project follows the [all-contributors](https://github.com/all-contributors/all-contributors) specification. Contributions of any kind welcome! \ No newline at end of file From 832c2ab7eb1b9ebd8bff392943fdad080229dded Mon Sep 17 00:00:00 2001 From: Milan Date: Fri, 22 Apr 2022 17:11:05 +0200 Subject: [PATCH 245/357] Add @antirotor as a contributor --- .all-contributorsrc | 11 +++++++++++ README.md | 3 ++- 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/.all-contributorsrc b/.all-contributorsrc index c5e3d06746..1ca62667ee 100644 --- a/.all-contributorsrc +++ b/.all-contributorsrc @@ -20,6 +20,17 @@ "doc", "infra" ] + }, + { + "login": "antirotor", + "name": "Ondřej Samohel", + "avatar_url": "https://avatars.githubusercontent.com/u/33513211?v=4", + "profile": "https://github.com/antirotor", + "contributions": [ + "code", + "doc", + "infra" + ] } ], "contributorsPerLine": 7 diff --git a/README.md b/README.md index 5f3b98a339..4ff733eea5 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ -[![All Contributors](https://img.shields.io/badge/all_contributors-1-orange.svg?style=flat-square)](#contributors-) +[![All Contributors](https://img.shields.io/badge/all_contributors-2-orange.svg?style=flat-square)](#contributors-) OpenPype ==== @@ -297,6 +297,7 @@ Thanks goes to these wonderful people ([emoji key](https://allcontributors.org/d +

    Milan Kolar

    💻 📖 🚇

    Ondřej Samohel

    💻 📖 🚇
    From 6e1cc50c0dbf10235df2af3b9a3c1b5bd5e86628 Mon Sep 17 00:00:00 2001 From: Milan Date: Fri, 22 Apr 2022 17:11:14 +0200 Subject: [PATCH 246/357] Add @iLLiCiTiT as a contributor --- .all-contributorsrc | 11 +++++++++++ README.md | 3 ++- 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/.all-contributorsrc b/.all-contributorsrc index 1ca62667ee..15c43a502e 100644 --- a/.all-contributorsrc +++ b/.all-contributorsrc @@ -31,6 +31,17 @@ "doc", "infra" ] + }, + { + "login": "iLLiCiTiT", + "name": "Jakub Trllo", + "avatar_url": "https://avatars.githubusercontent.com/u/43494761?v=4", + "profile": "https://github.com/iLLiCiTiT", + "contributions": [ + "code", + "doc", + "infra" + ] } ], "contributorsPerLine": 7 diff --git a/README.md b/README.md index 4ff733eea5..01ca3f26c7 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ -[![All Contributors](https://img.shields.io/badge/all_contributors-2-orange.svg?style=flat-square)](#contributors-) +[![All Contributors](https://img.shields.io/badge/all_contributors-3-orange.svg?style=flat-square)](#contributors-) OpenPype ==== @@ -298,6 +298,7 @@ Thanks goes to these wonderful people ([emoji key](https://allcontributors.org/d
    Milan Kolar

    💻 📖 🚇
    Ondřej Samohel

    💻 📖 🚇 +
    Jakub Trllo

    💻 📖 🚇 From 912af06a94d741bb2c7b6c7eb99f4e67bd057411 Mon Sep 17 00:00:00 2001 From: Milan Date: Fri, 22 Apr 2022 17:12:47 +0200 Subject: [PATCH 247/357] Update @mkolar as a contributor --- .all-contributorsrc | 9 ++++++++- README.md | 2 +- 2 files changed, 9 insertions(+), 2 deletions(-) diff --git a/.all-contributorsrc b/.all-contributorsrc index 15c43a502e..1a2287b39d 100644 --- a/.all-contributorsrc +++ b/.all-contributorsrc @@ -18,7 +18,14 @@ "contributions": [ "code", "doc", - "infra" + "infra", + "business", + "content", + "fundingFinding", + "ideas", + "maintenance", + "projectManagement", + "review" ] }, { diff --git a/README.md b/README.md index 01ca3f26c7..157cba8391 100644 --- a/README.md +++ b/README.md @@ -296,7 +296,7 @@ Thanks goes to these wonderful people ([emoji key](https://allcontributors.org/d - + From 2bf0a76898c494ed4c527172a865a31038fcd200 Mon Sep 17 00:00:00 2001 From: Milan Date: Fri, 22 Apr 2022 17:15:27 +0200 Subject: [PATCH 248/357] Add @jakubjezek001 as a contributor --- .all-contributorsrc | 23 +++++++++++++++++++---- README.md | 9 +++++---- 2 files changed, 24 insertions(+), 8 deletions(-) diff --git a/.all-contributorsrc b/.all-contributorsrc index 1a2287b39d..bbc613aa1a 100644 --- a/.all-contributorsrc +++ b/.all-contributorsrc @@ -22,10 +22,10 @@ "business", "content", "fundingFinding", - "ideas", "maintenance", "projectManagement", - "review" + "review", + "mentoring" ] }, { @@ -36,7 +36,11 @@ "contributions": [ "code", "doc", - "infra" + "infra", + "content", + "review", + "maintenance", + "mentoring" ] }, { @@ -47,7 +51,18 @@ "contributions": [ "code", "doc", - "infra" + "infra", + "review", + "maintenance" + ] + }, + { + "login": "jakubjezek001", + "name": "Jakub Ježek", + "avatar_url": "https://avatars.githubusercontent.com/u/40640033?v=4", + "profile": "https://www.linkedin.com/in/jakubjezek79", + "contributions": [ + "code" ] } ], diff --git a/README.md b/README.md index 157cba8391..b313b85da5 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ -[![All Contributors](https://img.shields.io/badge/all_contributors-3-orange.svg?style=flat-square)](#contributors-) +[![All Contributors](https://img.shields.io/badge/all_contributors-4-orange.svg?style=flat-square)](#contributors-) OpenPype ==== @@ -296,9 +296,10 @@ Thanks goes to these wonderful people ([emoji key](https://allcontributors.org/d

    Milan Kolar

    💻 📖 🚇

    Milan Kolar

    💻 📖 🚇 💼 🖋 🔍 🤔 🚧 📆 👀

    Ondřej Samohel

    💻 📖 🚇

    Jakub Trllo

    💻 📖 🚇
    - - - + + + +

    Milan Kolar

    💻 📖 🚇 💼 🖋 🔍 🤔 🚧 📆 👀

    Ondřej Samohel

    💻 📖 🚇

    Jakub Trllo

    💻 📖 🚇

    Milan Kolar

    💻 📖 🚇 💼 🖋 🔍 🚧 📆 👀 🧑‍🏫

    Ondřej Samohel

    💻 📖 🚇 🖋 👀 🚧 🧑‍🏫

    Jakub Trllo

    💻 📖 🚇 👀 🚧

    Jakub Ježek

    💻
    From 2709e0c86db952936e30ae097c10f35ad42d051f Mon Sep 17 00:00:00 2001 From: Milan Date: Fri, 22 Apr 2022 17:16:39 +0200 Subject: [PATCH 249/357] Add @kalisp as a contributor --- .all-contributorsrc | 9 +++++++++ README.md | 3 ++- 2 files changed, 11 insertions(+), 1 deletion(-) diff --git a/.all-contributorsrc b/.all-contributorsrc index bbc613aa1a..a52e278a1d 100644 --- a/.all-contributorsrc +++ b/.all-contributorsrc @@ -64,6 +64,15 @@ "contributions": [ "code" ] + }, + { + "login": "kalisp", + "name": "Petr Kalis", + "avatar_url": "https://avatars.githubusercontent.com/u/4457962?v=4", + "profile": "https://github.com/kalisp", + "contributions": [ + "code" + ] } ], "contributorsPerLine": 7 diff --git a/README.md b/README.md index b313b85da5..cc60f9a194 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ -[![All Contributors](https://img.shields.io/badge/all_contributors-4-orange.svg?style=flat-square)](#contributors-) +[![All Contributors](https://img.shields.io/badge/all_contributors-5-orange.svg?style=flat-square)](#contributors-) OpenPype ==== @@ -300,6 +300,7 @@ Thanks goes to these wonderful people ([emoji key](https://allcontributors.org/d
    Ondřej Samohel

    💻 📖 🚇 🖋 👀 🚧 🧑‍🏫
    Jakub Trllo

    💻 📖 🚇 👀 🚧
    Jakub Ježek

    💻 +
    Petr Kalis

    💻 From 8be18076f611d79a0761d3a2aa57621dc8c9f358 Mon Sep 17 00:00:00 2001 From: Milan Date: Fri, 22 Apr 2022 17:16:49 +0200 Subject: [PATCH 250/357] Add @aardschok as a contributor --- .all-contributorsrc | 9 +++++++++ README.md | 3 ++- 2 files changed, 11 insertions(+), 1 deletion(-) diff --git a/.all-contributorsrc b/.all-contributorsrc index a52e278a1d..422a3112f8 100644 --- a/.all-contributorsrc +++ b/.all-contributorsrc @@ -73,6 +73,15 @@ "contributions": [ "code" ] + }, + { + "login": "aardschok", + "name": "Wijnand Koreman", + "avatar_url": "https://avatars.githubusercontent.com/u/26920875?v=4", + "profile": "https://github.com/aardschok", + "contributions": [ + "code" + ] } ], "contributorsPerLine": 7 diff --git a/README.md b/README.md index cc60f9a194..2272882092 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ -[![All Contributors](https://img.shields.io/badge/all_contributors-5-orange.svg?style=flat-square)](#contributors-) +[![All Contributors](https://img.shields.io/badge/all_contributors-6-orange.svg?style=flat-square)](#contributors-) OpenPype ==== @@ -301,6 +301,7 @@ Thanks goes to these wonderful people ([emoji key](https://allcontributors.org/d
    Jakub Trllo

    💻 📖 🚇 👀 🚧
    Jakub Ježek

    💻
    Petr Kalis

    💻 +
    Wijnand Koreman

    💻 From 258af755f4872a0a26c4f991a0e799897c7552f2 Mon Sep 17 00:00:00 2001 From: Milan Date: Fri, 22 Apr 2022 17:16:57 +0200 Subject: [PATCH 251/357] Add @BigRoy as a contributor --- .all-contributorsrc | 9 +++++++++ README.md | 3 ++- 2 files changed, 11 insertions(+), 1 deletion(-) diff --git a/.all-contributorsrc b/.all-contributorsrc index 422a3112f8..ed0a4d729c 100644 --- a/.all-contributorsrc +++ b/.all-contributorsrc @@ -82,6 +82,15 @@ "contributions": [ "code" ] + }, + { + "login": "BigRoy", + "name": "Roy Nieterau", + "avatar_url": "https://avatars.githubusercontent.com/u/2439881?v=4", + "profile": "http://www.colorbleed.nl/", + "contributions": [ + "code" + ] } ], "contributorsPerLine": 7 diff --git a/README.md b/README.md index 2272882092..df652f97e7 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ -[![All Contributors](https://img.shields.io/badge/all_contributors-6-orange.svg?style=flat-square)](#contributors-) +[![All Contributors](https://img.shields.io/badge/all_contributors-7-orange.svg?style=flat-square)](#contributors-) OpenPype ==== @@ -302,6 +302,7 @@ Thanks goes to these wonderful people ([emoji key](https://allcontributors.org/d
    Jakub Ježek

    💻
    Petr Kalis

    💻
    Wijnand Koreman

    💻 +
    Roy Nieterau

    💻 From b8f8f3ca1946893193c1dfafe570d1dbb3521c67 Mon Sep 17 00:00:00 2001 From: Milan Date: Fri, 22 Apr 2022 17:17:35 +0200 Subject: [PATCH 252/357] Add @tokejepsen as a contributor --- .all-contributorsrc | 9 +++++++++ README.md | 5 ++++- 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/.all-contributorsrc b/.all-contributorsrc index ed0a4d729c..793b03cc78 100644 --- a/.all-contributorsrc +++ b/.all-contributorsrc @@ -91,6 +91,15 @@ "contributions": [ "code" ] + }, + { + "login": "tokejepsen", + "name": "Toke Jepsen", + "avatar_url": "https://avatars.githubusercontent.com/u/1860085?v=4", + "profile": "https://github.com/tokejepsen", + "contributions": [ + "code" + ] } ], "contributorsPerLine": 7 diff --git a/README.md b/README.md index df652f97e7..d86ec93ae8 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ -[![All Contributors](https://img.shields.io/badge/all_contributors-7-orange.svg?style=flat-square)](#contributors-) +[![All Contributors](https://img.shields.io/badge/all_contributors-8-orange.svg?style=flat-square)](#contributors-) OpenPype ==== @@ -304,6 +304,9 @@ Thanks goes to these wonderful people ([emoji key](https://allcontributors.org/d
    Wijnand Koreman

    💻
    Roy Nieterau

    💻 + +
    Toke Jepsen

    💻 + From 8433d789f1cdb7130278df5c4fefe9bc188bdc69 Mon Sep 17 00:00:00 2001 From: Milan Date: Fri, 22 Apr 2022 17:17:46 +0200 Subject: [PATCH 253/357] Add @simonebarbieri as a contributor --- .all-contributorsrc | 9 +++++++++ README.md | 3 ++- 2 files changed, 11 insertions(+), 1 deletion(-) diff --git a/.all-contributorsrc b/.all-contributorsrc index 793b03cc78..1e800ad238 100644 --- a/.all-contributorsrc +++ b/.all-contributorsrc @@ -100,6 +100,15 @@ "contributions": [ "code" ] + }, + { + "login": "simonebarbieri", + "name": "Simone Barbieri", + "avatar_url": "https://avatars.githubusercontent.com/u/1087869?v=4", + "profile": "https://barbierisimone.com/", + "contributions": [ + "code" + ] } ], "contributorsPerLine": 7 diff --git a/README.md b/README.md index d86ec93ae8..b7bc0651c5 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ -[![All Contributors](https://img.shields.io/badge/all_contributors-8-orange.svg?style=flat-square)](#contributors-) +[![All Contributors](https://img.shields.io/badge/all_contributors-9-orange.svg?style=flat-square)](#contributors-) OpenPype ==== @@ -306,6 +306,7 @@ Thanks goes to these wonderful people ([emoji key](https://allcontributors.org/d
    Toke Jepsen

    💻 +
    Simone Barbieri

    💻 From 37e29dd7ae7ed16b3eeabf16a214812a82e7a9ce Mon Sep 17 00:00:00 2001 From: Milan Date: Fri, 22 Apr 2022 17:17:52 +0200 Subject: [PATCH 254/357] Add @karimmozilla as a contributor --- .all-contributorsrc | 9 +++++++++ README.md | 3 ++- 2 files changed, 11 insertions(+), 1 deletion(-) diff --git a/.all-contributorsrc b/.all-contributorsrc index 1e800ad238..6b7d654848 100644 --- a/.all-contributorsrc +++ b/.all-contributorsrc @@ -109,6 +109,15 @@ "contributions": [ "code" ] + }, + { + "login": "karimmozilla", + "name": "karimmozilla", + "avatar_url": "https://avatars.githubusercontent.com/u/82811760?v=4", + "profile": "http://karimmozilla.xyz/", + "contributions": [ + "code" + ] } ], "contributorsPerLine": 7 diff --git a/README.md b/README.md index b7bc0651c5..8c41ce69a9 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ -[![All Contributors](https://img.shields.io/badge/all_contributors-9-orange.svg?style=flat-square)](#contributors-) +[![All Contributors](https://img.shields.io/badge/all_contributors-10-orange.svg?style=flat-square)](#contributors-) OpenPype ==== @@ -307,6 +307,7 @@ Thanks goes to these wonderful people ([emoji key](https://allcontributors.org/d
    Toke Jepsen

    💻
    Simone Barbieri

    💻 +
    karimmozilla

    💻 From f70e67feb6200d5f56359410794cf3c3e7c1d122 Mon Sep 17 00:00:00 2001 From: Milan Date: Fri, 22 Apr 2022 17:17:59 +0200 Subject: [PATCH 255/357] Add @zhoub as a contributor --- .all-contributorsrc | 9 +++++++++ README.md | 3 ++- 2 files changed, 11 insertions(+), 1 deletion(-) diff --git a/.all-contributorsrc b/.all-contributorsrc index 6b7d654848..cbcafe14d2 100644 --- a/.all-contributorsrc +++ b/.all-contributorsrc @@ -118,6 +118,15 @@ "contributions": [ "code" ] + }, + { + "login": "zhoub", + "name": "Bo Zhou", + "avatar_url": "https://avatars.githubusercontent.com/u/1798206?v=4", + "profile": "http://jedimaster.cnblogs.com/", + "contributions": [ + "code" + ] } ], "contributorsPerLine": 7 diff --git a/README.md b/README.md index 8c41ce69a9..ad6dfb52fb 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ -[![All Contributors](https://img.shields.io/badge/all_contributors-10-orange.svg?style=flat-square)](#contributors-) +[![All Contributors](https://img.shields.io/badge/all_contributors-11-orange.svg?style=flat-square)](#contributors-) OpenPype ==== @@ -308,6 +308,7 @@ Thanks goes to these wonderful people ([emoji key](https://allcontributors.org/d
    Toke Jepsen

    💻
    Simone Barbieri

    💻
    karimmozilla

    💻 +
    Bo Zhou

    💻 From 35ae3e08d07dd274de30833405ac0bacbd973923 Mon Sep 17 00:00:00 2001 From: Milan Date: Fri, 22 Apr 2022 17:24:22 +0200 Subject: [PATCH 256/357] Add @ClementHector as a contributor --- .all-contributorsrc | 43 ++++++++++++++++++++++++++++++++----------- README.md | 9 +++++---- 2 files changed, 37 insertions(+), 15 deletions(-) diff --git a/.all-contributorsrc b/.all-contributorsrc index cbcafe14d2..4a21a7d3fb 100644 --- a/.all-contributorsrc +++ b/.all-contributorsrc @@ -28,6 +28,22 @@ "mentoring" ] }, + { + "login": "jakubjezek001", + "name": "Jakub Ježek", + "avatar_url": "https://avatars.githubusercontent.com/u/40640033?v=4", + "profile": "https://www.linkedin.com/in/jakubjezek79", + "contributions": [ + "code", + "doc", + "infra", + "content", + "review", + "maintenance", + "mentoring", + "projectManagement" + ] + }, { "login": "antirotor", "name": "Ondřej Samohel", @@ -40,7 +56,8 @@ "content", "review", "maintenance", - "mentoring" + "mentoring", + "projectManagement" ] }, { @@ -56,22 +73,17 @@ "maintenance" ] }, - { - "login": "jakubjezek001", - "name": "Jakub Ježek", - "avatar_url": "https://avatars.githubusercontent.com/u/40640033?v=4", - "profile": "https://www.linkedin.com/in/jakubjezek79", - "contributions": [ - "code" - ] - }, { "login": "kalisp", "name": "Petr Kalis", "avatar_url": "https://avatars.githubusercontent.com/u/4457962?v=4", "profile": "https://github.com/kalisp", "contributions": [ - "code" + "code", + "doc", + "infra", + "review", + "maintenance" ] }, { @@ -127,6 +139,15 @@ "contributions": [ "code" ] + }, + { + "login": "ClementHector", + "name": "Clément Hector", + "avatar_url": "https://avatars.githubusercontent.com/u/7068597?v=4", + "profile": "https://www.linkedin.com/in/clementhector/", + "contributions": [ + "code" + ] } ], "contributorsPerLine": 7 diff --git a/README.md b/README.md index ad6dfb52fb..83e117ea40 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ -[![All Contributors](https://img.shields.io/badge/all_contributors-11-orange.svg?style=flat-square)](#contributors-) +[![All Contributors](https://img.shields.io/badge/all_contributors-12-orange.svg?style=flat-square)](#contributors-) OpenPype ==== @@ -297,10 +297,10 @@ Thanks goes to these wonderful people ([emoji key](https://allcontributors.org/d - + + - - + @@ -309,6 +309,7 @@ Thanks goes to these wonderful people ([emoji key](https://allcontributors.org/d +

    Milan Kolar

    💻 📖 🚇 💼 🖋 🔍 🚧 📆 👀 🧑‍🏫

    Ondřej Samohel

    💻 📖 🚇 🖋 👀 🚧 🧑‍🏫

    Jakub Ježek

    💻 📖 🚇 🖋 👀 🚧 🧑‍🏫 📆

    Ondřej Samohel

    💻 📖 🚇 🖋 👀 🚧 🧑‍🏫 📆

    Jakub Trllo

    💻 📖 🚇 👀 🚧

    Jakub Ježek

    💻

    Petr Kalis

    💻

    Petr Kalis

    💻 📖 🚇 👀 🚧

    Wijnand Koreman

    💻

    Roy Nieterau

    💻

    Simone Barbieri

    💻

    karimmozilla

    💻

    Bo Zhou

    💻

    Clément Hector

    💻
    From c60dd2e75963f0aa185d3c2f8b0297f6341ecc2f Mon Sep 17 00:00:00 2001 From: Milan Date: Fri, 22 Apr 2022 17:24:38 +0200 Subject: [PATCH 257/357] Add @ClementHector as a contributor --- .all-contributorsrc | 30 ++++++++++++++++++++++-------- README.md | 16 ++++++++-------- 2 files changed, 30 insertions(+), 16 deletions(-) diff --git a/.all-contributorsrc b/.all-contributorsrc index 4a21a7d3fb..47d38fa911 100644 --- a/.all-contributorsrc +++ b/.all-contributorsrc @@ -25,7 +25,8 @@ "maintenance", "projectManagement", "review", - "mentoring" + "mentoring", + "question" ] }, { @@ -41,7 +42,8 @@ "review", "maintenance", "mentoring", - "projectManagement" + "projectManagement", + "question" ] }, { @@ -57,7 +59,8 @@ "review", "maintenance", "mentoring", - "projectManagement" + "projectManagement", + "question" ] }, { @@ -70,7 +73,8 @@ "doc", "infra", "review", - "maintenance" + "maintenance", + "question" ] }, { @@ -83,7 +87,8 @@ "doc", "infra", "review", - "maintenance" + "maintenance", + "question" ] }, { @@ -101,7 +106,11 @@ "avatar_url": "https://avatars.githubusercontent.com/u/2439881?v=4", "profile": "http://www.colorbleed.nl/", "contributions": [ - "code" + "code", + "doc", + "review", + "mentoring", + "question" ] }, { @@ -110,7 +119,11 @@ "avatar_url": "https://avatars.githubusercontent.com/u/1860085?v=4", "profile": "https://github.com/tokejepsen", "contributions": [ - "code" + "code", + "doc", + "review", + "mentoring", + "question" ] }, { @@ -119,7 +132,8 @@ "avatar_url": "https://avatars.githubusercontent.com/u/1087869?v=4", "profile": "https://barbierisimone.com/", "contributions": [ - "code" + "code", + "doc" ] }, { diff --git a/README.md b/README.md index 83e117ea40..06659f26d3 100644 --- a/README.md +++ b/README.md @@ -296,17 +296,17 @@ Thanks goes to these wonderful people ([emoji key](https://allcontributors.org/d - - - - - + + + + + - + - - + + From 60d8dafa30f111bfc5d6f964c3ae90243d8266c9 Mon Sep 17 00:00:00 2001 From: Milan Date: Fri, 22 Apr 2022 17:25:20 +0200 Subject: [PATCH 258/357] Add @davidlatwe as a contributor --- .all-contributorsrc | 13 ++++++++++++- README.md | 5 +++-- 2 files changed, 15 insertions(+), 3 deletions(-) diff --git a/.all-contributorsrc b/.all-contributorsrc index 47d38fa911..ac51161163 100644 --- a/.all-contributorsrc +++ b/.all-contributorsrc @@ -160,7 +160,18 @@ "avatar_url": "https://avatars.githubusercontent.com/u/7068597?v=4", "profile": "https://www.linkedin.com/in/clementhector/", "contributions": [ - "code" + "code", + "review" + ] + }, + { + "login": "davidlatwe", + "name": "David Lai", + "avatar_url": "https://avatars.githubusercontent.com/u/3357009?v=4", + "profile": "https://twitter.com/davidlatwe", + "contributions": [ + "code", + "review" ] } ], diff --git a/README.md b/README.md index 06659f26d3..7621bd5798 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ -[![All Contributors](https://img.shields.io/badge/all_contributors-12-orange.svg?style=flat-square)](#contributors-) +[![All Contributors](https://img.shields.io/badge/all_contributors-13-orange.svg?style=flat-square)](#contributors-) OpenPype ==== @@ -309,7 +309,8 @@ Thanks goes to these wonderful people ([emoji key](https://allcontributors.org/d - + +

    Milan Kolar

    💻 📖 🚇 💼 🖋 🔍 🚧 📆 👀 🧑‍🏫

    Jakub Ježek

    💻 📖 🚇 🖋 👀 🚧 🧑‍🏫 📆

    Ondřej Samohel

    💻 📖 🚇 🖋 👀 🚧 🧑‍🏫 📆

    Jakub Trllo

    💻 📖 🚇 👀 🚧

    Petr Kalis

    💻 📖 🚇 👀 🚧

    Milan Kolar

    💻 📖 🚇 💼 🖋 🔍 🚧 📆 👀 🧑‍🏫 💬

    Jakub Ježek

    💻 📖 🚇 🖋 👀 🚧 🧑‍🏫 📆 💬

    Ondřej Samohel

    💻 📖 🚇 🖋 👀 🚧 🧑‍🏫 📆 💬

    Jakub Trllo

    💻 📖 🚇 👀 🚧 💬

    Petr Kalis

    💻 📖 🚇 👀 🚧 💬

    Wijnand Koreman

    💻

    Roy Nieterau

    💻

    Roy Nieterau

    💻 📖 👀 🧑‍🏫 💬

    Toke Jepsen

    💻

    Simone Barbieri

    💻

    Toke Jepsen

    💻 📖 👀 🧑‍🏫 💬

    Simone Barbieri

    💻 📖

    karimmozilla

    💻

    Bo Zhou

    💻

    Clément Hector

    💻

    Simone Barbieri

    💻 📖

    karimmozilla

    💻

    Bo Zhou

    💻

    Clément Hector

    💻

    Clément Hector

    💻 👀

    David Lai

    💻 👀
    From f8ce0055dff275d4011ad97a8aa625e75d25a3b6 Mon Sep 17 00:00:00 2001 From: Milan Date: Fri, 22 Apr 2022 17:26:32 +0200 Subject: [PATCH 259/357] Add @2-REC as a contributor --- .all-contributorsrc | 10 ++++++++++ README.md | 3 ++- 2 files changed, 12 insertions(+), 1 deletion(-) diff --git a/.all-contributorsrc b/.all-contributorsrc index ac51161163..2dbd36c2ff 100644 --- a/.all-contributorsrc +++ b/.all-contributorsrc @@ -173,6 +173,16 @@ "code", "review" ] + }, + { + "login": "2-REC", + "name": "Derek ", + "avatar_url": "https://avatars.githubusercontent.com/u/42170307?v=4", + "profile": "https://github.com/2-REC", + "contributions": [ + "code", + "review" + ] } ], "contributorsPerLine": 7 diff --git a/README.md b/README.md index 7621bd5798..66189dd430 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ -[![All Contributors](https://img.shields.io/badge/all_contributors-13-orange.svg?style=flat-square)](#contributors-) +[![All Contributors](https://img.shields.io/badge/all_contributors-14-orange.svg?style=flat-square)](#contributors-) OpenPype ==== @@ -311,6 +311,7 @@ Thanks goes to these wonderful people ([emoji key](https://allcontributors.org/d
    Bo Zhou

    💻
    Clément Hector

    💻 👀
    David Lai

    💻 👀 +
    Derek

    💻 👀 From ad8e91dd918775eb1b3e05359e6ca02d137442a5 Mon Sep 17 00:00:00 2001 From: Milan Date: Fri, 22 Apr 2022 17:26:56 +0200 Subject: [PATCH 260/357] Add @Allan-I as a contributor --- .all-contributorsrc | 14 +++++++++++++- README.md | 7 +++++-- 2 files changed, 18 insertions(+), 3 deletions(-) diff --git a/.all-contributorsrc b/.all-contributorsrc index 2dbd36c2ff..32e664e0fc 100644 --- a/.all-contributorsrc +++ b/.all-contributorsrc @@ -181,7 +181,19 @@ "profile": "https://github.com/2-REC", "contributions": [ "code", - "review" + "review", + "doc" + ] + }, + { + "login": "Allan-I", + "name": "Allan I. A.", + "avatar_url": "https://avatars.githubusercontent.com/u/76656700?v=4", + "profile": "https://github.com/Allan-I", + "contributions": [ + "code", + "review", + "doc" ] } ], diff --git a/README.md b/README.md index 66189dd430..44d23e2039 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ -[![All Contributors](https://img.shields.io/badge/all_contributors-14-orange.svg?style=flat-square)](#contributors-) +[![All Contributors](https://img.shields.io/badge/all_contributors-15-orange.svg?style=flat-square)](#contributors-) OpenPype ==== @@ -311,7 +311,10 @@ Thanks goes to these wonderful people ([emoji key](https://allcontributors.org/d
    Bo Zhou

    💻
    Clément Hector

    💻 👀
    David Lai

    💻 👀 -
    Derek

    💻 👀 +
    Derek

    💻 👀 📖 + + +
    Allan I. A.

    💻 👀 📖 From b8c37088f4c70851d570d9363c5c5c608463ad32 Mon Sep 17 00:00:00 2001 From: Milan Date: Fri, 22 Apr 2022 17:28:35 +0200 Subject: [PATCH 261/357] Add @gabormarinov as a contributor --- .all-contributorsrc | 10 ++++++++++ README.md | 3 ++- 2 files changed, 12 insertions(+), 1 deletion(-) diff --git a/.all-contributorsrc b/.all-contributorsrc index 32e664e0fc..6564be1cd1 100644 --- a/.all-contributorsrc +++ b/.all-contributorsrc @@ -195,6 +195,16 @@ "review", "doc" ] + }, + { + "login": "gabormarinov", + "name": "Gábor Marinov", + "avatar_url": "https://avatars.githubusercontent.com/u/8620515?v=4", + "profile": "https://github.com/gabormarinov", + "contributions": [ + "code", + "doc" + ] } ], "contributorsPerLine": 7 diff --git a/README.md b/README.md index 44d23e2039..8da8fae349 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ -[![All Contributors](https://img.shields.io/badge/all_contributors-15-orange.svg?style=flat-square)](#contributors-) +[![All Contributors](https://img.shields.io/badge/all_contributors-16-orange.svg?style=flat-square)](#contributors-) OpenPype ==== @@ -315,6 +315,7 @@ Thanks goes to these wonderful people ([emoji key](https://allcontributors.org/d
    Allan I. A.

    💻 👀 📖 +
    Gábor Marinov

    💻 📖 From 43e29a3a2eb3c0ae9e7af7a16c4e10418eb8986d Mon Sep 17 00:00:00 2001 From: Milan Date: Fri, 22 Apr 2022 17:28:47 +0200 Subject: [PATCH 262/357] Add @gabormarinov as a contributor --- .all-contributorsrc | 4 +--- README.md | 2 +- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/.all-contributorsrc b/.all-contributorsrc index 6564be1cd1..4260b2fb4b 100644 --- a/.all-contributorsrc +++ b/.all-contributorsrc @@ -191,9 +191,7 @@ "avatar_url": "https://avatars.githubusercontent.com/u/76656700?v=4", "profile": "https://github.com/Allan-I", "contributions": [ - "code", - "review", - "doc" + "code" ] }, { diff --git a/README.md b/README.md index 8da8fae349..6524868bae 100644 --- a/README.md +++ b/README.md @@ -314,7 +314,7 @@ Thanks goes to these wonderful people ([emoji key](https://allcontributors.org/d
    Derek

    💻 👀 📖 -
    Allan I. A.

    💻 👀 📖 +
    Allan I. A.

    💻
    Gábor Marinov

    💻 📖 From 8dbdf655a986cfe5b7545a551c990e73b0ffd886 Mon Sep 17 00:00:00 2001 From: Milan Date: Fri, 22 Apr 2022 17:29:24 +0200 Subject: [PATCH 263/357] Add @icyvapor as a contributor --- .all-contributorsrc | 10 ++++++++++ README.md | 3 ++- 2 files changed, 12 insertions(+), 1 deletion(-) diff --git a/.all-contributorsrc b/.all-contributorsrc index 4260b2fb4b..fbca23716c 100644 --- a/.all-contributorsrc +++ b/.all-contributorsrc @@ -203,6 +203,16 @@ "code", "doc" ] + }, + { + "login": "icyvapor", + "name": "icyvapor", + "avatar_url": "https://avatars.githubusercontent.com/u/1195278?v=4", + "profile": "https://github.com/icyvapor", + "contributions": [ + "code", + "doc" + ] } ], "contributorsPerLine": 7 diff --git a/README.md b/README.md index 6524868bae..6f252f3035 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ -[![All Contributors](https://img.shields.io/badge/all_contributors-16-orange.svg?style=flat-square)](#contributors-) +[![All Contributors](https://img.shields.io/badge/all_contributors-17-orange.svg?style=flat-square)](#contributors-) OpenPype ==== @@ -316,6 +316,7 @@ Thanks goes to these wonderful people ([emoji key](https://allcontributors.org/d
    Allan I. A.

    💻
    Gábor Marinov

    💻 📖 +
    icyvapor

    💻 📖 From 8265882b22b907fdaf98a069a5d3952e4e36b1ea Mon Sep 17 00:00:00 2001 From: Milan Date: Fri, 22 Apr 2022 17:29:48 +0200 Subject: [PATCH 264/357] Add @jlorrain as a contributor --- .all-contributorsrc | 9 +++++++++ README.md | 3 ++- 2 files changed, 11 insertions(+), 1 deletion(-) diff --git a/.all-contributorsrc b/.all-contributorsrc index fbca23716c..13b7a627f7 100644 --- a/.all-contributorsrc +++ b/.all-contributorsrc @@ -213,6 +213,15 @@ "code", "doc" ] + }, + { + "login": "jlorrain", + "name": "Jérôme LORRAIN", + "avatar_url": "https://avatars.githubusercontent.com/u/7955673?v=4", + "profile": "https://github.com/jlorrain", + "contributions": [ + "code" + ] } ], "contributorsPerLine": 7 diff --git a/README.md b/README.md index 6f252f3035..d01308c843 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ -[![All Contributors](https://img.shields.io/badge/all_contributors-17-orange.svg?style=flat-square)](#contributors-) +[![All Contributors](https://img.shields.io/badge/all_contributors-18-orange.svg?style=flat-square)](#contributors-) OpenPype ==== @@ -317,6 +317,7 @@ Thanks goes to these wonderful people ([emoji key](https://allcontributors.org/d
    Allan I. A.

    💻
    Gábor Marinov

    💻 📖
    icyvapor

    💻 📖 +
    Jérôme LORRAIN

    💻 From 3377a9e05dc25c516846d448f1a0cd8d2551d3b1 Mon Sep 17 00:00:00 2001 From: Milan Date: Fri, 22 Apr 2022 17:30:12 +0200 Subject: [PATCH 265/357] Add @dmo-j-cube as a contributor --- .all-contributorsrc | 9 +++++++++ README.md | 3 ++- 2 files changed, 11 insertions(+), 1 deletion(-) diff --git a/.all-contributorsrc b/.all-contributorsrc index 13b7a627f7..1f969a61bb 100644 --- a/.all-contributorsrc +++ b/.all-contributorsrc @@ -222,6 +222,15 @@ "contributions": [ "code" ] + }, + { + "login": "dmo-j-cube", + "name": "David Morris-Oliveros", + "avatar_url": "https://avatars.githubusercontent.com/u/89823400?v=4", + "profile": "https://github.com/dmo-j-cube", + "contributions": [ + "code" + ] } ], "contributorsPerLine": 7 diff --git a/README.md b/README.md index d01308c843..e853001aec 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ -[![All Contributors](https://img.shields.io/badge/all_contributors-18-orange.svg?style=flat-square)](#contributors-) +[![All Contributors](https://img.shields.io/badge/all_contributors-19-orange.svg?style=flat-square)](#contributors-) OpenPype ==== @@ -318,6 +318,7 @@ Thanks goes to these wonderful people ([emoji key](https://allcontributors.org/d
    Gábor Marinov

    💻 📖
    icyvapor

    💻 📖
    Jérôme LORRAIN

    💻 +
    David Morris-Oliveros

    💻 From 25ca9c0e8aa4f5f38beb25ee2c9ab709e9c00deb Mon Sep 17 00:00:00 2001 From: Milan Date: Fri, 22 Apr 2022 17:30:29 +0200 Subject: [PATCH 266/357] Add @BenoitConnan as a contributor --- .all-contributorsrc | 9 +++++++++ README.md | 3 ++- 2 files changed, 11 insertions(+), 1 deletion(-) diff --git a/.all-contributorsrc b/.all-contributorsrc index 1f969a61bb..15e0dbca5b 100644 --- a/.all-contributorsrc +++ b/.all-contributorsrc @@ -231,6 +231,15 @@ "contributions": [ "code" ] + }, + { + "login": "BenoitConnan", + "name": "BenoitConnan", + "avatar_url": "https://avatars.githubusercontent.com/u/82808268?v=4", + "profile": "https://github.com/BenoitConnan", + "contributions": [ + "code" + ] } ], "contributorsPerLine": 7 diff --git a/README.md b/README.md index e853001aec..5778969ac1 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ -[![All Contributors](https://img.shields.io/badge/all_contributors-19-orange.svg?style=flat-square)](#contributors-) +[![All Contributors](https://img.shields.io/badge/all_contributors-20-orange.svg?style=flat-square)](#contributors-) OpenPype ==== @@ -319,6 +319,7 @@ Thanks goes to these wonderful people ([emoji key](https://allcontributors.org/d
    icyvapor

    💻 📖
    Jérôme LORRAIN

    💻
    David Morris-Oliveros

    💻 +
    BenoitConnan

    💻 From 6ab23cd3a4b02ad1d0d8c50983cb54b5da666aa2 Mon Sep 17 00:00:00 2001 From: Milan Date: Fri, 22 Apr 2022 17:30:41 +0200 Subject: [PATCH 267/357] Add @Malthaldar as a contributor --- .all-contributorsrc | 9 +++++++++ README.md | 3 ++- 2 files changed, 11 insertions(+), 1 deletion(-) diff --git a/.all-contributorsrc b/.all-contributorsrc index 15e0dbca5b..f5650dd82c 100644 --- a/.all-contributorsrc +++ b/.all-contributorsrc @@ -240,6 +240,15 @@ "contributions": [ "code" ] + }, + { + "login": "Malthaldar", + "name": "Malthaldar", + "avatar_url": "https://avatars.githubusercontent.com/u/33671694?v=4", + "profile": "https://github.com/Malthaldar", + "contributions": [ + "code" + ] } ], "contributorsPerLine": 7 diff --git a/README.md b/README.md index 5778969ac1..aaaf719caa 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ -[![All Contributors](https://img.shields.io/badge/all_contributors-20-orange.svg?style=flat-square)](#contributors-) +[![All Contributors](https://img.shields.io/badge/all_contributors-21-orange.svg?style=flat-square)](#contributors-) OpenPype ==== @@ -320,6 +320,7 @@ Thanks goes to these wonderful people ([emoji key](https://allcontributors.org/d
    Jérôme LORRAIN

    💻
    David Morris-Oliveros

    💻
    BenoitConnan

    💻 +
    Malthaldar

    💻 From 6d4c29831c859c6cf1006b9c56637b54fb34505d Mon Sep 17 00:00:00 2001 From: Milan Date: Fri, 22 Apr 2022 17:31:22 +0200 Subject: [PATCH 268/357] Add @64qam as a contributor --- .all-contributorsrc | 15 +++++++++++++++ README.md | 5 ++++- 2 files changed, 19 insertions(+), 1 deletion(-) diff --git a/.all-contributorsrc b/.all-contributorsrc index f5650dd82c..9b186f2a40 100644 --- a/.all-contributorsrc +++ b/.all-contributorsrc @@ -249,6 +249,21 @@ "contributions": [ "code" ] + }, + { + "login": "64qam", + "name": "64qam", + "avatar_url": "https://avatars.githubusercontent.com/u/26925793?v=4", + "profile": "https://github.com/64qam", + "contributions": [ + "code", + "review", + "doc", + "infra", + "projectManagement", + "maintenance", + "content" + ] } ], "contributorsPerLine": 7 diff --git a/README.md b/README.md index aaaf719caa..56bbc92768 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ -[![All Contributors](https://img.shields.io/badge/all_contributors-21-orange.svg?style=flat-square)](#contributors-) +[![All Contributors](https://img.shields.io/badge/all_contributors-22-orange.svg?style=flat-square)](#contributors-) OpenPype ==== @@ -322,6 +322,9 @@ Thanks goes to these wonderful people ([emoji key](https://allcontributors.org/d
    BenoitConnan

    💻
    Malthaldar

    💻 + +
    64qam

    💻 👀 📖 🚇 📆 🚧 🖋 + From 46d425bd8dbc5e12984638073a559ad87f69c117 Mon Sep 17 00:00:00 2001 From: Milan Date: Fri, 22 Apr 2022 17:33:17 +0200 Subject: [PATCH 269/357] Add @jrsndl as a contributor --- .all-contributorsrc | 30 ++++++++++++++++++++++-------- README.md | 11 ++++++----- 2 files changed, 28 insertions(+), 13 deletions(-) diff --git a/.all-contributorsrc b/.all-contributorsrc index 9b186f2a40..ea1d162c4e 100644 --- a/.all-contributorsrc +++ b/.all-contributorsrc @@ -91,6 +91,21 @@ "question" ] }, + { + "login": "64qam", + "name": "64qam", + "avatar_url": "https://avatars.githubusercontent.com/u/26925793?v=4", + "profile": "https://github.com/64qam", + "contributions": [ + "code", + "review", + "doc", + "infra", + "projectManagement", + "maintenance", + "content" + ] + }, { "login": "aardschok", "name": "Wijnand Koreman", @@ -251,18 +266,17 @@ ] }, { - "login": "64qam", - "name": "64qam", - "avatar_url": "https://avatars.githubusercontent.com/u/26925793?v=4", - "profile": "https://github.com/64qam", + "login": "jrsndl", + "name": "Jiri Sindelar", + "avatar_url": "https://avatars.githubusercontent.com/u/45896205?v=4", + "profile": "https://github.com/jrsndl", "contributions": [ "code", "review", "doc", - "infra", - "projectManagement", - "maintenance", - "content" + "content", + "tutorial", + "userTesting" ] } ], diff --git a/README.md b/README.md index 56bbc92768..0c8bc93aa8 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ -[![All Contributors](https://img.shields.io/badge/all_contributors-22-orange.svg?style=flat-square)](#contributors-) +[![All Contributors](https://img.shields.io/badge/all_contributors-23-orange.svg?style=flat-square)](#contributors-) OpenPype ==== @@ -301,29 +301,30 @@ Thanks goes to these wonderful people ([emoji key](https://allcontributors.org/d
    Ondřej Samohel

    💻 📖 🚇 🖋 👀 🚧 🧑‍🏫 📆 💬
    Jakub Trllo

    💻 📖 🚇 👀 🚧 💬
    Petr Kalis

    💻 📖 🚇 👀 🚧 💬 +
    64qam

    💻 👀 📖 🚇 📆 🚧 🖋
    Wijnand Koreman

    💻 -
    Roy Nieterau

    💻 📖 👀 🧑‍🏫 💬 +
    Roy Nieterau

    💻 📖 👀 🧑‍🏫 💬
    Toke Jepsen

    💻 📖 👀 🧑‍🏫 💬
    Simone Barbieri

    💻 📖
    karimmozilla

    💻
    Bo Zhou

    💻
    Clément Hector

    💻 👀
    David Lai

    💻 👀 -
    Derek

    💻 👀 📖 +
    Derek

    💻 👀 📖
    Allan I. A.

    💻
    Gábor Marinov

    💻 📖
    icyvapor

    💻 📖
    Jérôme LORRAIN

    💻
    David Morris-Oliveros

    💻
    BenoitConnan

    💻 -
    Malthaldar

    💻 -
    64qam

    💻 👀 📖 🚇 📆 🚧 🖋 +
    Malthaldar

    💻 +
    Jiri Sindelar

    💻 👀 📖 🖋 📓 From 5c793bb8ecdbf97be39ace78d90ea5b3187c2d41 Mon Sep 17 00:00:00 2001 From: Milan Date: Fri, 22 Apr 2022 17:33:24 +0200 Subject: [PATCH 270/357] Add @jrsndl as a contributor --- .all-contributorsrc | 36 ++++++++++++++++++------------------ README.md | 8 ++++---- 2 files changed, 22 insertions(+), 22 deletions(-) diff --git a/.all-contributorsrc b/.all-contributorsrc index ea1d162c4e..737bf7e174 100644 --- a/.all-contributorsrc +++ b/.all-contributorsrc @@ -106,15 +106,6 @@ "content" ] }, - { - "login": "aardschok", - "name": "Wijnand Koreman", - "avatar_url": "https://avatars.githubusercontent.com/u/26920875?v=4", - "profile": "https://github.com/aardschok", - "contributions": [ - "code" - ] - }, { "login": "BigRoy", "name": "Roy Nieterau", @@ -160,6 +151,24 @@ "code" ] }, + { + "login": "Allan-I", + "name": "Allan I. A.", + "avatar_url": "https://avatars.githubusercontent.com/u/76656700?v=4", + "profile": "https://github.com/Allan-I", + "contributions": [ + "code" + ] + }, + { + "login": "aardschok", + "name": "Wijnand Koreman", + "avatar_url": "https://avatars.githubusercontent.com/u/26920875?v=4", + "profile": "https://github.com/aardschok", + "contributions": [ + "code" + ] + }, { "login": "zhoub", "name": "Bo Zhou", @@ -200,15 +209,6 @@ "doc" ] }, - { - "login": "Allan-I", - "name": "Allan I. A.", - "avatar_url": "https://avatars.githubusercontent.com/u/76656700?v=4", - "profile": "https://github.com/Allan-I", - "contributions": [ - "code" - ] - }, { "login": "gabormarinov", "name": "Gábor Marinov", diff --git a/README.md b/README.md index 0c8bc93aa8..1a239d4f03 100644 --- a/README.md +++ b/README.md @@ -302,20 +302,20 @@ Thanks goes to these wonderful people ([emoji key](https://allcontributors.org/d
    Jakub Trllo

    💻 📖 🚇 👀 🚧 💬
    Petr Kalis

    💻 📖 🚇 👀 🚧 💬
    64qam

    💻 👀 📖 🚇 📆 🚧 🖋 -
    Wijnand Koreman

    💻 +
    Roy Nieterau

    💻 📖 👀 🧑‍🏫 💬 -
    Roy Nieterau

    💻 📖 👀 🧑‍🏫 💬
    Toke Jepsen

    💻 📖 👀 🧑‍🏫 💬
    Simone Barbieri

    💻 📖
    karimmozilla

    💻 +
    Allan I. A.

    💻 +
    Wijnand Koreman

    💻
    Bo Zhou

    💻
    Clément Hector

    💻 👀 -
    David Lai

    💻 👀 +
    David Lai

    💻 👀
    Derek

    💻 👀 📖 -
    Allan I. A.

    💻
    Gábor Marinov

    💻 📖
    icyvapor

    💻 📖
    Jérôme LORRAIN

    💻 From 6f7e9b749e7e3ff6853ad68cd7e7855374d4f626 Mon Sep 17 00:00:00 2001 From: Milan Date: Fri, 22 Apr 2022 17:34:35 +0200 Subject: [PATCH 271/357] Update @jrsndl as a contributor --- .all-contributorsrc | 31 ++++++++++++++++--------------- README.md | 8 ++++---- 2 files changed, 20 insertions(+), 19 deletions(-) diff --git a/.all-contributorsrc b/.all-contributorsrc index 737bf7e174..156cda9324 100644 --- a/.all-contributorsrc +++ b/.all-contributorsrc @@ -103,7 +103,8 @@ "infra", "projectManagement", "maintenance", - "content" + "content", + "userTesting" ] }, { @@ -132,6 +133,20 @@ "question" ] }, + { + "login": "jrsndl", + "name": "Jiri Sindelar", + "avatar_url": "https://avatars.githubusercontent.com/u/45896205?v=4", + "profile": "https://github.com/jrsndl", + "contributions": [ + "code", + "review", + "doc", + "content", + "tutorial", + "userTesting" + ] + }, { "login": "simonebarbieri", "name": "Simone Barbieri", @@ -264,20 +279,6 @@ "contributions": [ "code" ] - }, - { - "login": "jrsndl", - "name": "Jiri Sindelar", - "avatar_url": "https://avatars.githubusercontent.com/u/45896205?v=4", - "profile": "https://github.com/jrsndl", - "contributions": [ - "code", - "review", - "doc", - "content", - "tutorial", - "userTesting" - ] } ], "contributorsPerLine": 7 diff --git a/README.md b/README.md index 1a239d4f03..b29d5c0c9a 100644 --- a/README.md +++ b/README.md @@ -301,30 +301,30 @@ Thanks goes to these wonderful people ([emoji key](https://allcontributors.org/d
    Ondřej Samohel

    💻 📖 🚇 🖋 👀 🚧 🧑‍🏫 📆 💬
    Jakub Trllo

    💻 📖 🚇 👀 🚧 💬
    Petr Kalis

    💻 📖 🚇 👀 🚧 💬 -
    64qam

    💻 👀 📖 🚇 📆 🚧 🖋 +
    64qam

    💻 👀 📖 🚇 📆 🚧 🖋 📓
    Roy Nieterau

    💻 📖 👀 🧑‍🏫 💬
    Toke Jepsen

    💻 📖 👀 🧑‍🏫 💬 +
    Jiri Sindelar

    💻 👀 📖 🖋 📓
    Simone Barbieri

    💻 📖
    karimmozilla

    💻
    Allan I. A.

    💻
    Wijnand Koreman

    💻
    Bo Zhou

    💻 -
    Clément Hector

    💻 👀 +
    Clément Hector

    💻 👀
    David Lai

    💻 👀
    Derek

    💻 👀 📖
    Gábor Marinov

    💻 📖
    icyvapor

    💻 📖
    Jérôme LORRAIN

    💻
    David Morris-Oliveros

    💻 -
    BenoitConnan

    💻 +
    BenoitConnan

    💻
    Malthaldar

    💻 -
    Jiri Sindelar

    💻 👀 📖 🖋 📓 From 54d5240efa54617e44290b9d8bfbad1f1004fc18 Mon Sep 17 00:00:00 2001 From: Milan Date: Fri, 22 Apr 2022 17:34:47 +0200 Subject: [PATCH 272/357] Add @svenneve as a contributor --- .all-contributorsrc | 9 +++++++++ README.md | 3 ++- 2 files changed, 11 insertions(+), 1 deletion(-) diff --git a/.all-contributorsrc b/.all-contributorsrc index 156cda9324..b8f621afcb 100644 --- a/.all-contributorsrc +++ b/.all-contributorsrc @@ -279,6 +279,15 @@ "contributions": [ "code" ] + }, + { + "login": "svenneve", + "name": "Sven Neve", + "avatar_url": "https://avatars.githubusercontent.com/u/2472863?v=4", + "profile": "http://www.svenneve.com/", + "contributions": [ + "code" + ] } ], "contributorsPerLine": 7 diff --git a/README.md b/README.md index b29d5c0c9a..061b4ec707 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ -[![All Contributors](https://img.shields.io/badge/all_contributors-23-orange.svg?style=flat-square)](#contributors-) +[![All Contributors](https://img.shields.io/badge/all_contributors-24-orange.svg?style=flat-square)](#contributors-) OpenPype ==== @@ -325,6 +325,7 @@ Thanks goes to these wonderful people ([emoji key](https://allcontributors.org/d
    BenoitConnan

    💻
    Malthaldar

    💻 +
    Sven Neve

    💻 From 1d5406b378771bf88360a94a72b2b378945e94ca Mon Sep 17 00:00:00 2001 From: Milan Date: Fri, 22 Apr 2022 17:34:58 +0200 Subject: [PATCH 273/357] Add @zafrs as a contributor --- .all-contributorsrc | 9 +++++++++ README.md | 3 ++- 2 files changed, 11 insertions(+), 1 deletion(-) diff --git a/.all-contributorsrc b/.all-contributorsrc index b8f621afcb..2578651ee4 100644 --- a/.all-contributorsrc +++ b/.all-contributorsrc @@ -288,6 +288,15 @@ "contributions": [ "code" ] + }, + { + "login": "zafrs", + "name": "zafrs", + "avatar_url": "https://avatars.githubusercontent.com/u/26890002?v=4", + "profile": "https://github.com/zafrs", + "contributions": [ + "code" + ] } ], "contributorsPerLine": 7 diff --git a/README.md b/README.md index 061b4ec707..2abb7791ff 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ -[![All Contributors](https://img.shields.io/badge/all_contributors-24-orange.svg?style=flat-square)](#contributors-) +[![All Contributors](https://img.shields.io/badge/all_contributors-25-orange.svg?style=flat-square)](#contributors-) OpenPype ==== @@ -326,6 +326,7 @@ Thanks goes to these wonderful people ([emoji key](https://allcontributors.org/d
    BenoitConnan

    💻
    Malthaldar

    💻
    Sven Neve

    💻 +
    zafrs

    💻 From 6c560ac3f7d514fd90ec4ebd5e65cd6093f1e985 Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Fri, 22 Apr 2022 17:35:48 +0200 Subject: [PATCH 274/357] ignore node.js files --- .gitignore | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.gitignore b/.gitignore index fa3fae1ad2..28cfb4b1e9 100644 --- a/.gitignore +++ b/.gitignore @@ -70,6 +70,8 @@ coverage.xml ################## node_modules package-lock.json +package.json +yarn.lock openpype/premiere/ppro/js/debug.log From 8d45b649ec97d4dd041ab6617ba67658141d64a9 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 22 Apr 2022 17:37:31 +0200 Subject: [PATCH 275/357] remove the need to set AVALON_APP_NAME --- .../perjob/m50__openpype_publish_render.py | 7 ++++--- openpype/pype_commands.py | 15 ++++++++------- 2 files changed, 12 insertions(+), 10 deletions(-) diff --git a/openpype/modules/royalrender/rr_root/plugins/control_job/perjob/m50__openpype_publish_render.py b/openpype/modules/royalrender/rr_root/plugins/control_job/perjob/m50__openpype_publish_render.py index 7f5b514253..cdc37588cd 100644 --- a/openpype/modules/royalrender/rr_root/plugins/control_job/perjob/m50__openpype_publish_render.py +++ b/openpype/modules/royalrender/rr_root/plugins/control_job/perjob/m50__openpype_publish_render.py @@ -119,7 +119,7 @@ class OpenPypeContextSelector: # app names and versions, but since app_name is not used # currently down the line (but it is required by OP publish command # right now). - self.context["app_name"] = "celaction/local" + # self.context["app_name"] = "maya/2022" return True @staticmethod @@ -139,7 +139,8 @@ class OpenPypeContextSelector: env = {"AVALON_PROJECT": str(self.context.get("project")), "AVALON_ASSET": str(self.context.get("asset")), "AVALON_TASK": str(self.context.get("task")), - "AVALON_APP_NAME": str(self.context.get("app_name"))} + # "AVALON_APP_NAME": str(self.context.get("app_name")) + } print(">>> setting environment:") for k, v in env.items(): @@ -184,7 +185,7 @@ selector = OpenPypeContextSelector() selector.context["project"] = os.getenv("AVALON_PROJECT") selector.context["asset"] = os.getenv("AVALON_ASSET") selector.context["task"] = os.getenv("AVALON_TASK") -selector.context["app_name"] = os.getenv("AVALON_APP_NAME") +# selector.context["app_name"] = os.getenv("AVALON_APP_NAME") # if anything inside is None, scratch the whole thing and # ask user for context. 
diff --git a/openpype/pype_commands.py b/openpype/pype_commands.py index e0c8847040..bd2008e144 100644 --- a/openpype/pype_commands.py +++ b/openpype/pype_commands.py @@ -125,13 +125,14 @@ class PypeCommands: if not any(paths): raise RuntimeError("No publish paths specified") - env = get_app_environments_for_context( - os.environ["AVALON_PROJECT"], - os.environ["AVALON_ASSET"], - os.environ["AVALON_TASK"], - os.environ["AVALON_APP_NAME"] - ) - os.environ.update(env) + if os.getenv("AVALON_APP_NAME"): + env = get_app_environments_for_context( + os.environ["AVALON_PROJECT"], + os.environ["AVALON_ASSET"], + os.environ["AVALON_TASK"], + os.environ["AVALON_APP_NAME"] + ) + os.environ.update(env) pyblish.api.register_host("shell") From 61fa6b23ac94ac158df290d22066999bee21514e Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Fri, 22 Apr 2022 17:41:36 +0200 Subject: [PATCH 276/357] update style --- .all-contributorsrc | 2 +- README.md | 50 ++++++++++++++++++++++----------------------- 2 files changed, 26 insertions(+), 26 deletions(-) diff --git a/.all-contributorsrc b/.all-contributorsrc index 2578651ee4..492d6f2f1f 100644 --- a/.all-contributorsrc +++ b/.all-contributorsrc @@ -6,7 +6,7 @@ "files": [ "README.md" ], - "imageSize": 80, + "imageSize": 100, "commit": true, "commitConvention": "none", "contributors": [ diff --git a/README.md b/README.md index 2abb7791ff..448ca1a263 100644 --- a/README.md +++ b/README.md @@ -296,37 +296,37 @@ Thanks goes to these wonderful people ([emoji key](https://allcontributors.org/d - - - - - - - + + + + + + + - - - - - - - + + + + + + + - - - - - - - + + + + + + + - - - - + + + +

    Milan Kolar

    💻 📖 🚇 💼 🖋 🔍 🚧 📆 👀 🧑‍🏫 💬

    Jakub Ježek

    💻 📖 🚇 🖋 👀 🚧 🧑‍🏫 📆 💬

    Ondřej Samohel

    💻 📖 🚇 🖋 👀 🚧 🧑‍🏫 📆 💬

    Jakub Trllo

    💻 📖 🚇 👀 🚧 💬

    Petr Kalis

    💻 📖 🚇 👀 🚧 💬

    64qam

    💻 👀 📖 🚇 📆 🚧 🖋 📓

    Roy Nieterau

    💻 📖 👀 🧑‍🏫 💬

    Milan Kolar

    💻 📖 🚇 💼 🖋 🔍 🚧 📆 👀 🧑‍🏫 💬

    Jakub Ježek

    💻 📖 🚇 🖋 👀 🚧 🧑‍🏫 📆 💬

    Ondřej Samohel

    💻 📖 🚇 🖋 👀 🚧 🧑‍🏫 📆 💬

    Jakub Trllo

    💻 📖 🚇 👀 🚧 💬

    Petr Kalis

    💻 📖 🚇 👀 🚧 💬

    64qam

    💻 👀 📖 🚇 📆 🚧 🖋 📓

    Roy Nieterau

    💻 📖 👀 🧑‍🏫 💬

    Toke Jepsen

    💻 📖 👀 🧑‍🏫 💬

    Jiri Sindelar

    💻 👀 📖 🖋 📓

    Simone Barbieri

    💻 📖

    karimmozilla

    💻

    Allan I. A.

    💻

    Wijnand Koreman

    💻

    Bo Zhou

    💻

    Toke Jepsen

    💻 📖 👀 🧑‍🏫 💬

    Jiri Sindelar

    💻 👀 📖 🖋 📓

    Simone Barbieri

    💻 📖

    karimmozilla

    💻

    Allan I. A.

    💻

    Wijnand Koreman

    💻

    Bo Zhou

    💻

    Clément Hector

    💻 👀

    David Lai

    💻 👀

    Derek

    💻 👀 📖

    Gábor Marinov

    💻 📖

    icyvapor

    💻 📖

    Jérôme LORRAIN

    💻

    David Morris-Oliveros

    💻

    Clément Hector

    💻 👀

    David Lai

    💻 👀

    Derek

    💻 👀 📖

    Gábor Marinov

    💻 📖

    icyvapor

    💻 📖

    Jérôme LORRAIN

    💻

    David Morris-Oliveros

    💻

    BenoitConnan

    💻

    Malthaldar

    💻

    Sven Neve

    💻

    zafrs

    💻

    BenoitConnan

    💻

    Malthaldar

    💻

    Sven Neve

    💻

    zafrs

    💻
    From e74a295c0f17defaa8cf59d02c9541c834769eaa Mon Sep 17 00:00:00 2001 From: Milan Date: Fri, 22 Apr 2022 17:46:03 +0200 Subject: [PATCH 277/357] Add @m-u-r-p-h-y as a contributor --- .all-contributorsrc | 14 +++++++++++++- README.md | 5 +++-- 2 files changed, 16 insertions(+), 3 deletions(-) diff --git a/.all-contributorsrc b/.all-contributorsrc index 492d6f2f1f..3277d64485 100644 --- a/.all-contributorsrc +++ b/.all-contributorsrc @@ -220,7 +220,6 @@ "profile": "https://github.com/2-REC", "contributions": [ "code", - "review", "doc" ] }, @@ -297,6 +296,19 @@ "contributions": [ "code" ] + }, + { + "login": "m-u-r-p-h-y", + "name": "murphy", + "avatar_url": "https://avatars.githubusercontent.com/u/352795?v=4", + "profile": "https://www.linkedin.com/in/mmuurrpphhyy/", + "contributions": [ + "code", + "review", + "userTesting", + "doc", + "projectManagement" + ] } ], "contributorsPerLine": 7 diff --git a/README.md b/README.md index 448ca1a263..52a3556a4e 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ -[![All Contributors](https://img.shields.io/badge/all_contributors-25-orange.svg?style=flat-square)](#contributors-) +[![All Contributors](https://img.shields.io/badge/all_contributors-26-orange.svg?style=flat-square)](#contributors-) OpenPype ==== @@ -316,7 +316,7 @@ Thanks goes to these wonderful people ([emoji key](https://allcontributors.org/d
    Clément Hector

    💻 👀
    David Lai

    💻 👀 -
    Derek

    💻 👀 📖 +
    Derek

    💻 📖
    Gábor Marinov

    💻 📖
    icyvapor

    💻 📖
    Jérôme LORRAIN

    💻 @@ -327,6 +327,7 @@ Thanks goes to these wonderful people ([emoji key](https://allcontributors.org/d
    Malthaldar

    💻
    Sven Neve

    💻
    zafrs

    💻 +
    murphy

    💻 👀 📓 📖 📆 From 910e875d3388718f78f71c1d981be534504326b1 Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Fri, 22 Apr 2022 17:46:34 +0200 Subject: [PATCH 278/357] update readme --- .all-contributorsrc | 26 +++++++++++++------------- 1 file changed, 13 insertions(+), 13 deletions(-) diff --git a/.all-contributorsrc b/.all-contributorsrc index 3277d64485..81447b3dfe 100644 --- a/.all-contributorsrc +++ b/.all-contributorsrc @@ -175,6 +175,19 @@ "code" ] }, + { + "login": "m-u-r-p-h-y", + "name": "murphy", + "avatar_url": "https://avatars.githubusercontent.com/u/352795?v=4", + "profile": "https://www.linkedin.com/in/mmuurrpphhyy/", + "contributions": [ + "code", + "review", + "userTesting", + "doc", + "projectManagement" + ] + } { "login": "aardschok", "name": "Wijnand Koreman", @@ -296,19 +309,6 @@ "contributions": [ "code" ] - }, - { - "login": "m-u-r-p-h-y", - "name": "murphy", - "avatar_url": "https://avatars.githubusercontent.com/u/352795?v=4", - "profile": "https://www.linkedin.com/in/mmuurrpphhyy/", - "contributions": [ - "code", - "review", - "userTesting", - "doc", - "projectManagement" - ] } ], "contributorsPerLine": 7 From 85027923b29be628f7b3ab658209229e739d3799 Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Fri, 22 Apr 2022 17:51:30 +0200 Subject: [PATCH 279/357] change order --- .all-contributorsrc | 4 ++-- README.md | 6 +++--- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/.all-contributorsrc b/.all-contributorsrc index 81447b3dfe..a3b85cae68 100644 --- a/.all-contributorsrc +++ b/.all-contributorsrc @@ -187,7 +187,7 @@ "doc", "projectManagement" ] - } + }, { "login": "aardschok", "name": "Wijnand Koreman", @@ -312,4 +312,4 @@ } ], "contributorsPerLine": 7 -} +} \ No newline at end of file diff --git a/README.md b/README.md index 52a3556a4e..b6966adbc4 100644 --- a/README.md +++ b/README.md @@ -310,24 +310,24 @@ Thanks goes to these wonderful people ([emoji key](https://allcontributors.org/d
    Simone Barbieri

    💻 📖
    karimmozilla

    💻
    Allan I. A.

    💻 +
    murphy

    💻 👀 📓 📖 📆
    Wijnand Koreman

    💻 -
    Bo Zhou

    💻 +
    Bo Zhou

    💻
    Clément Hector

    💻 👀
    David Lai

    💻 👀
    Derek

    💻 📖
    Gábor Marinov

    💻 📖
    icyvapor

    💻 📖
    Jérôme LORRAIN

    💻 -
    David Morris-Oliveros

    💻 +
    David Morris-Oliveros

    💻
    BenoitConnan

    💻
    Malthaldar

    💻
    Sven Neve

    💻
    zafrs

    💻 -
    murphy

    💻 👀 📓 📖 📆 From e311a48ef47f0ba8d80c59e30c1fb3dcf3f1c93a Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 22 Apr 2022 18:23:03 +0200 Subject: [PATCH 280/357] skip containers with not found versions --- .../plugins/publish/collect_scene_loaded_versions.py | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/openpype/plugins/publish/collect_scene_loaded_versions.py b/openpype/plugins/publish/collect_scene_loaded_versions.py index e54592abb8..4c54a7d46c 100644 --- a/openpype/plugins/publish/collect_scene_loaded_versions.py +++ b/openpype/plugins/publish/collect_scene_loaded_versions.py @@ -44,12 +44,20 @@ class CollectSceneLoadedVersions(pyblish.api.ContextPlugin): } for con in _containers: + repre_id = con["representation"] + version_id = version_by_repr.get(repre_id) + if version_id is None: + self.log.warning(( + "Skipping container, did not find version document. {}" + ).format(str(con))) + continue + # NOTE: # may have more then one representation that are same version version = { "subsetName": con["name"], - "representation": ObjectId(con["representation"]), - "version": version_by_repr[con["representation"]], # _id + "representation": ObjectId(repre_id), + "version": version_id, } loaded_versions.append(version) From 7c460886442aa0ec5097f7f93c097b2987386882 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 22 Apr 2022 18:27:30 +0200 Subject: [PATCH 281/357] better log message --- openpype/plugins/publish/collect_scene_loaded_versions.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/openpype/plugins/publish/collect_scene_loaded_versions.py b/openpype/plugins/publish/collect_scene_loaded_versions.py index 4c54a7d46c..7b44aa7963 100644 --- a/openpype/plugins/publish/collect_scene_loaded_versions.py +++ b/openpype/plugins/publish/collect_scene_loaded_versions.py @@ -43,12 +43,15 @@ class CollectSceneLoadedVersions(pyblish.api.ContextPlugin): io.find({"_id": {"$in": _repr_ids}}, projection={"parent": 1}) } + # QUESTION should we add same representation id when loaded multiple + # times? for con in _containers: repre_id = con["representation"] version_id = version_by_repr.get(repre_id) if version_id is None: self.log.warning(( - "Skipping container, did not find version document. {}" + "Skipping container," + " did not find representation document. 
{}" ).format(str(con))) continue From a5826ae33667c67d424376a45edb59ca80c31c6f Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 22 Apr 2022 18:27:39 +0200 Subject: [PATCH 282/357] reorganized code a little bit --- openpype/plugins/publish/collect_scene_loaded_versions.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/openpype/plugins/publish/collect_scene_loaded_versions.py b/openpype/plugins/publish/collect_scene_loaded_versions.py index 7b44aa7963..ffdd532df2 100644 --- a/openpype/plugins/publish/collect_scene_loaded_versions.py +++ b/openpype/plugins/publish/collect_scene_loaded_versions.py @@ -38,9 +38,13 @@ class CollectSceneLoadedVersions(pyblish.api.ContextPlugin): loaded_versions = [] _containers = list(host.ls()) _repr_ids = [ObjectId(c["representation"]) for c in _containers] + repre_docs = io.find( + {"_id": {"$in": _repr_ids}}, + projection={"_id": 1, "parent": 1} + ) version_by_repr = { - str(doc["_id"]): doc["parent"] for doc in - io.find({"_id": {"$in": _repr_ids}}, projection={"parent": 1}) + str(doc["_id"]): doc["parent"] + for doc in repre_docs } # QUESTION should we add same representation id when loaded multiple From 750ec30c55d63daedf2a5741010a415fe479390f Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 22 Apr 2022 18:50:50 +0200 Subject: [PATCH 283/357] files widget has only one widget --- .../widgets/attribute_defs/files_widget.py | 122 +++++------------- openpype/widgets/attribute_defs/widgets.py | 11 +- 2 files changed, 34 insertions(+), 99 deletions(-) diff --git a/openpype/widgets/attribute_defs/files_widget.py b/openpype/widgets/attribute_defs/files_widget.py index 34f7d159ad..af00ffe5ad 100644 --- a/openpype/widgets/attribute_defs/files_widget.py +++ b/openpype/widgets/attribute_defs/files_widget.py @@ -87,15 +87,29 @@ class FilesModel(QtGui.QStandardItemModel): ".xpm", ".xwd" ] - def __init__(self): + def __init__(self, multivalue): super(FilesModel, self).__init__() self._filenames_by_dirpath = collections.defaultdict(set) self._items_by_dirpath = collections.defaultdict(list) + self._multivalue = multivalue + def add_filepaths(self, filepaths): if not filepaths: return + if not self._multivalue: + filepaths = [filepaths[0]] + item_ids = [] + for items in self._items_by_dirpath.values(): + for item in items: + item_id = item.data(ITEM_ID_ROLE) + if item_id: + item_ids.append(item_id) + + if item_ids: + self.remove_item_by_ids(item_ids) + new_dirpaths = set() for filepath in filepaths: filename = os.path.basename(filepath) @@ -368,16 +382,16 @@ class FilesView(QtWidgets.QListView): return super(FilesView, self).event(event) -class MultiFilesWidget(QtWidgets.QFrame): +class FilesWidget(QtWidgets.QFrame): value_changed = QtCore.Signal() - def __init__(self, parent): - super(MultiFilesWidget, self).__init__(parent) + def __init__(self, multiselect, parent): + super(FilesWidget, self).__init__(parent) self.setAcceptDrops(True) empty_widget = DropEmpty(self) - files_model = FilesModel() + files_model = FilesModel(multiselect) files_proxy_model = FilesProxyModel() files_proxy_model.setSourceModel(files_model) files_view = FilesView(self) @@ -392,6 +406,11 @@ class MultiFilesWidget(QtWidgets.QFrame): files_proxy_model.rowsInserted.connect(self._on_rows_inserted) files_proxy_model.rowsRemoved.connect(self._on_rows_removed) + drag_label = DragLabel() + drag_label.setVisible(False) + + self._drag_label = drag_label + self._in_set_value = False self._empty_widget = empty_widget @@ -501,7 +520,7 @@ class 
MultiFilesWidget(QtWidgets.QFrame): def sizeHint(self): # Get size hints of widget and visible widgets - result = super(MultiFilesWidget, self).sizeHint() + result = super(FilesWidget, self).sizeHint() if not self._files_view.isVisible(): not_visible_hint = self._files_view.sizeHint() else: @@ -557,90 +576,11 @@ class MultiFilesWidget(QtWidgets.QFrame): self._empty_widget.setVisible(not files_exists) -class SingleFileWidget(QtWidgets.QWidget): - value_changed = QtCore.Signal() - - def __init__(self, parent): - super(SingleFileWidget, self).__init__(parent) - - self.setAcceptDrops(True) - - filepath_input = QtWidgets.QLineEdit(self) - - browse_btn = QtWidgets.QPushButton("Browse", self) - browse_btn.setVisible(False) +class DragLabel(QtWidgets.QWidget): + def __init__(self, parent=None): + super(DragLabel, self).__init__(parent) + t_label = QtWidgets.QLabel("TESTING", self) layout = QtWidgets.QHBoxLayout(self) - layout.setContentsMargins(0, 0, 0, 0) - layout.addWidget(filepath_input, 1) - layout.addWidget(browse_btn, 0) - - browse_btn.clicked.connect(self._on_browse_clicked) - filepath_input.textChanged.connect(self._on_text_change) - - self._in_set_value = False - - self._filepath_input = filepath_input - self._folders_allowed = False - self._exts_filter = [] - - def set_value(self, value, multivalue): - self._in_set_value = True - - if multivalue: - set_value = set(value) - if len(set_value) == 1: - value = tuple(set_value)[0] - else: - value = "< Multiselection >" - self._filepath_input.setText(value) - - self._in_set_value = False - - def current_value(self): - return self._filepath_input.text() - - def set_filters(self, folders_allowed, exts_filter): - self._folders_allowed = folders_allowed - self._exts_filter = exts_filter - - def _on_text_change(self, text): - if not self._in_set_value: - self.value_changed.emit() - - def _on_browse_clicked(self): - # TODO implement file dialog logic in '_on_browse_clicked' - print("_on_browse_clicked") - - def dragEnterEvent(self, event): - mime_data = event.mimeData() - if not mime_data.hasUrls(): - return - - filepaths = [] - for url in mime_data.urls(): - filepath = url.toLocalFile() - if os.path.exists(filepath): - filepaths.append(filepath) - - # TODO add folder, extensions check - if len(filepaths) == 1: - event.setDropAction(QtCore.Qt.CopyAction) - event.accept() - - def dragLeaveEvent(self, event): - event.accept() - - def dropEvent(self, event): - mime_data = event.mimeData() - if mime_data.hasUrls(): - filepaths = [] - for url in mime_data.urls(): - filepath = url.toLocalFile() - if os.path.exists(filepath): - filepaths.append(filepath) - # TODO filter check - if len(filepaths) == 1: - self._filepath_input.setText(filepaths[0]) - - event.accept() + layout.addWidget(t_label) + self._t_label = t_label diff --git a/openpype/widgets/attribute_defs/widgets.py b/openpype/widgets/attribute_defs/widgets.py index 23f025967d..83eeaea61f 100644 --- a/openpype/widgets/attribute_defs/widgets.py +++ b/openpype/widgets/attribute_defs/widgets.py @@ -15,6 +15,8 @@ from openpype.lib.attribute_definitions import ( ) from openpype.widgets.nice_checkbox import NiceCheckbox +from .files_widget import FilesWidget + def create_widget_for_attr_def(attr_def, parent=None): if not isinstance(attr_def, AbtractAttrDef): @@ -337,15 +339,8 @@ class UnknownAttrWidget(_BaseAttrDefWidget): class FileAttrWidget(_BaseAttrDefWidget): def _ui_init(self): self.multipath = self.attr_def.multipath - if self.multipath: - from .files_widget import MultiFilesWidget - input_widget = 
MultiFilesWidget(self) - - else: - from .files_widget import SingleFileWidget - - input_widget = SingleFileWidget(self) + input_widget = FilesWidget(self.multipath, self) if self.attr_def.tooltip: input_widget.setToolTip(self.attr_def.tooltip) From 7965001b162b4aa09028584fc447acac66e21a3f Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 22 Apr 2022 18:51:08 +0200 Subject: [PATCH 284/357] added first idea of FileDefItem for FileDef --- openpype/lib/attribute_definitions.py | 173 +++++++++++++++++++++++--- 1 file changed, 157 insertions(+), 16 deletions(-) diff --git a/openpype/lib/attribute_definitions.py b/openpype/lib/attribute_definitions.py index 189a5e7acd..3d17818ecb 100644 --- a/openpype/lib/attribute_definitions.py +++ b/openpype/lib/attribute_definitions.py @@ -1,8 +1,12 @@ +import os import re import collections import uuid +import json from abc import ABCMeta, abstractmethod + import six +import clique class AbstractAttrDefMeta(ABCMeta): @@ -302,6 +306,126 @@ class BoolDef(AbtractAttrDef): return self.default +class FileDefItem(object): + def __init__( + self, directory, filenames, frames=None, template=None + ): + self.directory = directory + + self.filenames = [] + self.is_sequence = False + self.template = None + self.frames = [] + + self.set_filenames(filenames, frames, template) + + def __str__(self): + return json.dumps(self.to_dict()) + + def __repr__(self): + if self.is_sequence: + filename = self.template + else: + filename = self.filenames[0] + + return "<{}: \"{}\">".format( + self.__class__.__name__, + os.path.join(self.directory, filename) + ) + + def set_directory(self, directory): + self.directory = directory + + def set_filenames(self, filenames, frames=None, template=None): + if frames is None: + frames = [] + is_sequence = False + if frames: + is_sequence = True + + if is_sequence and not template: + raise ValueError("Missing template for sequence") + + self.filenames = filenames + self.template = template + self.frames = frames + self.is_sequence = is_sequence + + @classmethod + def create_empty_item(cls): + return cls("", "") + + @classmethod + def from_value(cls, value): + multi = isinstance(value, (list, tuple, set)) + if not multi: + value = [value] + + output = [] + for item in value: + if isinstance(item, dict): + output.append(cls.from_dict(item)) + elif isinstance(item, six.string_types): + output.extend(cls.from_paths([item])) + else: + raise TypeError( + "Unknown type \"{}\". 
Can't convert to {}".format( + str(type(item)), cls.__name__ + ) + ) + if multi: + return output + return output[0] + + @classmethod + def from_dict(cls, data): + return cls( + data["directory"], + data["filenames"], + data.get("frames"), + data.get("template") + ) + + @classmethod + def from_paths(cls, paths): + filenames_by_dir = collections.defaultdict(list) + for path in paths: + normalized = os.path.normpath(path) + directory, filename = os.path.split(normalized) + filenames_by_dir[directory].append(filename) + + output = [] + for directory, filenames in filenames_by_dir.items(): + cols, remainders = clique.assemble(filenames) + for remainder in remainders: + output.append(cls(directory, [remainder])) + + for col in cols: + frames = list(col.indexes) + paths = [filename for filename in col] + template = col.format("{head}{padding}{tail}") + + output.append(cls( + directory, paths, frames, template + )) + + return output + + def to_dict(self): + output = { + "is_sequence": self.is_sequence, + "directory": self.directory, + "filenames": list(self.filenames), + } + if self.is_sequence: + output.update({ + "template": self.template, + "frames": list(sorted(self.frames)), + }) + + return output + + class FileDef(AbtractAttrDef): """File definition. It is possible to define filters of allowed file extensions and if supports @@ -326,7 +450,7 @@ class FileDef(AbtractAttrDef): if multipath: default = [] else: - default = "" + default = FileDefItem.create_empty_item().to_dict() else: if multipath: if not isinstance(default, (tuple, list, set)): @@ -336,11 +460,16 @@ class FileDef(AbtractAttrDef): ).format(type(default))) else: - if not isinstance(default, six.string_types): + if isinstance(default, dict): + FileDefItem.from_dict(default) + + elif isinstance(default, six.string_types): + default = FileDefItem.from_paths([default.strip()])[0] + + else: raise TypeError(( - "'default' argument must be 'str' not '{}'" + "'default' argument must be 'str' or 'dict' not '{}'" ).format(type(default))) - default = default.strip() # Change horizontal label is_label_horizontal = kwargs.get("is_label_horizontal") @@ -366,24 +495,36 @@ class FileDef(AbtractAttrDef): ) def convert_value(self, value): - if isinstance(value, six.string_types): - if self.multipath: - value = [value.strip()] - else: - value = value.strip() - return value + if isinstance(value, six.string_types) or isinstance(value, dict): + value = [value] if isinstance(value, (tuple, list, set)): - _value = [] + string_paths = [] + dict_items = [] for item in value: if isinstance(item, six.string_types): - _value.append(item.strip()) + string_paths.append(item.strip()) + elif isinstance(item, dict): + try: + FileDefItem.from_dict(item) + dict_items.append(item) + except (ValueError, KeyError): + pass + + if string_paths: + file_items = FileDefItem.from_paths(string_paths) + dict_items.extend([ + file_item.to_dict() + for file_item in file_items + ]) if self.multipath: - return _value + return dict_items - if not _value: + if not dict_items: return self.default - return _value[0].strip() + return dict_items[0] - return str(value).strip() + if self.multipath: + return [] + return FileDefItem.create_empty_item().to_dict() From a003bceb166dac3884388c2cf0fb0d07fc999766 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 22 Apr 2022 18:59:14 +0200 Subject: [PATCH 285/357] removed live_repo_dir usage --- igniter/bootstrap_repos.py | 20 +++++++++----------- 1 file changed, 9 insertions(+), 11 deletions(-) diff --git a/igniter/bootstrap_repos.py 
b/igniter/bootstrap_repos.py index 6392517cda..fc814f871a 100644 --- a/igniter/bootstrap_repos.py +++ b/igniter/bootstrap_repos.py @@ -627,8 +627,6 @@ class BootstrapRepos: Attributes: data_dir (Path): local OpenPype installation directory. - live_repo_dir (Path): path to repos directory if running live, - otherwise `None`. registry (OpenPypeSettingsRegistry): OpenPype registry object. zip_filter (list): List of files to exclude from zip openpype_filter (list): list of top level directories to @@ -667,11 +665,6 @@ class BootstrapRepos: progress_callback = empty_progress self._progress_callback = progress_callback - if getattr(sys, "frozen", False): - self.live_repo_dir = Path(sys.executable).parent - else: - self.live_repo_dir = Path(Path(__file__).parent / "..") - @staticmethod def get_version_path_from_list( version: str, version_list: list) -> Union[Path, None]: @@ -736,11 +729,16 @@ class BootstrapRepos: # if repo dir is not set, we detect local "live" OpenPype repository # version and use it as a source. Otherwise repo_dir is user # entered location. - if not repo_dir: - version = OpenPypeVersion.get_installed_version_str() - repo_dir = self.live_repo_dir - else: + if repo_dir: version = self.get_version(repo_dir) + else: + version = OpenPypeVersion.get_installed_version_str() + # QUESTION Can we use 'OPENPYPE_ROOT' env variable or it may + # not be defined yet? + if getattr(sys, "frozen", False): + repo_dir = Path(sys.executable).parent + else: + repo_dir = Path(Path(__file__).parent / "..") if not version: self._print("OpenPype not found.", LOG_ERROR) From 9fd2a7b978355616146805852fec24db50324e57 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 22 Apr 2022 19:04:30 +0200 Subject: [PATCH 286/357] simplified repository resolving using OpenPypeVersion.get_installed_version --- igniter/bootstrap_repos.py | 10 +++------- 1 file changed, 3 insertions(+), 7 deletions(-) diff --git a/igniter/bootstrap_repos.py b/igniter/bootstrap_repos.py index fc814f871a..08333885c0 100644 --- a/igniter/bootstrap_repos.py +++ b/igniter/bootstrap_repos.py @@ -732,13 +732,9 @@ class BootstrapRepos: if repo_dir: version = self.get_version(repo_dir) else: - version = OpenPypeVersion.get_installed_version_str() - # QUESTION Can we use 'OPENPYPE_ROOT' env variable or it may - # not be defined yet? 
- if getattr(sys, "frozen", False): - repo_dir = Path(sys.executable).parent - else: - repo_dir = Path(Path(__file__).parent / "..") + installed_version = OpenPypeVersion.get_installed_version() + version = str(installed_version) + repo_dir = installed_version.path if not version: self._print("OpenPype not found.", LOG_ERROR) From 19a2ef4cb69f2fc0dcf3247ffa25c628c480e1ff Mon Sep 17 00:00:00 2001 From: OpenPype Date: Sat, 23 Apr 2022 03:40:51 +0000 Subject: [PATCH 287/357] [Automated] Bump version --- CHANGELOG.md | 39 ++++++++++++++++++--------------------- openpype/version.py | 2 +- pyproject.toml | 2 +- 3 files changed, 20 insertions(+), 23 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index a48e9ee806..e2ff9f919c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,25 +1,39 @@ # Changelog -## [3.10.0-nightly.1](https://github.com/pypeclub/OpenPype/tree/HEAD) +## [3.10.0-nightly.2](https://github.com/pypeclub/OpenPype/tree/HEAD) [Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.9.4...HEAD) ### 📖 Documentation +- Docs: add all-contributors config and initial list [\#3094](https://github.com/pypeclub/OpenPype/pull/3094) - Nuke docs with videos [\#3052](https://github.com/pypeclub/OpenPype/pull/3052) **🚀 Enhancements** +- Standalone publisher: add support for bgeo and vdb [\#3080](https://github.com/pypeclub/OpenPype/pull/3080) - Update collect\_render.py [\#3055](https://github.com/pypeclub/OpenPype/pull/3055) +- SiteSync: Added compute\_resource\_sync\_sites to sync\_server\_module [\#2983](https://github.com/pypeclub/OpenPype/pull/2983) **🐛 Bug fixes** +- RoyalRender Control Submission - AVALON\_APP\_NAME default [\#3091](https://github.com/pypeclub/OpenPype/pull/3091) +- Ftrack: Update Create Folders action [\#3089](https://github.com/pypeclub/OpenPype/pull/3089) +- Project Manager: Avoid unnecessary updates of asset documents [\#3083](https://github.com/pypeclub/OpenPype/pull/3083) +- Standalone publisher: Fix plugins install [\#3077](https://github.com/pypeclub/OpenPype/pull/3077) +- General: Extract review sequence is not converted with same names [\#3076](https://github.com/pypeclub/OpenPype/pull/3076) +- Webpublisher: Use variant value [\#3068](https://github.com/pypeclub/OpenPype/pull/3068) - Nuke: Add aov matching even for remainder and prerender [\#3060](https://github.com/pypeclub/OpenPype/pull/3060) **🔀 Refactored code** - General: Move host install [\#3009](https://github.com/pypeclub/OpenPype/pull/3009) +**Merged pull requests:** + +- Nuke: added suspend\_publish knob [\#3078](https://github.com/pypeclub/OpenPype/pull/3078) +- Bump async from 2.6.3 to 2.6.4 in /website [\#3065](https://github.com/pypeclub/OpenPype/pull/3065) + ## [3.9.4](https://github.com/pypeclub/OpenPype/tree/3.9.4) (2022-04-15) [Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.9.4-nightly.2...3.9.4) @@ -53,6 +67,7 @@ - LibraryLoader: Use current project for asset query in families filter [\#3042](https://github.com/pypeclub/OpenPype/pull/3042) - SiteSync: Providers ignore that site is disabled [\#3041](https://github.com/pypeclub/OpenPype/pull/3041) - Unreal: Creator import fixes [\#3040](https://github.com/pypeclub/OpenPype/pull/3040) +- Settings UI: Version column can be extended so version are visible [\#3032](https://github.com/pypeclub/OpenPype/pull/3032) - SiteSync: fix transitive alternate sites, fix dropdown in Local Settings [\#3018](https://github.com/pypeclub/OpenPype/pull/3018) **Merged pull requests:** @@ -72,7 +87,6 @@ - Ftrack: Add description 
integrator [\#3027](https://github.com/pypeclub/OpenPype/pull/3027) - Publishing textures for Unreal [\#2988](https://github.com/pypeclub/OpenPype/pull/2988) -- Maya to Unreal: Static and Skeletal Meshes [\#2978](https://github.com/pypeclub/OpenPype/pull/2978) **🚀 Enhancements** @@ -80,14 +94,11 @@ - Console Interpreter: Changed how console splitter size are reused on show [\#3016](https://github.com/pypeclub/OpenPype/pull/3016) - Deadline: Use more suitable name for sequence review logic [\#3015](https://github.com/pypeclub/OpenPype/pull/3015) - General: default workfile subset name for workfile [\#3011](https://github.com/pypeclub/OpenPype/pull/3011) -- Nuke: add concurrency attr to deadline job [\#3005](https://github.com/pypeclub/OpenPype/pull/3005) - Deadline: priority configurable in Maya jobs [\#2995](https://github.com/pypeclub/OpenPype/pull/2995) -- Workfiles tool: Save as published workfiles [\#2937](https://github.com/pypeclub/OpenPype/pull/2937) **🐛 Bug fixes** - Deadline: Fixed default value of use sequence for review [\#3033](https://github.com/pypeclub/OpenPype/pull/3033) -- Settings UI: Version column can be extended so version are visible [\#3032](https://github.com/pypeclub/OpenPype/pull/3032) - General: Fix validate asset docs plug-in filename and class name [\#3029](https://github.com/pypeclub/OpenPype/pull/3029) - General: Fix import after movements [\#3028](https://github.com/pypeclub/OpenPype/pull/3028) - Harmony: Added creating subset name for workfile from template [\#3024](https://github.com/pypeclub/OpenPype/pull/3024) @@ -99,10 +110,6 @@ - Nuke: fixing unicode type detection in effect loaders [\#3002](https://github.com/pypeclub/OpenPype/pull/3002) - Nuke: removing redundant Ftrack asset when farm publishing [\#2996](https://github.com/pypeclub/OpenPype/pull/2996) -**🔀 Refactored code** - -- General: Move plugins register and discover [\#2935](https://github.com/pypeclub/OpenPype/pull/2935) - **Merged pull requests:** - Maya: Allow to select invalid camera contents if no cameras found [\#3030](https://github.com/pypeclub/OpenPype/pull/3030) @@ -120,19 +127,17 @@ **🆕 New features** - nuke: bypass baking [\#2992](https://github.com/pypeclub/OpenPype/pull/2992) +- Maya to Unreal: Static and Skeletal Meshes [\#2978](https://github.com/pypeclub/OpenPype/pull/2978) **🚀 Enhancements** +- Nuke: add concurrency attr to deadline job [\#3005](https://github.com/pypeclub/OpenPype/pull/3005) - Photoshop: create image without instance [\#3001](https://github.com/pypeclub/OpenPype/pull/3001) - TVPaint: Render scene family [\#3000](https://github.com/pypeclub/OpenPype/pull/3000) - Nuke: ReviewDataMov Read RAW attribute [\#2985](https://github.com/pypeclub/OpenPype/pull/2985) -- SiteSync: Added compute\_resource\_sync\_sites to sync\_server\_module [\#2983](https://github.com/pypeclub/OpenPype/pull/2983) - General: `METADATA\_KEYS` constant as `frozenset` for optimal immutable lookup [\#2980](https://github.com/pypeclub/OpenPype/pull/2980) - General: Tools with host filters [\#2975](https://github.com/pypeclub/OpenPype/pull/2975) - Hero versions: Use custom templates [\#2967](https://github.com/pypeclub/OpenPype/pull/2967) -- Slack: Added configurable maximum file size of review upload to Slack [\#2945](https://github.com/pypeclub/OpenPype/pull/2945) -- NewPublisher: Prepared implementation of optional pyblish plugin [\#2943](https://github.com/pypeclub/OpenPype/pull/2943) -- TVPaint: Extractor to convert PNG into EXR [\#2942](https://github.com/pypeclub/OpenPype/pull/2942) **🐛 
Bug fixes** @@ -148,14 +153,6 @@ - General: OIIO conversion for ffmeg can handle sequences [\#2958](https://github.com/pypeclub/OpenPype/pull/2958) - Settings: Conditional dictionary avoid invalid logs [\#2956](https://github.com/pypeclub/OpenPype/pull/2956) - General: Smaller fixes and typos [\#2950](https://github.com/pypeclub/OpenPype/pull/2950) -- LogViewer: Don't refresh on initialization [\#2949](https://github.com/pypeclub/OpenPype/pull/2949) -- nuke: python3 compatibility issue with `iteritems` [\#2948](https://github.com/pypeclub/OpenPype/pull/2948) -- General: anatomy data with correct task short key [\#2947](https://github.com/pypeclub/OpenPype/pull/2947) -- SceneInventory: Fix imports in UI [\#2944](https://github.com/pypeclub/OpenPype/pull/2944) -- Slack: add generic exception [\#2941](https://github.com/pypeclub/OpenPype/pull/2941) -- General: Python specific vendor paths on env injection [\#2939](https://github.com/pypeclub/OpenPype/pull/2939) -- General: More fail safe delete old versions [\#2936](https://github.com/pypeclub/OpenPype/pull/2936) -- Settings UI: Collapsed of collapsible wrapper works as expected [\#2934](https://github.com/pypeclub/OpenPype/pull/2934) **Merged pull requests:** diff --git a/openpype/version.py b/openpype/version.py index 9e2525e3b8..662adf28ca 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.10.0-nightly.1" +__version__ = "3.10.0-nightly.2" diff --git a/pyproject.toml b/pyproject.toml index 4c65ac9bda..f32e385e80 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "OpenPype" -version = "3.10.0-nightly.1" # OpenPype +version = "3.10.0-nightly.2" # OpenPype description = "Open VFX and Animation pipeline with support." authors = ["OpenPype Team "] license = "MIT License" From a7b3a85712b1a0ee29d547dc5ddacc9dab80e160 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 25 Apr 2022 11:55:34 +0200 Subject: [PATCH 288/357] added sequence handling to files widget --- openpype/lib/attribute_definitions.py | 24 ++++++- .../widgets/attribute_defs/files_widget.py | 63 ++++++++----------- openpype/widgets/attribute_defs/widgets.py | 4 +- 3 files changed, 52 insertions(+), 39 deletions(-) diff --git a/openpype/lib/attribute_definitions.py b/openpype/lib/attribute_definitions.py index 3d17818ecb..2cf1706b78 100644 --- a/openpype/lib/attribute_definitions.py +++ b/openpype/lib/attribute_definitions.py @@ -3,6 +3,7 @@ import re import collections import uuid import json +import copy from abc import ABCMeta, abstractmethod import six @@ -438,9 +439,23 @@ class FileDef(AbtractAttrDef): default(str, list): Defautl value. 
""" + default_sequence_extensions = [ + ".ani", ".anim", ".apng", ".art", ".bmp", ".bpg", ".bsave", + ".cal", ".cin", ".cpc", ".cpt", ".dds", ".dpx", ".ecw", ".exr", + ".fits", ".flic", ".flif", ".fpx", ".gif", ".hdri", ".hevc", + ".icer", ".icns", ".ico", ".cur", ".ics", ".ilbm", ".jbig", + ".jbig2", ".jng", ".jpeg", ".jpeg-ls", ".2000", ".jpg", ".xr", + ".jpeg-hdr", ".kra", ".mng", ".miff", ".nrrd", + ".ora", ".pam", ".pbm", ".pgm", ".ppm", ".pnm", ".pcx", ".pgf", + ".pictor", ".png", ".psb", ".psp", ".qtvr", ".ras", + ".rgbe", ".logluv", ".tiff", ".sgi", ".tga", ".tiff", + ".tiff/ep", ".tiff/it", ".ufo", ".ufp", ".wbmp", ".webp", + ".xbm", ".xcf", ".xpm", ".xwd" + ] + def __init__( self, key, multipath=False, folders=None, extensions=None, - default=None, **kwargs + sequence_extensions=None, default=None, **kwargs ): if folders is None and extensions is None: folders = True @@ -479,9 +494,13 @@ class FileDef(AbtractAttrDef): is_label_horizontal = False kwargs["is_label_horizontal"] = is_label_horizontal + if sequence_extensions is None: + sequence_extensions = self.default_sequence_extensions + self.multipath = multipath self.folders = folders - self.extensions = extensions + self.extensions = set(extensions) + self.sequence_extensions = set(sequence_extensions) super(FileDef, self).__init__(key, default=default, **kwargs) def __eq__(self, other): @@ -492,6 +511,7 @@ class FileDef(AbtractAttrDef): self.multipath == other.multipath and self.folders == other.folders and self.extensions == other.extensions + and self.sequence_extensions == self.sequence_extensions ) def convert_value(self, value): diff --git a/openpype/widgets/attribute_defs/files_widget.py b/openpype/widgets/attribute_defs/files_widget.py index af00ffe5ad..ffdc730455 100644 --- a/openpype/widgets/attribute_defs/files_widget.py +++ b/openpype/widgets/attribute_defs/files_widget.py @@ -73,32 +73,19 @@ class DropEmpty(QtWidgets.QWidget): class FilesModel(QtGui.QStandardItemModel): - sequence_exts = [ - ".ani", ".anim", ".apng", ".art", ".bmp", ".bpg", ".bsave", ".cal", - ".cin", ".cpc", ".cpt", ".dds", ".dpx", ".ecw", ".exr", ".fits", - ".flic", ".flif", ".fpx", ".gif", ".hdri", ".hevc", ".icer", - ".icns", ".ico", ".cur", ".ics", ".ilbm", ".jbig", ".jbig2", - ".jng", ".jpeg", ".jpeg-ls", ".2000", ".jpg", ".xr", - ".jpeg-hdr", ".kra", ".mng", ".miff", ".nrrd", - ".ora", ".pam", ".pbm", ".pgm", ".ppm", ".pnm", ".pcx", ".pgf", - ".pictor", ".png", ".psb", ".psp", ".qtvr", ".ras", - ".rgbe", ".logluv", ".tiff", ".sgi", ".tga", ".tiff", ".tiff/ep", - ".tiff/it", ".ufo", ".ufp", ".wbmp", ".webp", ".xbm", ".xcf", - ".xpm", ".xwd" - ] - - def __init__(self, multivalue): + def __init__(self, allow_multiple_items, sequence_exts): super(FilesModel, self).__init__() self._filenames_by_dirpath = collections.defaultdict(set) self._items_by_dirpath = collections.defaultdict(list) - self._multivalue = multivalue + self._allow_multiple_items = allow_multiple_items + self.sequence_exts = sequence_exts def add_filepaths(self, filepaths): if not filepaths: return - if not self._multivalue: + if not self._allow_multiple_items: filepaths = [filepaths[0]] item_ids = [] for items in self._items_by_dirpath.values(): @@ -281,6 +268,17 @@ class FilesProxyModel(QtCore.QSortFilterProxyModel): self._allowed_extensions = extensions self.invalidateFilter() + def are_valid_files(self, filepaths): + for filepath in filepaths: + if os.path.isfile(filepath): + _, ext = os.path.splitext(filepath) + if ext in self._allowed_extensions: + return True + + elif 
self._allow_folders: + return True + return False + def filterAcceptsRow(self, row, parent_index): model = self.sourceModel() index = model.index(row, self.filterKeyColumn(), parent_index) @@ -385,13 +383,13 @@ class FilesView(QtWidgets.QListView): class FilesWidget(QtWidgets.QFrame): value_changed = QtCore.Signal() - def __init__(self, multiselect, parent): + def __init__(self, allow_multiple_items, sequence_exts, parent): super(FilesWidget, self).__init__(parent) self.setAcceptDrops(True) empty_widget = DropEmpty(self) - files_model = FilesModel(multiselect) + files_model = FilesModel(allow_multiple_items, sequence_exts) files_proxy_model = FilesProxyModel() files_proxy_model.setSourceModel(files_model) files_view = FilesView(self) @@ -406,13 +404,9 @@ class FilesWidget(QtWidgets.QFrame): files_proxy_model.rowsInserted.connect(self._on_rows_inserted) files_proxy_model.rowsRemoved.connect(self._on_rows_removed) - drag_label = DragLabel() - drag_label.setVisible(False) - - self._drag_label = drag_label - self._in_set_value = False + self._allow_multiple_items = allow_multiple_items self._empty_widget = empty_widget self._files_model = files_model self._files_proxy_model = files_proxy_model @@ -544,8 +538,15 @@ class FilesWidget(QtWidgets.QFrame): def dragEnterEvent(self, event): mime_data = event.mimeData() if mime_data.hasUrls(): - event.setDropAction(QtCore.Qt.CopyAction) - event.accept() + filepaths = [] + for url in mime_data.urls(): + filepath = url.toLocalFile() + if os.path.exists(filepath): + filepaths.append(filepath) + + if self._files_proxy_model.are_valid_files(filepaths): + event.setDropAction(QtCore.Qt.CopyAction) + event.accept() def dragLeaveEvent(self, event): event.accept() @@ -574,13 +575,3 @@ class FilesWidget(QtWidgets.QFrame): files_exists = self._files_proxy_model.rowCount() > 0 self._files_view.setVisible(files_exists) self._empty_widget.setVisible(not files_exists) - - -class DragLabel(QtWidgets.QWidget): - def __init__(self, parent=None): - super(DragLabel, self).__init__(parent) - - t_label = QtWidgets.QLabel("TESTING", self) - layout = QtWidgets.QHBoxLayout(self) - layout.addWidget(t_label) - self._t_label = t_label diff --git a/openpype/widgets/attribute_defs/widgets.py b/openpype/widgets/attribute_defs/widgets.py index 83eeaea61f..d3f53de032 100644 --- a/openpype/widgets/attribute_defs/widgets.py +++ b/openpype/widgets/attribute_defs/widgets.py @@ -340,7 +340,9 @@ class FileAttrWidget(_BaseAttrDefWidget): def _ui_init(self): self.multipath = self.attr_def.multipath - input_widget = FilesWidget(self.multipath, self) + input_widget = FilesWidget( + self.multipath, self.attr_def.sequence_extensions, self + ) if self.attr_def.tooltip: input_widget.setToolTip(self.attr_def.tooltip) From 0b7cdeee840650816fd379066915b53c1a9f58fa Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 25 Apr 2022 12:12:43 +0200 Subject: [PATCH 289/357] disable always on top flags --- openpype/tools/traypublisher/window.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/openpype/tools/traypublisher/window.py b/openpype/tools/traypublisher/window.py index a550c88ead..306b567acd 100644 --- a/openpype/tools/traypublisher/window.py +++ b/openpype/tools/traypublisher/window.py @@ -108,6 +108,13 @@ class TrayPublishWindow(PublisherWindow): def __init__(self, *args, **kwargs): super(TrayPublishWindow, self).__init__(reset_on_show=False) + flags = self.windowFlags() + # Disable always on top hint + if flags & QtCore.Qt.WindowStaysOnTopHint: + flags ^= QtCore.Qt.WindowStaysOnTopHint + + 
self.setWindowFlags(flags) + overlay_widget = StandaloneOverlayWidget(self) btns_widget = QtWidgets.QWidget(self) From 3f976f00d2ef46612a770a96bacde8d8d3df039d Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 25 Apr 2022 12:30:42 +0200 Subject: [PATCH 290/357] OP-2951 - refactored Validate projects Checks also if set in DB and not physically present --- .../modules/sync_server/sync_server_module.py | 83 ++++++++++--------- 1 file changed, 44 insertions(+), 39 deletions(-) diff --git a/openpype/modules/sync_server/sync_server_module.py b/openpype/modules/sync_server/sync_server_module.py index fb81791da2..7afbdc2e9b 100644 --- a/openpype/modules/sync_server/sync_server_module.py +++ b/openpype/modules/sync_server/sync_server_module.py @@ -212,36 +212,38 @@ class SyncServerModule(OpenPypeModule, ITrayModule): def create_validate_project_task(self, collection, site_name): """Adds metadata about project files validation on a queue. - This process will loop through all representation and check if - their files actually exist on an active site. + This process will loop through all representation and check if + their files actually exist on an active site. - This might be useful for edge cases when artists is switching - between sites, remote site is actually physically mounted and - active site has same file urls etc. + It also checks if site is set in DB, but file is physically not + present - Task will run on a asyncio loop, shouldn't be blocking. + This might be useful for edge cases when artists is switching + between sites, remote site is actually physically mounted and + active site has same file urls etc. + + Task will run on a asyncio loop, shouldn't be blocking. """ task = { "type": "validate", "project_name": collection, - "func": lambda: self.validate_project(collection, site_name) + "func": lambda: self.validate_project(collection, site_name, + reset_missing=True) } self.projects_processed.add(collection) self.long_running_tasks.append(task) - def validate_project(self, collection, site_name, remove_missing=False): - """ - Validate 'collection' of 'site_name' and its local files + def validate_project(self, collection, site_name, reset_missing=False): + """Validate 'collection' of 'site_name' and its local files - If file present and not marked with a 'site_name' in DB, DB is - updated with site name and file modified date. + If file present and not marked with a 'site_name' in DB, DB is + updated with site name and file modified date. 
- Args: - module (SyncServerModule) - collection (string): project name - site_name (string): active site name - remove_missing (bool): if True remove sites in DB if missing - physically + Args: + collection (string): project name + site_name (string): active site name + reset_missing (bool): if True reset site in DB if missing + physically """ self.log.debug("Validation of {} for {} started".format(collection, site_name)) @@ -256,29 +258,32 @@ class SyncServerModule(OpenPypeModule, ITrayModule): return sites_added = 0 - sites_removed = 0 + sites_reset = 0 for repre in representations: repre_id = repre["_id"] for repre_file in repre.get("files", []): try: - has_site = site_name in [site["name"] - for site in repre_file["sites"]] - except TypeError: + is_on_site = site_name in [site["name"] + for site in repre_file["sites"] + if (site.get("created_dt") and + not site.get("error"))] + except (TypeError, AttributeError): self.log.debug("Structure error in {}".format(repre_id)) continue - if has_site and not remove_missing: - continue - file_path = repre_file.get("path", "") local_file_path = self.get_local_file_path(collection, site_name, file_path) - if local_file_path and os.path.exists(local_file_path): - self.log.debug("Adding site {} for {}".format(site_name, - repre_id)) - if not has_site: + file_exists = (local_file_path and + os.path.exists(local_file_path)) + if not is_on_site: + if file_exists: + self.log.debug( + "Adding site {} for {}".format(site_name, + repre_id)) + query = { "_id": repre_id } @@ -288,25 +293,25 @@ class SyncServerModule(OpenPypeModule, ITrayModule): "created_dt": created_dt} self._add_site(collection, query, repre, elem, site_name=site_name, - file_id=repre_file["_id"]) + file_id=repre_file["_id"], + force=True) sites_added += 1 else: - if has_site and remove_missing: - self.log.debug("Removing site {} for {}". + if not file_exists and reset_missing: + self.log.debug("Resetting site {} for {}". 
format(site_name, repre_id))
-                        self.reset_provider_for_file(collection,
-                                                     repre_id,
-                                                     file_id=repre_file["_id"],
-                                                     remove=True)
-                        sites_removed += 1
+                        self.reset_site_on_representation(
+                            collection, repre_id, site_name=site_name,
+                            file_id=repre_file["_id"])
+                        sites_reset += 1
 
             if sites_added % 100 == 0:
                 self.log.debug("Sites added {}".format(sites_added))
 
         self.log.debug("Validation of {} for {} ended".format(collection,
                                                               site_name))
-        self.log.info("Sites added {}, sites removed {}".format(sites_added,
-                                                                sites_removed))
+        self.log.info("Sites added {}, sites reset {}".format(sites_added,
+                                                              sites_reset))
 
     def pause_representation(self, collection, representation_id, site_name):
         """

From 581647a65cf912c09eb79684af40e49fffe60bde Mon Sep 17 00:00:00 2001
From: Petr Kalis
Date: Mon, 25 Apr 2022 13:33:23 +0200
Subject: [PATCH 291/357] OP-2951 - safer querying of tries

---
 openpype/modules/sync_server/sync_server_module.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/openpype/modules/sync_server/sync_server_module.py b/openpype/modules/sync_server/sync_server_module.py
index 7afbdc2e9b..ccd0fd111d 100644
--- a/openpype/modules/sync_server/sync_server_module.py
+++ b/openpype/modules/sync_server/sync_server_module.py
@@ -1772,7 +1772,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
             (int) - number of failed attempts
         """
         _, rec = self._get_site_rec(file.get("sites", []), provider)
-        return rec.get("tries", 0)
+        return self._get_tries_count_from_rec(rec)

From ed98bbcd322dd98aade17e229e745ed890e3c85a Mon Sep 17 00:00:00 2001
From: Jakub Trllo
Date: Mon, 25 Apr 2022 14:54:19 +0200
Subject: [PATCH 292/357] changed how files widget works with extensions and handle file items

---
 openpype/lib/__init__.py                   |   2 +
 openpype/lib/attribute_definitions.py      |  83 ++++++-
 .../widgets/attribute_defs/files_widget.py | 214 +++++-------------
 3 files changed, 141 insertions(+), 158 deletions(-)

diff --git a/openpype/lib/__init__.py b/openpype/lib/__init__.py
index b57e469f5b..d053ec8636 100644
--- a/openpype/lib/__init__.py
+++ b/openpype/lib/__init__.py
@@ -42,6 +42,7 @@ from .attribute_definitions import (
     EnumDef,
     BoolDef,
     FileDef,
+    FileDefItem,
 )
 
 from .env_tools import (
@@ -266,6 +267,7 @@ __all__ = [
     "EnumDef",
     "BoolDef",
     "FileDef",
+    "FileDefItem",
 
     "import_filepath",
     "modules_from_path",
diff --git a/openpype/lib/attribute_definitions.py b/openpype/lib/attribute_definitions.py
index 2cf1706b78..6e754e6668 100644
--- a/openpype/lib/attribute_definitions.py
+++ b/openpype/lib/attribute_definitions.py
@@ -334,6 +334,61 @@ class FileDefItem(object):
             os.path.join(self.directory, filename)
         )
 
+    @property
+    def label(self):
+        if not self.is_sequence:
+            return self.filenames[0]
+
+        frame_start = self.frames[0]
+        filename_template = os.path.basename(self.template)
+        if len(self.frames) == 1:
+            return "{} [{}]".format(filename_template, frame_start)
+
+        frame_end = self.frames[-1]
+        expected_len = (frame_end - frame_start) + 1
+        if expected_len == len(self.frames):
+            return "{} [{}-{}]".format(
+                filename_template, frame_start, frame_end
+            )
+
+        ranges = []
+        _frame_start = None
+        _frame_end = None
+        for frame in range(frame_start, frame_end + 1):
+            if frame not in self.frames:
+                add_to_ranges = _frame_start is not None
+            elif _frame_start is None:
+                _frame_start = _frame_end = frame
+                add_to_ranges = frame == frame_end
+            else:
+                _frame_end = frame
+                add_to_ranges = frame == frame_end
+
+            if add_to_ranges:
+                if _frame_start != _frame_end:
+                    _range = 
"{}-{}".format(_frame_start, _frame_end) + else: + _range = str(_frame_start) + ranges.append(_range) + _frame_start = _frame_end = None + return "{} [{}]".format( + filename_template, ",".join(ranges) + ) + + @property + def ext(self): + _, ext = os.path.splitext(self.filenames[0]) + if ext: + return ext + return None + + @property + def is_dir(self): + # QUESTION a better way how to define folder (in init argument?) + if self.ext: + return False + return True + def set_directory(self, directory): self.directory = directory @@ -357,23 +412,30 @@ class FileDefItem(object): return cls("", "") @classmethod - def from_value(cls, value): + def from_value(cls, value, sequence_extensions): multi = isinstance(value, (list, tuple, set)) if not multi: value = [value] output = [] + str_filepaths = [] for item in value: - if isinstance(item, dict): + if isinstance(item, FileDefItem): + output.append(item) + elif isinstance(item, dict): output.append(cls.from_dict(item)) elif isinstance(item, six.string_types): - output.extend(cls.from_paths([item])) + str_filepaths.append(item) else: raise TypeError( "Unknown type \"{}\". Can't convert to {}".format( str(type(item)), cls.__name__ ) ) + + if str_filepaths: + output.extend(cls.from_paths(str_filepaths, sequence_extensions)) + if multi: return output return output[0] @@ -388,7 +450,7 @@ class FileDefItem(object): ) @classmethod - def from_paths(cls, paths): + def from_paths(cls, paths, sequence_extensions): filenames_by_dir = collections.defaultdict(list) for path in paths: normalized = os.path.normpath(path) @@ -397,7 +459,18 @@ class FileDefItem(object): output = [] for directory, filenames in filenames_by_dir.items(): - cols, remainders = clique.assemble(filenames) + filtered_filenames = [] + for filename in filenames: + _, ext = os.path.splitext(filename) + if ext in sequence_extensions: + filtered_filenames.append(filename) + else: + output.append(cls(directory, [filename])) + + if not filtered_filenames: + continue + + cols, remainders = clique.assemble(filtered_filenames) for remainder in remainders: output.append(cls(directory, [remainder])) diff --git a/openpype/widgets/attribute_defs/files_widget.py b/openpype/widgets/attribute_defs/files_widget.py index ffdc730455..6e2ab6e4f2 100644 --- a/openpype/widgets/attribute_defs/files_widget.py +++ b/openpype/widgets/attribute_defs/files_widget.py @@ -2,8 +2,10 @@ import os import collections import uuid import clique +import six from Qt import QtWidgets, QtCore, QtGui +from openpype.lib import FileDefItem from openpype.tools.utils import paint_image_with_color # TODO change imports from openpype.tools.resources import ( @@ -75,174 +77,76 @@ class DropEmpty(QtWidgets.QWidget): class FilesModel(QtGui.QStandardItemModel): def __init__(self, allow_multiple_items, sequence_exts): super(FilesModel, self).__init__() + + self._allow_multiple_items = allow_multiple_items + self._sequence_exts = sequence_exts + + self._items_by_id = {} + self._file_items_by_id = {} self._filenames_by_dirpath = collections.defaultdict(set) self._items_by_dirpath = collections.defaultdict(list) - self._allow_multiple_items = allow_multiple_items - self.sequence_exts = sequence_exts + def add_filepaths(self, items): + if not items: + return - def add_filepaths(self, filepaths): - if not filepaths: + obj_items = FileDefItem.from_value(items, self._sequence_exts) + if not obj_items: return if not self._allow_multiple_items: - filepaths = [filepaths[0]] - item_ids = [] - for items in self._items_by_dirpath.values(): - for item in 
items: - item_id = item.data(ITEM_ID_ROLE) - if item_id: - item_ids.append(item_id) + obj_items = [obj_items[0]] + current_ids = list(self._file_items_by_id.keys()) + if current_ids: + self.remove_item_by_ids(current_ids) - if item_ids: - self.remove_item_by_ids(item_ids) + new_model_items = [] + for obj_item in obj_items: + _, ext = os.path.splitext(obj_item.filenames[0]) + if ext: + icon_pixmap = get_pixmap(filename="file.png") + else: + icon_pixmap = get_pixmap(filename="folder.png") - new_dirpaths = set() - for filepath in filepaths: - filename = os.path.basename(filepath) - dirpath = os.path.dirname(filepath) - filenames = self._filenames_by_dirpath[dirpath] - if filename not in filenames: - new_dirpaths.add(dirpath) - filenames.add(filename) - self._refresh_items(new_dirpaths) + item_id, model_item = self._create_item(obj_item, icon_pixmap) + new_model_items.append(model_item) + self._file_items_by_id[item_id] = obj_item + self._items_by_id[item_id] = model_item + + if new_model_items: + self.invisibleRootItem().appendRows(new_model_items) def remove_item_by_ids(self, item_ids): if not item_ids: return - remaining_ids = set(item_ids) - result = collections.defaultdict(list) - for dirpath, items in self._items_by_dirpath.items(): - if not remaining_ids: - break + items = [] + for item_id in set(item_ids): + if item_id not in self._items_by_id: + continue + item = self._items_by_id.pop(item_id) + self._file_items_by_id.pop(item_id) + items.append(item) + + if items: for item in items: - if not remaining_ids: - break - item_id = item.data(ITEM_ID_ROLE) - if item_id in remaining_ids: - remaining_ids.remove(item_id) - result[dirpath].append(item) - - if not result: - return - - dirpaths = set(result.keys()) - for dirpath, items in result.items(): - filenames_cache = self._filenames_by_dirpath[dirpath] - for item in items: - filenames = item.data(FILENAMES_ROLE) - - self._items_by_dirpath[dirpath].remove(item) - self.removeRows(item.row(), 1) - for filename in filenames: - if filename in filenames_cache: - filenames_cache.remove(filename) - - self._refresh_items(dirpaths) - - def _refresh_items(self, dirpaths=None): - if dirpaths is None: - dirpaths = set(self._items_by_dirpath.keys()) - - new_items = [] - for dirpath in dirpaths: - items_to_remove = list(self._items_by_dirpath[dirpath]) - cols, remainders = clique.assemble( - self._filenames_by_dirpath[dirpath] - ) - filtered_cols = [] - for collection in cols: - filenames = set(collection) - valid_col = True - for filename in filenames: - ext = os.path.splitext(filename)[-1] - valid_col = ext in self.sequence_exts - break - - if valid_col: - filtered_cols.append(collection) - else: - for filename in filenames: - remainders.append(filename) - - for filename in remainders: - found = False - for item in items_to_remove: - item_filenames = item.data(FILENAMES_ROLE) - if filename in item_filenames and len(item_filenames) == 1: - found = True - items_to_remove.remove(item) - break - - if found: - continue - - fullpath = os.path.join(dirpath, filename) - if os.path.isdir(fullpath): - icon_pixmap = get_pixmap(filename="folder.png") - else: - icon_pixmap = get_pixmap(filename="file.png") - label = filename - filenames = [filename] - item = self._create_item( - label, filenames, dirpath, icon_pixmap - ) - new_items.append(item) - self._items_by_dirpath[dirpath].append(item) - - for collection in filtered_cols: - filenames = set(collection) - found = False - for item in items_to_remove: - item_filenames = item.data(FILENAMES_ROLE) - if 
item_filenames == filenames: - found = True - items_to_remove.remove(item) - break - - if found: - continue - - col_range = collection.format("{ranges}") - label = "{}<{}>{}".format( - collection.head, col_range, collection.tail - ) - icon_pixmap = get_pixmap(filename="files.png") - item = self._create_item( - label, filenames, dirpath, icon_pixmap - ) - new_items.append(item) - self._items_by_dirpath[dirpath].append(item) - - for item in items_to_remove: - self._items_by_dirpath[dirpath].remove(item) self.removeRows(item.row(), 1) - if new_items: - self.invisibleRootItem().appendRows(new_items) - - def _create_item(self, label, filenames, dirpath, icon_pixmap=None): - first_filename = None - for filename in filenames: - first_filename = filename - break - ext = os.path.splitext(first_filename)[-1] - is_dir = False - if len(filenames) == 1: - filepath = os.path.join(dirpath, first_filename) - is_dir = os.path.isdir(filepath) + def get_file_item_by_id(self, item_id): + return self._file_items_by_id.get(item_id) + def _create_item(self, file_item, icon_pixmap=None): item = QtGui.QStandardItem() - item.setData(str(uuid.uuid4()), ITEM_ID_ROLE) - item.setData(label, ITEM_LABEL_ROLE) - item.setData(filenames, FILENAMES_ROLE) - item.setData(dirpath, DIRPATH_ROLE) + item_id = str(uuid.uuid4()) + item.setData(item_id, ITEM_ID_ROLE) + item.setData(file_item.label, ITEM_LABEL_ROLE) + item.setData(file_item.filenames, FILENAMES_ROLE) + item.setData(file_item.directory, DIRPATH_ROLE) item.setData(icon_pixmap, ITEM_ICON_ROLE) - item.setData(ext, EXT_ROLE) - item.setData(is_dir, IS_DIR_ROLE) + item.setData(file_item.ext, EXT_ROLE) + item.setData(file_item.is_dir, IS_DIR_ROLE) - return item + return item_id, item class FilesProxyModel(QtCore.QSortFilterProxyModel): @@ -344,6 +248,7 @@ class ItemWidget(QtWidgets.QWidget): class FilesView(QtWidgets.QListView): """View showing instances and their groups.""" + def __init__(self, *args, **kwargs): super(FilesView, self).__init__(*args, **kwargs) @@ -439,14 +344,17 @@ class FilesWidget(QtWidgets.QFrame): def current_value(self): model = self._files_proxy_model - filepaths = set() + item_ids = set() for row in range(model.rowCount()): index = model.index(row, 0) - dirpath = index.data(DIRPATH_ROLE) - filenames = index.data(FILENAMES_ROLE) - for filename in filenames: - filepaths.add(os.path.join(dirpath, filename)) - return list(filepaths) + item_ids.add(index.data(ITEM_ID_ROLE)) + + file_items = [] + for item_id in item_ids: + file_item = self._files_model.get_file_item_by_id(item_id) + if file_item is not None: + file_items.append(file_item.to_dict()) + return file_items def set_filters(self, folders_allowed, exts_filter): self._files_proxy_model.set_allow_folders(folders_allowed) From 7fe279fda5ff295e9673d6d54366f8a0dd662b1b Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 25 Apr 2022 15:10:18 +0200 Subject: [PATCH 293/357] added missing plugins file --- openpype/hosts/traypublisher/api/plugin.py | 104 +++++++++++++++++++++ 1 file changed, 104 insertions(+) create mode 100644 openpype/hosts/traypublisher/api/plugin.py diff --git a/openpype/hosts/traypublisher/api/plugin.py b/openpype/hosts/traypublisher/api/plugin.py new file mode 100644 index 0000000000..6907450b15 --- /dev/null +++ b/openpype/hosts/traypublisher/api/plugin.py @@ -0,0 +1,104 @@ +from openpype.pipeline import ( + Creator, + CreatedInstance +) +from openpype.lib import ( + FileDef, + BoolDef, +) + +from .pipeline import ( + list_instances, + update_instances, + remove_instances, + 
HostContext,
+)
+
+
+class TrayPublishCreator(Creator):
+    create_allow_context_change = True
+
+    def collect_instances(self):
+        for instance_data in list_instances():
+            creator_id = instance_data.get("creator_identifier")
+            if creator_id == self.identifier:
+                instance = CreatedInstance.from_existing(
+                    instance_data, self
+                )
+                self._add_instance_to_context(instance)
+
+    def update_instances(self, update_list):
+        update_instances(update_list)
+
+    def remove_instances(self, instances):
+        remove_instances(instances)
+        for instance in instances:
+            self._remove_instance_from_context(instance)
+
+    def get_pre_create_attr_defs(self):
+        # Use same attributes as for instance attributes
+        return self.get_instance_attr_defs()
+
+
+class SettingsCreator(TrayPublishCreator):
+    create_allow_context_change = True
+
+    enable_review = False
+    extensions = []
+    sequence_extensions = []
+
+    def collect_instances(self):
+        for instance_data in list_instances():
+            creator_id = instance_data.get("creator_identifier")
+            if creator_id == self.identifier:
+                instance = CreatedInstance.from_existing(
+                    instance_data, self
+                )
+                self._add_instance_to_context(instance)
+
+    def create(self, subset_name, data, pre_create_data):
+        # Pass precreate data to creator attributes
+        data["creator_attributes"] = pre_create_data
+        # Create new instance
+        new_instance = CreatedInstance(self.family, subset_name, data, self)
+        # Host implementation of storing metadata about instance
+        HostContext.add_instance(new_instance.data_to_store())
+        # Add instance to current context
+        self._add_instance_to_context(new_instance)
+
+    def get_instance_attr_defs(self):
+        output = []
+
+        file_def = FileDef(
+            "filepath",
+            folders=False,
+            extensions=self.extensions,
+            sequence_extensions=self.sequence_extensions,
+            label="Filepath"
+        )
+        output.append(file_def)
+        if self.enable_review:
+            output.append(BoolDef("review", label="Review"))
+        return output
+
+    @classmethod
+    def from_settings(cls, item_data):
+        identifier = item_data["identifier"]
+        family = item_data["family"]
+        if not identifier:
+            identifier = "settings_{}".format(family)
+        return type(
+            "{}{}".format(cls.__name__, identifier),
+            (cls, ),
+            {
+                "family": family,
+                "identifier": identifier,
+                "label": item_data["label"].strip(),
+                "icon": item_data["icon"],
+                "description": item_data["description"],
+                "enable_review": item_data["enable_review"],
+                "extensions": item_data["extensions"],
+                "sequence_extensions": item_data["sequence_extensions"],
+                "default_variants": item_data["default_variants"]
+            }
+        )

From 2efd8b774cf4e24823fd98225fd277a2353f273c Mon Sep 17 00:00:00 2001
From: Jakub Trllo
Date: Mon, 25 Apr 2022 15:24:37 +0200
Subject: [PATCH 294/357] changed multi item to single item

---
 openpype/hosts/traypublisher/api/plugin.py |  2 +-
 openpype/lib/attribute_definitions.py      | 43 ++++++++++---------
 .../widgets/attribute_defs/files_widget.py | 11 +++--
 openpype/widgets/attribute_defs/widgets.py |  4 +-
 4 files changed, 29 insertions(+), 31 deletions(-)

diff --git a/openpype/hosts/traypublisher/api/plugin.py b/openpype/hosts/traypublisher/api/plugin.py
index 6907450b15..d31e0a1ef7 100644
--- a/openpype/hosts/traypublisher/api/plugin.py
+++ b/openpype/hosts/traypublisher/api/plugin.py
@@ -74,7 +74,7 @@ class SettingsCreator(TrayPublishCreator):
             folders=False,
             extensions=self.extensions,
             sequence_extensions=self.sequence_extensions,
-            label="Filepath"
+            label="Filepath",
         )
         output.append(file_def)
         if self.enable_review:
diff --git a/openpype/lib/attribute_definitions.py 
b/openpype/lib/attribute_definitions.py index 6e754e6668..7a00fcdeb4 100644 --- a/openpype/lib/attribute_definitions.py +++ b/openpype/lib/attribute_definitions.py @@ -505,7 +505,7 @@ class FileDef(AbtractAttrDef): It is possible to define filters of allowed file extensions and if supports folders. Args: - multipath(bool): Allow multiple path. + single_item(bool): Allow only single path item. folders(bool): Allow folder paths. extensions(list): Allow files with extensions. Empty list will allow all extensions and None will disable files completely. @@ -527,7 +527,7 @@ class FileDef(AbtractAttrDef): ] def __init__( - self, key, multipath=False, folders=None, extensions=None, + self, key, single_item=True, folders=None, extensions=None, sequence_extensions=None, default=None, **kwargs ): if folders is None and extensions is None: @@ -535,19 +535,12 @@ class FileDef(AbtractAttrDef): extensions = [] if default is None: - if multipath: - default = [] - else: + if single_item: default = FileDefItem.create_empty_item().to_dict() - else: - if multipath: - if not isinstance(default, (tuple, list, set)): - raise TypeError(( - "'default' argument must be 'list', 'tuple' or 'set'" - ", not '{}'" - ).format(type(default))) - else: + default = [] + else: + if single_item: if isinstance(default, dict): FileDefItem.from_dict(default) @@ -559,18 +552,26 @@ class FileDef(AbtractAttrDef): "'default' argument must be 'str' or 'dict' not '{}'" ).format(type(default))) + else: + if not isinstance(default, (tuple, list, set)): + raise TypeError(( + "'default' argument must be 'list', 'tuple' or 'set'" + ", not '{}'" + ).format(type(default))) + # Change horizontal label is_label_horizontal = kwargs.get("is_label_horizontal") if is_label_horizontal is None: - is_label_horizontal = True - if multipath: + if single_item: + is_label_horizontal = True + else: is_label_horizontal = False kwargs["is_label_horizontal"] = is_label_horizontal if sequence_extensions is None: sequence_extensions = self.default_sequence_extensions - self.multipath = multipath + self.single_item = single_item self.folders = folders self.extensions = set(extensions) self.sequence_extensions = set(sequence_extensions) @@ -581,7 +582,7 @@ class FileDef(AbtractAttrDef): return False return ( - self.multipath == other.multipath + self.single_item == other.single_item and self.folders == other.folders and self.extensions == other.extensions and self.sequence_extensions == self.sequence_extensions @@ -611,13 +612,13 @@ class FileDef(AbtractAttrDef): for file_item in file_items ]) - if self.multipath: + if not self.single_item: return dict_items if not dict_items: return self.default return dict_items[0] - if self.multipath: - return [] - return FileDefItem.create_empty_item().to_dict() + if self.single_item: + return FileDefItem.create_empty_item().to_dict() + return [] diff --git a/openpype/widgets/attribute_defs/files_widget.py b/openpype/widgets/attribute_defs/files_widget.py index 6e2ab6e4f2..e41387e0e5 100644 --- a/openpype/widgets/attribute_defs/files_widget.py +++ b/openpype/widgets/attribute_defs/files_widget.py @@ -75,10 +75,10 @@ class DropEmpty(QtWidgets.QWidget): class FilesModel(QtGui.QStandardItemModel): - def __init__(self, allow_multiple_items, sequence_exts): + def __init__(self, single_item, sequence_exts): super(FilesModel, self).__init__() - self._allow_multiple_items = allow_multiple_items + self._single_item = single_item self._sequence_exts = sequence_exts self._items_by_id = {} @@ -94,7 +94,7 @@ class 
FilesModel(QtGui.QStandardItemModel): if not obj_items: return - if not self._allow_multiple_items: + if self._single_item: obj_items = [obj_items[0]] current_ids = list(self._file_items_by_id.keys()) if current_ids: @@ -288,13 +288,13 @@ class FilesView(QtWidgets.QListView): class FilesWidget(QtWidgets.QFrame): value_changed = QtCore.Signal() - def __init__(self, allow_multiple_items, sequence_exts, parent): + def __init__(self, single_item, sequence_exts, parent): super(FilesWidget, self).__init__(parent) self.setAcceptDrops(True) empty_widget = DropEmpty(self) - files_model = FilesModel(allow_multiple_items, sequence_exts) + files_model = FilesModel(single_item, sequence_exts) files_proxy_model = FilesProxyModel() files_proxy_model.setSourceModel(files_model) files_view = FilesView(self) @@ -311,7 +311,6 @@ class FilesWidget(QtWidgets.QFrame): self._in_set_value = False - self._allow_multiple_items = allow_multiple_items self._empty_widget = empty_widget self._files_model = files_model self._files_proxy_model = files_proxy_model diff --git a/openpype/widgets/attribute_defs/widgets.py b/openpype/widgets/attribute_defs/widgets.py index d3f53de032..62877be4cf 100644 --- a/openpype/widgets/attribute_defs/widgets.py +++ b/openpype/widgets/attribute_defs/widgets.py @@ -338,10 +338,8 @@ class UnknownAttrWidget(_BaseAttrDefWidget): class FileAttrWidget(_BaseAttrDefWidget): def _ui_init(self): - self.multipath = self.attr_def.multipath - input_widget = FilesWidget( - self.multipath, self.attr_def.sequence_extensions, self + self.attr_def.single_item, self.attr_def.sequence_extensions, self ) if self.attr_def.tooltip: From c4e826e77f97191b267421ccf23f9b2401ddc35b Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 25 Apr 2022 17:29:32 +0200 Subject: [PATCH 295/357] added ability to create copy of TemplateResult --- openpype/lib/path_templates.py | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/openpype/lib/path_templates.py b/openpype/lib/path_templates.py index 14e5fe59f8..5c40aa4549 100644 --- a/openpype/lib/path_templates.py +++ b/openpype/lib/path_templates.py @@ -365,6 +365,7 @@ class TemplateResult(str): when value of key in data is dictionary but template expect string of number. 
""" + used_values = None solved = None template = None @@ -383,6 +384,12 @@ class TemplateResult(str): new_obj.invalid_types = invalid_types return new_obj + def __copy__(self, *args, **kwargs): + return self.copy() + + def __deepcopy__(self, *args, **kwargs): + return self.copy() + def validate(self): if not self.solved: raise TemplateUnsolved( @@ -391,6 +398,17 @@ class TemplateResult(str): self.invalid_types ) + def copy(self): + cls = self.__class__ + return cls( + str(self), + self.template, + self.solved, + self.used_values, + self.missing_keys, + self.invalid_types + ) + class TemplatesResultDict(dict): """Holds and wrap TemplateResults for easy bug report.""" From 5e4d618be47515e9088329958ed6ea8aaee9456c Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 25 Apr 2022 18:19:20 +0200 Subject: [PATCH 296/357] fixed new imports of avalon --- openpype/hosts/aftereffects/api/pipeline.py | 12 +++++----- .../plugins/create/create_render.py | 7 +++--- .../plugins/create/workfile_creator.py | 20 ++++++++-------- .../plugins/publish/collect_workfile.py | 3 ++- openpype/hosts/photoshop/api/pipeline.py | 7 +++--- .../photoshop/plugins/create/create_image.py | 6 ++--- .../plugins/create/workfile_creator.py | 23 +++++++++++-------- .../plugins/publish/collect_batch_data.py | 7 +++--- openpype/lib/avalon_context.py | 4 ++-- 9 files changed, 48 insertions(+), 41 deletions(-) diff --git a/openpype/hosts/aftereffects/api/pipeline.py b/openpype/hosts/aftereffects/api/pipeline.py index 3a41b4f26d..0d739df748 100644 --- a/openpype/hosts/aftereffects/api/pipeline.py +++ b/openpype/hosts/aftereffects/api/pipeline.py @@ -13,6 +13,7 @@ from openpype.pipeline import ( deregister_loader_plugin_path, deregister_creator_plugin_path, AVALON_CONTAINER_ID, + legacy_io, ) import openpype.hosts.aftereffects from openpype.lib import register_event_callback @@ -142,9 +143,9 @@ def check_inventory(): outdated_containers = [] for container in host.ls(): representation = container['representation'] - representation_doc = io.find_one( + representation_doc = legacy_io.find_one( { - "_id": io.ObjectId(representation), + "_id": legacy_io.ObjectId(representation), "type": "representation" }, projection={"parent": True} @@ -280,11 +281,10 @@ def update_context_data(data, changes): def get_context_title(): """Returns title for Creator window""" - import avalon.api - project_name = avalon.api.Session["AVALON_PROJECT"] - asset_name = avalon.api.Session["AVALON_ASSET"] - task_name = avalon.api.Session["AVALON_TASK"] + project_name = legacy_io.Session["AVALON_PROJECT"] + asset_name = legacy_io.Session["AVALON_ASSET"] + task_name = legacy_io.Session["AVALON_TASK"] return "{}/{}/{}".format(project_name, asset_name, task_name) diff --git a/openpype/hosts/aftereffects/plugins/create/create_render.py b/openpype/hosts/aftereffects/plugins/create/create_render.py index 78d43d259a..215c148f37 100644 --- a/openpype/hosts/aftereffects/plugins/create/create_render.py +++ b/openpype/hosts/aftereffects/plugins/create/create_render.py @@ -1,12 +1,11 @@ -from avalon import api as avalon_api - from openpype import resources from openpype.lib import BoolDef, UISeparatorDef from openpype.hosts.aftereffects import api from openpype.pipeline import ( Creator, CreatedInstance, - CreatorError + CreatorError, + legacy_io, ) @@ -116,7 +115,7 @@ class RenderCreator(Creator): instance_data.pop("uuid") if not instance_data.get("task"): - instance_data["task"] = avalon_api.Session.get("AVALON_TASK") + instance_data["task"] = 
legacy_io.Session.get("AVALON_TASK") if not instance_data.get("creator_attributes"): is_old_farm = instance_data["family"] != "renderLocal" diff --git a/openpype/hosts/aftereffects/plugins/create/workfile_creator.py b/openpype/hosts/aftereffects/plugins/create/workfile_creator.py index 2d9d42ee8c..7cc9bb54d4 100644 --- a/openpype/hosts/aftereffects/plugins/create/workfile_creator.py +++ b/openpype/hosts/aftereffects/plugins/create/workfile_creator.py @@ -1,9 +1,8 @@ -from avalon import io - import openpype.hosts.aftereffects.api as api from openpype.pipeline import ( AutoCreator, - CreatedInstance + CreatedInstance, + legacy_io, ) @@ -36,13 +35,13 @@ class AEWorkfileCreator(AutoCreator): break variant = '' - project_name = io.Session["AVALON_PROJECT"] - asset_name = io.Session["AVALON_ASSET"] - task_name = io.Session["AVALON_TASK"] - host_name = io.Session["AVALON_APP"] + project_name = legacy_io.Session["AVALON_PROJECT"] + asset_name = legacy_io.Session["AVALON_ASSET"] + task_name = legacy_io.Session["AVALON_TASK"] + host_name = legacy_io.Session["AVALON_APP"] if existing_instance is None: - asset_doc = io.find_one({"type": "asset", "name": asset_name}) + asset_doc = legacy_io.find_one({"type": "asset", "name": asset_name}) subset_name = self.get_subset_name( variant, task_name, asset_doc, project_name, host_name ) @@ -67,7 +66,10 @@ class AEWorkfileCreator(AutoCreator): existing_instance["asset"] != asset_name or existing_instance["task"] != task_name ): - asset_doc = io.find_one({"type": "asset", "name": asset_name}) + asset_doc = legacy_io.find_one({ + "type": "asset", + "name": asset_name + }) subset_name = self.get_subset_name( variant, task_name, asset_doc, project_name, host_name ) diff --git a/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py b/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py index 06b73f4b5d..9cb6900b0a 100644 --- a/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py +++ b/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py @@ -2,6 +2,7 @@ import os import pyblish.api from openpype.lib import get_subset_name_with_asset_doc +from openpype.pipeline import legacy_io class CollectWorkfile(pyblish.api.ContextPlugin): @@ -41,7 +42,7 @@ class CollectWorkfile(pyblish.api.ContextPlugin): instance.data["publish"] = instance.data["active"] # for DL def _get_new_instance(self, context, scene_file): - task = api.Session["AVALON_TASK"] + task = legacy_io.Session["AVALON_TASK"] version = context.data["version"] asset_entity = context.data["assetEntity"] project_entity = context.data["projectEntity"] diff --git a/openpype/hosts/photoshop/api/pipeline.py b/openpype/hosts/photoshop/api/pipeline.py index fc90be8716..6db4470428 100644 --- a/openpype/hosts/photoshop/api/pipeline.py +++ b/openpype/hosts/photoshop/api/pipeline.py @@ -260,9 +260,8 @@ def update_context_data(data, changes): def get_context_title(): """Returns title for Creator window""" - import avalon.api - project_name = avalon.api.Session["AVALON_PROJECT"] - asset_name = avalon.api.Session["AVALON_ASSET"] - task_name = avalon.api.Session["AVALON_TASK"] + project_name = legacy_io.Session["AVALON_PROJECT"] + asset_name = legacy_io.Session["AVALON_ASSET"] + task_name = legacy_io.Session["AVALON_TASK"] return "{}/{}/{}".format(project_name, asset_name, task_name) diff --git a/openpype/hosts/photoshop/plugins/create/create_image.py b/openpype/hosts/photoshop/plugins/create/create_image.py index c2fe8b6c78..f15068b031 100644 --- 
a/openpype/hosts/photoshop/plugins/create/create_image.py +++ b/openpype/hosts/photoshop/plugins/create/create_image.py @@ -1,9 +1,9 @@ -from avalon import api as avalon_api from openpype.hosts.photoshop import api from openpype.lib import BoolDef from openpype.pipeline import ( Creator, - CreatedInstance + CreatedInstance, + legacy_io ) @@ -133,7 +133,7 @@ class ImageCreator(Creator): instance_data.pop("uuid") if not instance_data.get("task"): - instance_data["task"] = avalon_api.Session.get("AVALON_TASK") + instance_data["task"] = legacy_io.Session.get("AVALON_TASK") if not instance_data.get("variant"): instance_data["variant"] = '' diff --git a/openpype/hosts/photoshop/plugins/create/workfile_creator.py b/openpype/hosts/photoshop/plugins/create/workfile_creator.py index d66a05cad7..875a9b8a94 100644 --- a/openpype/hosts/photoshop/plugins/create/workfile_creator.py +++ b/openpype/hosts/photoshop/plugins/create/workfile_creator.py @@ -1,9 +1,8 @@ -from avalon import io - import openpype.hosts.photoshop.api as api from openpype.pipeline import ( AutoCreator, - CreatedInstance + CreatedInstance, + legacy_io ) @@ -36,12 +35,15 @@ class PSWorkfileCreator(AutoCreator): break variant = '' - project_name = io.Session["AVALON_PROJECT"] - asset_name = io.Session["AVALON_ASSET"] - task_name = io.Session["AVALON_TASK"] - host_name = io.Session["AVALON_APP"] + project_name = legacy_io.Session["AVALON_PROJECT"] + asset_name = legacy_io.Session["AVALON_ASSET"] + task_name = legacy_io.Session["AVALON_TASK"] + host_name = legacy_io.Session["AVALON_APP"] if existing_instance is None: - asset_doc = io.find_one({"type": "asset", "name": asset_name}) + asset_doc = legacy_io.find_one({ + "type": "asset", + "name": asset_name + }) subset_name = self.get_subset_name( variant, task_name, asset_doc, project_name, host_name ) @@ -65,7 +67,10 @@ class PSWorkfileCreator(AutoCreator): existing_instance["asset"] != asset_name or existing_instance["task"] != task_name ): - asset_doc = io.find_one({"type": "asset", "name": asset_name}) + asset_doc = legacy_io.find_one({ + "type": "asset", + "name": asset_name + }) subset_name = self.get_subset_name( variant, task_name, asset_doc, project_name, host_name ) diff --git a/openpype/hosts/photoshop/plugins/publish/collect_batch_data.py b/openpype/hosts/photoshop/plugins/publish/collect_batch_data.py index 5e6e916611..448493d370 100644 --- a/openpype/hosts/photoshop/plugins/publish/collect_batch_data.py +++ b/openpype/hosts/photoshop/plugins/publish/collect_batch_data.py @@ -16,11 +16,12 @@ shouldn't be pushed into general publish plugins. 
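The change repeated across these hunks replaces module-level `avalon.io` / `avalon.api` access with the shared `legacy_io` object from `openpype.pipeline`. A minimal sketch of the new pattern, using only calls that appear in the hunks themselves (the asset name "sh010" is a made-up placeholder and an active project session is assumed):

    from openpype.pipeline import legacy_io

    project_name = legacy_io.Session["AVALON_PROJECT"]
    task_name = legacy_io.Session["AVALON_TASK"]

    # The same query the old `io.find_one` call performed, now through legacy_io.
    asset_doc = legacy_io.find_one({"type": "asset", "name": "sh010"})
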
import os import pyblish.api -from avalon import io + from openpype.lib.plugin_tools import ( parse_json, get_batch_asset_task_info ) +from openpype.pipeline import legacy_io class CollectBatchData(pyblish.api.ContextPlugin): @@ -62,9 +63,9 @@ class CollectBatchData(pyblish.api.ContextPlugin): ) os.environ["AVALON_ASSET"] = asset_name - io.Session["AVALON_ASSET"] = asset_name os.environ["AVALON_TASK"] = task_name - io.Session["AVALON_TASK"] = task_name + legacy_io.Session["AVALON_ASSET"] = asset_name + legacy_io.Session["AVALON_TASK"] = task_name context.data["asset"] = asset_name context.data["task"] = task_name diff --git a/openpype/lib/avalon_context.py b/openpype/lib/avalon_context.py index 3d57ee4b91..3fcddef745 100644 --- a/openpype/lib/avalon_context.py +++ b/openpype/lib/avalon_context.py @@ -1969,7 +1969,7 @@ def get_last_workfile( return filename -@with_avalon +@with_pipeline_io def get_linked_ids_for_representations(project_name, repre_ids, dbcon=None, link_type=None, max_depth=0): """Returns list of linked ids of particular type (if provided). @@ -1987,7 +1987,7 @@ def get_linked_ids_for_representations(project_name, repre_ids, dbcon=None, """ # Create new dbcon if not passed and use passed project name if not dbcon: - from avalon.api import AvalonMongoDB + from openpype.pipeline import AvalonMongoDB dbcon = AvalonMongoDB() dbcon.Session["AVALON_PROJECT"] = project_name # Validate that passed dbcon has same project From b30db92921aee1998c5cb57fb5a5e3ea6f4a9129 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 25 Apr 2022 18:20:42 +0200 Subject: [PATCH 297/357] fix line length --- .../hosts/aftereffects/plugins/create/workfile_creator.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/aftereffects/plugins/create/workfile_creator.py b/openpype/hosts/aftereffects/plugins/create/workfile_creator.py index 7cc9bb54d4..88e55e21b5 100644 --- a/openpype/hosts/aftereffects/plugins/create/workfile_creator.py +++ b/openpype/hosts/aftereffects/plugins/create/workfile_creator.py @@ -41,7 +41,10 @@ class AEWorkfileCreator(AutoCreator): host_name = legacy_io.Session["AVALON_APP"] if existing_instance is None: - asset_doc = legacy_io.find_one({"type": "asset", "name": asset_name}) + asset_doc = legacy_io.find_one({ + "type": "asset", + "name": asset_name + }) subset_name = self.get_subset_name( variant, task_name, asset_doc, project_name, host_name ) From 25848817d92b47fc7e7b3f44cc68799e5d0cfa3e Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 25 Apr 2022 18:33:53 +0200 Subject: [PATCH 298/357] removed redundant code from aftereffects and photoshop --- openpype/hosts/aftereffects/api/pipeline.py | 20 ++++---------------- openpype/hosts/photoshop/api/pipeline.py | 19 ++++--------------- 2 files changed, 8 insertions(+), 31 deletions(-) diff --git a/openpype/hosts/aftereffects/api/pipeline.py b/openpype/hosts/aftereffects/api/pipeline.py index 0d739df748..a428a1470d 100644 --- a/openpype/hosts/aftereffects/api/pipeline.py +++ b/openpype/hosts/aftereffects/api/pipeline.py @@ -139,23 +139,11 @@ def check_inventory(): if not lib.any_outdated(): return - host = pyblish.api.registered_host() - outdated_containers = [] - for container in host.ls(): - representation = container['representation'] - representation_doc = legacy_io.find_one( - { - "_id": legacy_io.ObjectId(representation), - "type": "representation" - }, - projection={"parent": True} - ) - if representation_doc and not lib.is_latest(representation_doc): - outdated_containers.append(container) - 
# Warn about outdated containers. - print("Starting new QApplication..") - _app = QtWidgets.QApplication(sys.argv) + _app = QtWidgets.QApplication.instance() + if not _app: + print("Starting new QApplication..") + _app = QtWidgets.QApplication([]) message_box = QtWidgets.QMessageBox() message_box.setIcon(QtWidgets.QMessageBox.Warning) diff --git a/openpype/hosts/photoshop/api/pipeline.py b/openpype/hosts/photoshop/api/pipeline.py index 6db4470428..20a6e3169f 100644 --- a/openpype/hosts/photoshop/api/pipeline.py +++ b/openpype/hosts/photoshop/api/pipeline.py @@ -33,22 +33,11 @@ def check_inventory(): if not lib.any_outdated(): return - host = registered_host() - outdated_containers = [] - for container in host.ls(): - representation = container['representation'] - representation_doc = legacy_io.find_one( - { - "_id": ObjectId(representation), - "type": "representation" - }, - projection={"parent": True} - ) - if representation_doc and not lib.is_latest(representation_doc): - outdated_containers.append(container) - # Warn about outdated containers. - print("Starting new QApplication..") + _app = QtWidgets.QApplication.instance() + if not _app: + print("Starting new QApplication..") + _app = QtWidgets.QApplication([]) message_box = QtWidgets.QMessageBox() message_box.setIcon(QtWidgets.QMessageBox.Warning) From aace513c84c1f2f901be54139759dcc3bddf3a5d Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 25 Apr 2022 19:25:57 +0200 Subject: [PATCH 299/357] moved remove button to view's bottom --- .../widgets/attribute_defs/files_widget.py | 70 ++++++++++--------- 1 file changed, 36 insertions(+), 34 deletions(-) diff --git a/openpype/widgets/attribute_defs/files_widget.py b/openpype/widgets/attribute_defs/files_widget.py index e41387e0e5..f483fe7ef5 100644 --- a/openpype/widgets/attribute_defs/files_widget.py +++ b/openpype/widgets/attribute_defs/files_widget.py @@ -1,8 +1,7 @@ import os import collections import uuid -import clique -import six + from Qt import QtWidgets, QtCore, QtGui from openpype.lib import FileDefItem @@ -213,8 +212,6 @@ class FilesProxyModel(QtCore.QSortFilterProxyModel): class ItemWidget(QtWidgets.QWidget): - remove_requested = QtCore.Signal(str) - def __init__(self, item_id, label, pixmap_icon, parent=None): self._item_id = item_id @@ -224,31 +221,21 @@ class ItemWidget(QtWidgets.QWidget): icon_widget = PixmapLabel(pixmap_icon, self) label_widget = QtWidgets.QLabel(label, self) - pixmap = paint_image_with_color( - get_image(filename="delete.png"), QtCore.Qt.white - ) - remove_btn = IconButton(self) - remove_btn.setIcon(QtGui.QIcon(pixmap)) layout = QtWidgets.QHBoxLayout(self) layout.setContentsMargins(0, 0, 0, 0) layout.addWidget(icon_widget, 0) layout.addWidget(label_widget, 1) - layout.addWidget(remove_btn, 0) - - remove_btn.clicked.connect(self._on_remove_clicked) self._icon_widget = icon_widget self._label_widget = label_widget - self._remove_btn = remove_btn - - def _on_remove_clicked(self): - self.remove_requested.emit(self._item_id) class FilesView(QtWidgets.QListView): """View showing instances and their groups.""" + remove_requested = QtCore.Signal() + def __init__(self, *args, **kwargs): super(FilesView, self).__init__(*args, **kwargs) @@ -257,6 +244,17 @@ class FilesView(QtWidgets.QListView): QtWidgets.QAbstractItemView.ExtendedSelection ) + remove_btn = IconButton(self) + pix = paint_image_with_color( + get_image(filename="delete.png"), QtCore.Qt.white + ) + icon = QtGui.QIcon(pix) + remove_btn.setIcon(icon) + + 
remove_btn.clicked.connect(self._on_remove_clicked) + + self._remove_btn = remove_btn + def get_selected_item_ids(self): """Ids of selected instances.""" selected_item_ids = set() @@ -284,6 +282,24 @@ class FilesView(QtWidgets.QListView): return super(FilesView, self).event(event) + def _on_remove_clicked(self): + self.remove_requested.emit() + + def _update_remove_btn(self): + viewport = self.viewport() + height = viewport.height() + pos_x = viewport.width() - self._remove_btn.width() - 5 + pos_y = height - self._remove_btn.height() - 5 + self._remove_btn.move(max(0, pos_x), max(0, pos_y)) + + def resizeEvent(self, event): + super(FilesView, self).resizeEvent(event) + self._update_remove_btn() + + def showEvent(self, event): + super(FilesView, self).showEvent(event) + self._update_remove_btn() + class FilesWidget(QtWidgets.QFrame): value_changed = QtCore.Signal() @@ -308,7 +324,7 @@ class FilesWidget(QtWidgets.QFrame): files_proxy_model.rowsInserted.connect(self._on_rows_inserted) files_proxy_model.rowsRemoved.connect(self._on_rows_removed) - + files_view.remove_requested.connect(self._on_remove_requested) self._in_set_value = False self._empty_widget = empty_widget @@ -373,7 +389,6 @@ class FilesWidget(QtWidgets.QFrame): self._files_proxy_model.setData( index, widget.sizeHint(), QtCore.Qt.SizeHintRole ) - widget.remove_requested.connect(self._on_remove_request) self._widgets_by_id[item_id] = widget self._files_proxy_model.sort(0) @@ -401,23 +416,10 @@ class FilesWidget(QtWidgets.QFrame): if not self._in_set_value: self.value_changed.emit() - def _on_remove_request(self, item_id): - found_index = None - for row in range(self._files_model.rowCount()): - index = self._files_model.index(row, 0) - _item_id = index.data(ITEM_ID_ROLE) - if item_id == _item_id: - found_index = index - break - - if found_index is None: - return - + def _on_remove_requested(self): items_to_delete = self._files_view.get_selected_item_ids() - if item_id not in items_to_delete: - items_to_delete = [item_id] - - self._remove_item_by_ids(items_to_delete) + if items_to_delete: + self._remove_item_by_ids(items_to_delete) def sizeHint(self): # Get size hints of widget and visible widgets From 05ede8031e26f3f86408d1aa88aef8b10b6ad2de Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 25 Apr 2022 19:31:48 +0200 Subject: [PATCH 300/357] a little bit nicer look of items in files widget --- .../widgets/attribute_defs/files_widget.py | 34 +++++++++++-------- 1 file changed, 19 insertions(+), 15 deletions(-) diff --git a/openpype/widgets/attribute_defs/files_widget.py b/openpype/widgets/attribute_defs/files_widget.py index f483fe7ef5..72bfd6cfa2 100644 --- a/openpype/widgets/attribute_defs/files_widget.py +++ b/openpype/widgets/attribute_defs/files_widget.py @@ -89,31 +89,26 @@ class FilesModel(QtGui.QStandardItemModel): if not items: return - obj_items = FileDefItem.from_value(items, self._sequence_exts) - if not obj_items: + file_items = FileDefItem.from_value(items, self._sequence_exts) + if not file_items: return if self._single_item: - obj_items = [obj_items[0]] + file_items = [file_items[0]] current_ids = list(self._file_items_by_id.keys()) if current_ids: self.remove_item_by_ids(current_ids) new_model_items = [] - for obj_item in obj_items: - _, ext = os.path.splitext(obj_item.filenames[0]) - if ext: - icon_pixmap = get_pixmap(filename="file.png") - else: - icon_pixmap = get_pixmap(filename="folder.png") - - item_id, model_item = self._create_item(obj_item, icon_pixmap) + for file_item in file_items: + item_id, 
model_item = self._create_item(file_item) new_model_items.append(model_item) - self._file_items_by_id[item_id] = obj_item + self._file_items_by_id[item_id] = file_item self._items_by_id[item_id] = model_item if new_model_items: - self.invisibleRootItem().appendRows(new_model_items) + roow_item = self.invisibleRootItem() + roow_item.appendRows(new_model_items) def remove_item_by_ids(self, item_ids): if not item_ids: @@ -134,7 +129,16 @@ class FilesModel(QtGui.QStandardItemModel): def get_file_item_by_id(self, item_id): return self._file_items_by_id.get(item_id) - def _create_item(self, file_item, icon_pixmap=None): + def _create_item(self, file_item): + if file_item.is_dir: + icon_pixmap = paint_image_with_color( + get_image(filename="folder.png"), QtCore.Qt.white + ) + else: + icon_pixmap = paint_image_with_color( + get_image(filename="file.png"), QtCore.Qt.white + ) + item = QtGui.QStandardItem() item_id = str(uuid.uuid4()) item.setData(item_id, ITEM_ID_ROLE) @@ -223,7 +227,7 @@ class ItemWidget(QtWidgets.QWidget): label_widget = QtWidgets.QLabel(label, self) layout = QtWidgets.QHBoxLayout(self) - layout.setContentsMargins(0, 0, 0, 0) + layout.setContentsMargins(5, 5, 0, 5) layout.addWidget(icon_widget, 0) layout.addWidget(label_widget, 1) From cc1f800740c77b5a5fafdb665a03ad9b630b239f Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 26 Apr 2022 10:12:35 +0200 Subject: [PATCH 301/357] added testing widget for attribute definitions --- openpype/widgets/attribute_defs/__init__.py | 6 +- openpype/widgets/attribute_defs/widgets.py | 102 ++++++++++++++++++++ 2 files changed, 107 insertions(+), 1 deletion(-) diff --git a/openpype/widgets/attribute_defs/__init__.py b/openpype/widgets/attribute_defs/__init__.py index 147efeb3d6..ce6b80109e 100644 --- a/openpype/widgets/attribute_defs/__init__.py +++ b/openpype/widgets/attribute_defs/__init__.py @@ -1,6 +1,10 @@ -from .widgets import create_widget_for_attr_def +from .widgets import ( + create_widget_for_attr_def, + AttributeDefinitionsWidget, +) __all__ = ( "create_widget_for_attr_def", + "AttributeDefinitionsWidget", ) diff --git a/openpype/widgets/attribute_defs/widgets.py b/openpype/widgets/attribute_defs/widgets.py index 62877be4cf..3f36c078cb 100644 --- a/openpype/widgets/attribute_defs/widgets.py +++ b/openpype/widgets/attribute_defs/widgets.py @@ -1,4 +1,5 @@ import uuid +import copy from Qt import QtWidgets, QtCore @@ -10,6 +11,7 @@ from openpype.lib.attribute_definitions import ( EnumDef, BoolDef, FileDef, + UIDef, UISeparatorDef, UILabelDef ) @@ -53,6 +55,106 @@ def create_widget_for_attr_def(attr_def, parent=None): )) +class AttributeDefinitionsWidget(QtWidgets.QWidget): + """Create widgets for attribute definitions in grid layout. + + Widget creates input widgets for passed attribute definitions. + + Widget can't handle multiselection values. 
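The `AttributeDefinitionsWidget` introduced by this commit builds one input widget per attribute definition and exposes the collected values through `current_value()`. A rough usage sketch, assuming `BoolDef` takes the same key/default/label arguments as the definitions used elsewhere in this series (Qt application setup omitted):

    from openpype.lib.attribute_definitions import BoolDef
    from openpype.widgets.attribute_defs import AttributeDefinitionsWidget

    attr_defs = [BoolDef("review", default=True, label="Review")]
    widget = AttributeDefinitionsWidget(attr_defs)
    widget.set_value({"review": False})    # pre-fill from stored data
    print(widget.current_value())          # expected: {"review": False}
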
+ """ + + def __init__(self, attr_defs=None, parent=None): + super(AttributeDefinitionsWidget, self).__init__(parent) + + self._widgets = [] + self._current_keys = set() + + self.set_attr_defs(attr_defs) + + def clear_attr_defs(self): + """Remove all existing widgets and reset layout if needed.""" + self._widgets = [] + self._current_keys = set() + + layout = self.layout() + if layout is not None: + if layout.count() == 0: + return + + while layout.count(): + item = layout.takeAt(0) + widget = item.widget() + if widget: + widget.setVisible(False) + widget.deleteLater() + + layout.deleteLater() + + new_layout = QtWidgets.QGridLayout() + self.setLayout(new_layout) + + def set_attr_defs(self, attr_defs): + """Replace current attribute definitions with passed.""" + self.clear_attr_defs() + if attr_defs: + self.add_attr_defs(attr_defs) + + def add_attr_defs(self, attr_defs): + """Add attribute definitions to current.""" + layout = self.layout() + + row = 0 + for attr_def in attr_defs: + if attr_def.key in self._current_keys: + raise KeyError("Duplicated key \"{}\"".format(attr_def.key)) + + self._current_keys.add(attr_def.key) + widget = create_widget_for_attr_def(attr_def, self) + + expand_cols = 2 + if attr_def.is_value_def and attr_def.is_label_horizontal: + expand_cols = 1 + + col_num = 2 - expand_cols + + if attr_def.label: + label_widget = QtWidgets.QLabel(attr_def.label, self) + layout.addWidget( + label_widget, row, 0, 1, expand_cols + ) + if not attr_def.is_label_horizontal: + row += 1 + + layout.addWidget( + widget, row, col_num, 1, expand_cols + ) + self._widgets.append(widget) + row += 1 + + def set_value(self, value): + new_value = copy.deepcopy(value) + unused_keys = set(new_value.keys()) + for widget in self._widgets: + attr_def = widget.attr_def + if attr_def.key not in new_value: + continue + unused_keys.remove(attr_def.key) + + widget_value = new_value[attr_def.key] + if widget_value is None: + widget_value = copy.deepcopy(attr_def.default) + widget.set_value(widget_value) + + def current_value(self): + output = {} + for widget in self._widgets: + attr_def = widget.attr_def + if not isinstance(attr_def, UIDef): + output[attr_def.key] = widget.current_value() + + return output + + class _BaseAttrDefWidget(QtWidgets.QWidget): # Type 'object' may not work with older PySide versions value_changed = QtCore.Signal(object, uuid.UUID) From c68b1e42c5b6991db3d633b8462323b333384e4f Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 26 Apr 2022 11:08:23 +0200 Subject: [PATCH 302/357] added MessageOverlayObject to utils init --- openpype/tools/utils/__init__.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/openpype/tools/utils/__init__.py b/openpype/tools/utils/__init__.py index ea1133c442..0f367510bd 100644 --- a/openpype/tools/utils/__init__.py +++ b/openpype/tools/utils/__init__.py @@ -22,6 +22,10 @@ from .lib import ( from .models import ( RecursiveSortFilterProxyModel, ) +from .overlay_messages import ( + MessageOverlayObject, +) + __all__ = ( "PlaceholderLineEdit", @@ -45,4 +49,6 @@ __all__ = ( "get_asset_icon", "RecursiveSortFilterProxyModel", + + "MessageOverlayObject", ) From 24728400eac2a954f8f82e70db8fc6bdd5491c38 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 26 Apr 2022 11:14:57 +0200 Subject: [PATCH 303/357] MessageOverlayObject can have it's own default timeout --- openpype/tools/utils/overlay_messages.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/openpype/tools/utils/overlay_messages.py 
b/openpype/tools/utils/overlay_messages.py index 93082b9fb7..62de2cf272 100644 --- a/openpype/tools/utils/overlay_messages.py +++ b/openpype/tools/utils/overlay_messages.py @@ -174,7 +174,7 @@ class MessageOverlayObject(QtCore.QObject): widget (QWidget): """ - def __init__(self, widget): + def __init__(self, widget, default_timeout=None): super(MessageOverlayObject, self).__init__() widget.installEventFilter(self) @@ -194,6 +194,7 @@ class MessageOverlayObject(QtCore.QObject): self._spacing = 5 self._move_size = 4 self._move_size_remove = 8 + self._default_timeout = default_timeout def add_message(self, message, message_type=None, timeout=None): """Add single message into overlay. @@ -208,6 +209,9 @@ class MessageOverlayObject(QtCore.QObject): if not message: return + if timeout is None: + timeout = self._default_timeout + # Create unique id of message label_id = str(uuid.uuid4()) # Create message widget From d5e7353b665cfd8ab12b243dfebbcd75d6b38cd4 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 26 Apr 2022 11:20:56 +0200 Subject: [PATCH 304/357] changed success to default --- openpype/style/style.css | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/style/style.css b/openpype/style/style.css index bae648b860..f2b0cdd6ac 100644 --- a/openpype/style/style.css +++ b/openpype/style/style.css @@ -696,10 +696,10 @@ QScrollBar::add-page:vertical, QScrollBar::sub-page:vertical { #OverlayMessageWidget:hover { background: {color:bg-button-hover}; } -#OverlayMessageWidget[type="success"] { +#OverlayMessageWidget { background: {color:overlay-messages:bg-success}; } -#OverlayMessageWidget[type="success"]:hover { +#OverlayMessageWidget:hover { background: {color:overlay-messages:bg-success-hover}; } From 0ebe84adf4e0181a0c842aa105d6de5c80b1d243 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 26 Apr 2022 11:21:35 +0200 Subject: [PATCH 305/357] use overlay messages in local settings --- openpype/tools/settings/local_settings/window.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/openpype/tools/settings/local_settings/window.py b/openpype/tools/settings/local_settings/window.py index 4db0e01476..6a2db3fff5 100644 --- a/openpype/tools/settings/local_settings/window.py +++ b/openpype/tools/settings/local_settings/window.py @@ -8,6 +8,7 @@ from openpype.settings.lib import ( save_local_settings ) from openpype.tools.settings import CHILD_OFFSET +from openpype.tools.utils import MessageOverlayObject from openpype.api import ( Logger, SystemSettings, @@ -221,6 +222,8 @@ class LocalSettingsWindow(QtWidgets.QWidget): self.setWindowTitle("OpenPype Local settings") + overlay_object = MessageOverlayObject(self) + stylesheet = style.load_stylesheet() self.setStyleSheet(stylesheet) self.setWindowIcon(QtGui.QIcon(style.app_icon_path())) @@ -247,6 +250,7 @@ class LocalSettingsWindow(QtWidgets.QWidget): save_btn.clicked.connect(self._on_save_clicked) reset_btn.clicked.connect(self._on_reset_clicked) + self._overlay_object = overlay_object # Do not create local settings widget in init phase as it's using # settings objects that must be OK to be able create this widget # - we want to show dialog if anything goes wrong @@ -312,8 +316,10 @@ class LocalSettingsWindow(QtWidgets.QWidget): def _on_reset_clicked(self): self.reset() + self._overlay_object.add_message("Refreshed...") def _on_save_clicked(self): value = self._settings_widget.settings_value() save_local_settings(value) + self._overlay_object.add_message("Saved...", message_type="success") self.reset() From 
c9f35c480507471128efbca377ad71464f028788 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 26 Apr 2022 12:08:07 +0200 Subject: [PATCH 306/357] remove button is enabled/disabled on selection change --- openpype/style/style.css | 8 +++++ .../widgets/attribute_defs/files_widget.py | 30 +++++++++++++++++-- 2 files changed, 35 insertions(+), 3 deletions(-) diff --git a/openpype/style/style.css b/openpype/style/style.css index 9df615d953..59253a474c 100644 --- a/openpype/style/style.css +++ b/openpype/style/style.css @@ -1340,3 +1340,11 @@ VariantInputsWidget QToolButton { #LikeDisabledInput:focus { border-color: {color:border}; } + +/* Attribute Definition widgets */ +InViewButton, InViewButton:disabled { + background: transparent; +} +InViewButton:hover { + background: rgba(255, 255, 255, 37); +} diff --git a/openpype/widgets/attribute_defs/files_widget.py b/openpype/widgets/attribute_defs/files_widget.py index 72bfd6cfa2..3a9455584c 100644 --- a/openpype/widgets/attribute_defs/files_widget.py +++ b/openpype/widgets/attribute_defs/files_widget.py @@ -235,6 +235,10 @@ class ItemWidget(QtWidgets.QWidget): self._label_widget = label_widget +class InViewButton(IconButton): + pass + + class FilesView(QtWidgets.QListView): """View showing instances and their groups.""" @@ -248,17 +252,34 @@ class FilesView(QtWidgets.QListView): QtWidgets.QAbstractItemView.ExtendedSelection ) - remove_btn = IconButton(self) - pix = paint_image_with_color( + remove_btn = InViewButton(self) + pix_enabled = paint_image_with_color( get_image(filename="delete.png"), QtCore.Qt.white ) - icon = QtGui.QIcon(pix) + pix_disabled = paint_image_with_color( + get_image(filename="delete.png"), QtCore.Qt.gray + ) + icon = QtGui.QIcon(pix_enabled) + icon.addPixmap(pix_disabled, icon.Disabled, icon.Off) remove_btn.setIcon(icon) + remove_btn.setEnabled(False) remove_btn.clicked.connect(self._on_remove_clicked) self._remove_btn = remove_btn + def setSelectionModel(self, *args, **kwargs): + super(FilesView, self).setSelectionModel(*args, **kwargs) + selection_model = self.selectionModel() + selection_model.selectionChanged.connect(self._on_selection_change) + + def has_selected_item_ids(self): + for index in self.selectionModel().selectedIndexes(): + instance_id = index.data(ITEM_ID_ROLE) + if instance_id is not None: + return True + return False + def get_selected_item_ids(self): """Ids of selected instances.""" selected_item_ids = set() @@ -286,6 +307,9 @@ class FilesView(QtWidgets.QListView): return super(FilesView, self).event(event) + def _on_selection_change(self): + self._remove_btn.setEnabled(self.has_selected_item_ids()) + def _on_remove_clicked(self): self.remove_requested.emit() From 171e73bf218da9130b58a59647840320a42eca40 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 26 Apr 2022 12:10:33 +0200 Subject: [PATCH 307/357] fixed event handling on files view --- .../widgets/attribute_defs/files_widget.py | 21 +++++++------------ 1 file changed, 7 insertions(+), 14 deletions(-) diff --git a/openpype/widgets/attribute_defs/files_widget.py b/openpype/widgets/attribute_defs/files_widget.py index 3a9455584c..cb339f3d52 100644 --- a/openpype/widgets/attribute_defs/files_widget.py +++ b/openpype/widgets/attribute_defs/files_widget.py @@ -290,20 +290,13 @@ class FilesView(QtWidgets.QListView): return selected_item_ids def event(self, event): - if not event.type() == QtCore.QEvent.KeyPress: - pass - - elif event.key() == QtCore.Qt.Key_Space: - self.toggle_requested.emit(-1) - return True - - elif event.key() == 
QtCore.Qt.Key_Backspace: - self.toggle_requested.emit(0) - return True - - elif event.key() == QtCore.Qt.Key_Return: - self.toggle_requested.emit(1) - return True + if event.type() == QtCore.QEvent.KeyPress: + if ( + event.key() == QtCore.Qt.Key_Delete + and self.has_selected_item_ids() + ): + self.remove_requested.emit() + return True return super(FilesView, self).event(event) From 1bfe4232855a79720d08dcb8a7500c06057507d7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Tue, 26 Apr 2022 12:11:36 +0200 Subject: [PATCH 308/357] fix proper output directory --- openpype/hosts/maya/api/lib_renderproducts.py | 45 +++++++++++++------ .../maya/plugins/create/create_render.py | 2 +- .../publish/validate_rendersettings.py | 30 +++++-------- .../plugins/publish/submit_maya_deadline.py | 24 +++++++++- 4 files changed, 64 insertions(+), 37 deletions(-) diff --git a/openpype/hosts/maya/api/lib_renderproducts.py b/openpype/hosts/maya/api/lib_renderproducts.py index 8b282094db..5956cc482c 100644 --- a/openpype/hosts/maya/api/lib_renderproducts.py +++ b/openpype/hosts/maya/api/lib_renderproducts.py @@ -79,6 +79,7 @@ IMAGE_PREFIXES = { "redshift": "defaultRenderGlobals.imageFilePrefix", } +RENDERMAN_IMAGE_DIR = "maya//" @attr.s class LayerMetadata(object): @@ -1054,6 +1055,8 @@ class RenderProductsRenderman(ARenderProducts): :func:`ARenderProducts.get_render_products()` """ + from rfm2.api.displays import get_displays # noqa + cameras = [ self.sanitize_camera_name(c) for c in self.get_renderable_cameras() @@ -1066,20 +1069,38 @@ class RenderProductsRenderman(ARenderProducts): ] products = [] - default_ext = "exr" - displays = cmds.listConnections("rmanGlobals.displays") - for aov in displays: - enabled = self._get_attr(aov, "enable") + # NOTE: This is guessing extensions from renderman display types. + # Some of them are just framebuffers, d_texture format can be + # set in display setting. We set those now to None, but it + # should be handled more gracefully. + display_types = { + "d_deepexr": "exr", + "d_it": None, + "d_null": None, + "d_openexr": "exr", + "d_png": "png", + "d_pointcloud": "ptc", + "d_targa": "tga", + "d_texture": None, + "d_tiff": "tif" + } + + displays = get_displays()["displays"] + for name, display in displays.items(): + enabled = display["params"]["enable"]["value"] if not enabled: continue - aov_name = str(aov) + aov_name = name if aov_name == "rmanDefaultDisplay": aov_name = "beauty" + extensions = display_types.get( + display["driverNode"]["type"], "exr") + for camera in cameras: product = RenderProduct(productName=aov_name, - ext=default_ext, + ext=extensions, camera=camera) products.append(product) @@ -1088,20 +1109,16 @@ class RenderProductsRenderman(ARenderProducts): def get_files(self, product): """Get expected files. - In renderman we hack it with prepending path. This path would - normally be translated from `rmanGlobals.imageOutputDir`. We skip - this and hardcode prepend path we expect. There is no place for user - to mess around with this settings anyway and it is enforced in - render settings validator. 
""" files = super(RenderProductsRenderman, self).get_files(product) layer_data = self.layer_data new_files = [] + + resolved_image_dir = re.sub("", layer_data.sceneName, RENDERMAN_IMAGE_DIR, flags=re.IGNORECASE) # noqa: E501 + resolved_image_dir = re.sub("", layer_data.layerName, resolved_image_dir, flags=re.IGNORECASE) # noqa: E501 for file in files: - new_file = "{}/{}/{}".format( - layer_data.sceneName, layer_data.layerName, file - ) + new_file = "{}/{}".format(resolved_image_dir, file) new_files.append(new_file) return new_files diff --git a/openpype/hosts/maya/plugins/create/create_render.py b/openpype/hosts/maya/plugins/create/create_render.py index 13bfe1bf37..f2cf73557e 100644 --- a/openpype/hosts/maya/plugins/create/create_render.py +++ b/openpype/hosts/maya/plugins/create/create_render.py @@ -467,7 +467,7 @@ class CreateRender(plugin.Creator): if renderer == "renderman": cmds.setAttr("rmanGlobals.imageOutputDir", - "/maya//", type="string") + "maya//", type="string") def _set_vray_settings(self, asset): # type: (dict) -> None diff --git a/openpype/hosts/maya/plugins/publish/validate_rendersettings.py b/openpype/hosts/maya/plugins/publish/validate_rendersettings.py index 28fe2d317c..a513c8ebc1 100644 --- a/openpype/hosts/maya/plugins/publish/validate_rendersettings.py +++ b/openpype/hosts/maya/plugins/publish/validate_rendersettings.py @@ -69,14 +69,7 @@ class ValidateRenderSettings(pyblish.api.InstancePlugin): redshift_AOV_prefix = "/{aov_separator}" # noqa: E501 - # WARNING: There is bug? in renderman, translating token - # to something left behind mayas default image prefix. So instead - # `SceneName_v01` it translates to: - # `SceneName_v01//` that means - # for example: - # `SceneName_v01/Main/Main_`. Possible solution is to define - # custom token like to point to determined scene name. - RendermanDirPrefix = "/renders/maya//" + renderman_dir_prefix = "maya//" R_AOV_TOKEN = re.compile( r'%a||', re.IGNORECASE) @@ -119,21 +112,18 @@ class ValidateRenderSettings(pyblish.api.InstancePlugin): required_prefix = "maya/" - if renderer == "renderman": - # renderman has prefix set differently - required_prefix = "/renders/{}".format(required_prefix) - if not anim_override: invalid = True cls.log.error("Animation needs to be enabled. 
Use the same " "frame for start and end to render single frame") - if not prefix.lower().startswith(required_prefix): - invalid = True - cls.log.error( - "Wrong image prefix [ {} ] - doesn't start with: '{}'".format( - prefix, required_prefix) - ) + if renderer != "renderman": + if not prefix.lower().startswith(required_prefix): + invalid = True + cls.log.error( + "Wrong image prefix [ {} ] - doesn't start with: '{}'".format( + prefix, required_prefix) + ) if not re.search(cls.R_LAYER_TOKEN, prefix): invalid = True @@ -207,7 +197,7 @@ class ValidateRenderSettings(pyblish.api.InstancePlugin): invalid = True cls.log.error("Wrong image prefix [ {} ]".format(file_prefix)) - if dir_prefix.lower() != cls.RendermanDirPrefix.lower(): + if dir_prefix.lower() != cls.renderman_dir_prefix.lower(): invalid = True cls.log.error("Wrong directory prefix [ {} ]".format( dir_prefix)) @@ -313,7 +303,7 @@ class ValidateRenderSettings(pyblish.api.InstancePlugin): default_prefix, type="string") cmds.setAttr("rmanGlobals.imageOutputDir", - cls.RendermanDirPrefix, + cls.renderman_dir_prefix, type="string") if renderer == "vray": diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index 14e458a401..3f036dbca7 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -187,6 +187,10 @@ def get_renderer_variables(renderlayer, root): filename_0 = re.sub('_', '_beauty', filename_0, flags=re.IGNORECASE) prefix_attr = "defaultRenderGlobals.imageFilePrefix" + + scene = cmds.file(query=True, sceneName=True) + scene, _ = os.path.splitext(os.path.basename(scene)) + if renderer == "vray": renderlayer = renderlayer.split("_")[-1] # Maya's renderSettings function does not return V-Ray file extension @@ -206,8 +210,7 @@ def get_renderer_variables(renderlayer, root): filename_prefix = cmds.getAttr(prefix_attr) # we need to determine path for vray as maya `renderSettings` query # does not work for vray. 
- scene = cmds.file(query=True, sceneName=True) - scene, _ = os.path.splitext(os.path.basename(scene)) + filename_0 = re.sub('', scene, filename_prefix, flags=re.IGNORECASE) # noqa: E501 filename_0 = re.sub('', renderlayer, filename_0, flags=re.IGNORECASE) # noqa: E501 filename_0 = "{}.{}.{}".format( @@ -224,15 +227,30 @@ def get_renderer_variables(renderlayer, root): "d_it": None, "d_null": None, "d_openexr": "exr", + "d_openexr3": "exr", "d_png": "png", "d_pointcloud": "ptc", "d_targa": "tga", "d_texture": None, "d_tiff": "tif" } + extension = display_types.get( cmds.listConnections("rmanDefaultDisplay.displayType")[0] ) + + filename_prefix = "{}/{}".format( + cmds.getAttr("rmanGlobals.imageOutputDir"), + cmds.getAttr("rmanGlobals.imageFileFormat") + ) + + renderlayer = renderlayer.split("_")[-1] + + filename_0 = re.sub('', scene, filename_prefix, flags=re.IGNORECASE) # noqa: E501 + filename_0 = re.sub('', renderlayer, filename_0, flags=re.IGNORECASE) # noqa: E501 + filename_0 = re.sub('', "#" * int(padding), filename_0, flags=re.IGNORECASE) # noqa: E501 + filename_0 = re.sub('', extension, filename_0, flags=re.IGNORECASE) # noqa: E501 + filename_0 = os.path.normpath(os.path.join(root, filename_0)) elif renderer == "redshift": # mapping redshift extension dropdown values to strings ext_mapping = ["iff", "exr", "tif", "png", "tga", "jpg"] @@ -442,6 +460,8 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin): output_filename_0 = filename_0 + dirname = os.path.dirname(output_filename_0) + # Create render folder ---------------------------------------------- try: # Ensure render folder exists From a2b2dfb1ef93b59c88097b97fcc314708ba529be Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Tue, 26 Apr 2022 12:18:12 +0200 Subject: [PATCH 309/357] hound fixes --- .../plugins/publish/validate_rendersettings.py | 15 ++++++++------- .../plugins/publish/submit_maya_deadline.py | 2 +- 2 files changed, 9 insertions(+), 8 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/validate_rendersettings.py b/openpype/hosts/maya/plugins/publish/validate_rendersettings.py index a513c8ebc1..023e27de17 100644 --- a/openpype/hosts/maya/plugins/publish/validate_rendersettings.py +++ b/openpype/hosts/maya/plugins/publish/validate_rendersettings.py @@ -117,13 +117,14 @@ class ValidateRenderSettings(pyblish.api.InstancePlugin): cls.log.error("Animation needs to be enabled. Use the same " "frame for start and end to render single frame") - if renderer != "renderman": - if not prefix.lower().startswith(required_prefix): - invalid = True - cls.log.error( - "Wrong image prefix [ {} ] - doesn't start with: '{}'".format( - prefix, required_prefix) - ) + if renderer != "renderman" and not prefix.lower().startswith( + required_prefix): + invalid = True + cls.log.error( + ("Wrong image prefix [ {} ] " + " - doesn't start with: '{}'").format( + prefix, required_prefix) + ) if not re.search(cls.R_LAYER_TOKEN, prefix): invalid = True diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index 3f036dbca7..347b6ab0fe 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -858,7 +858,7 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin): } renderer = self._instance.data["renderer"] - + # This hack is here because of how Deadline handles Renderman version. 
# it considers everything with `renderman` set as version older than # Renderman 22, and so if we are using renderman > 21 we need to set From 911163756e994edd9e332f6ef26f973c50cd24d9 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 26 Apr 2022 13:35:53 +0200 Subject: [PATCH 310/357] simplified allow sequence --- openpype/hosts/traypublisher/api/plugin.py | 5 +- .../plugins/create/create_from_settings.py | 13 ---- openpype/lib/attribute_definitions.py | 66 ++++++++----------- .../project_settings/traypublisher.json | 6 +- .../schema_project_traypublisher.json | 38 ++--------- .../widgets/attribute_defs/files_widget.py | 10 +-- openpype/widgets/attribute_defs/widgets.py | 2 +- 7 files changed, 44 insertions(+), 96 deletions(-) diff --git a/openpype/hosts/traypublisher/api/plugin.py b/openpype/hosts/traypublisher/api/plugin.py index d31e0a1ef7..d4bbe4c9d6 100644 --- a/openpype/hosts/traypublisher/api/plugin.py +++ b/openpype/hosts/traypublisher/api/plugin.py @@ -45,7 +45,6 @@ class SettingsCreator(TrayPublishCreator): enable_review = False extensions = [] - sequence_extensions = [] def collect_instances(self): for instance_data in list_instances(): @@ -73,7 +72,7 @@ class SettingsCreator(TrayPublishCreator): "filepath", folders=False, extensions=self.extensions, - sequence_extensions=self.sequence_extensions, + allow_sequences=self.allow_sequences, label="Filepath", ) output.append(file_def) @@ -98,7 +97,7 @@ class SettingsCreator(TrayPublishCreator): "description": item_data["description"], "enable_review": item_data["enable_review"], "extensions": item_data["extensions"], - "sequence_extensions": item_data["sequence_extensions"], + "allow_sequences": item_data["allow_sequences"], "default_variants": item_data["default_variants"] } ) diff --git a/openpype/hosts/traypublisher/plugins/create/create_from_settings.py b/openpype/hosts/traypublisher/plugins/create/create_from_settings.py index 19ade437ab..836939fe94 100644 --- a/openpype/hosts/traypublisher/plugins/create/create_from_settings.py +++ b/openpype/hosts/traypublisher/plugins/create/create_from_settings.py @@ -14,19 +14,6 @@ def initialize(): global_variables = globals() for item in simple_creators: - allow_sequences_value = item["allow_sequences"] - allow_sequences = allow_sequences_value["allow"] - if allow_sequences == "all": - sequence_extensions = copy.deepcopy(item["extensions"]) - - elif allow_sequences == "no": - sequence_extensions = [] - - elif allow_sequences == "selection": - sequence_extensions = allow_sequences_value["extensions"] - - item["sequence_extensions"] = sequence_extensions - item["enable_review"] = False dynamic_plugin = SettingsCreator.from_settings(item) global_variables[dynamic_plugin.__name__] = dynamic_plugin diff --git a/openpype/lib/attribute_definitions.py b/openpype/lib/attribute_definitions.py index 7a00fcdeb4..ea3da53a9e 100644 --- a/openpype/lib/attribute_definitions.py +++ b/openpype/lib/attribute_definitions.py @@ -375,6 +375,16 @@ class FileDefItem(object): filename_template, ",".join(ranges) ) + def split_sequence(self): + if not self.is_sequence: + raise ValueError("Cannot split single file item") + + output = [] + for filename in self.filenames: + path = os.path.join(self.directory, filename) + output.append(self.from_paths([path])) + return output + @property def ext(self): _, ext = os.path.splitext(self.filenames[0]) @@ -412,7 +422,7 @@ class FileDefItem(object): return cls("", "") @classmethod - def from_value(cls, value, sequence_extensions): + def from_value(cls, value, 
allow_sequences): multi = isinstance(value, (list, tuple, set)) if not multi: value = [value] @@ -420,10 +430,15 @@ class FileDefItem(object): output = [] str_filepaths = [] for item in value: + if isinstance(item, dict): + item = cls.from_dict(item) + if isinstance(item, FileDefItem): - output.append(item) - elif isinstance(item, dict): - output.append(cls.from_dict(item)) + if not allow_sequences and item.is_sequence: + output.extend(item.split_sequence()) + else: + output.append(item) + elif isinstance(item, six.string_types): str_filepaths.append(item) else: @@ -434,7 +449,7 @@ class FileDefItem(object): ) if str_filepaths: - output.extend(cls.from_paths(str_filepaths, sequence_extensions)) + output.extend(cls.from_paths(str_filepaths, allow_sequences)) if multi: return output @@ -450,7 +465,7 @@ class FileDefItem(object): ) @classmethod - def from_paths(cls, paths, sequence_extensions): + def from_paths(cls, paths, allow_sequences): filenames_by_dir = collections.defaultdict(list) for path in paths: normalized = os.path.normpath(path) @@ -459,18 +474,12 @@ class FileDefItem(object): output = [] for directory, filenames in filenames_by_dir.items(): - filtered_filenames = [] - for filename in filenames: - _, ext = os.path.splitext(filename) - if ext in sequence_extensions: - filtered_filenames.append(filename) - else: - output.append(cls(directory, [filename])) + if allow_sequences: + cols, remainders = clique.assemble(filenames) + else: + cols = [] + remainders = filenames - if not filtered_filenames: - continue - - cols, remainders = clique.assemble(filtered_filenames) for remainder in remainders: output.append(cls(directory, [remainder])) @@ -512,23 +521,9 @@ class FileDef(AbtractAttrDef): default(str, list): Defautl value. """ - default_sequence_extensions = [ - ".ani", ".anim", ".apng", ".art", ".bmp", ".bpg", ".bsave", - ".cal", ".cin", ".cpc", ".cpt", ".dds", ".dpx", ".ecw", ".exr", - ".fits", ".flic", ".flif", ".fpx", ".gif", ".hdri", ".hevc", - ".icer", ".icns", ".ico", ".cur", ".ics", ".ilbm", ".jbig", - ".jbig2", ".jng", ".jpeg", ".jpeg-ls", ".2000", ".jpg", ".xr", - ".jpeg-hdr", ".kra", ".mng", ".miff", ".nrrd", - ".ora", ".pam", ".pbm", ".pgm", ".ppm", ".pnm", ".pcx", ".pgf", - ".pictor", ".png", ".psb", ".psp", ".qtvr", ".ras", - ".rgbe", ".logluv", ".tiff", ".sgi", ".tga", ".tiff", - ".tiff/ep", ".tiff/it", ".ufo", ".ufp", ".wbmp", ".webp", - ".xbm", ".xcf", ".xpm", ".xwd" - ] - def __init__( self, key, single_item=True, folders=None, extensions=None, - sequence_extensions=None, default=None, **kwargs + allow_sequences=True, default=None, **kwargs ): if folders is None and extensions is None: folders = True @@ -568,13 +563,10 @@ class FileDef(AbtractAttrDef): is_label_horizontal = False kwargs["is_label_horizontal"] = is_label_horizontal - if sequence_extensions is None: - sequence_extensions = self.default_sequence_extensions - self.single_item = single_item self.folders = folders self.extensions = set(extensions) - self.sequence_extensions = set(sequence_extensions) + self.allow_sequences = allow_sequences super(FileDef, self).__init__(key, default=default, **kwargs) def __eq__(self, other): @@ -585,7 +577,7 @@ class FileDef(AbtractAttrDef): self.single_item == other.single_item and self.folders == other.folders and self.extensions == other.extensions - and self.sequence_extensions == self.sequence_extensions + and self.allow_sequences == other.allow_sequences ) def convert_value(self, value): diff --git a/openpype/settings/defaults/project_settings/traypublisher.json 
b/openpype/settings/defaults/project_settings/traypublisher.json index e6c6747ca2..1b0ad67abb 100644 --- a/openpype/settings/defaults/project_settings/traypublisher.json +++ b/openpype/settings/defaults/project_settings/traypublisher.json @@ -11,6 +11,7 @@ "enable_review": false, "description": "Publish workfile backup", "detailed_description": "", + "allow_sequences": true, "extensions": [ ".ma", ".mb", @@ -29,10 +30,7 @@ ".psd", ".psb", ".aep" - ], - "allow_sequences": { - "allow": "no" - } + ] } ] } \ No newline at end of file diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_traypublisher.json b/openpype/settings/entities/schemas/projects_schema/schema_project_traypublisher.json index 00deb84172..59c675d411 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_traypublisher.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_traypublisher.json @@ -68,6 +68,11 @@ { "type": "separator" }, + { + "key": "allow_sequences", + "label": "Allow sequences", + "type": "boolean" + }, { "type": "list", "key": "extensions", @@ -76,39 +81,6 @@ "collapsible_key": true, "collapsed": false, "object_type": "text" - }, - { - "key": "allow_sequences", - "label": "Allow sequences", - "type": "dict-conditional", - "use_label_wrap": true, - "collapsible_key": true, - "enum_key": "allow", - "enum_children": [ - { - "key": "all", - "label": "Yes (all extensions)" - }, - { - "key": "selection", - "label": "Yes (limited extensions)", - "children": [ - { - "type": "list", - "key": "extensions", - "label": "Extensions", - "use_label_wrap": true, - "collapsible_key": true, - "collapsed": false, - "object_type": "text" - } - ] - }, - { - "key": "no", - "label": "No" - } - ] } ] } diff --git a/openpype/widgets/attribute_defs/files_widget.py b/openpype/widgets/attribute_defs/files_widget.py index cb339f3d52..f694f2473f 100644 --- a/openpype/widgets/attribute_defs/files_widget.py +++ b/openpype/widgets/attribute_defs/files_widget.py @@ -74,11 +74,11 @@ class DropEmpty(QtWidgets.QWidget): class FilesModel(QtGui.QStandardItemModel): - def __init__(self, single_item, sequence_exts): + def __init__(self, single_item, allow_sequences): super(FilesModel, self).__init__() self._single_item = single_item - self._sequence_exts = sequence_exts + self._allow_sequences = allow_sequences self._items_by_id = {} self._file_items_by_id = {} @@ -89,7 +89,7 @@ class FilesModel(QtGui.QStandardItemModel): if not items: return - file_items = FileDefItem.from_value(items, self._sequence_exts) + file_items = FileDefItem.from_value(items, self._allow_sequences) if not file_items: return @@ -325,13 +325,13 @@ class FilesView(QtWidgets.QListView): class FilesWidget(QtWidgets.QFrame): value_changed = QtCore.Signal() - def __init__(self, single_item, sequence_exts, parent): + def __init__(self, single_item, allow_sequences, parent): super(FilesWidget, self).__init__(parent) self.setAcceptDrops(True) empty_widget = DropEmpty(self) - files_model = FilesModel(single_item, sequence_exts) + files_model = FilesModel(single_item, allow_sequences) files_proxy_model = FilesProxyModel() files_proxy_model.setSourceModel(files_model) files_view = FilesView(self) diff --git a/openpype/widgets/attribute_defs/widgets.py b/openpype/widgets/attribute_defs/widgets.py index 3f36c078cb..97e7d698b5 100644 --- a/openpype/widgets/attribute_defs/widgets.py +++ b/openpype/widgets/attribute_defs/widgets.py @@ -441,7 +441,7 @@ class UnknownAttrWidget(_BaseAttrDefWidget): class 
FileAttrWidget(_BaseAttrDefWidget): def _ui_init(self): input_widget = FilesWidget( - self.attr_def.single_item, self.attr_def.sequence_extensions, self + self.attr_def.single_item, self.attr_def.allow_sequences, self ) if self.attr_def.tooltip: From b0fcfc6feaa772df0a1f1e21b6dd641194169b34 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Tue, 26 Apr 2022 15:24:08 +0200 Subject: [PATCH 311/357] handle default extension --- .../deadline/plugins/publish/submit_maya_deadline.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index 347b6ab0fe..2fc495fa76 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -227,7 +227,6 @@ def get_renderer_variables(renderlayer, root): "d_it": None, "d_null": None, "d_openexr": "exr", - "d_openexr3": "exr", "d_png": "png", "d_pointcloud": "ptc", "d_targa": "tga", @@ -236,8 +235,9 @@ def get_renderer_variables(renderlayer, root): } extension = display_types.get( - cmds.listConnections("rmanDefaultDisplay.displayType")[0] - ) + cmds.listConnections("rmanDefaultDisplay.displayType")[0], + "exr" + ) or "exr" filename_prefix = "{}/{}".format( cmds.getAttr("rmanGlobals.imageOutputDir"), From 342fda6315a48a40580a885969940b32588de19e Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 26 Apr 2022 15:49:22 +0200 Subject: [PATCH 312/357] fixed splitting of sequence --- openpype/lib/attribute_definitions.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/openpype/lib/attribute_definitions.py b/openpype/lib/attribute_definitions.py index ea3da53a9e..0c40d0f195 100644 --- a/openpype/lib/attribute_definitions.py +++ b/openpype/lib/attribute_definitions.py @@ -379,11 +379,11 @@ class FileDefItem(object): if not self.is_sequence: raise ValueError("Cannot split single file item") - output = [] - for filename in self.filenames: - path = os.path.join(self.directory, filename) - output.append(self.from_paths([path])) - return output + paths = [ + os.path.join(self.directory, filename) + for filename in self.filenames + ] + return self.from_paths(paths, False) @property def ext(self): From 1c7c0fef32180917772d24932dc11a1952740f92 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 26 Apr 2022 15:49:46 +0200 Subject: [PATCH 313/357] added splitting menu option to files item widget --- openpype/tools/resources/images/menu.png | Bin 0 -> 8472 bytes .../widgets/attribute_defs/files_widget.py | 87 ++++++++++++++++-- 2 files changed, 78 insertions(+), 9 deletions(-) create mode 100644 openpype/tools/resources/images/menu.png diff --git a/openpype/tools/resources/images/menu.png b/openpype/tools/resources/images/menu.png new file mode 100644 index 0000000000000000000000000000000000000000..14a991f092fb9cf54363eccb16d1836704846ad8 GIT binary patch literal 8472 zcmZX41z3~s+x9a?_vn(Cq(~!OA}9!q9F2@rLb?Un6lDT}h@_)_l#CWo1c4DsC?POH ziP4B4-Sv*&`@R47JHBtnvE!-hx%0ZO`@GKcd0_&(Lr2X;4FCY0zMhU50Dy?6Ab^sb zc(4z6=T1D325ReDP!fNUl&)yvHI<*9O&|b}uU-B@yunI0h&MTdbghHTecgjXodVo| z(9lpx4CS#Tqy|Dgt`%1jD05fy9u7% zoJL>Y&J$G1m@EL@hPV5}6{G+7b5-BgX}JcKII%1dvI3w1(tfOdTAmUx0c$c$88+Hb zLeSUNlZ+pFRcB!tch+!}Ex`j!Nw)!@o91BHsE;KgV)YsdVmue=Y1Po7E=x*=<^T z3Z%&g1B}VPV?<8-KHPUiE&t9$@AMoltCaaxHoX4%;#EPltm7;P;!c)ti7UmdQQO*x zcKL%|fFHU+_85f8ur(kL0vLdJR2P+S^j-Lj9u~_E-R4CwXiJZCXOQw}2a?Ww!lhg6 
z&6GZKH5w)ZAe_cZjN8d0wyUnp)vq%|7_2Qvy_ICdhe$sxX!6_%XE0L-WB$R4o6+>V zE~W;c+jIm9T$7nsDTs{MDSno{Y}Ly{Xo9#HAeQ6~AScb<;9r{xekkm{Oj|umd0k>( z4nSj|E|D}d8V1Jx>X~9}RUumn351ghW7u8cr{}C(zsXJThM!q^3cNHsCUzv2MA1Ca z!rlwTjpByy{qD4-euiG7$8c6T3}Z{zechKLjx^GmUX|@XX?q@ z(&w)&!#++@`rC^zk+k*T_B~kX^h5;ozbEyoR2ItNTE$K;7%%;9)oew64j6@Ui)v3~YAc@4$6z^90OzG?@lo^=|Iw>9{ z@h0sz!8?|)4e{wlW>F*27=!WQ+@GiGI7JkA>hz09;ty1ejF<8C?Mgsr z0ZJGeQ9;_FeBZ6o5R0q73N1j1qXtSwN$kt9=|yZm-nNq=IV7UftiPJ$g5YOS&R1aM z=ePxrZb->qW?pC`aM3%bO4KU8^NI|E{T2xUU z*hUT03s_?3_tr6R3bXzLip1;f8mI+d&>Vg1O!C)LUuGB!@5Ykvl{srZV~&%L%z7O3 zr;`0=g@O*C&gQdk`nAoW60>Td-&mjeePE35oHWehAsql>i_}zKd6O|3?@j4)?aY99 z2uq=JJW5Duk?p0cIENoJ`g%;jobtxeLNvCD`LGe2Gnl}6FZReg&U&^6g1uk zFWoU-fc^al$jpDM>e0;XA}8!%BkuQ~sR(K72s0}n7o(~lNN#@mEJssh4Z_?xUvgxK z9O~Pky^Xn=EBHNAO*xtH(!M8{jFCA^Qh4Si#~Zx7 zApYXN_l#O59_8|9%nYol-e3--yAJI4*8EyVp6%RK&RLeg1uuA=FXCo>9+%vkn^eX~ zr*Avoe_|CNkPa}4?=RTY&40wn^qDlY=^J2fbulhk+~eU>cS$^w&??7&OgQZgo}$DQ zVBZF;4tRicTqyNix1-9|$uG@t=nZqw04LN> z);dSgY|?DYT?Dk78Ys59Xk+Y3Ud_m%k{H!opW9r9!G(jC3o68WtWS5TW{K{LKyO*6 z{>8QzKMaI3CP~oU8Zlf4;R{}lca%(KjZd;C(N&b`DFYfPdIScmkrv#MSq02l&Xjd` zo&MQ6IE9hSVN??%Lul#2m2uO(Eb(^8S`-!e`o25ZX<|e56eHOANikEJy&_X<)3Vq# zf`(D9q8xPFee}Uh5L3?n$@F6pIy#aiq=oTRz0|w=-hY{*wLO z`DMwZNp82@#gQ2{i;vg7$xcBAd{klijLG3Ri+LXwCHXQ}g-lXp!%b&0f!a7BFRIK) zwai#^rMa&k;o~Ljc4P?y&I1OP9|6aDjgT$Qb!U#J@*JBPWhus z(1&l@g8-%kYsGOet3-DdDu?9b4Bg?HMImlc>DF3<>`~1}8>ElDCu8Sbza&uwj=o-} zoc2OLw*+dk2KcyGWhnnpQZcFdwwiOziErV@H&{+xufm^mIe_e^2^$QzMe^;_iM?X= z?|%$Bx!yq+Uj3IC#B#~MDh!3QMhIUWVoer{#8i1+TR&14M?{WE20 zcJRG}uTe@c$<9xaW&Vo6;qRC1h>`Hl_}+b8)#?t~KWejyH>K%Ue2fqFj-?qKsx zYNVE=`h35&vu{J5+aKoWJZaQ7UG71w>k%{W1M4e?;Veof@EyrPC4T6(Q~Un6heX(b zoeM^!V$UV&U)|Ml72Ra4t}*mJI8dvB&;-#mt_aFTo*t zW!{O+{&w=lf?dNNKn?&O#ddM<& zxJrKP7EZ)$ph~H$Qsxwsjq`zfmEPRNKmTWj^foI3Aqgn3A>JRm1wJH|6ve4=vo3$e zFMn1U27u3eb}pxdz|(*TknlUlBl{9|M9W#{=FUUXX~&m|o--tvIESeVlYH9~*tcGulLFfuBF+)D!kw$r{dQi2sC+eB-y|hBjq?gk8-- zoon9^2fq1UD2w&+DU14Fz*Jr*Mqy3@X`47kn!w~p>lX_N-xqdWVQo+3$jgM~$EgW=h~-O*O11*5F+>=Z@_fOCPhEQK-BC6b@89$`66E6cg;~c3 zFM*`FM3{_N@3>urlNQ4PHxbBj6AHgClCr&9^(^?{wfJL%1mnu$yjH-@#&ILsY&Rg6 zwvHqtZtBMuua`e7VzUyBRg83#zfSiucW~y}{?G#9JdZA}Nzc4nj}RRa^xH1dbJ5Ht z(9s<41>_Q8ng%J4i{O$1Jn9?Kz&dU!@p?%^5aZVtMjhix_mNYXAHy4Yt*kiLH3gB$ z2O~ZX?^bJZq2_=50+yTn*WzPR6`wPzozWlD0Cza4Yz&lW9ONG(A7;swSeNrBg#WPC z-b9l{MW}25eg*U&4wOJ#*m4+Hp>T9AqZX7=%xN*&m$gDy2GF1;b3dsu)dG4 zO_WO>Fr1RlkrNo7h9r@=Tb<{1`6D16;PadN_vcv;^RO+i{hJbpgV5c21h@CbJdM1E zNNfn7#%dHzHJ`!@8D9S#Ramf2m74S5F6EQ1rr%e9^1l}wH$2DuvslFkb~nTBfe`to z#3IVA!;53PnsPy%&K!Sw&VfN;L=3hi?XvVbbax1;rhD_%kJ*GS*vcmoj@Z86t*Jhb zyGmcKaK?As?I_iYG2;a2-EEzzo2@*FPO}eqLsm6|^dw06}R2F%vdM9H*F?W~s#BiR#S}KL)_=P(xavp0W!= z6;mV0T|^BOH4?Xk0_5+~V5Lto>WSf*(vQdohr|I$ID;SEM7mQzTs$z=StCW$LLxm! 
zi5MG!VkRe2rT5+PEqp7QOn*xMbaEXmW@XzO!vvTV*C`&H_I$7E@tZ$k9U>%Q zqINO4YpCNK2JqvqDGY^ntYSq`ESE5 zHL(XC9@xS>NXmV{5`?Hj>rKdkyI$1F-%Cs(g+LH>H;0Mc{s0P*z4W}~Y|5P8tbGN7 z(9`E_l?xg3bzGMBB1-9+-rst}b?L>=Q`1N5Km#PcobrFgfvz5M8mVS7wO_^9x2O`j z|9E2wJUGxwn=$?uF?;;iV{O59cO7j=kyi}TLMHsF@9YjHk{i9H06_cyI{XXu>QWmg zUI6C{<{@SjYmlBpgiI)!wH z6w=Un*#8&!G{M!phgyap05{lsVFg!=?}idO6I9sQorQVaUQ9bbY^R1KqxdrG zEY#F-4JC*bEFD|rBU54y;r676*PAt z0Q7+Cg_0bbvM38wHfFQ0HP}-kQP~bWM)Yr%AKaFpJY(bKD?kXDFjxH)B|Bl}w4yVt zJ%jz_3(_QO6LX*;rTW>}6YVMT&XTkA!qM4Av@3y@C zaI9l?oaa+eT%UCNIQx^87b!2ZT16s`uk3y#0UFx$zVG8$3O@XCviAhRWaH%b%_*m= z^BkoZSb)E{Vn0!w^6mA}TggPcAf?OoNZh!N`SYa6894xtVs4nyhWKI7YrZ2P6>D#8 zktqq<2~oT4{fbz40RDkA1^TDvcM$nM{K zYW8SjZ{v$OWpSpeulyezRlPCeX*HLFRY82% zxOvss8@I~mLFt!o`U?&b!TFkb`n}$K`(`Q09-V!}$MgNvdv(iee*}o%G#VS1`Ll0Q=V(*lo-oM}@Vy=PMI!r2+;$ zK3XCnHg=Wz*)N11_Zu^AlN9zl&s%-?f3YmylXF)>O9&dk>}QK_HQ}Zi--^M$A% z0z!^uf*eT;h^>4el6defmFm0ji!@t3p{@8ufwd3(;?+r5%j}`5Vj@ut-9eV#pe}pn zi(O59zOD*m(plifCDAnz8BXJOi|ngZz&vFSYsCW*Z$wyIHA7bqD}>W%+$24Cz3=YV zGEbu>Zm;Pl9I6dXqcdfwS!5M}ai72I-4`8Nu@tLf2>?)8s)nj1$MTmh|DBX1Dnqx$ zyj$f#i6;0j_2*^|AV5ne4e{3Ow^%!vE?R2i4I6~>c9>sFtdpVvyuQ-4=_N+Hu?{s; z@myUm5{C1aC1`v1G0=t8{AC4@Xf@J=iY8RsZ&D&5F%5K;rXKc3Lz+GP+`o5VBM{B4 zlPk)uf7DNJp&Vc6jpF03biGl!AN{Q2DE}sd0t;XfEFfKytbj#`XbYvc!&w3_E$G0# zi1x{PSJ*CMNV|bvMC6E@;N5 zF8f-B5R_thFlm0o2vblSu(JBghR4Gvj0~9ia>I&o|C`-=HYF|i#luOWMFRRWyY(}N zs8&f!NXXd@`xdDb*BI08Pca>oX=~K>Uc_8;P|4_67gLEAv&4gjT4_EWV2i;p+Z!-$=yo33lIRYs7zBM@JaEQysl{ zKt0>$=zb(F*LR2aS(NxUg%c-HzcMsW)-A`xVJUz5>A2=2P)lmjk-$7F5Y!5GATjK& zn{BaS;Hkt$_vTA={k~bgTFcVyQt(q&`)j!&-P>@0h;yPSOEb#f=UBd{S2Bd>4iN1? zjl&UAU5{%SVqA;oE%s{4xfR6pS)TP*KWJhIAz>qu-k0Qt|D3&@mc|lAhLh7{X{loRe~}vK8F;z2~F~@R>b~;}C5lqJAVXyRId? zp?9U^b;ImK-pvV2V@_jK^iT}MBf`3_Q68_fd;DS0b63 z7^Px#JCYD8ax}Gb>z%!tP*UPHEwBN$1gkC&IT5G`O3}J2Bex>)AAxc4E}vZR{=lF3 zZ>MZ|@rNjwkoSCFd^2*iH~$&Xyi>SPF0*?*=RQF~){9myvYaxB>YGJL`J`~*3?qo65h4GIdq{$73d=|+0qi7GNs zt@agoihHzDD0kMkv3@&pK z6DfJ|!sT;T(9#U0p~RD;e0~tnSVo%-@`edIi6D~Eks_8s!C90LPGRVbK)L+R~{t-dSay8Rwkt5n#{dGyh7J!(~8*Uc8w#u59AOtBes+b zr~g9aCycQZIqAV> zV?rp7!rbeHH~*WPO_PCig+e(KntX?{M;o&nN$IEomPPoC)*C1Qpu4*$83N6`x&J27 zw?BWqQMT%69`7NfvrvG0hLhhe)ijkDU<^4tQ?7sIDQW~D|JH&-lb3)WTHYi}K<($d z$r3WAU4{htyazpwd?1bNh)2E>Z>|Aa2V(92VevoxHGl^Pd&>01px2DU)FYw4XK44n zMB(08lpL=X1zm>o4t#%`k#;j9gG4G`RG2O!(}3t1gfUU&I`Jk`a+;03$TJX2o42k7I3cwSq>Bu>ednm9dSL~Uh7Tc-td3WQmCO5DhcDd{>Op+SAY9^2aL zD-bhjoFL%=LA3oB2I~E@H0>i5>m#DFCMceD?Q+P&2yAinj(KbfAn^$%#h=`xFKUNhVwd4w zyWZYY<8D`}@-jb$IY-V;Of|@Wj5s?o=rSbO%*&s;?OJ<4>tb9d%a+MAW&gY`up>BY zJG;r8wFm*- zv9~qi+)LD(xgg0?loJmI{yFOBc#}%6?epw%9ht?8G4leeIy%z~n7fpu6hdjkST#NA z^FHkH0P#Jdj7`g!EEA*UN4}_MWa*xB^-X-!P54+Ln|X*D;Hsw<6eK9Xfzgra&Ab6# z$F0Zsp6C)KI}s^0fCExHYbZqe)Mb+*w*Ea;LoDK0wKt2kbR`Vv20@M(r88f#{SigP z*2PhOi;uB*a6l4@U7cb6;}M|-2ofnSXM>#2S5XjsvLYAtYIMeh2|QO%yku_^c*W(R zcZK-Dz>CXo0XQHYS6#c*Rqio+Oy8#<3gp`Fho9lDVjGn!3P*_}(#DvaAG7u;_J`cY z^rgs75Q7jPuM5+u)J8n{FT|FP AivR!s literal 0 HcmV?d00001 diff --git a/openpype/widgets/attribute_defs/files_widget.py b/openpype/widgets/attribute_defs/files_widget.py index f694f2473f..5ea7fcc5eb 100644 --- a/openpype/widgets/attribute_defs/files_widget.py +++ b/openpype/widgets/attribute_defs/files_widget.py @@ -5,12 +5,12 @@ import uuid from Qt import QtWidgets, QtCore, QtGui from openpype.lib import FileDefItem -from openpype.tools.utils import paint_image_with_color -# TODO change imports -from openpype.tools.resources import ( - get_pixmap, - get_image, +from openpype.tools.utils import ( + paint_image_with_color, + ClickableLabel, ) +# TODO change imports +from openpype.tools.resources 
import get_image from openpype.tools.utils import ( IconButton, PixmapLabel @@ -22,7 +22,8 @@ ITEM_ICON_ROLE = QtCore.Qt.UserRole + 3 FILENAMES_ROLE = QtCore.Qt.UserRole + 4 DIRPATH_ROLE = QtCore.Qt.UserRole + 5 IS_DIR_ROLE = QtCore.Qt.UserRole + 6 -EXT_ROLE = QtCore.Qt.UserRole + 7 +IS_SEQUENCE_ROLE = QtCore.Qt.UserRole + 7 +EXT_ROLE = QtCore.Qt.UserRole + 8 class DropEmpty(QtWidgets.QWidget): @@ -148,6 +149,7 @@ class FilesModel(QtGui.QStandardItemModel): item.setData(icon_pixmap, ITEM_ICON_ROLE) item.setData(file_item.ext, EXT_ROLE) item.setData(file_item.is_dir, IS_DIR_ROLE) + item.setData(file_item.is_sequence, IS_SEQUENCE_ROLE) return item_id, item @@ -216,7 +218,9 @@ class FilesProxyModel(QtCore.QSortFilterProxyModel): class ItemWidget(QtWidgets.QWidget): - def __init__(self, item_id, label, pixmap_icon, parent=None): + split_requested = QtCore.Signal(str) + + def __init__(self, item_id, label, pixmap_icon, is_sequence, parent=None): self._item_id = item_id super(ItemWidget, self).__init__(parent) @@ -226,13 +230,67 @@ class ItemWidget(QtWidgets.QWidget): icon_widget = PixmapLabel(pixmap_icon, self) label_widget = QtWidgets.QLabel(label, self) + label_size_hint = label_widget.sizeHint() + height = label_size_hint.height() + actions_menu_pix = paint_image_with_color( + get_image(filename="menu.png"), QtCore.Qt.white + ) + + split_btn = ClickableLabel(self) + split_btn.setFixedSize(height, height) + split_btn.setPixmap(actions_menu_pix) + split_btn.setVisible(is_sequence) + layout = QtWidgets.QHBoxLayout(self) - layout.setContentsMargins(5, 5, 0, 5) + layout.setContentsMargins(5, 5, 5, 5) layout.addWidget(icon_widget, 0) layout.addWidget(label_widget, 1) + layout.addWidget(split_btn, 0) + + split_btn.clicked.connect(self._on_actions_clicked) self._icon_widget = icon_widget self._label_widget = label_widget + self._split_btn = split_btn + self._actions_menu_pix = actions_menu_pix + self._last_scaled_pix_height = None + + def _update_btn_size(self): + label_size_hint = self._label_widget.sizeHint() + height = label_size_hint.height() + if height == self._last_scaled_pix_height: + return + self._last_scaled_pix_height = height + self._split_btn.setFixedSize(height, height) + pix = self._actions_menu_pix.scaled( + height, height, + QtCore.Qt.KeepAspectRatio, + QtCore.Qt.SmoothTransformation + ) + self._split_btn.setPixmap(pix) + + def showEvent(self, event): + super(ItemWidget, self).showEvent(event) + self._update_btn_size() + + def resizeEvent(self, event): + super(ItemWidget, self).resizeEvent(event) + self._update_btn_size() + + def _on_actions_clicked(self): + menu = QtWidgets.QMenu(self._split_btn) + + action = QtWidgets.QAction("Split sequence", menu) + action.triggered.connect(self._on_split_sequence) + + menu.addAction(action) + + pos = self._split_btn.rect().bottomLeft() + point = self._split_btn.mapToGlobal(pos) + menu.popup(point) + + def _on_split_sequence(self): + self.split_requested.emit(self._item_id) class InViewButton(IconButton): @@ -404,8 +462,10 @@ class FilesWidget(QtWidgets.QFrame): continue label = index.data(ITEM_LABEL_ROLE) pixmap_icon = index.data(ITEM_ICON_ROLE) + is_sequence = index.data(IS_SEQUENCE_ROLE) - widget = ItemWidget(item_id, label, pixmap_icon) + widget = ItemWidget(item_id, label, pixmap_icon, is_sequence) + widget.split_requested.connect(self._on_split_request) self._files_view.setIndexWidget(index, widget) self._files_proxy_model.setData( index, widget.sizeHint(), QtCore.Qt.SizeHintRole @@ -437,6 +497,15 @@ class FilesWidget(QtWidgets.QFrame): 
if not self._in_set_value: self.value_changed.emit() + def _on_split_request(self, item_id): + file_item = self._files_model.get_file_item_by_id(item_id) + if not file_item: + return + + new_items = file_item.split_sequence() + self._remove_item_by_ids([item_id]) + self._add_filepaths(new_items) + def _on_remove_requested(self): items_to_delete = self._files_view.get_selected_item_ids() if items_to_delete: From f1434fa175b42f314bb92d13e32ef6ae8e466a55 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 26 Apr 2022 16:15:08 +0200 Subject: [PATCH 314/357] Modified publishing plugins to work with general families --- openpype/hosts/traypublisher/api/plugin.py | 1 + .../publish/collect_simple_instances.py | 48 +++++++++++++++++++ .../plugins/publish/collect_workfile.py | 31 ------------ .../plugins/publish/validate_filepaths.py | 45 +++++++++++++++++ .../plugins/publish/validate_workfile.py | 35 -------------- .../widgets/attribute_defs/files_widget.py | 11 ++++- 6 files changed, 104 insertions(+), 67 deletions(-) create mode 100644 openpype/hosts/traypublisher/plugins/publish/collect_simple_instances.py delete mode 100644 openpype/hosts/traypublisher/plugins/publish/collect_workfile.py create mode 100644 openpype/hosts/traypublisher/plugins/publish/validate_filepaths.py delete mode 100644 openpype/hosts/traypublisher/plugins/publish/validate_workfile.py diff --git a/openpype/hosts/traypublisher/api/plugin.py b/openpype/hosts/traypublisher/api/plugin.py index d4bbe4c9d6..731bf7918a 100644 --- a/openpype/hosts/traypublisher/api/plugin.py +++ b/openpype/hosts/traypublisher/api/plugin.py @@ -58,6 +58,7 @@ class SettingsCreator(TrayPublishCreator): def create(self, subset_name, data, pre_create_data): # Pass precreate data to creator attributes data["creator_attributes"] = pre_create_data + data["settings_creator"] = True # Create new instance new_instance = CreatedInstance(self.family, subset_name, data, self) # Host implementation of storing metadata about instance diff --git a/openpype/hosts/traypublisher/plugins/publish/collect_simple_instances.py b/openpype/hosts/traypublisher/plugins/publish/collect_simple_instances.py new file mode 100644 index 0000000000..5fc66084d6 --- /dev/null +++ b/openpype/hosts/traypublisher/plugins/publish/collect_simple_instances.py @@ -0,0 +1,48 @@ +import os +import pyblish.api + + +class CollectSettingsSimpleInstances(pyblish.api.InstancePlugin): + """Collect data for instances created by settings creators.""" + + label = "Collect Settings Simple Instances" + order = pyblish.api.CollectorOrder - 0.49 + + hosts = ["traypublisher"] + + def process(self, instance): + if not instance.data.get("settings_creator"): + return + + if "families" not in instance.data: + instance.data["families"] = [] + + if "representations" not in instance.data: + instance.data["representations"] = [] + repres = instance.data["representations"] + + creator_attributes = instance.data["creator_attributes"] + + if creator_attributes.get("review"): + instance.data["families"].append("review") + + filepath_item = creator_attributes["filepath"] + self.log.info(filepath_item) + filepaths = [ + os.path.join(filepath_item["directory"], filename) + for filename in filepath_item["filenames"] + ] + + instance.data["sourceFilepaths"] = filepaths + + filenames = filepath_item["filenames"] + ext = os.path.splitext(filenames[0])[-1] + if len(filenames) == 1: + filenames = filenames[0] + + repres.append({ + "ext": ext, + "name": ext, + "stagingDir": filepath_item["directory"], + "files": filenames + }) 
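
For context on the collector added above: the "filepath" value it reads from creator_attributes is the serialized file item coming from the files widget (matching what FilesWidget.current_value returns via FileDefItem.to_dict() elsewhere in this patch series). A minimal sketch of the two shapes the plugin handles, with hypothetical paths and filenames; additional FileDefItem keys may also be present:

    # Hypothetical single-file selection: the representation's "files"
    # ends up as a single string ("review.mov") because len(filenames) == 1.
    filepath_item = {
        "directory": "/path/to/source",
        "filenames": ["review.mov"],
    }

    # Hypothetical frame-sequence selection: "files" stays a list and the
    # extension is taken from the first filename (".exr" here).
    filepath_item = {
        "directory": "/path/to/source",
        "filenames": ["plate.1001.exr", "plate.1002.exr"],
    }
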
diff --git a/openpype/hosts/traypublisher/plugins/publish/collect_workfile.py b/openpype/hosts/traypublisher/plugins/publish/collect_workfile.py deleted file mode 100644 index d48bace047..0000000000 --- a/openpype/hosts/traypublisher/plugins/publish/collect_workfile.py +++ /dev/null @@ -1,31 +0,0 @@ -import os -import pyblish.api - - -class CollectWorkfile(pyblish.api.InstancePlugin): - """Collect representation of workfile instances.""" - - label = "Collect Workfile" - order = pyblish.api.CollectorOrder - 0.49 - families = ["workfile"] - hosts = ["traypublisher"] - - def process(self, instance): - if "representations" not in instance.data: - instance.data["representations"] = [] - repres = instance.data["representations"] - - creator_attributes = instance.data["creator_attributes"] - filepath = creator_attributes["filepath"] - instance.data["sourceFilepath"] = filepath - - staging_dir = os.path.dirname(filepath) - filename = os.path.basename(filepath) - ext = os.path.splitext(filename)[-1] - - repres.append({ - "ext": ext, - "name": ext, - "stagingDir": staging_dir, - "files": filename - }) diff --git a/openpype/hosts/traypublisher/plugins/publish/validate_filepaths.py b/openpype/hosts/traypublisher/plugins/publish/validate_filepaths.py new file mode 100644 index 0000000000..41df638ac6 --- /dev/null +++ b/openpype/hosts/traypublisher/plugins/publish/validate_filepaths.py @@ -0,0 +1,45 @@ +import os +import pyblish.api +from openpype.pipeline import PublishValidationError + + +class ValidateWorkfilePath(pyblish.api.InstancePlugin): + """Validate existence of workfile instance existence.""" + + label = "Validate Workfile" + order = pyblish.api.ValidatorOrder - 0.49 + + hosts = ["traypublisher"] + + def process(self, instance): + if "sourceFilepaths" not in instance.data: + self.log.info(( + "Can't validate source filepaths existence." + " Instance does not have collected 'sourceFilepaths'" + )) + return + + filepaths = instance.data.get("sourceFilepaths") + + not_found_files = [ + filepath + for filepath in filepaths + if not os.path.exists(filepath) + ] + if not_found_files: + joined_paths = "\n".join([ + "- {}".format(filepath) + for filepath in not_found_files + ]) + raise PublishValidationError( + ( + "Filepath of '{}' instance \"{}\" does not exist:\n{}" + ).format( + instance.data["family"], instance.data["name"], joined_paths + ), + "File not found", + ( + "## Files were not found\nFiles\n{}" + "\n\nCheck if the path is still available." + ).format(joined_paths) + ) diff --git a/openpype/hosts/traypublisher/plugins/publish/validate_workfile.py b/openpype/hosts/traypublisher/plugins/publish/validate_workfile.py deleted file mode 100644 index 7501051669..0000000000 --- a/openpype/hosts/traypublisher/plugins/publish/validate_workfile.py +++ /dev/null @@ -1,35 +0,0 @@ -import os -import pyblish.api -from openpype.pipeline import PublishValidationError - - -class ValidateWorkfilePath(pyblish.api.InstancePlugin): - """Validate existence of workfile instance existence.""" - - label = "Validate Workfile" - order = pyblish.api.ValidatorOrder - 0.49 - families = ["workfile"] - hosts = ["traypublisher"] - - def process(self, instance): - filepath = instance.data["sourceFilepath"] - if not filepath: - raise PublishValidationError( - ( - "Filepath of 'workfile' instance \"{}\" is not set" - ).format(instance.data["name"]), - "File not filled", - "## Missing file\nYou are supposed to fill the path." 
- ) - - if not os.path.exists(filepath): - raise PublishValidationError( - ( - "Filepath of 'workfile' instance \"{}\" does not exist: {}" - ).format(instance.data["name"], filepath), - "File not found", - ( - "## File was not found\nFile \"{}\" was not found." - " Check if the path is still available." - ).format(filepath) - ) diff --git a/openpype/widgets/attribute_defs/files_widget.py b/openpype/widgets/attribute_defs/files_widget.py index 5ea7fcc5eb..59e9029340 100644 --- a/openpype/widgets/attribute_defs/files_widget.py +++ b/openpype/widgets/attribute_defs/files_widget.py @@ -405,6 +405,7 @@ class FilesWidget(QtWidgets.QFrame): files_proxy_model.rowsRemoved.connect(self._on_rows_removed) files_view.remove_requested.connect(self._on_remove_requested) self._in_set_value = False + self._single_item = single_item self._empty_widget = empty_widget self._files_model = files_model @@ -432,6 +433,9 @@ class FilesWidget(QtWidgets.QFrame): all_same = False value = new_value + if not isinstance(value, (list, tuple, set)): + value = [value] + if value: self._add_filepaths(value) self._in_set_value = False @@ -448,7 +452,12 @@ class FilesWidget(QtWidgets.QFrame): file_item = self._files_model.get_file_item_by_id(item_id) if file_item is not None: file_items.append(file_item.to_dict()) - return file_items + + if not self._single_item: + return file_items + if file_items: + return file_items[0] + return FileDefItem.create_empty_item() def set_filters(self, folders_allowed, exts_filter): self._files_proxy_model.set_allow_folders(folders_allowed) From 52fd938b5aa9cb08f78ae2391798ea8e207d9883 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 26 Apr 2022 17:20:16 +0200 Subject: [PATCH 315/357] hound fixes --- .../traypublisher/plugins/create/create_from_settings.py | 1 - .../hosts/traypublisher/plugins/publish/validate_filepaths.py | 4 +++- openpype/lib/attribute_definitions.py | 1 - openpype/widgets/attribute_defs/widgets.py | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/traypublisher/plugins/create/create_from_settings.py b/openpype/hosts/traypublisher/plugins/create/create_from_settings.py index 836939fe94..baca274ea6 100644 --- a/openpype/hosts/traypublisher/plugins/create/create_from_settings.py +++ b/openpype/hosts/traypublisher/plugins/create/create_from_settings.py @@ -1,5 +1,4 @@ import os -import copy from openpype.api import get_project_settings diff --git a/openpype/hosts/traypublisher/plugins/publish/validate_filepaths.py b/openpype/hosts/traypublisher/plugins/publish/validate_filepaths.py index 41df638ac6..c7302b1005 100644 --- a/openpype/hosts/traypublisher/plugins/publish/validate_filepaths.py +++ b/openpype/hosts/traypublisher/plugins/publish/validate_filepaths.py @@ -35,7 +35,9 @@ class ValidateWorkfilePath(pyblish.api.InstancePlugin): ( "Filepath of '{}' instance \"{}\" does not exist:\n{}" ).format( - instance.data["family"], instance.data["name"], joined_paths + instance.data["family"], + instance.data["name"], + joined_paths ), "File not found", ( diff --git a/openpype/lib/attribute_definitions.py b/openpype/lib/attribute_definitions.py index 0c40d0f195..ef87002a63 100644 --- a/openpype/lib/attribute_definitions.py +++ b/openpype/lib/attribute_definitions.py @@ -3,7 +3,6 @@ import re import collections import uuid import json -import copy from abc import ABCMeta, abstractmethod import six diff --git a/openpype/widgets/attribute_defs/widgets.py b/openpype/widgets/attribute_defs/widgets.py index 97e7d698b5..875b69acb4 100644 --- 
a/openpype/widgets/attribute_defs/widgets.py +++ b/openpype/widgets/attribute_defs/widgets.py @@ -441,7 +441,7 @@ class UnknownAttrWidget(_BaseAttrDefWidget): class FileAttrWidget(_BaseAttrDefWidget): def _ui_init(self): input_widget = FilesWidget( - self.attr_def.single_item, self.attr_def.allow_sequences, self + self.attr_def.single_item, self.attr_def.allow_sequences, self ) if self.attr_def.tooltip: From 0c643190463f82def4c3aaeaed99ea3fdf22f9e7 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 27 Apr 2022 10:49:17 +0200 Subject: [PATCH 316/357] disable files widget abilities on multivalue --- openpype/lib/attribute_definitions.py | 14 ++- .../widgets/attribute_defs/files_widget.py | 94 ++++++++++++++----- 2 files changed, 81 insertions(+), 27 deletions(-) diff --git a/openpype/lib/attribute_definitions.py b/openpype/lib/attribute_definitions.py index ef87002a63..bfac9da5ce 100644 --- a/openpype/lib/attribute_definitions.py +++ b/openpype/lib/attribute_definitions.py @@ -422,8 +422,14 @@ class FileDefItem(object): @classmethod def from_value(cls, value, allow_sequences): - multi = isinstance(value, (list, tuple, set)) - if not multi: + """Convert passed value to FileDefItem objects. + + Returns: + list: Created FileDefItem objects. + """ + + # Convert single item to iterable + if not isinstance(value, (list, tuple, set)): value = [value] output = [] @@ -450,9 +456,7 @@ class FileDefItem(object): if str_filepaths: output.extend(cls.from_paths(str_filepaths, allow_sequences)) - if multi: - return output - return output[0] + return output @classmethod def from_dict(cls, data): diff --git a/openpype/widgets/attribute_defs/files_widget.py b/openpype/widgets/attribute_defs/files_widget.py index 59e9029340..c76474d957 100644 --- a/openpype/widgets/attribute_defs/files_widget.py +++ b/openpype/widgets/attribute_defs/files_widget.py @@ -79,6 +79,7 @@ class FilesModel(QtGui.QStandardItemModel): super(FilesModel, self).__init__() self._single_item = single_item + self._multivalue = False self._allow_sequences = allow_sequences self._items_by_id = {} @@ -86,6 +87,13 @@ class FilesModel(QtGui.QStandardItemModel): self._filenames_by_dirpath = collections.defaultdict(set) self._items_by_dirpath = collections.defaultdict(list) + def set_multivalue(self, multivalue): + """Disable filtering.""" + + if self._multivalue == multivalue: + return + self._multivalue = multivalue + def add_filepaths(self, items): if not items: return @@ -94,7 +102,7 @@ class FilesModel(QtGui.QStandardItemModel): if not file_items: return - if self._single_item: + if not self._multivalue and self._single_item: file_items = [file_items[0]] current_ids = list(self._file_items_by_id.keys()) if current_ids: @@ -159,6 +167,15 @@ class FilesProxyModel(QtCore.QSortFilterProxyModel): super(FilesProxyModel, self).__init__(*args, **kwargs) self._allow_folders = False self._allowed_extensions = None + self._multivalue = False + + def set_multivalue(self, multivalue): + """Disable filtering.""" + + if self._multivalue == multivalue: + return + self._multivalue = multivalue + self.invalidateFilter() def set_allow_folders(self, allow=None): if allow is None: @@ -189,6 +206,10 @@ class FilesProxyModel(QtCore.QSortFilterProxyModel): return False def filterAcceptsRow(self, row, parent_index): + # Skip filtering if multivalue is set + if self._multivalue: + return True + model = self.sourceModel() index = model.index(row, self.filterKeyColumn(), parent_index) # First check if item is folder and if folders are enabled @@ -220,7 +241,9 @@ class 
FilesProxyModel(QtCore.QSortFilterProxyModel): class ItemWidget(QtWidgets.QWidget): split_requested = QtCore.Signal(str) - def __init__(self, item_id, label, pixmap_icon, is_sequence, parent=None): + def __init__( + self, item_id, label, pixmap_icon, is_sequence, multivalue, parent=None + ): self._item_id = item_id super(ItemWidget, self).__init__(parent) @@ -239,7 +262,10 @@ class ItemWidget(QtWidgets.QWidget): split_btn = ClickableLabel(self) split_btn.setFixedSize(height, height) split_btn.setPixmap(actions_menu_pix) - split_btn.setVisible(is_sequence) + if multivalue: + split_btn.setVisible(False) + else: + split_btn.setVisible(is_sequence) layout = QtWidgets.QHBoxLayout(self) layout.setContentsMargins(5, 5, 5, 5) @@ -327,11 +353,22 @@ class FilesView(QtWidgets.QListView): self._remove_btn = remove_btn def setSelectionModel(self, *args, **kwargs): + """Catch selection model set to register signal callback. + + Selection model is not available during initialization. + """ + super(FilesView, self).setSelectionModel(*args, **kwargs) selection_model = self.selectionModel() selection_model.selectionChanged.connect(self._on_selection_change) + def set_multivalue(self, multivalue): + """Disable remove button on multivalue.""" + + self._remove_btn.setVisible(not multivalue) + def has_selected_item_ids(self): + """Is any index selected.""" for index in self.selectionModel().selectedIndexes(): instance_id = index.data(ITEM_ID_ROLE) if instance_id is not None: @@ -340,6 +377,7 @@ class FilesView(QtWidgets.QListView): def get_selected_item_ids(self): """Ids of selected instances.""" + selected_item_ids = set() for index in self.selectionModel().selectedIndexes(): instance_id = index.data(ITEM_ID_ROLE) @@ -365,6 +403,8 @@ class FilesView(QtWidgets.QListView): self.remove_requested.emit() def _update_remove_btn(self): + """Position remove button to bottom right.""" + viewport = self.viewport() height = viewport.height() pos_x = viewport.width() - self._remove_btn.width() - 5 @@ -406,6 +446,7 @@ class FilesWidget(QtWidgets.QFrame): files_view.remove_requested.connect(self._on_remove_requested) self._in_set_value = False self._single_item = single_item + self._multivalue = False self._empty_widget = empty_widget self._files_model = files_model @@ -414,30 +455,24 @@ class FilesWidget(QtWidgets.QFrame): self._widgets_by_id = {} + def _set_multivalue(self, multivalue): + if self._multivalue == multivalue: + return + self._multivalue = multivalue + self._files_view.set_multivalue(multivalue) + self._files_model.set_multivalue(multivalue) + self._files_proxy_model.set_multivalue(multivalue) + def set_value(self, value, multivalue): self._in_set_value = True + widget_ids = set(self._widgets_by_id.keys()) self._remove_item_by_ids(widget_ids) - # TODO how to display multivalue? 
- all_same = True - if multivalue: - new_value = set() - item_row = None - for _value in value: - _value_set = set(_value) - new_value |= _value_set - if item_row is None: - item_row = _value_set - elif item_row != _value_set: - all_same = False - value = new_value + self._set_multivalue(multivalue) - if not isinstance(value, (list, tuple, set)): - value = [value] + self._add_filepaths(value) - if value: - self._add_filepaths(value) self._in_set_value = False def current_value(self): @@ -473,7 +508,13 @@ class FilesWidget(QtWidgets.QFrame): pixmap_icon = index.data(ITEM_ICON_ROLE) is_sequence = index.data(IS_SEQUENCE_ROLE) - widget = ItemWidget(item_id, label, pixmap_icon, is_sequence) + widget = ItemWidget( + item_id, + label, + pixmap_icon, + is_sequence, + self._multivalue + ) widget.split_requested.connect(self._on_split_request) self._files_view.setIndexWidget(index, widget) self._files_proxy_model.setData( @@ -507,6 +548,9 @@ class FilesWidget(QtWidgets.QFrame): self.value_changed.emit() def _on_split_request(self, item_id): + if self._multivalue: + return + file_item = self._files_model.get_file_item_by_id(item_id) if not file_item: return @@ -516,6 +560,9 @@ class FilesWidget(QtWidgets.QFrame): self._add_filepaths(new_items) def _on_remove_requested(self): + if self._multivalue: + return + items_to_delete = self._files_view.get_selected_item_ids() if items_to_delete: self._remove_item_by_ids(items_to_delete) @@ -544,6 +591,9 @@ class FilesWidget(QtWidgets.QFrame): return result def dragEnterEvent(self, event): + if self._multivalue: + return + mime_data = event.mimeData() if mime_data.hasUrls(): filepaths = [] @@ -561,7 +611,7 @@ class FilesWidget(QtWidgets.QFrame): def dropEvent(self, event): mime_data = event.mimeData() - if mime_data.hasUrls(): + if not self._multivalue and mime_data.hasUrls(): filepaths = [] for url in mime_data.urls(): filepath = url.toLocalFile() From 6f98abbee285d93c4d565fc231cba3d844691b84 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 27 Apr 2022 10:54:33 +0200 Subject: [PATCH 317/357] reset creator on create --- openpype/tools/publisher/widgets/create_dialog.py | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/openpype/tools/publisher/widgets/create_dialog.py b/openpype/tools/publisher/widgets/create_dialog.py index 21e1bd5cfc..22f358f1aa 100644 --- a/openpype/tools/publisher/widgets/create_dialog.py +++ b/openpype/tools/publisher/widgets/create_dialog.py @@ -553,7 +553,7 @@ class CreateDialog(QtWidgets.QDialog): identifier = index.data(CREATOR_IDENTIFIER_ROLE) - self._set_creator(identifier) + self._set_creator_by_identifier(identifier) def _on_plugins_refresh(self): # Trigger refresh only if is visible @@ -581,7 +581,7 @@ class CreateDialog(QtWidgets.QDialog): identifier = None if new_index.isValid(): identifier = new_index.data(CREATOR_IDENTIFIER_ROLE) - self._set_creator(identifier) + self._set_creator_by_identifier(identifier) def _update_help_btn(self): pos_x = self.width() - self._help_btn.width() @@ -633,9 +633,11 @@ class CreateDialog(QtWidgets.QDialog): else: self._detail_description_widget.setMarkdown(detailed_description) - def _set_creator(self, identifier): + def _set_creator_by_identifier(self, identifier): creator = self.controller.manual_creators.get(identifier) + self._set_creator(creator) + def _set_creator(self, creator): self._creator_short_desc_widget.set_plugin(creator) self._set_creator_detailed_text(creator) self._pre_create_widget.set_plugin(creator) @@ -861,7 +863,9 @@ class 
CreateDialog(QtWidgets.QDialog): )) error_msg = str(exc_value) - if error_msg is not None: + if error_msg is None: + self._set_creator(self._selected_creator) + else: box = CreateErrorMessageBox( creator_label, subset_name, From 9246383621aee77701d778a2252e9ef4495f33de Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 27 Apr 2022 10:58:14 +0200 Subject: [PATCH 318/357] Added overlay widget showing message thant creation finished --- openpype/tools/publisher/widgets/create_dialog.py | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/openpype/tools/publisher/widgets/create_dialog.py b/openpype/tools/publisher/widgets/create_dialog.py index 22f358f1aa..971799a35a 100644 --- a/openpype/tools/publisher/widgets/create_dialog.py +++ b/openpype/tools/publisher/widgets/create_dialog.py @@ -13,8 +13,10 @@ from openpype.pipeline.create import ( CreatorError, SUBSET_NAME_ALLOWED_SYMBOLS ) - -from openpype.tools.utils import ErrorMessageBox +from openpype.tools.utils import ( + ErrorMessageBox, + MessageOverlayObject +) from .widgets import IconValuePixmapLabel from .assets_widget import CreateDialogAssetsWidget @@ -239,6 +241,8 @@ class CreateDialog(QtWidgets.QDialog): self._name_pattern = name_pattern self._compiled_name_pattern = re.compile(name_pattern) + overlay_object = MessageOverlayObject(self) + context_widget = QtWidgets.QWidget(self) assets_widget = CreateDialogAssetsWidget(controller, context_widget) @@ -368,6 +372,8 @@ class CreateDialog(QtWidgets.QDialog): controller.add_plugins_refresh_callback(self._on_plugins_refresh) + self._overlay_object = overlay_object + self._splitter_widget = splitter_widget self._context_widget = context_widget @@ -393,6 +399,9 @@ class CreateDialog(QtWidgets.QDialog): self._prereq_timer = prereq_timer self._first_show = True + def _emit_message(self, message): + self._overlay_object.add_message(message) + def _context_change_is_enabled(self): return self._context_widget.isEnabled() @@ -865,6 +874,7 @@ class CreateDialog(QtWidgets.QDialog): if error_msg is None: self._set_creator(self._selected_creator) + self._emit_message("Creation finished...") else: box = CreateErrorMessageBox( creator_label, From 0653f77e06cb9a54f1f60b05500265ec7d8336de Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 27 Apr 2022 11:18:55 +0200 Subject: [PATCH 319/357] fixed Py2 compatibility --- openpype/tools/publisher/widgets/validations_widget.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/tools/publisher/widgets/validations_widget.py b/openpype/tools/publisher/widgets/validations_widget.py index 798c1f9d92..e7ab4ecf5a 100644 --- a/openpype/tools/publisher/widgets/validations_widget.py +++ b/openpype/tools/publisher/widgets/validations_widget.py @@ -142,7 +142,7 @@ class ValidationErrorTitleWidget(QtWidgets.QWidget): self._help_text_by_instance_id = help_text_by_instance_id def sizeHint(self): - result = super().sizeHint() + result = super(ValidationErrorTitleWidget, self).sizeHint() expected_width = 0 for idx in range(self._view_layout.count()): expected_width += self._view_layout.itemAt(idx).sizeHint().width() From 716a120cc1228a3bc9c6798e044b1a9a962debc5 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 27 Apr 2022 11:19:11 +0200 Subject: [PATCH 320/357] hide creator dialog on showing publish frame --- openpype/tools/publisher/window.py | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/openpype/tools/publisher/window.py b/openpype/tools/publisher/window.py index b74e95b227..ba0c4c54c3 100644 
--- a/openpype/tools/publisher/window.py +++ b/openpype/tools/publisher/window.py @@ -340,9 +340,23 @@ class PublisherWindow(QtWidgets.QDialog): def _set_publish_visibility(self, visible): if visible: widget = self.publish_frame + publish_frame_visible = True else: widget = self.subset_frame + publish_frame_visible = False self.content_stacked_layout.setCurrentWidget(widget) + self._set_publish_frame_visible(publish_frame_visible) + + def _set_publish_frame_visible(self, publish_frame_visible): + """Publish frame visibility has changed. + + Also used in TrayPublisher to be able handle start/end of publish + widget overlay. + """ + + # Hide creator dialog if visible + if publish_frame_visible and self.creator_window.isVisible(): + self.creator_window.close() def _on_reset_clicked(self): self.controller.reset() From 6a303361b2163d778b0479ecd14f1d2f36bf588a Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 27 Apr 2022 11:19:42 +0200 Subject: [PATCH 321/357] hide Change Project button of publish frame show --- openpype/tools/traypublisher/window.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/openpype/tools/traypublisher/window.py b/openpype/tools/traypublisher/window.py index bbb6398c35..1c201230f0 100644 --- a/openpype/tools/traypublisher/window.py +++ b/openpype/tools/traypublisher/window.py @@ -143,6 +143,12 @@ class TrayPublishWindow(PublisherWindow): self._back_to_overlay_btn = back_to_overlay_btn self._overlay_widget = overlay_widget + def _set_publish_frame_visible(self, publish_frame_visible): + super(TrayPublishWindow, self)._set_publish_frame_visible( + publish_frame_visible + ) + self._back_to_overlay_btn.setVisible(not publish_frame_visible) + def _on_back_to_overlay(self): self._overlay_widget.setVisible(True) self._resize_overlay() From 67fee85cd9ec7a159ef825ccc7acce77155ea0f4 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 27 Apr 2022 11:31:22 +0200 Subject: [PATCH 322/357] put stack of frames in published into widget --- openpype/tools/publisher/window.py | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/openpype/tools/publisher/window.py b/openpype/tools/publisher/window.py index ba0c4c54c3..90a36b4f01 100644 --- a/openpype/tools/publisher/window.py +++ b/openpype/tools/publisher/window.py @@ -83,8 +83,10 @@ class PublisherWindow(QtWidgets.QDialog): line_widget.setMinimumHeight(2) # Content + content_stacked_widget = QtWidgets.QWidget(self) + # Subset widget - subset_frame = QtWidgets.QFrame(self) + subset_frame = QtWidgets.QFrame(content_stacked_widget) subset_views_widget = BorderedLabelWidget( "Subsets to publish", subset_frame @@ -171,9 +173,12 @@ class PublisherWindow(QtWidgets.QDialog): subset_layout.addLayout(footer_layout, 0) # Create publish frame - publish_frame = PublishFrame(controller, self) + publish_frame = PublishFrame(controller, content_stacked_widget) - content_stacked_layout = QtWidgets.QStackedLayout() + content_stacked_layout = QtWidgets.QStackedLayout( + content_stacked_widget + ) + content_stacked_layout.setContentsMargins(0, 0, 0, 0) content_stacked_layout.setStackingMode( QtWidgets.QStackedLayout.StackAll ) @@ -186,7 +191,7 @@ class PublisherWindow(QtWidgets.QDialog): main_layout.setSpacing(0) main_layout.addWidget(header_widget, 0) main_layout.addWidget(line_widget, 0) - main_layout.addLayout(content_stacked_layout, 1) + main_layout.addWidget(content_stacked_widget, 1) creator_window = CreateDialog(controller, parent=self) @@ -228,6 +233,7 @@ class PublisherWindow(QtWidgets.QDialog): # Store 
header for TrayPublisher self._header_layout = header_layout + self._content_stacked_widget = content_stacked_widget self.content_stacked_layout = content_stacked_layout self.publish_frame = publish_frame self.subset_frame = subset_frame From 8913b74b19aa9967738309f46bded6eedd70770c Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 27 Apr 2022 11:33:58 +0200 Subject: [PATCH 323/357] added cancel button to change project widget --- openpype/tools/traypublisher/window.py | 16 ++++++++++++++-- 1 file changed, 14 insertions(+), 2 deletions(-) diff --git a/openpype/tools/traypublisher/window.py b/openpype/tools/traypublisher/window.py index 1c201230f0..5934c4aa8a 100644 --- a/openpype/tools/traypublisher/window.py +++ b/openpype/tools/traypublisher/window.py @@ -54,8 +54,11 @@ class StandaloneOverlayWidget(QtWidgets.QFrame): ) confirm_btn = QtWidgets.QPushButton("Confirm", content_widget) + cancel_btn = QtWidgets.QPushButton("Cancel", content_widget) + cancel_btn.setVisible(False) btns_layout = QtWidgets.QHBoxLayout() btns_layout.addStretch(1) + btns_layout.addWidget(cancel_btn, 0) btns_layout.addWidget(confirm_btn, 0) content_layout = QtWidgets.QVBoxLayout(content_widget) @@ -77,15 +80,19 @@ class StandaloneOverlayWidget(QtWidgets.QFrame): projects_view.doubleClicked.connect(self._on_double_click) confirm_btn.clicked.connect(self._on_confirm_click) + cancel_btn.clicked.connect(self._on_cancel_click) self._projects_view = projects_view self._projects_model = projects_model + self._cancel_btn = cancel_btn self._confirm_btn = confirm_btn self._publisher_window = publisher_window + self._project_name = None def showEvent(self, event): self._projects_model.refresh() + self._cancel_btn.setVisible(self._project_name is not None) super(StandaloneOverlayWidget, self).showEvent(event) def _on_double_click(self): @@ -94,13 +101,18 @@ class StandaloneOverlayWidget(QtWidgets.QFrame): def _on_confirm_click(self): self.set_selected_project() + def _on_cancel_click(self): + self._set_project(self._project_name) + def set_selected_project(self): index = self._projects_view.currentIndex() project_name = index.data(PROJECT_NAME_ROLE) - if not project_name: - return + if project_name: + self._set_project(project_name) + def _set_project(self, project_name): + self._project_name = project_name traypublisher.set_project_name(project_name) self.setVisible(False) self.project_selected.emit(project_name) From 5afeccd4e5d396c3a68ae167254095156bba3ac4 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 27 Apr 2022 17:17:50 +0200 Subject: [PATCH 324/357] hiero: removing old plugins --- .../collect_clip_resolution.py | 38 --- .../collect_host_version.py | 15 -- .../collect_tag_retime.py | 32 --- .../precollect_instances.py | 223 ------------------ .../precollect_workfile.py | 74 ------ 5 files changed, 382 deletions(-) delete mode 100644 openpype/hosts/hiero/plugins/publish_old_workflow/collect_clip_resolution.py delete mode 100644 openpype/hosts/hiero/plugins/publish_old_workflow/collect_host_version.py delete mode 100644 openpype/hosts/hiero/plugins/publish_old_workflow/collect_tag_retime.py delete mode 100644 openpype/hosts/hiero/plugins/publish_old_workflow/precollect_instances.py delete mode 100644 openpype/hosts/hiero/plugins/publish_old_workflow/precollect_workfile.py diff --git a/openpype/hosts/hiero/plugins/publish_old_workflow/collect_clip_resolution.py b/openpype/hosts/hiero/plugins/publish_old_workflow/collect_clip_resolution.py deleted file mode 100644 index 1d0727d0af..0000000000 --- 
a/openpype/hosts/hiero/plugins/publish_old_workflow/collect_clip_resolution.py +++ /dev/null @@ -1,38 +0,0 @@ -import pyblish.api - - -class CollectClipResolution(pyblish.api.InstancePlugin): - """Collect clip geometry resolution""" - - order = pyblish.api.CollectorOrder - 0.1 - label = "Collect Clip Resolution" - hosts = ["hiero"] - families = ["clip"] - - def process(self, instance): - sequence = instance.context.data['activeSequence'] - item = instance.data["item"] - source_resolution = instance.data.get("sourceResolution", None) - - resolution_width = int(sequence.format().width()) - resolution_height = int(sequence.format().height()) - pixel_aspect = sequence.format().pixelAspect() - - # source exception - if source_resolution: - resolution_width = int(item.source().mediaSource().width()) - resolution_height = int(item.source().mediaSource().height()) - pixel_aspect = item.source().mediaSource().pixelAspect() - - resolution_data = { - "resolutionWidth": resolution_width, - "resolutionHeight": resolution_height, - "pixelAspect": pixel_aspect - } - # add to instacne data - instance.data.update(resolution_data) - - self.log.info("Resolution of instance '{}' is: {}".format( - instance, - resolution_data - )) diff --git a/openpype/hosts/hiero/plugins/publish_old_workflow/collect_host_version.py b/openpype/hosts/hiero/plugins/publish_old_workflow/collect_host_version.py deleted file mode 100644 index 76e5bd11d5..0000000000 --- a/openpype/hosts/hiero/plugins/publish_old_workflow/collect_host_version.py +++ /dev/null @@ -1,15 +0,0 @@ -import pyblish.api - - -class CollectHostVersion(pyblish.api.ContextPlugin): - """Inject the hosts version into context""" - - label = "Collect Host and HostVersion" - order = pyblish.api.CollectorOrder - 0.5 - - def process(self, context): - import nuke - import pyblish.api - - context.set_data("host", pyblish.api.current_host()) - context.set_data('hostVersion', value=nuke.NUKE_VERSION_STRING) diff --git a/openpype/hosts/hiero/plugins/publish_old_workflow/collect_tag_retime.py b/openpype/hosts/hiero/plugins/publish_old_workflow/collect_tag_retime.py deleted file mode 100644 index 0634130976..0000000000 --- a/openpype/hosts/hiero/plugins/publish_old_workflow/collect_tag_retime.py +++ /dev/null @@ -1,32 +0,0 @@ -from pyblish import api - - -class CollectTagRetime(api.InstancePlugin): - """Collect Retiming from Tags of selected track items.""" - - order = api.CollectorOrder + 0.014 - label = "Collect Retiming Tag" - hosts = ["hiero"] - families = ['clip'] - - def process(self, instance): - # gets tags - tags = instance.data["tags"] - - for t in tags: - t_metadata = dict(t["metadata"]) - t_family = t_metadata.get("tag.family", "") - - # gets only task family tags and collect labels - if "retiming" in t_family: - margin_in = t_metadata.get("tag.marginIn", "") - margin_out = t_metadata.get("tag.marginOut", "") - - instance.data["retimeMarginIn"] = int(margin_in) - instance.data["retimeMarginOut"] = int(margin_out) - instance.data["retime"] = True - - self.log.info("retimeMarginIn: `{}`".format(margin_in)) - self.log.info("retimeMarginOut: `{}`".format(margin_out)) - - instance.data["families"] += ["retime"] diff --git a/openpype/hosts/hiero/plugins/publish_old_workflow/precollect_instances.py b/openpype/hosts/hiero/plugins/publish_old_workflow/precollect_instances.py deleted file mode 100644 index f9cc158e79..0000000000 --- a/openpype/hosts/hiero/plugins/publish_old_workflow/precollect_instances.py +++ /dev/null @@ -1,223 +0,0 @@ -from compiler.ast import flatten 
-from pyblish import api -from openpype.hosts.hiero import api as phiero -import hiero -# from openpype.hosts.hiero.api import lib -# reload(lib) -# reload(phiero) - - -class PreCollectInstances(api.ContextPlugin): - """Collect all Track items selection.""" - - order = api.CollectorOrder - 0.509 - label = "Pre-collect Instances" - hosts = ["hiero"] - - def process(self, context): - track_items = phiero.get_track_items( - selected=True, check_tagged=True, check_enabled=True) - # only return enabled track items - if not track_items: - track_items = phiero.get_track_items( - check_enabled=True, check_tagged=True) - # get sequence and video tracks - sequence = context.data["activeSequence"] - tracks = sequence.videoTracks() - - # add collection to context - tracks_effect_items = self.collect_sub_track_items(tracks) - - context.data["tracksEffectItems"] = tracks_effect_items - - self.log.info( - "Processing enabled track items: {}".format(len(track_items))) - - for _ti in track_items: - data = {} - clip = _ti.source() - - # get clips subtracks and anotations - annotations = self.clip_annotations(clip) - subtracks = self.clip_subtrack(_ti) - self.log.debug("Annotations: {}".format(annotations)) - self.log.debug(">> Subtracks: {}".format(subtracks)) - - # get pype tag data - tag_parsed_data = phiero.get_track_item_pype_data(_ti) - # self.log.debug(pformat(tag_parsed_data)) - - if not tag_parsed_data: - continue - - if tag_parsed_data.get("id") != "pyblish.avalon.instance": - continue - # add tag data to instance data - data.update({ - k: v for k, v in tag_parsed_data.items() - if k not in ("id", "applieswhole", "label") - }) - - asset = tag_parsed_data["asset"] - subset = tag_parsed_data["subset"] - review_track = tag_parsed_data.get("reviewTrack") - hiero_track = tag_parsed_data.get("heroTrack") - audio = tag_parsed_data.get("audio") - - # remove audio attribute from data - data.pop("audio") - - # insert family into families - family = tag_parsed_data["family"] - families = [str(f) for f in tag_parsed_data["families"]] - families.insert(0, str(family)) - - track = _ti.parent() - media_source = _ti.source().mediaSource() - source_path = media_source.firstpath() - file_head = media_source.filenameHead() - file_info = media_source.fileinfos().pop() - source_first_frame = int(file_info.startFrame()) - - # apply only for review and master track instance - if review_track and hiero_track: - families += ["review", "ftrack"] - - data.update({ - "name": "{} {} {}".format(asset, subset, families), - "asset": asset, - "item": _ti, - "families": families, - - # tags - "tags": _ti.tags(), - - # track item attributes - "track": track.name(), - "trackItem": track, - "reviewTrack": review_track, - - # version data - "versionData": { - "colorspace": _ti.sourceMediaColourTransform() - }, - - # source attribute - "source": source_path, - "sourceMedia": media_source, - "sourcePath": source_path, - "sourceFileHead": file_head, - "sourceFirst": source_first_frame, - - # clip's effect - "clipEffectItems": subtracks - }) - - instance = context.create_instance(**data) - - self.log.info("Creating instance.data: {}".format(instance.data)) - - if audio: - a_data = dict() - - # add tag data to instance data - a_data.update({ - k: v for k, v in tag_parsed_data.items() - if k not in ("id", "applieswhole", "label") - }) - - # create main attributes - subset = "audioMain" - family = "audio" - families = ["clip", "ftrack"] - families.insert(0, str(family)) - - name = "{} {} {}".format(asset, subset, families) - - a_data.update({ 
- "name": name, - "subset": subset, - "asset": asset, - "family": family, - "families": families, - "item": _ti, - - # tags - "tags": _ti.tags(), - }) - - a_instance = context.create_instance(**a_data) - self.log.info("Creating audio instance: {}".format(a_instance)) - - @staticmethod - def clip_annotations(clip): - """ - Returns list of Clip's hiero.core.Annotation - """ - annotations = [] - subTrackItems = flatten(clip.subTrackItems()) - annotations += [item for item in subTrackItems if isinstance( - item, hiero.core.Annotation)] - return annotations - - @staticmethod - def clip_subtrack(clip): - """ - Returns list of Clip's hiero.core.SubTrackItem - """ - subtracks = [] - subTrackItems = flatten(clip.parent().subTrackItems()) - for item in subTrackItems: - # avoid all anotation - if isinstance(item, hiero.core.Annotation): - continue - # # avoid all not anaibled - if not item.isEnabled(): - continue - subtracks.append(item) - return subtracks - - @staticmethod - def collect_sub_track_items(tracks): - """ - Returns dictionary with track index as key and list of subtracks - """ - # collect all subtrack items - sub_track_items = dict() - for track in tracks: - items = track.items() - - # skip if no clips on track > need track with effect only - if items: - continue - - # skip all disabled tracks - if not track.isEnabled(): - continue - - track_index = track.trackIndex() - _sub_track_items = flatten(track.subTrackItems()) - - # continue only if any subtrack items are collected - if len(_sub_track_items) < 1: - continue - - enabled_sti = list() - # loop all found subtrack items and check if they are enabled - for _sti in _sub_track_items: - # checking if not enabled - if not _sti.isEnabled(): - continue - if isinstance(_sti, hiero.core.Annotation): - continue - # collect the subtrack item - enabled_sti.append(_sti) - - # continue only if any subtrack items are collected - if len(enabled_sti) < 1: - continue - - # add collection of subtrackitems to dict - sub_track_items[track_index] = enabled_sti - - return sub_track_items diff --git a/openpype/hosts/hiero/plugins/publish_old_workflow/precollect_workfile.py b/openpype/hosts/hiero/plugins/publish_old_workflow/precollect_workfile.py deleted file mode 100644 index 693e151f6f..0000000000 --- a/openpype/hosts/hiero/plugins/publish_old_workflow/precollect_workfile.py +++ /dev/null @@ -1,74 +0,0 @@ -import os -import pyblish.api -from openpype.hosts.hiero import api as phiero -from openpype.pipeline import legacy_io - - -class PreCollectWorkfile(pyblish.api.ContextPlugin): - """Inject the current working file into context""" - - label = "Pre-collect Workfile" - order = pyblish.api.CollectorOrder - 0.51 - - def process(self, context): - asset = legacy_io.Session["AVALON_ASSET"] - subset = "workfile" - - project = phiero.get_current_project() - active_sequence = phiero.get_current_sequence() - video_tracks = active_sequence.videoTracks() - audio_tracks = active_sequence.audioTracks() - current_file = project.path() - staging_dir = os.path.dirname(current_file) - base_name = os.path.basename(current_file) - - # get workfile's colorspace properties - _clrs = {} - _clrs["useOCIOEnvironmentOverride"] = project.useOCIOEnvironmentOverride() # noqa - _clrs["lutSetting16Bit"] = project.lutSetting16Bit() - _clrs["lutSetting8Bit"] = project.lutSetting8Bit() - _clrs["lutSettingFloat"] = project.lutSettingFloat() - _clrs["lutSettingLog"] = project.lutSettingLog() - _clrs["lutSettingViewer"] = project.lutSettingViewer() - _clrs["lutSettingWorkingSpace"] = 
project.lutSettingWorkingSpace() - _clrs["lutUseOCIOForExport"] = project.lutUseOCIOForExport() - _clrs["ocioConfigName"] = project.ocioConfigName() - _clrs["ocioConfigPath"] = project.ocioConfigPath() - - # set main project attributes to context - context.data["activeProject"] = project - context.data["activeSequence"] = active_sequence - context.data["videoTracks"] = video_tracks - context.data["audioTracks"] = audio_tracks - context.data["currentFile"] = current_file - context.data["colorspace"] = _clrs - - self.log.info("currentFile: {}".format(current_file)) - - # creating workfile representation - representation = { - 'name': 'hrox', - 'ext': 'hrox', - 'files': base_name, - "stagingDir": staging_dir, - } - - instance_data = { - "name": "{}_{}".format(asset, subset), - "asset": asset, - "subset": "{}{}".format(asset, subset.capitalize()), - "item": project, - "family": "workfile", - - # version data - "versionData": { - "colorspace": _clrs - }, - - # source attribute - "sourcePath": current_file, - "representations": [representation] - } - - instance = context.create_instance(**instance_data) - self.log.info("Creating instance: {}".format(instance)) From ec8e277e1d794838de3021a977090cd0c4865b47 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 27 Apr 2022 17:18:42 +0200 Subject: [PATCH 325/357] hiero: adding collector for frame tags --- openpype/hosts/hiero/api/tags.py | 20 +-- .../publish/collect_frame_tag_instances.py | 146 ++++++++++++++++++ .../plugins/publish/precollect_workfile.py | 2 + 3 files changed, 158 insertions(+), 10 deletions(-) create mode 100644 openpype/hosts/hiero/plugins/publish/collect_frame_tag_instances.py diff --git a/openpype/hosts/hiero/api/tags.py b/openpype/hosts/hiero/api/tags.py index e15e3119a6..8877b92b9d 100644 --- a/openpype/hosts/hiero/api/tags.py +++ b/openpype/hosts/hiero/api/tags.py @@ -10,16 +10,6 @@ log = Logger.get_logger(__name__) def tag_data(): return { - # "Retiming": { - # "editable": "1", - # "note": "Clip has retime or TimeWarp effects (or multiple effects stacked on the clip)", # noqa - # "icon": "retiming.png", - # "metadata": { - # "family": "retiming", - # "marginIn": 1, - # "marginOut": 1 - # } - # }, "[Lenses]": { "Set lense here": { "editable": "1", @@ -48,6 +38,16 @@ def tag_data(): "family": "comment", "subset": "main" } + }, + "FrameMain": { + "editable": "1", + "note": "Publishing a frame subset.", + "icon": "z_layer_main.png", + "metadata": { + "family": "frame", + "subset": "main", + "format": "png" + } } } diff --git a/openpype/hosts/hiero/plugins/publish/collect_frame_tag_instances.py b/openpype/hosts/hiero/plugins/publish/collect_frame_tag_instances.py new file mode 100644 index 0000000000..84b6f9149b --- /dev/null +++ b/openpype/hosts/hiero/plugins/publish/collect_frame_tag_instances.py @@ -0,0 +1,146 @@ +from pprint import pformat +import re +import ast +import json + +import pyblish.api + + +class CollectFrameTagInstances(pyblish.api.ContextPlugin): + """Collect frames from tags. 
+ + Tag is expected to have metadata: + { + "family": "frame" + "subset": "main" + } + """ + + order = pyblish.api.CollectorOrder + label = "Collect Frames" + hosts = ["hiero"] + + def process(self, context): + self._context = context + + # collect all sequence tags + subset_data = self._create_frame_subset_data_sequence(context) + self.log.debug("__ subset_data: {}".format( + pformat(subset_data) + )) + + # if sequence tags and frame type then create instances + self._create_instances(subset_data) + + # collect all instance tags + ## if instance tags and frame type then create instances + + pass + + def _get_tag_data(self, tag): + data = {} + + # get tag metadata attribute + tag_data = tag.metadata() + + # convert tag metadata to normal keys names and values to correct types + for k, v in dict(tag_data).items(): + key = k.replace("tag.", "") + + try: + # capture exceptions which are related to strings only + if re.match(r"^[\d]+$", v): + value = int(v) + elif re.match(r"^True$", v): + value = True + elif re.match(r"^False$", v): + value = False + elif re.match(r"^None$", v): + value = None + elif re.match(r"^[\w\d_]+$", v): + value = v + else: + value = ast.literal_eval(v) + except (ValueError, SyntaxError) as msg: + value = v + + data[key] = value + + return data + + def _create_frame_subset_data_sequence(self, context): + + sequence_tags = [] + sequence = context.data["activeTimeline"] + + # get all publishable sequence frames + publish_frames = range(int(sequence.duration() + 1)) + + self.log.debug("__ publish_frames: {}".format( + pformat(publish_frames) + )) + + # get all sequence tags + for tag in sequence.tags(): + tag_data = self._get_tag_data(tag) + self.log.debug("__ tag_data: {}".format( + pformat(tag_data) + )) + if not tag_data: + continue + + if "family" not in tag_data: + continue + + if tag_data["family"] != "frame": + continue + + sequence_tags.append(tag_data) + + self.log.debug("__ sequence_tags: {}".format( + pformat(sequence_tags) + )) + + # first collect all available subset tag frames + subset_data = {} + for tag_data in sequence_tags: + frame = int(tag_data["start"]) + + if frame not in publish_frames: + continue + + subset = tag_data["subset"] + + if subset in subset_data: + # update existing subset key + subset_data[subset]["frames"].append(frame) + else: + # create new subset key + subset_data[subset] = { + "frames": [frame], + "format": tag_data["format"], + "asset": context.data["assetEntity"]["name"] + } + return subset_data + + def _create_instances(self, subset_data): + # create instance per subset + for subset_name, subset_data in subset_data.items(): + name = "frame" + subset_name.title() + data = { + "name": name, + "label": "{} {}".format(name, subset_data["frames"]), + "family": "image", + "families": ["frame"], + "asset": subset_data["asset"], + "subset": subset_name, + "format": subset_data["format"], + "frames": subset_data["frames"] + } + self._context.create_instance(**data) + + self.log.info( + "Created instance: {}".format( + json.dumps(data, sort_keys=True, indent=4) + ) + ) diff --git a/openpype/hosts/hiero/plugins/publish/precollect_workfile.py b/openpype/hosts/hiero/plugins/publish/precollect_workfile.py index 29c0397f79..b9f58c15f6 100644 --- a/openpype/hosts/hiero/plugins/publish/precollect_workfile.py +++ b/openpype/hosts/hiero/plugins/publish/precollect_workfile.py @@ -68,6 +68,7 @@ class PrecollectWorkfile(pyblish.api.ContextPlugin): "subset": "{}{}".format(asset, subset.capitalize()), "item": project, "family": "workfile", + 
"families": [], "representations": [workfile_representation, thumb_representation] } @@ -77,6 +78,7 @@ class PrecollectWorkfile(pyblish.api.ContextPlugin): # update context with main project attributes context_data = { "activeProject": project, + "activeTimeline": active_timeline, "otioTimeline": otio_timeline, "currentFile": curent_file, "colorspace": self.get_colorspace(project), From 4687d00a1700d1940f0c06d44835aa68db756a6b Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 27 Apr 2022 17:19:10 +0200 Subject: [PATCH 326/357] hiero: fixing families exception for hierarchy --- openpype/plugins/publish/collect_hierarchy.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/openpype/plugins/publish/collect_hierarchy.py b/openpype/plugins/publish/collect_hierarchy.py index 4e94acce4a..a96d444be6 100644 --- a/openpype/plugins/publish/collect_hierarchy.py +++ b/openpype/plugins/publish/collect_hierarchy.py @@ -30,14 +30,15 @@ class CollectHierarchy(pyblish.api.ContextPlugin): # shot data dict shot_data = {} - family = instance.data.get("family") + family = instance.data["family"] + families = instance.data["families"] # filter out all unepropriate instances if not instance.data["publish"]: continue # exclude other families then self.families with intersection - if not set(self.families).intersection([family]): + if not set(self.families).intersection(set(families + [family])): continue # exclude if not masterLayer True From ca0e6592db03dfeb8d0186a443683f7bbb530f92 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 27 Apr 2022 17:19:59 +0200 Subject: [PATCH 327/357] hiero: adding frame family extractor --- .../hiero/plugins/publish/extract_frames.py | 82 +++++++++++++++++++ 1 file changed, 82 insertions(+) create mode 100644 openpype/hosts/hiero/plugins/publish/extract_frames.py diff --git a/openpype/hosts/hiero/plugins/publish/extract_frames.py b/openpype/hosts/hiero/plugins/publish/extract_frames.py new file mode 100644 index 0000000000..5396298aac --- /dev/null +++ b/openpype/hosts/hiero/plugins/publish/extract_frames.py @@ -0,0 +1,82 @@ +import os +import pyblish.api +import openpype + + +class ExtractFrames(openpype.api.Extractor): + """Extracts frames""" + + order = pyblish.api.ExtractorOrder + label = "Extract Frames" + hosts = ["hiero"] + families = ["frame"] + movie_extensions = ["mov", "mp4"] + + def process(self, instance): + oiio_tool_path = openpype.lib.get_oiio_tools_path() + staging_dir = self.staging_dir(instance) + output_template = os.path.join(staging_dir, instance.data["name"]) + sequence = instance.context.data["activeTimeline"] + + files = [] + for frame in instance.data["frames"]: + track_item = sequence.trackItemAt(frame) + media_source = track_item.source().mediaSource() + input_path = media_source.fileinfos()[0].filename() + input_frame = ( + track_item.mapTimelineToSource(frame) + + track_item.source().mediaSource().startTime() + ) + output_ext = instance.data["format"] + output_path = output_template + output_path += ".{:04d}.{}".format(int(frame), output_ext) + + args = [oiio_tool_path] + + ext = os.path.splitext(input_path)[1][1:] + if ext in self.movie_extensions: + args.extend(["--subimage", str(int(input_frame))]) + else: + args.extend(["--frames", str(int(input_frame))]) + + if ext == "exr": + args.extend(["--powc", "0.45,0.45,0.45,1.0"]) + + args.extend([input_path, "-o", output_path]) + output = openpype.api.run_subprocess(args) + + failed_output = "oiiotool produced no output." 
+ if failed_output in output: + raise ValueError( + "oiiotool processing failed. Args: {}".format(args) + ) + + files.append(output_path) + + # Feedback to user because "oiiotool" can make the publishing + # appear unresponsive. + self.log.info( + "Processed {} of {} frames".format( + instance.data["frames"].index(frame) + 1, + len(instance.data["frames"]) + ) + ) + + if len(files) == 1: + instance.data["representations"] = [ + { + "name": output_ext, + "ext": output_ext, + "files": os.path.basename(files[0]), + "stagingDir": staging_dir + } + ] + else: + instance.data["representations"] = [ + { + "name": output_ext, + "ext": output_ext, + "files": [os.path.basename(x) for x in files], + "stagingDir": staging_dir + } + ] \ No newline at end of file From df81486d30c4ddbbcec92cf307e30464d084e21f Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 27 Apr 2022 17:27:29 +0200 Subject: [PATCH 328/357] hound --- .../plugins/publish/collect_frame_tag_instances.py | 12 ++++-------- .../hosts/hiero/plugins/publish/extract_frames.py | 2 +- 2 files changed, 5 insertions(+), 9 deletions(-) diff --git a/openpype/hosts/hiero/plugins/publish/collect_frame_tag_instances.py b/openpype/hosts/hiero/plugins/publish/collect_frame_tag_instances.py index 84b6f9149b..80a54ba2c5 100644 --- a/openpype/hosts/hiero/plugins/publish/collect_frame_tag_instances.py +++ b/openpype/hosts/hiero/plugins/publish/collect_frame_tag_instances.py @@ -25,18 +25,14 @@ class CollectFrameTagInstances(pyblish.api.ContextPlugin): # collect all sequence tags subset_data = self._create_frame_subset_data_sequence(context) + self.log.debug("__ subset_data: {}".format( pformat(subset_data) )) - # if sequence tags and frame type then create instances + # create instances self._create_instances(subset_data) - # collect all instance tags - ## if instance tags and frame type then create instances - - pass - def _get_tag_data(self, tag): data = {} @@ -61,7 +57,7 @@ class CollectFrameTagInstances(pyblish.api.ContextPlugin): value = v else: value = ast.literal_eval(v) - except (ValueError, SyntaxError) as msg: + except (ValueError, SyntaxError): value = v data[key] = value @@ -85,7 +81,7 @@ class CollectFrameTagInstances(pyblish.api.ContextPlugin): tag_data = self._get_tag_data(tag) self.log.debug("__ tag_data: {}".format( pformat(tag_data) - )) + )) if not tag_data: continue diff --git a/openpype/hosts/hiero/plugins/publish/extract_frames.py b/openpype/hosts/hiero/plugins/publish/extract_frames.py index 5396298aac..aa3eda2e9f 100644 --- a/openpype/hosts/hiero/plugins/publish/extract_frames.py +++ b/openpype/hosts/hiero/plugins/publish/extract_frames.py @@ -79,4 +79,4 @@ class ExtractFrames(openpype.api.Extractor): "files": [os.path.basename(x) for x in files], "stagingDir": staging_dir } - ] \ No newline at end of file + ] From 546b1d42015598a9b36dcfb07d942c12b391029b Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 27 Apr 2022 19:07:23 +0200 Subject: [PATCH 329/357] removed unused method --- .../plugins/publish/integrate_ftrack_api.py | 42 ------------------- 1 file changed, 42 deletions(-) diff --git a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_api.py b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_api.py index 650c59fae8..e60d00c7c3 100644 --- a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_api.py +++ b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_api.py @@ -24,48 +24,6 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin): label = "Integrate Ftrack Api" families = ["ftrack"] - def 
query(self, entitytype, data): - """ Generate a query expression from data supplied. - - If a value is not a string, we'll add the id of the entity to the - query. - - Args: - entitytype (str): The type of entity to query. - data (dict): The data to identify the entity. - exclusions (list): All keys to exclude from the query. - - Returns: - str: String query to use with "session.query" - """ - queries = [] - if sys.version_info[0] < 3: - for key, value in data.iteritems(): - if not isinstance(value, (basestring, int)): - self.log.info("value: {}".format(value)) - if "id" in value.keys(): - queries.append( - "{0}.id is \"{1}\"".format(key, value["id"]) - ) - else: - queries.append("{0} is \"{1}\"".format(key, value)) - else: - for key, value in data.items(): - if not isinstance(value, (str, int)): - self.log.info("value: {}".format(value)) - if "id" in value.keys(): - queries.append( - "{0}.id is \"{1}\"".format(key, value["id"]) - ) - else: - queries.append("{0} is \"{1}\"".format(key, value)) - - query = ( - "select id from " + entitytype + " where " + " and ".join(queries) - ) - self.log.debug(query) - return query - def process(self, instance): session = instance.context.data["ftrackSession"] context = instance.context From 9c1fb9de477394b8b22c1910ab2a862a3005dd96 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 27 Apr 2022 19:08:22 +0200 Subject: [PATCH 330/357] added asset status name filtering for asset version --- .../publish/integrate_ftrack_instances.py | 29 ++++++++++++++++++- 1 file changed, 28 insertions(+), 1 deletion(-) diff --git a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py index 5ea0469bce..5eecf34c3d 100644 --- a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py +++ b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py @@ -3,6 +3,8 @@ import json import copy import pyblish.api +from openpype.lib.profiles_filtering import filter_profiles + class IntegrateFtrackInstance(pyblish.api.InstancePlugin): """Collect ftrack component data (not integrate yet). 
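The status logic added to this plugin is profile driven: each `asset_versions_status_profiles` entry lists families, host names and task types plus a target status name, and `filter_profiles` returns the profile matching the publishing instance. A self-contained sketch of that kind of lookup (a simplified stand-in, not the real `openpype.lib.profiles_filtering.filter_profiles` helper; profile keys here mirror the criteria keys for simplicity, whereas the settings schema stores the family filter under a `family` key):

```python
def match_profile(profiles, criteria):
    """Return the first profile whose non-empty filters all accept the criteria.

    An empty filter list means "match anything"; otherwise the criterion
    value must be listed in the profile.
    """
    for profile in profiles:
        for key, value in criteria.items():
            filter_values = profile.get(key) or []
            if filter_values and value not in filter_values:
                break
        else:
            return profile
    return None


profiles = [
    {"hosts": ["nuke"], "task_types": [], "families": ["render"], "status": "Render Done"},
    {"hosts": [], "task_types": [], "families": [], "status": None},
]
criteria = {"families": "render", "hosts": "nuke", "task_types": "Compositing"}
matched = match_profile(profiles, criteria)
print(matched and matched["status"])  # -> "Render Done"
```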
@@ -36,6 +38,7 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): "reference": "reference" } keep_first_subset_name_for_review = True + asset_versions_status_profiles = {} def process(self, instance): self.log.debug("instance {}".format(instance)) @@ -80,6 +83,8 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): if instance_fps is None: instance_fps = instance.context.data["fps"] + status_name = self._get_asset_version_status_name(instance) + # Base of component item data # - create a copy of this object when want to use it base_component_item = { @@ -91,7 +96,8 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): }, "assetversion_data": { "version": version_number, - "comment": instance.context.data.get("comment") or "" + "comment": instance.context.data.get("comment") or "", + "status_name": status_name }, "component_overwrite": False, # This can be change optionally @@ -317,3 +323,24 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): ) )) instance.data["ftrackComponentsList"] = component_list + + def _get_asset_version_status_name(self, instance): + if not self.asset_versions_status_profiles: + return None + + # Prepare filtering data for new asset version status + anatomy_data = instance.data["anatomyData"] + task_type = anatomy_data.get("task", {}).get("type") + filtering_criteria = { + "families": instance.data["family"], + "hosts": instance.context.data["hostName"], + "task_types": task_type + } + matching_profile = filter_profiles( + self.asset_versions_status_profiles, + filtering_criteria + ) + if not matching_profile: + return None + + return matching_profile["status"] or None From b0dd4d51530a5a345bad9fae2c3d9fac2ee6ef19 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 27 Apr 2022 19:10:53 +0200 Subject: [PATCH 331/357] implemented logic which change status of asset version --- .../plugins/publish/integrate_ftrack_api.py | 44 ++++++++++++++++--- 1 file changed, 39 insertions(+), 5 deletions(-) diff --git a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_api.py b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_api.py index e60d00c7c3..64af8cb208 100644 --- a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_api.py +++ b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_api.py @@ -66,7 +66,19 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin): default_asset_name = parent_entity["name"] # Change status on task - self._set_task_status(instance, task_entity, session) + asset_version_status_ids_by_name = {} + project_entity = instance.context.data.get("ftrackProject") + if project_entity: + project_schema = project_entity["project_schema"] + asset_version_statuses = ( + project_schema.get_statuses("AssetVersion") + ) + asset_version_status_ids_by_name = { + status["name"].lower(): status["id"] + for status in asset_version_statuses + } + + self._set_task_status(instance, project_entity, task_entity, session) # Prepare AssetTypes asset_types_by_short = self._ensure_asset_types_exists( @@ -97,7 +109,11 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin): # Asset Version asset_version_data = data.get("assetversion_data") or {} asset_version_entity = self._ensure_asset_version_exists( - session, asset_version_data, asset_entity["id"], task_entity + session, + asset_version_data, + asset_entity["id"], + task_entity, + asset_version_status_ids_by_name ) # Component @@ -132,8 +148,7 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin): if asset_version not in instance.data[asset_versions_key]: 
instance.data[asset_versions_key].append(asset_version) - def _set_task_status(self, instance, task_entity, session): - project_entity = instance.context.data.get("ftrackProject") + def _set_task_status(self, instance, project_entity, task_entity, session): if not project_entity: self.log.info("Task status won't be set, project is not known.") return @@ -277,12 +292,19 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin): ).first() def _ensure_asset_version_exists( - self, session, asset_version_data, asset_id, task_entity + self, + session, + asset_version_data, + asset_id, + task_entity, + status_ids_by_name ): task_id = None if task_entity: task_id = task_entity["id"] + status_name = asset_version_data.pop("status_name", None) + # Try query asset version by criteria (asset id and version) version = asset_version_data.get("version") or 0 asset_version_entity = self._query_asset_version( @@ -324,6 +346,18 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin): session, version, asset_id ) + if status_name: + status_id = status_ids_by_name.get(status_name.lower()) + if not status_id: + self.log.info(( + "Ftrack status with name \"{}\"" + " for AssetVersion was not found." + ).format(status_name)) + + elif asset_version_entity["status_id"] != status_id: + asset_version_entity["status_id"] = status_id + session.commit() + # Set custom attributes if there were any set custom_attrs = asset_version_data.get("custom_attributes") or {} for attr_key, attr_value in custom_attrs.items(): From eea8f906a2a2b4031b672d021db1052ba0afdd0d Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 27 Apr 2022 19:16:10 +0200 Subject: [PATCH 332/357] added settings schemas and defaults for new attribute --- .../defaults/project_settings/ftrack.json | 3 +- .../schema_project_ftrack.json | 37 +++++++++++++++++++ 2 files changed, 39 insertions(+), 1 deletion(-) diff --git a/openpype/settings/defaults/project_settings/ftrack.json b/openpype/settings/defaults/project_settings/ftrack.json index a846a596c2..f9d16d6476 100644 --- a/openpype/settings/defaults/project_settings/ftrack.json +++ b/openpype/settings/defaults/project_settings/ftrack.json @@ -418,7 +418,8 @@ "redshiftproxy": "cache", "usd": "usd" }, - "keep_first_subset_name_for_review": true + "keep_first_subset_name_for_review": true, + "asset_versions_status_profiles": [] } } } \ No newline at end of file diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_ftrack.json b/openpype/settings/entities/schemas/projects_schema/schema_project_ftrack.json index 47effb3dbd..7db490b114 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_ftrack.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_ftrack.json @@ -858,6 +858,43 @@ "key": "keep_first_subset_name_for_review", "label": "Make subset name as first asset name", "default": true + }, + { + "type": "list", + "collapsible": true, + "key": "asset_versions_status_profiles", + "label": "AssetVersion status on publish", + "use_label_wrap": true, + "object_type": { + "type": "dict", + "children": [ + { + "key": "hosts", + "label": "Host names", + "type": "hosts-enum", + "multiselection": true + }, + { + "key": "task_types", + "label": "Task types", + "type": "task-types-enum" + }, + { + "key": "family", + "label": "Family", + "type": "list", + "object_type": "text" + }, + { + "type": "separator" + }, + { + "key": "status", + "label": "Status name", + "type": "text" + } + ] + } } ] } From c2e0c84034b94a876eca830e6c4ae5188b4c000a Mon Sep 17 
00:00:00 2001 From: Jakub Trllo Date: Thu, 28 Apr 2022 10:36:52 +0200 Subject: [PATCH 333/357] replaced renderlayer with render_layer and renderpass with render_pass --- .../plugins/create/create_render_layer.py | 13 ++++++++++--- .../plugins/create/create_render_pass.py | 13 ++++++++++--- .../plugins/publish/collect_instances.py | 18 +++++++++++++++++- .../plugins/publish/collect_scene_render.py | 10 ++++++++-- 4 files changed, 45 insertions(+), 9 deletions(-) diff --git a/openpype/hosts/tvpaint/plugins/create/create_render_layer.py b/openpype/hosts/tvpaint/plugins/create/create_render_layer.py index c1af9632b1..3b5bd47189 100644 --- a/openpype/hosts/tvpaint/plugins/create/create_render_layer.py +++ b/openpype/hosts/tvpaint/plugins/create/create_render_layer.py @@ -24,7 +24,9 @@ class CreateRenderlayer(plugin.Creator): " {clip_id} {group_id} {r} {g} {b} \"{name}\"" ) - dynamic_subset_keys = ["render_pass", "render_layer", "group"] + dynamic_subset_keys = [ + "renderpass", "renderlayer", "render_pass", "render_layer", "group" + ] @classmethod def get_dynamic_data( @@ -34,12 +36,17 @@ class CreateRenderlayer(plugin.Creator): variant, task_name, asset_id, project_name, host_name ) # Use render pass name from creator's plugin - dynamic_data["render_pass"] = cls.render_pass + dynamic_data["renderpass"] = cls.render_pass # Add variant to render layer - dynamic_data["render_layer"] = variant + dynamic_data["renderlayer"] = variant # Change family for subset name fill dynamic_data["family"] = "render" + # TODO remove - Backwards compatibility for old subset name templates + # - added 2022/04/28 + dynamic_data["render_pass"] = dynamic_data["renderpass"] + dynamic_data["render_layer"] = dynamic_data["renderlayer"] + return dynamic_data @classmethod diff --git a/openpype/hosts/tvpaint/plugins/create/create_render_pass.py b/openpype/hosts/tvpaint/plugins/create/create_render_pass.py index a7f717ccec..1c9f31e656 100644 --- a/openpype/hosts/tvpaint/plugins/create/create_render_pass.py +++ b/openpype/hosts/tvpaint/plugins/create/create_render_pass.py @@ -20,7 +20,9 @@ class CreateRenderPass(plugin.Creator): icon = "cube" defaults = ["Main"] - dynamic_subset_keys = ["render_pass", "render_layer"] + dynamic_subset_keys = [ + "renderpass", "renderlayer", "render_pass", "render_layer" + ] @classmethod def get_dynamic_data( @@ -29,9 +31,13 @@ class CreateRenderPass(plugin.Creator): dynamic_data = super(CreateRenderPass, cls).get_dynamic_data( variant, task_name, asset_id, project_name, host_name ) - dynamic_data["render_pass"] = variant + dynamic_data["renderpass"] = variant dynamic_data["family"] = "render" + # TODO remove - Backwards compatibility for old subset name templates + # - added 2022/04/28 + dynamic_data["renderpass"] = dynamic_data["render_pass"] + return dynamic_data @classmethod @@ -115,6 +121,7 @@ class CreateRenderPass(plugin.Creator): else: render_layer = beauty_instance["variant"] + subset_name_fill_data["renderlayer"] = render_layer subset_name_fill_data["render_layer"] = render_layer # Format dynamic keys in subset name @@ -129,7 +136,7 @@ class CreateRenderPass(plugin.Creator): self.data["group_id"] = group_id self.data["pass"] = variant - self.data["render_layer"] = render_layer + self.data["renderlayer"] = render_layer # Collect selected layer ids to be stored into instance layer_names = [layer["name"] for layer in selected_layers] diff --git a/openpype/hosts/tvpaint/plugins/publish/collect_instances.py b/openpype/hosts/tvpaint/plugins/publish/collect_instances.py index 
188aa8c41a..9b51f88cae 100644 --- a/openpype/hosts/tvpaint/plugins/publish/collect_instances.py +++ b/openpype/hosts/tvpaint/plugins/publish/collect_instances.py @@ -45,6 +45,22 @@ class CollectInstances(pyblish.api.ContextPlugin): for instance_data in filtered_instance_data: instance_data["fps"] = context.data["sceneFps"] + # Conversion from older instances + # - change 'render_layer' to 'renderlayer' + # and 'render_pass' to 'renderpass' + + if ( + "renderlayer" not in instance_data + and "render_layer" in instance_data + ): + instance_data["renderlayer"] = instance_data["render_layer"] + + if ( + "renderpass" not in instance_data + and "render_pass" in instance_data + ): + instance_data["renderpass"] = instance_data["render_pass"] + # Store workfile instance data to instance data instance_data["originData"] = copy.deepcopy(instance_data) # Global instance data modifications @@ -191,7 +207,7 @@ class CollectInstances(pyblish.api.ContextPlugin): "Creating render pass instance. \"{}\"".format(pass_name) ) # Change label - render_layer = instance_data["render_layer"] + render_layer = instance_data["renderlayer"] # Backwards compatibility # - subset names were not stored as final subset names during creation diff --git a/openpype/hosts/tvpaint/plugins/publish/collect_scene_render.py b/openpype/hosts/tvpaint/plugins/publish/collect_scene_render.py index 1c042a62fb..02e0575a2c 100644 --- a/openpype/hosts/tvpaint/plugins/publish/collect_scene_render.py +++ b/openpype/hosts/tvpaint/plugins/publish/collect_scene_render.py @@ -69,9 +69,13 @@ class CollectRenderScene(pyblish.api.ContextPlugin): # Variant is using render pass name variant = self.render_layer dynamic_data = { - "render_layer": self.render_layer, - "render_pass": self.render_pass + "renderlayer": self.render_layer, + "renderpass": self.render_pass, } + # TODO remove - Backwards compatibility for old subset name templates + # - added 2022/04/28 + dynamic_data["render_layer"] = dynamic_data["renderlayer"] + dynamic_data["render_pass"] = dynamic_data["renderpass"] task_name = workfile_context["task"] subset_name = get_subset_name_with_asset_doc( @@ -102,6 +106,8 @@ class CollectRenderScene(pyblish.api.ContextPlugin): "asset": asset_name, "task": task_name } + # Add 'renderlayer' and 'renderpass' to data + instance_data.update(dynamic_data) instance = context.create_instance(**instance_data) From e7d244cd585dcd4171cc50f8cb0ac537fda360ff Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 28 Apr 2022 10:48:43 +0200 Subject: [PATCH 334/357] changed default settings of TVPaint --- openpype/settings/defaults/project_settings/global.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/settings/defaults/project_settings/global.json b/openpype/settings/defaults/project_settings/global.json index 7317a3da1c..7b223798f1 100644 --- a/openpype/settings/defaults/project_settings/global.json +++ b/openpype/settings/defaults/project_settings/global.json @@ -307,7 +307,7 @@ ], "task_types": [], "tasks": [], - "template": "{family}{Task}_{Render_layer}_{Render_pass}" + "template": "{family}{Task}_{Renderlayer}_{Renderpass}" }, { "families": [ From ed9771392db1475a6cb74e0a4417079e4505b248 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 28 Apr 2022 11:22:11 +0200 Subject: [PATCH 335/357] Nuke: anatomy node overrides plus UX improvements --- .../schemas/schema_anatomy_imageio.json | 96 +++++++++++-------- 1 file changed, 57 insertions(+), 39 deletions(-) diff --git 
a/openpype/settings/entities/schemas/projects_schema/schemas/schema_anatomy_imageio.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_anatomy_imageio.json index 9f142bad09..434f474f6e 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_anatomy_imageio.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_anatomy_imageio.json @@ -253,7 +253,7 @@ { "key": "requiredNodes", "type": "list", - "label": "Required Nodes", + "label": "Plugin required", "object_type": { "type": "dict", "children": [ @@ -272,35 +272,43 @@ "label": "Nuke Node Class" }, { - "type": "splitter" - }, - { - "key": "knobs", + "type": "collapsible-wrap", "label": "Knobs", - "type": "list", - "object_type": { - "type": "dict", - "children": [ - { - "type": "text", - "key": "name", - "label": "Name" - }, - { - "type": "text", - "key": "value", - "label": "Value" + "collapsible": true, + "collapsed": true, + "children": [ + { + "key": "knobs", + "type": "list", + "object_type": { + "type": "dict", + "children": [ + { + "type": "text", + "key": "name", + "label": "Name" + }, + { + "type": "text", + "key": "value", + "label": "Value" + } + ] } - ] - } + } + ] } + ] } }, + { + "type": "splitter" + }, { "type": "list", - "key": "customNodes", - "label": "Custom Nodes", + "key": "overrideNodes", + "label": "Plugin's node overrides", "object_type": { "type": "dict", "children": [ @@ -319,27 +327,37 @@ "label": "Nuke Node Class" }, { - "type": "splitter" + "key": "sebsets", + "label": "Subsets", + "type": "list", + "object_type": "text" }, { - "key": "knobs", + "type": "collapsible-wrap", "label": "Knobs", - "type": "list", - "object_type": { - "type": "dict", - "children": [ - { - "type": "text", - "key": "name", - "label": "Name" - }, - { - "type": "text", - "key": "value", - "label": "Value" + "collapsible": true, + "collapsed": true, + "children": [ + { + "key": "knobs", + "type": "list", + "object_type": { + "type": "dict", + "children": [ + { + "type": "text", + "key": "name", + "label": "Name" + }, + { + "type": "text", + "key": "value", + "label": "Value" + } + ] } - ] - } + } + ] } ] } From e0fba3583a75f832135f77c36bb6bb8690b8898b Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 28 Apr 2022 12:13:37 +0200 Subject: [PATCH 336/357] swapr keys --- openpype/hosts/tvpaint/plugins/create/create_render_pass.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/tvpaint/plugins/create/create_render_pass.py b/openpype/hosts/tvpaint/plugins/create/create_render_pass.py index 1c9f31e656..26fa8ac51a 100644 --- a/openpype/hosts/tvpaint/plugins/create/create_render_pass.py +++ b/openpype/hosts/tvpaint/plugins/create/create_render_pass.py @@ -36,7 +36,7 @@ class CreateRenderPass(plugin.Creator): # TODO remove - Backwards compatibility for old subset name templates # - added 2022/04/28 - dynamic_data["renderpass"] = dynamic_data["render_pass"] + dynamic_data["render_pass"] = dynamic_data["renderpass"] return dynamic_data From e4b5ee107b77c88833428161188d65474f3d7e84 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 28 Apr 2022 12:17:59 +0200 Subject: [PATCH 337/357] nuke: ux improvement --- .../schemas/projects_schema/schemas/schema_anatomy_imageio.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_anatomy_imageio.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_anatomy_imageio.json index 434f474f6e..141b51da0a 100644 --- 
a/openpype/settings/entities/schemas/projects_schema/schemas/schema_anatomy_imageio.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_anatomy_imageio.json @@ -334,7 +334,7 @@ }, { "type": "collapsible-wrap", - "label": "Knobs", + "label": "Knobs overrides", "collapsible": true, "collapsed": true, "children": [ From ca803331e595bb6bb8d2ff740d33fdfe25793aeb Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 28 Apr 2022 14:00:02 +0200 Subject: [PATCH 338/357] make sure keys are as list instead of 'dict_keys' in py2 --- openpype/lib/avalon_context.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/lib/avalon_context.py b/openpype/lib/avalon_context.py index 3fcddef745..9d8a92cfe9 100644 --- a/openpype/lib/avalon_context.py +++ b/openpype/lib/avalon_context.py @@ -1532,13 +1532,13 @@ class BuildWorkfile: subsets = list(legacy_io.find({ "type": "subset", - "parent": {"$in": asset_entity_by_ids.keys()} + "parent": {"$in": list(asset_entity_by_ids.keys())} })) subset_entity_by_ids = {subset["_id"]: subset for subset in subsets} sorted_versions = list(legacy_io.find({ "type": "version", - "parent": {"$in": subset_entity_by_ids.keys()} + "parent": {"$in": list(subset_entity_by_ids.keys())} }).sort("name", -1)) subset_id_with_latest_version = [] @@ -1552,7 +1552,7 @@ class BuildWorkfile: repres = legacy_io.find({ "type": "representation", - "parent": {"$in": last_versions_by_id.keys()} + "parent": {"$in": list(last_versions_by_id.keys())} }) output = {} From ba537e263d756034d70b3d5f08ead851684c9f8e Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Thu, 28 Apr 2022 15:09:12 +0100 Subject: [PATCH 339/357] Fix old avalon-core import --- openpype/hosts/unreal/plugins/create/create_render.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/unreal/plugins/create/create_render.py b/openpype/hosts/unreal/plugins/create/create_render.py index d81f7c7aab..1e6f5fb4d1 100644 --- a/openpype/hosts/unreal/plugins/create/create_render.py +++ b/openpype/hosts/unreal/plugins/create/create_render.py @@ -1,6 +1,6 @@ import unreal -from avalon import io +from openpype.pipeline import legacy_io from openpype.hosts.unreal.api import pipeline from openpype.hosts.unreal.api.plugin import Creator @@ -70,14 +70,14 @@ class CreateRender(Creator): # Get frame range. We need to go through the hierarchy and check # the frame range for the children. 
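The hunk below only swaps the old `avalon.io` queries for `openpype.pipeline.legacy_io`; the logic stays a breadth-first walk that keeps appending each child's children to the work list and collects their `clipIn`/`clipOut` values. The same traversal reduced to plain dictionaries (a sketch — `children_by_parent` is a made-up lookup standing in for the `data.visualParent` database queries, and combining the collected values with min/max is an assumption, since that part of the plugin is not shown here):

```python
def collect_clip_range(root_id, children_by_parent):
    """Walk the asset hierarchy breadth-first and gather clip in/out frames."""
    start_frames = []
    end_frames = []

    queue = list(children_by_parent.get(root_id, []))
    while queue:
        asset = queue.pop(0)
        data = asset.get("data", {})
        if "clipIn" in data and "clipOut" in data:
            start_frames.append(data["clipIn"])
            end_frames.append(data["clipOut"])
        # Children of this asset are visited next.
        queue.extend(children_by_parent.get(asset["_id"], []))

    if not start_frames:
        return None
    return min(start_frames), max(end_frames)


children_by_parent = {
    "ep01": [
        {"_id": "sh010", "data": {"clipIn": 1001, "clipOut": 1050}},
        {"_id": "sh020", "data": {"clipIn": 1051, "clipOut": 1120}},
    ],
}
print(collect_clip_range("ep01", children_by_parent))  # -> (1001, 1120)
```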
- asset_data = io.find_one({ + asset_data = legacy_io.find_one({ "type": "asset", "name": asset_name }) id = asset_data.get('_id') elements = list( - io.find({"type": "asset", "data.visualParent": id})) + legacy_io.find({"type": "asset", "data.visualParent": id})) if elements: start_frames = [] @@ -86,7 +86,7 @@ class CreateRender(Creator): start_frames.append(e.get('data').get('clipIn')) end_frames.append(e.get('data').get('clipOut')) - elements.extend(io.find({ + elements.extend(legacy_io.find({ "type": "asset", "data.visualParent": e.get('_id') })) From ab0980895fdac8ec1a83b777b5f41045202647c9 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 28 Apr 2022 16:43:37 +0200 Subject: [PATCH 340/357] nuke: adding key to default settings --- openpype/settings/defaults/project_anatomy/imageio.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/settings/defaults/project_anatomy/imageio.json b/openpype/settings/defaults/project_anatomy/imageio.json index 7a3f49452e..fedae994bf 100644 --- a/openpype/settings/defaults/project_anatomy/imageio.json +++ b/openpype/settings/defaults/project_anatomy/imageio.json @@ -165,7 +165,7 @@ ] } ], - "customNodes": [] + "overrideNodes": [] }, "regexInputs": { "inputs": [ From 45a5538a5c3eec03316e9fc24c0f4feb21be3742 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 28 Apr 2022 16:50:05 +0200 Subject: [PATCH 341/357] removed backwards compatibility --- start.py | 13 ++----------- 1 file changed, 2 insertions(+), 11 deletions(-) diff --git a/start.py b/start.py index 38eb9e9bf4..4d4801c1e5 100644 --- a/start.py +++ b/start.py @@ -266,18 +266,9 @@ def set_openpype_global_environments() -> None: """Set global OpenPype's environments.""" import acre - try: - from openpype.settings import get_general_environments + from openpype.settings import get_general_environments - general_env = get_general_environments() - - except Exception: - # Backwards compatibility for OpenPype versions where - # `get_general_environments` does not exists yet - from openpype.settings import get_environments - - all_env = get_environments() - general_env = all_env["global"] + general_env = get_general_environments() merged_env = acre.merge( acre.parse(general_env), From 77bac5c735ec2aee0f101f0267dc72b8635ad4b2 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 28 Apr 2022 20:20:25 +0200 Subject: [PATCH 342/357] nuke: fix `read` and rename to `read_avalon_data` --- openpype/hosts/nuke/api/lib.py | 22 ++++++++++------------ openpype/hosts/nuke/api/pipeline.py | 4 ++-- 2 files changed, 12 insertions(+), 14 deletions(-) diff --git a/openpype/hosts/nuke/api/lib.py b/openpype/hosts/nuke/api/lib.py index 4e38f811c9..bd39a1f0a8 100644 --- a/openpype/hosts/nuke/api/lib.py +++ b/openpype/hosts/nuke/api/lib.py @@ -400,7 +400,7 @@ def add_write_node(name, **kwarg): return w -def read(node): +def read_avalon_data(node): """Return user-defined knobs from given `node` Args: @@ -415,8 +415,6 @@ def read(node): return knob_name[len("avalon:"):] elif knob_name.startswith("ak:"): return knob_name[len("ak:"):] - else: - return knob_name data = dict() @@ -445,7 +443,8 @@ def read(node): (knob_type == 26 and value) ): key = compat_prefixed(knob_name) - data[key] = value + if key is not None: + data[key] = value if knob_name == first_user_knob: break @@ -567,7 +566,7 @@ def check_inventory_versions(): if container: node = nuke.toNode(container["objectName"]) - avalon_knob_data = read(node) + avalon_knob_data = read_avalon_data(node) # get representation from io representation 
= legacy_io.find_one({ @@ -623,7 +622,7 @@ def writes_version_sync(): if _NODE_TAB_NAME not in each.knobs(): continue - avalon_knob_data = read(each) + avalon_knob_data = read_avalon_data(each) try: if avalon_knob_data['families'] not in ["render"]: @@ -665,14 +664,14 @@ def check_subsetname_exists(nodes, subset_name): bool: True of False """ return next((True for n in nodes - if subset_name in read(n).get("subset", "")), + if subset_name in read_avalon_data(n).get("subset", "")), False) def get_render_path(node): ''' Generate Render path from presets regarding avalon knob data ''' - data = {'avalon': read(node)} + data = {'avalon': read_avalon_data(node)} data_preset = { "nodeclass": data['avalon']['family'], "families": [data['avalon']['families']], @@ -1293,7 +1292,7 @@ class WorkfileSettings(object): for node in nuke.allNodes(filter="Group"): # get data from avalon knob - avalon_knob_data = read(node) + avalon_knob_data = read_avalon_data(node) if not avalon_knob_data: continue @@ -1342,7 +1341,6 @@ class WorkfileSettings(object): write_node[knob["name"]].setValue(value) - def set_reads_colorspace(self, read_clrs_inputs): """ Setting colorspace to Read nodes @@ -1630,8 +1628,8 @@ def get_write_node_template_attr(node): ''' # get avalon data from node - data = dict() - data['avalon'] = read(node) + data = {"avalon": read_avalon_data(node)} + data_preset = { "nodeclass": data['avalon']['family'], "families": [data['avalon']['families']], diff --git a/openpype/hosts/nuke/api/pipeline.py b/openpype/hosts/nuke/api/pipeline.py index 0194acd196..2785eb65cd 100644 --- a/openpype/hosts/nuke/api/pipeline.py +++ b/openpype/hosts/nuke/api/pipeline.py @@ -32,7 +32,7 @@ from .lib import ( launch_workfiles_app, check_inventory_versions, set_avalon_knob_data, - read, + read_avalon_data, Context ) @@ -359,7 +359,7 @@ def parse_container(node): dict: The container schema data for this container node. """ - data = read(node) + data = read_avalon_data(node) # (TODO) Remove key validation when `ls` has re-implemented. 
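The renamed `read_avalon_data` only keeps knobs whose names carry an `avalon:` or `ak:` prefix — `compat_prefixed` now returns `None` for anything else, so unrelated user knobs no longer leak into the container data. The prefix handling on its own, as a sketch with plain strings and no Nuke:

```python
def strip_avalon_prefix(knob_name):
    """Return the knob name without its avalon prefix, or None if it has none."""
    for prefix in ("avalon:", "ak:"):
        if knob_name.startswith(prefix):
            return knob_name[len(prefix):]
    return None


knobs = {"avalon:subset": "renderMain", "ak:family": "render", "note": "unrelated"}
data = {}
for name, value in knobs.items():
    key = strip_avalon_prefix(name)
    if key is not None:  # unprefixed knobs are skipped by the fixed reader
        data[key] = value
print(data)  # -> {'subset': 'renderMain', 'family': 'render'}
```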
# From 2d3a4541c99c083565d56bcf6533d3c24099dce8 Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Thu, 28 Apr 2022 20:29:28 +0200 Subject: [PATCH 343/357] remove submodules after accidentally merging them back with PR --- openpype/modules/default_modules/ftrack/python2_vendor/arrow | 1 - .../default_modules/ftrack/python2_vendor/ftrack-python-api | 1 - repos/avalon-core | 1 - repos/avalon-unreal-integration | 1 - 4 files changed, 4 deletions(-) delete mode 160000 openpype/modules/default_modules/ftrack/python2_vendor/arrow delete mode 160000 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api delete mode 160000 repos/avalon-core delete mode 160000 repos/avalon-unreal-integration diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow b/openpype/modules/default_modules/ftrack/python2_vendor/arrow deleted file mode 160000 index b746fedf72..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/arrow +++ /dev/null @@ -1 +0,0 @@ -Subproject commit b746fedf7286c3755a46f07ab72f4c414cd41fc0 diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api deleted file mode 160000 index d277f474ab..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api +++ /dev/null @@ -1 +0,0 @@ -Subproject commit d277f474ab016e7b53479c36af87cb861d0cc53e diff --git a/repos/avalon-core b/repos/avalon-core deleted file mode 160000 index 64491fbbcf..0000000000 --- a/repos/avalon-core +++ /dev/null @@ -1 +0,0 @@ -Subproject commit 64491fbbcf89ba2a0b3a20d67d7486c6142232b3 diff --git a/repos/avalon-unreal-integration b/repos/avalon-unreal-integration deleted file mode 160000 index 43f6ea9439..0000000000 --- a/repos/avalon-unreal-integration +++ /dev/null @@ -1 +0,0 @@ -Subproject commit 43f6ea943980b29c02a170942b566ae11f2b7080 From fbdb06a9ac542831ed2da5e1cb5361acf62bc938 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 28 Apr 2022 20:34:57 +0200 Subject: [PATCH 344/357] nuke: code consistency - replacing single quotations with double - improving code --- openpype/hosts/nuke/api/lib.py | 86 ++++++++++++++++--------------- openpype/hosts/nuke/api/plugin.py | 2 - 2 files changed, 45 insertions(+), 43 deletions(-) diff --git a/openpype/hosts/nuke/api/lib.py b/openpype/hosts/nuke/api/lib.py index bd39a1f0a8..77945c6fec 100644 --- a/openpype/hosts/nuke/api/lib.py +++ b/openpype/hosts/nuke/api/lib.py @@ -541,7 +541,7 @@ def get_imageio_input_colorspace(filename): def on_script_load(): ''' Callback for ffmpeg support ''' - if nuke.env['LINUX']: + if nuke.env["LINUX"]: nuke.tcl('load ffmpegReader') nuke.tcl('load ffmpegWriter') else: @@ -592,7 +592,7 @@ def check_inventory_versions(): versions = legacy_io.find({ "type": "version", "parent": version["parent"] - }).distinct('name') + }).distinct("name") max_version = max(versions) @@ -625,17 +625,17 @@ def writes_version_sync(): avalon_knob_data = read_avalon_data(each) try: - if avalon_knob_data['families'] not in ["render"]: - log.debug(avalon_knob_data['families']) + if avalon_knob_data["families"] not in ["render"]: + log.debug(avalon_knob_data["families"]) continue - node_file = each['file'].value() + node_file = each["file"].value() node_version = "v" + get_version_from_path(node_file) log.debug("node_version: {}".format(node_version)) node_new_file = node_file.replace(node_version, new_version) - each['file'].setValue(node_new_file) + each["file"].setValue(node_new_file) if not 
os.path.isdir(os.path.dirname(node_new_file)): log.warning("Path does not exist! I am creating it.") os.makedirs(os.path.dirname(node_new_file)) @@ -673,9 +673,9 @@ def get_render_path(node): ''' data = {'avalon': read_avalon_data(node)} data_preset = { - "nodeclass": data['avalon']['family'], - "families": [data['avalon']['families']], - "creator": data['avalon']['creator'] + "nodeclass": data["avalon"]["family"], + "families": [data["avalon"]["families"]], + "creator": data["avalon"]["creator"], } nuke_imageio_writes = get_created_node_imageio_setting(**data_preset) @@ -748,7 +748,7 @@ def format_anatomy(data): def script_name(): ''' Returns nuke script path ''' - return nuke.root().knob('name').value() + return nuke.root().knob("name").value() def add_button_write_to_read(node): @@ -843,7 +843,7 @@ def create_write_node(name, data, input=None, prenodes=None, # adding dataflow template log.debug("imageio_writes: `{}`".format(imageio_writes)) for knob in imageio_writes["knobs"]: - _data.update({knob["name"]: knob["value"]}) + _data[knob["name"]] = knob["value"] _data = fix_data_for_node_create(_data) @@ -1192,15 +1192,19 @@ class WorkfileSettings(object): erased_viewers = [] for v in nuke.allNodes(filter="Viewer"): - v['viewerProcess'].setValue(str(viewer_dict["viewerProcess"])) + # set viewProcess to preset from settings + v["viewerProcess"].setValue( + str(viewer_dict["viewerProcess"]) + ) + if str(viewer_dict["viewerProcess"]) \ - not in v['viewerProcess'].value(): + not in v["viewerProcess"].value(): copy_inputs = v.dependencies() copy_knobs = {k: v[k].value() for k in v.knobs() if k not in filter_knobs} # delete viewer with wrong settings - erased_viewers.append(v['name'].value()) + erased_viewers.append(v["name"].value()) nuke.delete(v) # create new viewer @@ -1216,7 +1220,7 @@ class WorkfileSettings(object): nv[k].setValue(v) # set viewerProcess - nv['viewerProcess'].setValue(str(viewer_dict["viewerProcess"])) + nv["viewerProcess"].setValue(str(viewer_dict["viewerProcess"])) if erased_viewers: log.warning( @@ -1308,7 +1312,7 @@ class WorkfileSettings(object): data_preset = { "nodeclass": avalon_knob_data["family"], "families": families, - "creator": avalon_knob_data['creator'] + "creator": avalon_knob_data["creator"], } nuke_imageio_writes = get_created_node_imageio_setting( @@ -1366,17 +1370,16 @@ class WorkfileSettings(object): current = n["colorspace"].value() future = str(preset_clrsp) if current != future: - changes.update({ - n.name(): { - "from": current, - "to": future - } - }) + changes[n.name()] = { + "from": current, + "to": future + } + log.debug(changes) if changes: msg = "Read nodes are not set to correct colospace:\n\n" for nname, knobs in changes.items(): - msg += str( + msg += ( " - node: '{0}' is now '{1}' but should be '{2}'\n" ).format(nname, knobs["from"], knobs["to"]) @@ -1608,17 +1611,17 @@ def get_hierarchical_attr(entity, attr, default=None): if not value: break - if value or entity['type'].lower() == 'project': + if value or entity["type"].lower() == "project": return value - parent_id = entity['parent'] + parent_id = entity["parent"] if ( - entity['type'].lower() == 'asset' - and entity.get('data', {}).get('visualParent') + entity["type"].lower() == "asset" + and entity.get("data", {}).get("visualParent") ): - parent_id = entity['data']['visualParent'] + parent_id = entity["data"]["visualParent"] - parent = legacy_io.find_one({'_id': parent_id}) + parent = legacy_io.find_one({"_id": parent_id}) return get_hierarchical_attr(parent, attr) @@ -1631,9 +1634,9 
@@ def get_write_node_template_attr(node): data = {"avalon": read_avalon_data(node)} data_preset = { - "nodeclass": data['avalon']['family'], - "families": [data['avalon']['families']], - "creator": data['avalon']['creator'] + "nodeclass": data["avalon"]["family"], + "families": [data["avalon"]["families"]], + "creator": data["avalon"]["creator"], } # get template data @@ -1644,10 +1647,11 @@ def get_write_node_template_attr(node): "file": get_render_path(node) }) - # adding imageio template - {correct_data.update({k: v}) - for k, v in nuke_imageio_writes.items() - if k not in ["_id", "_previous"]} + # adding imageio knob presets + for k, v in nuke_imageio_writes.items(): + if k in ["_id", "_previous"]: + continue + correct_data[k] = v # fix badly encoded data return fix_data_for_node_create(correct_data) @@ -1763,8 +1767,8 @@ def maintained_selection(): Example: >>> with maintained_selection(): - ... node['selected'].setValue(True) - >>> print(node['selected'].value()) + ... node["selected"].setValue(True) + >>> print(node["selected"].value()) False """ previous_selection = nuke.selectedNodes() @@ -1772,11 +1776,11 @@ def maintained_selection(): yield finally: # unselect all selection in case there is some - current_seletion = nuke.selectedNodes() - [n['selected'].setValue(False) for n in current_seletion] + reset_selection() + # and select all previously selected nodes if previous_selection: - [n['selected'].setValue(True) for n in previous_selection] + select_nodes(previous_selection) def reset_selection(): diff --git a/openpype/hosts/nuke/api/plugin.py b/openpype/hosts/nuke/api/plugin.py index eaf0ab6911..9c22edf63d 100644 --- a/openpype/hosts/nuke/api/plugin.py +++ b/openpype/hosts/nuke/api/plugin.py @@ -260,8 +260,6 @@ class ExporterReview(object): return nuke_imageio["viewer"]["viewerProcess"] - - class ExporterReviewLut(ExporterReview): """ Generator object for review lut from Nuke From 162b21b61e88b26a7a82830bc5db7c8c696f0249 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 28 Apr 2022 20:36:18 +0200 Subject: [PATCH 345/357] nuke: typo in subset (sebset) --- openpype/hosts/nuke/plugins/publish/extract_review_data_mov.py | 2 +- openpype/settings/defaults/project_settings/nuke.json | 2 +- .../schemas/projects_schema/schemas/schema_anatomy_imageio.json | 2 +- .../schemas/projects_schema/schemas/schema_nuke_publish.json | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/nuke/plugins/publish/extract_review_data_mov.py b/openpype/hosts/nuke/plugins/publish/extract_review_data_mov.py index 2e8843d2e0..2a79d600ba 100644 --- a/openpype/hosts/nuke/plugins/publish/extract_review_data_mov.py +++ b/openpype/hosts/nuke/plugins/publish/extract_review_data_mov.py @@ -52,7 +52,7 @@ class ExtractReviewDataMov(openpype.api.Extractor): for o_name, o_data in self.outputs.items(): f_families = o_data["filter"]["families"] f_task_types = o_data["filter"]["task_types"] - f_subsets = o_data["filter"]["sebsets"] + f_subsets = o_data["filter"]["subsets"] self.log.debug( "f_families `{}` > families: {}".format( diff --git a/openpype/settings/defaults/project_settings/nuke.json b/openpype/settings/defaults/project_settings/nuke.json index ab015271ff..ddf996b5f2 100644 --- a/openpype/settings/defaults/project_settings/nuke.json +++ b/openpype/settings/defaults/project_settings/nuke.json @@ -120,7 +120,7 @@ "filter": { "task_types": [], "families": [], - "sebsets": [] + "subsets": [] }, "read_raw": false, "viewer_process_override": "", diff --git 
a/openpype/settings/entities/schemas/projects_schema/schemas/schema_anatomy_imageio.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_anatomy_imageio.json index 141b51da0a..c90eeef787 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_anatomy_imageio.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_anatomy_imageio.json @@ -327,7 +327,7 @@ "label": "Nuke Node Class" }, { - "key": "sebsets", + "key": "subsets", "label": "Subsets", "type": "list", "object_type": "text" diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_nuke_publish.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_nuke_publish.json index 4a796f1933..d67fb309bd 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_nuke_publish.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_nuke_publish.json @@ -212,7 +212,7 @@ "object_type": "text" }, { - "key": "sebsets", + "key": "subsets", "label": "Subsets", "type": "list", "object_type": "text" From 5546ea2fa7bce5db07b7cb27a430d503bf62762c Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 28 Apr 2022 20:37:34 +0200 Subject: [PATCH 346/357] nuke: including `overrideNodes` keys also adding subset to filtering kwargs --- openpype/hosts/nuke/api/lib.py | 63 ++++++++++++++++++++++++++++--- openpype/hosts/nuke/api/plugin.py | 3 +- 2 files changed, 59 insertions(+), 7 deletions(-) diff --git a/openpype/hosts/nuke/api/lib.py b/openpype/hosts/nuke/api/lib.py index 77945c6fec..3745bd7be7 100644 --- a/openpype/hosts/nuke/api/lib.py +++ b/openpype/hosts/nuke/api/lib.py @@ -506,20 +506,68 @@ def get_created_node_imageio_setting(**kwarg): log.debug(kwarg) nodeclass = kwarg.get("nodeclass", None) creator = kwarg.get("creator", None) + subset = kwarg.get("subset", None) assert any([creator, nodeclass]), nuke.message( "`{}`: Missing mandatory kwargs `host`, `cls`".format(__file__)) - imageio_nodes = get_nuke_imageio_settings()["nodes"]["requiredNodes"] + imageio_nodes = get_nuke_imageio_settings()["nodes"] + required_nodes = imageio_nodes["requiredNodes"] + override_nodes = imageio_nodes["overrideNodes"] imageio_node = None - for node in imageio_nodes: + for node in required_nodes: log.info(node) - if (nodeclass in node["nukeNodeClass"]) and ( - creator in node["plugins"]): + if ( + nodeclass in node["nukeNodeClass"] + and creator in node["plugins"] + ): imageio_node = node break + log.debug("__ imageio_node: {}".format(imageio_node)) + + # find matching override node + override_imageio_node = None + for onode in override_nodes: + log.info(onode) + if nodeclass not in node["nukeNodeClass"]: + continue + + if creator not in node["plugins"]: + continue + + if ( + onode["subsets"] + and not any(re.search(s, subset) for s in onode["subsets"]) + ): + continue + + override_imageio_node = onode + break + + log.debug("__ override_imageio_node: {}".format(override_imageio_node)) + # add overrides to imageio_node + if override_imageio_node: + # get all knob names in imageio_node + knob_names = [k["name"] for k in imageio_node["knobs"]] + + for oknob in override_imageio_node["knobs"]: + for knob in imageio_node["knobs"]: + # override matching knob name + if oknob["name"] == knob["name"]: + log.debug( + "_ overriding knob: `{}` > `{}`".format( + knob, oknob + )) + knob["value"] = oknob["value"] + # add missing knobs into imageio_node + if oknob["name"] not in knob_names: + log.debug( + "_ adding knob: `{}`".format(oknob)) + 
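The override resolution in this function first picks the `overrideNodes` entry whose `subsets` regexes match the publishing subset, then lays its knobs over the required node's knobs: matching names are overridden, unknown names are appended. Condensed into a standalone function (a sketch that skips the node-class and plugin checks the real code also performs):

```python
import re


def resolve_node_knobs(required_knobs, override_nodes, subset):
    """Merge knob overrides from the first override entry whose subset regexes match."""
    knobs = {k["name"]: k["value"] for k in required_knobs}

    for onode in override_nodes:
        patterns = onode.get("subsets") or []
        if patterns and not any(re.search(p, subset) for p in patterns):
            continue
        for oknob in onode.get("knobs") or []:
            # Matching names are overridden, new names are appended.
            knobs[oknob["name"]] = oknob["value"]
        break

    return [{"name": name, "value": value} for name, value in knobs.items()]


required = [
    {"name": "file_type", "value": "exr"},
    {"name": "datatype", "value": "16 bit half"},
]
overrides = [
    {"subsets": ["^renderPrecomp"], "knobs": [{"name": "datatype", "value": "32 bit float"}]},
]
print(resolve_node_knobs(required, overrides, "renderPrecompMain"))
```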
imageio_node["knobs"].append(oknob) + knob_names.append(oknob["name"]) + log.info("ImageIO node: {}".format(imageio_node)) return imageio_node @@ -676,6 +724,7 @@ def get_render_path(node): "nodeclass": data["avalon"]["family"], "families": [data["avalon"]["families"]], "creator": data["avalon"]["creator"], + "subset": data["avalon"]["subset"] } nuke_imageio_writes = get_created_node_imageio_setting(**data_preset) @@ -1298,10 +1347,10 @@ class WorkfileSettings(object): # get data from avalon knob avalon_knob_data = read_avalon_data(node) - if not avalon_knob_data: + if avalon_knob_data.get("id") != "pyblish.avalon.instance": continue - if avalon_knob_data["id"] != "pyblish.avalon.instance": + if "creator" not in avalon_knob_data: continue # establish families @@ -1313,6 +1362,7 @@ class WorkfileSettings(object): "nodeclass": avalon_knob_data["family"], "families": families, "creator": avalon_knob_data["creator"], + "subset": avalon_knob_data["subset"] } nuke_imageio_writes = get_created_node_imageio_setting( @@ -1637,6 +1687,7 @@ def get_write_node_template_attr(node): "nodeclass": data["avalon"]["family"], "families": [data["avalon"]["families"]], "creator": data["avalon"]["creator"], + "subset": data["avalon"]["subset"] } # get template data diff --git a/openpype/hosts/nuke/api/plugin.py b/openpype/hosts/nuke/api/plugin.py index 9c22edf63d..fdb5930cb2 100644 --- a/openpype/hosts/nuke/api/plugin.py +++ b/openpype/hosts/nuke/api/plugin.py @@ -671,7 +671,8 @@ class AbstractWriteRender(OpenPypeCreator): write_data = { "nodeclass": self.n_class, "families": [self.family], - "avalon": self.data + "avalon": self.data, + "subset": self.data["subset"] } # add creator data From 1765f25ecd3410fffd6ae9a35da96374ecb67abb Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 28 Apr 2022 20:48:46 +0200 Subject: [PATCH 347/357] nuke: removing knob preset if no value in override --- openpype/hosts/nuke/api/lib.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/nuke/api/lib.py b/openpype/hosts/nuke/api/lib.py index 3745bd7be7..3223feaec7 100644 --- a/openpype/hosts/nuke/api/lib.py +++ b/openpype/hosts/nuke/api/lib.py @@ -560,7 +560,13 @@ def get_created_node_imageio_setting(**kwarg): "_ overriding knob: `{}` > `{}`".format( knob, oknob )) - knob["value"] = oknob["value"] + if not oknob["value"]: + # remove original knob if no value found in oknob + imageio_node["knobs"].remove(knob) + else: + # override knob value with oknob's + knob["value"] = oknob["value"] + # add missing knobs into imageio_node if oknob["name"] not in knob_names: log.debug( From 6d411d2617273de9c27d69efab0426c386d91316 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 29 Apr 2022 10:09:09 +0200 Subject: [PATCH 348/357] added missing detailed description --- openpype/hosts/traypublisher/api/plugin.py | 1 + 1 file changed, 1 insertion(+) diff --git a/openpype/hosts/traypublisher/api/plugin.py b/openpype/hosts/traypublisher/api/plugin.py index 731bf7918a..813641a7d2 100644 --- a/openpype/hosts/traypublisher/api/plugin.py +++ b/openpype/hosts/traypublisher/api/plugin.py @@ -96,6 +96,7 @@ class SettingsCreator(TrayPublishCreator): "label": item_data["label"].strip(), "icon": item_data["icon"], "description": item_data["description"], + "detailed_description": item_data["detailed_description"], "enable_review": item_data["enable_review"], "extensions": item_data["extensions"], "allow_sequences": item_data["allow_sequences"], From 7a6602a4d5c84aa0b996fe1bcdd9d516b42bbebd Mon Sep 17 00:00:00 2001 
From: Jakub Trllo Date: Fri, 29 Apr 2022 10:11:09 +0200 Subject: [PATCH 349/357] added tray publisher to extract review/burnin plugins --- openpype/plugins/publish/extract_burnin.py | 1 + openpype/plugins/publish/extract_review.py | 1 + 2 files changed, 2 insertions(+) diff --git a/openpype/plugins/publish/extract_burnin.py b/openpype/plugins/publish/extract_burnin.py index 544c763b52..88093fb92f 100644 --- a/openpype/plugins/publish/extract_burnin.py +++ b/openpype/plugins/publish/extract_burnin.py @@ -41,6 +41,7 @@ class ExtractBurnin(openpype.api.Extractor): "shell", "hiero", "premiere", + "traypublisher", "standalonepublisher", "harmony", "fusion", diff --git a/openpype/plugins/publish/extract_review.py b/openpype/plugins/publish/extract_review.py index 9ee57c5a67..879125dac3 100644 --- a/openpype/plugins/publish/extract_review.py +++ b/openpype/plugins/publish/extract_review.py @@ -45,6 +45,7 @@ class ExtractReview(pyblish.api.InstancePlugin): "hiero", "premiere", "harmony", + "traypublisher", "standalonepublisher", "fusion", "tvpaint", From b2b4b3cb9cbe1beefbec4defd2b526650a71abdc Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 29 Apr 2022 10:16:13 +0200 Subject: [PATCH 350/357] fixed drop of multiple files for single file item input --- openpype/widgets/attribute_defs/files_widget.py | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/openpype/widgets/attribute_defs/files_widget.py b/openpype/widgets/attribute_defs/files_widget.py index c76474d957..a3ee370bd3 100644 --- a/openpype/widgets/attribute_defs/files_widget.py +++ b/openpype/widgets/attribute_defs/files_widget.py @@ -205,6 +205,18 @@ class FilesProxyModel(QtCore.QSortFilterProxyModel): return True return False + def filter_valid_files(self, filepaths): + filtered_paths = [] + for filepath in filepaths: + if os.path.isfile(filepath): + _, ext = os.path.splitext(filepath) + if ext in self._allowed_extensions: + filtered_paths.append(filepath) + + elif self._allow_folders: + filtered_paths.append(filepath) + return filtered_paths + def filterAcceptsRow(self, row, parent_index): # Skip filtering if multivalue is set if self._multivalue: @@ -617,6 +629,9 @@ class FilesWidget(QtWidgets.QFrame): filepath = url.toLocalFile() if os.path.exists(filepath): filepaths.append(filepath) + + # Filter filepaths before passing it to model + filepaths = self._files_proxy_model.filter_valid_files(filepaths) if filepaths: self._add_filepaths(filepaths) event.accept() From fe128ff4728f7a675d6e15700fd2faeecf5b0647 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 29 Apr 2022 10:27:16 +0200 Subject: [PATCH 351/357] added tooltip to disabled create button --- openpype/tools/publisher/widgets/create_dialog.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/openpype/tools/publisher/widgets/create_dialog.py b/openpype/tools/publisher/widgets/create_dialog.py index 971799a35a..243540f243 100644 --- a/openpype/tools/publisher/widgets/create_dialog.py +++ b/openpype/tools/publisher/widgets/create_dialog.py @@ -467,12 +467,15 @@ class CreateDialog(QtWidgets.QDialog): def _on_prereq_timer(self): prereq_available = True + creator_btn_tooltips = [] if self.creators_model.rowCount() < 1: prereq_available = False + creator_btn_tooltips.append("Creator is not selected") if self._asset_doc is None: # QUESTION how to handle invalid asset? 
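The drag-and-drop fix above runs every dropped path through `filter_valid_files` before it reaches the model, so a single-file input no longer accepts arbitrary drops. Isolated from the proxy model, the rule looks like this (a sketch; the example paths are made up and would need to exist on disk for the file branch to trigger):

```python
import os


def filter_valid_files(filepaths, allowed_extensions, allow_folders):
    """Keep paths that are files with an allowed extension; optionally keep other paths."""
    filtered = []
    for filepath in filepaths:
        if os.path.isfile(filepath):
            _, ext = os.path.splitext(filepath)
            if ext in allowed_extensions:
                filtered.append(filepath)
        elif allow_folders:
            # Mirroring the widget: non-file paths pass only when folders are allowed.
            filtered.append(filepath)
    return filtered


dropped = ["/renders/plate.0001.exr", "/renders/notes.txt", "/renders/reference"]
# -> [] here; with real files on disk only the .exr would survive the filter.
print(filter_valid_files(dropped, allowed_extensions={".exr", ".mov"}, allow_folders=False))
```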
prereq_available = False + creator_btn_tooltips.append("Context is not selected") if prereq_available != self._prereq_available: self._prereq_available = prereq_available @@ -481,6 +484,12 @@ class CreateDialog(QtWidgets.QDialog): self.creators_view.setEnabled(prereq_available) self.variant_input.setEnabled(prereq_available) self.variant_hints_btn.setEnabled(prereq_available) + + tooltip = "" + if creator_btn_tooltips: + tooltip = "\n".join(creator_btn_tooltips) + self.create_btn.setToolTip(tooltip) + self._on_variant_change() def _refresh_asset(self): From 42399403810f4ddabc28ce4c1c82a7e1a402aa80 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 29 Apr 2022 10:38:51 +0200 Subject: [PATCH 352/357] fixed import --- openpype/lib/__init__.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/openpype/lib/__init__.py b/openpype/lib/__init__.py index 29719b63bd..d19dacaff8 100644 --- a/openpype/lib/__init__.py +++ b/openpype/lib/__init__.py @@ -47,7 +47,6 @@ from .attribute_definitions import ( from .env_tools import ( env_value_to_bool, get_paths_from_environ, - get_global_environments ) from .terminal import Terminal @@ -248,7 +247,6 @@ __all__ = [ "env_value_to_bool", "get_paths_from_environ", - "get_global_environments", "get_vendor_bin_path", "get_oiio_tools_path", From 5f7524bb1cd542d49ccd60c538d78925596cf111 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 29 Apr 2022 10:46:12 +0200 Subject: [PATCH 353/357] hiero: better subset name --- .../hosts/hiero/plugins/publish/collect_frame_tag_instances.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/hiero/plugins/publish/collect_frame_tag_instances.py b/openpype/hosts/hiero/plugins/publish/collect_frame_tag_instances.py index 80a54ba2c5..982a34efd6 100644 --- a/openpype/hosts/hiero/plugins/publish/collect_frame_tag_instances.py +++ b/openpype/hosts/hiero/plugins/publish/collect_frame_tag_instances.py @@ -129,7 +129,7 @@ class CollectFrameTagInstances(pyblish.api.ContextPlugin): "family": "image", "families": ["frame"], "asset": subset_data["asset"], - "subset": subset_name, + "subset": name, "format": subset_data["format"], "frames": subset_data["frames"] } From 05cc463ed9064ab81a993f2dd0a078a0fff67b2f Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 29 Apr 2022 11:25:23 +0200 Subject: [PATCH 354/357] flame: addressing pr comment --- .../hosts/flame/plugins/publish/extract_subset_resources.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/flame/plugins/publish/extract_subset_resources.py b/openpype/hosts/flame/plugins/publish/extract_subset_resources.py index 4598405923..fd0ece2590 100644 --- a/openpype/hosts/flame/plugins/publish/extract_subset_resources.py +++ b/openpype/hosts/flame/plugins/publish/extract_subset_resources.py @@ -236,8 +236,8 @@ class ExtractSubsetResources(openpype.api.Extractor): # define kwargs based on preset type if "thumbnail" in unique_name: - export_kwargs["thumb_frame_number"] = in_mark + ( - source_duration_handles / 2) + export_kwargs["thumb_frame_number"] = int(in_mark + ( + source_duration_handles / 2)) else: export_kwargs.update({ "in_mark": in_mark, From 518c60de0b63746b20d80ebbd4467bc72655afbd Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 29 Apr 2022 15:31:05 +0200 Subject: [PATCH 355/357] fixed renderlayer key access --- .../plugins/publish/collect_instances.py | 21 +++++++++---------- 1 file changed, 10 insertions(+), 11 deletions(-) diff --git a/openpype/hosts/tvpaint/plugins/publish/collect_instances.py 
b/openpype/hosts/tvpaint/plugins/publish/collect_instances.py index 9b51f88cae..782907b65d 100644 --- a/openpype/hosts/tvpaint/plugins/publish/collect_instances.py +++ b/openpype/hosts/tvpaint/plugins/publish/collect_instances.py @@ -47,19 +47,18 @@ class CollectInstances(pyblish.api.ContextPlugin): # Conversion from older instances # - change 'render_layer' to 'renderlayer' - # and 'render_pass' to 'renderpass' + render_layer = instance_data.get("instance_data") + if not render_layer: + # Render Layer has only variant + if instance_data["family"] == "renderLayer": + render_layer = instance_data.get("variant") - if ( - "renderlayer" not in instance_data - and "render_layer" in instance_data - ): - instance_data["renderlayer"] = instance_data["render_layer"] + # Backwards compatibility for renderPasses + elif "render_layer" in instance_data: + render_layer = instance_data["render_layer"] - if ( - "renderpass" not in instance_data - and "render_pass" in instance_data - ): - instance_data["renderpass"] = instance_data["render_pass"] + if render_layer: + instance_data["renderlayer"] = render_layer # Store workfile instance data to instance data instance_data["originData"] = copy.deepcopy(instance_data) From f3726d0c95c35ee1e5fd3f480c751dd12fae0046 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 29 Apr 2022 15:35:25 +0200 Subject: [PATCH 356/357] modified collect scene instance to add only 'renderlayer' --- .../hosts/tvpaint/plugins/publish/collect_scene_render.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/tvpaint/plugins/publish/collect_scene_render.py b/openpype/hosts/tvpaint/plugins/publish/collect_scene_render.py index 02e0575a2c..2b8dbdc5b4 100644 --- a/openpype/hosts/tvpaint/plugins/publish/collect_scene_render.py +++ b/openpype/hosts/tvpaint/plugins/publish/collect_scene_render.py @@ -104,10 +104,10 @@ class CollectRenderScene(pyblish.api.ContextPlugin): "representations": [], "layers": copy.deepcopy(context.data["layersData"]), "asset": asset_name, - "task": task_name + "task": task_name, + # Add render layer to instance data + "renderlayer": self.render_layer } - # Add 'renderlayer' and 'renderpass' to data - instance_data.update(dynamic_data) instance = context.create_instance(**instance_data) From 7504f273806b59c8192dcf4bf7137340aa04d0ba Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 29 Apr 2022 15:53:58 +0200 Subject: [PATCH 357/357] Nuke: fixing default settings for workfile builder loaders --- openpype/settings/defaults/project_settings/nuke.json | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/openpype/settings/defaults/project_settings/nuke.json b/openpype/settings/defaults/project_settings/nuke.json index ddf996b5f2..0b03a00187 100644 --- a/openpype/settings/defaults/project_settings/nuke.json +++ b/openpype/settings/defaults/project_settings/nuke.json @@ -220,11 +220,12 @@ "repre_names": [ "exr", "dpx", - "mov" + "mov", + "mp4", + "h264" ], "loaders": [ - "LoadSequence", - "LoadMov" + "LoadClip" ] } ],
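The TVPaint collector changes at the end of the series resolve the render layer name from several possible sources for backwards compatibility: an already stored `renderlayer`, the `variant` of a `renderLayer` instance, or the legacy `render_layer` key. The intent described by the hunk's comments, condensed into one helper (a sketch, not the plugin code itself):

```python
def resolve_render_layer(instance_data):
    """Return the render layer name, falling back to older instance keys."""
    render_layer = instance_data.get("renderlayer")
    if render_layer:
        return render_layer

    # Render Layer instances only store their variant.
    if instance_data.get("family") == "renderLayer":
        return instance_data.get("variant")

    # Backwards compatibility for render passes created before the rename.
    return instance_data.get("render_layer")


old_pass = {"family": "renderPass", "render_layer": "L010_CH_hero"}
print(resolve_render_layer(old_pass))  # -> "L010_CH_hero"
```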