Mirror of https://github.com/ynput/ayon-core.git (synced 2025-12-24 21:04:40 +01:00)

Commit 8a16b9989c: Merge branch 'develop' into enhancement/use-package-py

# Conflicts:
#	server_addon/substancepainter/server/version.py

7 changed files with 749 additions and 551 deletions
@@ -586,7 +586,6 @@ def prompt_new_file_with_mesh(mesh_filepath):
|
|||
# TODO: find a way to improve the event processing to
|
||||
# load more complicated meshes
|
||||
app.processEvents(QtCore.QEventLoop.ExcludeUserInputEvents, 3000)
|
||||
|
||||
file_dialog.done(file_dialog.Accepted)
|
||||
app.processEvents(QtCore.QEventLoop.AllEvents)
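The processEvents() calls above pump the Qt event loop so the file dialog can finish loading the mesh before it is accepted. A minimal sketch of the same pattern, assuming a running QApplication and a hypothetical pump_until() helper that is not part of this module:
from qtpy import QtCore, QtWidgets

def pump_until(condition, timeout_ms=3000, step_ms=100):
    # Process pending events (ignoring user input) until `condition` returns
    # True or roughly `timeout_ms` milliseconds have passed.
    app = QtWidgets.QApplication.instance()
    waited = 0
    while not condition() and waited < timeout_ms:
        app.processEvents(QtCore.QEventLoop.ExcludeUserInputEvents, step_ms)
        waited += step_ms
    return condition()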
|
||||
|
||||
|
|
@@ -606,7 +605,7 @@ def prompt_new_file_with_mesh(mesh_filepath):
|
|||
mesh_select.setVisible(False)
|
||||
|
||||
# Ensure UI is visually up-to-date
|
||||
app.processEvents(QtCore.QEventLoop.ExcludeUserInputEvents)
|
||||
app.processEvents(QtCore.QEventLoop.ExcludeUserInputEvents, 8000)
|
||||
|
||||
# Trigger the 'select file' dialog to set the path and have the
|
||||
# new file dialog to use the path.
|
||||
|
|
@@ -623,8 +622,6 @@ def prompt_new_file_with_mesh(mesh_filepath):
|
|||
"Failed to set mesh path with the prompt dialog:"
|
||||
f"{mesh_filepath}\n\n"
|
||||
"Creating new project directly with the mesh path instead.")
|
||||
else:
|
||||
dialog.done(dialog.Accepted)
|
||||
|
||||
new_action = _get_new_project_action()
|
||||
if not new_action:
|
||||
|
|
|
|||
|
|
@@ -1,3 +1,5 @@
|
|||
import copy
|
||||
from qtpy import QtWidgets, QtCore
|
||||
from ayon_core.pipeline import (
|
||||
load,
|
||||
get_representation_path,
|
||||
|
|
@@ -8,10 +10,133 @@ from ayon_core.hosts.substancepainter.api.pipeline import (
|
|||
set_container_metadata,
|
||||
remove_container_metadata
|
||||
)
|
||||
from ayon_core.hosts.substancepainter.api.lib import prompt_new_file_with_mesh
|
||||
|
||||
import substance_painter.project
|
||||
import qargparse
|
||||
|
||||
|
||||
def _convert(substance_attr):
|
||||
"""Return Substance Painter Python API Project attribute from string.
|
||||
|
||||
This converts a string like "ProjectWorkflow.Default" to the
|
||||
corresponding Substance Painter Python API object, for example:
|
||||
`substance_painter.project.ProjectWorkflow.Default`
|
||||
|
||||
Args:
|
||||
substance_attr (str): The `substance_painter.project` attribute,
|
||||
for example "ProjectWorkflow.Default"
|
||||
|
||||
Returns:
|
||||
Any: Substance Python API object of the project attribute.
|
||||
|
||||
Raises:
|
||||
ValueError: If attribute does not exist on the
|
||||
`substance_painter.project` python api.
|
||||
"""
|
||||
root = substance_painter.project
|
||||
for attr in substance_attr.split("."):
|
||||
root = getattr(root, attr, None)
|
||||
if root is None:
|
||||
raise ValueError(
|
||||
"Substance Painter project attribute"
|
||||
f" does not exist: {substance_attr}")
|
||||
|
||||
return root
|
||||
|
||||
|
||||
def get_template_by_name(name: str, templates: list[dict]) -> dict:
|
||||
return next(
|
||||
template for template in templates
|
||||
if template["name"] == name
|
||||
)
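A small usage sketch with a hypothetical template list; note that next() without a default raises StopIteration when no template matches the requested name:
templates = [{"name": "2K(Default)", "import_cameras": True}]
template = get_template_by_name("2K(Default)", templates)
assert template["import_cameras"] is True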
|
||||
|
||||
|
||||
class SubstanceProjectConfigurationWindow(QtWidgets.QDialog):
|
||||
"""The pop-up dialog allows users to choose material
|
||||
Lets the user pick one of the settings-defined templates and toggle
|
||||
camera import and stroke preservation before the project is created.
|
||||
"""
|
||||
def __init__(self, project_templates):
|
||||
super(SubstanceProjectConfigurationWindow, self).__init__()
|
||||
self.setWindowFlags(self.windowFlags() | QtCore.Qt.FramelessWindowHint)
|
||||
|
||||
self.configuration = None
|
||||
self.template_names = [template["name"] for template
|
||||
in project_templates]
|
||||
self.project_templates = project_templates
|
||||
|
||||
self.widgets = {
|
||||
"label": QtWidgets.QLabel(
|
||||
"Select your template for project configuration"),
|
||||
"template_options": QtWidgets.QComboBox(),
|
||||
"import_cameras": QtWidgets.QCheckBox("Import Cameras"),
|
||||
"preserve_strokes": QtWidgets.QCheckBox("Preserve Strokes"),
|
||||
"clickbox": QtWidgets.QWidget(),
|
||||
"combobox": QtWidgets.QWidget(),
|
||||
"buttons": QtWidgets.QDialogButtonBox(
|
||||
QtWidgets.QDialogButtonBox.Ok
|
||||
| QtWidgets.QDialogButtonBox.Cancel)
|
||||
}
|
||||
|
||||
self.widgets["template_options"].addItems(self.template_names)
|
||||
|
||||
template_name = self.widgets["template_options"].currentText()
|
||||
self._update_to_match_template(template_name)
|
||||
# Build clickboxes
|
||||
layout = QtWidgets.QHBoxLayout(self.widgets["clickbox"])
|
||||
layout.addWidget(self.widgets["import_cameras"])
|
||||
layout.addWidget(self.widgets["preserve_strokes"])
|
||||
# Build combobox
|
||||
layout = QtWidgets.QHBoxLayout(self.widgets["combobox"])
|
||||
layout.addWidget(self.widgets["template_options"])
|
||||
# Build buttons
|
||||
layout = QtWidgets.QHBoxLayout(self.widgets["buttons"])
|
||||
# Build layout.
|
||||
layout = QtWidgets.QVBoxLayout(self)
|
||||
layout.addWidget(self.widgets["label"])
|
||||
layout.addWidget(self.widgets["combobox"])
|
||||
layout.addWidget(self.widgets["clickbox"])
|
||||
layout.addWidget(self.widgets["buttons"])
|
||||
|
||||
self.widgets["template_options"].currentTextChanged.connect(
|
||||
self._update_to_match_template)
|
||||
self.widgets["buttons"].accepted.connect(self.on_accept)
|
||||
self.widgets["buttons"].rejected.connect(self.on_reject)
|
||||
|
||||
def on_accept(self):
|
||||
self.configuration = self.get_project_configuration()
|
||||
self.close()
|
||||
|
||||
def on_reject(self):
|
||||
self.close()
|
||||
|
||||
def _update_to_match_template(self, template_name):
|
||||
template = get_template_by_name(template_name, self.project_templates)
|
||||
self.widgets["import_cameras"].setChecked(template["import_cameras"])
|
||||
self.widgets["preserve_strokes"].setChecked(
|
||||
template["preserve_strokes"])
|
||||
|
||||
def get_project_configuration(self):
|
||||
templates = self.project_templates
|
||||
template_name = self.widgets["template_options"].currentText()
|
||||
template = get_template_by_name(template_name, templates)
|
||||
template = copy.deepcopy(template) # do not edit the original
|
||||
template["import_cameras"] = self.widgets["import_cameras"].isChecked()
|
||||
template["preserve_strokes"] = (
|
||||
self.widgets["preserve_strokes"].isChecked()
|
||||
)
|
||||
for key in ["normal_map_format",
|
||||
"project_workflow",
|
||||
"tangent_space_mode"]:
|
||||
template[key] = _convert(template[key])
|
||||
return template
|
||||
|
||||
@classmethod
|
||||
def prompt(cls, templates):
|
||||
dialog = cls(templates)
|
||||
dialog.exec_()
|
||||
configuration = dialog.configuration
|
||||
dialog.deleteLater()
|
||||
return configuration
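A minimal sketch of how the dialog is used; `project_templates` is assumed to come from the addon settings (see DEFAULT_LOADER_SETTINGS further below) and the prompt returns None when the user cancels:
configuration = SubstanceProjectConfigurationWindow.prompt(project_templates)
if configuration is None:
    pass  # user cancelled; the loader returns without creating a project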
|
||||
|
||||
|
||||
class SubstanceLoadProjectMesh(load.LoaderPlugin):
|
||||
|
|
@@ -25,48 +150,35 @@ class SubstanceLoadProjectMesh(load.LoaderPlugin):
|
|||
icon = "code-fork"
|
||||
color = "orange"
|
||||
|
||||
options = [
|
||||
qargparse.Boolean(
|
||||
"preserve_strokes",
|
||||
default=True,
|
||||
help="Preserve strokes positions on mesh.\n"
|
||||
"(only relevant when loading into existing project)"
|
||||
),
|
||||
qargparse.Boolean(
|
||||
"import_cameras",
|
||||
default=True,
|
||||
help="Import cameras from the mesh file."
|
||||
)
|
||||
]
|
||||
# Defined via settings
|
||||
project_templates = []
|
||||
|
||||
def load(self, context, name, namespace, data):
|
||||
def load(self, context, name, namespace, options=None):
|
||||
|
||||
# Get user inputs
|
||||
import_cameras = data.get("import_cameras", True)
|
||||
preserve_strokes = data.get("preserve_strokes", True)
|
||||
sp_settings = substance_painter.project.Settings(
|
||||
import_cameras=import_cameras
|
||||
)
|
||||
result = SubstanceProjectConfigurationWindow.prompt(
|
||||
self.project_templates)
|
||||
if not result:
|
||||
# cancelling loader action
|
||||
return
|
||||
if not substance_painter.project.is_open():
|
||||
# Allow to 'initialize' a new project
|
||||
path = self.filepath_from_context(context)
|
||||
# TODO: improve the prompt dialog function so it does not
|
||||
# only work for simple polygon scenes
|
||||
result = prompt_new_file_with_mesh(mesh_filepath=path)
|
||||
if not result:
|
||||
self.log.info("User cancelled new project prompt."
|
||||
"Creating new project directly from"
|
||||
" Substance Painter API Instead.")
|
||||
settings = substance_painter.project.create(
|
||||
mesh_file_path=path, settings=sp_settings
|
||||
)
|
||||
|
||||
sp_settings = substance_painter.project.Settings(
|
||||
import_cameras=result["import_cameras"],
|
||||
normal_map_format=result["normal_map_format"],
|
||||
project_workflow=result["project_workflow"],
|
||||
tangent_space_mode=result["tangent_space_mode"],
|
||||
default_texture_resolution=result["default_texture_resolution"]
|
||||
)
|
||||
settings = substance_painter.project.create(
|
||||
mesh_file_path=path, settings=sp_settings
|
||||
)
|
||||
else:
|
||||
# Reload the mesh
|
||||
settings = substance_painter.project.MeshReloadingSettings(
|
||||
import_cameras=import_cameras,
|
||||
preserve_strokes=preserve_strokes
|
||||
)
|
||||
import_cameras=result["import_cameras"],
|
||||
preserve_strokes=result["preserve_strokes"])
|
||||
|
||||
def on_mesh_reload(status: substance_painter.project.ReloadMeshStatus): # noqa
|
||||
if status == substance_painter.project.ReloadMeshStatus.SUCCESS: # noqa
|
||||
|
|
@@ -92,7 +204,7 @@ class SubstanceLoadProjectMesh(load.LoaderPlugin):
|
|||
# from the user's original choice. We don't store 'preserve_strokes'
|
||||
# as we always preserve strokes on updates.
|
||||
container["options"] = {
|
||||
"import_cameras": import_cameras,
|
||||
"import_cameras": result["import_cameras"],
|
||||
}
|
||||
|
||||
set_container_metadata(project_mesh_object_name, container)
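For illustration, one project template entry as the loader receives it from the addon settings; the values mirror DEFAULT_LOADER_SETTINGS defined further below:
example_template = {
    "name": "2K(Default)",
    "default_texture_resolution": 2048,
    "import_cameras": True,
    "normal_map_format": "NormalMapFormat.DirectX",
    "project_workflow": "ProjectWorkflow.Default",
    "tangent_space_mode": "TangentSpace.PerFragment",
    "preserve_strokes": True,
}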
|
||||
|
|
|
|||
|
|
@@ -1,501 +1,426 @@
|
|||
# TODO This plugin is not converted for AYON
|
||||
#
|
||||
# import collections
|
||||
# import os
|
||||
# import uuid
|
||||
#
|
||||
# import clique
|
||||
# import ayon_api
|
||||
# from pymongo import UpdateOne
|
||||
# import qargparse
|
||||
# from qtpy import QtWidgets, QtCore
|
||||
#
|
||||
# from ayon_core import style
|
||||
# from ayon_core.addon import AddonsManager
|
||||
# from ayon_core.lib import format_file_size
|
||||
# from ayon_core.pipeline import load, Anatomy
|
||||
# from ayon_core.pipeline.load import (
|
||||
# get_representation_path_with_anatomy,
|
||||
# InvalidRepresentationContext,
|
||||
# )
|
||||
#
|
||||
#
|
||||
# class DeleteOldVersions(load.ProductLoaderPlugin):
|
||||
# """Deletes specific number of old version"""
|
||||
#
|
||||
# is_multiple_contexts_compatible = True
|
||||
# sequence_splitter = "__sequence_splitter__"
|
||||
#
|
||||
# representations = {"*"}
|
||||
# product_types = {"*"}
|
||||
# tool_names = ["library_loader"]
|
||||
#
|
||||
# label = "Delete Old Versions"
|
||||
# order = 35
|
||||
# icon = "trash"
|
||||
# color = "#d8d8d8"
|
||||
#
|
||||
# options = [
|
||||
# qargparse.Integer(
|
||||
# "versions_to_keep", default=2, min=0, help="Versions to keep:"
|
||||
# ),
|
||||
# qargparse.Boolean(
|
||||
# "remove_publish_folder", help="Remove publish folder:"
|
||||
# )
|
||||
# ]
|
||||
#
|
||||
# def delete_whole_dir_paths(self, dir_paths, delete=True):
|
||||
# size = 0
|
||||
#
|
||||
# for dir_path in dir_paths:
|
||||
# # Delete all files and fodlers in dir path
|
||||
# for root, dirs, files in os.walk(dir_path, topdown=False):
|
||||
# for name in files:
|
||||
# file_path = os.path.join(root, name)
|
||||
# size += os.path.getsize(file_path)
|
||||
# if delete:
|
||||
# os.remove(file_path)
|
||||
# self.log.debug("Removed file: {}".format(file_path))
|
||||
#
|
||||
# for name in dirs:
|
||||
# if delete:
|
||||
# os.rmdir(os.path.join(root, name))
|
||||
#
|
||||
# if not delete:
|
||||
# continue
|
||||
#
|
||||
# # Delete even the folder and it's parents folders if they are empty
|
||||
# while True:
|
||||
# if not os.path.exists(dir_path):
|
||||
# dir_path = os.path.dirname(dir_path)
|
||||
# continue
|
||||
#
|
||||
# if len(os.listdir(dir_path)) != 0:
|
||||
# break
|
||||
#
|
||||
# os.rmdir(os.path.join(dir_path))
|
||||
#
|
||||
# return size
|
||||
#
|
||||
# def path_from_representation(self, representation, anatomy):
|
||||
# try:
|
||||
# context = representation["context"]
|
||||
# except KeyError:
|
||||
# return (None, None)
|
||||
#
|
||||
# try:
|
||||
# path = get_representation_path_with_anatomy(
|
||||
# representation, anatomy
|
||||
# )
|
||||
# except InvalidRepresentationContext:
|
||||
# return (None, None)
|
||||
#
|
||||
# sequence_path = None
|
||||
# if "frame" in context:
|
||||
# context["frame"] = self.sequence_splitter
|
||||
# sequence_path = get_representation_path_with_anatomy(
|
||||
# representation, anatomy
|
||||
# )
|
||||
#
|
||||
# if sequence_path:
|
||||
# sequence_path = sequence_path.normalized()
|
||||
#
|
||||
# return (path.normalized(), sequence_path)
|
||||
#
|
||||
# def delete_only_repre_files(self, dir_paths, file_paths, delete=True):
|
||||
# size = 0
|
||||
#
|
||||
# for dir_id, dir_path in dir_paths.items():
|
||||
# dir_files = os.listdir(dir_path)
|
||||
# collections, remainders = clique.assemble(dir_files)
|
||||
# for file_path, seq_path in file_paths[dir_id]:
|
||||
# file_path_base = os.path.split(file_path)[1]
|
||||
# # Just remove file if `frame` key was not in context or
|
||||
# # filled path is in remainders (single file sequence)
|
||||
# if not seq_path or file_path_base in remainders:
|
||||
# if not os.path.exists(file_path):
|
||||
# self.log.debug(
|
||||
# "File was not found: {}".format(file_path)
|
||||
# )
|
||||
# continue
|
||||
#
|
||||
# size += os.path.getsize(file_path)
|
||||
#
|
||||
# if delete:
|
||||
# os.remove(file_path)
|
||||
# self.log.debug("Removed file: {}".format(file_path))
|
||||
#
|
||||
# if file_path_base in remainders:
|
||||
# remainders.remove(file_path_base)
|
||||
# continue
|
||||
#
|
||||
# seq_path_base = os.path.split(seq_path)[1]
|
||||
# head, tail = seq_path_base.split(self.sequence_splitter)
|
||||
#
|
||||
# final_col = None
|
||||
# for collection in collections:
|
||||
# if head != collection.head or tail != collection.tail:
|
||||
# continue
|
||||
# final_col = collection
|
||||
# break
|
||||
#
|
||||
# if final_col is not None:
|
||||
# # Fill full path to head
|
||||
# final_col.head = os.path.join(dir_path, final_col.head)
|
||||
# for _file_path in final_col:
|
||||
# if os.path.exists(_file_path):
|
||||
#
|
||||
# size += os.path.getsize(_file_path)
|
||||
#
|
||||
# if delete:
|
||||
# os.remove(_file_path)
|
||||
# self.log.debug(
|
||||
# "Removed file: {}".format(_file_path)
|
||||
# )
|
||||
#
|
||||
# _seq_path = final_col.format("{head}{padding}{tail}")
|
||||
# self.log.debug("Removed files: {}".format(_seq_path))
|
||||
# collections.remove(final_col)
|
||||
#
|
||||
# elif os.path.exists(file_path):
|
||||
# size += os.path.getsize(file_path)
|
||||
#
|
||||
# if delete:
|
||||
# os.remove(file_path)
|
||||
# self.log.debug("Removed file: {}".format(file_path))
|
||||
# else:
|
||||
# self.log.debug(
|
||||
# "File was not found: {}".format(file_path)
|
||||
# )
|
||||
#
|
||||
# # Delete as much as possible parent folders
|
||||
# if not delete:
|
||||
# return size
|
||||
#
|
||||
# for dir_path in dir_paths.values():
|
||||
# while True:
|
||||
# if not os.path.exists(dir_path):
|
||||
# dir_path = os.path.dirname(dir_path)
|
||||
# continue
|
||||
#
|
||||
# if len(os.listdir(dir_path)) != 0:
|
||||
# break
|
||||
#
|
||||
# self.log.debug("Removed folder: {}".format(dir_path))
|
||||
# os.rmdir(dir_path)
|
||||
#
|
||||
# return size
|
||||
#
|
||||
# def message(self, text):
|
||||
# msgBox = QtWidgets.QMessageBox()
|
||||
# msgBox.setText(text)
|
||||
# msgBox.setStyleSheet(style.load_stylesheet())
|
||||
# msgBox.setWindowFlags(
|
||||
# msgBox.windowFlags() | QtCore.Qt.FramelessWindowHint
|
||||
# )
|
||||
# msgBox.exec_()
|
||||
#
|
||||
# def get_data(self, context, versions_count):
|
||||
# product_entity = context["product"]
|
||||
# folder_entity = context["folder"]
|
||||
# project_name = context["project"]["name"]
|
||||
# anatomy = Anatomy(project_name)
|
||||
#
|
||||
# versions = list(ayon_api.get_versions(
|
||||
# project_name, product_ids=[product_entity["id"]]
|
||||
# ))
|
||||
#
|
||||
# versions_by_parent = collections.defaultdict(list)
|
||||
# for ent in versions:
|
||||
# versions_by_parent[ent["productId"]].append(ent)
|
||||
#
|
||||
# def sort_func(ent):
|
||||
# return int(ent["version"])
|
||||
#
|
||||
# all_last_versions = []
|
||||
# for _parent_id, _versions in versions_by_parent.items():
|
||||
# for idx, version in enumerate(
|
||||
# sorted(_versions, key=sort_func, reverse=True)
|
||||
# ):
|
||||
# if idx >= versions_count:
|
||||
# break
|
||||
# all_last_versions.append(version)
|
||||
#
|
||||
# self.log.debug("Collected versions ({})".format(len(versions)))
|
||||
#
|
||||
# # Filter latest versions
|
||||
# for version in all_last_versions:
|
||||
# versions.remove(version)
|
||||
#
|
||||
# # Update versions_by_parent without filtered versions
|
||||
# versions_by_parent = collections.defaultdict(list)
|
||||
# for ent in versions:
|
||||
# versions_by_parent[ent["productId"]].append(ent)
|
||||
#
|
||||
# # Filter already deleted versions
|
||||
# versions_to_pop = []
|
||||
# for version in versions:
|
||||
# version_tags = version["data"].get("tags")
|
||||
# if version_tags and "deleted" in version_tags:
|
||||
# versions_to_pop.append(version)
|
||||
#
|
||||
# for version in versions_to_pop:
|
||||
# msg = "Folder: \"{}\" | Product: \"{}\" | Version: \"{}\"".format(
|
||||
# folder_entity["path"],
|
||||
# product_entity["name"],
|
||||
# version["version"]
|
||||
# )
|
||||
# self.log.debug((
|
||||
# "Skipping version. Already tagged as `deleted`. < {} >"
|
||||
# ).format(msg))
|
||||
# versions.remove(version)
|
||||
#
|
||||
# version_ids = [ent["id"] for ent in versions]
|
||||
#
|
||||
# self.log.debug(
|
||||
# "Filtered versions to delete ({})".format(len(version_ids))
|
||||
# )
|
||||
#
|
||||
# if not version_ids:
|
||||
# msg = "Skipping processing. Nothing to delete on {}/{}".format(
|
||||
# folder_entity["path"], product_entity["name"]
|
||||
# )
|
||||
# self.log.info(msg)
|
||||
# print(msg)
|
||||
# return
|
||||
#
|
||||
# repres = list(ayon_api.get_representations(
|
||||
# project_name, version_ids=version_ids
|
||||
# ))
|
||||
#
|
||||
# self.log.debug(
|
||||
# "Collected representations to remove ({})".format(len(repres))
|
||||
# )
|
||||
#
|
||||
# dir_paths = {}
|
||||
# file_paths_by_dir = collections.defaultdict(list)
|
||||
# for repre in repres:
|
||||
# file_path, seq_path = self.path_from_representation(
|
||||
# repre, anatomy
|
||||
# )
|
||||
# if file_path is None:
|
||||
# self.log.debug((
|
||||
# "Could not format path for represenation \"{}\""
|
||||
# ).format(str(repre)))
|
||||
# continue
|
||||
#
|
||||
# dir_path = os.path.dirname(file_path)
|
||||
# dir_id = None
|
||||
# for _dir_id, _dir_path in dir_paths.items():
|
||||
# if _dir_path == dir_path:
|
||||
# dir_id = _dir_id
|
||||
# break
|
||||
#
|
||||
# if dir_id is None:
|
||||
# dir_id = uuid.uuid4()
|
||||
# dir_paths[dir_id] = dir_path
|
||||
#
|
||||
# file_paths_by_dir[dir_id].append([file_path, seq_path])
|
||||
#
|
||||
# dir_ids_to_pop = []
|
||||
# for dir_id, dir_path in dir_paths.items():
|
||||
# if os.path.exists(dir_path):
|
||||
# continue
|
||||
#
|
||||
# dir_ids_to_pop.append(dir_id)
|
||||
#
|
||||
# # Pop dirs from both dictionaries
|
||||
# for dir_id in dir_ids_to_pop:
|
||||
# dir_paths.pop(dir_id)
|
||||
# paths = file_paths_by_dir.pop(dir_id)
|
||||
# # TODO report of missing directories?
|
||||
# paths_msg = ", ".join([
|
||||
# "'{}'".format(path[0].replace("\\", "/")) for path in paths
|
||||
# ])
|
||||
# self.log.debug((
|
||||
# "Folder does not exist. Deleting it's files skipped: {}"
|
||||
# ).format(paths_msg))
|
||||
#
|
||||
# return {
|
||||
# "dir_paths": dir_paths,
|
||||
# "file_paths_by_dir": file_paths_by_dir,
|
||||
# "versions": versions,
|
||||
# "folder": folder_entity,
|
||||
# "product": product_entity,
|
||||
# "archive_product": versions_count == 0
|
||||
# }
|
||||
#
|
||||
# def main(self, project_name, data, remove_publish_folder):
|
||||
# # Size of files.
|
||||
# size = 0
|
||||
# if not data:
|
||||
# return size
|
||||
#
|
||||
# if remove_publish_folder:
|
||||
# size = self.delete_whole_dir_paths(data["dir_paths"].values())
|
||||
# else:
|
||||
# size = self.delete_only_repre_files(
|
||||
# data["dir_paths"], data["file_paths_by_dir"]
|
||||
# )
|
||||
#
|
||||
# mongo_changes_bulk = []
|
||||
# for version in data["versions"]:
|
||||
# orig_version_tags = version["data"].get("tags") or []
|
||||
# version_tags = [tag for tag in orig_version_tags]
|
||||
# if "deleted" not in version_tags:
|
||||
# version_tags.append("deleted")
|
||||
#
|
||||
# if version_tags == orig_version_tags:
|
||||
# continue
|
||||
#
|
||||
# update_query = {"id": version["id"]}
|
||||
# update_data = {"$set": {"data.tags": version_tags}}
|
||||
# mongo_changes_bulk.append(UpdateOne(update_query, update_data))
|
||||
#
|
||||
# if data["archive_product"]:
|
||||
# mongo_changes_bulk.append(UpdateOne(
|
||||
# {
|
||||
# "id": data["product"]["id"],
|
||||
# "type": "subset"
|
||||
# },
|
||||
# {"$set": {"type": "archived_subset"}}
|
||||
# ))
|
||||
#
|
||||
# if mongo_changes_bulk:
|
||||
# dbcon = AvalonMongoDB()
|
||||
# dbcon.Session["AYON_PROJECT_NAME"] = project_name
|
||||
# dbcon.install()
|
||||
# dbcon.bulk_write(mongo_changes_bulk)
|
||||
# dbcon.uninstall()
|
||||
#
|
||||
# self._ftrack_delete_versions(data)
|
||||
#
|
||||
# return size
|
||||
#
|
||||
# def _ftrack_delete_versions(self, data):
|
||||
# """Delete version on ftrack.
|
||||
#
|
||||
# Handling of ftrack logic in this plugin is not ideal. But in OP3 it is
|
||||
# almost impossible to solve the issue other way.
|
||||
#
|
||||
# Note:
|
||||
# Asset versions on ftrack are not deleted but marked as
|
||||
# "not published" which cause that they're invisible.
|
||||
#
|
||||
# Args:
|
||||
# data (dict): Data sent to product loader with full context.
|
||||
# """
|
||||
#
|
||||
# # First check for ftrack id on folder entity
|
||||
# # - skip if ther is none
|
||||
# ftrack_id = data["folder"]["attrib"].get("ftrackId")
|
||||
# if not ftrack_id:
|
||||
# self.log.info((
|
||||
# "Folder does not have filled ftrack id. Skipped delete"
|
||||
# " of ftrack version."
|
||||
# ))
|
||||
# return
|
||||
#
|
||||
# # Check if ftrack module is enabled
|
||||
# addons_manager = AddonsManager()
|
||||
# ftrack_addon = addons_manager.get("ftrack")
|
||||
# if not ftrack_addon or not ftrack_addon.enabled:
|
||||
# return
|
||||
#
|
||||
# import ftrack_api
|
||||
#
|
||||
# session = ftrack_api.Session()
|
||||
# product_name = data["product"]["name"]
|
||||
# versions = {
|
||||
# '"{}"'.format(version_doc["name"])
|
||||
# for version_doc in data["versions"]
|
||||
# }
|
||||
# asset_versions = session.query(
|
||||
# (
|
||||
# "select id, is_published from AssetVersion where"
|
||||
# " asset.parent.id is \"{}\""
|
||||
# " and asset.name is \"{}\""
|
||||
# " and version in ({})"
|
||||
# ).format(
|
||||
# ftrack_id,
|
||||
# product_name,
|
||||
# ",".join(versions)
|
||||
# )
|
||||
# ).all()
|
||||
#
|
||||
# # Set attribute `is_published` to `False` on ftrack AssetVersions
|
||||
# for asset_version in asset_versions:
|
||||
# asset_version["is_published"] = False
|
||||
#
|
||||
# try:
|
||||
# session.commit()
|
||||
#
|
||||
# except Exception:
|
||||
# msg = (
|
||||
# "Could not set `is_published` attribute to `False`"
|
||||
# " for selected AssetVersions."
|
||||
# )
|
||||
# self.log.error(msg)
|
||||
# self.message(msg)
|
||||
#
|
||||
# def load(self, contexts, name=None, namespace=None, options=None):
|
||||
# try:
|
||||
# size = 0
|
||||
# for count, context in enumerate(contexts):
|
||||
# versions_to_keep = 2
|
||||
# remove_publish_folder = False
|
||||
# if options:
|
||||
# versions_to_keep = options.get(
|
||||
# "versions_to_keep", versions_to_keep
|
||||
# )
|
||||
# remove_publish_folder = options.get(
|
||||
# "remove_publish_folder", remove_publish_folder
|
||||
# )
|
||||
#
|
||||
# data = self.get_data(context, versions_to_keep)
|
||||
# if not data:
|
||||
# continue
|
||||
#
|
||||
# project_name = context["project"]["name"]
|
||||
# size += self.main(project_name, data, remove_publish_folder)
|
||||
# print("Progressing {}/{}".format(count + 1, len(contexts)))
|
||||
#
|
||||
# msg = "Total size of files: {}".format(format_file_size(size))
|
||||
# self.log.info(msg)
|
||||
# self.message(msg)
|
||||
#
|
||||
# except Exception:
|
||||
# self.log.error("Failed to delete versions.", exc_info=True)
|
||||
#
|
||||
#
|
||||
# class CalculateOldVersions(DeleteOldVersions):
|
||||
# """Calculate file size of old versions"""
|
||||
# label = "Calculate Old Versions"
|
||||
# order = 30
|
||||
# tool_names = ["library_loader"]
|
||||
#
|
||||
# options = [
|
||||
# qargparse.Integer(
|
||||
# "versions_to_keep", default=2, min=0, help="Versions to keep:"
|
||||
# ),
|
||||
# qargparse.Boolean(
|
||||
# "remove_publish_folder", help="Remove publish folder:"
|
||||
# )
|
||||
# ]
|
||||
#
|
||||
# def main(self, project_name, data, remove_publish_folder):
|
||||
# size = 0
|
||||
#
|
||||
# if not data:
|
||||
# return size
|
||||
#
|
||||
# if remove_publish_folder:
|
||||
# size = self.delete_whole_dir_paths(
|
||||
# data["dir_paths"].values(), delete=False
|
||||
# )
|
||||
# else:
|
||||
# size = self.delete_only_repre_files(
|
||||
# data["dir_paths"], data["file_paths_by_dir"], delete=False
|
||||
# )
|
||||
#
|
||||
# return size
|
||||
import collections
|
||||
import os
|
||||
import uuid
|
||||
|
||||
import clique
|
||||
import ayon_api
|
||||
from ayon_api.operations import OperationsSession
|
||||
import qargparse
|
||||
from qtpy import QtWidgets, QtCore
|
||||
|
||||
from ayon_core import style
|
||||
from ayon_core.lib import format_file_size
|
||||
from ayon_core.pipeline import load, Anatomy
|
||||
from ayon_core.pipeline.load import (
|
||||
get_representation_path_with_anatomy,
|
||||
InvalidRepresentationContext,
|
||||
)
|
||||
|
||||
|
||||
class DeleteOldVersions(load.ProductLoaderPlugin):
|
||||
"""Deletes specific number of old version"""
|
||||
|
||||
is_multiple_contexts_compatible = True
|
||||
sequence_splitter = "__sequence_splitter__"
|
||||
|
||||
representations = ["*"]
|
||||
product_types = {"*"}
|
||||
tool_names = ["library_loader"]
|
||||
|
||||
label = "Delete Old Versions"
|
||||
order = 35
|
||||
icon = "trash"
|
||||
color = "#d8d8d8"
|
||||
|
||||
options = [
|
||||
qargparse.Integer(
|
||||
"versions_to_keep", default=2, min=0, help="Versions to keep:"
|
||||
),
|
||||
qargparse.Boolean(
|
||||
"remove_publish_folder", help="Remove publish folder:"
|
||||
)
|
||||
]
|
||||
|
||||
def delete_whole_dir_paths(self, dir_paths, delete=True):
|
||||
size = 0
|
||||
|
||||
for dir_path in dir_paths:
|
||||
# Delete all files and folders in dir path
|
||||
for root, dirs, files in os.walk(dir_path, topdown=False):
|
||||
for name in files:
|
||||
file_path = os.path.join(root, name)
|
||||
size += os.path.getsize(file_path)
|
||||
if delete:
|
||||
os.remove(file_path)
|
||||
self.log.debug("Removed file: {}".format(file_path))
|
||||
|
||||
for name in dirs:
|
||||
if delete:
|
||||
os.rmdir(os.path.join(root, name))
|
||||
|
||||
if not delete:
|
||||
continue
|
||||
|
||||
# Delete even the folder and its parent folders if they are empty
|
||||
while True:
|
||||
if not os.path.exists(dir_path):
|
||||
dir_path = os.path.dirname(dir_path)
|
||||
continue
|
||||
|
||||
if len(os.listdir(dir_path)) != 0:
|
||||
break
|
||||
|
||||
os.rmdir(os.path.join(dir_path))
|
||||
|
||||
return size
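A minimal sketch of the bottom-up removal this method relies on, using a throwaway temporary tree: with topdown=False, os.walk yields leaf directories first, so os.rmdir() only ever sees empty folders.
import os
import tempfile

root = tempfile.mkdtemp()
os.makedirs(os.path.join(root, "a", "b"))
for walk_root, dir_names, _file_names in os.walk(root, topdown=False):
    for dir_name in dir_names:
        os.rmdir(os.path.join(walk_root, dir_name))  # children already removed
os.rmdir(root)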
|
||||
|
||||
def path_from_representation(self, representation, anatomy):
|
||||
try:
|
||||
context = representation["context"]
|
||||
except KeyError:
|
||||
return (None, None)
|
||||
|
||||
try:
|
||||
path = get_representation_path_with_anatomy(
|
||||
representation, anatomy
|
||||
)
|
||||
except InvalidRepresentationContext:
|
||||
return (None, None)
|
||||
|
||||
sequence_path = None
|
||||
if "frame" in context:
|
||||
context["frame"] = self.sequence_splitter
|
||||
sequence_path = get_representation_path_with_anatomy(
|
||||
representation, anatomy
|
||||
)
|
||||
|
||||
if sequence_path:
|
||||
sequence_path = sequence_path.normalized()
|
||||
|
||||
return (path.normalized(), sequence_path)
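A sketch of what this returns for a sequence, using a hypothetical path: the splitter token injected into the 'frame' key is later split off to recover the head/tail that clique collections are matched against.
seq_path = "/projects/demo/render.__sequence_splitter__.exr"  # hypothetical
head, tail = os.path.split(seq_path)[1].split("__sequence_splitter__")
# head == "render.", tail == ".exr"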
|
||||
|
||||
def delete_only_repre_files(self, dir_paths, file_paths, delete=True):
|
||||
size = 0
|
||||
|
||||
for dir_id, dir_path in dir_paths.items():
|
||||
dir_files = os.listdir(dir_path)
|
||||
collections, remainders = clique.assemble(dir_files)
|
||||
for file_path, seq_path in file_paths[dir_id]:
|
||||
file_path_base = os.path.split(file_path)[1]
|
||||
# Just remove file if `frame` key was not in context or
|
||||
# filled path is in remainders (single file sequence)
|
||||
if not seq_path or file_path_base in remainders:
|
||||
if not os.path.exists(file_path):
|
||||
self.log.debug(
|
||||
"File was not found: {}".format(file_path)
|
||||
)
|
||||
continue
|
||||
|
||||
size += os.path.getsize(file_path)
|
||||
|
||||
if delete:
|
||||
os.remove(file_path)
|
||||
self.log.debug("Removed file: {}".format(file_path))
|
||||
|
||||
if file_path_base in remainders:
|
||||
remainders.remove(file_path_base)
|
||||
continue
|
||||
|
||||
seq_path_base = os.path.split(seq_path)[1]
|
||||
head, tail = seq_path_base.split(self.sequence_splitter)
|
||||
|
||||
final_col = None
|
||||
for collection in collections:
|
||||
if head != collection.head or tail != collection.tail:
|
||||
continue
|
||||
final_col = collection
|
||||
break
|
||||
|
||||
if final_col is not None:
|
||||
# Fill full path to head
|
||||
final_col.head = os.path.join(dir_path, final_col.head)
|
||||
for _file_path in final_col:
|
||||
if os.path.exists(_file_path):
|
||||
|
||||
size += os.path.getsize(_file_path)
|
||||
|
||||
if delete:
|
||||
os.remove(_file_path)
|
||||
self.log.debug(
|
||||
"Removed file: {}".format(_file_path)
|
||||
)
|
||||
|
||||
_seq_path = final_col.format("{head}{padding}{tail}")
|
||||
self.log.debug("Removed files: {}".format(_seq_path))
|
||||
collections.remove(final_col)
|
||||
|
||||
elif os.path.exists(file_path):
|
||||
size += os.path.getsize(file_path)
|
||||
|
||||
if delete:
|
||||
os.remove(file_path)
|
||||
self.log.debug("Removed file: {}".format(file_path))
|
||||
else:
|
||||
self.log.debug(
|
||||
"File was not found: {}".format(file_path)
|
||||
)
|
||||
|
||||
# Delete as much as possible parent folders
|
||||
if not delete:
|
||||
return size
|
||||
|
||||
for dir_path in dir_paths.values():
|
||||
while True:
|
||||
if not os.path.exists(dir_path):
|
||||
dir_path = os.path.dirname(dir_path)
|
||||
continue
|
||||
|
||||
if len(os.listdir(dir_path)) != 0:
|
||||
break
|
||||
|
||||
self.log.debug("Removed folder: {}".format(dir_path))
|
||||
os.rmdir(dir_path)
|
||||
|
||||
return size
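For reference, a tiny clique example with made-up file names showing the collections/remainders split that the matching above relies on:
import clique

collections, remainders = clique.assemble(
    ["render.1001.exr", "render.1002.exr", "thumbnail.png"]
)
# collections[0].head == "render.", collections[0].tail == ".exr"
# remainders == ["thumbnail.png"]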
|
||||
|
||||
def message(self, text):
|
||||
msgBox = QtWidgets.QMessageBox()
|
||||
msgBox.setText(text)
|
||||
msgBox.setStyleSheet(style.load_stylesheet())
|
||||
msgBox.setWindowFlags(
|
||||
msgBox.windowFlags() | QtCore.Qt.FramelessWindowHint
|
||||
)
|
||||
msgBox.exec_()
|
||||
|
||||
def get_data(self, context, versions_count):
|
||||
product_entity = context["product"]
|
||||
folder_entity = context["folder"]
|
||||
project_name = context["project"]["name"]
|
||||
anatomy = Anatomy(project_name, project_entity=context["project"])
|
||||
|
||||
version_fields = ayon_api.get_default_fields_for_type("version")
|
||||
version_fields.add("tags")
|
||||
versions = list(ayon_api.get_versions(
|
||||
project_name,
|
||||
product_ids=[product_entity["id"]],
|
||||
active=None,
|
||||
hero=False,
|
||||
fields=version_fields
|
||||
))
|
||||
self.log.debug(
|
||||
"Version Number ({})".format(len(versions))
|
||||
)
|
||||
versions_by_parent = collections.defaultdict(list)
|
||||
for ent in versions:
|
||||
versions_by_parent[ent["productId"]].append(ent)
|
||||
|
||||
def sort_func(ent):
|
||||
return int(ent["version"])
|
||||
|
||||
all_last_versions = []
|
||||
for _parent_id, _versions in versions_by_parent.items():
|
||||
for idx, version in enumerate(
|
||||
sorted(_versions, key=sort_func, reverse=True)
|
||||
):
|
||||
if idx >= versions_count:
|
||||
break
|
||||
all_last_versions.append(version)
|
||||
|
||||
self.log.debug("Collected versions ({})".format(len(versions)))
|
||||
|
||||
# Filter latest versions
|
||||
for version in all_last_versions:
|
||||
versions.remove(version)
|
||||
|
||||
# Update versions_by_parent without filtered versions
|
||||
versions_by_parent = collections.defaultdict(list)
|
||||
for ent in versions:
|
||||
versions_by_parent[ent["productId"]].append(ent)
|
||||
|
||||
# Filter already deleted versions
|
||||
versions_to_pop = []
|
||||
for version in versions:
|
||||
if "deleted" in version["tags"]:
|
||||
versions_to_pop.append(version)
|
||||
|
||||
for version in versions_to_pop:
|
||||
msg = "Folder: \"{}\" | Product: \"{}\" | Version: \"{}\"".format(
|
||||
folder_entity["path"],
|
||||
product_entity["name"],
|
||||
version["version"]
|
||||
)
|
||||
self.log.debug((
|
||||
"Skipping version. Already tagged as inactive. < {} >"
|
||||
).format(msg))
|
||||
versions.remove(version)
|
||||
|
||||
version_ids = [ent["id"] for ent in versions]
|
||||
|
||||
self.log.debug(
|
||||
"Filtered versions to delete ({})".format(len(version_ids))
|
||||
)
|
||||
|
||||
if not version_ids:
|
||||
msg = "Skipping processing. Nothing to delete on {}/{}".format(
|
||||
folder_entity["path"], product_entity["name"]
|
||||
)
|
||||
self.log.info(msg)
|
||||
print(msg)
|
||||
return
|
||||
|
||||
repres = list(ayon_api.get_representations(
|
||||
project_name, version_ids=version_ids
|
||||
))
|
||||
|
||||
self.log.debug(
|
||||
"Collected representations to remove ({})".format(len(repres))
|
||||
)
|
||||
|
||||
dir_paths = {}
|
||||
file_paths_by_dir = collections.defaultdict(list)
|
||||
for repre in repres:
|
||||
file_path, seq_path = self.path_from_representation(
|
||||
repre, anatomy
|
||||
)
|
||||
if file_path is None:
|
||||
self.log.debug((
|
||||
"Could not format path for represenation \"{}\""
|
||||
).format(str(repre)))
|
||||
continue
|
||||
|
||||
dir_path = os.path.dirname(file_path)
|
||||
dir_id = None
|
||||
for _dir_id, _dir_path in dir_paths.items():
|
||||
if _dir_path == dir_path:
|
||||
dir_id = _dir_id
|
||||
break
|
||||
|
||||
if dir_id is None:
|
||||
dir_id = uuid.uuid4()
|
||||
dir_paths[dir_id] = dir_path
|
||||
|
||||
file_paths_by_dir[dir_id].append([file_path, seq_path])
|
||||
|
||||
dir_ids_to_pop = []
|
||||
for dir_id, dir_path in dir_paths.items():
|
||||
if os.path.exists(dir_path):
|
||||
continue
|
||||
|
||||
dir_ids_to_pop.append(dir_id)
|
||||
|
||||
# Pop dirs from both dictionaries
|
||||
for dir_id in dir_ids_to_pop:
|
||||
dir_paths.pop(dir_id)
|
||||
paths = file_paths_by_dir.pop(dir_id)
|
||||
# TODO report of missing directories?
|
||||
paths_msg = ", ".join([
|
||||
"'{}'".format(path[0].replace("\\", "/")) for path in paths
|
||||
])
|
||||
self.log.debug((
|
||||
"Folder does not exist. Deleting its files skipped: {}"
|
||||
).format(paths_msg))
|
||||
|
||||
return {
|
||||
"dir_paths": dir_paths,
|
||||
"file_paths_by_dir": file_paths_by_dir,
|
||||
"versions": versions,
|
||||
"folder": folder_entity,
|
||||
"product": product_entity,
|
||||
"archive_product": versions_count == 0
|
||||
}
|
||||
|
||||
def main(self, project_name, data, remove_publish_folder):
|
||||
# Size of files.
|
||||
size = 0
|
||||
if not data:
|
||||
return size
|
||||
|
||||
if remove_publish_folder:
|
||||
size = self.delete_whole_dir_paths(data["dir_paths"].values())
|
||||
else:
|
||||
size = self.delete_only_repre_files(
|
||||
data["dir_paths"], data["file_paths_by_dir"]
|
||||
)
|
||||
|
||||
op_session = OperationsSession()
|
||||
for version in data["versions"]:
|
||||
orig_version_tags = version["tags"]
|
||||
version_tags = list(orig_version_tags)
|
||||
changes = {}
|
||||
if "deleted" not in version_tags:
|
||||
version_tags.append("deleted")
|
||||
changes["tags"] = version_tags
|
||||
|
||||
if version["active"]:
|
||||
changes["active"] = False
|
||||
|
||||
if not changes:
|
||||
continue
|
||||
op_session.update_entity(
|
||||
project_name, "version", version["id"], changes
|
||||
)
|
||||
|
||||
op_session.commit()
|
||||
|
||||
return size
|
||||
|
||||
def load(self, contexts, name=None, namespace=None, options=None):
|
||||
try:
|
||||
size = 0
|
||||
for count, context in enumerate(contexts):
|
||||
versions_to_keep = 2
|
||||
remove_publish_folder = False
|
||||
if options:
|
||||
versions_to_keep = options.get(
|
||||
"versions_to_keep", versions_to_keep
|
||||
)
|
||||
remove_publish_folder = options.get(
|
||||
"remove_publish_folder", remove_publish_folder
|
||||
)
|
||||
|
||||
data = self.get_data(context, versions_to_keep)
|
||||
if not data:
|
||||
continue
|
||||
project_name = context["project"]["name"]
|
||||
size += self.main(project_name, data, remove_publish_folder)
|
||||
print("Progressing {}/{}".format(count + 1, len(contexts)))
|
||||
|
||||
msg = "Total size of files: {}".format(format_file_size(size))
|
||||
self.log.info(msg)
|
||||
self.message(msg)
|
||||
|
||||
except Exception:
|
||||
self.log.error("Failed to delete versions.", exc_info=True)
|
||||
|
||||
|
||||
class CalculateOldVersions(DeleteOldVersions):
|
||||
"""Calculate file size of old versions"""
|
||||
label = "Calculate Old Versions"
|
||||
order = 30
|
||||
tool_names = ["library_loader"]
|
||||
|
||||
options = [
|
||||
qargparse.Integer(
|
||||
"versions_to_keep", default=2, min=0, help="Versions to keep:"
|
||||
),
|
||||
qargparse.Boolean(
|
||||
"remove_publish_folder", help="Remove publish folder:"
|
||||
)
|
||||
]
|
||||
|
||||
def main(self, project_name, data, remove_publish_folder):
|
||||
size = 0
|
||||
|
||||
if not data:
|
||||
return size
|
||||
|
||||
if remove_publish_folder:
|
||||
size = self.delete_whole_dir_paths(
|
||||
data["dir_paths"].values(), delete=False
|
||||
)
|
||||
else:
|
||||
size = self.delete_only_repre_files(
|
||||
data["dir_paths"], data["file_paths_by_dir"], delete=False
|
||||
)
|
||||
|
||||
return size
|
||||
|
|
|
|||
|
|
@@ -20,6 +20,8 @@ class WorkAreaFilesModel(QtGui.QStandardItemModel):
|
|||
controller (AbstractWorkfilesFrontend): The control object.
|
||||
"""
|
||||
|
||||
refreshed = QtCore.Signal()
|
||||
|
||||
def __init__(self, controller):
|
||||
super(WorkAreaFilesModel, self).__init__()
|
||||
|
||||
|
|
@@ -163,6 +165,12 @@ class WorkAreaFilesModel(QtGui.QStandardItemModel):
|
|||
self._fill_items()
|
||||
|
||||
def _fill_items(self):
|
||||
try:
|
||||
self._fill_items_impl()
|
||||
finally:
|
||||
self.refreshed.emit()
|
||||
|
||||
def _fill_items_impl(self):
|
||||
folder_id = self._selected_folder_id
|
||||
task_id = self._selected_task_id
|
||||
if not folder_id or not task_id:
|
||||
|
|
@@ -285,6 +293,7 @@ class WorkAreaFilesWidget(QtWidgets.QWidget):
|
|||
selection_model.selectionChanged.connect(self._on_selection_change)
|
||||
view.double_clicked.connect(self._on_mouse_double_click)
|
||||
view.customContextMenuRequested.connect(self._on_context_menu)
|
||||
model.refreshed.connect(self._on_model_refresh)
|
||||
|
||||
controller.register_event_callback(
|
||||
"expected_selection_changed",
|
||||
|
|
@@ -298,6 +307,7 @@ class WorkAreaFilesWidget(QtWidgets.QWidget):
|
|||
self._controller = controller
|
||||
|
||||
self._published_mode = False
|
||||
self._change_selection_on_refresh = True
|
||||
|
||||
def set_published_mode(self, published_mode):
|
||||
"""Set the published mode.
|
||||
|
|
@@ -379,7 +389,9 @@ class WorkAreaFilesWidget(QtWidgets.QWidget):
|
|||
if not workfile_info["current"]:
|
||||
return
|
||||
|
||||
self._change_selection_on_refresh = False
|
||||
self._model.refresh()
|
||||
self._change_selection_on_refresh = True
|
||||
|
||||
workfile_name = workfile_info["name"]
|
||||
if (
|
||||
|
|
@@ -394,3 +406,30 @@ class WorkAreaFilesWidget(QtWidgets.QWidget):
|
|||
self._controller.expected_workfile_selected(
|
||||
event["folder"]["id"], event["task"]["name"], workfile_name
|
||||
)
|
||||
|
||||
def _on_model_refresh(self):
|
||||
if (
|
||||
not self._change_selection_on_refresh
|
||||
or self._proxy_model.rowCount() < 1
|
||||
):
|
||||
return
|
||||
|
||||
# Find the row with latest date modified
|
||||
latest_index = max(
|
||||
(
|
||||
self._proxy_model.index(idx, 0)
|
||||
for idx in range(self._proxy_model.rowCount())
|
||||
),
|
||||
key=lambda model_index: model_index.data(DATE_MODIFIED_ROLE)
|
||||
)
|
||||
|
||||
# Select row of latest modified
|
||||
selection_model = self._view.selectionModel()
|
||||
selection_model.select(
|
||||
latest_index,
|
||||
(
|
||||
QtCore.QItemSelectionModel.ClearAndSelect
|
||||
| QtCore.QItemSelectionModel.Current
|
||||
| QtCore.QItemSelectionModel.Rows
|
||||
)
|
||||
)
|
||||
|
|
|
|||
|
|
@@ -118,11 +118,11 @@ class WorkfilesToolWindow(QtWidgets.QWidget):
|
|||
overlay_invalid_host = InvalidHostOverlay(self)
|
||||
overlay_invalid_host.setVisible(False)
|
||||
|
||||
first_show_timer = QtCore.QTimer()
|
||||
first_show_timer.setSingleShot(True)
|
||||
first_show_timer.setInterval(50)
|
||||
show_timer = QtCore.QTimer()
|
||||
show_timer.setSingleShot(True)
|
||||
show_timer.setInterval(50)
|
||||
|
||||
first_show_timer.timeout.connect(self._on_first_show)
|
||||
show_timer.timeout.connect(self._on_show)
|
||||
|
||||
controller.register_event_callback(
|
||||
"save_as.finished",
|
||||
|
|
@@ -159,7 +159,7 @@ class WorkfilesToolWindow(QtWidgets.QWidget):
|
|||
self._tasks_widget = tasks_widget
|
||||
self._side_panel = side_panel
|
||||
|
||||
self._first_show_timer = first_show_timer
|
||||
self._show_timer = show_timer
|
||||
|
||||
self._post_init()
|
||||
|
||||
|
|
@@ -287,9 +287,9 @@ class WorkfilesToolWindow(QtWidgets.QWidget):
|
|||
|
||||
def showEvent(self, event):
|
||||
super(WorkfilesToolWindow, self).showEvent(event)
|
||||
self._show_timer.start()
|
||||
if self._first_show:
|
||||
self._first_show = False
|
||||
self._first_show_timer.start()
|
||||
self.setStyleSheet(style.load_stylesheet())
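The two single-shot timers above implement a deferred-refresh pattern; a generic sketch of the same idea, assuming a qtpy widget:
from qtpy import QtCore, QtWidgets

class ExampleWindow(QtWidgets.QWidget):
    def __init__(self):
        super().__init__()
        self._show_timer = QtCore.QTimer(self)
        self._show_timer.setSingleShot(True)
        self._show_timer.setInterval(50)
        self._show_timer.timeout.connect(self._on_show)

    def showEvent(self, event):
        # Let the window paint first, then run the slower refresh shortly after.
        super().showEvent(event)
        self._show_timer.start()

    def _on_show(self):
        pass  # refresh models here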
|
||||
|
||||
def keyPressEvent(self, event):
|
||||
|
|
@@ -303,9 +303,8 @@ class WorkfilesToolWindow(QtWidgets.QWidget):
|
|||
|
||||
pass
|
||||
|
||||
def _on_first_show(self):
|
||||
if not self._controller_refreshed:
|
||||
self.refresh()
|
||||
def _on_show(self):
|
||||
self.refresh()
|
||||
|
||||
def _on_file_text_filter_change(self, text):
|
||||
self._files_widget.set_text_filter(text)
|
||||
|
|
|
|||
server_addon/substancepainter/server/settings/load_plugins.py (new file, 122 lines)
|
|
@@ -0,0 +1,122 @@
|
|||
from ayon_server.settings import BaseSettingsModel, SettingsField
|
||||
|
||||
|
||||
def normal_map_format_enum():
|
||||
return [
|
||||
{"label": "DirectX", "value": "NormalMapFormat.DirectX"},
|
||||
{"label": "OpenGL", "value": "NormalMapFormat.OpenGL"},
|
||||
]
|
||||
|
||||
|
||||
def tangent_space_enum():
|
||||
return [
|
||||
{"label": "Per Fragment", "value": "TangentSpace.PerFragment"},
|
||||
{"label": "Per Vertex", "value": "TangentSpace.PerVertex"},
|
||||
]
|
||||
|
||||
|
||||
def uv_workflow_enum():
|
||||
return [
|
||||
{"label": "Default", "value": "ProjectWorkflow.Default"},
|
||||
{"label": "UV Tile", "value": "ProjectWorkflow.UVTile"},
|
||||
{"label": "Texture Set Per UV Tile",
|
||||
"value": "ProjectWorkflow.TextureSetPerUVTile"}
|
||||
]
|
||||
|
||||
|
||||
def document_resolution_enum():
|
||||
return [
|
||||
{"label": "128", "value": 128},
|
||||
{"label": "256", "value": 256},
|
||||
{"label": "512", "value": 512},
|
||||
{"label": "1024", "value": 1024},
|
||||
{"label": "2048", "value": 2048},
|
||||
{"label": "4096", "value": 4096}
|
||||
]
|
||||
|
||||
|
||||
class ProjectTemplatesModel(BaseSettingsModel):
|
||||
_layout = "expanded"
|
||||
name: str = SettingsField("default", title="Template Name")
|
||||
default_texture_resolution: int = SettingsField(
|
||||
1024, enum_resolver=document_resolution_enum,
|
||||
title="Document Resolution",
|
||||
description=("Set texture resolution when "
|
||||
"creating new project.")
|
||||
)
|
||||
import_cameras: bool = SettingsField(
|
||||
True, title="Import Cameras",
|
||||
description="Import cameras from the mesh file.")
|
||||
normal_map_format: str = SettingsField(
|
||||
"DirectX", enum_resolver=normal_map_format_enum,
|
||||
title="Normal Map Format",
|
||||
description=("Set normal map format when "
|
||||
"creating new project.")
|
||||
)
|
||||
project_workflow: str = SettingsField(
|
||||
"Default", enum_resolver=uv_workflow_enum,
|
||||
title="UV Tile Settings",
|
||||
description=("Set UV workflow when "
|
||||
"creating new project.")
|
||||
)
|
||||
tangent_space_mode: str = SettingsField(
|
||||
"PerFragment", enum_resolver=tangent_space_enum,
|
||||
title="Tangent Space",
|
||||
description=("An option to compute tangent space "
|
||||
"when creating new project.")
|
||||
)
|
||||
preserve_strokes: bool = SettingsField(
|
||||
True, title="Preserve Strokes",
|
||||
description=("Preserve strokes positions on mesh.\n"
|
||||
"(only relevant when loading into "
|
||||
"existing project)")
|
||||
)
|
||||
|
||||
|
||||
class ProjectTemplateSettingModel(BaseSettingsModel):
|
||||
project_templates: list[ProjectTemplatesModel] = SettingsField(
|
||||
default_factory=ProjectTemplatesModel,
|
||||
title="Project Templates"
|
||||
)
|
||||
|
||||
|
||||
class LoadersModel(BaseSettingsModel):
|
||||
SubstanceLoadProjectMesh: ProjectTemplateSettingModel = SettingsField(
|
||||
default_factory=ProjectTemplateSettingModel,
|
||||
title="Load Mesh"
|
||||
)
|
||||
|
||||
|
||||
DEFAULT_LOADER_SETTINGS = {
|
||||
"SubstanceLoadProjectMesh": {
|
||||
"project_templates": [
|
||||
{
|
||||
"name": "2K(Default)",
|
||||
"default_texture_resolution": 2048,
|
||||
"import_cameras": True,
|
||||
"normal_map_format": "NormalMapFormat.DirectX",
|
||||
"project_workflow": "ProjectWorkflow.Default",
|
||||
"tangent_space_mode": "TangentSpace.PerFragment",
|
||||
"preserve_strokes": True
|
||||
},
|
||||
{
|
||||
"name": "2K(UV tile)",
|
||||
"default_texture_resolution": 2048,
|
||||
"import_cameras": True,
|
||||
"normal_map_format": "NormalMapFormat.DirectX",
|
||||
"project_workflow": "ProjectWorkflow.UVTile",
|
||||
"tangent_space_mode": "TangentSpace.PerFragment",
|
||||
"preserve_strokes": True
|
||||
},
|
||||
{
|
||||
"name": "4K(Custom)",
|
||||
"default_texture_resolution": 4096,
|
||||
"import_cameras": True,
|
||||
"normal_map_format": "NormalMapFormat.OpenGL",
|
||||
"project_workflow": "ProjectWorkflow.UVTile",
|
||||
"tangent_space_mode": "TangentSpace.PerFragment",
|
||||
"preserve_strokes": True
|
||||
}
|
||||
]
|
||||
}
|
||||
}
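Illustrative only: how one of these templates is consumed on the client side. The enum values are stored as plain strings and resolved with the loader's _convert() helper before being passed to substance_painter.project.Settings().
template = DEFAULT_LOADER_SETTINGS["SubstanceLoadProjectMesh"]["project_templates"][0]
# template["normal_map_format"] == "NormalMapFormat.DirectX"
# _convert(template["normal_map_format"])
#     -> substance_painter.project.NormalMapFormat.DirectX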
|
||||
|
|
@@ -1,5 +1,6 @@
|
|||
from ayon_server.settings import BaseSettingsModel, SettingsField
|
||||
from .imageio import ImageIOSettings, DEFAULT_IMAGEIO_SETTINGS
|
||||
from .load_plugins import LoadersModel, DEFAULT_LOADER_SETTINGS
|
||||
|
||||
|
||||
class ShelvesSettingsModel(BaseSettingsModel):
|
||||
|
|
@@ -17,9 +18,12 @@ class SubstancePainterSettings(BaseSettingsModel):
|
|||
default_factory=list,
|
||||
title="Shelves"
|
||||
)
|
||||
load: LoadersModel = SettingsField(
|
||||
default_factory=DEFAULT_LOADER_SETTINGS, title="Loaders")
|
||||
|
||||
|
||||
DEFAULT_SPAINTER_SETTINGS = {
|
||||
"imageio": DEFAULT_IMAGEIO_SETTINGS,
|
||||
"shelves": []
|
||||
"shelves": [],
|
||||
"load": DEFAULT_LOADER_SETTINGS,
|
||||
}
|
||||
|
|
|
|||