Merge branch 'develop' into bugfix/AY-4570_Substance-project-attributes

Commit 038d2a9afd by Kayla Man, 2024-04-24 16:07:42 +08:00
10 changed files with 568 additions and 551 deletions


@@ -45,33 +45,11 @@ class AbcLoader(load.LoaderPlugin):
alembic = container.createNode("alembic", node_name=node_name)
alembic.setParms({"fileName": file_path})
# Add unpack node
unpack_name = "unpack_{}".format(name)
unpack = container.createNode("unpack", node_name=unpack_name)
unpack.setInput(0, alembic)
unpack.setParms({"transfer_attributes": "path"})
# Position nodes nicely
container.moveToGoodPosition()
container.layoutChildren()
# Add normal to points
# Order of menu ['point', 'vertex', 'prim', 'detail']
normal_name = "normal_{}".format(name)
normal_node = container.createNode("normal", node_name=normal_name)
normal_node.setParms({"type": 0})
normal_node.setInput(0, unpack)
null = container.createNode("null", node_name="OUT")
null.setInput(0, normal_node)
# Ensure display flag is on the Alembic input node and not on the OUT
# node to optimize "debug" displaying in the viewport.
alembic.setDisplayFlag(True)
# Set new position for unpack node else it gets cluttered
nodes = [container, alembic, unpack, normal_node, null]
for nr, node in enumerate(nodes):
node.setPosition([0, (0 - nr)])
self[:] = nodes
nodes = [container, alembic]
return pipeline.containerise(
node_name,


@@ -19,7 +19,7 @@ from .lib import pairwise
@contextlib.contextmanager
def _allow_export_from_render_setup_layer():
def allow_export_from_render_setup_layer():
"""Context manager to override Maya settings to allow RS layer export"""
try:
@@ -102,7 +102,7 @@ def export_in_rs_layer(path, nodes, export=None):
cmds.disconnectAttr(src, dest)
# Export Selected
with _allow_export_from_render_setup_layer():
with allow_export_from_render_setup_layer():
cmds.select(nodes, noExpand=True)
if export:
export()
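
The leading underscore is dropped so the context manager becomes part of the module's public API and can be imported elsewhere (the ExtractRedshiftProxy change below relies on this). A minimal usage sketch, assuming only the import path confirmed later in this commit; the selected node and output path are placeholder values:

    from maya import cmds
    from ayon_core.hosts.maya.api.render_setup_tools import (
        allow_export_from_render_setup_layer
    )

    # Temporarily override Maya settings so an export does not fail while a
    # Render Setup layer is active, restoring the settings afterwards.
    with allow_export_from_render_setup_layer():
        cmds.select("pSphere1", noExpand=True)  # placeholder node name
        cmds.file(
            "/tmp/export.ma",  # placeholder output path
            exportSelected=True,
            type="mayaAscii",
            force=True,
        )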


@@ -1,8 +1,13 @@
from typing import List
import maya.cmds as cmds
from ayon_core.hosts.maya.api import plugin
from ayon_core.hosts.maya.api import lib
from ayon_core.pipeline import registered_host
from ayon_core.pipeline.create import CreateContext
class YetiRigLoader(plugin.ReferenceLoader):
"""This loader will load Yeti rig."""
@@ -15,6 +20,9 @@ class YetiRigLoader(plugin.ReferenceLoader):
icon = "code-fork"
color = "orange"
# From settings
create_cache_instance_on_load = True
def process_reference(
self, context, name=None, namespace=None, options=None
):
@@ -49,4 +57,41 @@ class YetiRigLoader(plugin.ReferenceLoader):
)
self[:] = nodes
if self.create_cache_instance_on_load:
# Automatically create an instance to allow publishing the loaded
# yeti rig into a yeti cache
self._create_yeti_cache_instance(nodes, variant=namespace)
return nodes
def _create_yeti_cache_instance(self, nodes: List[str], variant: str):
"""Create a yeticache product type instance to publish the output.
This is similar to how loading an animation rig automatically creates
an animation instance for publishing the loaded character rig, but
here for yeti rigs.
Args:
nodes (List[str]): Nodes generated on load.
variant (str): Variant for the yeti cache instance to create.
"""
# Find the roots amongst the loaded nodes
yeti_nodes = cmds.ls(nodes, type="pgYetiMaya", long=True)
assert yeti_nodes, "No pgYetiMaya nodes in rig, this is a bug."
self.log.info("Creating variant: {}".format(variant))
creator_identifier = "io.openpype.creators.maya.yeticache"
host = registered_host()
create_context = CreateContext(host)
with lib.maintained_selection():
cmds.select(yeti_nodes, noExpand=True)
create_context.create(
creator_identifier=creator_identifier,
variant=variant,
pre_create_data={"use_selection": True}
)
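
The `create_cache_instance_on_load` attribute is marked "From settings" above. A hypothetical sketch of how it could be filled from project settings, assuming the loader supports an `apply_settings` classmethod like the publish plugin shown further below and assuming the Maya addon stores loader settings under `project_settings["maya"]["load"]`; the actual wiring may instead be handled automatically by the settings framework:

    @classmethod
    def apply_settings(cls, project_settings):
        # Hypothetical settings path; compare with the YetiRigLoaderModel
        # settings definition near the end of this commit.
        settings = project_settings["maya"]["load"]["YetiRigLoader"]
        cls.enabled = settings.get("enabled", True)
        cls.create_cache_instance_on_load = settings.get(
            "create_cache_instance_on_load", True
        )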


@@ -12,7 +12,7 @@ class CollectFileDependencies(pyblish.api.ContextPlugin):
families = ["renderlayer"]
@classmethod
def apply_settings(cls, project_settings, system_settings):
def apply_settings(cls, project_settings):
# Disable plug-in if not used for deadline submission anyway
settings = project_settings["deadline"]["publish"]["MayaSubmitDeadline"] # noqa
cls.enabled = settings.get("asset_dependencies", True)


@@ -5,7 +5,13 @@ import os
from maya import cmds
from ayon_core.pipeline import publish
from ayon_core.hosts.maya.api.lib import maintained_selection
from ayon_core.hosts.maya.api.lib import (
maintained_selection,
renderlayer
)
from ayon_core.hosts.maya.api.render_setup_tools import (
allow_export_from_render_setup_layer
)
class ExtractRedshiftProxy(publish.Extractor):
@@ -18,6 +24,9 @@ class ExtractRedshiftProxy(publish.Extractor):
def process(self, instance):
"""Extractor entry point."""
# Make sure Redshift is loaded
cmds.loadPlugin("redshift4maya", quiet=True)
staging_dir = self.staging_dir(instance)
file_name = "{}.rs".format(instance.name)
file_path = os.path.join(staging_dir, file_name)
@@ -60,14 +69,22 @@
# Write out rs file
self.log.debug("Writing: '%s'" % file_path)
# Allow overriding what renderlayer to export from. By default force
# it to the default render layer. (Note that the renderlayer isn't
# currently exposed as an attribute to artists)
layer = instance.data.get("renderLayer", "defaultRenderLayer")
with maintained_selection():
cmds.select(instance.data["setMembers"], noExpand=True)
cmds.file(file_path,
pr=False,
force=True,
type="Redshift Proxy",
exportSelected=True,
options=rs_options)
with renderlayer(layer):
with allow_export_from_render_setup_layer():
cmds.select(instance.data["setMembers"], noExpand=True)
cmds.file(file_path,
preserveReferences=False,
force=True,
type="Redshift Proxy",
exportSelected=True,
options=rs_options)
if "representations" not in instance.data:
instance.data["representations"] = []


@@ -1,501 +1,426 @@
# TODO This plugin is not converted for AYON
#
# import collections
# import os
# import uuid
#
# import clique
# import ayon_api
# from pymongo import UpdateOne
# import qargparse
# from qtpy import QtWidgets, QtCore
#
# from ayon_core import style
# from ayon_core.addon import AddonsManager
# from ayon_core.lib import format_file_size
# from ayon_core.pipeline import load, Anatomy
# from ayon_core.pipeline.load import (
# get_representation_path_with_anatomy,
# InvalidRepresentationContext,
# )
#
#
# class DeleteOldVersions(load.ProductLoaderPlugin):
# """Deletes specific number of old version"""
#
# is_multiple_contexts_compatible = True
# sequence_splitter = "__sequence_splitter__"
#
# representations = {"*"}
# product_types = {"*"}
# tool_names = ["library_loader"]
#
# label = "Delete Old Versions"
# order = 35
# icon = "trash"
# color = "#d8d8d8"
#
# options = [
# qargparse.Integer(
# "versions_to_keep", default=2, min=0, help="Versions to keep:"
# ),
# qargparse.Boolean(
# "remove_publish_folder", help="Remove publish folder:"
# )
# ]
#
# def delete_whole_dir_paths(self, dir_paths, delete=True):
# size = 0
#
# for dir_path in dir_paths:
# # Delete all files and fodlers in dir path
# for root, dirs, files in os.walk(dir_path, topdown=False):
# for name in files:
# file_path = os.path.join(root, name)
# size += os.path.getsize(file_path)
# if delete:
# os.remove(file_path)
# self.log.debug("Removed file: {}".format(file_path))
#
# for name in dirs:
# if delete:
# os.rmdir(os.path.join(root, name))
#
# if not delete:
# continue
#
# # Delete even the folder and it's parents folders if they are empty
# while True:
# if not os.path.exists(dir_path):
# dir_path = os.path.dirname(dir_path)
# continue
#
# if len(os.listdir(dir_path)) != 0:
# break
#
# os.rmdir(os.path.join(dir_path))
#
# return size
#
# def path_from_representation(self, representation, anatomy):
# try:
# context = representation["context"]
# except KeyError:
# return (None, None)
#
# try:
# path = get_representation_path_with_anatomy(
# representation, anatomy
# )
# except InvalidRepresentationContext:
# return (None, None)
#
# sequence_path = None
# if "frame" in context:
# context["frame"] = self.sequence_splitter
# sequence_path = get_representation_path_with_anatomy(
# representation, anatomy
# )
#
# if sequence_path:
# sequence_path = sequence_path.normalized()
#
# return (path.normalized(), sequence_path)
#
# def delete_only_repre_files(self, dir_paths, file_paths, delete=True):
# size = 0
#
# for dir_id, dir_path in dir_paths.items():
# dir_files = os.listdir(dir_path)
# collections, remainders = clique.assemble(dir_files)
# for file_path, seq_path in file_paths[dir_id]:
# file_path_base = os.path.split(file_path)[1]
# # Just remove file if `frame` key was not in context or
# # filled path is in remainders (single file sequence)
# if not seq_path or file_path_base in remainders:
# if not os.path.exists(file_path):
# self.log.debug(
# "File was not found: {}".format(file_path)
# )
# continue
#
# size += os.path.getsize(file_path)
#
# if delete:
# os.remove(file_path)
# self.log.debug("Removed file: {}".format(file_path))
#
# if file_path_base in remainders:
# remainders.remove(file_path_base)
# continue
#
# seq_path_base = os.path.split(seq_path)[1]
# head, tail = seq_path_base.split(self.sequence_splitter)
#
# final_col = None
# for collection in collections:
# if head != collection.head or tail != collection.tail:
# continue
# final_col = collection
# break
#
# if final_col is not None:
# # Fill full path to head
# final_col.head = os.path.join(dir_path, final_col.head)
# for _file_path in final_col:
# if os.path.exists(_file_path):
#
# size += os.path.getsize(_file_path)
#
# if delete:
# os.remove(_file_path)
# self.log.debug(
# "Removed file: {}".format(_file_path)
# )
#
# _seq_path = final_col.format("{head}{padding}{tail}")
# self.log.debug("Removed files: {}".format(_seq_path))
# collections.remove(final_col)
#
# elif os.path.exists(file_path):
# size += os.path.getsize(file_path)
#
# if delete:
# os.remove(file_path)
# self.log.debug("Removed file: {}".format(file_path))
# else:
# self.log.debug(
# "File was not found: {}".format(file_path)
# )
#
# # Delete as much as possible parent folders
# if not delete:
# return size
#
# for dir_path in dir_paths.values():
# while True:
# if not os.path.exists(dir_path):
# dir_path = os.path.dirname(dir_path)
# continue
#
# if len(os.listdir(dir_path)) != 0:
# break
#
# self.log.debug("Removed folder: {}".format(dir_path))
# os.rmdir(dir_path)
#
# return size
#
# def message(self, text):
# msgBox = QtWidgets.QMessageBox()
# msgBox.setText(text)
# msgBox.setStyleSheet(style.load_stylesheet())
# msgBox.setWindowFlags(
# msgBox.windowFlags() | QtCore.Qt.FramelessWindowHint
# )
# msgBox.exec_()
#
# def get_data(self, context, versions_count):
# product_entity = context["product"]
# folder_entity = context["folder"]
# project_name = context["project"]["name"]
# anatomy = Anatomy(project_name)
#
# versions = list(ayon_api.get_versions(
# project_name, product_ids=[product_entity["id"]]
# ))
#
# versions_by_parent = collections.defaultdict(list)
# for ent in versions:
# versions_by_parent[ent["productId"]].append(ent)
#
# def sort_func(ent):
# return int(ent["version"])
#
# all_last_versions = []
# for _parent_id, _versions in versions_by_parent.items():
# for idx, version in enumerate(
# sorted(_versions, key=sort_func, reverse=True)
# ):
# if idx >= versions_count:
# break
# all_last_versions.append(version)
#
# self.log.debug("Collected versions ({})".format(len(versions)))
#
# # Filter latest versions
# for version in all_last_versions:
# versions.remove(version)
#
# # Update versions_by_parent without filtered versions
# versions_by_parent = collections.defaultdict(list)
# for ent in versions:
# versions_by_parent[ent["productId"]].append(ent)
#
# # Filter already deleted versions
# versions_to_pop = []
# for version in versions:
# version_tags = version["data"].get("tags")
# if version_tags and "deleted" in version_tags:
# versions_to_pop.append(version)
#
# for version in versions_to_pop:
# msg = "Folder: \"{}\" | Product: \"{}\" | Version: \"{}\"".format(
# folder_entity["path"],
# product_entity["name"],
# version["version"]
# )
# self.log.debug((
# "Skipping version. Already tagged as `deleted`. < {} >"
# ).format(msg))
# versions.remove(version)
#
# version_ids = [ent["id"] for ent in versions]
#
# self.log.debug(
# "Filtered versions to delete ({})".format(len(version_ids))
# )
#
# if not version_ids:
# msg = "Skipping processing. Nothing to delete on {}/{}".format(
# folder_entity["path"], product_entity["name"]
# )
# self.log.info(msg)
# print(msg)
# return
#
# repres = list(ayon_api.get_representations(
# project_name, version_ids=version_ids
# ))
#
# self.log.debug(
# "Collected representations to remove ({})".format(len(repres))
# )
#
# dir_paths = {}
# file_paths_by_dir = collections.defaultdict(list)
# for repre in repres:
# file_path, seq_path = self.path_from_representation(
# repre, anatomy
# )
# if file_path is None:
# self.log.debug((
# "Could not format path for represenation \"{}\""
# ).format(str(repre)))
# continue
#
# dir_path = os.path.dirname(file_path)
# dir_id = None
# for _dir_id, _dir_path in dir_paths.items():
# if _dir_path == dir_path:
# dir_id = _dir_id
# break
#
# if dir_id is None:
# dir_id = uuid.uuid4()
# dir_paths[dir_id] = dir_path
#
# file_paths_by_dir[dir_id].append([file_path, seq_path])
#
# dir_ids_to_pop = []
# for dir_id, dir_path in dir_paths.items():
# if os.path.exists(dir_path):
# continue
#
# dir_ids_to_pop.append(dir_id)
#
# # Pop dirs from both dictionaries
# for dir_id in dir_ids_to_pop:
# dir_paths.pop(dir_id)
# paths = file_paths_by_dir.pop(dir_id)
# # TODO report of missing directories?
# paths_msg = ", ".join([
# "'{}'".format(path[0].replace("\\", "/")) for path in paths
# ])
# self.log.debug((
# "Folder does not exist. Deleting it's files skipped: {}"
# ).format(paths_msg))
#
# return {
# "dir_paths": dir_paths,
# "file_paths_by_dir": file_paths_by_dir,
# "versions": versions,
# "folder": folder_entity,
# "product": product_entity,
# "archive_product": versions_count == 0
# }
#
# def main(self, project_name, data, remove_publish_folder):
# # Size of files.
# size = 0
# if not data:
# return size
#
# if remove_publish_folder:
# size = self.delete_whole_dir_paths(data["dir_paths"].values())
# else:
# size = self.delete_only_repre_files(
# data["dir_paths"], data["file_paths_by_dir"]
# )
#
# mongo_changes_bulk = []
# for version in data["versions"]:
# orig_version_tags = version["data"].get("tags") or []
# version_tags = [tag for tag in orig_version_tags]
# if "deleted" not in version_tags:
# version_tags.append("deleted")
#
# if version_tags == orig_version_tags:
# continue
#
# update_query = {"id": version["id"]}
# update_data = {"$set": {"data.tags": version_tags}}
# mongo_changes_bulk.append(UpdateOne(update_query, update_data))
#
# if data["archive_product"]:
# mongo_changes_bulk.append(UpdateOne(
# {
# "id": data["product"]["id"],
# "type": "subset"
# },
# {"$set": {"type": "archived_subset"}}
# ))
#
# if mongo_changes_bulk:
# dbcon = AvalonMongoDB()
# dbcon.Session["AYON_PROJECT_NAME"] = project_name
# dbcon.install()
# dbcon.bulk_write(mongo_changes_bulk)
# dbcon.uninstall()
#
# self._ftrack_delete_versions(data)
#
# return size
#
# def _ftrack_delete_versions(self, data):
# """Delete version on ftrack.
#
# Handling of ftrack logic in this plugin is not ideal. But in OP3 it is
# almost impossible to solve the issue other way.
#
# Note:
# Asset versions on ftrack are not deleted but marked as
# "not published" which cause that they're invisible.
#
# Args:
# data (dict): Data sent to product loader with full context.
# """
#
# # First check for ftrack id on folder entity
# # - skip if ther is none
# ftrack_id = data["folder"]["attrib"].get("ftrackId")
# if not ftrack_id:
# self.log.info((
# "Folder does not have filled ftrack id. Skipped delete"
# " of ftrack version."
# ))
# return
#
# # Check if ftrack module is enabled
# addons_manager = AddonsManager()
# ftrack_addon = addons_manager.get("ftrack")
# if not ftrack_addon or not ftrack_addon.enabled:
# return
#
# import ftrack_api
#
# session = ftrack_api.Session()
# product_name = data["product"]["name"]
# versions = {
# '"{}"'.format(version_doc["name"])
# for version_doc in data["versions"]
# }
# asset_versions = session.query(
# (
# "select id, is_published from AssetVersion where"
# " asset.parent.id is \"{}\""
# " and asset.name is \"{}\""
# " and version in ({})"
# ).format(
# ftrack_id,
# product_name,
# ",".join(versions)
# )
# ).all()
#
# # Set attribute `is_published` to `False` on ftrack AssetVersions
# for asset_version in asset_versions:
# asset_version["is_published"] = False
#
# try:
# session.commit()
#
# except Exception:
# msg = (
# "Could not set `is_published` attribute to `False`"
# " for selected AssetVersions."
# )
# self.log.error(msg)
# self.message(msg)
#
# def load(self, contexts, name=None, namespace=None, options=None):
# try:
# size = 0
# for count, context in enumerate(contexts):
# versions_to_keep = 2
# remove_publish_folder = False
# if options:
# versions_to_keep = options.get(
# "versions_to_keep", versions_to_keep
# )
# remove_publish_folder = options.get(
# "remove_publish_folder", remove_publish_folder
# )
#
# data = self.get_data(context, versions_to_keep)
# if not data:
# continue
#
# project_name = context["project"]["name"]
# size += self.main(project_name, data, remove_publish_folder)
# print("Progressing {}/{}".format(count + 1, len(contexts)))
#
# msg = "Total size of files: {}".format(format_file_size(size))
# self.log.info(msg)
# self.message(msg)
#
# except Exception:
# self.log.error("Failed to delete versions.", exc_info=True)
#
#
# class CalculateOldVersions(DeleteOldVersions):
# """Calculate file size of old versions"""
# label = "Calculate Old Versions"
# order = 30
# tool_names = ["library_loader"]
#
# options = [
# qargparse.Integer(
# "versions_to_keep", default=2, min=0, help="Versions to keep:"
# ),
# qargparse.Boolean(
# "remove_publish_folder", help="Remove publish folder:"
# )
# ]
#
# def main(self, project_name, data, remove_publish_folder):
# size = 0
#
# if not data:
# return size
#
# if remove_publish_folder:
# size = self.delete_whole_dir_paths(
# data["dir_paths"].values(), delete=False
# )
# else:
# size = self.delete_only_repre_files(
# data["dir_paths"], data["file_paths_by_dir"], delete=False
# )
#
# return size
import collections
import os
import uuid
import clique
import ayon_api
from ayon_api.operations import OperationsSession
import qargparse
from qtpy import QtWidgets, QtCore
from ayon_core import style
from ayon_core.lib import format_file_size
from ayon_core.pipeline import load, Anatomy
from ayon_core.pipeline.load import (
get_representation_path_with_anatomy,
InvalidRepresentationContext,
)
class DeleteOldVersions(load.ProductLoaderPlugin):
"""Deletes specific number of old version"""
is_multiple_contexts_compatible = True
sequence_splitter = "__sequence_splitter__"
representations = ["*"]
product_types = {"*"}
tool_names = ["library_loader"]
label = "Delete Old Versions"
order = 35
icon = "trash"
color = "#d8d8d8"
options = [
qargparse.Integer(
"versions_to_keep", default=2, min=0, help="Versions to keep:"
),
qargparse.Boolean(
"remove_publish_folder", help="Remove publish folder:"
)
]
def delete_whole_dir_paths(self, dir_paths, delete=True):
size = 0
for dir_path in dir_paths:
# Delete all files and folders in dir path
for root, dirs, files in os.walk(dir_path, topdown=False):
for name in files:
file_path = os.path.join(root, name)
size += os.path.getsize(file_path)
if delete:
os.remove(file_path)
self.log.debug("Removed file: {}".format(file_path))
for name in dirs:
if delete:
os.rmdir(os.path.join(root, name))
if not delete:
continue
# Delete the folder itself and its parent folders if they are empty
while True:
if not os.path.exists(dir_path):
dir_path = os.path.dirname(dir_path)
continue
if len(os.listdir(dir_path)) != 0:
break
os.rmdir(os.path.join(dir_path))
return size
def path_from_representation(self, representation, anatomy):
try:
context = representation["context"]
except KeyError:
return (None, None)
try:
path = get_representation_path_with_anatomy(
representation, anatomy
)
except InvalidRepresentationContext:
return (None, None)
sequence_path = None
if "frame" in context:
context["frame"] = self.sequence_splitter
sequence_path = get_representation_path_with_anatomy(
representation, anatomy
)
if sequence_path:
sequence_path = sequence_path.normalized()
return (path.normalized(), sequence_path)
def delete_only_repre_files(self, dir_paths, file_paths, delete=True):
size = 0
for dir_id, dir_path in dir_paths.items():
dir_files = os.listdir(dir_path)
collections, remainders = clique.assemble(dir_files)
for file_path, seq_path in file_paths[dir_id]:
file_path_base = os.path.split(file_path)[1]
# Just remove file if `frame` key was not in context or
# filled path is in remainders (single file sequence)
if not seq_path or file_path_base in remainders:
if not os.path.exists(file_path):
self.log.debug(
"File was not found: {}".format(file_path)
)
continue
size += os.path.getsize(file_path)
if delete:
os.remove(file_path)
self.log.debug("Removed file: {}".format(file_path))
if file_path_base in remainders:
remainders.remove(file_path_base)
continue
seq_path_base = os.path.split(seq_path)[1]
head, tail = seq_path_base.split(self.sequence_splitter)
final_col = None
for collection in collections:
if head != collection.head or tail != collection.tail:
continue
final_col = collection
break
if final_col is not None:
# Fill full path to head
final_col.head = os.path.join(dir_path, final_col.head)
for _file_path in final_col:
if os.path.exists(_file_path):
size += os.path.getsize(_file_path)
if delete:
os.remove(_file_path)
self.log.debug(
"Removed file: {}".format(_file_path)
)
_seq_path = final_col.format("{head}{padding}{tail}")
self.log.debug("Removed files: {}".format(_seq_path))
collections.remove(final_col)
elif os.path.exists(file_path):
size += os.path.getsize(file_path)
if delete:
os.remove(file_path)
self.log.debug("Removed file: {}".format(file_path))
else:
self.log.debug(
"File was not found: {}".format(file_path)
)
# Delete as many parent folders as possible
if not delete:
return size
for dir_path in dir_paths.values():
while True:
if not os.path.exists(dir_path):
dir_path = os.path.dirname(dir_path)
continue
if len(os.listdir(dir_path)) != 0:
break
self.log.debug("Removed folder: {}".format(dir_path))
os.rmdir(dir_path)
return size
def message(self, text):
msgBox = QtWidgets.QMessageBox()
msgBox.setText(text)
msgBox.setStyleSheet(style.load_stylesheet())
msgBox.setWindowFlags(
msgBox.windowFlags() | QtCore.Qt.FramelessWindowHint
)
msgBox.exec_()
def get_data(self, context, versions_count):
product_entity = context["product"]
folder_entity = context["folder"]
project_name = context["project"]["name"]
anatomy = Anatomy(project_name, project_entity=context["project"])
version_fields = ayon_api.get_default_fields_for_type("version")
version_fields.add("tags")
versions = list(ayon_api.get_versions(
project_name,
product_ids=[product_entity["id"]],
active=None,
hero=False,
fields=version_fields
))
self.log.debug(
"Version Number ({})".format(len(versions))
)
versions_by_parent = collections.defaultdict(list)
for ent in versions:
versions_by_parent[ent["productId"]].append(ent)
def sort_func(ent):
return int(ent["version"])
all_last_versions = []
for _parent_id, _versions in versions_by_parent.items():
for idx, version in enumerate(
sorted(_versions, key=sort_func, reverse=True)
):
if idx >= versions_count:
break
all_last_versions.append(version)
self.log.debug("Collected versions ({})".format(len(versions)))
# Filter latest versions
for version in all_last_versions:
versions.remove(version)
# Update versions_by_parent without filtered versions
versions_by_parent = collections.defaultdict(list)
for ent in versions:
versions_by_parent[ent["productId"]].append(ent)
# Filter already deleted versions
versions_to_pop = []
for version in versions:
if "deleted" in version["tags"]:
versions_to_pop.append(version)
for version in versions_to_pop:
msg = "Folder: \"{}\" | Product: \"{}\" | Version: \"{}\"".format(
folder_entity["path"],
product_entity["name"],
version["version"]
)
self.log.debug((
"Skipping version. Already tagged as inactive. < {} >"
).format(msg))
versions.remove(version)
version_ids = [ent["id"] for ent in versions]
self.log.debug(
"Filtered versions to delete ({})".format(len(version_ids))
)
if not version_ids:
msg = "Skipping processing. Nothing to delete on {}/{}".format(
folder_entity["path"], product_entity["name"]
)
self.log.info(msg)
print(msg)
return
repres = list(ayon_api.get_representations(
project_name, version_ids=version_ids
))
self.log.debug(
"Collected representations to remove ({})".format(len(repres))
)
dir_paths = {}
file_paths_by_dir = collections.defaultdict(list)
for repre in repres:
file_path, seq_path = self.path_from_representation(
repre, anatomy
)
if file_path is None:
self.log.debug((
"Could not format path for represenation \"{}\""
).format(str(repre)))
continue
dir_path = os.path.dirname(file_path)
dir_id = None
for _dir_id, _dir_path in dir_paths.items():
if _dir_path == dir_path:
dir_id = _dir_id
break
if dir_id is None:
dir_id = uuid.uuid4()
dir_paths[dir_id] = dir_path
file_paths_by_dir[dir_id].append([file_path, seq_path])
dir_ids_to_pop = []
for dir_id, dir_path in dir_paths.items():
if os.path.exists(dir_path):
continue
dir_ids_to_pop.append(dir_id)
# Pop dirs from both dictionaries
for dir_id in dir_ids_to_pop:
dir_paths.pop(dir_id)
paths = file_paths_by_dir.pop(dir_id)
# TODO report of missing directories?
paths_msg = ", ".join([
"'{}'".format(path[0].replace("\\", "/")) for path in paths
])
self.log.debug((
"Folder does not exist. Deleting its files skipped: {}"
).format(paths_msg))
return {
"dir_paths": dir_paths,
"file_paths_by_dir": file_paths_by_dir,
"versions": versions,
"folder": folder_entity,
"product": product_entity,
"archive_product": versions_count == 0
}
def main(self, project_name, data, remove_publish_folder):
# Size of files.
size = 0
if not data:
return size
if remove_publish_folder:
size = self.delete_whole_dir_paths(data["dir_paths"].values())
else:
size = self.delete_only_repre_files(
data["dir_paths"], data["file_paths_by_dir"]
)
op_session = OperationsSession()
for version in data["versions"]:
orig_version_tags = version["tags"]
version_tags = list(orig_version_tags)
changes = {}
if "deleted" not in version_tags:
version_tags.append("deleted")
changes["tags"] = version_tags
if version["active"]:
changes["active"] = False
if not changes:
continue
op_session.update_entity(
project_name, "version", version["id"], changes
)
op_session.commit()
return size
def load(self, contexts, name=None, namespace=None, options=None):
try:
size = 0
for count, context in enumerate(contexts):
versions_to_keep = 2
remove_publish_folder = False
if options:
versions_to_keep = options.get(
"versions_to_keep", versions_to_keep
)
remove_publish_folder = options.get(
"remove_publish_folder", remove_publish_folder
)
data = self.get_data(context, versions_to_keep)
if not data:
continue
project_name = context["project"]["name"]
size += self.main(project_name, data, remove_publish_folder)
print("Progressing {}/{}".format(count + 1, len(contexts)))
msg = "Total size of files: {}".format(format_file_size(size))
self.log.info(msg)
self.message(msg)
except Exception:
self.log.error("Failed to delete versions.", exc_info=True)
class CalculateOldVersions(DeleteOldVersions):
"""Calculate file size of old versions"""
label = "Calculate Old Versions"
order = 30
tool_names = ["library_loader"]
options = [
qargparse.Integer(
"versions_to_keep", default=2, min=0, help="Versions to keep:"
),
qargparse.Boolean(
"remove_publish_folder", help="Remove publish folder:"
)
]
def main(self, project_name, data, remove_publish_folder):
size = 0
if not data:
return size
if remove_publish_folder:
size = self.delete_whole_dir_paths(
data["dir_paths"].values(), delete=False
)
else:
size = self.delete_only_repre_files(
data["dir_paths"], data["file_paths_by_dir"], delete=False
)
return size
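
The conversion replaces the pymongo `UpdateOne` bulk writes of the commented-out original with `ayon_api` server operations. The update pattern used in `main()` above, shown in isolation as a minimal sketch with example values:

    from ayon_api.operations import OperationsSession

    # Queue entity updates and send them to the AYON server in one request.
    op_session = OperationsSession()
    op_session.update_entity(
        "my_project",                        # example project name
        "version",                           # entity type
        "0123456789abcdef0123456789abcdef",  # example version entity id
        {"tags": ["deleted"], "active": False},
    )
    op_session.commit()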


@@ -20,6 +20,8 @@ class WorkAreaFilesModel(QtGui.QStandardItemModel):
controller (AbstractWorkfilesFrontend): The control object.
"""
refreshed = QtCore.Signal()
def __init__(self, controller):
super(WorkAreaFilesModel, self).__init__()
@@ -163,6 +165,12 @@ class WorkAreaFilesModel(QtGui.QStandardItemModel):
self._fill_items()
def _fill_items(self):
try:
self._fill_items_impl()
finally:
self.refreshed.emit()
def _fill_items_impl(self):
folder_id = self._selected_folder_id
task_id = self._selected_task_id
if not folder_id or not task_id:
@@ -285,6 +293,7 @@ class WorkAreaFilesWidget(QtWidgets.QWidget):
selection_model.selectionChanged.connect(self._on_selection_change)
view.double_clicked.connect(self._on_mouse_double_click)
view.customContextMenuRequested.connect(self._on_context_menu)
model.refreshed.connect(self._on_model_refresh)
controller.register_event_callback(
"expected_selection_changed",
@@ -298,6 +307,7 @@ class WorkAreaFilesWidget(QtWidgets.QWidget):
self._controller = controller
self._published_mode = False
self._change_selection_on_refresh = True
def set_published_mode(self, published_mode):
"""Set the published mode.
@@ -379,7 +389,9 @@ class WorkAreaFilesWidget(QtWidgets.QWidget):
if not workfile_info["current"]:
return
self._change_selection_on_refresh = False
self._model.refresh()
self._change_selection_on_refresh = True
workfile_name = workfile_info["name"]
if (
@@ -394,3 +406,30 @@ class WorkAreaFilesWidget(QtWidgets.QWidget):
self._controller.expected_workfile_selected(
event["folder"]["id"], event["task"]["name"], workfile_name
)
def _on_model_refresh(self):
if (
not self._change_selection_on_refresh
or self._proxy_model.rowCount() < 1
):
return
# Find the row with latest date modified
latest_index = max(
(
self._proxy_model.index(idx, 0)
for idx in range(self._proxy_model.rowCount())
),
key=lambda model_index: model_index.data(DATE_MODIFIED_ROLE)
)
# Select row of latest modified
selection_model = self._view.selectionModel()
selection_model.select(
latest_index,
(
QtCore.QItemSelectionModel.ClearAndSelect
| QtCore.QItemSelectionModel.Current
| QtCore.QItemSelectionModel.Rows
)
)


@@ -118,11 +118,11 @@ class WorkfilesToolWindow(QtWidgets.QWidget):
overlay_invalid_host = InvalidHostOverlay(self)
overlay_invalid_host.setVisible(False)
first_show_timer = QtCore.QTimer()
first_show_timer.setSingleShot(True)
first_show_timer.setInterval(50)
show_timer = QtCore.QTimer()
show_timer.setSingleShot(True)
show_timer.setInterval(50)
first_show_timer.timeout.connect(self._on_first_show)
show_timer.timeout.connect(self._on_show)
controller.register_event_callback(
"save_as.finished",
@@ -159,7 +159,7 @@ class WorkfilesToolWindow(QtWidgets.QWidget):
self._tasks_widget = tasks_widget
self._side_panel = side_panel
self._first_show_timer = first_show_timer
self._show_timer = show_timer
self._post_init()
@@ -287,9 +287,9 @@ class WorkfilesToolWindow(QtWidgets.QWidget):
def showEvent(self, event):
super(WorkfilesToolWindow, self).showEvent(event)
self._show_timer.start()
if self._first_show:
self._first_show = False
self._first_show_timer.start()
self.setStyleSheet(style.load_stylesheet())
def keyPressEvent(self, event):
@@ -303,9 +303,8 @@ class WorkfilesToolWindow(QtWidgets.QWidget):
pass
def _on_first_show(self):
if not self._controller_refreshed:
self.refresh()
def _on_show(self):
self.refresh()
def _on_file_text_filter_change(self, text):
self._files_widget.set_text_filter(text)


@@ -103,6 +103,17 @@ class ImportLoaderModel(BaseSettingsModel):
group_name: str = SettingsField(title="Group name")
class YetiRigLoaderModel(LoaderEnabledModel):
create_cache_instance_on_load: bool = SettingsField(
title="Create Yeti Cache instance on load",
description=(
"When enabled, upon loading a Yeti Rig product a new Yeti cache "
"instance is automatically created as preparation to publishing "
"the output directly."
)
)
class LoadersModel(BaseSettingsModel):
colors: ColorsSetting = SettingsField(
default_factory=ColorsSetting,
@@ -195,8 +206,8 @@ class LoadersModel(BaseSettingsModel):
default_factory=LoaderEnabledModel,
title="Yeti Cache Loader"
)
YetiRigLoader: LoaderEnabledModel = SettingsField(
default_factory=LoaderEnabledModel,
YetiRigLoader: YetiRigLoaderModel = SettingsField(
default_factory=YetiRigLoaderModel,
title="Yeti Rig Loader"
)
@@ -266,5 +277,8 @@ DEFAULT_LOADERS_SETTING = {
"VRaySceneLoader": {"enabled": True},
"XgenLoader": {"enabled": True},
"YetiCacheLoader": {"enabled": True},
"YetiRigLoader": {"enabled": True},
"YetiRigLoader": {
"enabled": True,
"create_cache_instance_on_load": True
},
}


@@ -1,3 +1,3 @@
# -*- coding: utf-8 -*-
"""Package declaring addon version."""
__version__ = "0.1.15"
__version__ = "0.1.16"