[Automated] Merged develop into main

pypebot 2022-12-03 04:27:53 +01:00 committed by GitHub
commit 58568610eb
33 changed files with 799 additions and 372 deletions

View file

@ -0,0 +1,72 @@
import os

import bpy

from openpype.pipeline import publish
from openpype.hosts.blender.api import plugin


class ExtractAnimationABC(publish.Extractor):
    """Extract as ABC."""

    label = "Extract Animation ABC"
    hosts = ["blender"]
    families = ["animation"]
    optional = True

    def process(self, instance):
        # Define extract output file path
        stagingdir = self.staging_dir(instance)
        filename = f"{instance.name}.abc"
        filepath = os.path.join(stagingdir, filename)

        context = bpy.context

        # Perform extraction
        self.log.info("Performing extraction..")

        plugin.deselect_all()

        selected = []
        asset_group = None

        objects = []
        for obj in instance:
            if isinstance(obj, bpy.types.Collection):
                for child in obj.all_objects:
                    objects.append(child)

        for obj in objects:
            children = [o for o in bpy.data.objects if o.parent == obj]
            for child in children:
                objects.append(child)

        for obj in objects:
            obj.select_set(True)
            selected.append(obj)

        context = plugin.create_blender_context(
            active=asset_group, selected=selected)

        # We export the abc
        bpy.ops.wm.alembic_export(
            context,
            filepath=filepath,
            selected=True,
            flatten=False
        )

        plugin.deselect_all()

        if "representations" not in instance.data:
            instance.data["representations"] = []

        representation = {
            'name': 'abc',
            'ext': 'abc',
            'files': filename,
            "stagingDir": stagingdir,
        }
        instance.data["representations"].append(representation)

        self.log.info("Extracted instance '%s' to: %s",
                      instance.name, representation)
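
The second loop in this extractor appends to `objects` while iterating over it, which effectively walks every nested child of the collected objects. A minimal equivalent sketch using an explicit queue, assuming the same `bpy.data.objects` parent lookup (`roots` is a hypothetical stand-in for the objects gathered from the instance's collections):

```python
import bpy
from collections import deque

def gather_with_children(roots):
    """Breadth-first gather of `roots` plus all their parented descendants."""
    objects = list(roots)
    queue = deque(roots)
    while queue:
        obj = queue.popleft()
        # same parent lookup as in the extractor above
        children = [o for o in bpy.data.objects if o.parent == obj]
        objects.extend(children)
        queue.extend(children)
    return objects
```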

View file

@ -127,14 +127,14 @@ def get_main_window():
@contextlib.contextmanager
def suspended_refresh():
def suspended_refresh(suspend=True):
"""Suspend viewport refreshes"""
original_state = cmds.refresh(query=True, suspend=True)
try:
cmds.refresh(suspend=True)
cmds.refresh(suspend=suspend)
yield
finally:
cmds.refresh(suspend=False)
cmds.refresh(suspend=original_state)
@contextlib.contextmanager
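
The reworked context manager now takes a `suspend` flag and restores whatever suspension state was active before entering. A minimal usage sketch, assuming the function is imported from the Maya host `lib` module this hunk modifies:

```python
from maya import cmds
# assumed import path for illustration; the function lives in the Maya host lib
from openpype.hosts.maya.api.lib import suspended_refresh

with suspended_refresh(suspend=True):
    # viewport refreshes stay suspended while heavy scene edits run
    cmds.currentTime(1001, edit=True)
# on exit the original refresh suspension state is restored
```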

View file

@ -28,6 +28,7 @@ class CreatePointCache(plugin.Creator):
self.data["visibleOnly"] = False # only nodes that are visible
self.data["includeParentHierarchy"] = False # Include parent groups
self.data["worldSpace"] = True # Default to exporting world-space
self.data["refresh"] = False # Default to suspend refresh.
# Add options for custom attributes
self.data["attr"] = ""

View file

@ -115,6 +115,10 @@ class ExtractPlayblast(publish.Extractor):
else:
preset["viewport_options"] = {"imagePlane": image_plane}
# Disable Pan/Zoom.
pan_zoom = cmds.getAttr("{}.panZoomEnabled".format(preset["camera"]))
cmds.setAttr("{}.panZoomEnabled".format(preset["camera"]), False)
with lib.maintained_time():
filename = preset.get("filename", "%TEMP%")
@ -135,6 +139,8 @@ class ExtractPlayblast(publish.Extractor):
path = capture.capture(log=self.log, **preset)
cmds.setAttr("{}.panZoomEnabled".format(preset["camera"]), pan_zoom)
self.log.debug("playblast path {}".format(path))
collected_files = os.listdir(stagingdir)
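
The playblast hunk disables camera pan/zoom before capture and restores the stored value afterwards. A hedged sketch of the same pattern wrapped in a hypothetical context manager (not part of the plugin shown here), so the restore also happens if the capture raises:

```python
import contextlib
from maya import cmds

@contextlib.contextmanager
def disabled_pan_zoom(camera):
    """Temporarily disable pan/zoom on `camera`, restoring the original value."""
    attr = "{}.panZoomEnabled".format(camera)
    original = cmds.getAttr(attr)
    cmds.setAttr(attr, False)
    try:
        yield
    finally:
        cmds.setAttr(attr, original)

# usage sketch:
# with disabled_pan_zoom(preset["camera"]):
#     path = capture.capture(log=self.log, **preset)
```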

View file

@ -86,13 +86,15 @@ class ExtractAlembic(publish.Extractor):
start=start,
end=end))
with suspended_refresh():
with suspended_refresh(suspend=instance.data.get("refresh", False)):
with maintained_selection():
cmds.select(nodes, noExpand=True)
extract_alembic(file=path,
startFrame=start,
endFrame=end,
**options)
extract_alembic(
file=path,
startFrame=start,
endFrame=end,
**options
)
if "representations" not in instance.data:
instance.data["representations"] = []

View file

@ -117,6 +117,10 @@ class ExtractThumbnail(publish.Extractor):
else:
preset["viewport_options"] = {"imagePlane": image_plane}
# Disable Pan/Zoom.
pan_zoom = cmds.getAttr("{}.panZoomEnabled".format(preset["camera"]))
cmds.setAttr("{}.panZoomEnabled".format(preset["camera"]), False)
with lib.maintained_time():
# Force viewer to False in call to capture because we have our own
# viewer opening call to allow a signal to trigger between
@ -136,6 +140,7 @@ class ExtractThumbnail(publish.Extractor):
_, thumbnail = os.path.split(playblast)
cmds.setAttr("{}.panZoomEnabled".format(preset["camera"]), pan_zoom)
self.log.info("file list {}".format(thumbnail))

View file

@ -2,107 +2,150 @@
#include "OpenPypePublishInstance.h"
#include "AssetRegistryModule.h"
#include "NotificationManager.h"
#include "SNotificationList.h"
//Moves all the invalid pointers to the end to prepare them for the shrinking
#define REMOVE_INVALID_ENTRIES(VAR) VAR.CompactStable(); \
VAR.Shrink();
UOpenPypePublishInstance::UOpenPypePublishInstance(const FObjectInitializer& ObjectInitializer)
: UObject(ObjectInitializer)
: UPrimaryDataAsset(ObjectInitializer)
{
FAssetRegistryModule& AssetRegistryModule = FModuleManager::LoadModuleChecked<FAssetRegistryModule>("AssetRegistry");
FString path = UOpenPypePublishInstance::GetPathName();
const FAssetRegistryModule& AssetRegistryModule = FModuleManager::LoadModuleChecked<
FAssetRegistryModule>("AssetRegistry");
const FPropertyEditorModule& PropertyEditorModule = FModuleManager::LoadModuleChecked<FPropertyEditorModule>(
"PropertyEditor");
FString Left, Right;
GetPathName().Split("/" + GetName(), &Left, &Right);
FARFilter Filter;
Filter.PackagePaths.Add(FName(*path));
Filter.PackagePaths.Emplace(FName(Left));
AssetRegistryModule.Get().OnAssetAdded().AddUObject(this, &UOpenPypePublishInstance::OnAssetAdded);
TArray<FAssetData> FoundAssets;
AssetRegistryModule.GetRegistry().GetAssets(Filter, FoundAssets);
for (const FAssetData& AssetData : FoundAssets)
OnAssetCreated(AssetData);
REMOVE_INVALID_ENTRIES(AssetDataInternal)
REMOVE_INVALID_ENTRIES(AssetDataExternal)
AssetRegistryModule.Get().OnAssetAdded().AddUObject(this, &UOpenPypePublishInstance::OnAssetCreated);
AssetRegistryModule.Get().OnAssetRemoved().AddUObject(this, &UOpenPypePublishInstance::OnAssetRemoved);
AssetRegistryModule.Get().OnAssetRenamed().AddUObject(this, &UOpenPypePublishInstance::OnAssetRenamed);
AssetRegistryModule.Get().OnAssetUpdated().AddUObject(this, &UOpenPypePublishInstance::OnAssetUpdated);
}
void UOpenPypePublishInstance::OnAssetAdded(const FAssetData& AssetData)
void UOpenPypePublishInstance::OnAssetCreated(const FAssetData& InAssetData)
{
TArray<FString> split;
// get directory of current container
FString selfFullPath = UOpenPypePublishInstance::GetPathName();
FString selfDir = FPackageName::GetLongPackagePath(*selfFullPath);
UObject* Asset = InAssetData.GetAsset();
// get asset path and class
FString assetPath = AssetData.GetFullName();
FString assetFName = AssetData.AssetClass.ToString();
// split path
assetPath.ParseIntoArray(split, TEXT(" "), true);
FString assetDir = FPackageName::GetLongPackagePath(*split[1]);
// take interest only in paths starting with path of current container
if (assetDir.StartsWith(*selfDir))
if (!IsValid(Asset))
{
// exclude self
if (assetFName != "OpenPypePublishInstance")
UE_LOG(LogAssetData, Warning, TEXT("Asset \"%s\" is not valid! Skipping the addition."),
*InAssetData.ObjectPath.ToString());
return;
}
const bool result = IsUnderSameDir(Asset) && Cast<UOpenPypePublishInstance>(Asset) == nullptr;
if (result)
{
if (AssetDataInternal.Emplace(Asset).IsValidId())
{
assets.Add(assetPath);
UE_LOG(LogTemp, Log, TEXT("%s: asset added to %s"), *selfFullPath, *selfDir);
UE_LOG(LogTemp, Log, TEXT("Added an Asset to PublishInstance - Publish Instance: %s, Asset %s"),
*this->GetName(), *Asset->GetName());
}
}
}
void UOpenPypePublishInstance::OnAssetRemoved(const FAssetData& AssetData)
void UOpenPypePublishInstance::OnAssetRemoved(const FAssetData& InAssetData)
{
TArray<FString> split;
// get directory of current container
FString selfFullPath = UOpenPypePublishInstance::GetPathName();
FString selfDir = FPackageName::GetLongPackagePath(*selfFullPath);
// get asset path and class
FString assetPath = AssetData.GetFullName();
FString assetFName = AssetData.AssetClass.ToString();
// split path
assetPath.ParseIntoArray(split, TEXT(" "), true);
FString assetDir = FPackageName::GetLongPackagePath(*split[1]);
// take interest only in paths starting with path of current container
FString path = UOpenPypePublishInstance::GetPathName();
FString lpp = FPackageName::GetLongPackagePath(*path);
if (assetDir.StartsWith(*selfDir))
if (Cast<UOpenPypePublishInstance>(InAssetData.GetAsset()) == nullptr)
{
// exclude self
if (assetFName != "OpenPypePublishInstance")
if (AssetDataInternal.Contains(nullptr))
{
// UE_LOG(LogTemp, Warning, TEXT("%s: asset removed"), *lpp);
assets.Remove(assetPath);
AssetDataInternal.Remove(nullptr);
REMOVE_INVALID_ENTRIES(AssetDataInternal)
}
else
{
AssetDataExternal.Remove(nullptr);
REMOVE_INVALID_ENTRIES(AssetDataExternal)
}
}
}
void UOpenPypePublishInstance::OnAssetRenamed(const FAssetData& AssetData, const FString& str)
void UOpenPypePublishInstance::OnAssetUpdated(const FAssetData& InAssetData)
{
TArray<FString> split;
REMOVE_INVALID_ENTRIES(AssetDataInternal);
REMOVE_INVALID_ENTRIES(AssetDataExternal);
}
// get directory of current container
FString selfFullPath = UOpenPypePublishInstance::GetPathName();
FString selfDir = FPackageName::GetLongPackagePath(*selfFullPath);
bool UOpenPypePublishInstance::IsUnderSameDir(const UObject* InAsset) const
{
FString ThisLeft, ThisRight;
this->GetPathName().Split(this->GetName(), &ThisLeft, &ThisRight);
// get asset path and class
FString assetPath = AssetData.GetFullName();
FString assetFName = AssetData.AssetClass.ToString();
return InAsset->GetPathName().StartsWith(ThisLeft);
}
// split path
assetPath.ParseIntoArray(split, TEXT(" "), true);
#ifdef WITH_EDITOR
FString assetDir = FPackageName::GetLongPackagePath(*split[1]);
if (assetDir.StartsWith(*selfDir))
void UOpenPypePublishInstance::SendNotification(const FString& Text) const
{
FNotificationInfo Info{FText::FromString(Text)};
Info.bFireAndForget = true;
Info.bUseLargeFont = false;
Info.bUseThrobber = false;
Info.bUseSuccessFailIcons = false;
Info.ExpireDuration = 4.f;
Info.FadeOutDuration = 2.f;
FSlateNotificationManager::Get().AddNotification(Info);
UE_LOG(LogAssetData, Warning,
TEXT(
"Removed duplicated asset from the AssetsDataExternal in Container \"%s\", Asset is already included in the AssetDataInternal!"
), *GetName()
)
}
void UOpenPypePublishInstance::PostEditChangeProperty(FPropertyChangedEvent& PropertyChangedEvent)
{
Super::PostEditChangeProperty(PropertyChangedEvent);
if (PropertyChangedEvent.ChangeType == EPropertyChangeType::ValueSet &&
PropertyChangedEvent.Property->GetFName() == GET_MEMBER_NAME_CHECKED(
UOpenPypePublishInstance, AssetDataExternal))
{
// exclude self
if (assetFName != "AssetContainer")
// Check for duplicated assets
for (const auto& Asset : AssetDataInternal)
{
if (AssetDataExternal.Contains(Asset))
{
AssetDataExternal.Remove(Asset);
return SendNotification(
"You are not allowed to add assets into AssetDataExternal which are already included in AssetDataInternal!");
}
}
assets.Remove(str);
assets.Add(assetPath);
// UE_LOG(LogTemp, Warning, TEXT("%s: asset renamed %s"), *lpp, *str);
// Check if no UOpenPypePublishInstance type assets are included
for (const auto& Asset : AssetDataExternal)
{
if (Cast<UOpenPypePublishInstance>(Asset.Get()) != nullptr)
{
AssetDataExternal.Remove(Asset);
return SendNotification("You are not allowed to add publish instances!");
}
}
}
}
#endif

View file

@ -9,10 +9,10 @@ UOpenPypePublishInstanceFactory::UOpenPypePublishInstanceFactory(const FObjectIn
bEditorImport = true;
}
UObject* UOpenPypePublishInstanceFactory::FactoryCreateNew(UClass* Class, UObject* InParent, FName Name, EObjectFlags Flags, UObject* Context, FFeedbackContext* Warn)
UObject* UOpenPypePublishInstanceFactory::FactoryCreateNew(UClass* InClass, UObject* InParent, FName InName, EObjectFlags Flags, UObject* Context, FFeedbackContext* Warn)
{
UOpenPypePublishInstance* OpenPypePublishInstance = NewObject<UOpenPypePublishInstance>(InParent, Class, Name, Flags);
return OpenPypePublishInstance;
check(InClass->IsChildOf(UOpenPypePublishInstance::StaticClass()));
return NewObject<UOpenPypePublishInstance>(InParent, InClass, InName, Flags);
}
bool UOpenPypePublishInstanceFactory::ShouldShowInNewMenu() const {

View file

@ -5,17 +5,99 @@
UCLASS(Blueprintable)
class OPENPYPE_API UOpenPypePublishInstance : public UObject
class OPENPYPE_API UOpenPypePublishInstance : public UPrimaryDataAsset
{
GENERATED_BODY()
GENERATED_UCLASS_BODY()
public:
UOpenPypePublishInstance(const FObjectInitializer& ObjectInitalizer);
/**
/**
* Retrieves all the assets which are monitored by the Publish Instance (Monitors assets in the directory which is
* placed in)
*
* @return - Set of UObjects. Careful! They are returning raw pointers. Seems like an issue in UE5
*/
UFUNCTION(BlueprintCallable, BlueprintPure)
TSet<UObject*> GetInternalAssets() const
{
//For some reason it can only return Raw Pointers? Seems like an issue which they haven't fixed.
TSet<UObject*> ResultSet;
for (const auto& Asset : AssetDataInternal)
ResultSet.Add(Asset.LoadSynchronous());
return ResultSet;
}
/**
* Retrieves all the assets which have been added manually by the Publish Instance
*
* @return - TSet of assets (UObjects). Careful! They are returning raw pointers. Seems like an issue in UE5
*/
UFUNCTION(BlueprintCallable, BlueprintPure)
TSet<UObject*> GetExternalAssets() const
{
//For some reason it can only return Raw Pointers? Seems like an issue which they haven't fixed.
TSet<UObject*> ResultSet;
for (const auto& Asset : AssetDataExternal)
ResultSet.Add(Asset.LoadSynchronous());
return ResultSet;
}
/**
* Function for returning all the assets in the container combined.
*
* @return Returns all the internal and externally added assets into one set (TSet of UObjects). Careful! They are
* returning raw pointers. Seems like an issue in UE5
*
* @attention If the bAddExternalAssets variable is false, external assets won't be included!
*/
UFUNCTION(BlueprintCallable, BlueprintPure)
TSet<UObject*> GetAllAssets() const
{
const TSet<TSoftObjectPtr<UObject>>& IteratedSet = bAddExternalAssets ? AssetDataInternal.Union(AssetDataExternal) : AssetDataInternal;
//Create a new TSet only with raw pointers.
TSet<UObject*> ResultSet;
for (auto& Asset : IteratedSet)
ResultSet.Add(Asset.LoadSynchronous());
return ResultSet;
}
UPROPERTY(EditAnywhere, BlueprintReadOnly)
TArray<FString> assets;
private:
void OnAssetAdded(const FAssetData& AssetData);
void OnAssetRemoved(const FAssetData& AssetData);
void OnAssetRenamed(const FAssetData& AssetData, const FString& str);
};
UPROPERTY(VisibleAnywhere, Category="Assets")
TSet<TSoftObjectPtr<UObject>> AssetDataInternal;
/**
* This property allows exposing the array to include other assets from any other directory than what it's currently
* monitoring. NOTE: that these assets have to be added manually! They are not automatically registered or added!
*/
UPROPERTY(EditAnywhere, Category = "Assets")
bool bAddExternalAssets = false;
UPROPERTY(EditAnywhere, meta=(EditCondition="bAddExternalAssets"), Category="Assets")
TSet<TSoftObjectPtr<UObject>> AssetDataExternal;
void OnAssetCreated(const FAssetData& InAssetData);
void OnAssetRemoved(const FAssetData& InAssetData);
void OnAssetUpdated(const FAssetData& InAssetData);
bool IsUnderSameDir(const UObject* InAsset) const;
#ifdef WITH_EDITOR
void SendNotification(const FString& Text) const;
virtual void PostEditChangeProperty(FPropertyChangedEvent& PropertyChangedEvent) override;
#endif
};

View file

@ -14,6 +14,6 @@ class OPENPYPE_API UOpenPypePublishInstanceFactory : public UFactory
public:
UOpenPypePublishInstanceFactory(const FObjectInitializer& ObjectInitializer);
virtual UObject* FactoryCreateNew(UClass* Class, UObject* InParent, FName Name, EObjectFlags Flags, UObject* Context, FFeedbackContext* Warn) override;
virtual UObject* FactoryCreateNew(UClass* InClass, UObject* InParent, FName InName, EObjectFlags Flags, UObject* Context, FFeedbackContext* Warn) override;
virtual bool ShouldShowInNewMenu() const override;
};
};

View file

@ -2,107 +2,151 @@
#include "OpenPypePublishInstance.h"
#include "AssetRegistryModule.h"
#include "AssetToolsModule.h"
#include "Framework/Notifications/NotificationManager.h"
#include "SNotificationList.h"
//Moves all the invalid pointers to the end to prepare them for the shrinking
#define REMOVE_INVALID_ENTRIES(VAR) VAR.CompactStable(); \
VAR.Shrink();
UOpenPypePublishInstance::UOpenPypePublishInstance(const FObjectInitializer& ObjectInitializer)
: UObject(ObjectInitializer)
: UPrimaryDataAsset(ObjectInitializer)
{
FAssetRegistryModule& AssetRegistryModule = FModuleManager::LoadModuleChecked<FAssetRegistryModule>("AssetRegistry");
FString path = UOpenPypePublishInstance::GetPathName();
const FAssetRegistryModule& AssetRegistryModule = FModuleManager::LoadModuleChecked<
FAssetRegistryModule>("AssetRegistry");
FString Left, Right;
GetPathName().Split(GetName(), &Left, &Right);
FARFilter Filter;
Filter.PackagePaths.Add(FName(*path));
Filter.PackagePaths.Emplace(FName(Left));
AssetRegistryModule.Get().OnAssetAdded().AddUObject(this, &UOpenPypePublishInstance::OnAssetAdded);
TArray<FAssetData> FoundAssets;
AssetRegistryModule.GetRegistry().GetAssets(Filter, FoundAssets);
for (const FAssetData& AssetData : FoundAssets)
OnAssetCreated(AssetData);
REMOVE_INVALID_ENTRIES(AssetDataInternal)
REMOVE_INVALID_ENTRIES(AssetDataExternal)
AssetRegistryModule.Get().OnAssetAdded().AddUObject(this, &UOpenPypePublishInstance::OnAssetCreated);
AssetRegistryModule.Get().OnAssetRemoved().AddUObject(this, &UOpenPypePublishInstance::OnAssetRemoved);
AssetRegistryModule.Get().OnAssetRenamed().AddUObject(this, &UOpenPypePublishInstance::OnAssetRenamed);
AssetRegistryModule.Get().OnAssetUpdated().AddUObject(this, &UOpenPypePublishInstance::OnAssetUpdated);
}
void UOpenPypePublishInstance::OnAssetAdded(const FAssetData& AssetData)
void UOpenPypePublishInstance::OnAssetCreated(const FAssetData& InAssetData)
{
TArray<FString> split;
// get directory of current container
FString selfFullPath = UOpenPypePublishInstance::GetPathName();
FString selfDir = FPackageName::GetLongPackagePath(*selfFullPath);
const TObjectPtr<UObject> Asset = InAssetData.GetAsset();
// get asset path and class
FString assetPath = AssetData.GetFullName();
FString assetFName = AssetData.AssetClass.ToString();
// split path
assetPath.ParseIntoArray(split, TEXT(" "), true);
FString assetDir = FPackageName::GetLongPackagePath(*split[1]);
// take interest only in paths starting with path of current container
if (assetDir.StartsWith(*selfDir))
if (!IsValid(Asset))
{
// exclude self
if (assetFName != "OpenPypePublishInstance")
UE_LOG(LogAssetData, Warning, TEXT("Asset \"%s\" is not valid! Skipping the addition."),
*InAssetData.ObjectPath.ToString());
return;
}
const bool result = IsUnderSameDir(Asset) && Cast<UOpenPypePublishInstance>(Asset) == nullptr;
if (result)
{
if (AssetDataInternal.Emplace(Asset).IsValidId())
{
assets.Add(assetPath);
UE_LOG(LogTemp, Log, TEXT("%s: asset added to %s"), *selfFullPath, *selfDir);
UE_LOG(LogTemp, Log, TEXT("Added an Asset to PublishInstance - Publish Instance: %s, Asset %s"),
*this->GetName(), *Asset->GetName());
}
}
}
void UOpenPypePublishInstance::OnAssetRemoved(const FAssetData& AssetData)
void UOpenPypePublishInstance::OnAssetRemoved(const FAssetData& InAssetData)
{
TArray<FString> split;
// get directory of current container
FString selfFullPath = UOpenPypePublishInstance::GetPathName();
FString selfDir = FPackageName::GetLongPackagePath(*selfFullPath);
// get asset path and class
FString assetPath = AssetData.GetFullName();
FString assetFName = AssetData.AssetClass.ToString();
// split path
assetPath.ParseIntoArray(split, TEXT(" "), true);
FString assetDir = FPackageName::GetLongPackagePath(*split[1]);
// take interest only in paths starting with path of current container
FString path = UOpenPypePublishInstance::GetPathName();
FString lpp = FPackageName::GetLongPackagePath(*path);
if (assetDir.StartsWith(*selfDir))
if (Cast<UOpenPypePublishInstance>(InAssetData.GetAsset()) == nullptr)
{
// exclude self
if (assetFName != "OpenPypePublishInstance")
if (AssetDataInternal.Contains(nullptr))
{
// UE_LOG(LogTemp, Warning, TEXT("%s: asset removed"), *lpp);
assets.Remove(assetPath);
AssetDataInternal.Remove(nullptr);
REMOVE_INVALID_ENTRIES(AssetDataInternal)
}
else
{
AssetDataExternal.Remove(nullptr);
REMOVE_INVALID_ENTRIES(AssetDataExternal)
}
}
}
void UOpenPypePublishInstance::OnAssetRenamed(const FAssetData& AssetData, const FString& str)
void UOpenPypePublishInstance::OnAssetUpdated(const FAssetData& InAssetData)
{
TArray<FString> split;
REMOVE_INVALID_ENTRIES(AssetDataInternal);
REMOVE_INVALID_ENTRIES(AssetDataExternal);
}
// get directory of current container
FString selfFullPath = UOpenPypePublishInstance::GetPathName();
FString selfDir = FPackageName::GetLongPackagePath(*selfFullPath);
bool UOpenPypePublishInstance::IsUnderSameDir(const TObjectPtr<UObject>& InAsset) const
{
FString ThisLeft, ThisRight;
this->GetPathName().Split(this->GetName(), &ThisLeft, &ThisRight);
// get asset path and class
FString assetPath = AssetData.GetFullName();
FString assetFName = AssetData.AssetClass.ToString();
return InAsset->GetPathName().StartsWith(ThisLeft);
}
// split path
assetPath.ParseIntoArray(split, TEXT(" "), true);
#ifdef WITH_EDITOR
FString assetDir = FPackageName::GetLongPackagePath(*split[1]);
if (assetDir.StartsWith(*selfDir))
void UOpenPypePublishInstance::SendNotification(const FString& Text) const
{
FNotificationInfo Info{FText::FromString(Text)};
Info.bFireAndForget = true;
Info.bUseLargeFont = false;
Info.bUseThrobber = false;
Info.bUseSuccessFailIcons = false;
Info.ExpireDuration = 4.f;
Info.FadeOutDuration = 2.f;
FSlateNotificationManager::Get().AddNotification(Info);
UE_LOG(LogAssetData, Warning,
TEXT(
"Removed duplicated asset from the AssetsDataExternal in Container \"%s\", Asset is already included in the AssetDataInternal!"
), *GetName()
)
}
void UOpenPypePublishInstance::PostEditChangeProperty(FPropertyChangedEvent& PropertyChangedEvent)
{
Super::PostEditChangeProperty(PropertyChangedEvent);
if (PropertyChangedEvent.ChangeType == EPropertyChangeType::ValueSet &&
PropertyChangedEvent.Property->GetFName() == GET_MEMBER_NAME_CHECKED(
UOpenPypePublishInstance, AssetDataExternal))
{
// exclude self
if (assetFName != "AssetContainer")
{
assets.Remove(str);
assets.Add(assetPath);
// UE_LOG(LogTemp, Warning, TEXT("%s: asset renamed %s"), *lpp, *str);
// Check for duplicated assets
for (const auto& Asset : AssetDataInternal)
{
if (AssetDataExternal.Contains(Asset))
{
AssetDataExternal.Remove(Asset);
return SendNotification("You are not allowed to add assets into AssetDataExternal which are already included in AssetDataInternal!");
}
}
// Check if no UOpenPypePublishInstance type assets are included
for (const auto& Asset : AssetDataExternal)
{
if (Cast<UOpenPypePublishInstance>(Asset.Get()) != nullptr)
{
AssetDataExternal.Remove(Asset);
return SendNotification("You are not allowed to add publish instances!");
}
}
}
}
#endif

View file

@ -9,10 +9,10 @@ UOpenPypePublishInstanceFactory::UOpenPypePublishInstanceFactory(const FObjectIn
bEditorImport = true;
}
UObject* UOpenPypePublishInstanceFactory::FactoryCreateNew(UClass* Class, UObject* InParent, FName Name, EObjectFlags Flags, UObject* Context, FFeedbackContext* Warn)
UObject* UOpenPypePublishInstanceFactory::FactoryCreateNew(UClass* InClass, UObject* InParent, FName InName, EObjectFlags Flags, UObject* Context, FFeedbackContext* Warn)
{
UOpenPypePublishInstance* OpenPypePublishInstance = NewObject<UOpenPypePublishInstance>(InParent, Class, Name, Flags);
return OpenPypePublishInstance;
check(InClass->IsChildOf(UOpenPypePublishInstance::StaticClass()));
return NewObject<UOpenPypePublishInstance>(InParent, InClass, InName, Flags);
}
bool UOpenPypePublishInstanceFactory::ShouldShowInNewMenu() const {

View file

@ -1,21 +1,97 @@
#pragma once
#include "EditorTutorial.h"
#include "Engine.h"
#include "OpenPypePublishInstance.generated.h"
UCLASS(Blueprintable)
class OPENPYPE_API UOpenPypePublishInstance : public UObject
class OPENPYPE_API UOpenPypePublishInstance : public UPrimaryDataAsset
{
GENERATED_BODY()
GENERATED_UCLASS_BODY()
public:
UOpenPypePublishInstance(const FObjectInitializer& ObjectInitalizer);
/**
* Retrieves all the assets which are monitored by the Publish Instance (Monitors assets in the directory which is
* placed in)
*
* @return - Set of UObjects. Careful! They are returning raw pointers. Seems like an issue in UE5
*/
UFUNCTION(BlueprintCallable, BlueprintPure)
TSet<UObject*> GetInternalAssets() const
{
//For some reason it can only return Raw Pointers? Seems like an issue which they haven't fixed.
TSet<UObject*> ResultSet;
for (const auto& Asset : AssetDataInternal)
ResultSet.Add(Asset.LoadSynchronous());
return ResultSet;
}
/**
* Retrieves all the assets which have been added manually by the Publish Instance
*
* @return - TSet of assets (UObjects). Careful! They are returning raw pointers. Seems like an issue in UE5
*/
UFUNCTION(BlueprintCallable, BlueprintPure)
TSet<UObject*> GetExternalAssets() const
{
//For some reason it can only return Raw Pointers? Seems like an issue which they haven't fixed.
TSet<UObject*> ResultSet;
for (const auto& Asset : AssetDataExternal)
ResultSet.Add(Asset.LoadSynchronous());
return ResultSet;
}
/**
* Function for returning all the assets in the container combined.
*
* @return Returns all the internal and externally added assets into one set (TSet of UObjects). Careful! They are
* returning raw pointers. Seems like an issue in UE5
*
* @attention If the bAddExternalAssets variable is false, external assets won't be included!
*/
UFUNCTION(BlueprintCallable, BlueprintPure)
TSet<UObject*> GetAllAssets() const
{
const TSet<TSoftObjectPtr<UObject>>& IteratedSet = bAddExternalAssets ? AssetDataInternal.Union(AssetDataExternal) : AssetDataInternal;
//Create a new TSet only with raw pointers.
TSet<UObject*> ResultSet;
for (auto& Asset : IteratedSet)
ResultSet.Add(Asset.LoadSynchronous());
return ResultSet;
}
UPROPERTY(EditAnywhere, BlueprintReadOnly)
TArray<FString> assets;
private:
void OnAssetAdded(const FAssetData& AssetData);
void OnAssetRemoved(const FAssetData& AssetData);
void OnAssetRenamed(const FAssetData& AssetData, const FString& str);
};
UPROPERTY(VisibleAnywhere, Category="Assets")
TSet<TSoftObjectPtr<UObject>> AssetDataInternal;
/**
* This property allows the instance to include other assets from any other directory than what it's currently
* monitoring.
* @attention assets have to be added manually! They are not automatically registered or added!
*/
UPROPERTY(EditAnywhere, Category="Assets")
bool bAddExternalAssets = false;
UPROPERTY(EditAnywhere, Category="Assets", meta=(EditCondition="bAddExternalAssets"))
TSet<TSoftObjectPtr<UObject>> AssetDataExternal;
void OnAssetCreated(const FAssetData& InAssetData);
void OnAssetRemoved(const FAssetData& InAssetData);
void OnAssetUpdated(const FAssetData& InAssetData);
bool IsUnderSameDir(const TObjectPtr<UObject>& InAsset) const;
#ifdef WITH_EDITOR
void SendNotification(const FString& Text) const;
virtual void PostEditChangeProperty(FPropertyChangedEvent& PropertyChangedEvent) override;
#endif
};

View file

@ -14,6 +14,6 @@ class OPENPYPE_API UOpenPypePublishInstanceFactory : public UFactory
public:
UOpenPypePublishInstanceFactory(const FObjectInitializer& ObjectInitializer);
virtual UObject* FactoryCreateNew(UClass* Class, UObject* InParent, FName Name, EObjectFlags Flags, UObject* Context, FFeedbackContext* Warn) override;
virtual UObject* FactoryCreateNew(UClass* InClass, UObject* InParent, FName InName, EObjectFlags Flags, UObject* Context, FFeedbackContext* Warn) override;
virtual bool ShouldShowInNewMenu() const override;
};
};

View file

@ -598,8 +598,12 @@ class ExtractReview(pyblish.api.InstancePlugin):
if temp_data["input_is_sequence"]:
# Set start frame of input sequence (just frame in filename)
# - definition of input filepath
# - add handle start if output should be without handles
start_number = temp_data["first_sequence_frame"]
if temp_data["without_handles"] and temp_data["handles_are_set"]:
start_number += temp_data["handle_start"]
ffmpeg_input_args.extend([
"-start_number", str(temp_data["first_sequence_frame"])
"-start_number", str(start_number)
])
# TODO add fps mapping `{fps: fraction}` ?
@ -609,49 +613,50 @@ class ExtractReview(pyblish.api.InstancePlugin):
# "23.976": "24000/1001"
# }
# Add framerate to input when input is sequence
ffmpeg_input_args.append(
"-framerate {}".format(temp_data["fps"])
)
ffmpeg_input_args.extend([
"-framerate", str(temp_data["fps"])
])
# Add duration of an input sequence if output is video
if not temp_data["output_is_sequence"]:
ffmpeg_input_args.extend([
"-to", "{:0.10f}".format(duration_seconds)
])
if temp_data["output_is_sequence"]:
# Set start frame of output sequence (just frame in filename)
# - this is definition of an output
ffmpeg_output_args.append(
"-start_number {}".format(temp_data["output_frame_start"])
)
ffmpeg_output_args.extend([
"-start_number", str(temp_data["output_frame_start"])
])
# Change output's duration and start point if should not contain
# handles
start_sec = 0
if temp_data["without_handles"] and temp_data["handles_are_set"]:
# Set start time without handles
# - check if handle_start is bigger than 0 to avoid zero division
if temp_data["handle_start"] > 0:
start_sec = float(temp_data["handle_start"]) / temp_data["fps"]
ffmpeg_input_args.append("-ss {:0.10f}".format(start_sec))
# Set output duration in seconds
ffmpeg_output_args.extend([
"-t", "{:0.10}".format(duration_seconds)
])
# Set output duration in seconds
ffmpeg_output_args.append("-t {:0.10}".format(duration_seconds))
# Add -ss (start offset in seconds) if input is not sequence
if not temp_data["input_is_sequence"]:
start_sec = float(temp_data["handle_start"]) / temp_data["fps"]
# Set start time without handles
# - Skip if start sec is 0.0
if start_sec > 0.0:
ffmpeg_input_args.extend([
"-ss", "{:0.10f}".format(start_sec)
])
# Set frame range of output when input or output is sequence
elif temp_data["output_is_sequence"]:
ffmpeg_output_args.append("-frames:v {}".format(output_frames_len))
# Add duration of an input sequence if output is video
if (
temp_data["input_is_sequence"]
and not temp_data["output_is_sequence"]
):
ffmpeg_input_args.append("-to {:0.10f}".format(
duration_seconds + start_sec
))
ffmpeg_output_args.extend([
"-frames:v", str(output_frames_len)
])
# Add video/image input path
ffmpeg_input_args.append(
"-i {}".format(
path_to_subprocess_arg(temp_data["full_input_path"])
)
)
ffmpeg_input_args.extend([
"-i", path_to_subprocess_arg(temp_data["full_input_path"])
])
# Add audio arguments if there are any. Skipped when output are images.
if not temp_data["output_ext_is_image"] and temp_data["with_audio"]:
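
The change above switches from single formatted strings to separate list items per ffmpeg flag and value. A minimal sketch of how such argument lists could be joined into a subprocess call; the paths and values are hypothetical, the real plugin derives them from `temp_data` and the output definition:

```python
import subprocess

ffmpeg_input_args = [
    "-start_number", "1001",              # first frame of the input sequence
    "-framerate", "25",
    "-i", "C:/renders/shot010.%04d.exr",  # hypothetical input sequence
]
ffmpeg_output_args = [
    "-t", "4.0000000000",                 # output duration in seconds
    "C:/publish/shot010_review.mp4",      # hypothetical output path
]

args = ["ffmpeg"] + ffmpeg_input_args + ffmpeg_output_args
subprocess.run(args, check=True)
```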

View file

@ -1,5 +1,15 @@
Automatic tests for OpenPype
============================
Requirements:
============
Tests recreate a fresh DB for each run, so the `mongorestore`, `mongodump` and `mongoimport` command line tools must be installed and available on PATH.
You can find installers here: https://www.mongodb.com/docs/database-tools/installation/installation/
You can verify that `mongorestore` is available by running this in a console or cmd:
```mongorestore --version```
Structure:
- integration - end-to-end tests, slow (see README.md in the integration folder for more info)
- openpype/modules/MODULE_NAME - structure follows the directory structure in the code base
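
For reference, the integration suite described here is launched through OpenPype's `runtests` command; the same invocation appears in the Nuke test docstring further below, e.g.:

```
{OPENPYPE_ROOT}/.venv/Scripts/python.exe {OPENPYPE_ROOT}/start.py runtests ../tests/integration/hosts/nuke
```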

View file

@ -43,3 +43,15 @@ def app_variant(request):
@pytest.fixture(scope="module")
def timeout(request):
return request.config.getoption("--timeout")
@pytest.hookimpl(tryfirst=True, hookwrapper=True)
def pytest_runtest_makereport(item, call):
# execute all other hooks to obtain the report object
outcome = yield
rep = outcome.get_result()
# set a report attribute for each phase of a call, which can
# be "setup", "call", "teardown"
setattr(item, "rep_" + rep.when, rep)
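
The hookwrapper stores each phase's report on the test item; fixtures can then inspect it after the test finishes (the `is_test_failed` helper in `testing_classes.py` below does exactly this). A minimal, hypothetical fixture illustrating the pattern:

```python
import pytest

@pytest.fixture
def keep_output_on_failure(request):
    yield
    # "rep_call" was attached by pytest_runtest_makereport above
    report = getattr(request.node, "rep_call", None)
    if report is not None and report.failed:
        print("Test {} failed - keeping its temporary data".format(
            request.node.name))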

View file

@ -2,10 +2,13 @@ import os
import pytest
import shutil
from tests.lib.testing_classes import HostFixtures
from tests.lib.testing_classes import (
HostFixtures,
PublishTest,
)
class AfterEffectsTestClass(HostFixtures):
class AEHostFixtures(HostFixtures):
@pytest.fixture(scope="module")
def last_workfile_path(self, download_test_data, output_folder_url):
"""Get last_workfile_path from source data.
@ -15,15 +18,15 @@ class AfterEffectsTestClass(HostFixtures):
src_path = os.path.join(download_test_data,
"input",
"workfile",
"test_project_test_asset_TestTask_v001.aep")
dest_folder = os.path.join(download_test_data,
"test_project_test_asset_test_task_v001.aep")
dest_folder = os.path.join(output_folder_url,
self.PROJECT,
self.ASSET,
"work",
self.TASK)
os.makedirs(dest_folder)
dest_path = os.path.join(dest_folder,
"test_project_test_asset_TestTask_v001.aep")
"test_project_test_asset_test_task_v001.aep")
shutil.copy(src_path, dest_path)
yield dest_path
@ -32,3 +35,12 @@ class AfterEffectsTestClass(HostFixtures):
def startup_scripts(self, monkeypatch_session, download_test_data):
"""Points Maya to userSetup file from input data"""
pass
@pytest.fixture(scope="module")
def skip_compare_folders(self):
# skip folders that contain "Logs"; these come only from Deadline
return ["Logs", "Auto-Save"]
class AELocalPublishTestClass(AEHostFixtures, PublishTest):
"""Testing class for local publishes."""

View file

@ -1,14 +1,16 @@
import logging
from tests.lib.assert_classes import DBAssert
from tests.integration.hosts.aftereffects.lib import AfterEffectsTestClass
from tests.integration.hosts.aftereffects.lib import AELocalPublishTestClass
log = logging.getLogger("test_publish_in_aftereffects")
class TestPublishInAfterEffects(AfterEffectsTestClass):
class TestPublishInAfterEffects(AELocalPublishTestClass):
"""Basic test case for publishing in AfterEffects
Uses old Pyblish schema of created instances.
Uses generic TestCase to prepare fixtures for test data, testing DBs,
env vars.
@ -27,15 +29,15 @@ class TestPublishInAfterEffects(AfterEffectsTestClass):
PERSIST = False
TEST_FILES = [
("1c8261CmHwyMgS-g7S4xL5epAp0jCBmhf",
"test_aftereffects_publish.zip",
("1jqI_uG2NusKFvZZF7C0ScHjxFJrlc9F-",
"test_aftereffects_publish_legacy.zip",
"")
]
APP = "aftereffects"
APP_GROUP = "aftereffects"
APP_VARIANT = ""
APP_NAME = "{}/{}".format(APP, APP_VARIANT)
APP_NAME = "{}/{}".format(APP_GROUP, APP_VARIANT)
TIMEOUT = 120 # publish timeout
@ -49,23 +51,37 @@ class TestPublishInAfterEffects(AfterEffectsTestClass):
failures.append(
DBAssert.count_of_types(dbcon, "version", 0, name={"$ne": 1}))
failures.append(
DBAssert.count_of_types(dbcon, "subset", 1,
name="imageMainBackgroundcopy"))
failures.append(
DBAssert.count_of_types(dbcon, "subset", 1,
name="workfileTest_task"))
failures.append(
DBAssert.count_of_types(dbcon, "subset", 1,
name="reviewTesttask"))
name="renderTest_taskMain"))
failures.append(
DBAssert.count_of_types(dbcon, "representation", 4))
additional_args = {"context.subset": "renderTestTaskDefault",
additional_args = {"context.subset": "workfileTest_task",
"context.ext": "aep"}
failures.append(
DBAssert.count_of_types(dbcon, "representation", 1,
additional_args=additional_args))
additional_args = {"context.subset": "renderTest_taskMain",
"context.ext": "png"}
failures.append(
DBAssert.count_of_types(dbcon, "representation", 2,
additional_args=additional_args))
additional_args = {"context.subset": "renderTest_taskMain",
"name": "thumbnail"}
failures.append(
DBAssert.count_of_types(dbcon, "representation", 1,
additional_args=additional_args))
additional_args = {"context.subset": "renderTest_taskMain",
"name": "png_png"}
failures.append(
DBAssert.count_of_types(dbcon, "representation", 1,
additional_args=additional_args))

View file

@ -1,64 +0,0 @@
import logging
from tests.lib.assert_classes import DBAssert
from tests.integration.hosts.aftereffects.lib import AfterEffectsTestClass
log = logging.getLogger("test_publish_in_aftereffects")
class TestPublishInAfterEffects(AfterEffectsTestClass):
"""Basic test case for publishing in AfterEffects
Should publish 5 frames
"""
PERSIST = True
TEST_FILES = [
("12aSDRjthn4X3yw83gz_0FZJcRRiVDEYT",
"test_aftereffects_publish_multiframe.zip",
"")
]
APP = "aftereffects"
APP_VARIANT = ""
APP_NAME = "{}/{}".format(APP, APP_VARIANT)
TIMEOUT = 120 # publish timeout
def test_db_asserts(self, dbcon, publish_finished):
"""Host and input data dependent expected results in DB."""
print("test_db_asserts")
failures = []
failures.append(DBAssert.count_of_types(dbcon, "version", 2))
failures.append(
DBAssert.count_of_types(dbcon, "version", 0, name={"$ne": 1}))
failures.append(
DBAssert.count_of_types(dbcon, "subset", 1,
name="imageMainBackgroundcopy"))
failures.append(
DBAssert.count_of_types(dbcon, "subset", 1,
name="workfileTest_task"))
failures.append(
DBAssert.count_of_types(dbcon, "subset", 1,
name="reviewTesttask"))
failures.append(
DBAssert.count_of_types(dbcon, "representation", 4))
additional_args = {"context.subset": "renderTestTaskDefault",
"context.ext": "png"}
failures.append(
DBAssert.count_of_types(dbcon, "representation", 1,
additional_args=additional_args))
assert not any(failures)
if __name__ == "__main__":
test_case = TestPublishInAfterEffects()

View file

@ -2,10 +2,13 @@ import os
import pytest
import shutil
from tests.lib.testing_classes import HostFixtures
from tests.lib.testing_classes import (
HostFixtures,
PublishTest,
)
class MayaTestClass(HostFixtures):
class MayaHostFixtures(HostFixtures):
@pytest.fixture(scope="module")
def last_workfile_path(self, download_test_data, output_folder_url):
"""Get last_workfile_path from source data.
@ -15,7 +18,7 @@ class MayaTestClass(HostFixtures):
src_path = os.path.join(download_test_data,
"input",
"workfile",
"test_project_test_asset_TestTask_v001.mb")
"test_project_test_asset_test_task_v001.mb")
dest_folder = os.path.join(output_folder_url,
self.PROJECT,
self.ASSET,
@ -23,7 +26,7 @@ class MayaTestClass(HostFixtures):
self.TASK)
os.makedirs(dest_folder)
dest_path = os.path.join(dest_folder,
"test_project_test_asset_TestTask_v001.mb")
"test_project_test_asset_test_task_v001.mb")
shutil.copy(src_path, dest_path)
yield dest_path
@ -39,3 +42,11 @@ class MayaTestClass(HostFixtures):
"{}{}{}".format(startup_path,
os.pathsep,
original_pythonpath))
@pytest.fixture(scope="module")
def skip_compare_folders(self):
yield []
class MayaLocalPublishTestClass(MayaHostFixtures, PublishTest):
"""Testing class for local publishes."""

View file

@ -1,7 +1,8 @@
from tests.integration.hosts.maya.lib import MayaTestClass
from tests.lib.assert_classes import DBAssert
from tests.integration.hosts.maya.lib import MayaLocalPublishTestClass
class TestPublishInMaya(MayaTestClass):
class TestPublishInMaya(MayaLocalPublishTestClass):
"""Basic test case for publishing in Maya
Shouldn't be run standalone, only via the 'runtests' pype command! (??)
@ -28,7 +29,7 @@ class TestPublishInMaya(MayaTestClass):
("1BTSIIULJTuDc8VvXseuiJV_fL6-Bu7FP", "test_maya_publish.zip", "")
]
APP = "maya"
APP_GROUP = "maya"
# keep empty to locate latest installed variant or explicit
APP_VARIANT = ""
@ -37,33 +38,41 @@ class TestPublishInMaya(MayaTestClass):
def test_db_asserts(self, dbcon, publish_finished):
"""Host and input data dependent expected results in DB."""
print("test_db_asserts")
assert 5 == dbcon.count_documents({"type": "version"}), \
"Not expected no of versions"
failures = []
failures.append(DBAssert.count_of_types(dbcon, "version", 2))
assert 0 == dbcon.count_documents({"type": "version",
"name": {"$ne": 1}}), \
"Only versions with 1 expected"
failures.append(
DBAssert.count_of_types(dbcon, "version", 0, name={"$ne": 1}))
assert 1 == dbcon.count_documents({"type": "subset",
"name": "modelMain"}), \
"modelMain subset must be present"
failures.append(
DBAssert.count_of_types(dbcon, "subset", 1,
name="modelMain"))
assert 1 == dbcon.count_documents({"type": "subset",
"name": "workfileTest_task"}), \
"workfileTest_task subset must be present"
failures.append(
DBAssert.count_of_types(dbcon, "subset", 1,
name="workfileTest_task"))
assert 11 == dbcon.count_documents({"type": "representation"}), \
"Not expected no of representations"
failures.append(DBAssert.count_of_types(dbcon, "representation", 5))
assert 2 == dbcon.count_documents({"type": "representation",
"context.subset": "modelMain",
"context.ext": "abc"}), \
"Not expected no of representations with ext 'abc'"
additional_args = {"context.subset": "modelMain",
"context.ext": "abc"}
failures.append(
DBAssert.count_of_types(dbcon, "representation", 2,
additional_args=additional_args))
assert 2 == dbcon.count_documents({"type": "representation",
"context.subset": "modelMain",
"context.ext": "ma"}), \
"Not expected no of representations with ext 'abc'"
additional_args = {"context.subset": "modelMain",
"context.ext": "ma"}
failures.append(
DBAssert.count_of_types(dbcon, "representation", 2,
additional_args=additional_args))
additional_args = {"context.subset": "workfileTest_task",
"context.ext": "mb"}
failures.append(
DBAssert.count_of_types(dbcon, "representation", 1,
additional_args=additional_args))
assert not any(failures)
if __name__ == "__main__":

View file

@ -1,17 +1,20 @@
import os
import pytest
import shutil
import re
from tests.lib.testing_classes import HostFixtures
from tests.lib.testing_classes import (
HostFixtures,
PublishTest,
)
class NukeTestClass(HostFixtures):
class NukeHostFixtures(HostFixtures):
@pytest.fixture(scope="module")
def last_workfile_path(self, download_test_data, output_folder_url):
"""Get last_workfile_path from source data.
"""
source_file_name = "test_project_test_asset_CompositingInNuke_v001.nk"
source_file_name = "test_project_test_asset_test_task_v001.nk"
src_path = os.path.join(download_test_data,
"input",
"workfile",
@ -27,7 +30,16 @@ class NukeTestClass(HostFixtures):
dest_path = os.path.join(dest_folder,
source_file_name)
shutil.copy(src_path, dest_path)
# rewrite old root with temporary file
# TODO - using only C:/projects seems wrong - but where to get root ?
replace_pattern = re.compile(re.escape("C:/projects"), re.IGNORECASE)
with open(src_path, "r") as fp:
updated = fp.read()
updated = replace_pattern.sub(output_folder_url.replace("\\", '/'),
updated)
with open(dest_path, "w") as fp:
fp.write(updated)
yield dest_path
@ -41,4 +53,12 @@ class NukeTestClass(HostFixtures):
monkeypatch_session.setenv("NUKE_PATH",
"{}{}{}".format(startup_path,
os.pathsep,
original_nuke_path))
original_nuke_path))
@pytest.fixture(scope="module")
def skip_compare_folders(self):
yield ["renders"]
class NukeLocalPublishTestClass(NukeHostFixtures, PublishTest):
"""Testing class for local publishes."""

View file

@ -1,17 +1,25 @@
import logging
from tests.lib.assert_classes import DBAssert
from tests.integration.hosts.nuke.lib import NukeTestClass
from tests.integration.hosts.nuke.lib import NukeLocalPublishTestClass
log = logging.getLogger("test_publish_in_nuke")
class TestPublishInNuke(NukeTestClass):
class TestPublishInNuke(NukeLocalPublishTestClass):
"""Basic test case for publishing in Nuke
Uses generic TestCase to prepare fixtures for test data, testing DBs,
env vars.
!!!
It expects a modified path in the WriteNode:
use '[python {nuke.script_directory()}]' instead of the regular root
dir (e.g. instead of `c:/projects/test_project/test_asset/test_task`).
Access the file path by selecting the WriteNode group, pressing CTRL+Enter,
and updating the file input.
!!!
Opens Nuke, runs publish on the prepared workfile.
Then checks content of DB (if subset, version, representations were
@ -20,7 +28,8 @@ class TestPublishInNuke(NukeTestClass):
How to run:
(in cmd with activated {OPENPYPE_ROOT}/.venv)
{OPENPYPE_ROOT}/.venv/Scripts/python.exe {OPENPYPE_ROOT}/start.py runtests ../tests/integration/hosts/nuke # noqa: E501
{OPENPYPE_ROOT}/.venv/Scripts/python.exe {OPENPYPE_ROOT}/start.py
runtests ../tests/integration/hosts/nuke # noqa: E501
To check log/errors from launched app's publish process keep PERSIST
to True and check `test_openpype.logs` collection.
@ -30,14 +39,14 @@ class TestPublishInNuke(NukeTestClass):
("1SUurHj2aiQ21ZIMJfGVBI2KjR8kIjBGI", "test_Nuke_publish.zip", "")
]
APP = "nuke"
APP_GROUP = "nuke"
TIMEOUT = 120 # publish timeout
TIMEOUT = 50 # publish timeout
# could be overwritten by command line arguments
# keep empty to locate latest installed variant or explicit
APP_VARIANT = ""
PERSIST = True # True - keep test_db, test_openpype, outputted test files
PERSIST = False # True - keep test_db, test_openpype, outputted test files
TEST_DATA_FOLDER = None
def test_db_asserts(self, dbcon, publish_finished):
@ -52,7 +61,7 @@ class TestPublishInNuke(NukeTestClass):
failures.append(
DBAssert.count_of_types(dbcon, "subset", 1,
name="renderCompositingInNukeMain"))
name="renderTest_taskMain"))
failures.append(
DBAssert.count_of_types(dbcon, "subset", 1,
@ -61,7 +70,7 @@ class TestPublishInNuke(NukeTestClass):
failures.append(
DBAssert.count_of_types(dbcon, "representation", 4))
additional_args = {"context.subset": "renderCompositingInNukeMain",
additional_args = {"context.subset": "renderTest_taskMain",
"context.ext": "exr"}
failures.append(
DBAssert.count_of_types(dbcon, "representation", 1,

View file

@ -2,10 +2,13 @@ import os
import pytest
import shutil
from tests.lib.testing_classes import HostFixtures
from tests.lib.testing_classes import (
HostFixtures,
PublishTest
)
class PhotoshopTestClass(HostFixtures):
class PhotoshopTestClass(HostFixtures, PublishTest):
@pytest.fixture(scope="module")
def last_workfile_path(self, download_test_data, output_folder_url):
"""Get last_workfile_path from source data.
@ -32,3 +35,7 @@ class PhotoshopTestClass(HostFixtures):
def startup_scripts(self, monkeypatch_session, download_test_data):
"""Points Maya to userSetup file from input data"""
pass
@pytest.fixture(scope="module")
def skip_compare_folders(self):
yield []

View file

@ -41,11 +41,11 @@ class TestPublishInPhotoshop(PhotoshopTestClass):
("1zD2v5cBgkyOm_xIgKz3WKn8aFB_j8qC-", "test_photoshop_publish.zip", "")
]
APP = "photoshop"
APP_GROUP = "photoshop"
# keep empty to locate latest installed variant or explicit
APP_VARIANT = ""
APP_NAME = "{}/{}".format(APP, APP_VARIANT)
APP_NAME = "{}/{}".format(APP_GROUP, APP_VARIANT)
TIMEOUT = 120 # publish timeout
@ -72,7 +72,7 @@ class TestPublishInPhotoshop(PhotoshopTestClass):
name="workfileTest_task"))
failures.append(
DBAssert.count_of_types(dbcon, "representation", 8))
DBAssert.count_of_types(dbcon, "representation", 6))
additional_args = {"context.subset": "imageMainForeground",
"context.ext": "png"}

View file

@ -118,9 +118,8 @@ class DBHandler:
"Run with overwrite=True")
else:
if collection:
coll = self.client[db_name_out].get(collection)
if coll:
coll.drop()
if collection in self.client[db_name_out].list_collection_names(): # noqa
self.client[db_name_out][collection].drop()
else:
self.teardown(db_name_out)
@ -133,7 +132,11 @@ class DBHandler:
db_name=db_name, db_name_out=db_name_out,
collection=collection)
print("mongorestore query:: {}".format(query))
subprocess.run(query)
try:
subprocess.run(query)
except FileNotFoundError:
raise RuntimeError("'mongorestore' utility must be on path. "
"Please install it.")
def teardown(self, db_name):
"""Drops 'db_name' if exists."""
@ -231,13 +234,15 @@ class DBHandler:
# Examples
# handler = DBHandler(uri="mongodb://localhost:27017")
# #
# backup_dir = "c:\\projects\\test_nuke_publish\\input\\dumps"
# backup_dir = "c:\\projects\\test_zips\\test_nuke_deadline_publish\\input\\dumps" # noqa
# # #
# handler.backup_to_dump("avalon", backup_dir, True, collection="test_project")
# handler.setup_from_dump("test_db", backup_dir, True, db_name_out="avalon", collection="test_project")
# handler.setup_from_sql_file("test_db", "c:\\projects\\sql\\item.sql",
# handler.backup_to_dump("avalon_tests", backup_dir, True, collection="test_project") # noqa
#handler.backup_to_dump("openpype_tests", backup_dir, True, collection="settings") # noqa
# handler.setup_from_dump("avalon_tests", backup_dir, True, db_name_out="avalon_tests", collection="test_project") # noqa
# handler.setup_from_sql_file("avalon_tests", "c:\\projects\\sql\\item.sql",
# collection="test_project",
# drop=False, mode="upsert")
# handler.setup_from_sql("test_db", "c:\\projects\\sql",
# handler.setup_from_sql("avalon_tests", "c:\\projects\\sql",
# collection="test_project",
# drop=False, mode="upsert")
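
The `FileNotFoundError` handling above only catches a missing `mongorestore` at call time; a small, hypothetical pre-flight check (not part of `DBHandler`) could verify all required MongoDB tools up front:

```python
import shutil

def require_mongo_tools():
    """Raise early if the MongoDB command line tools are not on PATH."""
    for tool in ("mongorestore", "mongodump", "mongoimport"):
        if shutil.which(tool) is None:
            raise RuntimeError(
                "'{}' utility must be on path. Please install it.".format(tool))
```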

View file

@ -8,6 +8,7 @@ import tempfile
import shutil
import glob
import platform
import re
from tests.lib.db_handler import DBHandler
from common.openpype_common.distribution.file_handler import RemoteFileHandler
@ -36,9 +37,9 @@ class ModuleUnitTest(BaseTest):
PERSIST = False # True to not purge temporary folder nor test DB
TEST_OPENPYPE_MONGO = "mongodb://localhost:27017"
TEST_DB_NAME = "test_db"
TEST_DB_NAME = "avalon_tests"
TEST_PROJECT_NAME = "test_project"
TEST_OPENPYPE_NAME = "test_openpype"
TEST_OPENPYPE_NAME = "openpype_tests"
TEST_FILES = []
@ -57,7 +58,7 @@ class ModuleUnitTest(BaseTest):
m.undo()
@pytest.fixture(scope="module")
def download_test_data(self, test_data_folder, persist=False):
def download_test_data(self, test_data_folder, persist, request):
test_data_folder = test_data_folder or self.TEST_DATA_FOLDER
if test_data_folder:
print("Using existing folder {}".format(test_data_folder))
@ -78,7 +79,8 @@ class ModuleUnitTest(BaseTest):
print("Temporary folder created:: {}".format(tmpdir))
yield tmpdir
persist = persist or self.PERSIST
persist = (persist or self.PERSIST or
self.is_test_failed(request))
if not persist:
print("Removing {}".format(tmpdir))
shutil.rmtree(tmpdir)
@ -125,7 +127,8 @@ class ModuleUnitTest(BaseTest):
monkeypatch_session.setenv("TEST_SOURCE_FOLDER", download_test_data)
@pytest.fixture(scope="module")
def db_setup(self, download_test_data, env_var, monkeypatch_session):
def db_setup(self, download_test_data, env_var, monkeypatch_session,
request):
"""Restore prepared MongoDB dumps into selected DB."""
backup_dir = os.path.join(download_test_data, "input", "dumps")
@ -135,13 +138,14 @@ class ModuleUnitTest(BaseTest):
overwrite=True,
db_name_out=self.TEST_DB_NAME)
db_handler.setup_from_dump("openpype", backup_dir,
db_handler.setup_from_dump(self.TEST_OPENPYPE_NAME, backup_dir,
overwrite=True,
db_name_out=self.TEST_OPENPYPE_NAME)
yield db_handler
if not self.PERSIST:
persist = self.PERSIST or self.is_test_failed(request)
if not persist:
db_handler.teardown(self.TEST_DB_NAME)
db_handler.teardown(self.TEST_OPENPYPE_NAME)
@ -166,6 +170,13 @@ class ModuleUnitTest(BaseTest):
mongo_client = OpenPypeMongoConnection.get_mongo_client()
yield mongo_client[self.TEST_OPENPYPE_NAME]["settings"]
def is_test_failed(self, request):
# if request.node doesn't have rep_call, something failed
try:
return request.node.rep_call.failed
except AttributeError:
return True
class PublishTest(ModuleUnitTest):
"""Test class for publishing in hosts.
@ -188,7 +199,7 @@ class PublishTest(ModuleUnitTest):
TODO: implement test on file size, file content
"""
APP = ""
APP_GROUP = ""
TIMEOUT = 120 # publish timeout
@ -210,10 +221,10 @@ class PublishTest(ModuleUnitTest):
if not app_variant:
variant = (
application_manager.find_latest_available_variant_for_group(
self.APP))
self.APP_GROUP))
app_variant = variant.name
yield "{}/{}".format(self.APP, app_variant)
yield "{}/{}".format(self.APP_GROUP, app_variant)
@pytest.fixture(scope="module")
def output_folder_url(self, download_test_data):
@ -310,7 +321,8 @@ class PublishTest(ModuleUnitTest):
yield True
def test_folder_structure_same(self, dbcon, publish_finished,
download_test_data, output_folder_url):
download_test_data, output_folder_url,
skip_compare_folders):
"""Check if expected and published subfolders contain same files.
Compares only presence, not size nor content!
@ -328,12 +340,33 @@ class PublishTest(ModuleUnitTest):
glob.glob(expected_dir_base + "\\**", recursive=True)
if f != expected_dir_base and os.path.exists(f))
not_matched = expected.symmetric_difference(published)
assert not not_matched, "Missing {} files".format(
"\n".join(sorted(not_matched)))
filtered_published = self._filter_files(published,
skip_compare_folders)
# filter out temp files also in expected
# could be polluted by accident by copying 'output' to zip file
filtered_expected = self._filter_files(expected, skip_compare_folders)
not_mtched = filtered_expected.symmetric_difference(filtered_published)
if not_mtched:
raise AssertionError("Missing {} files".format(
"\n".join(sorted(not_mtched))))
def _filter_files(self, source_files, skip_compare_folders):
"""Filter list of files according to regex pattern."""
filtered = set()
for file_path in source_files:
if skip_compare_folders:
if not any([re.search(val, file_path)
for val in skip_compare_folders]):
filtered.add(file_path)
else:
filtered.add(file_path)
return filtered
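
For illustration, this is how the regex filtering used by `_filter_files` behaves with the `skip_compare_folders` patterns the After Effects fixture yields (`["Logs", "Auto-Save"]`); the file paths are hypothetical:

```python
import re

published = {
    "C:/output/test_project/render/beauty.0001.exr",
    "C:/output/test_project/Logs/job_report.txt",
}
skip_compare_folders = ["Logs", "Auto-Save"]

filtered = {
    path for path in published
    if not any(re.search(pattern, path) for pattern in skip_compare_folders)
}
# only the render file remains; anything matching "Logs" or "Auto-Save" is skipped
```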
class HostFixtures(PublishTest):
class HostFixtures():
"""Host specific fixtures. Should be implemented once per host."""
@pytest.fixture(scope="module")
def last_workfile_path(self, download_test_data, output_folder_url):
@ -344,3 +377,8 @@ class HostFixtures(PublishTest):
def startup_scripts(self, monkeypatch_session, download_test_data):
""""Adds init scripts (like userSetup) to expected location"""
raise NotImplementedError
@pytest.fixture(scope="module")
def skip_compare_folders(self):
"""Use list of regexs to filter out published folders from comparing"""
raise NotImplementedError

Binary file not shown.

View file

@ -33,11 +33,11 @@ def test_openpype_version(printer):
assert str(v2) == "1.2.3-x"
assert v1 > v2
v3 = OpenPypeVersion(1, 2, 3, staging=True)
assert str(v3) == "1.2.3+staging"
v3 = OpenPypeVersion(1, 2, 3)
assert str(v3) == "1.2.3"
v4 = OpenPypeVersion(1, 2, 3, staging="True", prerelease="rc.1")
assert str(v4) == "1.2.3-rc.1+staging"
v4 = OpenPypeVersion(1, 2, 3, prerelease="rc.1")
assert str(v4) == "1.2.3-rc.1"
assert v3 > v4
assert v1 > v4
assert v4 < OpenPypeVersion(1, 2, 3, prerelease="rc.1")
@ -73,7 +73,7 @@ def test_openpype_version(printer):
OpenPypeVersion(4, 8, 10),
OpenPypeVersion(4, 8, 20),
OpenPypeVersion(4, 8, 9),
OpenPypeVersion(1, 2, 3, staging=True),
OpenPypeVersion(1, 2, 3),
OpenPypeVersion(1, 2, 3, build="foo")
]
res = sorted(sort_versions)
@ -104,27 +104,26 @@ def test_openpype_version(printer):
with pytest.raises(ValueError):
_ = OpenPypeVersion(version="booobaa")
v11 = OpenPypeVersion(version="4.6.7-foo+staging")
v11 = OpenPypeVersion(version="4.6.7-foo")
assert v11.major == 4
assert v11.minor == 6
assert v11.patch == 7
assert v11.staging is True
assert v11.prerelease == "foo"
def test_get_main_version():
ver = OpenPypeVersion(1, 2, 3, staging=True, prerelease="foo")
ver = OpenPypeVersion(1, 2, 3, prerelease="foo")
assert ver.get_main_version() == "1.2.3"
def test_get_version_path_from_list():
versions = [
OpenPypeVersion(1, 2, 3, path=Path('/foo/bar')),
OpenPypeVersion(3, 4, 5, staging=True, path=Path("/bar/baz")),
OpenPypeVersion(3, 4, 5, path=Path("/bar/baz")),
OpenPypeVersion(6, 7, 8, prerelease="x", path=Path("boo/goo"))
]
path = BootstrapRepos.get_version_path_from_list(
"3.4.5+staging", versions)
"3.4.5", versions)
assert path == Path("/bar/baz")
@ -362,12 +361,15 @@ def test_find_openpype(fix_bootstrap, tmp_path_factory, monkeypatch, printer):
result = fix_bootstrap.find_openpype(include_zips=True)
# we should have results as file were created
assert result is not None, "no OpenPype version found"
# latest item in `result` should be latest version found.
# latest item in `result` should be the latest version found.
# this will be `7.2.10-foo+staging` even with *staging* in it, since we've
# dropped the logic that handled staging separately, and in alphabetical
# sorting it comes after `strange`.
expected_path = Path(
d_path / "{}{}{}".format(
test_versions_2[3].prefix,
test_versions_2[3].version,
test_versions_2[3].suffix
test_versions_2[4].prefix,
test_versions_2[4].version,
test_versions_2[4].suffix
)
)
assert result, "nothing found"
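
A short sketch of the comparison semantics the updated tests assert, using constructor arguments taken from the calls above (the import path is an assumption based on the `BootstrapRepos` usage in this test file):

```python
from igniter.bootstrap_repos import OpenPypeVersion  # assumed import path

v_release = OpenPypeVersion(1, 2, 3)
v_rc = OpenPypeVersion(1, 2, 3, prerelease="rc.1")

assert str(v_rc) == "1.2.3-rc.1"
assert v_release > v_rc  # a prerelease sorts below the matching release
```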

View file

@ -112,4 +112,6 @@ $mongoPath = Find-Mongo $preferred_version
Write-Color -Text ">>> ", "Using DB path: ", "[ ", "$($dbpath)", " ]" -Color Green, Gray, Cyan, White, Cyan
Write-Color -Text ">>> ", "Port: ", "[ ", "$($port)", " ]" -Color Green, Gray, Cyan, White, Cyan
New-Item -ItemType Directory -Force -Path $($dbpath)
Start-Process -FilePath $mongopath "--dbpath $($dbpath) --port $($port)" -PassThru | Out-Null

View file

@ -51,7 +51,9 @@ development tools like [CMake](https://cmake.org/) and [Visual Studio](https://v
#### Run from source
For development purposes it is possible to run OpenPype directly from the source. We provide a simple launcher script for this.
For development purposes it is possible to run OpenPype directly from the source. We provide a simple launcher script for this. To run the PowerShell scripts, you may have to enable unrestricted execution as administrator:
`Set-ExecutionPolicy -ExecutionPolicy unrestricted`
To start OpenPype from source you need to

View file

@ -55,7 +55,7 @@ To run mongoDB on server, use your server distribution tools to set it up (on Li
## Python
**Python 3.7.8** is the recommended version to use (as per [VFX platform CY2021](https://vfxplatform.com/)).
**Python 3.7.9** is the recommended version to use (as per [VFX platform CY2021](https://vfxplatform.com/)).
If you're planning to run OpenPype on workstations from built executables (highly recommended), you will only need Python for building and development. However, if you'd like to run from source centrally, every user will need Python installed.