Mirror of https://github.com/ynput/ayon-core.git
Synced 2025-12-24 21:04:40 +01:00

Merge branch 'develop' into enhancement/OP-6020_Nuke-custom-setFrameRange-script

Commit 3d4b1bc1a4
32 changed files with 515 additions and 93 deletions

.github/ISSUE_TEMPLATE/bug_report.yml (vendored, 2 changes)
@@ -35,6 +35,7 @@ body:
      label: Version
      description: What version are you running? Look to OpenPype Tray
      options:
        - 3.15.9-nightly.1
        - 3.15.8
        - 3.15.8-nightly.3
        - 3.15.8-nightly.2

@@ -134,7 +135,6 @@ body:
        - 3.14.2-nightly.5
        - 3.14.2-nightly.4
        - 3.14.2-nightly.3
        - 3.14.2-nightly.2
    validations:
      required: true
  - type: dropdown

@@ -14,10 +14,10 @@ AppId={{B9E9DF6A-5BDA-42DD-9F35-C09D564C4D93}
AppName={#MyAppName}
AppVersion={#AppVer}
AppVerName={#MyAppName} version {#AppVer}
AppPublisher=Orbi Tools s.r.o
AppPublisherURL=http://pype.club
AppSupportURL=http://pype.club
AppUpdatesURL=http://pype.club
AppPublisher=Ynput s.r.o
AppPublisherURL=https://ynput.io
AppSupportURL=https://ynput.io
AppUpdatesURL=https://ynput.io
DefaultDirName={autopf}\{#MyAppName}\{#AppVer}
UsePreviousAppDir=no
DisableProgramGroupPage=yes

@@ -26,6 +26,8 @@ from openpype.lib import (
    emit_event
)
import openpype.hosts.blender
from openpype.settings import get_project_settings


HOST_DIR = os.path.dirname(os.path.abspath(openpype.hosts.blender.__file__))
PLUGINS_DIR = os.path.join(HOST_DIR, "plugins")

@@ -83,6 +85,31 @@ def uninstall():
    ops.unregister()


def show_message(title, message):
    from openpype.widgets.message_window import Window
    from .ops import BlenderApplication

    BlenderApplication.get_app()

    Window(
        parent=None,
        title=title,
        message=message,
        level="warning")


def message_window(title, message):
    from .ops import (
        MainThreadItem,
        execute_in_main_thread,
        _process_app_events
    )

    mti = MainThreadItem(show_message, title, message)
    execute_in_main_thread(mti)
    _process_app_events()


def set_start_end_frames():
    project_name = legacy_io.active_project()
    asset_name = legacy_io.Session["AVALON_ASSET"]

@@ -125,10 +152,36 @@ def set_start_end_frames():
def on_new():
    set_start_end_frames()

    project = os.environ.get("AVALON_PROJECT")
    settings = get_project_settings(project)

    unit_scale_settings = settings.get("blender").get("unit_scale_settings")
    unit_scale_enabled = unit_scale_settings.get("enabled")
    if unit_scale_enabled:
        unit_scale = unit_scale_settings.get("base_file_unit_scale")
        bpy.context.scene.unit_settings.scale_length = unit_scale


def on_open():
    set_start_end_frames()

    project = os.environ.get("AVALON_PROJECT")
    settings = get_project_settings(project)

    unit_scale_settings = settings.get("blender").get("unit_scale_settings")
    unit_scale_enabled = unit_scale_settings.get("enabled")
    apply_on_opening = unit_scale_settings.get("apply_on_opening")
    if unit_scale_enabled and apply_on_opening:
        unit_scale = unit_scale_settings.get("base_file_unit_scale")
        prev_unit_scale = bpy.context.scene.unit_settings.scale_length

        if unit_scale != prev_unit_scale:
            bpy.context.scene.unit_settings.scale_length = unit_scale

            message_window(
                "Base file unit scale changed",
                "Base file unit scale changed to match the project settings.")


@bpy.app.handlers.persistent
def _on_save_pre(*args):

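
The on_new/on_open handlers above read the `unit_scale_settings` block that this commit adds to the Blender project settings (see the settings defaults and schema further down in this commit). A minimal sketch of that lookup, assuming the default values shipped here:

```
# Sketch only: mirrors the lookup done in on_new()/on_open(), using the
# defaults added in this commit (enabled=true, apply_on_opening=false,
# base_file_unit_scale=0.01).
settings = {
    "blender": {
        "unit_scale_settings": {
            "enabled": True,
            "apply_on_opening": False,
            "base_file_unit_scale": 0.01,
        }
    }
}

unit_scale_settings = settings["blender"]["unit_scale_settings"]
if unit_scale_settings["enabled"]:
    # In Blender this value would be written to
    # bpy.context.scene.unit_settings.scale_length.
    print(unit_scale_settings["base_file_unit_scale"])
```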

209 openpype/hosts/blender/plugins/load/load_camera_abc.py (new file)
@@ -0,0 +1,209 @@
"""Load an asset in Blender from an Alembic file."""

from pathlib import Path
from pprint import pformat
from typing import Dict, List, Optional

import bpy

from openpype.pipeline import (
    get_representation_path,
    AVALON_CONTAINER_ID,
)
from openpype.hosts.blender.api import plugin, lib
from openpype.hosts.blender.api.pipeline import (
    AVALON_CONTAINERS,
    AVALON_PROPERTY,
)


class AbcCameraLoader(plugin.AssetLoader):
    """Load a camera from Alembic file.

    Stores the imported asset in an empty named after the asset.
    """

    families = ["camera"]
    representations = ["abc"]

    label = "Load Camera (ABC)"
    icon = "code-fork"
    color = "orange"

    def _remove(self, asset_group):
        objects = list(asset_group.children)

        for obj in objects:
            if obj.type == "CAMERA":
                bpy.data.cameras.remove(obj.data)
            elif obj.type == "EMPTY":
                objects.extend(obj.children)
                bpy.data.objects.remove(obj)

    def _process(self, libpath, asset_group, group_name):
        plugin.deselect_all()

        bpy.ops.wm.alembic_import(filepath=libpath)

        objects = lib.get_selection()

        for obj in objects:
            obj.parent = asset_group

        for obj in objects:
            name = obj.name
            obj.name = f"{group_name}:{name}"
            if obj.type != "EMPTY":
                name_data = obj.data.name
                obj.data.name = f"{group_name}:{name_data}"

            if not obj.get(AVALON_PROPERTY):
                obj[AVALON_PROPERTY] = dict()

            avalon_info = obj[AVALON_PROPERTY]
            avalon_info.update({"container_name": group_name})

        plugin.deselect_all()

        return objects

    def process_asset(
        self,
        context: dict,
        name: str,
        namespace: Optional[str] = None,
        options: Optional[Dict] = None,
    ) -> Optional[List]:
        """
        Arguments:
            name: Use pre-defined name
            namespace: Use pre-defined namespace
            context: Full parenthood of representation to load
            options: Additional settings dictionary
        """
        libpath = self.fname
        asset = context["asset"]["name"]
        subset = context["subset"]["name"]

        asset_name = plugin.asset_name(asset, subset)
        unique_number = plugin.get_unique_number(asset, subset)
        group_name = plugin.asset_name(asset, subset, unique_number)
        namespace = namespace or f"{asset}_{unique_number}"

        avalon_container = bpy.data.collections.get(AVALON_CONTAINERS)
        if not avalon_container:
            avalon_container = bpy.data.collections.new(name=AVALON_CONTAINERS)
            bpy.context.scene.collection.children.link(avalon_container)

        asset_group = bpy.data.objects.new(group_name, object_data=None)
        avalon_container.objects.link(asset_group)

        objects = self._process(libpath, asset_group, group_name)

        objects = []
        nodes = list(asset_group.children)

        for obj in nodes:
            objects.append(obj)
            nodes.extend(list(obj.children))

        bpy.context.scene.collection.objects.link(asset_group)

        asset_group[AVALON_PROPERTY] = {
            "schema": "openpype:container-2.0",
            "id": AVALON_CONTAINER_ID,
            "name": name,
            "namespace": namespace or "",
            "loader": str(self.__class__.__name__),
            "representation": str(context["representation"]["_id"]),
            "libpath": libpath,
            "asset_name": asset_name,
            "parent": str(context["representation"]["parent"]),
            "family": context["representation"]["context"]["family"],
            "objectName": group_name,
        }

        self[:] = objects
        return objects

    def exec_update(self, container: Dict, representation: Dict):
        """Update the loaded asset.

        This will remove all objects of the current collection, load the new
        ones and add them to the collection.
        If the objects of the collection are used in another collection they
        will not be removed, only unlinked. Normally this should not be the
        case though.

        Warning:
            No nested collections are supported at the moment!
        """
        object_name = container["objectName"]
        asset_group = bpy.data.objects.get(object_name)
        libpath = Path(get_representation_path(representation))
        extension = libpath.suffix.lower()

        self.log.info(
            "Container: %s\nRepresentation: %s",
            pformat(container, indent=2),
            pformat(representation, indent=2),
        )

        assert asset_group, (
            f"The asset is not loaded: {container['objectName']}")
        assert libpath, (
            f"No existing library file found for {container['objectName']}")
        assert libpath.is_file(), f"The file doesn't exist: {libpath}"
        assert extension in plugin.VALID_EXTENSIONS, (
            f"Unsupported file: {libpath}")

        metadata = asset_group.get(AVALON_PROPERTY)
        group_libpath = metadata["libpath"]

        normalized_group_libpath = str(
            Path(bpy.path.abspath(group_libpath)).resolve())
        normalized_libpath = str(
            Path(bpy.path.abspath(str(libpath))).resolve())
        self.log.debug(
            "normalized_group_libpath:\n %s\nnormalized_libpath:\n %s",
            normalized_group_libpath,
            normalized_libpath,
        )
        if normalized_group_libpath == normalized_libpath:
            self.log.info("Library already loaded, not updating...")
            return

        mat = asset_group.matrix_basis.copy()

        self._remove(asset_group)
        self._process(str(libpath), asset_group, object_name)

        asset_group.matrix_basis = mat

        metadata["libpath"] = str(libpath)
        metadata["representation"] = str(representation["_id"])

    def exec_remove(self, container: Dict) -> bool:
        """Remove an existing container from a Blender scene.

        Arguments:
            container (openpype:container-1.0): Container to remove,
                from `host.ls()`.

        Returns:
            bool: Whether the container was deleted.

        Warning:
            No nested collections are supported at the moment!
        """
        object_name = container["objectName"]
        asset_group = bpy.data.objects.get(object_name)

        if not asset_group:
            return False

        self._remove(asset_group)

        bpy.data.objects.remove(asset_group)

        return True

@@ -13,7 +13,6 @@ class ValidateInstanceHasMembers(pyblish.api.InstancePlugin):

    @classmethod
    def get_invalid(cls, instance):

        invalid = list()
        if not instance.data["setMembers"]:
            objectset_name = instance.data['name']

@@ -22,6 +21,10 @@ class ValidateInstanceHasMembers(pyblish.api.InstancePlugin):
        return invalid

    def process(self, instance):
        # Allow renderlayer and workfile to be empty
        skip_families = ["workfile", "renderlayer", "rendersetup"]
        if instance.data.get("family") in skip_families:
            return

        invalid = self.get_invalid(instance)
        if invalid:

@@ -151,6 +151,7 @@ class NukeHost(
def add_nuke_callbacks():
    """ Adding all available nuke callbacks
    """
    nuke_settings = get_current_project_settings()["nuke"]
    workfile_settings = WorkfileSettings()
    # Set context settings.
    nuke.addOnCreate(

@@ -169,7 +170,10 @@ def add_nuke_callbacks():
    # # set apply all workfile settings on script load and save
    nuke.addOnScriptLoad(WorkfileSettings().set_context_settings)

    nuke.addFilenameFilter(dirmap_file_name_filter)
    if nuke_settings["nuke-dirmap"]["enabled"]:
        log.info("Added Nuke's dirmaping callback ...")
        # Add dirmap for file paths.
        nuke.addFilenameFilter(dirmap_file_name_filter)

    log.info("Added Nuke callbacks ...")

@@ -24,6 +24,8 @@ from .lib import (
    get_project_manager,
    get_current_project,
    get_current_timeline,
    get_any_timeline,
    get_new_timeline,
    create_bin,
    get_media_pool_item,
    create_media_pool_item,

@@ -95,6 +97,8 @@ __all__ = [
    "get_project_manager",
    "get_current_project",
    "get_current_timeline",
    "get_any_timeline",
    "get_new_timeline",
    "create_bin",
    "get_media_pool_item",
    "create_media_pool_item",

@@ -15,6 +15,7 @@ log = Logger.get_logger(__name__)
self = sys.modules[__name__]
self.project_manager = None
self.media_storage = None
self.current_project = None

# OpenPype sequential rename variables
self.rename_index = 0

@@ -85,22 +86,60 @@ def get_media_storage():


def get_current_project():
    # initialize project manager
    get_project_manager()
    """Get current project object.
    """
    if not self.current_project:
        self.current_project = get_project_manager().GetCurrentProject()

    return self.project_manager.GetCurrentProject()
    return self.current_project


def get_current_timeline(new=False):
    # get current project
    """Get current timeline object.

    Args:
        new (bool)[optional]: [DEPRECATED] if True it will create
            new timeline if none exists

    Returns:
        TODO: will need to reflect future `None`
        object: resolve.Timeline
    """
    project = get_current_project()
    timeline = project.GetCurrentTimeline()

    # return current timeline if any
    if timeline:
        return timeline

    # TODO: [deprecated] and will be removed in future
    if new:
        media_pool = project.GetMediaPool()
        new_timeline = media_pool.CreateEmptyTimeline(self.pype_timeline_name)
        project.SetCurrentTimeline(new_timeline)
        return get_new_timeline()

    return project.GetCurrentTimeline()

def get_any_timeline():
    """Get any timeline object.

    Returns:
        object | None: resolve.Timeline
    """
    project = get_current_project()
    timeline_count = project.GetTimelineCount()
    if timeline_count > 0:
        return project.GetTimelineByIndex(1)


def get_new_timeline():
    """Get new timeline object.

    Returns:
        object: resolve.Timeline
    """
    project = get_current_project()
    media_pool = project.GetMediaPool()
    new_timeline = media_pool.CreateEmptyTimeline(self.pype_timeline_name)
    project.SetCurrentTimeline(new_timeline)
    return new_timeline


def create_bin(name: str, root: object = None) -> object:

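
Splitting timeline access into `get_current_timeline`, `get_any_timeline` and `get_new_timeline` replaces the deprecated `new=True` flag; callers are expected to chain the three, as the hunks below do. A short sketch of that fallback, assuming the Resolve api lib is importable as in this commit:

```
# Sketch of the intended fallback chain (the same pattern appears verbatim
# in the hunks below): prefer the open timeline, then any existing one,
# and only create a new timeline as a last resort.
from openpype.hosts.resolve.api import lib

timeline = (
    lib.get_current_timeline() or
    lib.get_any_timeline() or
    lib.get_new_timeline()
)
```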
@@ -312,7 +351,13 @@ def get_current_timeline_items(
    track_type = track_type or "video"
    selecting_color = selecting_color or "Chocolate"
    project = get_current_project()
    timeline = get_current_timeline()

    # get timeline anyhow
    timeline = (
        get_current_timeline() or
        get_any_timeline() or
        get_new_timeline()
    )
    selected_clips = []

    # get all tracks count filtered by track type

@@ -327,7 +327,10 @@ class ClipLoader:
            self.active_timeline = options["timeline"]
        else:
            # create new sequence
            self.active_timeline = lib.get_current_timeline(new=True)
            self.active_timeline = (
                lib.get_current_timeline() or
                lib.get_new_timeline()
            )
        else:
            self.active_timeline = lib.get_current_timeline()

|
|||
IMAGE_EXTENSIONS
|
||||
)
|
||||
|
||||
|
||||
class LoadClip(plugin.TimelineItemLoader):
|
||||
"""Load a subset to timeline as clip
|
||||
|
||||
|
|
|
|||
|
|
@@ -1 +0,0 @@

@@ -0,0 +1,13 @@
#! python3
from openpype.pipeline import install_host
from openpype.hosts.resolve import api as bmdvr
from openpype.hosts.resolve.api.lib import get_current_project

if __name__ == "__main__":
    install_host(bmdvr)
    project = get_current_project()
    timeline_count = project.GetTimelineCount()
    print(f"Timeline count: {timeline_count}")
    timeline = project.GetTimelineByIndex(timeline_count)
    print(f"Timeline name: {timeline.GetName()}")
    print(timeline.GetTrackCount("video"))

@@ -1,6 +1,6 @@
import os
import shutil
from openpype.lib import Logger
from openpype.lib import Logger, is_running_from_build

RESOLVE_ROOT_DIR = os.path.dirname(os.path.abspath(__file__))

@@ -41,6 +41,13 @@ def setup(env):
    # copy scripts into Resolve's utility scripts dir
    for directory, scripts in scripts.items():
        for script in scripts:
            if (
                is_running_from_build() and
                script in ["tests", "develop"]
            ):
                # only copy those if started from build
                continue

            src = os.path.join(directory, script)
            dst = os.path.join(util_scripts_dir, script)
            log.info("Copying `{}` to `{}`...".format(src, dst))

@@ -17,6 +17,8 @@ class CreateUAsset(UnrealAssetCreator):
    family = "uasset"
    icon = "cube"

    extension = ".uasset"

    def create(self, subset_name, instance_data, pre_create_data):
        if pre_create_data.get("use_selection"):
            ar = unreal.AssetRegistryHelpers.get_asset_registry()

@@ -37,10 +39,28 @@ class CreateUAsset(UnrealAssetCreator):
                        f"{Path(obj).name} is not on the disk. Likely it needs to"
                        "be saved first.")

                if Path(sys_path).suffix != ".uasset":
                    raise CreatorError(f"{Path(sys_path).name} is not a UAsset.")
                if Path(sys_path).suffix != self.extension:
                    raise CreatorError(
                        f"{Path(sys_path).name} is not a {self.label}.")

        super(CreateUAsset, self).create(
            subset_name,
            instance_data,
            pre_create_data)


class CreateUMap(CreateUAsset):
    """Create Level."""

    identifier = "io.ayon.creators.unreal.umap"
    label = "Level"
    family = "uasset"
    extension = ".umap"

    def create(self, subset_name, instance_data, pre_create_data):
        instance_data["families"] = ["umap"]

        super(CreateUMap, self).create(
            subset_name,
            instance_data,
            pre_create_data)

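
Moving the hard-coded `.uasset` suffix into an `extension` class attribute is what lets `CreateUMap` reuse the whole `create()` flow by overriding data only. As a hedged illustration, a hypothetical further creator (not part of this commit) would need nothing more than:

```
# Hypothetical subclass, for illustration only: the suffix check in
# CreateUAsset.create() reads self.extension and self.label, so a new
# asset type only declares its own values. The identifier is made up.
class CreateUDataTable(CreateUAsset):
    """Create Data Table."""

    identifier = "io.ayon.creators.unreal.udatatable"
    label = "Data Table"
    family = "uasset"
    extension = ".uasset"
```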
@@ -21,6 +21,8 @@ class UAssetLoader(plugin.Loader):
    icon = "cube"
    color = "orange"

    extension = "uasset"

    def load(self, context, name, namespace, options):
        """Load and containerise representation into Content Browser.

@@ -42,26 +44,29 @@ class UAssetLoader(plugin.Loader):
        root = "/Game/Ayon/Assets"
        asset = context.get('asset').get('name')
        suffix = "_CON"
        if asset:
            asset_name = "{}_{}".format(asset, name)
        else:
            asset_name = "{}".format(name)

        asset_name = f"{asset}_{name}" if asset else f"{name}"
        tools = unreal.AssetToolsHelpers().get_asset_tools()
        asset_dir, container_name = tools.create_unique_asset_name(
            f"{root}/{asset}/{name}", suffix=""
        )

        container_name += suffix
        unique_number = 1
        while unreal.EditorAssetLibrary.does_directory_exist(
            f"{asset_dir}_{unique_number:02}"
        ):
            unique_number += 1

        asset_dir = f"{asset_dir}_{unique_number:02}"
        container_name = f"{container_name}_{unique_number:02}{suffix}"

        unreal.EditorAssetLibrary.make_directory(asset_dir)

        destination_path = asset_dir.replace(
            "/Game",
            Path(unreal.Paths.project_content_dir()).as_posix(),
            1)
            "/Game", Path(unreal.Paths.project_content_dir()).as_posix(), 1)

        shutil.copy(self.fname, f"{destination_path}/{name}.uasset")
        shutil.copy(
            self.fname,
            f"{destination_path}/{name}_{unique_number:02}.{self.extension}")

        # Create Asset Container
        unreal_pipeline.create_container(

@@ -77,7 +82,7 @@ class UAssetLoader(plugin.Loader):
            "loader": str(self.__class__.__name__),
            "representation": context["representation"]["_id"],
            "parent": context["representation"]["parent"],
            "family": context["representation"]["context"]["family"]
            "family": context["representation"]["context"]["family"],
        }
        unreal_pipeline.imprint(f"{asset_dir}/{container_name}", data)

@@ -96,10 +101,10 @@ class UAssetLoader(plugin.Loader):
        asset_dir = container["namespace"]
        name = representation["context"]["subset"]

        unique_number = container["container_name"].split("_")[-2]

        destination_path = asset_dir.replace(
            "/Game",
            Path(unreal.Paths.project_content_dir()).as_posix(),
            1)
            "/Game", Path(unreal.Paths.project_content_dir()).as_posix(), 1)

        asset_content = unreal.EditorAssetLibrary.list_assets(
            asset_dir, recursive=False, include_folder=True

@@ -107,22 +112,24 @@ class UAssetLoader(plugin.Loader):

        for asset in asset_content:
            obj = ar.get_asset_by_object_path(asset).get_asset()
            if not obj.get_class().get_name() == 'AyonAssetContainer':
            if obj.get_class().get_name() != "AyonAssetContainer":
                unreal.EditorAssetLibrary.delete_asset(asset)

        update_filepath = get_representation_path(representation)

        shutil.copy(update_filepath, f"{destination_path}/{name}.uasset")
        shutil.copy(
            update_filepath,
            f"{destination_path}/{name}_{unique_number}.{self.extension}")

        container_path = "{}/{}".format(container["namespace"],
                                        container["objectName"])
        container_path = f'{container["namespace"]}/{container["objectName"]}'
        # update metadata
        unreal_pipeline.imprint(
            container_path,
            {
                "representation": str(representation["_id"]),
                "parent": str(representation["parent"])
            })
                "parent": str(representation["parent"]),
            }
        )

        asset_content = unreal.EditorAssetLibrary.list_assets(
            asset_dir, recursive=True, include_folder=True

@@ -143,3 +150,13 @@ class UAssetLoader(plugin.Loader):

        if len(asset_content) == 0:
            unreal.EditorAssetLibrary.delete_directory(parent_path)


class UMapLoader(UAssetLoader):
    """Load Level."""

    families = ["uasset"]
    label = "Load Level"
    representations = ["umap"]

    extension = "umap"

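
Both loaders now suffix the destination folder and file with a two-digit unique number and take the file extension from the `extension` class attribute. A small sketch of the resulting names, using made-up example values (the /Game to content-dir replacement done in `load()` is left out here):

```
# Illustration only; asset_dir, name and unique_number are assumed examples.
asset_dir = "/Game/Ayon/Assets/hero/uassetMain"
name = "uassetMain"
extension = "uasset"
unique_number = 2

asset_dir = f"{asset_dir}_{unique_number:02}"
target = f"{asset_dir}/{name}_{unique_number:02}.{extension}"
print(target)  # /Game/Ayon/Assets/hero/uassetMain_02/uassetMain_02.uasset
```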
@@ -24,7 +24,7 @@ class CollectInstanceMembers(pyblish.api.InstancePlugin):
        ar = unreal.AssetRegistryHelpers.get_asset_registry()

        inst_path = instance.data.get('instance_path')
        inst_name = instance.data.get('objectName')
        inst_name = inst_path.split('/')[-1]

        pub_instance = ar.get_asset_by_object_path(
            f"{inst_path}.{inst_name}").get_asset()

@@ -11,16 +11,17 @@ class ExtractUAsset(publish.Extractor):

    label = "Extract UAsset"
    hosts = ["unreal"]
    families = ["uasset"]
    families = ["uasset", "umap"]
    optional = True

    def process(self, instance):
        extension = (
            "umap" if "umap" in instance.data.get("families") else "uasset")
        ar = unreal.AssetRegistryHelpers.get_asset_registry()

        self.log.info("Performing extraction..")

        staging_dir = self.staging_dir(instance)
        filename = "{}.uasset".format(instance.name)
        filename = f"{instance.name}.{extension}"

        members = instance.data.get("members", [])

@@ -36,13 +37,15 @@ class ExtractUAsset(publish.Extractor):

        shutil.copy(sys_path, staging_dir)

        self.log.info(f"instance.data: {instance.data}")

        if "representations" not in instance.data:
            instance.data["representations"] = []

        representation = {
            'name': 'uasset',
            'ext': 'uasset',
            'files': filename,
            "name": extension,
            "ext": extension,
            "files": filename,
            "stagingDir": staging_dir,
        }
        instance.data["representations"].append(representation)

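
With the family-driven extension, a level instance (family `umap`) now produces a representation whose name, ext and file all carry the `.umap` suffix. A sketch of the dict that gets appended, with the instance name and staging directory as assumed example values:

```
# Illustration only: what ExtractUAsset.process() appends for a "umap"
# instance; instance_name and staging_dir are assumed example values.
instance_name = "umapMain"
staging_dir = "/tmp/pyblish_staging"
extension = "umap"

representation = {
    "name": extension,
    "ext": extension,
    "files": f"{instance_name}.{extension}",
    "stagingDir": staging_dir,
}
```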
@@ -275,7 +275,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
        args = [
            "--headless",
            'publish',
            rootless_metadata_path,
            '"{}"'.format(rootless_metadata_path),
            "--targets", "deadline",
            "--targets", "farm"
        ]

@@ -1,5 +1,3 @@
import os

import requests
from qtpy import QtCore, QtGui, QtWidgets

@@ -1,7 +1,9 @@
import os
import json

import appdirs
import requests

from openpype.modules import OpenPypeModule, ITrayModule

@@ -110,16 +112,10 @@ class MusterModule(OpenPypeModule, ITrayModule):
        self.save_credentials(token)

    def save_credentials(self, token):
        """
        Save credentials to JSON file
        """
        data = {
            'token': token
        }
        """Save credentials to JSON file."""

        file = open(self.cred_path, 'w')
        file.write(json.dumps(data))
        file.close()
        with open(self.cred_path, "w") as f:
            json.dump({'token': token}, f)

    def show_login(self):
        """

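
The rewritten `save_credentials` writes the same payload as before, just through a context manager and `json.dump`. A minimal sketch of the equivalent write, with a placeholder path and token:

```
# Sketch only: equivalent credential write; the path and token are
# placeholder example values, not taken from this commit.
import json

cred_path = "/path/to/muster_cred.json"
with open(cred_path, "w") as f:
    json.dump({"token": "<muster-token>"}, f)
```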
@@ -12,7 +12,8 @@ import pyblish.api
from openpype.lib import (
    Logger,
    import_filepath,
    filter_profiles
    filter_profiles,
    is_func_signature_supported,
)
from openpype.settings import (
    get_project_settings,

@@ -30,8 +31,6 @@ from .contants import (
    TRANSIENT_DIR_TEMPLATE
)

_ARG_PLACEHOLDER = object()


def get_template_name_profiles(
    project_name, project_settings=None, logger=None

@@ -498,12 +497,26 @@ def filter_pyblish_plugins(plugins):
    # iterate over plugins
    for plugin in plugins[:]:
        # Apply settings to plugins
        if hasattr(plugin, "apply_settings"):

        apply_settings_func = getattr(plugin, "apply_settings", None)
        if apply_settings_func is not None:
            # Use classmethod 'apply_settings'
            # - can be used to target settings from custom settings place
            # - skip default behavior when successful
            try:
                plugin.apply_settings(project_settings, system_settings)
                # Support to pass only project settings
                # - make sure that both settings are passed, when can be
                # - that covers cases when *args are in method parameters
                both_supported = is_func_signature_supported(
                    apply_settings_func, project_settings, system_settings
                )
                project_supported = is_func_signature_supported(
                    apply_settings_func, project_settings
                )
                if not both_supported and project_supported:
                    plugin.apply_settings(project_settings)
                else:
                    plugin.apply_settings(project_settings, system_settings)

            except Exception:
                log.warning(

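
The `is_func_signature_supported` checks allow a plugin to declare `apply_settings` with only the project settings argument; the dispatcher then calls it with one argument instead of two. A hedged sketch of such a plugin (all names are illustrative, not from this commit):

```
# Illustrative plugin only: shows the one-argument apply_settings form that
# the new dispatch in filter_pyblish_plugins() supports.
import pyblish.api


class CollectExampleSettings(pyblish.api.ContextPlugin):
    label = "Collect Example Settings"  # hypothetical plugin
    order = pyblish.api.CollectorOrder

    @classmethod
    def apply_settings(cls, project_settings):
        # Only project settings are requested; system settings are not part
        # of this signature, and the dispatcher detects that.
        cls.enabled = (
            project_settings
            .get("example_addon", {})
            .get("collect_example", {})
            .get("enabled", True)
        )

    def process(self, context):
        pass
```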
@@ -870,31 +883,24 @@ add_repre_files_for_cleanup(instance, repre):
    instance.context.data["cleanupFullPaths"].append(expected_file)


def get_publish_instance_label(instance, default=_ARG_PLACEHOLDER):
def get_publish_instance_label(instance):
    """Try to get label from pyblish instance.

    First are checked 'label' and 'name' keys in instance data. If are not set
    a default value is returned. Instance object is converted to string
    if default value is not specific.
    First are used values in instance data under 'label' and 'name' keys. Then
    is used string conversion of instance object -> 'instance._name'.

    Todos:
        Maybe 'subset' key could be used too.

    Args:
        instance (pyblish.api.Instance): Pyblish instance.
        default (Optional[Any]): Default value to return if any

    Returns:
        Union[Any]: Instance label or default label.
        str: Instance label.
    """

    label = (
    return (
        instance.data.get("label")
        or instance.data.get("name")
        or str(instance)
    )
    if label:
        return label

    if default is _ARG_PLACEHOLDER:
        return str(instance)
    return default

@@ -1,4 +1,9 @@
{
    "unit_scale_settings": {
        "enabled": true,
        "apply_on_opening": false,
        "base_file_unit_scale": 0.01
    },
    "imageio": {
        "ocio_config": {
            "enabled": false,

@@ -5,6 +5,32 @@
    "label": "Blender",
    "is_file": true,
    "children": [
        {
            "key": "unit_scale_settings",
            "type": "dict",
            "label": "Set Unit Scale",
            "collapsible": true,
            "is_group": true,
            "checkbox_key": "enabled",
            "children": [
                {
                    "type": "boolean",
                    "key": "enabled",
                    "label": "Enabled"
                },
                {
                    "key": "apply_on_opening",
                    "type": "boolean",
                    "label": "Apply on Opening Existing Files"
                },
                {
                    "key": "base_file_unit_scale",
                    "type": "number",
                    "label": "Base File Unit Scale",
                    "decimal": 10
                }
            ]
        },
        {
            "key": "imageio",
            "type": "dict",

@@ -676,7 +676,15 @@ class PublisherWindow(QtWidgets.QDialog):
        self._tabs_widget.set_current_tab(identifier)

    def set_current_tab(self, tab):
        self._set_current_tab(tab)
        if tab == "create":
            self._go_to_create_tab()
        elif tab == "publish":
            self._go_to_publish_tab()
        elif tab == "report":
            self._go_to_report_tab()
        elif tab == "details":
            self._go_to_details_tab()

        if not self._window_is_visible:
            self.set_tab_on_reset(tab)

@@ -686,6 +694,12 @@ class PublisherWindow(QtWidgets.QDialog):
    def _go_to_create_tab(self):
        if self._create_tab.isEnabled():
            self._set_current_tab("create")
            return

        self._overlay_object.add_message(
            "Can't switch to Create tab because publishing is paused.",
            message_type="info"
        )

    def _go_to_publish_tab(self):
        self._set_current_tab("publish")

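
The new public `set_current_tab` funnels through the per-tab helpers, so the Create tab can refuse the switch while publishing is paused, and the choice is remembered for the next reset while the window is not yet visible. A small usage sketch, assuming `window` is an existing PublisherWindow instance:

```
# Usage sketch only; `window` is assumed to be an existing PublisherWindow.
window.set_current_tab("publish")
# Values handled above: "create", "publish", "report", "details".
# If the window is not visible yet, the tab is also stored via
# set_tab_on_reset() so it is restored on the next reset.
```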
@@ -872,7 +872,6 @@ class WrappedCallbackItem:
            self.log.warning("- item is already processed")
            return

        self.log.debug("Running callback: {}".format(str(self._callback)))
        try:
            result = self._callback(*self._args, **self._kwargs)
            self._result = result

@@ -127,8 +127,7 @@ class OverlayMessageWidget(QtWidgets.QFrame):
        if timeout:
            self._timeout_timer.setInterval(timeout)

        if message_type:
            set_style_property(self, "type", message_type)
        set_style_property(self, "type", message_type)

        self._timeout_timer.start()

@@ -1,3 +1,3 @@
# -*- coding: utf-8 -*-
"""Package declaring Pype version."""
__version__ = "3.15.8"
__version__ = "3.15.9-nightly.1"

@@ -15,16 +15,16 @@ Structure:
- openpype/modules/MODULE_NAME - structure follow directory structure in code base
    - fixture - sample data `(MongoDB dumps, test files etc.)`
    - `tests.py` - single or more pytest files for MODULE_NAME
- unit - quick unit test
    - MODULE_NAME
- unit - quick unit test
    - MODULE_NAME
        - fixture
        - `tests.py`


How to run:
----------
- use Openpype command 'runtests' from command line (`.venv` in ${OPENPYPE_ROOT} must be activated to use configured Python!)
-- `python ${OPENPYPE_ROOT}/start.py runtests`

By default, this command will run all tests in ${OPENPYPE_ROOT}/tests.

Specific location could be provided to this command as an argument, either as absolute path, or relative path to ${OPENPYPE_ROOT}.

@@ -41,17 +41,15 @@ In some cases your tests might be so localized, that you don't care about all en
In that case you might add this dummy configuration BEFORE any imports in your test file
```
import os
os.environ["AVALON_MONGO"] = "mongodb://localhost:27017"
os.environ["OPENPYPE_DEBUG"] = "1"
os.environ["OPENPYPE_MONGO"] = "mongodb://localhost:27017"
os.environ["AVALON_DB"] = "avalon"
os.environ["OPENPYPE_DATABASE_NAME"] = "openpype"
os.environ["AVALON_TIMEOUT"] = '3000'
os.environ["OPENPYPE_DEBUG"] = "3"
os.environ["AVALON_CONFIG"] = "pype"
os.environ["AVALON_DB"] = "avalon"
os.environ["AVALON_TIMEOUT"] = "3000"
os.environ["AVALON_ASSET"] = "Asset"
os.environ["AVALON_PROJECT"] = "test_project"
```
(AVALON_ASSET and AVALON_PROJECT values should exist in your environment)

This might be enough to run your test file separately. Do not commit this skeleton though.
Use only when you know what you are doing!
Use only when you know what you are doing!