mirror of https://github.com/ynput/ayon-core.git (synced 2025-12-24 21:04:40 +01:00)

commit a7dc724639
Merge branch 'develop' into bugfix/remove-addon-from-python-path-on-update

103 changed files with 1662 additions and 9102 deletions
.gitmodules (vendored), 3 lines changed
@@ -1,3 +0,0 @@
-[submodule "client/ayon_core/hosts/unreal/integration"]
-	path = server_addon/unreal/client/ayon_unreal/integration
-	url = https://github.com/ynput/ayon-unreal-plugin.git
@@ -28,16 +28,16 @@ from .interfaces import (
 )

 # Files that will be always ignored on addons import
-IGNORED_FILENAMES = (
+IGNORED_FILENAMES = {
     "__pycache__",
-)
+}
 # Files ignored on addons import from "./ayon_core/modules"
-IGNORED_DEFAULT_FILENAMES = (
+IGNORED_DEFAULT_FILENAMES = {
     "__init__.py",
     "base.py",
     "interfaces.py",
     "click_wrap.py",
-)
+}

 # When addon was moved from ayon-core codebase
 # - this is used to log the missing addon
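Aside: the hunk above swaps tuple literals for set literals; sets give O(1) membership tests and support the `|` union that the rewritten loader below relies on. A small runnable illustration:

IGNORED_FILENAMES = {"__pycache__"}
IGNORED_DEFAULT_FILENAMES = {
    "__init__.py", "base.py", "interfaces.py", "click_wrap.py"
}

# Union of the two ignore sets, as used in the discovery loop below.
ignored = IGNORED_FILENAMES | IGNORED_DEFAULT_FILENAMES
print("base.py" in ignored)  # True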
@@ -411,82 +411,56 @@ def _load_addons_in_core(
 ):
     # Add current directory at first place
     # - has small differences in import logic
-    hosts_dir = os.path.join(AYON_CORE_ROOT, "hosts")
     modules_dir = os.path.join(AYON_CORE_ROOT, "modules")
+    if not os.path.exists(modules_dir):
+        log.warning(
+            f"Could not find path when loading AYON addons \"{modules_dir}\""
+        )
+        return

-    for dirpath in {hosts_dir, modules_dir}:
-        if not os.path.exists(dirpath):
-            log.warning((
-                "Could not find path when loading AYON addons \"{}\""
-            ).format(dirpath))
+    ignored_filenames = IGNORED_FILENAMES | IGNORED_DEFAULT_FILENAMES

-        is_in_modules_dir = dirpath == modules_dir
-        ignored_filenames = set()
-        if is_in_modules_dir:
-            ignored_filenames = set(IGNORED_DEFAULT_FILENAMES)
+    for filename in os.listdir(modules_dir):
+        # Ignore filenames
+        if filename in ignored_filenames:
+            continue

-        for filename in os.listdir(dirpath):
-            # Ignore filenames
-            if filename in IGNORED_FILENAMES or filename in ignored_filenames:
-                continue
+        fullpath = os.path.join(modules_dir, filename)
+        basename, ext = os.path.splitext(filename)

-            fullpath = os.path.join(dirpath, filename)
-            basename, ext = os.path.splitext(filename)
+        if basename in ignore_addon_names:
+            continue

-            if basename in ignore_addon_names:
-                continue
+        # Validations
+        if os.path.isdir(fullpath):
+            # Check existence of init file
+            init_path = os.path.join(fullpath, "__init__.py")
+            if not os.path.exists(init_path):
+                log.debug((
+                    "Addon directory does not contain __init__.py"
+                    f" file {fullpath}"
+                ))
+                continue

-            # Validations
-            if os.path.isdir(fullpath):
-                # Check existence of init file
-                init_path = os.path.join(fullpath, "__init__.py")
-                if not os.path.exists(init_path):
-                    log.debug((
-                        "Addon directory does not contain __init__.py"
-                        " file {}"
-                    ).format(fullpath))
-                    continue
+        elif ext != ".py":
+            continue

-            elif ext not in (".py", ):
-                continue
+        # TODO add more logic how to define if folder is addon or not
+        # - check manifest and content of manifest
+        try:
+            # Don't import dynamically current directory modules
+            new_import_str = f"{modules_key}.{basename}"
+            import_str = f"ayon_core.modules.{basename}"
+            default_module = __import__(import_str, fromlist=("", ))
+            sys.modules[new_import_str] = default_module
+            setattr(openpype_modules, basename, default_module)

-            # TODO add more logic how to define if folder is addon or not
-            # - check manifest and content of manifest
-            try:
-                # Don't import dynamically current directory modules
-                new_import_str = "{}.{}".format(modules_key, basename)
-                if is_in_modules_dir:
-                    import_str = "ayon_core.modules.{}".format(basename)
-                    default_module = __import__(import_str, fromlist=("", ))
-                    sys.modules[new_import_str] = default_module
-                    setattr(openpype_modules, basename, default_module)
-
-                else:
-                    import_str = "ayon_core.hosts.{}".format(basename)
-                    # Until all hosts are converted to be able use them as
-                    # modules is this error check needed
-                    try:
-                        default_module = __import__(
-                            import_str, fromlist=("", )
-                        )
-                        sys.modules[new_import_str] = default_module
-                        setattr(openpype_modules, basename, default_module)
-
-                    except Exception:
-                        log.warning(
-                            "Failed to import host folder {}".format(basename),
-                            exc_info=True
-                        )
-
-            except Exception:
-                if is_in_modules_dir:
-                    msg = "Failed to import in-core addon '{}'.".format(
-                        basename
-                    )
-                else:
-                    msg = "Failed to import addon '{}'.".format(fullpath)
-                log.error(msg, exc_info=True)
+        except Exception:
+            log.error(
+                f"Failed to import in-core addon '{basename}'.",
+                exc_info=True
+            )


 def _load_addons():
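Aside: the import trick kept by this hunk loads a package by dotted path and registers it under an alias in sys.modules. A minimal standard-library sketch of the same mechanism (the alias name here is illustrative):

import sys

def import_with_alias(import_str, alias):
    # __import__ with a non-empty fromlist returns the leaf module
    # instead of the top-level package.
    module = __import__(import_str, fromlist=("", ))
    # Register the alias so later lookups resolve to the same module.
    sys.modules[alias] = module
    return module

json_mod = import_with_alias("json", "openpype_modules.json_demo")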
@@ -64,9 +64,10 @@ class Commands:
             get_global_context,
         )

-        # Register target and host
+        import ayon_api
         import pyblish.util

+        # Register target and host
         if not isinstance(path, str):
             raise RuntimeError("Path to JSON must be a string.")

@@ -86,6 +87,19 @@ class Commands:

         log = Logger.get_logger("CLI-publish")

+        # Make public ayon api behave as other user
+        # - this works only if public ayon api is using service user
+        username = os.environ.get("AYON_USERNAME")
+        if username:
+            # NOTE: ayon-python-api does not have public api function to find
+            #   out if is used service user. So we need to have try > except
+            #   block.
+            con = ayon_api.get_server_api_connection()
+            try:
+                con.set_default_service_username(username)
+            except ValueError:
+                pass
+
         install_ayon_plugins()

         manager = AddonsManager()
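Aside: a consolidated sketch of the impersonation guard added above. Names mirror the hunk; the ValueError contract (raised when the connection does not use a service user) is taken from the NOTE in the change:

import os
import ayon_api

def apply_username_override():
    # Make the public ayon api behave as another user; this only
    # works when the connection uses a service user.
    username = os.environ.get("AYON_USERNAME")
    if not username:
        return
    con = ayon_api.get_server_api_connection()
    try:
        con.set_default_service_username(username)
    except ValueError:
        # Not a service user; keep the current identity.
        pass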
@@ -14,22 +14,20 @@ class CollectFarmTarget(pyblish.api.InstancePlugin):
         if not instance.data.get("farm"):
             return

-        context = instance.context
-
-        farm_name = ""
-        addons_manager = context.data.get("ayonAddonsManager")
+        addons_manager = instance.context.data.get("ayonAddonsManager")

-        for farm_renderer in ["deadline", "royalrender"]:
-            addon = addons_manager.get(farm_renderer, False)
-
-            if not addon:
-                self.log.error("Cannot find AYON addon '{0}'.".format(
-                    farm_renderer))
-            elif addon.enabled:
-                farm_name = farm_renderer
-
-        if farm_name:
-            self.log.debug("Collected render target: {0}".format(farm_name))
-            instance.data["toBeRenderedOn"] = farm_name
-        else:
-            AssertionError("No AYON renderer addon found")
+        farm_renderer_addons = ["deadline", "royalrender"]
+        for farm_renderer in farm_renderer_addons:
+            addon = addons_manager.get(farm_renderer)
+            if addon and addon.enabled:
+                farm_name = farm_renderer
+                break
+        else:
+            # No enabled farm render addon found, then report all farm
+            # addons that were searched for yet not found
+            for farm_renderer in farm_renderer_addons:
+                self.log.error(f"Cannot find AYON addon '{farm_renderer}'.")
+            raise RuntimeError("No AYON renderer addon found.")
+
+        self.log.debug("Collected render target: {0}".format(farm_name))
+        instance.data["toBeRenderedOn"] = farm_name
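Aside: the rewrite relies on Python's for/else; the else branch runs only when the loop finishes without hitting break. It also fixes a latent bug: the old code constructed AssertionError without raising it. A runnable demo of the control flow:

candidates = ["deadline", "royalrender"]
enabled = {"royalrender"}  # pretend only this addon is enabled

for name in candidates:
    if name in enabled:
        print("render target:", name)
        break
else:
    # Reached only when no candidate matched (no break happened).
    raise RuntimeError("No AYON renderer addon found.")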
@@ -380,29 +380,28 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
         data = {
             "families": get_instance_families(instance)
         }
-        attribibutes = {}
+        attributes = {}

         product_group = instance.data.get("productGroup")
         if product_group:
-            attribibutes["productGroup"] = product_group
+            attributes["productGroup"] = product_group
         elif existing_product_entity:
             # Preserve previous product group if new version does not set it
             product_group = existing_product_entity.get("attrib", {}).get(
                 "productGroup"
             )
             if product_group is not None:
-                attribibutes["productGroup"] = product_group
+                attributes["productGroup"] = product_group

         product_id = None
         if existing_product_entity:
             product_id = existing_product_entity["id"]

         product_entity = new_product_entity(
             product_name,
             product_type,
             folder_entity["id"],
             data=data,
-            attribs=attribibutes,
+            attribs=attributes,
             entity_id=product_id
         )

@@ -464,6 +463,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
             version_number,
             product_entity["id"],
             task_id=task_id,
+            status=instance.data.get("status"),
             data=version_data,
             attribs=version_attributes,
             entity_id=version_id,
@@ -14,9 +14,10 @@ from ayon_core.lib import (
     convert_ffprobe_fps_value,
 )

+FFMPEG_EXE_COMMAND = subprocess.list2cmdline(get_ffmpeg_tool_args("ffmpeg"))
 FFMPEG = (
     '{}%(input_args)s -i "%(input)s" %(filters)s %(args)s%(output)s'
-).format(subprocess.list2cmdline(get_ffmpeg_tool_args("ffmpeg")))
+).format(FFMPEG_EXE_COMMAND)

 DRAWTEXT = (
     "drawtext@'%(label)s'=fontfile='%(font)s':text=\\'%(text)s\\':"
@@ -482,10 +483,19 @@ class ModifiedBurnins(ffmpeg_burnins.Burnins):
         )
         print("Launching command: {}".format(command))

+        use_shell = True
+        try:
+            test_proc = subprocess.Popen(
+                f"{FFMPEG_EXE_COMMAND} --help", shell=True
+            )
+            test_proc.wait()
+        except BaseException:
+            use_shell = False
+
         kwargs = {
             "stdout": subprocess.PIPE,
             "stderr": subprocess.PIPE,
-            "shell": True,
+            "shell": use_shell,
         }
         proc = subprocess.Popen(command, **kwargs)

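Aside: the probe above checks whether Popen(..., shell=True) works in the current environment before committing to it. A self-contained sketch of the same check using a harmless command:

import subprocess

def shell_is_usable(probe_cmd="echo probe"):
    try:
        proc = subprocess.Popen(
            probe_cmd,
            shell=True,
            stdout=subprocess.DEVNULL,
            stderr=subprocess.DEVNULL,
        )
        proc.wait()
    except BaseException:
        return False
    return proc.returncode == 0

use_shell = shell_is_usable()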
@@ -1,6 +1,80 @@
-import ayon_api
+import json
+import collections

-from ayon_core.lib import CacheItem
+import ayon_api
+from ayon_api.graphql import FIELD_VALUE, GraphQlQuery, fields_to_dict
+
+from ayon_core.lib import NestedCacheItem
+
+
+# --- Implementation that should be in ayon-python-api ---
+# The implementation is not available in all versions of ayon-python-api.
+def users_graphql_query(fields):
+    query = GraphQlQuery("Users")
+    names_var = query.add_variable("userNames", "[String!]")
+    project_name_var = query.add_variable("projectName", "String!")
+
+    users_field = query.add_field_with_edges("users")
+    users_field.set_filter("names", names_var)
+    users_field.set_filter("projectName", project_name_var)
+
+    nested_fields = fields_to_dict(set(fields))
+
+    query_queue = collections.deque()
+    for key, value in nested_fields.items():
+        query_queue.append((key, value, users_field))
+
+    while query_queue:
+        item = query_queue.popleft()
+        key, value, parent = item
+        field = parent.add_field(key)
+        if value is FIELD_VALUE:
+            continue
+
+        for k, v in value.items():
+            query_queue.append((k, v, field))
+    return query
+
+
+def get_users(project_name=None, usernames=None, fields=None):
+    """Get Users.
+
+    Only administrators and managers can fetch all users. For other users
+    it is required to pass in 'project_name' filter.
+
+    Args:
+        project_name (Optional[str]): Project name.
+        usernames (Optional[Iterable[str]]): Filter by usernames.
+        fields (Optional[Iterable[str]]): Fields to be queried
+            for users.
+
+    Returns:
+        Generator[dict[str, Any]]: Queried users.
+
+    """
+    filters = {}
+    if usernames is not None:
+        usernames = set(usernames)
+        if not usernames:
+            return
+        filters["userNames"] = list(usernames)
+
+    if project_name is not None:
+        filters["projectName"] = project_name
+
+    con = ayon_api.get_server_api_connection()
+    if not fields:
+        fields = con.get_default_fields_for_type("user")
+
+    query = users_graphql_query(set(fields))
+    for attr, filter_value in filters.items():
+        query.set_variable_value(attr, filter_value)
+
+    for parsed_data in query.continuous_query(con):
+        for user in parsed_data["users"]:
+            user["accessGroups"] = json.loads(user["accessGroups"])
+            yield user
+# --- END of ayon-python-api implementation ---


 class UserItem:
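Aside: a hypothetical call of the vendored get_users() helper above; it assumes an authenticated ayon_api connection and a project that exists on the server:

# Project and field names here are illustrative.
for user in get_users(project_name="demo_project",
                      fields={"name", "accessGroups"}):
    print(user["name"], user["accessGroups"])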
@@ -32,19 +106,19 @@ class UserItem:
 class UsersModel:
     def __init__(self, controller):
         self._controller = controller
-        self._users_cache = CacheItem(default_factory=list)
+        self._users_cache = NestedCacheItem(default_factory=list)

-    def get_user_items(self):
+    def get_user_items(self, project_name):
         """Get user items.

         Returns:
             List[UserItem]: List of user items.

         """
-        self._invalidate_cache()
-        return self._users_cache.get_data()
+        self._invalidate_cache(project_name)
+        return self._users_cache[project_name].get_data()

-    def get_user_items_by_name(self):
+    def get_user_items_by_name(self, project_name):
         """Get user items by name.

         Implemented as most of cases using this model will need to find
@@ -56,10 +130,10 @@ class UsersModel:
         """
         return {
             user_item.username: user_item
-            for user_item in self.get_user_items()
+            for user_item in self.get_user_items(project_name)
         }

-    def get_user_item_by_username(self, username):
+    def get_user_item_by_username(self, project_name, username):
         """Get user item by username.

         Args:
@@ -69,16 +143,22 @@ class UsersModel:
             Union[UserItem, None]: User item or None if not found.

         """
-        self._invalidate_cache()
-        for user_item in self.get_user_items():
+        self._invalidate_cache(project_name)
+        for user_item in self.get_user_items(project_name):
             if user_item.username == username:
                 return user_item
         return None

-    def _invalidate_cache(self):
-        if self._users_cache.is_valid:
+    def _invalidate_cache(self, project_name):
+        cache = self._users_cache[project_name]
+        if cache.is_valid:
             return
-        self._users_cache.update_data([
+
+        if project_name is None:
+            cache.update_data([])
+            return
+
+        self._users_cache[project_name].update_data([
             UserItem.from_entity_data(user)
-            for user in ayon_api.get_users()
+            for user in get_users(project_name)
         ])
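Aside: the model above leans on NestedCacheItem keeping one independently invalidated cache per project key. A minimal pure-Python sketch of that behavior (the lifetime handling is an assumption; the real class lives in ayon_core.lib):

import time

class _ChildCache:
    def __init__(self, default_factory, lifetime):
        self._data = default_factory()
        self._lifetime = lifetime
        self._cached_at = None

    @property
    def is_valid(self):
        return (
            self._cached_at is not None
            and time.time() - self._cached_at < self._lifetime
        )

    def get_data(self):
        return self._data

    def update_data(self, data):
        self._data = data
        self._cached_at = time.time()

class TinyNestedCache:
    """One cache per key, created lazily on first access."""
    def __init__(self, default_factory, lifetime=60.0):
        self._factory = default_factory
        self._lifetime = lifetime
        self._children = {}

    def __getitem__(self, key):
        if key not in self._children:
            self._children[key] = _ChildCache(self._factory, self._lifetime)
        return self._children[key]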
@@ -1,7 +1,8 @@
-from .window import ContextDialog, main
+from .window import ContextDialog, main, ask_for_context


 __all__ = (
     "ContextDialog",
     "main",
+    "ask_for_context"
 )
@@ -791,3 +791,12 @@ def main(
     window.show()
     app.exec_()
     controller.store_output()
+
+
+def ask_for_context(strict=True):
+    controller = ContextDialogController()
+    controller.set_strict(strict)
+    window = ContextDialog(controller=controller)
+    window.exec_()
+
+    return controller.get_selected_context()
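Aside: a hypothetical caller of the new ask_for_context() helper; it blocks on the modal dialog and returns whatever context the controller collected:

# Illustrative only; requires a running Qt application.
context = ask_for_context(strict=False)
if context:
    print("selected context:", context)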
@@ -177,7 +177,7 @@ class VersionItem:
         other_version = abs(other.version)
         # Hero version is greater than non-hero
         if version == other_version:
-            return self.is_hero
+            return not self.is_hero
         return version > other_version

     def __lt__(self, other):
@@ -188,7 +188,7 @@ class VersionItem:
         other_version = abs(other.version)
         # Non-hero version is lesser than hero
         if version == other_version:
-            return not self.is_hero
+            return self.is_hero
         return version < other_version

     def __ge__(self, other):
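Aside: these comparison methods tie-break equal version numbers on the hero flag so ordering stays total and consistent between __gt__ and __lt__. A toy model of the same idea (the tie-break direction here is arbitrary):

class Toy:
    def __init__(self, version, is_hero=False):
        self.version = version
        self.is_hero = is_hero

    def __lt__(self, other):
        if self.version == other.version:
            # Tie-break equal numbers on the hero flag.
            return self.is_hero < other.is_hero
        return self.version < other.version

items = [Toy(3), Toy(3, True), Toy(2)]
print([(i.version, i.is_hero) for i in sorted(items)])
# [(2, False), (3, False), (3, True)]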
@@ -90,7 +90,6 @@ class ContainerItem:
         representation_id,
         loader_name,
         namespace,
         name,
-        object_name,
         item_id
     ):
@@ -98,7 +97,6 @@ class ContainerItem:
         self.loader_name = loader_name
-        self.object_name = object_name
         self.namespace = namespace
         self.name = name
         self.item_id = item_id

     @classmethod
@@ -107,7 +105,6 @@ class ContainerItem:
             representation_id=container["representation"],
             loader_name=container["loader"],
             namespace=container["namespace"],
             name=container["name"],
-            object_name=container["objectName"],
             item_id=uuid.uuid4().hex,
         )
@@ -204,7 +201,7 @@ class ContainersModel:
     def get_container_items(self):
         self._update_cache()
         return list(self._items_cache)

     def get_container_items_by_id(self, item_ids):
         return {
             item_id: self._container_items_by_id.get(item_id)
@@ -329,15 +326,25 @@ class ContainersModel:
             containers = list(host.ls())
         else:
             containers = []

         container_items = []
         containers_by_id = {}
         container_items_by_id = {}
         for container in containers:
-            item = ContainerItem.from_container_data(container)
+            try:
+                item = ContainerItem.from_container_data(container)
+            except Exception as e:
+                # skip item if required data are missing
+                self._controller.log_error(
+                    f"Failed to create item: {e}"
+                )
+                continue
+
             containers_by_id[item.item_id] = container
             container_items_by_id[item.item_id] = item
             container_items.append(item)

         self._containers_by_id = containers_by_id
         self._container_items_by_id = container_items_by_id
         self._items_cache = container_items
@@ -834,12 +834,13 @@ class AbstractWorkfilesFrontend(AbstractWorkfilesCommon):
         pass

     @abstractmethod
-    def get_workarea_file_items(self, folder_id, task_id):
+    def get_workarea_file_items(self, folder_id, task_name, sender=None):
         """Get workarea file items.

         Args:
             folder_id (str): Folder id.
-            task_id (str): Task id.
+            task_name (str): Task name.
+            sender (Optional[str]): Who requested workarea file items.

         Returns:
             list[FileItem]: List of workarea file items.
@@ -905,12 +906,12 @@ class AbstractWorkfilesFrontend(AbstractWorkfilesCommon):
         pass

     @abstractmethod
-    def get_workfile_info(self, folder_id, task_id, filepath):
+    def get_workfile_info(self, folder_id, task_name, filepath):
         """Workfile info from database.

         Args:
             folder_id (str): Folder id.
-            task_id (str): Task id.
+            task_name (str): Task id.
             filepath (str): Workfile path.

         Returns:
@@ -921,7 +922,7 @@ class AbstractWorkfilesFrontend(AbstractWorkfilesCommon):
         pass

     @abstractmethod
-    def save_workfile_info(self, folder_id, task_id, filepath, note):
+    def save_workfile_info(self, folder_id, task_name, filepath, note):
         """Save workfile info to database.

         At this moment the only information which can be saved about
@@ -932,7 +933,7 @@ class AbstractWorkfilesFrontend(AbstractWorkfilesCommon):

         Args:
             folder_id (str): Folder id.
-            task_id (str): Task id.
+            task_name (str): Task id.
             filepath (str): Workfile path.
             note (Union[str, None]): Note.
         """
@@ -278,7 +278,8 @@ class BaseWorkfileController(
         )

     def get_user_items_by_name(self):
-        return self._users_model.get_user_items_by_name()
+        project_name = self.get_current_project_name()
+        return self._users_model.get_user_items_by_name(project_name)

     # Host information
     def get_workfile_extensions(self):
@@ -410,9 +411,11 @@ class BaseWorkfileController(
         return self._workfiles_model.get_workarea_dir_by_context(
             folder_id, task_id)

-    def get_workarea_file_items(self, folder_id, task_id):
+    def get_workarea_file_items(self, folder_id, task_name, sender=None):
+        task_id = self._get_task_id(folder_id, task_name)
         return self._workfiles_model.get_workarea_file_items(
-            folder_id, task_id)
+            folder_id, task_id, task_name
+        )

     def get_workarea_save_as_data(self, folder_id, task_id):
         return self._workfiles_model.get_workarea_save_as_data(
@@ -447,12 +450,14 @@ class BaseWorkfileController(
         return self._workfiles_model.get_published_file_items(
             folder_id, task_name)

-    def get_workfile_info(self, folder_id, task_id, filepath):
+    def get_workfile_info(self, folder_id, task_name, filepath):
+        task_id = self._get_task_id(folder_id, task_name)
         return self._workfiles_model.get_workfile_info(
             folder_id, task_id, filepath
         )

-    def save_workfile_info(self, folder_id, task_id, filepath, note):
+    def save_workfile_info(self, folder_id, task_name, filepath, note):
+        task_id = self._get_task_id(folder_id, task_name)
         self._workfiles_model.save_workfile_info(
             folder_id, task_id, filepath, note
         )
@@ -627,6 +632,17 @@ class BaseWorkfileController(
     def _emit_event(self, topic, data=None):
         self.emit_event(topic, data, "controller")

+    def _get_task_id(self, folder_id, task_name, sender=None):
+        task_item = self._hierarchy_model.get_task_item_by_name(
+            self.get_current_project_name(),
+            folder_id,
+            task_name,
+            sender
+        )
+        if not task_item:
+            return None
+        return task_item.id
+
     # Expected selection
     # - expected selection is used to restore selection after refresh
     # or when current context should be used
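Aside: _get_task_id above is the crux of the task_id to task_name refactor in these workfiles hunks; UI widgets now pass names and the controller resolves a name to an id once at the boundary. A toy version of that lookup (data is hypothetical):

_TASKS_BY_CONTEXT = {
    ("folder-1", "compositing"): "task-123",  # hypothetical mapping
}

def get_task_id(folder_id, task_name):
    # Returns None when the task is unknown, like the real helper.
    return _TASKS_BY_CONTEXT.get((folder_id, task_name))

print(get_task_id("folder-1", "compositing"))  # task-123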
@@ -722,7 +738,7 @@ class BaseWorkfileController(
         self._host_save_workfile(dst_filepath)

         # Make sure workfile info exists
-        self.save_workfile_info(folder_id, task_id, dst_filepath, None)
+        self.save_workfile_info(folder_id, task_name, dst_filepath, None)

         # Create extra folders
         create_workdir_extra_folders(
@@ -1,6 +1,7 @@
 import os
 import re
 import copy
+import uuid

 import arrow
 import ayon_api
@@ -173,7 +174,7 @@ class WorkareaModel:
             folder_mapping[task_id] = workdir
         return workdir

-    def get_file_items(self, folder_id, task_id):
+    def get_file_items(self, folder_id, task_id, task_name):
         items = []
         if not folder_id or not task_id:
             return items
@@ -192,7 +193,7 @@ class WorkareaModel:
                 continue

             workfile_info = self._controller.get_workfile_info(
-                folder_id, task_id, filepath
+                folder_id, task_name, filepath
             )
             modified = os.path.getmtime(filepath)
             items.append(FileItem(
@@ -587,6 +588,7 @@ class WorkfileEntitiesModel:

         username = self._get_current_username()
         workfile_info = {
+            "id": uuid.uuid4().hex,
             "path": rootless_path,
             "taskId": task_id,
             "attrib": {
@@ -770,19 +772,21 @@ class WorkfilesModel:
         return self._workarea_model.get_workarea_dir_by_context(
             folder_id, task_id)

-    def get_workarea_file_items(self, folder_id, task_id):
+    def get_workarea_file_items(self, folder_id, task_id, task_name):
         """Workfile items for passed context from workarea.

         Args:
             folder_id (Union[str, None]): Folder id.
             task_id (Union[str, None]): Task id.
+            task_name (Union[str, None]): Task name.

         Returns:
             list[FileItem]: List of file items matching workarea of passed
                 context.
         """

-        return self._workarea_model.get_file_items(folder_id, task_id)
+        return self._workarea_model.get_file_items(
+            folder_id, task_id, task_name
+        )

     def get_workarea_save_as_data(self, folder_id, task_id):
         return self._workarea_model.get_workarea_save_as_data(
@@ -66,7 +66,7 @@ class WorkAreaFilesModel(QtGui.QStandardItemModel):
         self._empty_item_used = False
         self._published_mode = False
         self._selected_folder_id = None
-        self._selected_task_id = None
+        self._selected_task_name = None

         self._add_missing_context_item()

@@ -153,7 +153,7 @@ class WorkAreaFilesModel(QtGui.QStandardItemModel):

     def _on_task_changed(self, event):
         self._selected_folder_id = event["folder_id"]
-        self._selected_task_id = event["task_id"]
+        self._selected_task_name = event["task_name"]
         if not self._published_mode:
             self._fill_items()

@@ -179,13 +179,13 @@ class WorkAreaFilesModel(QtGui.QStandardItemModel):

     def _fill_items_impl(self):
         folder_id = self._selected_folder_id
-        task_id = self._selected_task_id
-        if not folder_id or not task_id:
+        task_name = self._selected_task_name
+        if not folder_id or not task_name:
             self._add_missing_context_item()
             return

         file_items = self._controller.get_workarea_file_items(
-            folder_id, task_id
+            folder_id, task_name
         )
         root_item = self.invisibleRootItem()
         if not file_items:
@@ -75,7 +75,7 @@ class SidePanelWidget(QtWidgets.QWidget):
         self._btn_note_save = btn_note_save

         self._folder_id = None
-        self._task_id = None
+        self._task_name = None
         self._filepath = None
         self._orig_note = ""
         self._controller = controller
@@ -93,10 +93,10 @@ class SidePanelWidget(QtWidgets.QWidget):

     def _on_selection_change(self, event):
         folder_id = event["folder_id"]
-        task_id = event["task_id"]
+        task_name = event["task_name"]
         filepath = event["path"]

-        self._set_context(folder_id, task_id, filepath)
+        self._set_context(folder_id, task_name, filepath)

     def _on_note_change(self):
         text = self._note_input.toPlainText()
@@ -106,19 +106,19 @@ class SidePanelWidget(QtWidgets.QWidget):
         note = self._note_input.toPlainText()
         self._controller.save_workfile_info(
             self._folder_id,
-            self._task_id,
+            self._task_name,
             self._filepath,
             note
         )
         self._orig_note = note
         self._btn_note_save.setEnabled(False)

-    def _set_context(self, folder_id, task_id, filepath):
+    def _set_context(self, folder_id, task_name, filepath):
         workfile_info = None
         # Check if folder, task and file are selected
-        if bool(folder_id) and bool(task_id) and bool(filepath):
+        if bool(folder_id) and bool(task_name) and bool(filepath):
             workfile_info = self._controller.get_workfile_info(
-                folder_id, task_id, filepath
+                folder_id, task_name, filepath
             )
         enabled = workfile_info is not None

@@ -127,7 +127,7 @@ class SidePanelWidget(QtWidgets.QWidget):
         self._btn_note_save.setEnabled(enabled)

         self._folder_id = folder_id
-        self._task_id = task_id
+        self._task_name = task_name
         self._filepath = filepath

         # Disable inputs and remove texts if any required arguments are
@@ -17,3 +17,4 @@ Click = "^8"
 OpenTimelineIO = "0.16.0"
 opencolorio = "2.2.1"
 Pillow = "9.5.0"
+websocket-client = ">=0.40.0,<2"
@@ -78,7 +78,6 @@ unfixable = []
 dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$"

 exclude = [
-    "client/ayon_core/hosts/unreal/integration/*",
     "client/ayon_core/modules/click_wrap.py",
     "client/ayon_core/scripts/slates/__init__.py",
     "server_addon/deadline/client/ayon_deadline/repository/custom/plugins/CelAction/*",
@@ -106,7 +105,7 @@ line-ending = "auto"

 [tool.codespell]
 # Ignore words that are not in the dictionary.
-ignore-words-list = "ayon,ynput,parms,parm,hda,developpement,ue"
+ignore-words-list = "ayon,ynput,parms,parm,hda,developpement"

 # Ignore lines that contain this regex. This is hack for missing inline ignore.
 # Remove with next codespell release (>2.2.6)
@@ -454,9 +454,7 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin,
         self.plugin_info = self.get_plugin_info()
         self.aux_files = self.get_aux_files()

-        auth = instance.data["deadline"]["auth"]
-        verify = instance.data["deadline"]["verify"]
-        job_id = self.process_submission(auth, verify)
+        job_id = self.process_submission()
         self.log.info("Submitted job to Deadline: {}.".format(job_id))

         # TODO: Find a way that's more generic and not render type specific
@@ -469,10 +467,12 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin,
                 job_info=render_job_info,
                 plugin_info=render_plugin_info
             )
+            auth = instance.data["deadline"]["auth"]
+            verify = instance.data["deadline"]["verify"]
             render_job_id = self.submit(payload, auth, verify)
             self.log.info("Render job id: %s", render_job_id)

-    def process_submission(self, auth=None, verify=True):
+    def process_submission(self):
         """Process data for submission.

         This takes Deadline JobInfo, PluginInfo, AuxFile, creates payload
@@ -483,6 +483,8 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin,

         """
         payload = self.assemble_payload()
+        auth = self._instance.data["deadline"]["auth"]
+        verify = self._instance.data["deadline"]["verify"]
         return self.submit(payload, auth, verify)

     @abstractmethod
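Aside: the refactor across these Deadline hunks moves auth/verify resolution into the base class so callers stop threading credentials through process_submission(). A toy sketch of the resulting template-method shape (all names hypothetical):

class SubmitterBase:
    def __init__(self, instance_data):
        self._instance_data = instance_data

    def process_submission(self):
        payload = self.assemble_payload()
        # Credentials are resolved here, not passed in by callers.
        auth = self._instance_data["deadline"]["auth"]
        verify = self._instance_data["deadline"]["verify"]
        return self.submit(payload, auth, verify)

    def assemble_payload(self):
        return {"JobInfo": {}, "PluginInfo": {}, "AuxFiles": []}

    def submit(self, payload, auth, verify):
        # A real implementation would POST to the Deadline Web Service.
        return "job-id"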
@@ -175,8 +175,8 @@ class BlenderSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline,
         instance.data["toBeRenderedOn"] = "deadline"

         payload = self.assemble_payload()
-        auth = instance.data["deadline"]["auth"]
-        verify = instance.data["deadline"]["verify"]
+        auth = self._instance.data["deadline"]["auth"]
+        verify = self._instance.data["deadline"]["verify"]
         return self.submit(payload, auth=auth, verify=verify)

     def from_published_scene(self):
@@ -182,8 +182,8 @@ class MaxSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline,

         self.log.debug("Submitting 3dsMax render..")
         project_settings = instance.context.data["project_settings"]
-        auth = instance.data["deadline"]["auth"]
-        verify = instance.data["deadline"]["verify"]
+        auth = self._instance.data["deadline"]["auth"]
+        verify = self._instance.data["deadline"]["verify"]
         if instance.data.get("multiCamera"):
             self.log.debug("Submitting jobs for multiple cameras..")
             payload = self._use_published_name_for_multiples(
@@ -294,7 +294,7 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline,

         return plugin_payload

-    def process_submission(self, auth=None, verify=True):
+    def process_submission(self):
         from maya import cmds
         instance = self._instance

@@ -331,10 +331,11 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline,
                    for x in ['vrayscene', 'assscene']), (
             "Vray Scene and Ass Scene options are mutually exclusive")

+        auth = self._instance.data["deadline"]["auth"]
+        verify = self._instance.data["deadline"]["verify"]
         if "vrayscene" in instance.data["families"]:
             self.log.debug("Submitting V-Ray scene render..")
             vray_export_payload = self._get_vray_export_payload(payload_data)

             export_job = self.submit(vray_export_payload,
                                      auth=auth,
                                      verify=verify)
@@ -409,6 +410,9 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline,
             files = exp
         assembly_files = files

+        auth = instance.data["deadline"]["auth"]
+        verify = instance.data["deadline"]["verify"]
+
         # Define frame tile jobs
         frame_file_hash = {}
         frame_payloads = {}
@@ -458,8 +462,8 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline,
         # Submit frame tile jobs
         frame_tile_job_id = {}
         for frame, tile_job_payload in frame_payloads.items():
-            job_id = self.submit(tile_job_payload,
-                                 instance.data["deadline"]["auth"])
+            job_id = self.submit(
+                tile_job_payload, auth, verify)
             frame_tile_job_id[frame] = job_id

         # Define assembly payloads
@@ -562,8 +566,6 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline,
         # Submit assembly jobs
         assembly_job_ids = []
         num_assemblies = len(assembly_payloads)
-        auth = instance.data["deadline"]["auth"]
-        verify = instance.data["deadline"]["verify"]
         for i, payload in enumerate(assembly_payloads):
             self.log.debug(
                 "submitting assembly job {} of {}".format(i + 1,
@@ -2,7 +2,10 @@
 """Creator plugin for creating publishable Houdini Digital Assets."""
 import ayon_api

-from ayon_core.pipeline import CreatorError
+from ayon_core.pipeline import (
+    CreatorError,
+    get_current_project_name
+)
 from ayon_houdini.api import plugin
 import hou

@@ -56,8 +59,18 @@ class CreateHDA(plugin.HoudiniCreator):
             raise CreatorError(
                 "cannot create hda from node {}".format(to_hda))

+        # Pick a unique type name for HDA product per folder path per project.
+        type_name = (
+            "{project_name}{folder_path}_{node_name}".format(
+                project_name=get_current_project_name(),
+                folder_path=folder_path.replace("/","_"),
+                node_name=node_name
+            )
+        )
+
         hda_node = to_hda.createDigitalAsset(
-            name=node_name,
+            name=type_name,
             description=node_name,
             hda_file_name="$HIP/{}.hda".format(node_name)
         )
         hda_node.layoutChildren()
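Aside: a worked example of the unique HDA type name assembled above, with hypothetical context values:

project_name = "demo_project"            # hypothetical
folder_path = "/assets/characters/hero"  # hypothetical
node_name = "rigMain"                    # hypothetical

type_name = "{project_name}{folder_path}_{node_name}".format(
    project_name=project_name,
    folder_path=folder_path.replace("/", "_"),
    node_name=node_name,
)
print(type_name)  # demo_project_assets_characters_hero_rigMain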
@@ -1,6 +1,7 @@
 # -*- coding: utf-8 -*-
 import os
 from ayon_core.pipeline import get_representation_path
+from ayon_core.pipeline.load import LoadError
 from ayon_houdini.api import (
     pipeline,
     plugin
@@ -28,14 +29,18 @@ class HdaLoader(plugin.HoudiniLoader):
         # Get the root node
         obj = hou.node("/obj")

-        # Create a unique name
-        counter = 1
         namespace = namespace or context["folder"]["name"]
-        formatted = "{}_{}".format(namespace, name) if namespace else name
-        node_name = "{0}_{1:03d}".format(formatted, counter)
+        node_name = "{}_{}".format(namespace, name) if namespace else name

         hou.hda.installFile(file_path)
-        hda_node = obj.createNode(name, node_name)
+
+        # Get the type name from the HDA definition.
+        hda_defs = hou.hda.definitionsInFile(file_path)
+        if not hda_defs:
+            raise LoadError(f"No HDA definitions found in file: {file_path}")
+
+        type_name = hda_defs[0].nodeTypeName()
+        hda_node = obj.createNode(type_name, node_name)

         self[:] = [hda_node]

@@ -272,10 +272,8 @@ def reset_frame_range(fps: bool = True):
         scene frame rate in frames-per-second.
     """
     if fps:
-        task_entity = get_current_task_entity()
-        task_attributes = task_entity["attrib"]
-        fps_number = float(task_attributes["fps"])
-        rt.frameRate = fps_number
+        rt.frameRate = float(get_fps_for_current_context())

     frame_range = get_frame_range()

     set_timeline(
@@ -284,6 +282,22 @@ def reset_frame_range(fps: bool = True):
         frame_range["frameStartHandle"], frame_range["frameEndHandle"])


+def get_fps_for_current_context():
+    """Get fps that should be set for current context.
+
+    Todos:
+        - Skip project value.
+        - Merge logic with 'get_frame_range' and 'reset_scene_resolution' ->
+            all the values in the functions can be collected at one place as
+            they have same requirements.
+
+    Returns:
+        Union[int, float]: FPS value.
+    """
+    task_entity = get_current_task_entity(fields={"attrib"})
+    return task_entity["attrib"]["fps"]
+
+
 def reset_unit_scale():
     """Apply the unit scale setting to 3dsMax
     """
@@ -358,7 +372,7 @@ def is_headless():
 def set_timeline(frameStart, frameEnd):
     """Set frame range for timeline editor in Max
     """
-    rt.animationRange = rt.interval(frameStart, frameEnd)
+    rt.animationRange = rt.interval(int(frameStart), int(frameEnd))
     return rt.animationRange


@@ -145,15 +145,35 @@ attributes "OpenPypeContext"
     rt.saveMaxFile(dst_path)


-def ls() -> list:
+def parse_container(container):
+    """Return the container node's full container data.
+
+    Args:
+        container (str): A container node name.
+
+    Returns:
+        dict: The container schema data for this container node.
+
+    """
+    data = lib.read(container)
+
+    # Backwards compatibility pre-schemas for containers
+    data["schema"] = data.get("schema", "openpype:container-3.0")
+
+    # Append transient data
+    data["objectName"] = container.Name
+    return data
+
+
+def ls():
     """Get all AYON containers."""
     objs = rt.objects
     containers = [
@@ -156,7 +176,7 @@ def ls() -> list:
     ]

     for container in sorted(containers, key=attrgetter("name")):
-        yield lib.read(container)
+        yield parse_container(container)


 def on_new():
@@ -7,7 +7,7 @@ Because of limited api, alembics can be only loaded, but not easily updated.
 import os
 from ayon_core.pipeline import load, get_representation_path
 from ayon_max.api import lib, maintained_selection
-from ayon_max.api.lib import unique_namespace
+from ayon_max.api.lib import unique_namespace, reset_frame_range
 from ayon_max.api.pipeline import (
     containerise,
     get_previous_loaded_object,
@@ -38,6 +38,9 @@ class AbcLoader(load.LoaderPlugin):
         }

         rt.AlembicImport.ImportToRoot = False
+        # TODO: it will be removed after the improvement
+        # on the post-system setup
+        reset_frame_range()
         rt.importFile(file_path, rt.name("noPrompt"), using=rt.AlembicImport)

         abc_after = {
@@ -8,7 +8,7 @@ from ayon_core.lib import (
 )


-def _get_animation_attr_defs(cls):
+def _get_animation_attr_defs():
     """Get Animation generic definitions."""
     defs = lib.collect_animation_defs()
     defs.extend(
@@ -99,9 +99,7 @@ class CreateAnimation(plugin.MayaHiddenCreator):
         return node_data

     def get_instance_attr_defs(self):
-        defs = super(CreateAnimation, self).get_instance_attr_defs()
-        defs += _get_animation_attr_defs(self)
-        return defs
+        return _get_animation_attr_defs()


 class CreatePointCache(plugin.MayaCreator):
@@ -123,9 +121,7 @@ class CreatePointCache(plugin.MayaCreator):
         return node_data

     def get_instance_attr_defs(self):
-        defs = super(CreatePointCache, self).get_instance_attr_defs()
-        defs += _get_animation_attr_defs(self)
-        return defs
+        return _get_animation_attr_defs()

     def create(self, product_name, instance_data, pre_create_data):
         instance = super(CreatePointCache, self).create(
@@ -42,6 +42,6 @@ class CreateLook(plugin.MayaCreator):

     def get_pre_create_attr_defs(self):
         # Show same attributes on create but include use selection
-        defs = super(CreateLook, self).get_pre_create_attr_defs()
+        defs = list(super().get_pre_create_attr_defs())
         defs.extend(self.get_instance_attr_defs())
         return defs
@@ -1,3 +1,3 @@
 # -*- coding: utf-8 -*-
 """Package declaring AYON addon 'maya' version."""
-__version__ = "0.2.2"
+__version__ = "0.2.3"
@@ -1,6 +1,6 @@
 name = "maya"
 title = "Maya"
-version = "0.2.2"
+version = "0.2.3"
 client_dir = "ayon_maya"

 ayon_required_addons = {
@@ -1,5 +1,4 @@
 import os
-from pprint import pformat
 import re
 import json
 import six
@@ -37,6 +36,7 @@ from ayon_core.pipeline import (
     get_current_host_name,
     get_current_project_name,
     get_current_folder_path,
+    get_current_task_name,
     AYON_INSTANCE_ID,
     AVALON_INSTANCE_ID,
 )
@@ -154,15 +154,9 @@ def set_node_data(node, knobname, data):
     """
     # if exists then update data
     if knobname in node.knobs():
-        log.debug("Updating knobname `{}` on node `{}`".format(
-            knobname, node.name()
-        ))
         update_node_data(node, knobname, data)
         return

-    log.debug("Creating knobname `{}` on node `{}`".format(
-        knobname, node.name()
-    ))
     # else create new
     knob_value = JSON_PREFIX + json.dumps(data)
     knob = nuke.String_Knob(knobname)
@@ -513,11 +507,9 @@ def get_avalon_knob_data(node, prefix="avalon:", create=True):
     # check if the node is avalon tracked
     try:
         # check if data available on the node
-        test = node[DATA_GROUP_KEY].value()
-        log.debug("Only testing if data available: `{}`".format(test))
-    except NameError as e:
+        _ = node[DATA_GROUP_KEY].value()
+    except NameError:
         # if it doesn't then create it
-        log.debug("Creating avalon knob: `{}`".format(e))
         if create:
             node = set_avalon_knob_data(node)
             return get_avalon_knob_data(node)
@@ -678,8 +670,6 @@ def get_imageio_node_setting(node_class, plugin_name, product_name):
             imageio_node = node
             break

-    log.debug("__ imageio_node: {}".format(imageio_node))
-
     if not imageio_node:
         return

|
@ -690,8 +680,6 @@ def get_imageio_node_setting(node_class, plugin_name, product_name):
|
|||
product_name,
|
||||
imageio_node["knobs"]
|
||||
)
|
||||
|
||||
log.info("ImageIO node: {}".format(imageio_node))
|
||||
return imageio_node
|
||||
|
||||
|
||||
|
|
@ -706,8 +694,6 @@ def get_imageio_node_override_setting(
|
|||
# find matching override node
|
||||
override_imageio_node = None
|
||||
for onode in override_nodes:
|
||||
log.debug("__ onode: {}".format(onode))
|
||||
log.debug("__ productName: {}".format(product_name))
|
||||
if node_class not in onode["nuke_node_class"]:
|
||||
continue
|
||||
|
||||
|
|
@@ -727,7 +713,6 @@ def get_imageio_node_override_setting(
             override_imageio_node = onode
             break

-    log.debug("__ override_imageio_node: {}".format(override_imageio_node))
     # add overrides to imageio_node
     if override_imageio_node:
         # get all knob names in imageio_node
@@ -740,7 +725,6 @@ def get_imageio_node_override_setting(
         for knob in knobs_settings:
             # add missing knobs into imageio_node
             if oknob_name not in knob_names:
-                log.debug("_ adding knob: `{}`".format(oknob))
                 knobs_settings.append(oknob)
                 knob_names.append(oknob_name)
                 continue
@@ -750,9 +734,6 @@ def get_imageio_node_override_setting(

             knob_type = knob["type"]
             # override matching knob name
-            log.debug(
-                "_ overriding knob: `{}` > `{}`".format(knob, oknob)
-            )
             if not oknob_value:
                 # remove original knob if no value found in oknob
                 knobs_settings.remove(knob)
@@ -923,7 +904,6 @@ def writes_version_sync():
             new_version = "v" + str("{" + ":0>{}".format(padding) + "}").format(
                 int(rootVersion)
             )
-            log.debug("new_version: {}".format(new_version))
         except Exception:
             return

@@ -936,13 +916,11 @@ def writes_version_sync():

         try:
             if avalon_knob_data["families"] not in ["render"]:
-                log.debug(avalon_knob_data["families"])
                 continue

             node_file = each["file"].value()

             node_version = "v" + get_version_from_path(node_file)
-            log.debug("node_version: {}".format(node_version))

             node_new_file = node_file.replace(node_version, new_version)
             each["file"].setValue(node_new_file)
@@ -1332,7 +1310,6 @@ def set_node_knobs_from_settings(node, knob_settings, **kwargs):
         kwargs (dict)[optional]: keys for formattable knob settings
     """
     for knob in knob_settings:
-        log.debug("__ knob: {}".format(pformat(knob)))
         knob_name = knob["name"]
         if knob_name not in node.knobs():
             continue
@@ -1486,13 +1463,17 @@ class WorkfileSettings(object):
         Context._project_entity = project_entity
         self._project_name = project_name
         self._folder_path = get_current_folder_path()
+        self._task_name = get_current_task_name()
         self._folder_entity = ayon_api.get_folder_by_path(
             project_name, self._folder_path
         )
         self._root_node = root_node or nuke.root()
         self._nodes = self.get_nodes(nodes=nodes)

-        self.data = kwargs
+        context_data = get_template_data_with_names(
+            project_name, self._folder_path, self._task_name, "nuke"
+        )
+        self.formatting_data = context_data

     def get_nodes(self, nodes=None, nodes_filter=None):

@@ -1509,36 +1490,23 @@ class WorkfileSettings(object):
         for filter in nodes_filter:
             return [n for n in self._nodes if filter in n.Class()]

-    def set_viewers_colorspace(self, viewer_dict):
+    def set_viewers_colorspace(self, imageio_nuke):
         ''' Adds correct colorspace to viewer

         Arguments:
-            viewer_dict (dict): adjustments from presets
+            imageio_nuke (dict): nuke colorspace configurations

         '''
-        if not isinstance(viewer_dict, dict):
-            msg = "set_viewers_colorspace(): argument should be dictionary"
-            log.error(msg)
-            nuke.message(msg)
-            return
-
         filter_knobs = [
             "viewerProcess",
             "wipe_position",
             "monitorOutOutputTransform"
         ]

-        display, viewer = get_viewer_config_from_string(
-            viewer_dict["viewerProcess"]
-        )
-        viewer_process = create_viewer_profile_string(
-            viewer, display, path_like=False
-        )
-        display, viewer = get_viewer_config_from_string(
-            viewer_dict["output_transform"]
-        )
-        output_transform = create_viewer_profile_string(
-            viewer, display, path_like=False
-        )
+        viewer_process = self._display_and_view_formatted(
+            imageio_nuke["viewer"]
+        )
+        output_transform = self._display_and_view_formatted(
+            imageio_nuke["monitor"]
+        )
         erased_viewers = []
         for v in nuke.allNodes(filter="Viewer"):
@@ -1547,8 +1515,10 @@ class WorkfileSettings(object):

             if viewer_process not in v["viewerProcess"].value():
                 copy_inputs = v.dependencies()
-                copy_knobs = {k: v[k].value() for k in v.knobs()
-                              if k not in filter_knobs}
+                copy_knobs = {
+                    k: v[k].value() for k in v.knobs()
+                    if k not in filter_knobs
+                }

                 # delete viewer with wrong settings
                 erased_viewers.append(v["name"].value())
@@ -1574,6 +1544,21 @@ class WorkfileSettings(object):
                 "Attention! Viewer nodes {} were erased."
                 "It had wrong color profile".format(erased_viewers))

+    def _display_and_view_formatted(self, view_profile):
+        """ Format display and view profile string
+
+        Args:
+            view_profile (dict): view and display profile
+
+        Returns:
+            str: formatted display and view profile string
+        """
+        display_view = create_viewer_profile_string(
+            view_profile["view"], view_profile["display"], path_like=False
+        )
+        # format any template tokens used in the string
+        return StringTemplate(display_view).format_strict(self.formatting_data)
+
     def set_root_colorspace(self, imageio_host):
         ''' Adds correct colorspace to root

@@ -1590,12 +1575,12 @@ class WorkfileSettings(object):
         if not config_data:
             # no ocio config found and no custom path used
             if self._root_node["colorManagement"].value() \
-                not in color_management:
+                    not in color_management:
                 self._root_node["colorManagement"].setValue(color_management)

             # second set ocio version
             if self._root_node["OCIO_config"].value() \
-                not in native_ocio_config:
+                    not in native_ocio_config:
                 self._root_node["OCIO_config"].setValue(native_ocio_config)

         else:
@@ -1623,21 +1608,25 @@ class WorkfileSettings(object):
         if correct_settings:
             self._set_ocio_config_path_to_workfile(config_data)

+        workfile_settings_output = {}
         # get monitor lut from settings respecting Nuke version differences
         monitor_lut_data = self._get_monitor_settings(
             workfile_settings["monitor_out_lut"],
             workfile_settings["monitor_lut"]
         )
-        monitor_lut_data.update({
-            "workingSpaceLUT": workfile_settings["working_space"],
-            "int8Lut": workfile_settings["int_8_lut"],
-            "int16Lut": workfile_settings["int_16_lut"],
-            "logLut": workfile_settings["log_lut"],
-            "floatLut": workfile_settings["float_lut"]
-        })
+        workfile_settings_output.update(monitor_lut_data)
+        workfile_settings_output.update(
+            {
+                "workingSpaceLUT": workfile_settings["working_space"],
+                "int8Lut": workfile_settings["int_8_lut"],
+                "int16Lut": workfile_settings["int_16_lut"],
+                "logLut": workfile_settings["log_lut"],
+                "floatLut": workfile_settings["float_lut"],
+            }
+        )

         # then set the rest
-        for knob, value_ in monitor_lut_data.items():
+        for knob, value_ in workfile_settings_output.items():
             # skip unfilled ocio config path
             # it will be dict in value
             if isinstance(value_, dict):
@@ -1646,7 +1635,6 @@ class WorkfileSettings(object):
             if not value_:
                 continue
             self._root_node[knob].setValue(str(value_))
-            log.debug("nuke.root()['{}'] changed to: {}".format(knob, value_))

     def _get_monitor_settings(self, viewer_lut, monitor_lut):
         """ Get monitor settings from viewer and monitor lut
@@ -1889,8 +1877,6 @@ Reopening Nuke should synchronize these paths and resolve any discrepancies.
         elif node_data:
             nuke_imageio_writes = get_write_node_template_attr(node)

-        log.debug("nuke_imageio_writes: `{}`".format(nuke_imageio_writes))
-
         if not nuke_imageio_writes:
             return

@@ -1938,7 +1924,6 @@ Reopening Nuke should synchronize these paths and resolve any discrepancies.
                 "to": future
             }

-        log.debug(changes)
         if changes:
             msg = "Read nodes are not set to correct colorspace:\n\n"
             for nname, knobs in changes.items():
@@ -1972,7 +1957,7 @@ Reopening Nuke should synchronize these paths and resolve any discrepancies.

         log.info("Setting colorspace to viewers...")
         try:
-            self.set_viewers_colorspace(nuke_colorspace["viewer"])
+            self.set_viewers_colorspace(nuke_colorspace)
         except AttributeError as _error:
             msg = "Set Colorspace to viewer error: {}".format(_error)
             nuke.message(msg)
@@ -2653,8 +2638,6 @@ class NukeDirmap(HostDirmap):
     def dirmap_routine(self, source_path, destination_path):
         source_path = source_path.lower().replace(os.sep, '/')
         destination_path = destination_path.lower().replace(os.sep, '/')
-        log.debug("Map: {} with: {}->{}".format(self.file_name,
-                                                source_path, destination_path))
         if platform.system().lower() == "windows":
             self.file_name = self.file_name.lower().replace(
                 source_path, destination_path)
@@ -37,8 +37,6 @@ from .lib import (
     INSTANCE_DATA_KNOB,
     get_main_window,
     WorkfileSettings,
-    # TODO: remove this once workfile builder will be removed
-    process_workfile_builder,
     start_workfile_template_builder,
     launch_workfiles_app,
     check_inventory_versions,
@@ -67,6 +65,7 @@ from .workio import (
     current_file
 )
 from .constants import ASSIST
+from . import push_to_project

 log = Logger.get_logger(__name__)

@@ -159,9 +158,6 @@ def add_nuke_callbacks():
     # template builder callbacks
     nuke.addOnCreate(start_workfile_template_builder, nodeClass="Root")

-    # TODO: remove this callback once workfile builder will be removed
-    nuke.addOnCreate(process_workfile_builder, nodeClass="Root")
-
     # fix ffmpeg settings on script
     nuke.addOnScriptLoad(on_script_load)

@@ -332,6 +328,11 @@ def _install_menu():
         lambda: update_placeholder()
     )

+    menu.addCommand(
+        "Push to Project",
+        lambda: push_to_project.main()
+    )
+
     menu.addSeparator()
     menu.addCommand(
         "Experimental tools...",
@@ -12,6 +12,7 @@ from ayon_core.lib import (
     BoolDef,
     EnumDef
 )
+from ayon_core.lib import StringTemplate
 from ayon_core.pipeline import (
     LoaderPlugin,
     CreatorError,
@@ -38,7 +39,6 @@ from .lib import (
     set_node_data,
     get_node_data,
     get_view_process_node,
-    get_viewer_config_from_string,
     get_filenames_without_hash,
     link_knobs
 )
@@ -638,12 +638,15 @@ class ExporterReview(object):
         from . import lib as opnlib
         nuke_imageio = opnlib.get_nuke_imageio_settings()

-        # TODO: this is only securing backward compatibility lets remove
-        # this once all projects's anatomy are updated to newer config
-        if "baking" in nuke_imageio.keys():
-            return nuke_imageio["baking"]["viewerProcess"]
+        if nuke_imageio["baking_target"]["enabled"]:
+            return nuke_imageio["baking_target"]
         else:
-            return nuke_imageio["viewer"]["viewerProcess"]
+            # viewer is having display and view keys only and it is
+            # display_view type
+            return {
+                "type": "display_view",
+                "display_view": nuke_imageio["viewer"],
+            }


 class ExporterReviewLut(ExporterReview):
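Aside: get_imageio_baking_profile() now returns a typed dict instead of a bare viewer-process string. A toy consumer that branches on the two shapes, mirroring the write-node hunk further below:

def describe_profile(profile):
    if profile["type"] == "display_view":
        display_view = profile["display_view"]
        return "display/view: {display}/{view}".format(**display_view)
    if profile["type"] == "colorspace":
        return "colorspace: {}".format(profile["colorspace"])
    raise ValueError(
        "Invalid baking color space type: {}".format(profile["type"])
    )

print(describe_profile(
    {"type": "display_view",
     "display_view": {"display": "ACES", "view": "sRGB"}}
))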
@@ -790,6 +793,7 @@ class ExporterReviewMov(ExporterReview):
         self.viewer_lut_raw = klass.viewer_lut_raw
         self.write_colorspace = instance.data["colorspace"]
         self.color_channels = instance.data["color_channels"]
+        self.formatting_data = instance.data["anatomyData"]

         self.name = name or "baked"
         self.ext = ext or "mov"
@@ -837,7 +841,7 @@ class ExporterReviewMov(ExporterReview):
         with maintained_selection():
             self.log.info("Saving nodes as file... ")
             # create nk path
-            path = os.path.splitext(self.path)[0] + ".nk"
+            path = f"{os.path.splitext(self.path)[0]}.nk"
             # save file to the path
             if not os.path.exists(os.path.dirname(path)):
                 os.makedirs(os.path.dirname(path))
@@ -861,21 +865,20 @@ class ExporterReviewMov(ExporterReview):
             bake_viewer_process = kwargs["bake_viewer_process"]
             bake_viewer_input_process_node = kwargs[
                 "bake_viewer_input_process"]
-            viewer_process_override = kwargs[
-                "viewer_process_override"]
-
-            baking_view_profile = (
-                viewer_process_override or self.get_imageio_baking_profile())
+            baking_colorspace = self.get_imageio_baking_profile()

+            colorspace_override = kwargs["colorspace_override"]
+            if colorspace_override["enabled"]:
+                baking_colorspace = colorspace_override

             fps = self.instance.context.data["fps"]

-            self.log.debug(">> baking_view_profile `{}`".format(
-                baking_view_profile))
+            self.log.debug(f">> baking_view_profile `{baking_colorspace}`")

             add_custom_tags = kwargs.get("add_custom_tags", [])

-            self.log.info(
-                "__ add_custom_tags: `{0}`".format(add_custom_tags))
+            self.log.info(f"__ add_custom_tags: `{add_custom_tags}`")

             product_name = self.instance.data["productName"]
             self._temp_nodes[product_name] = []
@ -932,32 +935,64 @@ class ExporterReviewMov(ExporterReview):
        if not self.viewer_lut_raw:
            # OCIODisplay
            dag_node = nuke.createNode("OCIODisplay")
            if baking_colorspace["type"] == "display_view":
                display_view = baking_colorspace["display_view"]

                # assign display
                display, viewer = get_viewer_config_from_string(
                    str(baking_view_profile)
                )
                if display:
                    dag_node["display"].setValue(display)
                message = "OCIODisplay... '{}'"
                node = nuke.createNode("OCIODisplay")

                # assign viewer
                dag_node["view"].setValue(viewer)
                # assign display and view
                display = display_view["display"]
                view = display_view["view"]

                if config_data:
                    # convert display and view to colorspace
                    colorspace = get_display_view_colorspace_name(
                        config_path=config_data["path"],
                        display=display,
                        view=viewer
                # display could not be set in nuke_default config
                if display:
                    # format display string with anatomy data
                    display = StringTemplate(display).format_strict(
                        self.formatting_data
                    )
                    node["display"].setValue(display)

                # format view string with anatomy data
                view = StringTemplate(view).format_strict(
                    self.formatting_data)
                # assign viewer
                node["view"].setValue(view)

                if config_data:
                    # convert display and view to colorspace
                    colorspace = get_display_view_colorspace_name(
                        config_path=config_data["path"],
                        display=display, view=view
                    )

            # OCIOColorSpace
            elif baking_colorspace["type"] == "colorspace":
                baking_colorspace = baking_colorspace["colorspace"]
                # format colorspace string with anatomy data
                baking_colorspace = StringTemplate(
                    baking_colorspace).format_strict(self.formatting_data)
                node = nuke.createNode("OCIOColorSpace")
                message = "OCIOColorSpace... '{}'"
                # no need to set input colorspace since it is driven by
                # working colorspace
                node["out_colorspace"].setValue(baking_colorspace)
                colorspace = baking_colorspace

            else:
                raise ValueError(
                    "Invalid baking color space type: "
                    f"{baking_colorspace['type']}"
                )

            self._connect_to_above_nodes(
                dag_node, product_name, "OCIODisplay... `{}`"
                node, product_name, message
            )

        # Write node
        write_node = nuke.createNode("Write")
        self.log.debug("Path: {}".format(self.path))
        self.log.debug(f"Path: {self.path}")

        write_node["file"].setValue(str(self.path))
        write_node["file_type"].setValue(str(self.ext))
        write_node["channels"].setValue(str(self.color_channels))
@ -981,12 +1016,11 @@ class ExporterReviewMov(ExporterReview):
            self.log.info("`mov64_write_timecode` knob was not found")

        write_node["raw"].setValue(1)

        # connect
        write_node.setInput(0, self.previous_node)
        self._temp_nodes[product_name].append(write_node)
        self.log.debug("Write... `{}`".format(
            self._temp_nodes[product_name])
        )
        self.log.debug(f"Write... `{self._temp_nodes[product_name]}`")
        # ---------- end nodes creation

        # ---------- render or save to nk
@ -1014,7 +1048,7 @@ class ExporterReviewMov(ExporterReview):
            colorspace=colorspace,
        )

        self.log.debug("Representation... `{}`".format(self.data))
        self.log.debug(f"Representation... `{self.data}`")

        self.clean_nodes(product_name)
        nuke.scriptSave()
118 server_addon/nuke/client/ayon_nuke/api/push_to_project.py  Normal file
@ -0,0 +1,118 @@
from collections import defaultdict
import shutil
import os

from ayon_api import get_project, get_folder_by_id, get_task_by_id
from ayon_core.settings import get_project_settings
from ayon_core.pipeline import Anatomy, registered_host
from ayon_core.pipeline.template_data import get_template_data
from ayon_core.pipeline.workfile import get_workdir_with_workdir_data
from ayon_core.tools import context_dialog

from .utils import bake_gizmos_recursively
from .lib import MENU_LABEL

import nuke


def bake_container(container):
    """Bake containers to read nodes."""

    node = container["node"]

    # Fetch knobs to remove in order.
    knobs_to_remove = []
    remove = False
    for count in range(0, node.numKnobs()):
        knob = node.knob(count)

        # All knobs from the "AYON" tab knob onwards.
        if knob.name() == MENU_LABEL:
            remove = True

        if remove:
            knobs_to_remove.append(knob)

        # Don't remove knobs from "containerId" onwards.
        if knob.name() == "containerId":
            remove = False

    # Knobs need to be removed in reverse order, because child knobs
    # need to be removed first.
    for knob in reversed(knobs_to_remove):
        node.removeKnob(knob)

    node["tile_color"].setValue(0)


def main():
    context = context_dialog.ask_for_context()

    if context is None:
        return

    # Get workfile path to save to.
    project_name = context["project_name"]
    project = get_project(project_name)
    folder = get_folder_by_id(project_name, context["folder_id"])
    task = get_task_by_id(project_name, context["task_id"])
    host = registered_host()
    project_settings = get_project_settings(project_name)
    anatomy = Anatomy(project_name)

    workdir_data = get_template_data(
        project, folder, task, host.name, project_settings
    )

    workdir = get_workdir_with_workdir_data(
        workdir_data,
        project_name,
        anatomy,
        project_settings=project_settings
    )
    # Save current workfile.
    current_file = host.current_file()
    host.save_file(current_file)

    for container in host.ls():
        bake_container(container)

    # Bake gizmos.
    bake_gizmos_recursively()

    # Copy all read node files to a "resources" folder next to the
    # workfile and change the file path.
    first_frame = int(nuke.root()["first_frame"].value())
    last_frame = int(nuke.root()["last_frame"].value())
    files_by_node_name = defaultdict(set)
    nodes_by_name = {}
    for count in range(first_frame, last_frame + 1):
        nuke.frame(count)
        for node in nuke.allNodes(filter="Read"):
            files_by_node_name[node.name()].add(
                nuke.filename(node, nuke.REPLACE)
            )
            nodes_by_name[node.name()] = node

    resources_dir = os.path.join(workdir, "resources")
    for name, files in files_by_node_name.items():
        dir = os.path.join(resources_dir, name)
        if not os.path.exists(dir):
            os.makedirs(dir)

        for f in files:
            shutil.copy(f, os.path.join(dir, os.path.basename(f)))

        node = nodes_by_name[name]
        path = node["file"].value().replace(os.path.dirname(f), dir)
        node["file"].setValue(path.replace("\\", "/"))

    # Save current workfile to the new context.
    pushed_workfile = os.path.join(
        workdir, os.path.basename(current_file))
    host.save_file(pushed_workfile)

    # Open current context workfile.
    host.open_file(current_file)

    nuke.message(f"Pushed to project: \n{pushed_workfile}")
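# A toy sketch, not part of this commit: the knob stripping in
# bake_container() above is a start/stop window scan; the same pattern
# on plain data, with no Nuke API involved.
def collect_window(items, start, stop):
    # Collect from the first `start` marker up to and including the
    # first `stop` marker, mirroring the MENU_LABEL/containerId scan.
    collecting = False
    collected = []
    for item in items:
        if item == start:
            collecting = True
        if collecting:
            collected.append(item)
        if item == stop:
            collecting = False
    return collected

assert collect_window(
    ["label", "AYON", "x", "containerId", "file"], "AYON", "containerId"
) == ["AYON", "x", "containerId"]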
@ -28,29 +28,6 @@ class ExtractReviewIntermediates(publish.Extractor):
    viewer_lut_raw = None
    outputs = {}

    @classmethod
    def apply_settings(cls, project_settings):
        """Apply the settings from the deprecated
        ExtractReviewDataMov plugin for backwards compatibility
        """
        nuke_publish = project_settings["nuke"]["publish"]
        deprecated_setting = nuke_publish["ExtractReviewDataMov"]
        current_setting = nuke_publish.get("ExtractReviewIntermediates")
        if not deprecated_setting["enabled"] and (
            not current_setting["enabled"]
        ):
            cls.enabled = False

        if deprecated_setting["enabled"]:
            # Use deprecated settings if they are still enabled
            cls.viewer_lut_raw = deprecated_setting["viewer_lut_raw"]
            cls.outputs = deprecated_setting["outputs"]
        elif current_setting is None:
            pass
        elif current_setting["enabled"]:
            cls.viewer_lut_raw = current_setting["viewer_lut_raw"]
            cls.outputs = current_setting["outputs"]

    def process(self, instance):
        # TODO 'families' should not be included for filtering of outputs
        families = set(instance.data["families"])

@ -1,3 +1,3 @@
# -*- coding: utf-8 -*-
"""Package declaring AYON addon 'nuke' version."""
__version__ = "0.2.2"
__version__ = "0.2.3"

@ -1,6 +1,6 @@
name = "nuke"
title = "Nuke"
version = "0.2.2"
version = "0.2.3"

client_dir = "ayon_nuke"
@ -1,8 +1,12 @@
from typing import Type
from typing import Type, Any

from ayon_server.addons import BaseServerAddon

from .settings import NukeSettings, DEFAULT_VALUES
from .settings import (
    NukeSettings,
    DEFAULT_VALUES,
    convert_settings_overrides
)


class NukeAddon(BaseServerAddon):

@ -11,3 +15,13 @@ class NukeAddon(BaseServerAddon):
    async def get_default_settings(self):
        settings_model_cls = self.get_settings_model()
        return settings_model_cls(**DEFAULT_VALUES)

    async def convert_settings_overrides(
        self,
        source_version: str,
        overrides: dict[str, Any],
    ) -> dict[str, Any]:
        convert_settings_overrides(source_version, overrides)
        # Use super conversion
        return await super().convert_settings_overrides(
            source_version, overrides)
@ -2,9 +2,12 @@ from .main import (
    NukeSettings,
    DEFAULT_VALUES,
)
from .conversion import convert_settings_overrides


__all__ = (
    "NukeSettings",
    "DEFAULT_VALUES",

    "convert_settings_overrides",
)
@ -133,3 +133,63 @@ class KnobModel(BaseSettingsModel):
        "",
        title="Expression"
    )


colorspace_types_enum = [
    {"value": "colorspace", "label": "Use Colorspace"},
    {"value": "display_view", "label": "Use Display & View"},
]


class DisplayAndViewProfileModel(BaseSettingsModel):
    _layout = "expanded"

    display: str = SettingsField(
        "",
        title="Display",
        description="What display to use",
    )

    view: str = SettingsField(
        "",
        title="View",
        description=(
            "What view to use. Anatomy context tokens can "
            "be used to dynamically set the value."
        ),
    )


class ColorspaceConfigurationModel(BaseSettingsModel):
    _isGroup: bool = True

    enabled: bool = SettingsField(
        False,
        title="Enabled",
        description=(
            "Enable baking target (colorspace or display/view)."
        ),
    )

    type: str = SettingsField(
        "colorspace",
        title="Target baking type",
        description="Switch between different knob types",
        enum_resolver=lambda: colorspace_types_enum,
        conditionalEnum=True,
    )

    colorspace: str = SettingsField(
        "",
        title="Colorspace",
        description=(
            "What colorspace name to use. Anatomy context tokens can "
            "be used to dynamically set the value."
        ),
    )

    display_view: DisplayAndViewProfileModel = SettingsField(
        title="Display & View",
        description="What display & view to use",
        default_factory=DisplayAndViewProfileModel,
    )
143 server_addon/nuke/server/settings/conversion.py  Normal file
@ -0,0 +1,143 @@
import re
from typing import Any


def _get_viewer_config_from_string(input_string):
    """Convert string to display and viewer string

    Args:
        input_string (str): string with viewer

    Raises:
        IndexError: if more than one slash in input string
        IndexError: if missing closing bracket

    Returns:
        tuple[str]: display, viewer
    """
    display = None
    viewer = input_string
    # check if () or / or \ in name
    if "/" in viewer:
        split = viewer.split("/")

        # raise if there is more than one slash
        if len(split) > 2:
            raise IndexError(
                "Viewer Input string is not correct. "
                f"More than one `/` slash! {input_string}"
            )

        viewer = split[1]
        display = split[0]
    elif "(" in viewer:
        pattern = r"([\w\d\s\.\-]+).*[(](.*)[)]"
        result_ = re.findall(pattern, viewer)
        try:
            result_ = result_.pop()
            display = str(result_[1]).rstrip()
            viewer = str(result_[0]).rstrip()
        except IndexError as e:
            raise IndexError(
                "Viewer Input string is not correct. "
                f"Missing bracket! {input_string}"
            ) from e

    return (display, viewer)


def _convert_imageio_baking_0_2_3(overrides):
    if "baking" not in overrides:
        return

    baking_view_process = overrides["baking"].get("viewerProcess")

    if baking_view_process is None:
        return

    display, view = _get_viewer_config_from_string(baking_view_process)

    overrides["baking_target"] = {
        "enabled": True,
        "type": "display_view",
        "display_view": {
            "display": display,
            "view": view,
        },
    }


def _convert_viewers_0_2_3(overrides):
    if "viewer" not in overrides:
        return

    viewer = overrides["viewer"]

    if "viewerProcess" in viewer:
        viewer_process = viewer["viewerProcess"]
        display, view = _get_viewer_config_from_string(viewer_process)
        viewer.update({
            "display": display,
            "view": view,
        })
    if "output_transform" in viewer:
        output_transform = viewer["output_transform"]
        display, view = _get_viewer_config_from_string(output_transform)
        overrides["monitor"] = {
            "display": display,
            "view": view,
        }


def _convert_imageio_configs_0_2_3(overrides):
    """Image IO settings had changed.

    0.2.2 is the latest version using the old way.
    """
    if "imageio" not in overrides:
        return

    imageio_overrides = overrides["imageio"]

    _convert_imageio_baking_0_2_3(imageio_overrides)
    _convert_viewers_0_2_3(imageio_overrides)


def _convert_extract_intermediate_files_0_2_3(publish_overrides):
    """Extract intermediate files settings had changed.

    0.2.2 is the latest version using the old way.
    """
    # override can be either `display/view` or `view (display)`
    if "ExtractReviewIntermediates" in publish_overrides:
        extract_review_intermediates = publish_overrides[
            "ExtractReviewIntermediates"]

        for output in extract_review_intermediates.get("outputs", []):
            if viewer_process_override := output.get(
                    "viewer_process_override"):
                display, view = _get_viewer_config_from_string(
                    viewer_process_override)

                output["colorspace_override"] = {
                    "enabled": True,
                    "type": "display_view",
                    "display_view": {
                        "display": display,
                        "view": view,
                    },
                }


def _convert_publish_plugins(overrides):
    if "publish" not in overrides:
        return
    _convert_extract_intermediate_files_0_2_3(overrides["publish"])


def convert_settings_overrides(
    source_version: str,
    overrides: dict[str, Any],
) -> dict[str, Any]:
    _convert_imageio_configs_0_2_3(overrides)
    _convert_publish_plugins(overrides)
    return overrides
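# Illustrative calls, not part of this commit: the behavior of
# _get_viewer_config_from_string() above for both legacy override
# forms the converters accept (values hypothetical).
assert _get_viewer_config_from_string("ACES/sRGB") == ("ACES", "sRGB")
assert _get_viewer_config_from_string("sRGB (ACES)") == ("ACES", "sRGB")
assert _get_viewer_config_from_string("sRGB") == (None, "sRGB")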
@ -6,7 +6,10 @@ from ayon_server.settings import (
    ensure_unique_names,
)

from .common import KnobModel
from .common import (
    KnobModel,
    ColorspaceConfigurationModel,
)


class NodesModel(BaseSettingsModel):
@ -52,6 +55,8 @@ class OverrideNodesModel(NodesModel):


class NodesSetting(BaseSettingsModel):
    _isGroup: bool = True

    required_nodes: list[RequiredNodesModel] = SettingsField(
        title="Plugin required",
        default_factory=list

@ -83,6 +88,8 @@ def ocio_configs_switcher_enum():
class WorkfileColorspaceSettings(BaseSettingsModel):
    """Nuke workfile colorspace preset. """

    _isGroup: bool = True

    color_management: Literal["Nuke", "OCIO"] = SettingsField(
        title="Color Management Workflow"
    )

@ -125,6 +132,8 @@ class ReadColorspaceRulesItems(BaseSettingsModel):


class RegexInputsModel(BaseSettingsModel):
    _isGroup: bool = True

    inputs: list[ReadColorspaceRulesItems] = SettingsField(
        default_factory=list,
        title="Inputs"
@ -132,15 +141,44 @@ class RegexInputsModel(BaseSettingsModel):


class ViewProcessModel(BaseSettingsModel):
    viewerProcess: str = SettingsField(
        title="Viewer Process Name"
    _isGroup: bool = True

    display: str = SettingsField(
        "",
        title="Display",
        description="What display to use",
    )
    output_transform: str = SettingsField(
        title="Output Transform"
    view: str = SettingsField(
        "",
        title="View",
        description=(
            "What view to use. Anatomy context tokens can "
            "be used to dynamically set the value."
        ),
    )


class MonitorProcessModel(BaseSettingsModel):
    _isGroup: bool = True

    display: str = SettingsField(
        "",
        title="Display",
        description="What display to use",
    )
    view: str = SettingsField(
        "",
        title="View",
        description=(
            "What view to use. Anatomy context tokens can "
            "be used to dynamically set the value."
        ),
    )


class ImageIOConfigModel(BaseSettingsModel):
    _isGroup: bool = True

    override_global_config: bool = SettingsField(
        False,
        title="Override global OCIO config"
@ -159,6 +197,8 @@ class ImageIOFileRuleModel(BaseSettingsModel):


class ImageIOFileRulesModel(BaseSettingsModel):
    _isGroup: bool = True

    activate_host_rules: bool = SettingsField(False)
    rules: list[ImageIOFileRuleModel] = SettingsField(
        default_factory=list,
@ -173,14 +213,7 @@

class ImageIOSettings(BaseSettingsModel):
    """Nuke color management project settings. """
    _isGroup: bool = True

    """# TODO: enhance settings with host api:
    to restructure settings for simplification.

    now: nuke/imageio/viewer/viewerProcess
    future: nuke/imageio/viewer
    """
    activate_host_color_management: bool = SettingsField(
        True, title="Enable Color Management")
    ocio_config: ImageIOConfigModel = SettingsField(
@ -197,18 +230,13 @@ class ImageIOSettings(BaseSettingsModel):
        description="""Viewer profile is used during
        Creation of new viewer node at knob viewerProcess"""
    )

    """# TODO: enhance settings with host api:
    to restructure settings for simplification.

    now: nuke/imageio/baking/viewerProcess
    future: nuke/imageio/baking
    """
    baking: ViewProcessModel = SettingsField(
        default_factory=ViewProcessModel,
        title="Baking",
        description="""Baking profile is used during
        publishing baked colorspace data at knob viewerProcess"""
    monitor: MonitorProcessModel = SettingsField(
        default_factory=MonitorProcessModel,
        title="Monitor OUT"
    )
    baking_target: ColorspaceConfigurationModel = SettingsField(
        default_factory=ColorspaceConfigurationModel,
        title="Baking Target Colorspace"
    )

    workfile: WorkfileColorspaceSettings = SettingsField(
@ -231,13 +259,12 @@ class ImageIOSettings(BaseSettingsModel):

DEFAULT_IMAGEIO_SETTINGS = {
    "viewer": {
        "viewerProcess": "ACES/sRGB",
        "output_transform": "ACES/sRGB"
    },
    "baking": {
        "viewerProcess": "ACES/Rec.709",
        "output_transform": "ACES/Rec.709"
    "viewer": {"display": "ACES", "view": "sRGB"},
    "monitor": {"display": "ACES", "view": "Rec.709"},
    "baking_target": {
        "enabled": True,
        "type": "colorspace",
        "colorspace": "Output - Rec.709",
    },
    "workfile": {
        "color_management": "OCIO",
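# A sketch with hypothetical data, not part of this commit: how the
# converter in conversion.py above migrates a legacy baking override
# to the new baking_target shape.
legacy = {"imageio": {"baking": {"viewerProcess": "Rec.709 (ACES)"}}}
convert_settings_overrides("0.2.2", legacy)
# legacy["imageio"]["baking_target"] is now:
# {
#     "enabled": True,
#     "type": "display_view",
#     "display_view": {"display": "ACES", "view": "Rec.709"},
# }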
@ -248,170 +275,67 @@ DEFAULT_IMAGEIO_SETTINGS = {
        "int_8_lut": "role_matte_paint",
        "int_16_lut": "role_texture_paint",
        "log_lut": "role_compositing_log",
        "float_lut": "role_scene_linear"
        "float_lut": "role_scene_linear",
    },
    "nodes": {
        "required_nodes": [
            {
                "plugins": [
                    "CreateWriteRender"
                ],
                "plugins": ["CreateWriteRender"],
                "nuke_node_class": "Write",
                "knobs": [
                    {
                        "type": "text",
                        "name": "file_type",
                        "text": "exr"
                    },
                    {
                        "type": "text",
                        "name": "datatype",
                        "text": "16 bit half"
                    },
                    {
                        "type": "text",
                        "name": "compression",
                        "text": "Zip (1 scanline)"
                    },
                    {
                        "type": "boolean",
                        "name": "autocrop",
                        "boolean": True
                    },
                    {"type": "text", "name": "file_type", "text": "exr"},
                    {"type": "text", "name": "datatype", "text": "16 bit half"},
                    {"type": "text", "name": "compression", "text": "Zip (1 scanline)"},
                    {"type": "boolean", "name": "autocrop", "boolean": True},
                    {
                        "type": "color_gui",
                        "name": "tile_color",
                        "color_gui": [
                            186,
                            35,
                            35
                        ]
                        "color_gui": [186, 35, 35],
                    },
                    {
                        "type": "text",
                        "name": "channels",
                        "text": "rgb"
                    },
                    {
                        "type": "text",
                        "name": "colorspace",
                        "text": "scene_linear"
                    },
                    {
                        "type": "boolean",
                        "name": "create_directories",
                        "boolean": True
                    }
                ]
                    {"type": "text", "name": "channels", "text": "rgb"},
                    {"type": "text", "name": "colorspace", "text": "scene_linear"},
                    {"type": "boolean", "name": "create_directories", "boolean": True},
                ],
            },
            {
                "plugins": [
                    "CreateWritePrerender"
                ],
                "plugins": ["CreateWritePrerender"],
                "nuke_node_class": "Write",
                "knobs": [
                    {
                        "type": "text",
                        "name": "file_type",
                        "text": "exr"
                    },
                    {
                        "type": "text",
                        "name": "datatype",
                        "text": "16 bit half"
                    },
                    {
                        "type": "text",
                        "name": "compression",
                        "text": "Zip (1 scanline)"
                    },
                    {
                        "type": "boolean",
                        "name": "autocrop",
                        "boolean": True
                    },
                    {"type": "text", "name": "file_type", "text": "exr"},
                    {"type": "text", "name": "datatype", "text": "16 bit half"},
                    {"type": "text", "name": "compression", "text": "Zip (1 scanline)"},
                    {"type": "boolean", "name": "autocrop", "boolean": True},
                    {
                        "type": "color_gui",
                        "name": "tile_color",
                        "color_gui": [
                            171,
                            171,
                            10
                        ]
                        "color_gui": [171, 171, 10],
                    },
                    {
                        "type": "text",
                        "name": "channels",
                        "text": "rgb"
                    },
                    {
                        "type": "text",
                        "name": "colorspace",
                        "text": "scene_linear"
                    },
                    {
                        "type": "boolean",
                        "name": "create_directories",
                        "boolean": True
                    }
                ]
                    {"type": "text", "name": "channels", "text": "rgb"},
                    {"type": "text", "name": "colorspace", "text": "scene_linear"},
                    {"type": "boolean", "name": "create_directories", "boolean": True},
                ],
            },
            {
                "plugins": [
                    "CreateWriteImage"
                ],
                "plugins": ["CreateWriteImage"],
                "nuke_node_class": "Write",
                "knobs": [
                    {
                        "type": "text",
                        "name": "file_type",
                        "text": "tiff"
                    },
                    {
                        "type": "text",
                        "name": "datatype",
                        "text": "16 bit"
                    },
                    {
                        "type": "text",
                        "name": "compression",
                        "text": "Deflate"
                    },
                    {"type": "text", "name": "file_type", "text": "tiff"},
                    {"type": "text", "name": "datatype", "text": "16 bit"},
                    {"type": "text", "name": "compression", "text": "Deflate"},
                    {
                        "type": "color_gui",
                        "name": "tile_color",
                        "color_gui": [
                            56,
                            162,
                            7
                        ]
                        "color_gui": [56, 162, 7],
                    },
                    {
                        "type": "text",
                        "name": "channels",
                        "text": "rgb"
                    },
                    {
                        "type": "text",
                        "name": "colorspace",
                        "text": "texture_paint"
                    },
                    {
                        "type": "boolean",
                        "name": "create_directories",
                        "boolean": True
                    }
                ]
            }
                    {"type": "text", "name": "channels", "text": "rgb"},
                    {"type": "text", "name": "colorspace", "text": "texture_paint"},
                    {"type": "boolean", "name": "create_directories", "boolean": True},
                ],
            },
        ],
        "override_nodes": []
        "override_nodes": [],
    },
    "regex_inputs": {
        "inputs": [
            {
                "regex": "(beauty).*(?=.exr)",
                "colorspace": "linear"
            }
        ]
    }
        "inputs": [{"regex": "(beauty).*(?=.exr)", "colorspace": "linear"}]
    },
}
@ -5,7 +5,11 @@ from ayon_server.settings import (
    ensure_unique_names,
    task_types_enum
)
from .common import KnobModel, validate_json_dict
from .common import (
    KnobModel,
    ColorspaceConfigurationModel,
    validate_json_dict,
)


def nuke_render_publish_types_enum():
@ -130,19 +134,22 @@ class IntermediateOutputModel(BaseSettingsModel):
        title="Filter", default_factory=BakingStreamFilterModel)
    read_raw: bool = SettingsField(
        False,
        title="Read raw switch"
    )
    viewer_process_override: str = SettingsField(
        "",
        title="Viewer process override"
        title="Input read node RAW switch"
    )
    bake_viewer_process: bool = SettingsField(
        True,
        title="Bake viewer process"
        title="Bake viewer process",
        section="Baking target",
    )
    colorspace_override: ColorspaceConfigurationModel = SettingsField(
        title="Target baking colorspace override",
        description="Override Baking target with colorspace or display/view",
        default_factory=ColorspaceConfigurationModel
    )
    bake_viewer_input_process: bool = SettingsField(
        True,
        title="Bake viewer input process node (LUT)"
        title="Bake viewer input process node (LUT)",
        section="Baking additional",
    )
    reformat_nodes_config: ReformatNodesConfigModel = SettingsField(
        default_factory=ReformatNodesConfigModel,
@ -155,18 +162,6 @@ class IntermediateOutputModel(BaseSettingsModel):
        title="Custom tags", default_factory=list)


class ExtractReviewDataMovModel(BaseSettingsModel):
    """[deprecated] use Extract Review Data Baking
    Streams instead.
    """
    enabled: bool = SettingsField(title="Enabled")
    viewer_lut_raw: bool = SettingsField(title="Viewer lut raw")
    outputs: list[IntermediateOutputModel] = SettingsField(
        default_factory=list,
        title="Baking streams"
    )


class ExtractReviewIntermediatesModel(BaseSettingsModel):
    enabled: bool = SettingsField(title="Enabled")
    viewer_lut_raw: bool = SettingsField(title="Viewer lut raw")
@ -259,10 +254,6 @@ class PublishPluginsModel(BaseSettingsModel):
        title="Extract Review Data Lut",
        default_factory=ExtractReviewDataLutModel
    )
    ExtractReviewDataMov: ExtractReviewDataMovModel = SettingsField(
        title="Extract Review Data Mov",
        default_factory=ExtractReviewDataMovModel
    )
    ExtractReviewIntermediates: ExtractReviewIntermediatesModel = (
        SettingsField(
            title="Extract Review Intermediates",
@ -332,62 +323,6 @@ DEFAULT_PUBLISH_PLUGIN_SETTINGS = {
    "ExtractReviewDataLut": {
        "enabled": False
    },
    "ExtractReviewDataMov": {
        "enabled": False,
        "viewer_lut_raw": False,
        "outputs": [
            {
                "name": "baking",
                "publish": False,
                "filter": {
                    "task_types": [],
                    "product_types": [],
                    "product_names": []
                },
                "read_raw": False,
                "viewer_process_override": "",
                "bake_viewer_process": True,
                "bake_viewer_input_process": True,
                "reformat_nodes_config": {
                    "enabled": False,
                    "reposition_nodes": [
                        {
                            "node_class": "Reformat",
                            "knobs": [
                                {
                                    "type": "text",
                                    "name": "type",
                                    "text": "to format"
                                },
                                {
                                    "type": "text",
                                    "name": "format",
                                    "text": "HD_1080"
                                },
                                {
                                    "type": "text",
                                    "name": "filter",
                                    "text": "Lanczos6"
                                },
                                {
                                    "type": "boolean",
                                    "name": "black_outside",
                                    "boolean": True
                                },
                                {
                                    "type": "boolean",
                                    "name": "pbb",
                                    "boolean": False
                                }
                            ]
                        }
                    ]
                },
                "extension": "mov",
                "add_custom_tags": []
            }
        ]
    },
    "ExtractReviewIntermediates": {
        "enabled": True,
        "viewer_lut_raw": False,
@ -401,7 +336,15 @@ DEFAULT_PUBLISH_PLUGIN_SETTINGS = {
                "product_names": []
            },
            "read_raw": False,
            "viewer_process_override": "",
            "colorspace_override": {
                "enabled": False,
                "type": "colorspace",
                "colorspace": "",
                "display_view": {
                    "display": "",
                    "view": ""
                }
            },
            "bake_viewer_process": True,
            "bake_viewer_input_process": True,
            "reformat_nodes_config": {
@ -145,7 +145,9 @@ def get_new_timeline(timeline_name: str = None):
    return new_timeline


def create_bin(name: str, root: object = None) -> object:
def create_bin(name: str,
               root: object = None,
               set_as_current: bool = True) -> object:
    """
    Create media pool's folder.

@ -156,6 +158,8 @@ def create_bin(name: str, root: object = None) -> object:
    Args:
        name (str): name of folder / bin, or hierarchical name "parent/name"
        root (resolve.Folder)[optional]: root folder / bin object
        set_as_current (bool)[optional]: Whether to set the
            resulting bin as current folder or not.

    Returns:
        object: resolve.Folder

@ -168,22 +172,24 @@ def create_bin(name: str, root: object = None) -> object:
    if "/" in name.replace("\\", "/"):
        child_bin = None
        for bname in name.split("/"):
            child_bin = create_bin(bname, child_bin or root_bin)
            child_bin = create_bin(bname,
                                   root=child_bin or root_bin,
                                   set_as_current=set_as_current)
        if child_bin:
            return child_bin
    else:
        created_bin = None
        # Find existing folder or create it
        for subfolder in root_bin.GetSubFolderList():
            if subfolder.GetName() in name:
            if subfolder.GetName() == name:
                created_bin = subfolder

        if not created_bin:
            new_folder = media_pool.AddSubFolder(root_bin, name)
            media_pool.SetCurrentFolder(new_folder)
                break
        else:
            created_bin = media_pool.AddSubFolder(root_bin, name)

        if set_as_current:
            media_pool.SetCurrentFolder(created_bin)

        return media_pool.GetCurrentFolder()
        return created_bin


def remove_media_pool_item(media_pool_item: object) -> bool:
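# Hypothetical usage, not from this commit: build a nested bin with the
# extended create_bin() without switching the user's current Media Pool
# folder (names illustrative).
bin_folder = create_bin(
    name="Loader/shots/sh010",
    root=media_pool.GetRootFolder(),
    set_as_current=False,
)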
@ -272,8 +278,7 @@ def create_timeline_item(
    # get all variables
    project = get_current_project()
    media_pool = project.GetMediaPool()
    _clip_property = media_pool_item.GetClipProperty
    clip_name = _clip_property("File Name")
    clip_name = media_pool_item.GetClipProperty("File Name")
    timeline = timeline or get_current_timeline()

    # timing variables
@ -298,16 +303,22 @@ def create_timeline_item(
    if source_end:
        clip_data["endFrame"] = source_end
    if timecode_in:
        # Note: specifying a recordFrame will fail to place the timeline
        # item if there's already an existing clip at that time on the
        # active track.
        clip_data["recordFrame"] = timeline_in

    # add to timeline
    media_pool.AppendToTimeline([clip_data])
    output_timeline_item = media_pool.AppendToTimeline([clip_data])[0]

    output_timeline_item = get_timeline_item(
        media_pool_item, timeline)
    # Adding the item may fail whilst Resolve will still return a
    # TimelineItem instance - however all `Get*` calls return None
    # Hence, we check whether the result is valid
    if output_timeline_item.GetDuration() is None:
        output_timeline_item = None

    assert output_timeline_item, AssertionError((
        "Clip name '{}' was't created on the timeline: '{}' \n\n"
        "Clip name '{}' wasn't created on the timeline: '{}' \n\n"
        "Please check if correct track position is activated, \n"
        "or if a clip is not already at the timeline in \n"
        "position: '{}' out: '{}'. \n\n"
@ -947,3 +958,13 @@ def get_reformated_path(path, padded=False, first=False):
    else:
        path = re.sub(num_pattern, "%d", path)
    return path


def iter_all_media_pool_clips():
    """Recursively iterate all media pool clips in current project"""
    root = get_current_project().GetMediaPool().GetRootFolder()
    queue = [root]
    for folder in queue:
        for clip in folder.GetClipList():
            yield clip
        queue.extend(folder.GetSubFolderList())
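# Hypothetical usage, not from this commit: the iterator above relies
# on Python list iteration observing items appended mid-loop, a compact
# breadth-first traversal of the bin tree.
for clip in iter_all_media_pool_clips():
    print(clip.GetName())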
@ -2,6 +2,7 @@
Basic avalon integration
"""
import os
import json
import contextlib
from collections import OrderedDict

@ -12,6 +13,7 @@ from ayon_core.pipeline import (
    schema,
    register_loader_plugin_path,
    register_creator_plugin_path,
    register_inventory_action_path,
    AVALON_CONTAINER_ID,
)
from ayon_core.host import (

@ -38,6 +40,7 @@ PLUGINS_DIR = os.path.join(HOST_DIR, "plugins")
PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish")
LOAD_PATH = os.path.join(PLUGINS_DIR, "load")
CREATE_PATH = os.path.join(PLUGINS_DIR, "create")
INVENTORY_PATH = os.path.join(PLUGINS_DIR, "inventory")

AVALON_CONTAINERS = ":AVALON_CONTAINERS"

@ -65,6 +68,7 @@ class ResolveHost(HostBase, IWorkfileHost, ILoadHost):

    register_loader_plugin_path(LOAD_PATH)
    register_creator_plugin_path(CREATE_PATH)
    register_inventory_action_path(INVENTORY_PATH)

    # register callback for switching publishable
    pyblish.register_callback("instanceToggled",
@ -145,6 +149,26 @@ def ls():
    and the Maya equivalent, which is in `avalon.maya.pipeline`
    """

    # Media Pool instances from Load Media loader
    for clip in lib.iter_all_media_pool_clips():
        data = clip.GetMetadata(lib.pype_tag_name)
        if not data:
            continue
        data = json.loads(data)

        # If not all required data, skip it
        required = ['schema', 'id', 'loader', 'representation']
        if not all(key in data for key in required):
            continue

        container = {key: data[key] for key in required}
        container["objectName"] = clip.GetName()  # Get path in folders
        container["namespace"] = clip.GetName()
        container["name"] = clip.GetUniqueId()
        container["_item"] = clip
        yield container

    # Timeline instances from Load Clip loader
    # get all track items from current timeline
    all_timeline_items = lib.get_current_timeline_items(filter=False)

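# For reference (shape illustrative, not part of this commit): the
# metadata blob ls() reads from a media pool item is written by the
# LoadMedia loader below and carries at least these keys.
container_metadata = {
    "schema": "openpype:container-2.0",
    "id": AVALON_CONTAINER_ID,
    "loader": "LoadMedia",
    "representation": "<representation-id>",
}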
@ -0,0 +1,31 @@
from ayon_core.pipeline import (
    InventoryAction,
)
from ayon_core.pipeline.load.utils import remove_container


class RemoveUnusedMedia(InventoryAction):

    label = "Remove Unused Selected Media"
    icon = "trash"

    @staticmethod
    def is_compatible(container):
        return (
            container.get("loader") == "LoadMedia"
        )

    def process(self, containers):
        any_removed = False
        for container in containers:
            media_pool_item = container["_item"]
            usage = int(media_pool_item.GetClipProperty("Usage"))
            name = media_pool_item.GetName()
            if usage == 0:
                print(f"Removing {name}")
                remove_container(container)
                any_removed = True
            else:
                print(f"Keeping {name} with usage: {usage}")

        return any_removed
@ -0,0 +1,533 @@
import json
import contextlib
from pathlib import Path
from collections import defaultdict
from typing import Union, List, Optional, TypedDict, Tuple

from ayon_api import version_is_latest
from ayon_core.lib import StringTemplate
from ayon_core.pipeline.colorspace import get_remapped_colorspace_to_native
from ayon_core.pipeline import (
    Anatomy,
    LoaderPlugin,
    get_representation_path,
    registered_host
)
from ayon_core.pipeline.load import get_representation_path_with_anatomy
from ayon_core.lib.transcoding import (
    VIDEO_EXTENSIONS,
    IMAGE_EXTENSIONS
)
from ayon_core.lib import BoolDef
from ayon_resolve.api import lib
from ayon_resolve.api.pipeline import AVALON_CONTAINER_ID


FRAME_SPLITTER = "__frame_splitter__"


class MetadataEntry(TypedDict):
    """Metadata entry is a dict with {"name": "key", "value": "value"}"""
    name: str
    value: str


@contextlib.contextmanager
def project_color_science_mode(project=None, mode="davinciYRGBColorManagedv2"):
    """Set project color science mode during context.

    This is especially useful as context for setting the colorspace for media
    pool items, because when Resolve is not set to `davinciYRGBColorManagedv2`
    it fails to set its "Input Color Space" clip property even though it is
    accessible and settable via the Resolve User Interface.

    Args:
        project (Project): The active Resolve Project.
        mode (Optional[str]): The color science mode to apply during the
            context. Defaults to 'davinciYRGBColorManagedv2'

    See Also:
        https://forum.blackmagicdesign.com/viewtopic.php?f=21&t=197441
    """

    if project is None:
        project = lib.get_current_project()

    original_mode = project.GetSetting("colorScienceMode")
    if original_mode != mode:
        project.SetSetting("colorScienceMode", mode)
    try:
        yield
    finally:
        if project.GetSetting("colorScienceMode") != original_mode:
            project.SetSetting("colorScienceMode", original_mode)


def set_colorspace(media_pool_item,
                   colorspace,
                   mode="davinciYRGBColorManagedv2"):
    """Set MediaPoolItem colorspace.

    This implements a workaround for the fact that you cannot set the
    input colorspace unless the Resolve project's color science mode is
    set to `davinciYRGBColorManagedv2`.

    Args:
        media_pool_item (MediaPoolItem): The media pool item.
        colorspace (str): The colorspace to apply.
        mode (Optional[str]): The Resolve project color science mode to be in
            while setting the colorspace.
            Defaults to 'davinciYRGBColorManagedv2'

    Returns:
        bool: Whether applying the colorspace succeeded.
    """
    with project_color_science_mode(mode=mode):
        return media_pool_item.SetClipProperty("Input Color Space", colorspace)
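# Hypothetical usage, not part of this commit: apply an input colorspace
# while temporarily forcing the color-managed project mode.
with project_color_science_mode(mode="davinciYRGBColorManagedv2"):
    media_pool_item.SetClipProperty("Input Color Space", "ACEScg")
# or, equivalently, via the helper:
set_colorspace(media_pool_item, "ACEScg")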
def find_clip_usage(media_pool_item, project=None):
    """Return all Timeline Items in the project using the Media Pool Item.

    Each entry in the list is a tuple of Timeline and TimelineItem so that
    it's easy to know which Timeline the TimelineItem belongs to.

    Arguments:
        media_pool_item (MediaPoolItem): The Media Pool Item to search for.
        project (Project): The resolve project the media pool item resides in.

    Returns:
        List[Tuple[Timeline, TimelineItem]]: A 2-tuple of a timeline with
            the timeline item.

    """
    usage = int(media_pool_item.GetClipProperty("Usage"))
    if not usage:
        return []

    if project is None:
        project = lib.get_current_project()

    matching_items = []
    unique_id = media_pool_item.GetUniqueId()
    for timeline_idx in range(project.GetTimelineCount()):
        timeline = project.GetTimelineByIndex(timeline_idx + 1)

        # Consider audio and video tracks
        for track_type in ["video", "audio"]:
            for track_idx in range(timeline.GetTrackCount(track_type)):
                timeline_items = timeline.GetItemListInTrack(track_type,
                                                             track_idx + 1)
                for timeline_item in timeline_items:
                    timeline_item_mpi = timeline_item.GetMediaPoolItem()
                    if not timeline_item_mpi:
                        continue

                    if timeline_item_mpi.GetUniqueId() == unique_id:
                        matching_items.append((timeline, timeline_item))
                        usage -= 1
                        if usage <= 0:
                            # If there should be no usage left after this
                            # found entry we return early
                            return matching_items

    return matching_items
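# A minimal sketch, not from this commit: find_clip_usage() pairs each
# hit with its timeline, so callers can act per timeline (this mirrors
# LoadMedia.remove() below).
for timeline, timeline_item in find_clip_usage(media_pool_item):
    timeline.DeleteClips([timeline_item])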
class LoadMedia(LoaderPlugin):
    """Load product as media pool item."""

    product_types = {"render2d", "source", "plate", "render", "review"}

    representations = ["*"]
    extensions = set(
        ext.lstrip(".") for ext in IMAGE_EXTENSIONS.union(VIDEO_EXTENSIONS)
    )

    label = "Load media"
    order = -20
    icon = "code-fork"
    color = "orange"

    options = [
        BoolDef(
            "load_to_timeline",
            label="Load to timeline",
            default=True,
            tooltip="Whether to automatically add it to the current "
                    "timeline on load"
        ),
        BoolDef(
            "load_once",
            label="Re-use existing",
            default=True,
            tooltip="When enabled - if this particular version is already "
                    "loaded it will not be loaded again but will be re-used."
        )
    ]

    # for loader multiselection
    timeline = None

    # presets
    clip_color_last = "Olive"
    clip_color_old = "Orange"

    media_pool_bin_path = "Loader/{folder[path]}"

    metadata: List[MetadataEntry] = []

    # cached on apply settings
    _host_imageio_settings = None

    @classmethod
    def apply_settings(cls, project_settings):
        super(LoadMedia, cls).apply_settings(project_settings)
        cls._host_imageio_settings = project_settings["resolve"]["imageio"]

    def load(self, context, name, namespace, options):

        # For loading multiselection, we store timeline before first load
        # because the current timeline can change with the imported media.
        if self.timeline is None:
            self.timeline = lib.get_current_timeline()

        representation = context["representation"]
        self._project_name = context["project"]["name"]

        project = lib.get_current_project()
        media_pool = project.GetMediaPool()

        # Allow to use an existing media pool item and re-use it
        item = None
        if options.get("load_once", True):
            host = registered_host()
            repre_id = context["representation"]["id"]
            for container in host.ls():
                if container["representation"] != repre_id:
                    continue

                if container["loader"] != self.__class__.__name__:
                    continue

                print(f"Re-using existing container: {container}")
                item = container["_item"]

        if item is None:
            item = self._import_media_to_bin(context, media_pool, representation)
        # Always update clip color - even if re-using existing clip
        color = self.get_item_color(context)
        item.SetClipColor(color)

        if options.get("load_to_timeline", True):
            timeline = options.get("timeline", self.timeline)
            if timeline:
                # Add media to active timeline
                lib.create_timeline_item(
                    media_pool_item=item,
                    timeline=timeline
                )
    def _import_media_to_bin(
        self, context, media_pool, representation
    ):
        """Import media to Resolve Media Pool.

        Also create a bin if `media_pool_bin_path` is set.

        Args:
            context (dict): The context dictionary.
            media_pool (resolve.MediaPool): The Resolve Media Pool.
            representation (dict): The representation data.

        Returns:
            resolve.MediaPoolItem: The imported media pool item.
        """
        # Create or set the bin folder, we add it in there
        # If bin path is not set we just add into the current active bin
        if self.media_pool_bin_path:
            media_pool_bin_path = StringTemplate(
                self.media_pool_bin_path).format_strict(context)

            folder = lib.create_bin(
                # double slashes will create unconnected folders
                name=media_pool_bin_path.replace("//", "/"),
                root=media_pool.GetRootFolder(),
                set_as_current=False
            )
            media_pool.SetCurrentFolder(folder)

        # Import media
        # Resolve API: the ImportMedia function requires a list of
        # dictionaries with keys "FilePath", "StartIndex" and "EndIndex"
        # for sequences, but only a string with an absolute path for
        # single files.
        is_sequence, file_info = self._get_file_info(context)
        items = (
            media_pool.ImportMedia([file_info])
            if is_sequence
            else media_pool.ImportMedia([file_info["FilePath"]])
        )
        assert len(items) == 1, "Must import only one media item"

        result = items[0]

        self._set_metadata(result, context)
        self._set_colorspace_from_representation(result, representation)

        data = self._get_container_data(context)

        # Add containerise data only needed on first load
        data.update({
            "schema": "openpype:container-2.0",
            "id": AVALON_CONTAINER_ID,
            "loader": str(self.__class__.__name__),
        })

        result.SetMetadata(lib.pype_tag_name, json.dumps(data))

        return result
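# Illustrative payloads, not from this commit: the two ImportMedia
# shapes _import_media_to_bin() builds, per the Resolve API comment
# above (paths hypothetical).
media_pool.ImportMedia(["/path/to/plate.mov"])
media_pool.ImportMedia([{
    "FilePath": "/path/to/seq.%04d.exr",
    "StartIndex": 1001,
    "EndIndex": 1100,
}])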
    def switch(self, container, context):
        self.update(container, context)

    def update(self, container, context):
        # Update MediaPoolItem filepath and metadata
        item = container["_item"]

        # Get the existing metadata before we update, because the
        # metadata gets removed
        data = json.loads(item.GetMetadata(lib.pype_tag_name))

        # Get metadata to preserve after the clip replacement
        # TODO: Maybe preserve more, like LUT, Alpha Mode, Input Sizing Preset
        colorspace_before = item.GetClipProperty("Input Color Space")

        # Update path
        path = get_representation_path(context["representation"])
        success = item.ReplaceClip(path)
        if not success:
            raise RuntimeError(
                f"Failed to replace media pool item clip to filepath: {path}"
            )

        # Update the metadata
        update_data = self._get_container_data(context)
        data.update(update_data)
        item.SetMetadata(lib.pype_tag_name, json.dumps(data))

        self._set_metadata(media_pool_item=item, context=context)
        self._set_colorspace_from_representation(
            item,
            representation=context["representation"]
        )

        # If no specific colorspace is set then we want to preserve the
        # colorspace a user might have set before the clip replacement
        if (
            item.GetClipProperty("Input Color Space") == "Project"
            and colorspace_before != "Project"
        ):
            result = set_colorspace(item, colorspace_before)
            if not result:
                self.log.warning(
                    f"Failed to re-apply colorspace: {colorspace_before}."
                )

        # Update the clip color
        color = self.get_item_color(context)
        item.SetClipColor(color)

    def remove(self, container):
        # Remove MediaPoolItem entry
        project = lib.get_current_project()
        media_pool = project.GetMediaPool()
        item = container["_item"]

        # Delete any usages of the media pool item so there's no trail
        # left in existing timelines. Currently only the media pool item
        # gets removed, which fits the Resolve workflow but is confusing
        # to artists
        usage = find_clip_usage(media_pool_item=item, project=project)
        if usage:
            # Group all timeline items per timeline, so we can delete the
            # clips in the timeline at once. The Resolve objects are not
            # hashable, so we need to store them in the dict by id
            usage_by_timeline = defaultdict(list)
            timeline_by_id = {}
            for timeline, timeline_item in usage:
                timeline_id = timeline.GetUniqueId()
                timeline_by_id[timeline_id] = timeline
                usage_by_timeline[timeline.GetUniqueId()].append(timeline_item)

            for timeline_id, timeline_items in usage_by_timeline.items():
                timeline = timeline_by_id[timeline_id]
                timeline.DeleteClips(timeline_items)

        # Delete the media pool item
        media_pool.DeleteClips([item])
    def _get_container_data(self, context: dict) -> dict:
        """Return metadata related to the representation and version."""

        # add additional metadata from the version to imprint AYON knob
        version = context["version"]
        data = {}

        # version.attrib
        for key in [
            "frameStart", "frameEnd",
            "handleStart", "handleEnd",
            "source", "fps", "colorSpace"
        ]:
            data[key] = version["attrib"][key]

        # version.data
        for key in ["author"]:
            data[key] = version["data"][key]

        # add variables related to version context
        data.update({
            "representation": context["representation"]["id"],
            "version": version["name"],
        })

        return data

    @classmethod
    def get_item_color(cls, context: dict) -> str:
        """Return item color name.

        Coloring depends on whether representation is the latest version.
        """
        # Compare version with last version
        # set clip colour
        if version_is_latest(project_name=context["project"]["name"],
                             version_id=context["version"]["id"]):
            return cls.clip_color_last
        else:
            return cls.clip_color_old

    def _set_metadata(self, media_pool_item, context: dict):
        """Set Media Pool Item Clip Properties"""

        # Set more clip metadata based on the loaded clip's context
        for meta_item in self.metadata:
            clip_property = meta_item["name"]
            value = meta_item["value"]
            value_formatted = StringTemplate(value).format_strict(context)
            media_pool_item.SetClipProperty(clip_property, value_formatted)

    def _get_file_info(self, context: dict) -> Tuple[bool, Union[str, dict]]:
        """Return file info for Resolve ImportMedia.

        Args:
            context (dict): The context dictionary.

        Returns:
            Tuple[bool, Union[str, dict]]: A tuple of whether the file is a
                sequence and the file info dictionary.
        """

        representation = context["representation"]
        anatomy = Anatomy(self._project_name)

        # Get path to representation with correct frame number
        repre_path = get_representation_path_with_anatomy(
            representation, anatomy)

        first_frame = representation["context"].get("frame")

        is_sequence = False
        # is not sequence
        if first_frame is None:
            return (
                is_sequence, {"FilePath": repre_path}
            )

        # This is a sequence
        is_sequence = True
        repre_files = [
            file["path"].format(root=anatomy.roots)
            for file in representation["files"]
        ]

        # Change frame in representation context to get path with frame
        # splitter.
        representation["context"]["frame"] = FRAME_SPLITTER
        frame_repre_path = get_representation_path_with_anatomy(
            representation, anatomy
        )
        frame_repre_path = Path(frame_repre_path)
        repre_dir, repre_filename = (
            frame_repre_path.parent, frame_repre_path.name)
        # Get sequence prefix and suffix
        file_prefix, file_suffix = repre_filename.split(FRAME_SPLITTER)
        # Get frame number from path as string to get frame padding
        frame_str = str(repre_path)[len(file_prefix):][:len(file_suffix)]
        frame_padding = len(frame_str)

        file_name = f"{file_prefix}%0{frame_padding}d{file_suffix}"

        abs_filepath = Path(repre_dir, file_name)

        start_index = int(first_frame)
        end_index = int(int(first_frame) + len(repre_files) - 1)

        # See Resolve API, to import for example clip "file_[001-100].dpx":
        # ImportMedia([{"FilePath":"file_%03d.dpx",
        #               "StartIndex":1,
        #               "EndIndex":100}])
        return (
            is_sequence,
            {
                "FilePath": abs_filepath.as_posix(),
                "StartIndex": start_index,
                "EndIndex": end_index,
            }
        )
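# A standalone sketch, not part of this commit: the FRAME_SPLITTER trick
# in _get_file_info() reduces to formatting the template with a marker
# instead of a frame number, then splitting on the marker (values
# hypothetical).
template = "render/sh010.{frame}.exr"
marker = "__frame_splitter__"
prefix, suffix = template.format(frame=marker).split(marker)
padding = 4  # the loader derives this from the real first-frame string
print(f"{prefix}%0{padding}d{suffix}")  # -> render/sh010.%04d.exr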
def _get_colorspace(self, representation: dict) -> Optional[str]:
|
||||
"""Return Resolve native colorspace from OCIO colorspace data.
|
||||
|
||||
Returns:
|
||||
Optional[str]: The Resolve native colorspace name, if any mapped.
|
||||
"""
|
||||
|
||||
data = representation.get("data", {}).get("colorspaceData", {})
|
||||
if not data:
|
||||
return
|
||||
|
||||
ocio_colorspace = data["colorspace"]
|
||||
if not ocio_colorspace:
|
||||
return
|
||||
|
||||
resolve_colorspace = get_remapped_colorspace_to_native(
|
||||
ocio_colorspace_name=ocio_colorspace,
|
||||
host_name="resolve",
|
||||
imageio_host_settings=self._host_imageio_settings
|
||||
)
|
||||
if resolve_colorspace:
|
||||
return resolve_colorspace
|
||||
else:
|
||||
self.log.warning(
|
||||
f"No mapping from OCIO colorspace '{ocio_colorspace}' "
|
||||
"found to a Resolve colorspace. "
|
||||
"Ignoring colorspace."
|
||||
)
|
||||
|
||||
def _set_colorspace_from_representation(
|
||||
self, media_pool_item, representation: dict):
|
||||
"""Set the colorspace for the media pool item.
|
||||
|
||||
Args:
|
||||
media_pool_item (MediaPoolItem): The media pool item.
|
||||
representation (dict): The representation data.
|
||||
"""
|
||||
# Set the Resolve Input Color Space for the media.
|
||||
colorspace = self._get_colorspace(representation)
|
||||
if colorspace:
|
||||
result = set_colorspace(media_pool_item, colorspace)
|
||||
if not result:
|
||||
self.log.warning(
|
||||
f"Failed to apply colorspace: {colorspace}."
|
||||
)
|
||||
|
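For illustration, a minimal sketch of how the `(is_sequence, file_info)` result above could be fed to Resolve's `MediaPool.ImportMedia` call. The scripting bootstrap is Resolve's standard one; the path and frame range are made up, and only the dict shape comes from the comment in the code above.

```python
import DaVinciResolveScript as dvr  # available inside Resolve's Python env

resolve = dvr.scriptapp("Resolve")
media_pool = resolve.GetProjectManager().GetCurrentProject().GetMediaPool()

# Example values in the shape returned by _get_file_info() for a sequence
is_sequence, file_info = True, {
    "FilePath": "/renders/sh010/sh010.%04d.exr",  # made-up path
    "StartIndex": 1001,
    "EndIndex": 1100,
}
if is_sequence:
    items = media_pool.ImportMedia([file_info])
else:
    items = media_pool.ImportMedia([file_info["FilePath"]])
```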
@ -1,3 +1,3 @@
# -*- coding: utf-8 -*-
"""Package declaring AYON addon 'resolve' version."""
__version__ = "0.2.1"
__version__ = "0.2.2"

@ -1,6 +1,6 @@
name = "resolve"
title = "DaVinci Resolve"
version = "0.2.1"
version = "0.2.2"

client_dir = "ayon_resolve"

@ -1,4 +1,9 @@
from ayon_server.settings import BaseSettingsModel, SettingsField
from pydantic import validator
from ayon_server.settings import (
    BaseSettingsModel,
    SettingsField,
    ensure_unique_names,
)

from .imageio import ResolveImageIOModel

@ -56,7 +61,7 @@ class CreateShotClipModels(BaseSettingsModel):

    workfileFrameStart: int = SettingsField(
        1001,
        title="Workfiles Start Frame",
        title="Workfile Start Frame",
        section="Shot Attributes"
    )
    handleStart: int = SettingsField(

@ -76,6 +81,62 @@ class CreatorPluginsModel(BaseSettingsModel):
    )


class MetadataMappingModel(BaseSettingsModel):
    """Metadata mapping

    Representation context data is used to format the anatomy tokens.
    The following are supported:
        - version
        - task
        - asset

    """
    name: str = SettingsField(
        "",
        title="Metadata property name"
    )
    value: str = SettingsField(
        "",
        title="Metadata value template"
    )


class LoadMediaModel(BaseSettingsModel):
    clip_color_last: str = SettingsField(
        "Olive",
        title="Clip color for last version"
    )
    clip_color_old: str = SettingsField(
        "Orange",
        title="Clip color for old version"
    )
    media_pool_bin_path: str = SettingsField(
        "Loader/{folder[path]}",
        title="Media Pool bin path template"
    )
    metadata: list[MetadataMappingModel] = SettingsField(
        default_factory=list,
        title="Metadata mapping",
        description=(
            "Set these media pool item metadata values on load and update."
            " The keys must match the exact Resolve metadata names, like"
            " 'Clip Name' or 'Shot'."
        )
    )

    @validator("metadata")
    def validate_unique_outputs(cls, value):
        ensure_unique_names(value)
        return value


class LoaderPluginsModel(BaseSettingsModel):
    LoadMedia: LoadMediaModel = SettingsField(
        default_factory=LoadMediaModel,
        title="Load Media"
    )


class ResolveSettings(BaseSettingsModel):
    launch_openpype_menu_on_start: bool = SettingsField(
        False, title="Launch OpenPype menu on start of Resolve"

@ -88,6 +149,10 @@ class ResolveSettings(BaseSettingsModel):
        default_factory=CreatorPluginsModel,
        title="Creator plugins",
    )
    load: LoaderPluginsModel = SettingsField(
        default_factory=LoaderPluginsModel,
        title="Loader plugins",
    )


DEFAULT_VALUES = {

@ -109,5 +174,35 @@ DEFAULT_VALUES = {
            "handleStart": 10,
            "handleEnd": 10
        }
    },
    "load": {
        "LoadMedia": {
            "clip_color_last": "Olive",
            "clip_color_old": "Orange",
            "media_pool_bin_path": (
                "Loader/{folder[path]}"
            ),
            "metadata": [
                {
                    "name": "Comments",
                    "value": "{version[attrib][comment]}"
                },
                {
                    "name": "Shot",
                    "value": "{folder[path]}"
                },
                {
                    "name": "Take",
                    "value": "{product[name]} {version[name]}"
                },
                {
                    "name": "Clip Name",
                    "value": (
                        "{folder[path]} {product[name]} "
                        "{version[name]} ({representation[name]})"
                    )
                }
            ]
        }
    }
}

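As a sketch of how the metadata value templates above resolve at load time, here is the same `StringTemplate.format_strict` call the loader uses, applied to a made-up context (the context values are illustrative only):

```python
from ayon_core.lib import StringTemplate

context = {
    "folder": {"path": "/shots/sq01/sh010"},
    "product": {"name": "renderMain"},
    "version": {"name": "v003", "attrib": {"comment": "fixed flicker"}},
    "representation": {"name": "exr"},
}
template = "{product[name]} {version[name]}"  # the default "Take" mapping
print(StringTemplate(template).format_strict(context))  # renderMain v003
```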
@ -1,9 +0,0 @@
## Unreal Integration

The supported Unreal Engine version is 4.26+ (mainly because of the major Python changes introduced there).

### Project naming
Unreal doesn't support project names starting with a non-alphabetic character, so names like `123_myProject` are
invalid. If AYON detects such a name, it automatically prepends the letter **P** to make the name valid, so
`123_myProject` becomes `P123_myProject`. There is also a soft limit on project name length: names should be
shorter than 20 characters. Longer names will trigger a warning in the Unreal Editor about possible side effects.
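A minimal sketch of the naming rule described above (not the addon's actual implementation; the function name and warning text are made up):

```python
import re

def sanitize_unreal_project_name(name: str, soft_limit: int = 20) -> str:
    # Prepend "P" when the name does not start with a letter
    if not re.match(r"^[a-zA-Z]", name):
        name = f"P{name}"
    # Soft limit only: warn, do not rename
    if len(name) >= soft_limit:
        print(f"Warning: project name '{name}' exceeds {soft_limit} "
              "characters; Unreal may misbehave.")
    return name

print(sanitize_unreal_project_name("123_myProject"))  # P123_myProject
```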
@ -1,10 +0,0 @@
from .version import __version__
from .addon import UNREAL_ADDON_ROOT, UnrealAddon


__all__ = (
    "__version__",

    "UNREAL_ADDON_ROOT",
    "UnrealAddon",
)

@ -1,77 +0,0 @@
import os
import re

from ayon_core.addon import AYONAddon, IHostAddon

from .version import __version__

UNREAL_ADDON_ROOT = os.path.dirname(os.path.abspath(__file__))


class UnrealAddon(AYONAddon, IHostAddon):
    name = "unreal"
    version = __version__
    host_name = "unreal"

    def get_global_environments(self):
        return {
            "AYON_UNREAL_ROOT": UNREAL_ADDON_ROOT,
        }

    def add_implementation_envs(self, env, app):
        """Modify environments to contain all required for implementation."""
        # Set AYON_UNREAL_PLUGIN required for the Unreal implementation.
        # Imports are in this method for Python 2 compatibility of an addon.
        from pathlib import Path

        from .lib import get_compatible_integration

        from ayon_core.tools.utils import show_message_dialog

        pattern = re.compile(r'^\d+-\d+$')

        if not pattern.match(app.name):
            msg = (
                "Unreal application key in the settings must be in format"
                " '5-0' or '5-1'"
            )
            show_message_dialog(
                parent=None,
                title="Unreal application name format",
                message=msg,
                level="critical")
            raise ValueError(msg)

        ue_version = app.name.replace("-", ".")
        unreal_plugin_path = os.path.join(
            UNREAL_ADDON_ROOT, "integration", "UE_{}".format(ue_version), "Ayon"
        )
        if not Path(unreal_plugin_path).exists():
            compatible_versions = get_compatible_integration(
                ue_version, Path(UNREAL_ADDON_ROOT) / "integration"
            )
            if compatible_versions:
                unreal_plugin_path = compatible_versions[-1] / "Ayon"
                unreal_plugin_path = unreal_plugin_path.as_posix()

        if env.get("AYON_UNREAL_PLUGIN") != unreal_plugin_path:
            env["AYON_UNREAL_PLUGIN"] = unreal_plugin_path

        # Set default environments if they are not set via settings
        defaults = {
            "AYON_LOG_NO_COLORS": "1",
            "UE_PYTHONPATH": os.environ.get("PYTHONPATH", ""),
        }
        for key, value in defaults.items():
            if not env.get(key):
                env[key] = value

    def get_launch_hook_paths(self, app):
        if app.host_name != self.host_name:
            return []
        return [
            os.path.join(UNREAL_ADDON_ROOT, "hooks")
        ]

    def get_workfile_extensions(self):
        return [".uproject"]

@ -1,51 +0,0 @@
# -*- coding: utf-8 -*-
"""Unreal Editor Ayon host API."""

from .plugin import (
    UnrealActorCreator,
    UnrealAssetCreator,
    Loader
)

from .pipeline import (
    install,
    uninstall,
    ls,
    publish,
    containerise,
    show_creator,
    show_loader,
    show_publisher,
    show_manager,
    show_experimental_tools,
    show_tools_dialog,
    show_tools_popup,
    instantiate,
    UnrealHost,
    set_sequence_hierarchy,
    generate_sequence,
    maintained_selection
)

__all__ = [
    "UnrealActorCreator",
    "UnrealAssetCreator",
    "Loader",
    "install",
    "uninstall",
    "ls",
    "publish",
    "containerise",
    "show_creator",
    "show_loader",
    "show_publisher",
    "show_manager",
    "show_experimental_tools",
    "show_tools_dialog",
    "show_tools_popup",
    "instantiate",
    "UnrealHost",
    "set_sequence_hierarchy",
    "generate_sequence",
    "maintained_selection"
]

@ -1,44 +0,0 @@
# -*- coding: utf-8 -*-
import unreal  # noqa


class AyonUnrealException(Exception):
    pass


@unreal.uclass()
class AyonHelpers(unreal.AyonLib):
    """Class wrapping some useful functions for Ayon.

    This class is extending a native BP class in the Ayon Integration
    Plugin.

    """

    @unreal.ufunction(params=[str, unreal.LinearColor, bool])
    def set_folder_color(self, path: str, color: unreal.LinearColor) -> None:
        """Set color on a folder in the Content Browser.

        This method sets a color on a folder in the Content Browser.
        Unfortunately there is no way to refresh the Content Browser, so the
        new color isn't applied immediately. Colors are saved to a config
        file and appear correctly only after the Editor is restarted.

        Args:
            path (str): Path to the folder
            color (:class:`unreal.LinearColor`): Color of the folder

        Example:

            AyonHelpers().set_folder_color(
                "/Game/Path", unreal.LinearColor(a=1.0, r=1.0, g=0.5, b=0)
            )

        Note:
            This will take effect only after the Editor is restarted. I
            couldn't find a way to refresh it. Also, this saves the color
            definition into the project config, binding this path with the
            color. So if you delete this path and later re-create it, it
            will get this color again.

        """
        self.c_set_folder_color(path, color, False)

@ -1,803 +0,0 @@
# -*- coding: utf-8 -*-
import os
import json
import logging
from typing import List
from contextlib import contextmanager
import time

import semver
import pyblish.api
import ayon_api

from ayon_core.pipeline import (
    register_loader_plugin_path,
    register_creator_plugin_path,
    register_inventory_action_path,
    deregister_loader_plugin_path,
    deregister_creator_plugin_path,
    deregister_inventory_action_path,
    AYON_CONTAINER_ID,
    get_current_project_name,
)
from ayon_core.tools.utils import host_tools
from ayon_core.host import HostBase, ILoadHost, IPublishHost
from ayon_unreal import UNREAL_ADDON_ROOT

import unreal  # noqa

# Rename to Ayon once parent module renames
logger = logging.getLogger("ayon_core.hosts.unreal")

AYON_CONTAINERS = "AyonContainers"
AYON_ASSET_DIR = "/Game/Ayon/Assets"
CONTEXT_CONTAINER = "Ayon/context.json"
UNREAL_VERSION = semver.VersionInfo(
    *os.getenv("AYON_UNREAL_VERSION").split(".")
)

PLUGINS_DIR = os.path.join(UNREAL_ADDON_ROOT, "plugins")
PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish")
LOAD_PATH = os.path.join(PLUGINS_DIR, "load")
CREATE_PATH = os.path.join(PLUGINS_DIR, "create")
INVENTORY_PATH = os.path.join(PLUGINS_DIR, "inventory")


class UnrealHost(HostBase, ILoadHost, IPublishHost):
    """Unreal host implementation.

    For some time this class will re-use functions from the module based
    implementation for backwards compatibility of older Unreal projects.
    """

    name = "unreal"

    def install(self):
        install()

    def get_containers(self):
        return ls()

    @staticmethod
    def show_tools_popup():
        """Show tools popup with actions leading to show other tools."""
        show_tools_popup()

    @staticmethod
    def show_tools_dialog():
        """Show tools dialog with actions leading to show other tools."""
        show_tools_dialog()

    def update_context_data(self, data, changes):
        content_path = unreal.Paths.project_content_dir()
        op_ctx = content_path + CONTEXT_CONTAINER
        attempts = 3
        for i in range(attempts):
            try:
                with open(op_ctx, "w+") as f:
                    json.dump(data, f)
                break
            except IOError as e:
                if i == attempts - 1:
                    raise Exception(
                        "Failed to write context data. Aborting.") from e
                unreal.log_warning("Failed to write context data. Retrying...")
                time.sleep(3)

    def get_context_data(self):
        content_path = unreal.Paths.project_content_dir()
        op_ctx = content_path + CONTEXT_CONTAINER
        if not os.path.isfile(op_ctx):
            return {}
        with open(op_ctx, "r") as fp:
            data = json.load(fp)
        return data


def install():
    """Install Unreal configuration for AYON."""
    print("-=" * 40)
    logo = '''.
.
         ·
        │
       ·∙/
     ·-∙•∙-·
   / \\ /∙·  / \\
  ∙  \\  │  /  ∙
   \\   \\ · /   /
    \\\\   ∙   //
      \\\\/ \\//
        ___
       │   │
       │   │
       │   │
       │___│
         -·

  ·-─═─-∙ A Y O N ∙-─═─-·
          by  YNPUT
.
'''
    print(logo)
    print("installing Ayon for Unreal ...")
    print("-=" * 40)
    logger.info("installing Ayon for Unreal")
    pyblish.api.register_host("unreal")
    pyblish.api.register_plugin_path(str(PUBLISH_PATH))
    register_loader_plugin_path(str(LOAD_PATH))
    register_creator_plugin_path(str(CREATE_PATH))
    register_inventory_action_path(str(INVENTORY_PATH))
    _register_callbacks()
    _register_events()


def uninstall():
    """Uninstall Unreal configuration for Ayon."""
    pyblish.api.deregister_plugin_path(str(PUBLISH_PATH))
    deregister_loader_plugin_path(str(LOAD_PATH))
    deregister_creator_plugin_path(str(CREATE_PATH))
    deregister_inventory_action_path(str(INVENTORY_PATH))


def _register_callbacks():
    """
    TODO: Implement callbacks if supported by UE
    """
    pass


def _register_events():
    """
    TODO: Implement events if supported by UE
    """
    pass


def ls():
    """List all containers.

    List all containers found in the Unreal *Content Manager* and return
    metadata from them, adding `objectName` to the set.

    """
    ar = unreal.AssetRegistryHelpers.get_asset_registry()
    # UE 5.1 changed how class name is specified
    class_name = ["/Script/Ayon", "AyonAssetContainer"] if UNREAL_VERSION.major == 5 and UNREAL_VERSION.minor > 0 else "AyonAssetContainer"  # noqa
    ayon_containers = ar.get_assets_by_class(class_name, True)

    # get_assets_by_class returns AssetData. To get all metadata we need to
    # load the asset. get_tag_values() works only on metadata registered in
    # Asset Registry Project settings (and there is no way to set it with
    # Python short of editing the ini configuration file).
    for asset_data in ayon_containers:
        asset = asset_data.get_asset()
        data = unreal.EditorAssetLibrary.get_metadata_tag_values(asset)
        data["objectName"] = asset_data.asset_name
        yield cast_map_to_str_dict(data)


def ls_inst():
    ar = unreal.AssetRegistryHelpers.get_asset_registry()
    # UE 5.1 changed how class name is specified
    class_name = [
        "/Script/Ayon",
        "AyonPublishInstance"
    ] if (
        UNREAL_VERSION.major == 5
        and UNREAL_VERSION.minor > 0
    ) else "AyonPublishInstance"  # noqa
    instances = ar.get_assets_by_class(class_name, True)

    # get_assets_by_class returns AssetData. To get all metadata we need to
    # load the asset. get_tag_values() works only on metadata registered in
    # Asset Registry Project settings (and there is no way to set it with
    # Python short of editing the ini configuration file).
    for asset_data in instances:
        asset = asset_data.get_asset()
        data = unreal.EditorAssetLibrary.get_metadata_tag_values(asset)
        data["objectName"] = asset_data.asset_name
        yield cast_map_to_str_dict(data)


def parse_container(container):
    """To get data from a container, AyonAssetContainer must be loaded.

    Args:
        container(str): path to container

    Returns:
        dict: metadata stored on container
    """
    asset = unreal.EditorAssetLibrary.load_asset(container)
    data = unreal.EditorAssetLibrary.get_metadata_tag_values(asset)
    data["objectName"] = asset.get_name()
    data = cast_map_to_str_dict(data)

    return data


def publish():
    """Shorthand to publish from within host."""
    import pyblish.util

    return pyblish.util.publish()


def containerise(name, namespace, nodes, context, loader=None, suffix="_CON"):
    """Bundle *nodes* (assets) into a *container* and add metadata to it.

    Unreal doesn't support *groups* of assets that you can add metadata to.
    But it does support folders that help to organize assets. Unfortunately
    those folders are just that - you cannot add any additional information
    to them. The Ayon Integration Plugin provides a way out by implementing
    the `AssetContainer` Blueprint class. This class, when added to a folder,
    can handle metadata on it using the standard
    :func:`unreal.EditorAssetLibrary.set_metadata_tag()` and
    :func:`unreal.EditorAssetLibrary.get_metadata_tag_values()`. It also
    stores and monitors all changes of assets in the path where it resides.
    The list of those assets is available as the `assets` property.

    This is a list of strings starting with the asset type and ending with
    its path:
    `Material /Game/Ayon/Test/TestMaterial.TestMaterial`

    """
    # 1 - create directory for container
    root = "/Game"
    container_name = f"{name}{suffix}"
    new_name = move_assets_to_path(root, container_name, nodes)

    # 2 - create Asset Container there
    path = f"{root}/{new_name}"
    create_container(container=container_name, path=path)

    namespace = path

    data = {
        "schema": "ayon:container-2.0",
        "id": AYON_CONTAINER_ID,
        "name": new_name,
        "namespace": namespace,
        "loader": str(loader),
        "representation": context["representation"]["id"],
    }
    # 3 - imprint data
    imprint(f"{path}/{container_name}", data)
    return path


def instantiate(root, name, data, assets=None, suffix="_INS"):
    """Bundle *nodes* into a *container*.

    Marks it with metadata as a publishable instance. If assets are provided,
    they are moved to a new path where an `AyonPublishInstance` class asset
    is created and imprinted with the metadata.

    This can then be collected for publishing, by Pyblish for example.

    Args:
        root (str): root path where to create instance container
        name (str): name of the container
        data (dict): data to imprint on container
        assets (list of str): list of asset paths to include in publish
            instance
        suffix (str): suffix string to append to instance name

    """
    container_name = f"{name}{suffix}"

    # If we specify assets, create a new folder and move them there. If not,
    # just create an empty folder.
    if assets:
        new_name = move_assets_to_path(root, container_name, assets)
    else:
        new_name = create_folder(root, name)

    path = f"{root}/{new_name}"
    create_publish_instance(instance=container_name, path=path)

    imprint(f"{path}/{container_name}", data)


def imprint(node, data):
    loaded_asset = unreal.EditorAssetLibrary.load_asset(node)
    for key, value in data.items():
        # Support values evaluated at imprint
        if callable(value):
            value = value()
        # Unreal doesn't support NoneType in metadata values
        if value is None:
            value = ""
        unreal.EditorAssetLibrary.set_metadata_tag(
            loaded_asset, key, str(value)
        )

    with unreal.ScopedEditorTransaction("Ayon containerising"):
        unreal.EditorAssetLibrary.save_asset(node)


def show_tools_popup():
    """Show popup with tools.

    Popup will disappear on click or losing focus.
    """
    from ayon_unreal.api import tools_ui

    tools_ui.show_tools_popup()


def show_tools_dialog():
    """Show dialog with tools.

    Dialog will stay visible.
    """
    from ayon_unreal.api import tools_ui

    tools_ui.show_tools_dialog()


def show_creator():
    host_tools.show_creator()


def show_loader():
    host_tools.show_loader(use_context=True)


def show_publisher():
    host_tools.show_publish()


def show_manager():
    host_tools.show_scene_inventory()


def show_experimental_tools():
    host_tools.show_experimental_tools_dialog()


def create_folder(root: str, name: str) -> str:
    """Create new folder.

    If the folder exists, append a number at the end and try again,
    incrementing if needed.

    Args:
        root (str): path root
        name (str): folder name

    Returns:
        str: folder name

    Example:
        >>> create_folder("/Game", "Foo")
        Foo
        >>> create_folder("/Game", "Foo")
        Foo1

    """
    eal = unreal.EditorAssetLibrary
    index = 1
    while True:
        if eal.does_directory_exist(f"{root}/{name}"):
            name = f"{name}{index}"
            index += 1
        else:
            eal.make_directory(f"{root}/{name}")
            break

    return name


def move_assets_to_path(root: str, name: str, assets: List[str]) -> str:
    """Move (rename) a list of asset paths to a new destination.

    Args:
        root (str): root of the path (eg. `/Game`)
        name (str): name of destination directory (eg. `Foo` )
        assets (list of str): list of asset paths

    Returns:
        str: folder name

    Example:
        This will get paths of all assets under `/Game/Test` and move them
        to `/Game/NewTest`. If `/Game/NewTest` already exists, the resulting
        path will be `/Game/NewTest1`.

        >>> assets = unreal.EditorAssetLibrary.list_assets("/Game/Test")
        >>> move_assets_to_path("/Game", "NewTest", assets)
        NewTest

    """
    eal = unreal.EditorAssetLibrary
    name = create_folder(root, name)

    unreal.log(assets)
    for asset in assets:
        loaded = eal.load_asset(asset)
        eal.rename_asset(asset, f"{root}/{name}/{loaded.get_name()}")

    return name


def create_container(container: str, path: str) -> unreal.Object:
    """Helper function to create an Asset Container class on a given path.

    This asset class helps to mark the given path as a container
    and enable asset version control on it.

    Args:
        container (str): Asset Container name
        path (str): Path where to create the Asset Container. This path
            should point into the container folder.

    Returns:
        :class:`unreal.Object`: instance of created asset

    Example:

        create_container(
            "modelingFooCharacter_CON",
            "/Game/modelingFooCharacter_CON"
        )

    """
    factory = unreal.AyonAssetContainerFactory()
    tools = unreal.AssetToolsHelpers().get_asset_tools()

    return tools.create_asset(container, path, None, factory)


def create_publish_instance(instance: str, path: str) -> unreal.Object:
    """Helper function to create an Ayon Publish Instance on a given path.

    This behaves similarly to :func:`create_container`.

    Args:
        instance (str): Publish Instance name
        path (str): Path where to create the Publish Instance.
            This path should point into the container folder.

    Returns:
        :class:`unreal.Object`: instance of created asset

    Example:

        create_publish_instance(
            "modelingFooCharacter_INST",
            "/Game/modelingFooCharacter_INST"
        )

    """
    factory = unreal.AyonPublishInstanceFactory()
    tools = unreal.AssetToolsHelpers().get_asset_tools()
    return tools.create_asset(instance, path, None, factory)


def cast_map_to_str_dict(umap) -> dict:
    """Cast an Unreal Map to a dict.

    Helper function to cast an Unreal Map object to a plain old Python
    dict. This will also cast values and keys to str. Useful for
    metadata dicts.

    Args:
        umap: Unreal Map object

    Returns:
        dict

    """
    return {str(key): str(value) for (key, value) in umap.items()}


def get_subsequences(sequence: unreal.LevelSequence):
    """Get the list of subscene sections from a sequence.

    Args:
        sequence (unreal.LevelSequence): Sequence

    Returns:
        list(unreal.MovieSceneSubSection): List of subscene sections

    """
    tracks = sequence.get_master_tracks()
    subscene_track = next(
        (
            t
            for t in tracks
            if t.get_class() == unreal.MovieSceneSubTrack.static_class()
        ),
        None,
    )
    if subscene_track is not None and subscene_track.get_sections():
        return subscene_track.get_sections()
    return []


def set_sequence_hierarchy(
    seq_i, seq_j, max_frame_i, min_frame_j, max_frame_j, map_paths
):
    # Get existing sequencer tracks or create them if they don't exist
    tracks = seq_i.get_master_tracks()
    subscene_track = None
    visibility_track = None
    for t in tracks:
        if t.get_class() == unreal.MovieSceneSubTrack.static_class():
            subscene_track = t
        if (t.get_class() ==
                unreal.MovieSceneLevelVisibilityTrack.static_class()):
            visibility_track = t
    if not subscene_track:
        subscene_track = seq_i.add_master_track(unreal.MovieSceneSubTrack)
    if not visibility_track:
        visibility_track = seq_i.add_master_track(
            unreal.MovieSceneLevelVisibilityTrack)

    # Create the sub-scene section
    subscenes = subscene_track.get_sections()
    subscene = None
    for s in subscenes:
        if s.get_editor_property('sub_sequence') == seq_j:
            subscene = s
            break
    if not subscene:
        subscene = subscene_track.add_section()
        subscene.set_row_index(len(subscene_track.get_sections()))
        subscene.set_editor_property('sub_sequence', seq_j)
        subscene.set_range(
            min_frame_j,
            max_frame_j + 1)

    # Create the visibility section
    ar = unreal.AssetRegistryHelpers.get_asset_registry()
    maps = []
    for m in map_paths:
        # Unreal requires loading the level to get the map name
        unreal.EditorLevelLibrary.save_all_dirty_levels()
        unreal.EditorLevelLibrary.load_level(m)
        maps.append(str(ar.get_asset_by_object_path(m).asset_name))

    vis_section = visibility_track.add_section()
    index = len(visibility_track.get_sections())

    vis_section.set_range(
        min_frame_j,
        max_frame_j + 1)
    vis_section.set_visibility(unreal.LevelVisibility.VISIBLE)
    vis_section.set_row_index(index)
    vis_section.set_level_names(maps)

    if min_frame_j > 1:
        hid_section = visibility_track.add_section()
        hid_section.set_range(
            1,
            min_frame_j)
        hid_section.set_visibility(unreal.LevelVisibility.HIDDEN)
        hid_section.set_row_index(index)
        hid_section.set_level_names(maps)
    if max_frame_j < max_frame_i:
        hid_section = visibility_track.add_section()
        hid_section.set_range(
            max_frame_j + 1,
            max_frame_i + 1)
        hid_section.set_visibility(unreal.LevelVisibility.HIDDEN)
        hid_section.set_row_index(index)
        hid_section.set_level_names(maps)


def generate_sequence(h, h_dir):
    tools = unreal.AssetToolsHelpers().get_asset_tools()

    sequence = tools.create_asset(
        asset_name=h,
        package_path=h_dir,
        asset_class=unreal.LevelSequence,
        factory=unreal.LevelSequenceFactoryNew()
    )

    project_name = get_current_project_name()
    # TODO Fix: this does not return the folder path, only its name
    folder_path = h_dir.split('/')[-1]
    folder_entity = ayon_api.get_folder_by_path(
        project_name,
        folder_path,
        fields={"id", "attrib.fps"}
    )

    start_frames = []
    end_frames = []

    elements = list(ayon_api.get_folders(
        project_name,
        parent_ids=[folder_entity["id"]],
        fields={"id", "attrib.clipIn", "attrib.clipOut"}
    ))
    for e in elements:
        start_frames.append(e["attrib"].get("clipIn"))
        end_frames.append(e["attrib"].get("clipOut"))

        elements.extend(ayon_api.get_folders(
            project_name,
            parent_ids=[e["id"]],
            fields={"id", "attrib.clipIn", "attrib.clipOut"}
        ))

    min_frame = min(start_frames)
    max_frame = max(end_frames)

    fps = folder_entity["attrib"].get("fps")

    sequence.set_display_rate(
        unreal.FrameRate(fps, 1.0))
    sequence.set_playback_start(min_frame)
    sequence.set_playback_end(max_frame)

    sequence.set_work_range_start(min_frame / fps)
    sequence.set_work_range_end(max_frame / fps)
    sequence.set_view_range_start(min_frame / fps)
    sequence.set_view_range_end(max_frame / fps)

    tracks = sequence.get_master_tracks()
    track = None
    for t in tracks:
        if (t.get_class() ==
                unreal.MovieSceneCameraCutTrack.static_class()):
            track = t
            break
    if not track:
        track = sequence.add_master_track(
            unreal.MovieSceneCameraCutTrack)

    return sequence, (min_frame, max_frame)


def _get_comps_and_assets(
    component_class, asset_class, old_assets, new_assets, selected
):
    eas = unreal.get_editor_subsystem(unreal.EditorActorSubsystem)

    components = []
    if selected:
        sel_actors = eas.get_selected_level_actors()
        for actor in sel_actors:
            comps = actor.get_components_by_class(component_class)
            components.extend(comps)
    else:
        comps = eas.get_all_level_actors_components()
        components = [
            c for c in comps if isinstance(c, component_class)
        ]

    # Get all the static meshes among the old assets in a dictionary with
    # the name as key
    selected_old_assets = {}
    for a in old_assets:
        asset = unreal.EditorAssetLibrary.load_asset(a)
        if isinstance(asset, asset_class):
            selected_old_assets[asset.get_name()] = asset

    # Get all the static meshes among the new assets in a dictionary with
    # the name as key
    selected_new_assets = {}
    for a in new_assets:
        asset = unreal.EditorAssetLibrary.load_asset(a)
        if isinstance(asset, asset_class):
            selected_new_assets[asset.get_name()] = asset

    return components, selected_old_assets, selected_new_assets


def replace_static_mesh_actors(old_assets, new_assets, selected):
    smes = unreal.get_editor_subsystem(unreal.StaticMeshEditorSubsystem)

    static_mesh_comps, old_meshes, new_meshes = _get_comps_and_assets(
        unreal.StaticMeshComponent,
        unreal.StaticMesh,
        old_assets,
        new_assets,
        selected
    )

    for old_name, old_mesh in old_meshes.items():
        new_mesh = new_meshes.get(old_name)

        if not new_mesh:
            continue

        smes.replace_mesh_components_meshes(
            static_mesh_comps, old_mesh, new_mesh)


def replace_skeletal_mesh_actors(old_assets, new_assets, selected):
    skeletal_mesh_comps, old_meshes, new_meshes = _get_comps_and_assets(
        unreal.SkeletalMeshComponent,
        unreal.SkeletalMesh,
        old_assets,
        new_assets,
        selected
    )

    for old_name, old_mesh in old_meshes.items():
        new_mesh = new_meshes.get(old_name)

        if not new_mesh:
            continue

        for comp in skeletal_mesh_comps:
            if comp.get_skeletal_mesh_asset() == old_mesh:
                comp.set_skeletal_mesh_asset(new_mesh)


def replace_geometry_cache_actors(old_assets, new_assets, selected):
    geometry_cache_comps, old_caches, new_caches = _get_comps_and_assets(
        unreal.GeometryCacheComponent,
        unreal.GeometryCache,
        old_assets,
        new_assets,
        selected
    )

    for old_name, old_mesh in old_caches.items():
        new_mesh = new_caches.get(old_name)

        if not new_mesh:
            continue

        for comp in geometry_cache_comps:
            if comp.get_editor_property("geometry_cache") == old_mesh:
                comp.set_geometry_cache(new_mesh)


def delete_asset_if_unused(container, asset_content):
    ar = unreal.AssetRegistryHelpers.get_asset_registry()

    references = set()

    for asset_path in asset_content:
        asset = ar.get_asset_by_object_path(asset_path)
        refs = ar.get_referencers(
            asset.package_name,
            unreal.AssetRegistryDependencyOptions(
                include_soft_package_references=False,
                include_hard_package_references=True,
                include_searchable_names=False,
                include_soft_management_references=False,
                include_hard_management_references=False
            ))
        if not refs:
            continue
        references = references.union(set(refs))

    # Filter out references that are in the Temp folder
    cleaned_references = {
        ref for ref in references if not str(ref).startswith("/Temp/")}

    # Check which of the references are Levels
    for ref in cleaned_references:
        loaded_asset = unreal.EditorAssetLibrary.load_asset(ref)
        if isinstance(loaded_asset, unreal.World):
            # If there is at least one level, we can stop; we don't want
            # to delete the container
            return

    unreal.log("Previous version unused, deleting...")

    # No levels, delete the asset
    unreal.EditorAssetLibrary.delete_directory(container["namespace"])


@contextmanager
def maintained_selection():
    """Stub to be either implemented or replaced.

    This is needed for the old publisher implementation, but
    it is not supported (yet) in UE.
    """
    try:
        yield
    finally:
        pass

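A hypothetical loader-side call of the `containerise` helper above; the asset path, context and loader name are made up for illustration, and only the signature matches the function as defined above:

```python
loaded_assets = ["/Game/Ayon/Assets/hero/hero_mesh.hero_mesh"]  # made up
container_path = containerise(
    name="modelingHero",
    namespace="",  # recomputed inside from the created container folder
    nodes=loaded_assets,
    context={"representation": {"id": "0123456789abcdef"}},
    loader="StaticMeshFBXLoader",  # hypothetical loader name
)
```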
@ -1,245 +0,0 @@
# -*- coding: utf-8 -*-
import ast
import collections
import sys
import six
from abc import (
    ABC,
    ABCMeta,
)

import unreal

from .pipeline import (
    create_publish_instance,
    imprint,
    ls_inst,
    UNREAL_VERSION
)
from ayon_core.lib import (
    BoolDef,
    UILabelDef
)
from ayon_core.pipeline import (
    Creator,
    LoaderPlugin,
    CreatorError,
    CreatedInstance
)


@six.add_metaclass(ABCMeta)
class UnrealBaseCreator(Creator):
    """Base class for Unreal creator plugins."""
    root = "/Game/Ayon/AyonPublishInstances"
    suffix = "_INS"

    @staticmethod
    def cache_instance_data(shared_data):
        """Cache instances for creators to shared data.

        Create the `unreal_cached_instances` key when needed in shared data
        and fill it with all collected instances from the scene under their
        respective creator identifiers.

        If legacy instances are detected in the scene, create
        `unreal_cached_legacy_instances` there and fill it with
        all legacy products under family as a key.

        Args:
            shared_data (Dict[str, Any]): Shared data.

        """
        if "unreal_cached_instances" in shared_data:
            return

        unreal_cached_instances = collections.defaultdict(list)
        unreal_cached_legacy_instances = collections.defaultdict(list)
        for instance in ls_inst():
            creator_id = instance.get("creator_identifier")
            if creator_id:
                unreal_cached_instances[creator_id].append(instance)
            else:
                family = instance.get("family")
                unreal_cached_legacy_instances[family].append(instance)

        shared_data["unreal_cached_instances"] = unreal_cached_instances
        shared_data["unreal_cached_legacy_instances"] = (
            unreal_cached_legacy_instances
        )

    def create(self, product_name, instance_data, pre_create_data):
        try:
            instance_name = f"{product_name}{self.suffix}"
            pub_instance = create_publish_instance(instance_name, self.root)

            instance_data["productName"] = product_name
            instance_data["instance_path"] = f"{self.root}/{instance_name}"

            instance = CreatedInstance(
                self.product_type,
                product_name,
                instance_data,
                self)
            self._add_instance_to_context(instance)

            pub_instance.set_editor_property('add_external_assets', True)
            assets = pub_instance.get_editor_property('asset_data_external')

            ar = unreal.AssetRegistryHelpers.get_asset_registry()

            for member in pre_create_data.get("members", []):
                obj = ar.get_asset_by_object_path(member).get_asset()
                assets.add(obj)

            imprint(f"{self.root}/{instance_name}", instance.data_to_store())

            return instance

        except Exception as er:
            six.reraise(
                CreatorError,
                CreatorError(f"Creator error: {er}"),
                sys.exc_info()[2])

    def collect_instances(self):
        # Cache instances if missing
        self.cache_instance_data(self.collection_shared_data)
        for instance in self.collection_shared_data[
                "unreal_cached_instances"].get(self.identifier, []):
            # Unreal saves metadata as string, so we need to convert it back
            instance['creator_attributes'] = ast.literal_eval(
                instance.get('creator_attributes', '{}'))
            instance['publish_attributes'] = ast.literal_eval(
                instance.get('publish_attributes', '{}'))
            created_instance = CreatedInstance.from_existing(instance, self)
            self._add_instance_to_context(created_instance)

    def update_instances(self, update_list):
        for created_inst, changes in update_list:
            instance_node = created_inst.get("instance_path", "")

            if not instance_node:
                unreal.log_warning(
                    f"Instance node not found for {created_inst}")
                continue

            new_values = {
                key: changes[key].new_value
                for key in changes.changed_keys
            }
            imprint(
                instance_node,
                new_values
            )

    def remove_instances(self, instances):
        for instance in instances:
            instance_node = instance.data.get("instance_path", "")
            if instance_node:
                unreal.EditorAssetLibrary.delete_asset(instance_node)

            self._remove_instance_from_context(instance)


@six.add_metaclass(ABCMeta)
class UnrealAssetCreator(UnrealBaseCreator):
    """Base class for Unreal creator plugins based on assets."""

    def create(self, product_name, instance_data, pre_create_data):
        """Create an instance of the asset.

        Args:
            product_name (str): Name of the product.
            instance_data (dict): Data for the instance.
            pre_create_data (dict): Pre-create data for the instance.

        Returns:
            CreatedInstance: Created instance.
        """
        try:
            # Check if instance data has members, filled by the plugin.
            # If not, use selection.
            if not pre_create_data.get("members"):
                pre_create_data["members"] = []

                if pre_create_data.get("use_selection"):
                    utilib = unreal.EditorUtilityLibrary
                    sel_objects = utilib.get_selected_assets()
                    pre_create_data["members"] = [
                        a.get_path_name() for a in sel_objects]

            super(UnrealAssetCreator, self).create(
                product_name,
                instance_data,
                pre_create_data)

        except Exception as er:
            six.reraise(
                CreatorError,
                CreatorError(f"Creator error: {er}"),
                sys.exc_info()[2])

    def get_pre_create_attr_defs(self):
        return [
            BoolDef("use_selection", label="Use selection", default=True)
        ]


@six.add_metaclass(ABCMeta)
class UnrealActorCreator(UnrealBaseCreator):
    """Base class for Unreal creator plugins based on actors."""

    def create(self, product_name, instance_data, pre_create_data):
        """Create an instance from actors.

        Args:
            product_name (str): Name of the product.
            instance_data (dict): Data for the instance.
            pre_create_data (dict): Pre-create data for the instance.

        Returns:
            CreatedInstance: Created instance.
        """
        try:
            if UNREAL_VERSION.major == 5:
                world = unreal.UnrealEditorSubsystem().get_editor_world()
            else:
                world = unreal.EditorLevelLibrary.get_editor_world()

            # Check if the level is saved
            if world.get_path_name().startswith("/Temp/"):
                raise CreatorError(
                    "Level must be saved before creating instances.")

            # Check if instance data has members, filled by the plugin.
            # If not, use selection.
            if not instance_data.get("members"):
                actor_subsystem = unreal.EditorActorSubsystem()
                sel_actors = actor_subsystem.get_selected_level_actors()
                selection = [a.get_path_name() for a in sel_actors]

                instance_data["members"] = selection

            instance_data["level"] = world.get_path_name()

            super(UnrealActorCreator, self).create(
                product_name,
                instance_data,
                pre_create_data)

        except Exception as er:
            six.reraise(
                CreatorError,
                CreatorError(f"Creator error: {er}"),
                sys.exc_info()[2])

    def get_pre_create_attr_defs(self):
        return [
            UILabelDef("Select actors to create instance from them.")
        ]


class Loader(LoaderPlugin, ABC):
    """This serves as a skeleton for future Ayon specific functionality."""
    pass

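A hypothetical minimal concrete creator built on the base classes above; the identifier, product type and label are illustrative, not an actual ayon-unreal plugin:

```python
class CreateStaticMeshExample(UnrealAssetCreator):
    """Example creator; only identity attributes are required."""
    identifier = "io.ayon.creators.unreal.staticmesh_example"  # made up
    product_type = "staticMesh"
    label = "Static Mesh (example)"
```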
@ -1,180 +0,0 @@
import os

import unreal

from ayon_core.settings import get_project_settings
from ayon_core.pipeline import Anatomy
from ayon_core.tools.utils import show_message_dialog
from ayon_unreal.api import pipeline


queue = None
executor = None


def _queue_finish_callback(exec, success):
    unreal.log("Render completed. Success: " + str(success))

    # Delete our reference so we don't keep it alive.
    global executor
    global queue
    del executor
    del queue


def _job_finish_callback(job, success):
    # You can make any edits you want to the editor world here, and the world
    # will be duplicated when the next render happens. Make sure you undo your
    # edits in OnQueueFinishedCallback if you don't want to leak state changes
    # into the editor world.
    unreal.log("Individual job completed.")


def start_rendering():
    """Start the rendering process."""
    unreal.log("Starting rendering...")

    # Get selected sequences
    assets = unreal.EditorUtilityLibrary.get_selected_assets()

    if not assets:
        show_message_dialog(
            title="No assets selected",
            message="No assets selected. Select a render instance.",
            level="warning")
        raise RuntimeError(
            "No assets selected. You need to select a render instance.")

    # instances = pipeline.ls_inst()
    instances = [
        a for a in assets
        if a.get_class().get_name() == "AyonPublishInstance"]

    inst_data = []

    for i in instances:
        data = pipeline.parse_container(i.get_path_name())
        if data["productType"] == "render":
            inst_data.append(data)

    try:
        project = os.environ.get("AYON_PROJECT_NAME")
        anatomy = Anatomy(project)
        root = anatomy.roots['renders']
    except Exception as e:
        raise Exception(
            "Could not find render root in anatomy settings.") from e

    render_dir = f"{root}/{project}"

    # subsystem = unreal.get_editor_subsystem(
    #     unreal.MoviePipelineQueueSubsystem)
    # queue = subsystem.get_queue()
    global queue
    queue = unreal.MoviePipelineQueue()

    ar = unreal.AssetRegistryHelpers.get_asset_registry()

    data = get_project_settings(project)
    config = None
    config_path = str(data.get("unreal").get("render_config_path"))
    if config_path and unreal.EditorAssetLibrary.does_asset_exist(config_path):
        unreal.log("Found saved render configuration")
        config = ar.get_asset_by_object_path(config_path).get_asset()

    for i in inst_data:
        sequence = ar.get_asset_by_object_path(i["sequence"]).get_asset()

        sequences = [{
            "sequence": sequence,
            "output": f"{i['output']}",
            "frame_range": (
                int(float(i["frameStart"])),
                int(float(i["frameEnd"])) + 1)
        }]
        render_list = []

        # Get all the sequences to render. If there are subsequences,
        # add them and their frame ranges to the render list. We also
        # use the names for the output paths.
        for seq in sequences:
            subscenes = pipeline.get_subsequences(seq.get('sequence'))

            if subscenes:
                for sub_seq in subscenes:
                    sequences.append({
                        "sequence": sub_seq.get_sequence(),
                        "output": (f"{seq.get('output')}/"
                                   f"{sub_seq.get_sequence().get_name()}"),
                        "frame_range": (
                            sub_seq.get_start_frame(), sub_seq.get_end_frame())
                    })
            else:
                # Avoid rendering camera sequences
                if "_camera" not in seq.get('sequence').get_name():
                    render_list.append(seq)

        # Create the rendering jobs and add them to the queue.
        for render_setting in render_list:
            job = queue.allocate_new_job(unreal.MoviePipelineExecutorJob)
            job.sequence = unreal.SoftObjectPath(i["master_sequence"])
            job.map = unreal.SoftObjectPath(i["master_level"])
            job.author = "Ayon"

            # If we have a saved configuration, copy it to the job.
            if config:
                job.get_configuration().copy_from(config)

            # User data could be used to pass data to the job, that can be
            # read in the job's OnJobFinished callback. We could,
            # for instance, pass the AyonPublishInstance's path to the job.
            # job.user_data = ""

            output_dir = render_setting.get('output')
            shot_name = render_setting.get('sequence').get_name()

            settings = job.get_configuration().find_or_add_setting_by_class(
                unreal.MoviePipelineOutputSetting)
            settings.output_resolution = unreal.IntPoint(1920, 1080)
            settings.custom_start_frame = render_setting.get("frame_range")[0]
            settings.custom_end_frame = render_setting.get("frame_range")[1]
            settings.use_custom_playback_range = True
            settings.file_name_format = f"{shot_name}" + ".{frame_number}"
            settings.output_directory.path = f"{render_dir}/{output_dir}"

            job.get_configuration().find_or_add_setting_by_class(
                unreal.MoviePipelineDeferredPassBase)

            render_format = data.get("unreal").get("render_format", "png")

            if render_format == "png":
                job.get_configuration().find_or_add_setting_by_class(
                    unreal.MoviePipelineImageSequenceOutput_PNG)
            elif render_format == "exr":
                job.get_configuration().find_or_add_setting_by_class(
                    unreal.MoviePipelineImageSequenceOutput_EXR)
            elif render_format == "jpg":
                job.get_configuration().find_or_add_setting_by_class(
                    unreal.MoviePipelineImageSequenceOutput_JPG)
            elif render_format == "bmp":
                job.get_configuration().find_or_add_setting_by_class(
                    unreal.MoviePipelineImageSequenceOutput_BMP)

    # If there are jobs in the queue, start the rendering process.
    if queue.get_jobs():
        global executor
        executor = unreal.MoviePipelinePIEExecutor()

        preroll_frames = data.get("unreal").get("preroll_frames", 0)

        settings = unreal.MoviePipelinePIEExecutorSettings()
        settings.set_editor_property(
            "initial_delay_frame_count", preroll_frames)

        executor.on_executor_finished_delegate.add_callable_unique(
            _queue_finish_callback)
        executor.on_individual_job_finished_delegate.add_callable_unique(
            _job_finish_callback)  # Only available on PIE Executor
        executor.execute(queue)

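A hedged example of driving the module above from Unreal's Python console, assuming a render `AyonPublishInstance` is selected in the Content Browser and the addon is on the Python path:

```python
from ayon_unreal.api import rendering

# Builds the MoviePipelineQueue from the selected render instances and
# starts the PIE executor, as implemented above.
rendering.start_rendering()
```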
@ -1,162 +0,0 @@
import sys
from qtpy import QtWidgets, QtCore, QtGui

from ayon_core import (
    resources,
    style
)
from ayon_core.tools.utils import host_tools
from ayon_core.tools.utils.lib import qt_app_context
from ayon_unreal.api import rendering


class ToolsBtnsWidget(QtWidgets.QWidget):
    """Widget containing clickable tool buttons."""
    tool_required = QtCore.Signal(str)

    def __init__(self, parent=None):
        super(ToolsBtnsWidget, self).__init__(parent)

        load_btn = QtWidgets.QPushButton("Load...", self)
        publish_btn = QtWidgets.QPushButton("Publisher...", self)
        manage_btn = QtWidgets.QPushButton("Manage...", self)
        render_btn = QtWidgets.QPushButton("Render...", self)
        experimental_tools_btn = QtWidgets.QPushButton(
            "Experimental tools...", self
        )

        layout = QtWidgets.QVBoxLayout(self)
        layout.setContentsMargins(0, 0, 0, 0)
        layout.addWidget(load_btn, 0)
        layout.addWidget(publish_btn, 0)
        layout.addWidget(manage_btn, 0)
        layout.addWidget(render_btn, 0)
        layout.addWidget(experimental_tools_btn, 0)
        layout.addStretch(1)

        load_btn.clicked.connect(self._on_load)
        publish_btn.clicked.connect(self._on_publish)
        manage_btn.clicked.connect(self._on_manage)
        render_btn.clicked.connect(self._on_render)
        experimental_tools_btn.clicked.connect(self._on_experimental)

    def _on_create(self):
        self.tool_required.emit("creator")

    def _on_load(self):
        self.tool_required.emit("loader")

    def _on_publish(self):
        self.tool_required.emit("publisher")

    def _on_manage(self):
        self.tool_required.emit("sceneinventory")

    def _on_render(self):
        rendering.start_rendering()

    def _on_experimental(self):
        self.tool_required.emit("experimental_tools")


class ToolsDialog(QtWidgets.QDialog):
    """Dialog with tool buttons that stays open until the user closes it."""
    def __init__(self, *args, **kwargs):
        super(ToolsDialog, self).__init__(*args, **kwargs)

        self.setWindowTitle("Ayon tools")
        icon = QtGui.QIcon(resources.get_ayon_icon_filepath())
        self.setWindowIcon(icon)

        self.setWindowFlags(
            QtCore.Qt.Window
            | QtCore.Qt.WindowStaysOnTopHint
        )
        self.setFocusPolicy(QtCore.Qt.StrongFocus)

        tools_widget = ToolsBtnsWidget(self)

        layout = QtWidgets.QVBoxLayout(self)
        layout.addWidget(tools_widget)

        tools_widget.tool_required.connect(self._on_tool_require)
        self._tools_widget = tools_widget

        self._first_show = True

    def sizeHint(self):
        result = super(ToolsDialog, self).sizeHint()
        result.setWidth(result.width() * 2)
        return result

    def showEvent(self, event):
        super(ToolsDialog, self).showEvent(event)
        if self._first_show:
            self.setStyleSheet(style.load_stylesheet())
            self._first_show = False

    def _on_tool_require(self, tool_name):
        host_tools.show_tool_by_name(tool_name, parent=self)


class ToolsPopup(ToolsDialog):
    """Popup with tool buttons that closes when it loses focus."""
    def __init__(self, *args, **kwargs):
        super(ToolsPopup, self).__init__(*args, **kwargs)

        self.setWindowFlags(
            QtCore.Qt.FramelessWindowHint
            | QtCore.Qt.Popup
        )

    def showEvent(self, event):
        super(ToolsPopup, self).showEvent(event)
        app = QtWidgets.QApplication.instance()
        app.processEvents()
        pos = QtGui.QCursor.pos()
        self.move(pos)


class WindowCache:
    """Cached objects and methods to be used in global scope."""
    _dialog = None
    _popup = None
    _first_show = True

    @classmethod
    def _before_show(cls):
        """Create QApplication if it does not exist yet."""
        if not cls._first_show:
            return

        cls._first_show = False
        if not QtWidgets.QApplication.instance():
            QtWidgets.QApplication(sys.argv)

    @classmethod
    def show_popup(cls):
        cls._before_show()
        with qt_app_context():
            if cls._popup is None:
                cls._popup = ToolsPopup()

            cls._popup.show()

    @classmethod
    def show_dialog(cls):
        cls._before_show()
        with qt_app_context():
            if cls._dialog is None:
                cls._dialog = ToolsDialog()

            cls._dialog.show()
            cls._dialog.raise_()
            cls._dialog.activateWindow()


def show_tools_popup():
    WindowCache.show_popup()


def show_tools_dialog():
    WindowCache.show_dialog()

@ -1,253 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
"""Hook to launch Unreal and prepare projects."""
import os
import copy
import shutil
import tempfile
from pathlib import Path

from qtpy import QtCore

from ayon_core import resources
from ayon_applications import (
    PreLaunchHook,
    ApplicationLaunchFailed,
    LaunchTypes,
)
from ayon_core.pipeline.workfile import get_workfile_template_key
import ayon_unreal.lib as unreal_lib
from ayon_unreal.ue_workers import (
    UEProjectGenerationWorker,
    UEPluginInstallWorker
)
from ayon_unreal.ui import SplashScreen


class UnrealPrelaunchHook(PreLaunchHook):
    """Hook to handle launching Unreal.

    This hook checks if the current workfile path contains an Unreal
    project. If not, it initializes the project and finally passes its
    path to the Unreal launcher shell script via an environment variable.

    """
    app_groups = {"unreal"}
    launch_types = {LaunchTypes.local}

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

        self.signature = f"( {self.__class__.__name__} )"

    def _get_work_filename(self):
        # Use last workfile if it was found
        if self.data.get("last_workfile_path"):
            last_workfile = Path(self.data.get("last_workfile_path"))
            if last_workfile and last_workfile.exists():
                return last_workfile.name

        # Prepare fill data and the workfile template key
        anatomy = self.data["anatomy"]
        project_entity = self.data["project_entity"]

        # Use already prepared workdir data
        workdir_data = copy.deepcopy(self.data["workdir_data"])
        task_type = workdir_data.get("task", {}).get("type")

        # QUESTION raise exception if version is part of filename template?
        workdir_data["version"] = 1
        workdir_data["ext"] = "uproject"

        # Get workfile template key for current context
        workfile_template_key = get_workfile_template_key(
            project_entity["name"],
            task_type,
            self.host_name,
        )
        # Fill templates
        template_obj = anatomy.get_template_item(
            "work", workfile_template_key, "file"
        )

        # Return filename
        return template_obj.format_strict(workdir_data)
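    # A minimal sketch of the formatting step above, assuming a workfile
    # template like "{task[name]}_v{version:0>3}.{ext}" (the template and
    # the data keys here are illustrative, not the project's real settings):
    #
    #     workdir_data = {"task": {"name": "layout", "type": "Layout"},
    #                     "version": 1, "ext": "uproject"}
    #     template_obj.format_strict(workdir_data)
    #     # -> "layout_v001.uproject"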
    def exec_plugin_install(self, engine_path: Path, env: dict = None):
        # set up the QThread and worker with necessary signals
        env = env or os.environ
        q_thread = QtCore.QThread()
        ue_plugin_worker = UEPluginInstallWorker()

        q_thread.started.connect(ue_plugin_worker.run)
        ue_plugin_worker.setup(engine_path, env)
        ue_plugin_worker.moveToThread(q_thread)

        splash_screen = SplashScreen(
            "Installing plugin",
            resources.get_resource("app_icons", "ue4.png")
        )

        # set up the splash screen with necessary triggers
        ue_plugin_worker.installing.connect(
            splash_screen.update_top_label_text
        )
        ue_plugin_worker.progress.connect(splash_screen.update_progress)
        ue_plugin_worker.log.connect(splash_screen.append_log)
        ue_plugin_worker.finished.connect(splash_screen.quit_and_close)
        ue_plugin_worker.failed.connect(splash_screen.fail)

        splash_screen.start_thread(q_thread)
        splash_screen.show_ui()

        if not splash_screen.was_proc_successful():
            raise ApplicationLaunchFailed("Couldn't run the application! "
                                          "Plugin failed to install!")
    def exec_ue_project_gen(self,
                            engine_version: str,
                            unreal_project_name: str,
                            engine_path: Path,
                            project_dir: Path):
        self.log.info((
            f"{self.signature} Creating unreal "
            f"project [ {unreal_project_name} ]"
        ))

        q_thread = QtCore.QThread()
        ue_project_worker = UEProjectGenerationWorker()
        ue_project_worker.setup(
            engine_version,
            self.data["project_name"],
            unreal_project_name,
            engine_path,
            project_dir
        )
        ue_project_worker.moveToThread(q_thread)
        q_thread.started.connect(ue_project_worker.run)

        splash_screen = SplashScreen(
            "Initializing UE project",
            resources.get_resource("app_icons", "ue4.png")
        )

        ue_project_worker.stage_begin.connect(
            splash_screen.update_top_label_text
        )
        ue_project_worker.progress.connect(splash_screen.update_progress)
        ue_project_worker.log.connect(splash_screen.append_log)
        ue_project_worker.finished.connect(splash_screen.quit_and_close)
        ue_project_worker.failed.connect(splash_screen.fail)

        splash_screen.start_thread(q_thread)
        splash_screen.show_ui()

        if not splash_screen.was_proc_successful():
            raise ApplicationLaunchFailed("Couldn't run the application! "
                                          "Failed to generate the project!")

    def execute(self):
        """Hook entry method."""
        workdir = self.launch_context.env["AYON_WORKDIR"]
        executable = str(self.launch_context.executable)
        engine_version = self.app_name.split("/")[-1].replace("-", ".")
        try:
            # Versions below 4.26 are not supported, so compare the parsed
            # version components as a tuple.
            major, minor = (
                int(part) for part in engine_version.split(".")[:2]
            )
            if (major, minor) < (4, 26):
                raise ApplicationLaunchFailed((
                    f"{self.signature} Old unsupported version of UE "
                    f"detected - {engine_version}"))
        except ValueError:
            # there can be a string in the minor version and in that case
            # the int cast fails. This probably happens only with
            # early access versions and is of no concern for this check,
            # so let's keep it quiet.
            ...
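        # For illustration, the tuple comparison above behaves like this
        # (hand-written examples):
        #
        #     (4, 25) < (4, 26)  # True  -> raises ApplicationLaunchFailed
        #     (5, 3) < (4, 26)   # False -> version is accepted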
        unreal_project_filename = self._get_work_filename()
        unreal_project_name = os.path.splitext(unreal_project_filename)[0]
        # Unreal is sensitive about project names longer than 20 chars
        if len(unreal_project_name) > 20:
            raise ApplicationLaunchFailed(
                f"Project name exceeds 20 characters ({unreal_project_name})!"
            )

        # Unreal doesn't accept non-alphabetic characters at the start
        # of the project name. This is because the project name is used
        # in various places inside the C++ code, where variable names
        # cannot start with a non-alpha character. We prepend 'P' to the
        # project name to solve it.
        # 😱
        if not unreal_project_name[:1].isalpha():
            self.log.warning((
                "Project name doesn't start with an alphabetic "
                f"character ({unreal_project_name}). Prepending 'P'"
            ))
            unreal_project_name = f"P{unreal_project_name}"
            unreal_project_filename = f'{unreal_project_name}.uproject'

        project_path = Path(os.path.join(workdir, unreal_project_name))

        self.log.info((
            f"{self.signature} requested UE version: "
            f"[ {engine_version} ]"
        ))

        project_path.mkdir(parents=True, exist_ok=True)

        # engine_path points to the specific Unreal Engine root,
        # so we are going up 3 levels from the executable itself.
        engine_path: Path = Path(executable).parents[3]

        # Check if the new env variable exists, and if it does, whether
        # the path actually contains the plugin. If not, install it.

        built_plugin_path = self.launch_context.env.get(
            "AYON_BUILT_UNREAL_PLUGIN", None)

        if unreal_lib.check_built_plugin_existance(built_plugin_path):
            self.log.info((
                f"{self.signature} using existing built Ayon plugin from "
                f"{built_plugin_path}"
            ))
            unreal_lib.copy_built_plugin(engine_path, Path(built_plugin_path))
        else:
            # Set "AYON_UNREAL_PLUGIN" to current process environment for
            # execution of `create_unreal_project`
            env_key = "AYON_UNREAL_PLUGIN"
            if self.launch_context.env.get(env_key):
                self.log.info((
                    f"{self.signature} using Ayon plugin from "
                    f"{self.launch_context.env.get(env_key)}"
                ))
                os.environ[env_key] = self.launch_context.env[env_key]

            if not unreal_lib.check_plugin_existence(engine_path):
                self.exec_plugin_install(engine_path)

        project_file = project_path / unreal_project_filename

        if not project_file.is_file():
            with tempfile.TemporaryDirectory() as temp_dir:
                self.exec_ue_project_gen(engine_version,
                                         unreal_project_name,
                                         engine_path,
                                         Path(temp_dir))
                try:
                    self.log.info((
                        f"Moving from {temp_dir} to "
                        f"{project_path.as_posix()}"
                    ))
                    shutil.copytree(
                        temp_dir, project_path, dirs_exist_ok=True)

                except shutil.Error as e:
                    raise ApplicationLaunchFailed((
                        f"{self.signature} Cannot copy directory {temp_dir} "
                        f"to {project_path.as_posix()} - {e}"
                    )) from e

        self.launch_context.env["AYON_UNREAL_VERSION"] = engine_version
        # Append project file to launch arguments
        self.launch_context.launch_args.append(
            f"\"{project_file.as_posix()}\"")
@@ -1 +0,0 @@
Subproject commit 04b35dbf5fc42d905281fc30d3a22b139c1855e5
@@ -1,551 +0,0 @@
# -*- coding: utf-8 -*-
"""Unreal launching and project tools."""

import json
import os
import platform
import re
import subprocess
from collections import OrderedDict
from distutils import dir_util
from pathlib import Path
from typing import List

from ayon_core.settings import get_project_settings


def get_engine_versions(env=None):
    """Detect Unreal Engine versions.

    This will try to detect location and versions of installed Unreal Engine.
    Location can be overridden by `UNREAL_ENGINE_LOCATION` environment
    variable.

    .. deprecated:: 3.15.4

    Args:
        env (dict, optional): Environment to use.

    Returns:
        OrderedDict: dictionary with version as a key and dir as value,
            sorted by version in ascending order.

    Example:
        >>> get_engine_versions()
        {
            "4.23": "C:/Epic Games/UE_4.23",
            "4.24": "C:/Epic Games/UE_4.24"
        }

    """
    env = env or os.environ
    engine_locations = {}
    try:
        root, dirs, _ = next(os.walk(env["UNREAL_ENGINE_LOCATION"]))

        for directory in dirs:
            if directory.startswith("UE"):
                try:
                    ver = re.split(r"[-_]", directory)[1]
                except IndexError:
                    continue
                engine_locations[ver] = os.path.join(root, directory)
    except KeyError:
        # environment variable not set
        pass
    except OSError:
        # specified directory doesn't exist
        pass
    except StopIteration:
        # specified directory doesn't exist
        pass

    # if we've got something, terminate auto-detection process
    if engine_locations:
        return OrderedDict(sorted(engine_locations.items()))

    # else kick in platform specific detection
    if platform.system().lower() == "windows":
        return OrderedDict(sorted(_win_get_engine_versions().items()))
    if platform.system().lower() == "linux":
        # on linux, there is no installation and getting Unreal Engine
        # involves a git clone, so we'll probably depend on
        # `UNREAL_ENGINE_LOCATION`.
        pass
    if platform.system().lower() == "darwin":
        return OrderedDict(sorted(_darwin_get_engine_version().items()))

    return OrderedDict()
def get_editor_exe_path(engine_path: Path, engine_version: str) -> Path:
    """Get UE Editor executable path."""
    ue_path = engine_path / "Engine/Binaries"

    ue_name = "UnrealEditor"

    # handle older versions of Unreal Engine
    if engine_version.split(".")[0] == "4":
        ue_name = "UE4Editor"

    if platform.system().lower() == "windows":
        ue_path /= f"Win64/{ue_name}.exe"

    elif platform.system().lower() == "linux":
        ue_path /= f"Linux/{ue_name}"

    elif platform.system().lower() == "darwin":
        ue_path /= f"Mac/{ue_name}"

    return ue_path
def _win_get_engine_versions():
    """Get Unreal Engine versions on Windows.

    If engines are installed via Epic Games Launcher then there is:
    `%PROGRAMDATA%/Epic/UnrealEngineLauncher/LauncherInstalled.dat`
    This file is a JSON file listing installed items; Unreal engines
    are marked with `"AppName" = "UE_X.XX"` like `UE_4.24`.

    .. deprecated:: 3.15.4

    Returns:
        dict: version as a key and path as a value.

    """
    install_json_path = os.path.join(
        os.getenv("PROGRAMDATA"),
        "Epic",
        "UnrealEngineLauncher",
        "LauncherInstalled.dat",
    )

    return _parse_launcher_locations(install_json_path)


def _darwin_get_engine_version() -> dict:
    """Get Unreal Engine versions on MacOS.

    It works the same as on Windows, just the JSON file location differs.

    .. deprecated:: 3.15.4

    Returns:
        dict: version as a key and path as a value.

    See Also:
        :func:`_win_get_engine_versions`.

    """
    install_json_path = os.path.join(
        os.getenv("HOME"),
        "Library",
        "Application Support",
        "Epic",
        "UnrealEngineLauncher",
        "LauncherInstalled.dat",
    )

    return _parse_launcher_locations(install_json_path)


def _parse_launcher_locations(install_json_path: str) -> dict:
    """Parse engine locations from the launcher JSON file.

    .. deprecated:: 3.15.4

    Args:
        install_json_path (str): Path to `LauncherInstalled.dat`.

    Returns:
        dict: with Unreal Engine versions as keys and
            paths to those engine installations as values.

    """
    engine_locations = {}
    if os.path.isfile(install_json_path):
        with open(install_json_path, "r") as ilf:
            try:
                install_data = json.load(ilf)
            except json.JSONDecodeError as e:
                raise Exception(
                    "Invalid `LauncherInstalled.dat` file. "
                    "Cannot determine Unreal Engine location."
                ) from e

        for installation in install_data.get("InstallationList", []):
            if installation.get("AppName").startswith("UE_"):
                ver = installation.get("AppName").split("_")[1]
                engine_locations[ver] = installation.get("InstallLocation")

    return engine_locations
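# For illustration, `LauncherInstalled.dat` parsed above is JSON shaped
# roughly like this (a hand-written sample; values are made up):
#
#     {
#         "InstallationList": [
#             {"AppName": "UE_4.24",
#              "InstallLocation": "C:/Epic Games/UE_4.24"}
#         ]
#     }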
def create_unreal_project(project_name: str,
                          unreal_project_name: str,
                          ue_version: str,
                          pr_dir: Path,
                          engine_path: Path,
                          dev_mode: bool = False,
                          env: dict = None) -> None:
    """This will create `.uproject` file at specified location.

    As there is no known way to create a project via the command line, this
    is the easiest option. An Unreal project file is basically a JSON file.
    If the `AYON_UNREAL_PLUGIN` environment variable is found, it is assumed
    to be the location of the Integration Plugin; its content is copied to
    the project folder and the plugin is enabled.

    Args:
        project_name (str): Name of the project in AYON.
        unreal_project_name (str): Name of the project in Unreal.
        ue_version (str): Unreal engine version (like 4.23).
        pr_dir (Path): Path to directory where project will be created.
        engine_path (Path): Path to Unreal Engine installation.
        dev_mode (bool, optional): Flag to trigger C++ style Unreal project
            needing Visual Studio and other tools to compile plugins from
            sources. This will trigger automatically if `Binaries`
            directory is not found in plugin folders as this indicates
            this is only source distribution of the plugin. Dev mode
            is also set in Settings.
        env (dict, optional): Environment to use. If not set, `os.environ`.

    Throws:
        NotImplementedError: For unsupported platforms.

    Returns:
        None

    Deprecated:
        since 3.16.0

    """

    preset = get_project_settings(project_name)["unreal"]["project_setup"]
    # get unreal engine identifier
    # -------------------------------------------------------------------
    # FIXME (antirotor): As of 4.26 there is a problem with UE4 built from
    # sources. In that case the Engine ID is calculated per machine/user
    # and not from Engine files as this code then reads. This prevents UE4
    # from directly opening the project, as it will complain about the
    # project being created in a different UE4 version. When the user
    # converts such a project to their UE4 version, the Engine ID is
    # replaced in the uproject file. If some other user tries to open it,
    # they will be presented with a similar error.

    # engine_path should be the location of UE_X.X folder

    ue_editor_exe: Path = get_editor_exe_path(engine_path, ue_version)
    cmdlet_project: Path = get_path_to_cmdlet_project(ue_version)

    project_file = pr_dir / f"{unreal_project_name}.uproject"

    print("--- Generating a new project ...")
    commandlet_cmd = [
        ue_editor_exe.as_posix(),
        cmdlet_project.as_posix(),
        "-run=AyonGenerateProject",
        project_file.resolve().as_posix()
    ]

    if dev_mode or preset["dev_mode"]:
        commandlet_cmd.append('-GenerateCode')

    gen_process = subprocess.Popen(commandlet_cmd,
                                   stdout=subprocess.PIPE,
                                   stderr=subprocess.PIPE)

    for line in gen_process.stdout:
        print(line.decode(), end='')
    gen_process.stdout.close()
    return_code = gen_process.wait()

    if return_code and return_code != 0:
        raise RuntimeError(
            (f"Failed to generate '{unreal_project_name}' project! "
             f"Exited with return code {return_code}"))

    print("--- Project has been generated successfully.")

    with open(project_file.as_posix(), mode="r+") as pf:
        pf_json = json.load(pf)
        pf_json["EngineAssociation"] = get_build_id(engine_path, ue_version)
        pf.seek(0)
        json.dump(pf_json, pf, indent=4)
        pf.truncate()
        print("--- Engine ID has been written into the project file")

    if dev_mode or preset["dev_mode"]:
        u_build_tool = get_path_to_ubt(engine_path, ue_version)

        arch = "Win64"
        if platform.system().lower() == "windows":
            arch = "Win64"
        elif platform.system().lower() == "linux":
            arch = "Linux"
        elif platform.system().lower() == "darwin":
            # we need to test this out
            arch = "Mac"

        command1 = [
            u_build_tool.as_posix(),
            "-projectfiles",
            f"-project={project_file}",
            "-progress"
        ]

        subprocess.run(command1)

        command2 = [
            u_build_tool.as_posix(),
            f"-ModuleWithSuffix={unreal_project_name},3555",
            arch,
            "Development",
            "-TargetType=Editor",
            f"-Project={project_file}",
            project_file,
            "-IgnoreJunk"
        ]

        subprocess.run(command2)

    # ensure we have PySide2 installed in engine
    python_path = None
    if platform.system().lower() == "windows":
        python_path = engine_path / ("Engine/Binaries/ThirdParty/"
                                     "Python3/Win64/python.exe")

    if platform.system().lower() == "linux":
        python_path = engine_path / ("Engine/Binaries/ThirdParty/"
                                     "Python3/Linux/bin/python3")

    if platform.system().lower() == "darwin":
        python_path = engine_path / ("Engine/Binaries/ThirdParty/"
                                     "Python3/Mac/bin/python3")

    if not python_path:
        raise NotImplementedError("Unsupported platform")
    if not python_path.exists():
        raise RuntimeError(f"Unreal Python not found at {python_path}")
    subprocess.check_call(
        [python_path.as_posix(), "-m", "pip", "install", "pyside2"])
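# For illustration, the relevant part of the generated `.uproject` JSON
# after the engine ID is written (a hand-written sample; other keys of the
# file are omitted and the ID value is made up):
#
#     {
#         "EngineAssociation": "{4ef824cb-67da-4fb8-9309-dc9ccb462e71}"
#     }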
def get_path_to_uat(engine_path: Path) -> Path:
    if platform.system().lower() == "windows":
        return engine_path / "Engine/Build/BatchFiles/RunUAT.bat"

    if platform.system().lower() in ["linux", "darwin"]:
        return engine_path / "Engine/Build/BatchFiles/RunUAT.sh"


def get_compatible_integration(
        ue_version: str, integration_root: Path) -> List[Path]:
    """Get path to compatible version of integration plugin.

    This will try to get the closest compatible versions to the one
    specified in a sorted list.

    Args:
        ue_version (str): version of the current Unreal Engine.
        integration_root (Path): path to built-in integration plugins.

    Returns:
        list of Path: Sorted list of paths closest to the specified
            version.

    """
    major, minor = ue_version.split(".")
    integration_paths = [p for p in integration_root.iterdir()
                         if p.is_dir()]

    compatible_versions = []
    for i in integration_paths:
        # parse version from path
        try:
            i_major, i_minor = re.search(
                r"(?P<major>\d+)\.(?P<minor>\d+)$", i.name).groups()
        except AttributeError:
            # in case there is no match, just skip to next
            continue

        # consider versions with a different major version so different
        # that they are incompatible
        if int(major) != int(i_major):
            continue

        compatible_versions.append(i)

    compatible_versions = sorted(set(compatible_versions))
    return compatible_versions
def get_path_to_cmdlet_project(ue_version: str) -> Path:
    cmd_project = Path(
        os.path.dirname(os.path.abspath(__file__)))

    # For now, only tested on Windows (for Linux and Mac
    # it has to be implemented)
    cmd_project /= f"integration/UE_{ue_version}"

    # if the integration doesn't exist for the current engine version,
    # try to find the closest one.
    if cmd_project.exists():
        return cmd_project / "CommandletProject/CommandletProject.uproject"

    if compatible_versions := get_compatible_integration(
        ue_version, cmd_project.parent
    ):
        return compatible_versions[-1] / "CommandletProject/CommandletProject.uproject"  # noqa: E501
    else:
        raise RuntimeError(
            ("No versions of the Unreal integration plugin are "
             "compatible with the running version "
             f"of Unreal Engine {ue_version}"))


def get_path_to_ubt(engine_path: Path, ue_version: str) -> Path:
    u_build_tool_path = engine_path / "Engine/Binaries/DotNET"

    if ue_version.split(".")[0] == "4":
        u_build_tool_path /= "UnrealBuildTool.exe"
    elif ue_version.split(".")[0] == "5":
        u_build_tool_path /= "UnrealBuildTool/UnrealBuildTool.exe"

    return Path(u_build_tool_path)
def get_build_id(engine_path: Path, ue_version: str) -> str:
    ue_modules = Path()
    if platform.system().lower() == "windows":
        ue_modules_path = engine_path / "Engine/Binaries/Win64"
        if ue_version.split(".")[0] == "4":
            ue_modules_path /= "UE4Editor.modules"
        elif ue_version.split(".")[0] == "5":
            ue_modules_path /= "UnrealEditor.modules"
        ue_modules = Path(ue_modules_path)

    if platform.system().lower() == "linux":
        ue_modules = Path(os.path.join(engine_path, "Engine", "Binaries",
                                       "Linux", "UE4Editor.modules"))

    if platform.system().lower() == "darwin":
        ue_modules = Path(os.path.join(engine_path, "Engine", "Binaries",
                                       "Mac", "UE4Editor.modules"))

    if ue_modules.exists():
        print("--- Loading Engine ID from modules file ...")
        with open(ue_modules, "r") as mp:
            loaded_modules = json.load(mp)

        if loaded_modules.get("BuildId"):
            return "{" + loaded_modules.get("BuildId") + "}"
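# For illustration, the `.modules` file read above is JSON along these
# lines (a hand-written sample; only the "BuildId" key is used here):
#
#     {
#         "BuildId": "14830424",
#         "Modules": {"UnrealEditor": "UnrealEditor.dll"}
#     }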
def check_built_plugin_existance(plugin_path) -> bool:
    if not plugin_path:
        return False

    integration_plugin_path = Path(plugin_path)

    if not integration_plugin_path.is_dir():
        raise RuntimeError("Path to the integration plugin is null!")

    if not (integration_plugin_path / "Binaries").is_dir() \
            or not (integration_plugin_path / "Intermediate").is_dir():
        return False

    return True


def copy_built_plugin(engine_path: Path, plugin_path: Path) -> None:
    ayon_plugin_path: Path = engine_path / "Engine/Plugins/Marketplace/Ayon"

    if not ayon_plugin_path.is_dir():
        ayon_plugin_path.mkdir(parents=True, exist_ok=True)

        engine_plugin_config_path: Path = ayon_plugin_path / "Config"
        engine_plugin_config_path.mkdir(exist_ok=True)

        dir_util._path_created = {}

    dir_util.copy_tree(plugin_path.as_posix(), ayon_plugin_path.as_posix())


def check_plugin_existence(engine_path: Path, env: dict = None) -> bool:
    env = env or os.environ
    integration_plugin_path: Path = Path(env.get("AYON_UNREAL_PLUGIN", ""))

    if not os.path.isdir(integration_plugin_path):
        raise RuntimeError("Path to the integration plugin is null!")

    # Create a path to the plugin in the engine
    op_plugin_path: Path = engine_path / "Engine/Plugins/Marketplace/Ayon"

    if not op_plugin_path.is_dir():
        return False

    if not (op_plugin_path / "Binaries").is_dir() \
            or not (op_plugin_path / "Intermediate").is_dir():
        return False

    return True


def try_installing_plugin(engine_path: Path, env: dict = None) -> None:
    env = env or os.environ

    integration_plugin_path: Path = Path(env.get("AYON_UNREAL_PLUGIN", ""))

    if not os.path.isdir(integration_plugin_path):
        raise RuntimeError("Path to the integration plugin is null!")

    # Create a path to the plugin in the engine
    op_plugin_path: Path = engine_path / "Engine/Plugins/Marketplace/Ayon"

    if not op_plugin_path.is_dir():
        op_plugin_path.mkdir(parents=True, exist_ok=True)

        engine_plugin_config_path: Path = op_plugin_path / "Config"
        engine_plugin_config_path.mkdir(exist_ok=True)

        dir_util._path_created = {}

    if not (op_plugin_path / "Binaries").is_dir() \
            or not (op_plugin_path / "Intermediate").is_dir():
        _build_and_move_plugin(engine_path, op_plugin_path, env)


def _build_and_move_plugin(engine_path: Path,
                           plugin_build_path: Path,
                           env: dict = None) -> None:
    uat_path: Path = get_path_to_uat(engine_path)

    env = env or os.environ
    integration_plugin_path: Path = Path(env.get("AYON_UNREAL_PLUGIN", ""))

    if uat_path.is_file():
        temp_dir: Path = integration_plugin_path.parent / "Temp"
        temp_dir.mkdir(exist_ok=True)
        uplugin_path: Path = integration_plugin_path / "Ayon.uplugin"

        # in order to successfully build the plugin,
        # it must be built outside the Engine directory and then moved
        build_plugin_cmd: List[str] = [f'{uat_path.as_posix()}',
                                       'BuildPlugin',
                                       f'-Plugin={uplugin_path.as_posix()}',
                                       f'-Package={temp_dir.as_posix()}']
        subprocess.run(build_plugin_cmd)

        # Copy the contents of the 'Temp' dir into the
        # 'Ayon' directory in the engine
        dir_util.copy_tree(temp_dir.as_posix(), plugin_build_path.as_posix())

        # We need to also copy the config folder.
        # The UAT doesn't include the Config folder in the build
        plugin_install_config_path: Path = plugin_build_path / "Config"
        integration_plugin_config_path = integration_plugin_path / "Config"

        dir_util.copy_tree(integration_plugin_config_path.as_posix(),
                           plugin_install_config_path.as_posix())

        dir_util.remove_tree(temp_dir.as_posix())
@@ -1,38 +0,0 @@
# -*- coding: utf-8 -*-
import unreal

from ayon_core.pipeline import CreatorError
from ayon_unreal.api.pipeline import UNREAL_VERSION
from ayon_unreal.api.plugin import (
    UnrealAssetCreator,
)


class CreateCamera(UnrealAssetCreator):
    """Create Camera."""

    identifier = "io.ayon.creators.unreal.camera"
    label = "Camera"
    product_type = "camera"
    icon = "fa.camera"

    def create(self, product_name, instance_data, pre_create_data):
        if pre_create_data.get("use_selection"):
            sel_objects = unreal.EditorUtilityLibrary.get_selected_assets()
            selection = [a.get_path_name() for a in sel_objects]

            if len(selection) != 1:
                raise CreatorError("Please select only one object.")

        # Add the current level path to the metadata
        if UNREAL_VERSION.major == 5:
            world = unreal.UnrealEditorSubsystem().get_editor_world()
        else:
            world = unreal.EditorLevelLibrary.get_editor_world()

        instance_data["level"] = world.get_path_name()

        super(CreateCamera, self).create(
            product_name,
            instance_data,
            pre_create_data)
@@ -1,13 +0,0 @@
# -*- coding: utf-8 -*-
from ayon_unreal.api.plugin import (
    UnrealActorCreator,
)


class CreateLayout(UnrealActorCreator):
    """Layout output for character rigs."""

    identifier = "io.ayon.creators.unreal.layout"
    label = "Layout"
    product_type = "layout"
    icon = "cubes"
@@ -1,77 +0,0 @@
# -*- coding: utf-8 -*-
import unreal

from ayon_core.pipeline import CreatorError
from ayon_unreal.api.pipeline import (
    create_folder
)
from ayon_unreal.api.plugin import (
    UnrealAssetCreator
)
from ayon_core.lib import UILabelDef


class CreateLook(UnrealAssetCreator):
    """Shader connections defining shape look."""

    identifier = "io.ayon.creators.unreal.look"
    label = "Look"
    product_type = "look"
    icon = "paint-brush"

    def create(self, product_name, instance_data, pre_create_data):
        # We need to set this to True for the parent class to work
        pre_create_data["use_selection"] = True
        sel_objects = unreal.EditorUtilityLibrary.get_selected_assets()
        selection = [a.get_path_name() for a in sel_objects]

        if len(selection) != 1:
            raise CreatorError("Please select only one asset.")

        selected_asset = selection[0]

        look_directory = "/Game/Ayon/Looks"

        # Create the folder
        folder_name = create_folder(look_directory, product_name)
        path = f"{look_directory}/{folder_name}"

        instance_data["look"] = path

        # Create a new cube static mesh
        ar = unreal.AssetRegistryHelpers.get_asset_registry()
        cube = ar.get_asset_by_object_path("/Engine/BasicShapes/Cube.Cube")

        # Get the mesh of the selected object
        original_mesh = ar.get_asset_by_object_path(selected_asset).get_asset()
        materials = original_mesh.get_editor_property('static_materials')

        pre_create_data["members"] = []

        # Add the materials to the cube
        for material in materials:
            mat_name = material.get_editor_property('material_slot_name')
            object_path = f"{path}/{mat_name}.{mat_name}"
            unreal_object = unreal.EditorAssetLibrary.duplicate_loaded_asset(
                cube.get_asset(), object_path
            )

            # Remove the default material of the cube object
            unreal_object.get_editor_property('static_materials').pop()

            unreal_object.add_material(
                material.get_editor_property('material_interface'))

            pre_create_data["members"].append(object_path)

            unreal.EditorAssetLibrary.save_asset(object_path)

        super(CreateLook, self).create(
            product_name,
            instance_data,
            pre_create_data)

    def get_pre_create_attr_defs(self):
        return [
            UILabelDef("Select the asset from which to create the look.")
        ]
@@ -1,276 +0,0 @@
# -*- coding: utf-8 -*-
from pathlib import Path

import unreal

from ayon_unreal.api.pipeline import (
    UNREAL_VERSION,
    create_folder,
    get_subsequences,
)
from ayon_unreal.api.plugin import (
    UnrealAssetCreator
)
from ayon_core.lib import (
    UILabelDef,
    UISeparatorDef,
    BoolDef,
    NumberDef
)


class CreateRender(UnrealAssetCreator):
    """Create instance for sequence for rendering"""

    identifier = "io.ayon.creators.unreal.render"
    label = "Render"
    product_type = "render"
    icon = "eye"

    def create_instance(
            self, instance_data, product_name, pre_create_data,
            selected_asset_path, master_seq, master_lvl, seq_data
    ):
        instance_data["members"] = [selected_asset_path]
        instance_data["sequence"] = selected_asset_path
        instance_data["master_sequence"] = master_seq
        instance_data["master_level"] = master_lvl
        instance_data["output"] = seq_data.get('output')
        instance_data["frameStart"] = seq_data.get('frame_range')[0]
        instance_data["frameEnd"] = seq_data.get('frame_range')[1]

        super(CreateRender, self).create(
            product_name,
            instance_data,
            pre_create_data)
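    # For reference, the `seq_data` dict consumed above is built by the
    # methods below and looks roughly like this (illustrative values):
    #
    #     {"sequence": <LevelSequence object>,
    #      "output": "seq010",
    #      "frame_range": (1001, 1100)}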
    def create_with_new_sequence(
            self, product_name, instance_data, pre_create_data
    ):
        # If the option to create a new level sequence is selected,
        # create a new level sequence and a master level.

        root = "/Game/Ayon/Sequences"

        # Create a new folder for the sequence in root
        sequence_dir_name = create_folder(root, product_name)
        sequence_dir = f"{root}/{sequence_dir_name}"

        unreal.log_warning(f"sequence_dir: {sequence_dir}")

        # Create the level sequence
        asset_tools = unreal.AssetToolsHelpers.get_asset_tools()
        seq = asset_tools.create_asset(
            asset_name=product_name,
            package_path=sequence_dir,
            asset_class=unreal.LevelSequence,
            factory=unreal.LevelSequenceFactoryNew())

        seq.set_playback_start(pre_create_data.get("start_frame"))
        seq.set_playback_end(pre_create_data.get("end_frame"))

        pre_create_data["members"] = [seq.get_path_name()]

        unreal.EditorAssetLibrary.save_asset(seq.get_path_name())

        # Create the master level
        if UNREAL_VERSION.major >= 5:
            curr_level = unreal.LevelEditorSubsystem().get_current_level()
        else:
            world = unreal.EditorLevelLibrary.get_editor_world()
            levels = unreal.EditorLevelUtils.get_levels(world)
            curr_level = levels[0] if len(levels) else None
            if not curr_level:
                raise RuntimeError("No level loaded.")
        curr_level_path = curr_level.get_outer().get_path_name()

        # If the level path does not start with "/Game/", the current
        # level is a temporary, unsaved level.
        if curr_level_path.startswith("/Game/"):
            if UNREAL_VERSION.major >= 5:
                unreal.LevelEditorSubsystem().save_current_level()
            else:
                unreal.EditorLevelLibrary.save_current_level()

        ml_path = f"{sequence_dir}/{product_name}_MasterLevel"

        if UNREAL_VERSION.major >= 5:
            unreal.LevelEditorSubsystem().new_level(ml_path)
        else:
            unreal.EditorLevelLibrary.new_level(ml_path)

        seq_data = {
            "sequence": seq,
            "output": f"{seq.get_name()}",
            "frame_range": (
                seq.get_playback_start(),
                seq.get_playback_end())}

        self.create_instance(
            instance_data, product_name, pre_create_data,
            seq.get_path_name(), seq.get_path_name(), ml_path, seq_data)
    def create_from_existing_sequence(
            self, product_name, instance_data, pre_create_data
    ):
        ar = unreal.AssetRegistryHelpers.get_asset_registry()

        sel_objects = unreal.EditorUtilityLibrary.get_selected_assets()
        selection = [
            a.get_path_name() for a in sel_objects
            if a.get_class().get_name() == "LevelSequence"]

        if len(selection) == 0:
            raise RuntimeError("Please select at least one Level Sequence.")

        seq_data = None

        for sel in selection:
            selected_asset = ar.get_asset_by_object_path(sel).get_asset()
            selected_asset_path = selected_asset.get_path_name()

            # Check if the selected asset is a level sequence asset.
            if selected_asset.get_class().get_name() != "LevelSequence":
                unreal.log_warning(
                    f"Skipping {selected_asset.get_name()}. It isn't a Level "
                    "Sequence.")

            if pre_create_data.get("use_hierarchy"):
                # The asset name is the third element of the path which
                # contains the map.
                # To take the asset name, we remove from the path the prefix
                # "/Game/Ayon/" and then we split the path by "/".
                sel_path = selected_asset_path
                asset_name = sel_path.replace(
                    "/Game/Ayon/", "").split("/")[0]

                search_path = f"/Game/Ayon/{asset_name}"
            else:
                search_path = Path(selected_asset_path).parent.as_posix()

            # Get the master sequence and the master level.
            # There should be only one sequence and one level in the
            # directory.
            try:
                ar_filter = unreal.ARFilter(
                    class_names=["LevelSequence"],
                    package_paths=[search_path],
                    recursive_paths=False)
                sequences = ar.get_assets(ar_filter)
                master_seq = sequences[0].get_asset().get_path_name()
                master_seq_obj = sequences[0].get_asset()
                ar_filter = unreal.ARFilter(
                    class_names=["World"],
                    package_paths=[search_path],
                    recursive_paths=False)
                levels = ar.get_assets(ar_filter)
                master_lvl = levels[0].get_asset().get_path_name()
            except IndexError:
                raise RuntimeError(
                    "Could not find the hierarchy for the selected sequence.")

            # If the selected asset is the master sequence, we get its data
            # and then we create the instance for the master sequence.
            # Otherwise, we cycle from the master sequence to find the
            # selected sequence and we get its data. This data will be used
            # to create the instance for the selected sequence. In
            # particular, we get the frame range of the selected sequence
            # and its final output path.
            master_seq_data = {
                "sequence": master_seq_obj,
                "output": f"{master_seq_obj.get_name()}",
                "frame_range": (
                    master_seq_obj.get_playback_start(),
                    master_seq_obj.get_playback_end())}

            if (selected_asset_path == master_seq or
                    pre_create_data.get("use_hierarchy")):
                seq_data = master_seq_data
            else:
                seq_data_list = [master_seq_data]

                for seq in seq_data_list:
                    subscenes = get_subsequences(seq.get('sequence'))

                    for sub_seq in subscenes:
                        sub_seq_obj = sub_seq.get_sequence()
                        curr_data = {
                            "sequence": sub_seq_obj,
                            "output": (f"{seq.get('output')}/"
                                       f"{sub_seq_obj.get_name()}"),
                            "frame_range": (
                                sub_seq.get_start_frame(),
                                sub_seq.get_end_frame() - 1)}

                        # If the selected asset is the current sub-sequence,
                        # we get its data and we break the loop.
                        # Otherwise, we add the current sub-sequence data to
                        # the list of sequences to check.
                        if sub_seq_obj.get_path_name() == selected_asset_path:
                            seq_data = curr_data
                            break

                        seq_data_list.append(curr_data)

                    # If we found the selected asset, we break the loop.
                    if seq_data is not None:
                        break

            # If we didn't find the selected asset, we don't create the
            # instance.
            if not seq_data:
                unreal.log_warning(
                    f"Skipping {selected_asset.get_name()}. It isn't a "
                    "sub-sequence of the master sequence.")
                continue

            self.create_instance(
                instance_data, product_name, pre_create_data,
                selected_asset_path, master_seq, master_lvl, seq_data)
    def create(self, product_name, instance_data, pre_create_data):
        if pre_create_data.get("create_seq"):
            self.create_with_new_sequence(
                product_name, instance_data, pre_create_data)
        else:
            self.create_from_existing_sequence(
                product_name, instance_data, pre_create_data)

    def get_pre_create_attr_defs(self):
        return [
            UILabelDef(
                "Select a Level Sequence to render or create a new one."
            ),
            BoolDef(
                "create_seq",
                label="Create a new Level Sequence",
                default=False
            ),
            UILabelDef(
                "WARNING: If you create a new Level Sequence, the current\n"
                "level will be saved and a new Master Level will be created."
            ),
            NumberDef(
                "start_frame",
                label="Start Frame",
                default=0,
                minimum=-999999,
                maximum=999999
            ),
            NumberDef(
                "end_frame",
                label="End Frame",
                default=150,
                minimum=-999999,
                maximum=999999
            ),
            UISeparatorDef(),
            UILabelDef(
                "The following settings are valid only if you are not\n"
                "creating a new sequence."
            ),
            BoolDef(
                "use_hierarchy",
                label="Use Hierarchy",
                default=False
            ),
        ]
@@ -1,13 +0,0 @@
# -*- coding: utf-8 -*-
from ayon_unreal.api.plugin import (
    UnrealAssetCreator,
)


class CreateStaticMeshFBX(UnrealAssetCreator):
    """Create Static Meshes as FBX geometry."""

    identifier = "io.ayon.creators.unreal.staticmeshfbx"
    label = "Static Mesh (FBX)"
    product_type = "unrealStaticMesh"
    icon = "cube"
@@ -1,66 +0,0 @@
# -*- coding: utf-8 -*-
from pathlib import Path

import unreal

from ayon_core.pipeline import CreatorError
from ayon_unreal.api.plugin import (
    UnrealAssetCreator,
)


class CreateUAsset(UnrealAssetCreator):
    """Create UAsset."""

    identifier = "io.ayon.creators.unreal.uasset"
    label = "UAsset"
    product_type = "uasset"
    icon = "cube"

    extension = ".uasset"

    def create(self, product_name, instance_data, pre_create_data):
        if pre_create_data.get("use_selection"):
            ar = unreal.AssetRegistryHelpers.get_asset_registry()

            sel_objects = unreal.EditorUtilityLibrary.get_selected_assets()
            selection = [a.get_path_name() for a in sel_objects]

            if len(selection) != 1:
                raise CreatorError("Please select only one object.")

            obj = selection[0]

            asset = ar.get_asset_by_object_path(obj).get_asset()
            sys_path = unreal.SystemLibrary.get_system_path(asset)

            if not sys_path:
                raise CreatorError(
                    f"{Path(obj).name} is not on the disk. Likely it needs "
                    "to be saved first.")

            if Path(sys_path).suffix != self.extension:
                raise CreatorError(
                    f"{Path(sys_path).name} is not a {self.label}.")

        super(CreateUAsset, self).create(
            product_name,
            instance_data,
            pre_create_data)


class CreateUMap(CreateUAsset):
    """Create Level."""

    identifier = "io.ayon.creators.unreal.umap"
    label = "Level"
    product_type = "uasset"
    extension = ".umap"

    def create(self, product_name, instance_data, pre_create_data):
        instance_data["families"] = ["umap"]

        super(CreateUMap, self).create(
            product_name,
            instance_data,
            pre_create_data)
@@ -1,66 +0,0 @@
import unreal

from ayon_unreal.api.tools_ui import qt_app_context
from ayon_unreal.api.pipeline import delete_asset_if_unused
from ayon_core.pipeline import InventoryAction


class DeleteUnusedAssets(InventoryAction):
    """Delete all the assets that are not used in any level.
    """

    label = "Delete Unused Assets"
    icon = "trash"
    color = "red"
    order = 1

    dialog = None

    def _delete_unused_assets(self, containers):
        allowed_families = ["model", "rig"]

        for container in containers:
            container_dir = container.get("namespace")
            if container.get("family") not in allowed_families:
                unreal.log_warning(
                    f"Container {container_dir} is not supported.")
                continue

            asset_content = unreal.EditorAssetLibrary.list_assets(
                container_dir, recursive=True, include_folder=False
            )

            delete_asset_if_unused(container, asset_content)

    def _show_confirmation_dialog(self, containers):
        from qtpy import QtCore
        from ayon_core.tools.utils import SimplePopup
        from ayon_core.style import load_stylesheet

        dialog = SimplePopup()
        dialog.setWindowFlags(
            QtCore.Qt.Window
            | QtCore.Qt.WindowStaysOnTopHint
        )
        dialog.setFocusPolicy(QtCore.Qt.StrongFocus)
        dialog.setWindowTitle("Delete all unused assets")
        dialog.set_message(
            "You are about to delete all the assets in the project that \n"
            "are not used in any level. Are you sure you want to continue?"
        )
        dialog.set_button_text("Delete")

        dialog.on_clicked.connect(
            lambda: self._delete_unused_assets(containers)
        )

        dialog.show()
        dialog.raise_()
        dialog.activateWindow()
        dialog.setStyleSheet(load_stylesheet())

        self.dialog = dialog

    def process(self, containers):
        with qt_app_context():
            self._show_confirmation_dialog(containers)
@@ -1,84 +0,0 @@
import unreal

from ayon_unreal.api.pipeline import (
    ls,
    replace_static_mesh_actors,
    replace_skeletal_mesh_actors,
    replace_geometry_cache_actors,
)
from ayon_core.pipeline import InventoryAction


def update_assets(containers, selected):
    allowed_families = ["model", "rig"]

    # Get all the containers in the Unreal Project
    all_containers = ls()

    for container in containers:
        container_dir = container.get("namespace")
        if container.get("family") not in allowed_families:
            unreal.log_warning(
                f"Container {container_dir} is not supported.")
            continue

        # Get all containers with same asset_name but different objectName.
        # These are the containers that need to be updated in the level.
        sa_containers = [
            i
            for i in all_containers
            if (
                i.get("asset_name") == container.get("asset_name") and
                i.get("objectName") != container.get("objectName")
            )
        ]

        asset_content = unreal.EditorAssetLibrary.list_assets(
            container_dir, recursive=True, include_folder=False
        )

        # Update all actors in level
        for sa_cont in sa_containers:
            sa_dir = sa_cont.get("namespace")
            old_content = unreal.EditorAssetLibrary.list_assets(
                sa_dir, recursive=True, include_folder=False
            )

            if container.get("family") == "rig":
                replace_skeletal_mesh_actors(
                    old_content, asset_content, selected)
                replace_static_mesh_actors(
                    old_content, asset_content, selected)
            elif container.get("family") == "model":
                if container.get("loader") == "PointCacheAlembicLoader":
                    replace_geometry_cache_actors(
                        old_content, asset_content, selected)
                else:
                    replace_static_mesh_actors(
                        old_content, asset_content, selected)

            unreal.EditorLevelLibrary.save_current_level()


class UpdateAllActors(InventoryAction):
    """Update all the Actors in the current level to the version of the asset
    selected in the scene manager.
    """

    label = "Replace all Actors in level to this version"
    icon = "arrow-up"

    def process(self, containers):
        update_assets(containers, False)


class UpdateSelectedActors(InventoryAction):
    """Update only the selected Actors in the current level to the version
    of the asset selected in the scene manager.
    """

    label = "Replace selected Actors in level to this version"
    icon = "arrow-up"

    def process(self, containers):
        update_assets(containers, True)
@@ -1,176 +0,0 @@
# -*- coding: utf-8 -*-
"""Load Alembic Animation."""
import os

from ayon_core.pipeline import (
    get_representation_path,
    AYON_CONTAINER_ID
)
from ayon_unreal.api import plugin
from ayon_unreal.api import pipeline as unreal_pipeline
import unreal  # noqa


class AnimationAlembicLoader(plugin.Loader):
    """Load Unreal SkeletalMesh from Alembic"""

    product_types = {"animation"}
    label = "Import Alembic Animation"
    representations = {"abc"}
    icon = "cube"
    color = "orange"

    def get_task(self, filename, asset_dir, asset_name, replace):
        task = unreal.AssetImportTask()
        options = unreal.AbcImportSettings()
        sm_settings = unreal.AbcStaticMeshSettings()
        conversion_settings = unreal.AbcConversionSettings(
            preset=unreal.AbcConversionPreset.CUSTOM,
            flip_u=False, flip_v=False,
            rotation=[0.0, 0.0, 0.0],
            scale=[1.0, 1.0, -1.0])

        task.set_editor_property('filename', filename)
        task.set_editor_property('destination_path', asset_dir)
        task.set_editor_property('destination_name', asset_name)
        task.set_editor_property('replace_existing', replace)
        task.set_editor_property('automated', True)
        task.set_editor_property('save', True)

        options.set_editor_property(
            'import_type', unreal.AlembicImportType.SKELETAL)

        options.static_mesh_settings = sm_settings
        options.conversion_settings = conversion_settings
        task.options = options

        return task

    def load(self, context, name, namespace, data):
        """Load and containerise representation into Content Browser.

        This is a two-step process. First, the Alembic file is imported to a
        temporary path, then `containerise()` is called on it - this moves
        all content to a new directory, creates an AssetContainer there and
        imprints it with metadata. This will mark this path as a container.

        Args:
            context (dict): application context
            name (str): Product name
            namespace (str): in Unreal this is basically path to container.
                This is not passed here, so namespace is set
                by `containerise()` because only then we know
                real path.
            data (dict): Those would be data to be imprinted. This is not used
                now, data are imprinted by `containerise()`.

        Returns:
            list(str): list of container content
        """

        # Create directory for asset and ayon container
        root = unreal_pipeline.AYON_ASSET_DIR
        folder_name = context["folder"]["name"]
        folder_path = context["folder"]["path"]
        product_type = context["product"]["productType"]
        suffix = "_CON"
        if folder_name:
            asset_name = "{}_{}".format(folder_name, name)
        else:
            asset_name = "{}".format(name)
        version = context["version"]["version"]
        # Check if version is hero version and use different name
        if version < 0:
            name_version = f"{name}_hero"
        else:
            name_version = f"{name}_v{version:03d}"

        tools = unreal.AssetToolsHelpers().get_asset_tools()
        asset_dir, container_name = tools.create_unique_asset_name(
            f"{root}/{folder_name}/{name_version}", suffix="")

        container_name += suffix

        if not unreal.EditorAssetLibrary.does_directory_exist(asset_dir):
            unreal.EditorAssetLibrary.make_directory(asset_dir)

        path = self.filepath_from_context(context)
        task = self.get_task(path, asset_dir, asset_name, False)

        asset_tools = unreal.AssetToolsHelpers.get_asset_tools()
        asset_tools.import_asset_tasks([task])

        # Create Asset Container
        unreal_pipeline.create_container(
            container=container_name, path=asset_dir)

        data = {
            "schema": "ayon:container-2.0",
            "id": AYON_CONTAINER_ID,
            "folder_path": folder_path,
            "namespace": asset_dir,
            "container_name": container_name,
            "asset_name": asset_name,
            "loader": str(self.__class__.__name__),
            "representation": context["representation"]["id"],
            "parent": context["representation"]["versionId"],
            "product_type": product_type,
            # TODO these should be probably removed
            "asset": folder_path,
            "family": product_type,
        }
        unreal_pipeline.imprint(
            f"{asset_dir}/{container_name}", data)

        asset_content = unreal.EditorAssetLibrary.list_assets(
            asset_dir, recursive=True, include_folder=True
        )

        for a in asset_content:
            unreal.EditorAssetLibrary.save_asset(a)

        return asset_content

    def update(self, container, context):
        folder_name = container["asset_name"]
        repre_entity = context["representation"]
        source_path = get_representation_path(repre_entity)
        destination_path = container["namespace"]

        task = self.get_task(
            source_path, destination_path, folder_name, True
        )

        # import the Alembic file and replace existing data
        asset_tools = unreal.AssetToolsHelpers.get_asset_tools()
        asset_tools.import_asset_tasks([task])

        container_path = f"{container['namespace']}/{container['objectName']}"

        # update metadata
        unreal_pipeline.imprint(
            container_path,
            {
                "representation": repre_entity["id"],
                "parent": repre_entity["versionId"],
            })

        asset_content = unreal.EditorAssetLibrary.list_assets(
            destination_path, recursive=True, include_folder=True
        )

        for a in asset_content:
            unreal.EditorAssetLibrary.save_asset(a)

    def remove(self, container):
        path = container["namespace"]
        parent_path = os.path.dirname(path)

        unreal.EditorAssetLibrary.delete_directory(path)

        asset_content = unreal.EditorAssetLibrary.list_assets(
            parent_path, recursive=False
        )

        if len(asset_content) == 0:
            unreal.EditorAssetLibrary.delete_directory(parent_path)
@@ -1,337 +0,0 @@

# -*- coding: utf-8 -*-
"""Load FBX with animations."""
import os
import json

import unreal
from unreal import EditorAssetLibrary
from unreal import MovieSceneSkeletalAnimationTrack
from unreal import MovieSceneSkeletalAnimationSection

from ayon_core.pipeline.context_tools import get_current_folder_entity
from ayon_core.pipeline import (
    get_representation_path,
    AYON_CONTAINER_ID
)
from ayon_unreal.api import plugin
from ayon_unreal.api import pipeline as unreal_pipeline


class AnimationFBXLoader(plugin.Loader):
    """Load FBX animation onto an Unreal SkeletalMesh."""

    product_types = {"animation"}
    label = "Import FBX Animation"
    representations = {"fbx"}
    icon = "cube"
    color = "orange"

    def _process(self, path, asset_dir, asset_name, instance_name):
        automated = False
        actor = None

        task = unreal.AssetImportTask()
        task.options = unreal.FbxImportUI()

        if instance_name:
            automated = True
            # Old method to get the actor
            # actor_name = 'PersistentLevel.' + instance_name
            # actor = unreal.EditorLevelLibrary.get_actor_reference(actor_name)
            actors = unreal.EditorLevelLibrary.get_all_level_actors()
            for a in actors:
                if a.get_class().get_name() != "SkeletalMeshActor":
                    continue
                if a.get_actor_label() == instance_name:
                    actor = a
                    break
            if not actor:
                raise Exception(f"Could not find actor {instance_name}")
            skeleton = actor.skeletal_mesh_component.skeletal_mesh.skeleton
            task.options.set_editor_property('skeleton', skeleton)

        if not actor:
            return None

        folder_entity = get_current_folder_entity(fields=["attrib.fps"])

        task.set_editor_property('filename', path)
        task.set_editor_property('destination_path', asset_dir)
        task.set_editor_property('destination_name', asset_name)
        task.set_editor_property('replace_existing', False)
        task.set_editor_property('automated', automated)
        task.set_editor_property('save', False)

        # set import options here
        task.options.set_editor_property(
            'automated_import_should_detect_type', False)
        task.options.set_editor_property(
            'original_import_type', unreal.FBXImportType.FBXIT_SKELETAL_MESH)
        task.options.set_editor_property(
            'mesh_type_to_import', unreal.FBXImportType.FBXIT_ANIMATION)
        task.options.set_editor_property('import_mesh', False)
        task.options.set_editor_property('import_animations', True)
        task.options.set_editor_property('override_full_name', True)

        task.options.anim_sequence_import_data.set_editor_property(
            'animation_length',
            unreal.FBXAnimationLengthImportType.FBXALIT_EXPORTED_TIME
        )
        task.options.anim_sequence_import_data.set_editor_property(
            'import_meshes_in_bone_hierarchy', False)
        task.options.anim_sequence_import_data.set_editor_property(
            'use_default_sample_rate', False)
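        # Sample the animation at the folder's fps rather than the rate
        # stored in the FBX file.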
        task.options.anim_sequence_import_data.set_editor_property(
            'custom_sample_rate', folder_entity.get("attrib", {}).get("fps"))
        task.options.anim_sequence_import_data.set_editor_property(
            'import_custom_attribute', True)
        task.options.anim_sequence_import_data.set_editor_property(
            'import_bone_tracks', True)
        task.options.anim_sequence_import_data.set_editor_property(
            'remove_redundant_keys', False)
        task.options.anim_sequence_import_data.set_editor_property(
            'convert_scene', True)

        unreal.AssetToolsHelpers.get_asset_tools().import_asset_tasks([task])

        asset_content = EditorAssetLibrary.list_assets(
            asset_dir, recursive=True, include_folder=True
        )

        animation = None

        for a in asset_content:
            imported_asset_data = EditorAssetLibrary.find_asset_data(a)
            imported_asset = unreal.AssetRegistryHelpers.get_asset(
                imported_asset_data)
            if imported_asset.__class__ == unreal.AnimSequence:
                animation = imported_asset
                break

        if animation:
            animation.set_editor_property('enable_root_motion', True)
            actor.skeletal_mesh_component.set_editor_property(
                'animation_mode', unreal.AnimationMode.ANIMATION_SINGLE_NODE)
            actor.skeletal_mesh_component.animation_data.set_editor_property(
                'anim_to_play', animation)

        return animation

    def load(self, context, name, namespace, options=None):
        """
        Load and containerise representation into Content Browser.

        This is a two-step process. First, the FBX is imported to a temporary
        path, then `containerise()` is called on it - this moves all content
        to a new directory and creates an AssetContainer there, imprinting it
        with metadata. This marks the path as a container.

        Args:
            context (dict): application context
            name (str): Product name
            namespace (str): in Unreal this is basically path to container.
                This is not passed here, so namespace is set
                by `containerise()` because only then we know
                real path.
            options (dict): Those would be data to be imprinted. This is not
                used now, data are imprinted by `containerise()`.

        Returns:
            list(str): list of container content
        """
        # Create directory for asset and Ayon container
        root = "/Game/Ayon"
        folder_path = context["folder"]["path"]
        hierarchy = folder_path.lstrip("/").split("/")
        folder_name = hierarchy.pop(-1)
        product_type = context["product"]["productType"]

        suffix = "_CON"
        asset_name = f"{folder_name}_{name}" if folder_name else f"{name}"
        tools = unreal.AssetToolsHelpers().get_asset_tools()
        asset_dir, container_name = tools.create_unique_asset_name(
            f"{root}/Animations/{folder_name}/{name}", suffix="")

        ar = unreal.AssetRegistryHelpers.get_asset_registry()

        _filter = unreal.ARFilter(
            class_names=["World"],
            package_paths=[f"{root}/{hierarchy[0]}"],
            recursive_paths=False)
        levels = ar.get_assets(_filter)
        master_level = levels[0].get_asset().get_path_name()

        hierarchy_dir = root
        for h in hierarchy:
            hierarchy_dir = f"{hierarchy_dir}/{h}"
        hierarchy_dir = f"{hierarchy_dir}/{folder_name}"

        _filter = unreal.ARFilter(
            class_names=["World"],
            package_paths=[f"{hierarchy_dir}/"],
            recursive_paths=True)
        levels = ar.get_assets(_filter)
        level = levels[0].get_asset().get_path_name()

        unreal.EditorLevelLibrary.save_all_dirty_levels()
        unreal.EditorLevelLibrary.load_level(level)

        container_name += suffix

        EditorAssetLibrary.make_directory(asset_dir)

        path = self.filepath_from_context(context)
        libpath = path.replace(".fbx", ".json")
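
        # The JSON sidecar published next to the FBX stores the name of the
        # source instance, which is used to find the matching actor below.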
        with open(libpath, "r") as fp:
            data = json.load(fp)

        instance_name = data.get("instance_name")

        animation = self._process(path, asset_dir, asset_name, instance_name)

        asset_content = EditorAssetLibrary.list_assets(
            hierarchy_dir, recursive=True, include_folder=False)

        # Get the sequence for the layout, excluding the camera one.
        sequences = [a for a in asset_content
                     if (EditorAssetLibrary.find_asset_data(a).get_class() ==
                         unreal.LevelSequence.static_class() and
                         "_camera" not in a.split("/")[-1])]

        ar = unreal.AssetRegistryHelpers.get_asset_registry()

        for s in sequences:
            sequence = ar.get_asset_by_object_path(s).get_asset()
            possessables = [
                p for p in sequence.get_possessables()
                if p.get_display_name() == instance_name]

            for p in possessables:
                tracks = [
                    t for t in p.get_tracks()
                    if (t.get_class() ==
                        MovieSceneSkeletalAnimationTrack.static_class())]

                for t in tracks:
                    sections = [
                        s for s in t.get_sections()
                        if (s.get_class() ==
                            MovieSceneSkeletalAnimationSection.static_class())]

                    for s in sections:
                        s.params.set_editor_property('animation', animation)

        # Create Asset Container
        unreal_pipeline.create_container(
            container=container_name, path=asset_dir)

        data = {
            "schema": "ayon:container-2.0",
            "id": AYON_CONTAINER_ID,
            "namespace": asset_dir,
            "container_name": container_name,
            "asset_name": asset_name,
            "loader": str(self.__class__.__name__),
            "representation": context["representation"]["id"],
            "parent": context["representation"]["versionId"],
            "folder_path": folder_path,
            "product_type": product_type,
            # TODO these should probably be removed
            "asset": folder_path,
            "family": product_type
        }
        unreal_pipeline.imprint(f"{asset_dir}/{container_name}", data)

        imported_content = EditorAssetLibrary.list_assets(
            asset_dir, recursive=True, include_folder=False)

        for a in imported_content:
            EditorAssetLibrary.save_asset(a)

        unreal.EditorLevelLibrary.save_current_level()
        unreal.EditorLevelLibrary.load_level(master_level)

    def update(self, container, context):
        repre_entity = context["representation"]
        folder_name = container["asset_name"]
        source_path = get_representation_path(repre_entity)
        folder_entity = get_current_folder_entity(fields=["attrib.fps"])
        destination_path = container["namespace"]

        task = unreal.AssetImportTask()
        task.options = unreal.FbxImportUI()

        task.set_editor_property('filename', source_path)
        task.set_editor_property('destination_path', destination_path)
        # strip suffix
        task.set_editor_property('destination_name', folder_name)
        task.set_editor_property('replace_existing', True)
        task.set_editor_property('automated', True)
        task.set_editor_property('save', True)

        # set import options here
        task.options.set_editor_property(
            'automated_import_should_detect_type', False)
        task.options.set_editor_property(
            'original_import_type', unreal.FBXImportType.FBXIT_SKELETAL_MESH)
        task.options.set_editor_property(
            'mesh_type_to_import', unreal.FBXImportType.FBXIT_ANIMATION)
        task.options.set_editor_property('import_mesh', False)
        task.options.set_editor_property('import_animations', True)
        task.options.set_editor_property('override_full_name', True)

        task.options.anim_sequence_import_data.set_editor_property(
            'animation_length',
            unreal.FBXAnimationLengthImportType.FBXALIT_EXPORTED_TIME
        )
        task.options.anim_sequence_import_data.set_editor_property(
            'import_meshes_in_bone_hierarchy', False)
        task.options.anim_sequence_import_data.set_editor_property(
            'use_default_sample_rate', False)
        task.options.anim_sequence_import_data.set_editor_property(
            'custom_sample_rate', folder_entity.get("attrib", {}).get("fps"))
        task.options.anim_sequence_import_data.set_editor_property(
            'import_custom_attribute', True)
        task.options.anim_sequence_import_data.set_editor_property(
            'import_bone_tracks', True)
        task.options.anim_sequence_import_data.set_editor_property(
            'remove_redundant_keys', False)
        task.options.anim_sequence_import_data.set_editor_property(
            'convert_scene', True)

        skeletal_mesh = EditorAssetLibrary.load_asset(
            container.get('namespace') + "/" + container.get('asset_name'))
        skeleton = skeletal_mesh.get_editor_property('skeleton')
        task.options.set_editor_property('skeleton', skeleton)

        # do import fbx and replace existing data
        unreal.AssetToolsHelpers.get_asset_tools().import_asset_tasks([task])
        container_path = f'{container["namespace"]}/{container["objectName"]}'
        # update metadata
        unreal_pipeline.imprint(
            container_path,
            {
                "representation": repre_entity["id"],
                "parent": repre_entity["versionId"],
            })

        asset_content = EditorAssetLibrary.list_assets(
            destination_path, recursive=True, include_folder=True
        )

        for a in asset_content:
            EditorAssetLibrary.save_asset(a)

    def remove(self, container):
        path = container["namespace"]
        parent_path = os.path.dirname(path)

        EditorAssetLibrary.delete_directory(path)

        asset_content = EditorAssetLibrary.list_assets(
            parent_path, recursive=False, include_folder=True
        )

        if len(asset_content) == 0:
            EditorAssetLibrary.delete_directory(parent_path)

@@ -1,591 +0,0 @@

# -*- coding: utf-8 -*-
"""Load camera from FBX."""
from pathlib import Path

import ayon_api

import unreal
from unreal import (
    EditorAssetLibrary,
    EditorLevelLibrary,
    EditorLevelUtils,
    LevelSequenceEditorBlueprintLibrary as LevelSequenceLib,
)
from ayon_core.pipeline import (
    AYON_CONTAINER_ID,
    get_current_project_name,
    get_representation_path,
)
from ayon_unreal.api import plugin
from ayon_unreal.api.pipeline import (
    generate_sequence,
    set_sequence_hierarchy,
    create_container,
    imprint,
)


class CameraLoader(plugin.Loader):
    """Load camera from FBX"""

    product_types = {"camera"}
    label = "Load Camera"
    representations = {"fbx"}
    icon = "cube"
    color = "orange"

    def _import_camera(
        self, world, sequence, bindings, import_fbx_settings, import_filename
    ):
        ue_version = unreal.SystemLibrary.get_engine_version().split('.')
        ue_major = int(ue_version[0])
        ue_minor = int(ue_version[1])
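
        # SequencerTools.import_fbx was replaced by import_level_sequence_fbx
        # around UE 4.27, hence the version check below.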
        if ue_major == 4 and ue_minor <= 26:
            unreal.SequencerTools.import_fbx(
                world,
                sequence,
                bindings,
                import_fbx_settings,
                import_filename
            )
        elif (ue_major == 4 and ue_minor >= 27) or ue_major == 5:
            unreal.SequencerTools.import_level_sequence_fbx(
                world,
                sequence,
                bindings,
                import_fbx_settings,
                import_filename
            )
        else:
            raise NotImplementedError(
                f"Unreal version {ue_major} not supported")

    def load(self, context, name, namespace, data):
        """
        Load and containerise representation into Content Browser.

        This is a two-step process. First, the FBX is imported to a temporary
        path, then `containerise()` is called on it - this moves all content
        to a new directory and creates an AssetContainer there, imprinting it
        with metadata. This marks the path as a container.

        Args:
            context (dict): application context
            name (str): Product name
            namespace (str): in Unreal this is basically path to container.
                This is not passed here, so namespace is set
                by `containerise()` because only then we know
                real path.
            data (dict): Those would be data to be imprinted. This is not used
                now, data are imprinted by `containerise()`.

        Returns:
            list(str): list of container content
        """

        # Create directory for asset and Ayon container
        folder_entity = context["folder"]
        folder_attributes = folder_entity["attrib"]
        folder_path = folder_entity["path"]
        hierarchy_parts = folder_path.split("/")
        # Remove empty string
        hierarchy_parts.pop(0)
        # Pop folder name
        folder_name = hierarchy_parts.pop(-1)

        root = "/Game/Ayon"
        hierarchy_dir = root
        hierarchy_dir_list = []
        for h in hierarchy_parts:
            hierarchy_dir = f"{hierarchy_dir}/{h}"
            hierarchy_dir_list.append(hierarchy_dir)
        suffix = "_CON"
        asset_name = f"{folder_name}_{name}" if folder_name else name

        tools = unreal.AssetToolsHelpers().get_asset_tools()

        # Create a unique name for the camera directory
        unique_number = 1
        if EditorAssetLibrary.does_directory_exist(
            f"{hierarchy_dir}/{folder_name}"
        ):
            asset_content = EditorAssetLibrary.list_assets(
                f"{root}/{folder_name}", recursive=False, include_folder=True
            )

            # Get highest number to make a unique name
            folders = [a for a in asset_content
                       if a[-1] == "/" and f"{name}_" in a]
            # Get number from folder name. Splits the string by "_" and
            # removes the last element (which is a "/").
            f_numbers = [int(f.split("_")[-1][:-1]) for f in folders]
            f_numbers.sort()
            unique_number = f_numbers[-1] + 1 if f_numbers else 1

        asset_dir, container_name = tools.create_unique_asset_name(
            f"{hierarchy_dir}/{folder_name}/{name}_{unique_number:02d}", suffix="")

        container_name += suffix

        EditorAssetLibrary.make_directory(asset_dir)

        # Create map for the shot, and create hierarchy of map. If the maps
        # already exist, we will use them.
        h_dir = hierarchy_dir_list[0]
        h_asset = hierarchy_parts[0]
        master_level = f"{h_dir}/{h_asset}_map.{h_asset}_map"
        if not EditorAssetLibrary.does_asset_exist(master_level):
            EditorLevelLibrary.new_level(f"{h_dir}/{h_asset}_map")

        level = (
            f"{asset_dir}/{folder_name}_map_camera.{folder_name}_map_camera"
        )
        if not EditorAssetLibrary.does_asset_exist(level):
            EditorLevelLibrary.new_level(
                f"{asset_dir}/{folder_name}_map_camera"
            )

        EditorLevelLibrary.load_level(master_level)
        EditorLevelUtils.add_level_to_world(
            EditorLevelLibrary.get_editor_world(),
            level,
            unreal.LevelStreamingDynamic
        )
        EditorLevelLibrary.save_all_dirty_levels()
        EditorLevelLibrary.load_level(level)

        # Get all the sequences in the hierarchy. It will create them, if
        # they don't exist.
        frame_ranges = []
        sequences = []
        for (h_dir, h) in zip(hierarchy_dir_list, hierarchy_parts):
            root_content = EditorAssetLibrary.list_assets(
                h_dir, recursive=False, include_folder=False)

            existing_sequences = [
                EditorAssetLibrary.find_asset_data(asset)
                for asset in root_content
                if EditorAssetLibrary.find_asset_data(
                    asset).get_class().get_name() == 'LevelSequence'
            ]

            if existing_sequences:
                for seq in existing_sequences:
                    sequences.append(seq.get_asset())
                    frame_ranges.append((
                        seq.get_asset().get_playback_start(),
                        seq.get_asset().get_playback_end()))
            else:
                sequence, frame_range = generate_sequence(h, h_dir)

                sequences.append(sequence)
                frame_ranges.append(frame_range)

        EditorAssetLibrary.make_directory(asset_dir)

        cam_seq = tools.create_asset(
            asset_name=f"{folder_name}_camera",
            package_path=asset_dir,
            asset_class=unreal.LevelSequence,
            factory=unreal.LevelSequenceFactoryNew()
        )

        # Add sequences data to hierarchy
        for i in range(len(sequences) - 1):
            set_sequence_hierarchy(
                sequences[i], sequences[i + 1],
                frame_ranges[i][1],
                frame_ranges[i + 1][0], frame_ranges[i + 1][1],
                [level])

        clip_in = folder_attributes.get("clipIn")
        clip_out = folder_attributes.get("clipOut")

        cam_seq.set_display_rate(
            unreal.FrameRate(folder_attributes.get("fps"), 1.0))
        cam_seq.set_playback_start(clip_in)
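        # The playback end frame is exclusive, so extend it by one frame to
        # keep clip_out as the last played frame.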
        cam_seq.set_playback_end(clip_out + 1)
        set_sequence_hierarchy(
            sequences[-1], cam_seq,
            frame_ranges[-1][1],
            clip_in, clip_out,
            [level])

        settings = unreal.MovieSceneUserImportFBXSettings()
        settings.set_editor_property('reduce_keys', False)

        if cam_seq:
            path = self.filepath_from_context(context)
            self._import_camera(
                EditorLevelLibrary.get_editor_world(),
                cam_seq,
                cam_seq.get_bindings(),
                settings,
                path
            )

        # Set range of all sections
        # Changing the range of the section is not enough. We need to change
        # the frame of all the keys in the section.
        for possessable in cam_seq.get_possessables():
            for tracks in possessable.get_tracks():
                for section in tracks.get_sections():
                    section.set_range(clip_in, clip_out + 1)
                    for channel in section.get_all_channels():
                        for key in channel.get_keys():
                            old_time = key.get_time().get_editor_property(
                                'frame_number')
                            old_time_value = old_time.get_editor_property(
                                'value')
                            new_time = old_time_value + (
                                clip_in - folder_attributes.get('frameStart')
                            )
                            key.set_time(unreal.FrameNumber(value=new_time))

        # Create Asset Container
        create_container(
            container=container_name, path=asset_dir)

        product_type = context["product"]["productType"]
        data = {
            "schema": "ayon:container-2.0",
            "id": AYON_CONTAINER_ID,
            "folder_path": folder_path,
            "namespace": asset_dir,
            "container_name": container_name,
            "asset_name": asset_name,
            "loader": str(self.__class__.__name__),
            "representation": context["representation"]["id"],
            "parent": context["representation"]["versionId"],
            "product_type": product_type,
            # TODO these should probably be removed
            "asset": folder_name,
            "family": product_type,
        }
        imprint(f"{asset_dir}/{container_name}", data)

        EditorLevelLibrary.save_all_dirty_levels()
        EditorLevelLibrary.load_level(master_level)

        # Save all assets in the hierarchy
        asset_content = EditorAssetLibrary.list_assets(
            hierarchy_dir_list[0], recursive=True, include_folder=False
        )

        for a in asset_content:
            EditorAssetLibrary.save_asset(a)

        return asset_content

    def update(self, container, context):
        ar = unreal.AssetRegistryHelpers.get_asset_registry()

        curr_level_sequence = LevelSequenceLib.get_current_level_sequence()
        curr_time = LevelSequenceLib.get_current_time()
        is_cam_lock = LevelSequenceLib.is_camera_cut_locked_to_viewport()

        editor_subsystem = unreal.UnrealEditorSubsystem()
        vp_loc, vp_rot = editor_subsystem.get_level_viewport_camera_info()

        asset_dir = container.get('namespace')

        EditorLevelLibrary.save_current_level()

        _filter = unreal.ARFilter(
            class_names=["LevelSequence"],
            package_paths=[asset_dir],
            recursive_paths=False)
        sequences = ar.get_assets(_filter)
        _filter = unreal.ARFilter(
            class_names=["World"],
            package_paths=[asset_dir],
            recursive_paths=True)
        maps = ar.get_assets(_filter)

        # There should be only one map in the list
        EditorLevelLibrary.load_level(maps[0].get_asset().get_path_name())

        level_sequence = sequences[0].get_asset()

        display_rate = level_sequence.get_display_rate()
        playback_start = level_sequence.get_playback_start()
        playback_end = level_sequence.get_playback_end()

        sequence_name = f"{container.get('asset')}_camera"

        # Get the actors in the level sequence.
        objs = unreal.SequencerTools.get_bound_objects(
            unreal.EditorLevelLibrary.get_editor_world(),
            level_sequence,
            level_sequence.get_bindings(),
            unreal.SequencerScriptingRange(
                has_start_value=True,
                has_end_value=True,
                inclusive_start=level_sequence.get_playback_start(),
                exclusive_end=level_sequence.get_playback_end()
            )
        )

        # Delete actors from the map
        for o in objs:
            if o.bound_objects[0].get_class().get_name() == "CineCameraActor":
                actor_path = o.bound_objects[0].get_path_name().split(":")[-1]
                actor = EditorLevelLibrary.get_actor_reference(actor_path)
                EditorLevelLibrary.destroy_actor(actor)

        # Remove the Level Sequence from the parent.
        # We need to traverse the hierarchy from the master sequence to find
        # the level sequence.
        root = "/Game/Ayon"
        namespace = container.get('namespace').replace(f"{root}/", "")
        ms_asset = namespace.split('/')[0]
        _filter = unreal.ARFilter(
            class_names=["LevelSequence"],
            package_paths=[f"{root}/{ms_asset}"],
            recursive_paths=False)
        sequences = ar.get_assets(_filter)
        master_sequence = sequences[0].get_asset()
        _filter = unreal.ARFilter(
            class_names=["World"],
            package_paths=[f"{root}/{ms_asset}"],
            recursive_paths=False)
        levels = ar.get_assets(_filter)
        master_level = levels[0].get_asset().get_path_name()

        sequences = [master_sequence]

        parent = None
        sub_scene = None
        for s in sequences:
            tracks = s.get_master_tracks()
            subscene_track = None
            for t in tracks:
                if t.get_class() == unreal.MovieSceneSubTrack.static_class():
                    subscene_track = t
            if subscene_track:
                sections = subscene_track.get_sections()
                for ss in sections:
                    if ss.get_sequence().get_name() == sequence_name:
                        parent = s
                        sub_scene = ss
                        break
                    sequences.append(ss.get_sequence())
                for i, ss in enumerate(sections):
                    ss.set_row_index(i)
            if parent:
                break

        assert parent, "Could not find the parent sequence"

        EditorAssetLibrary.delete_asset(level_sequence.get_path_name())

        settings = unreal.MovieSceneUserImportFBXSettings()
        settings.set_editor_property('reduce_keys', False)

        tools = unreal.AssetToolsHelpers().get_asset_tools()
        new_sequence = tools.create_asset(
            asset_name=sequence_name,
            package_path=asset_dir,
            asset_class=unreal.LevelSequence,
            factory=unreal.LevelSequenceFactoryNew()
        )

        new_sequence.set_display_rate(display_rate)
        new_sequence.set_playback_start(playback_start)
        new_sequence.set_playback_end(playback_end)
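
        # Re-point the parent's subscene section at the rebuilt sequence.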
        sub_scene.set_sequence(new_sequence)

        repre_entity = context["representation"]
        repre_path = get_representation_path(repre_entity)
        self._import_camera(
            EditorLevelLibrary.get_editor_world(),
            new_sequence,
            new_sequence.get_bindings(),
            settings,
            repre_path
        )

        # Set range of all sections
        # Changing the range of the section is not enough. We need to change
        # the frame of all the keys in the section.
        project_name = get_current_project_name()
        folder_path = container.get("folder_path")
        if folder_path is None:
            folder_path = container.get("asset")
        folder_entity = ayon_api.get_folder_by_path(project_name, folder_path)
        folder_attributes = folder_entity["attrib"]

        clip_in = folder_attributes["clipIn"]
        clip_out = folder_attributes["clipOut"]
        frame_start = folder_attributes["frameStart"]
        for possessable in new_sequence.get_possessables():
            for tracks in possessable.get_tracks():
                for section in tracks.get_sections():
                    section.set_range(clip_in, clip_out + 1)
                    for channel in section.get_all_channels():
                        for key in channel.get_keys():
                            old_time = key.get_time().get_editor_property(
                                'frame_number')
                            old_time_value = old_time.get_editor_property(
                                'value')
                            new_time = old_time_value + (
                                clip_in - frame_start
                            )
                            key.set_time(unreal.FrameNumber(value=new_time))

        data = {
            "representation": repre_entity["id"],
            "parent": repre_entity["versionId"],
        }
        imprint(f"{asset_dir}/{container.get('container_name')}", data)

        EditorLevelLibrary.save_current_level()

        asset_content = EditorAssetLibrary.list_assets(
            f"{root}/{ms_asset}", recursive=True, include_folder=False)

        for a in asset_content:
            EditorAssetLibrary.save_asset(a)

        EditorLevelLibrary.load_level(master_level)

        if curr_level_sequence:
            LevelSequenceLib.open_level_sequence(curr_level_sequence)
            LevelSequenceLib.set_current_time(curr_time)
            LevelSequenceLib.set_lock_camera_cut_to_viewport(is_cam_lock)

        editor_subsystem.set_level_viewport_camera_info(vp_loc, vp_rot)

    def remove(self, container):
        asset_dir = container.get('namespace')
        path = Path(asset_dir)

        ar = unreal.AssetRegistryHelpers.get_asset_registry()
        _filter = unreal.ARFilter(
            class_names=["LevelSequence"],
            package_paths=[asset_dir],
            recursive_paths=False)
        sequences = ar.get_assets(_filter)

        if not sequences:
            raise Exception("Could not find sequence.")

        world = ar.get_asset_by_object_path(
            EditorLevelLibrary.get_editor_world().get_path_name())

        _filter = unreal.ARFilter(
            class_names=["World"],
            package_paths=[asset_dir],
            recursive_paths=True)
        maps = ar.get_assets(_filter)

        # There should be only one map in the list
        if not maps:
            raise Exception("Could not find map.")

        map = maps[0]

        EditorLevelLibrary.save_all_dirty_levels()
        EditorLevelLibrary.load_level(map.get_asset().get_path_name())

        # Remove the camera from the level.
        actors = EditorLevelLibrary.get_all_level_actors()

        for a in actors:
            if a.__class__ == unreal.CineCameraActor:
                EditorLevelLibrary.destroy_actor(a)

        EditorLevelLibrary.save_all_dirty_levels()
        EditorLevelLibrary.load_level(world.get_asset().get_path_name())

        # There should be only one sequence in the path.
        sequence_name = sequences[0].asset_name

        # Remove the Level Sequence from the parent.
        # We need to traverse the hierarchy from the master sequence to find
        # the level sequence.
        root = "/Game/Ayon"
        namespace = container.get('namespace').replace(f"{root}/", "")
        ms_asset = namespace.split('/')[0]
        _filter = unreal.ARFilter(
            class_names=["LevelSequence"],
            package_paths=[f"{root}/{ms_asset}"],
            recursive_paths=False)
        sequences = ar.get_assets(_filter)
        master_sequence = sequences[0].get_asset()
        _filter = unreal.ARFilter(
            class_names=["World"],
            package_paths=[f"{root}/{ms_asset}"],
            recursive_paths=False)
        levels = ar.get_assets(_filter)
        master_level = levels[0].get_full_name()

        sequences = [master_sequence]

        parent = None
        for s in sequences:
            tracks = s.get_master_tracks()
            subscene_track = None
            visibility_track = None
            for t in tracks:
                if t.get_class() == unreal.MovieSceneSubTrack.static_class():
                    subscene_track = t
                if (t.get_class() ==
                        unreal.MovieSceneLevelVisibilityTrack.static_class()):
                    visibility_track = t
            if subscene_track:
                sections = subscene_track.get_sections()
                for ss in sections:
                    if ss.get_sequence().get_name() == sequence_name:
                        parent = s
                        subscene_track.remove_section(ss)
                        break
                    sequences.append(ss.get_sequence())
                # Update subscenes indexes.
                for i, ss in enumerate(sections):
                    ss.set_row_index(i)

            if visibility_track:
                sections = visibility_track.get_sections()
                for ss in sections:
                    if (unreal.Name(f"{container.get('asset')}_map_camera")
                            in ss.get_level_names()):
                        visibility_track.remove_section(ss)
                # Update visibility sections indexes.
                i = -1
                prev_name = []
                for ss in sections:
                    if prev_name != ss.get_level_names():
                        i += 1
                    ss.set_row_index(i)
                    prev_name = ss.get_level_names()
            if parent:
                break

        assert parent, "Could not find the parent sequence"

        # Create a temporary level to delete the layout level.
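        # A level that is currently loaded cannot be deleted, so a temporary
        # map is loaded before removing the old one.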
        EditorLevelLibrary.save_all_dirty_levels()
        EditorAssetLibrary.make_directory(f"{root}/tmp")
        tmp_level = f"{root}/tmp/temp_map"
        if not EditorAssetLibrary.does_asset_exist(f"{tmp_level}.temp_map"):
            EditorLevelLibrary.new_level(tmp_level)
        else:
            EditorLevelLibrary.load_level(tmp_level)

        # Delete the layout directory.
        EditorAssetLibrary.delete_directory(asset_dir)

        EditorLevelLibrary.load_level(master_level)
        EditorAssetLibrary.delete_directory(f"{root}/tmp")

        # Check if there are any more assets in the parent folder, and
        # delete it if not.
        asset_content = EditorAssetLibrary.list_assets(
            path.parent.as_posix(), recursive=False, include_folder=True
        )

        if len(asset_content) == 0:
            EditorAssetLibrary.delete_directory(path.parent.as_posix())

@@ -1,251 +0,0 @@

# -*- coding: utf-8 -*-
"""Loader for published alembics."""
import os

from ayon_core.pipeline import (
    get_representation_path,
    AYON_CONTAINER_ID
)
from ayon_unreal.api import plugin
from ayon_unreal.api.pipeline import (
    AYON_ASSET_DIR,
    create_container,
    imprint,
)

import unreal  # noqa


class PointCacheAlembicLoader(plugin.Loader):
    """Load Point Cache from Alembic"""

    product_types = {"model", "pointcache"}
    label = "Import Alembic Point Cache"
    representations = {"abc"}
    icon = "cube"
    color = "orange"

    root = AYON_ASSET_DIR

    @staticmethod
    def get_task(
        filename, asset_dir, asset_name, replace,
        frame_start=None, frame_end=None
    ):
        task = unreal.AssetImportTask()
        options = unreal.AbcImportSettings()
        gc_settings = unreal.AbcGeometryCacheSettings()
        conversion_settings = unreal.AbcConversionSettings()
        sampling_settings = unreal.AbcSamplingSettings()

        task.set_editor_property('filename', filename)
        task.set_editor_property('destination_path', asset_dir)
        task.set_editor_property('destination_name', asset_name)
        task.set_editor_property('replace_existing', replace)
        task.set_editor_property('automated', True)
        task.set_editor_property('save', True)

        options.set_editor_property(
            'import_type', unreal.AlembicImportType.GEOMETRY_CACHE)

        gc_settings.set_editor_property('flatten_tracks', False)
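
        # Scale by 100 and rotate to map a Y-up, meter-based Alembic export
        # into Unreal's Z-up, centimeter space (rationale inferred from the
        # values used below).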
        conversion_settings.set_editor_property('flip_u', False)
        conversion_settings.set_editor_property('flip_v', True)
        conversion_settings.set_editor_property(
            'scale', unreal.Vector(x=100.0, y=100.0, z=100.0))
        conversion_settings.set_editor_property(
            'rotation', unreal.Vector(x=-90.0, y=0.0, z=180.0))

        if frame_start is not None:
            sampling_settings.set_editor_property('frame_start', frame_start)
        if frame_end is not None:
            sampling_settings.set_editor_property('frame_end', frame_end)

        options.geometry_cache_settings = gc_settings
        options.conversion_settings = conversion_settings
        options.sampling_settings = sampling_settings
        task.options = options

        return task

    def import_and_containerize(
        self, filepath, asset_dir, asset_name, container_name,
        frame_start, frame_end
    ):
        unreal.EditorAssetLibrary.make_directory(asset_dir)

        task = self.get_task(
            filepath, asset_dir, asset_name, False, frame_start, frame_end)

        unreal.AssetToolsHelpers.get_asset_tools().import_asset_tasks([task])

        # Create Asset Container
        create_container(container=container_name, path=asset_dir)

    def imprint(
        self,
        folder_path,
        asset_dir,
        container_name,
        asset_name,
        representation,
        frame_start,
        frame_end,
        product_type,
    ):
        data = {
            "schema": "ayon:container-2.0",
            "id": AYON_CONTAINER_ID,
            "namespace": asset_dir,
            "container_name": container_name,
            "asset_name": asset_name,
            "loader": str(self.__class__.__name__),
            "representation": representation["id"],
            "parent": representation["versionId"],
            "frame_start": frame_start,
            "frame_end": frame_end,
            "product_type": product_type,
            "folder_path": folder_path,
            # TODO these should probably be removed
            "family": product_type,
            "asset": folder_path,
        }
        imprint(f"{asset_dir}/{container_name}", data)

    def load(self, context, name, namespace, options):
        """Load and containerise representation into Content Browser.

        Args:
            context (dict): application context
            name (str): Product name
            namespace (str): in Unreal this is basically path to container.
                This is not passed here, so namespace is set
                by `containerise()` because only then we know
                real path.
            options (dict): Those would be data to be imprinted.

        Returns:
            list(str): list of container content
        """
        # Create directory for asset and Ayon container
        folder_entity = context["folder"]
        folder_path = folder_entity["path"]
        folder_name = folder_entity["name"]
        folder_attributes = folder_entity["attrib"]

        suffix = "_CON"
        asset_name = f"{folder_name}_{name}" if folder_name else f"{name}"
        version = context["version"]["version"]
        # Check if version is hero version and use different name
        if version < 0:
            name_version = f"{name}_hero"
        else:
            name_version = f"{name}_v{version:03d}"

        tools = unreal.AssetToolsHelpers().get_asset_tools()
        asset_dir, container_name = tools.create_unique_asset_name(
            f"{self.root}/{folder_name}/{name_version}", suffix="")

        container_name += suffix

        frame_start = folder_attributes.get("frameStart")
        frame_end = folder_attributes.get("frameEnd")

        # If frame start and end are the same, we increase the end frame by
        # one, otherwise Unreal will not import it
        if frame_start == frame_end:
            frame_end += 1

        if not unreal.EditorAssetLibrary.does_directory_exist(asset_dir):
            path = self.filepath_from_context(context)

            self.import_and_containerize(
                path, asset_dir, asset_name, container_name,
                frame_start, frame_end)

        self.imprint(
            folder_path,
            asset_dir,
            container_name,
            asset_name,
            context["representation"],
            frame_start,
            frame_end,
            context["product"]["productType"]
        )

        asset_content = unreal.EditorAssetLibrary.list_assets(
            asset_dir, recursive=True, include_folder=True
        )

        for a in asset_content:
            unreal.EditorAssetLibrary.save_asset(a)

        return asset_content

    def update(self, container, context):
        # Create directory for folder and Ayon container
        folder_path = context["folder"]["path"]
        folder_name = context["folder"]["name"]
        product_name = context["product"]["name"]
        product_type = context["product"]["productType"]
        version = context["version"]["version"]
        repre_entity = context["representation"]

        suffix = "_CON"
        asset_name = product_name
        if folder_name:
            asset_name = f"{folder_name}_{product_name}"

        # Check if version is hero version and use different name
        if version < 0:
            name_version = f"{product_name}_hero"
        else:
            name_version = f"{product_name}_v{version:03d}"
        tools = unreal.AssetToolsHelpers().get_asset_tools()
        asset_dir, container_name = tools.create_unique_asset_name(
            f"{self.root}/{folder_name}/{name_version}", suffix="")

        container_name += suffix

        frame_start = int(container.get("frame_start"))
        frame_end = int(container.get("frame_end"))

        if not unreal.EditorAssetLibrary.does_directory_exist(asset_dir):
            path = get_representation_path(repre_entity)

            self.import_and_containerize(
                path, asset_dir, asset_name, container_name,
                frame_start, frame_end)

        self.imprint(
            folder_path,
            asset_dir,
            container_name,
            asset_name,
            repre_entity,
            frame_start,
            frame_end,
            product_type
        )

        asset_content = unreal.EditorAssetLibrary.list_assets(
            asset_dir, recursive=True, include_folder=False
        )

        for a in asset_content:
            unreal.EditorAssetLibrary.save_asset(a)

    def remove(self, container):
        path = container["namespace"]
        parent_path = os.path.dirname(path)

        unreal.EditorAssetLibrary.delete_directory(path)

        asset_content = unreal.EditorAssetLibrary.list_assets(
            parent_path, recursive=False
        )

        if len(asset_content) == 0:
            unreal.EditorAssetLibrary.delete_directory(parent_path)

@@ -1,916 +0,0 @@

# -*- coding: utf-8 -*-
"""Loader for layouts."""
import json
import collections
from pathlib import Path

import unreal
from unreal import (
    EditorAssetLibrary,
    EditorLevelLibrary,
    EditorLevelUtils,
    AssetToolsHelpers,
    FBXImportType,
    MovieSceneLevelVisibilityTrack,
    MovieSceneSubTrack,
    LevelSequenceEditorBlueprintLibrary as LevelSequenceLib,
)
import ayon_api

from ayon_core.pipeline import (
    discover_loader_plugins,
    loaders_from_representation,
    load_container,
    get_representation_path,
    AYON_CONTAINER_ID,
    get_current_project_name,
)
from ayon_core.pipeline.context_tools import get_current_folder_entity
from ayon_core.settings import get_current_project_settings
from ayon_unreal.api import plugin
from ayon_unreal.api.pipeline import (
    generate_sequence,
    set_sequence_hierarchy,
    create_container,
    imprint,
    ls,
)


class LayoutLoader(plugin.Loader):
    """Load Layout from a JSON file"""

    product_types = {"layout"}
    representations = {"json"}

    label = "Load Layout"
    icon = "code-fork"
    color = "orange"
    ASSET_ROOT = "/Game/Ayon"

    def _get_asset_containers(self, path):
        ar = unreal.AssetRegistryHelpers.get_asset_registry()

        asset_content = EditorAssetLibrary.list_assets(
            path, recursive=True)

        asset_containers = []

        # Get all the asset containers
        for a in asset_content:
            obj = ar.get_asset_by_object_path(a)
            if obj.get_asset().get_class().get_name() == 'AyonAssetContainer':
                asset_containers.append(obj)

        return asset_containers

    @staticmethod
    def _get_fbx_loader(loaders, family):
        name = ""
        if family == 'rig':
            name = "SkeletalMeshFBXLoader"
        elif family == 'model':
            name = "StaticMeshFBXLoader"
        elif family == 'camera':
            name = "CameraLoader"

        if name == "":
            return None

        for loader in loaders:
            if loader.__name__ == name:
                return loader

        return None

    @staticmethod
    def _get_abc_loader(loaders, family):
        name = ""
        if family == 'rig':
            name = "SkeletalMeshAlembicLoader"
        elif family == 'model':
            name = "StaticMeshAlembicLoader"

        if name == "":
            return None

        for loader in loaders:
            if loader.__name__ == name:
                return loader

        return None

    def _transform_from_basis(self, transform, basis):
        """Transform a transform from a basis to a new basis."""
        # Get the basis matrix
        basis_matrix = unreal.Matrix(
            basis[0],
            basis[1],
            basis[2],
            basis[3]
        )
        transform_matrix = unreal.Matrix(
            transform[0],
            transform[1],
            transform[2],
            transform[3]
        )
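
        # Conjugate by the basis matrix (B^-1 * T * B) to express the
        # transform in the coordinate system described by the basis.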
        new_transform = (
            basis_matrix.get_inverse() * transform_matrix * basis_matrix)

        return new_transform.transform()

    def _process_family(
        self, assets, class_name, transform, basis, sequence, inst_name=None
    ):
        ar = unreal.AssetRegistryHelpers.get_asset_registry()

        actors = []
        bindings = []

        for asset in assets:
            obj = ar.get_asset_by_object_path(asset).get_asset()
            if obj.get_class().get_name() == class_name:
                t = self._transform_from_basis(transform, basis)
                actor = EditorLevelLibrary.spawn_actor_from_object(
                    obj, t.translation
                )
                actor.set_actor_rotation(t.rotation.rotator(), False)
                actor.set_actor_scale3d(t.scale3d)

                if class_name == 'SkeletalMesh':
                    skm_comp = actor.get_editor_property(
                        'skeletal_mesh_component')
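                    # Enlarged bounds presumably avoid the mesh being culled
                    # when its animation moves far from the actor's origin.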
                    skm_comp.set_bounds_scale(10.0)

                actors.append(actor)

                if sequence:
                    binding = None
                    for p in sequence.get_possessables():
                        if p.get_name() == actor.get_name():
                            binding = p
                            break

                    if not binding:
                        binding = sequence.add_possessable(actor)

                    bindings.append(binding)

        return actors, bindings

    def _import_animation(
        self, asset_dir, path, instance_name, skeleton, actors_dict,
        animation_file, bindings_dict, sequence
    ):
        anim_file = Path(animation_file)
        anim_file_name = anim_file.with_suffix('')

        anim_path = f"{asset_dir}/animations/{anim_file_name}"

        folder_entity = get_current_folder_entity()
        # Import animation
        task = unreal.AssetImportTask()
        task.options = unreal.FbxImportUI()

        task.set_editor_property(
            'filename', str(path.with_suffix(f".{animation_file}")))
        task.set_editor_property('destination_path', anim_path)
        task.set_editor_property(
            'destination_name', f"{instance_name}_animation")
        task.set_editor_property('replace_existing', False)
        task.set_editor_property('automated', True)
        task.set_editor_property('save', False)

        # set import options here
        task.options.set_editor_property(
            'automated_import_should_detect_type', False)
        task.options.set_editor_property(
            'original_import_type', FBXImportType.FBXIT_SKELETAL_MESH)
        task.options.set_editor_property(
            'mesh_type_to_import', FBXImportType.FBXIT_ANIMATION)
        task.options.set_editor_property('import_mesh', False)
        task.options.set_editor_property('import_animations', True)
        task.options.set_editor_property('override_full_name', True)
        task.options.set_editor_property('skeleton', skeleton)

        task.options.anim_sequence_import_data.set_editor_property(
            'animation_length',
            unreal.FBXAnimationLengthImportType.FBXALIT_EXPORTED_TIME
        )
        task.options.anim_sequence_import_data.set_editor_property(
            'import_meshes_in_bone_hierarchy', False)
        task.options.anim_sequence_import_data.set_editor_property(
            'use_default_sample_rate', False)
        task.options.anim_sequence_import_data.set_editor_property(
            'custom_sample_rate', folder_entity.get("attrib", {}).get("fps"))
        task.options.anim_sequence_import_data.set_editor_property(
            'import_custom_attribute', True)
        task.options.anim_sequence_import_data.set_editor_property(
            'import_bone_tracks', True)
        task.options.anim_sequence_import_data.set_editor_property(
            'remove_redundant_keys', False)
        task.options.anim_sequence_import_data.set_editor_property(
            'convert_scene', True)

        AssetToolsHelpers.get_asset_tools().import_asset_tasks([task])

        asset_content = unreal.EditorAssetLibrary.list_assets(
            anim_path, recursive=False, include_folder=False
        )

        animation = None

        for a in asset_content:
            unreal.EditorAssetLibrary.save_asset(a)
            imported_asset_data = unreal.EditorAssetLibrary.find_asset_data(a)
            imported_asset = unreal.AssetRegistryHelpers.get_asset(
                imported_asset_data)
            if imported_asset.__class__ == unreal.AnimSequence:
                animation = imported_asset
                break

        if animation:
            actor = None
            if actors_dict.get(instance_name):
                for a in actors_dict.get(instance_name):
                    if a.get_class().get_name() == 'SkeletalMeshActor':
                        actor = a
                        break

            animation.set_editor_property('enable_root_motion', True)
            actor.skeletal_mesh_component.set_editor_property(
                'animation_mode', unreal.AnimationMode.ANIMATION_SINGLE_NODE)
            actor.skeletal_mesh_component.animation_data.set_editor_property(
                'anim_to_play', animation)

            if sequence:
                # Add animation to the sequencer
                bindings = bindings_dict.get(instance_name)

                ar = unreal.AssetRegistryHelpers.get_asset_registry()

                for binding in bindings:
                    tracks = binding.get_tracks()
                    track = None
                    track = tracks[0] if tracks else binding.add_track(
                        unreal.MovieSceneSkeletalAnimationTrack)

                    sections = track.get_sections()
                    section = None
                    if not sections:
                        section = track.add_section()
                    else:
                        section = sections[0]

                        sec_params = section.get_editor_property('params')
                        curr_anim = sec_params.get_editor_property('animation')

                        if curr_anim:
                            # Checks if the animation path has a container.
                            # If it does, it means that the animation is
                            # already in the sequencer.
                            anim_path = str(Path(
                                curr_anim.get_path_name()).parent
                            ).replace('\\', '/')

                            _filter = unreal.ARFilter(
                                class_names=["AyonAssetContainer"],
                                package_paths=[anim_path],
                                recursive_paths=False)
                            containers = ar.get_assets(_filter)

                            if len(containers) > 0:
                                return

                    section.set_range(
                        sequence.get_playback_start(),
                        sequence.get_playback_end())
                    sec_params = section.get_editor_property('params')
                    sec_params.set_editor_property('animation', animation)

    def _get_repre_entities_by_version_id(self, data):
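        # Collect all version ids first so the matching representations can
        # be fetched from the server in a single query instead of one query
        # per element.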
version_ids = {
|
||||
element.get("version")
|
||||
for element in data
|
||||
if element.get("representation")
|
||||
}
|
||||
version_ids.discard(None)
|
||||
|
||||
output = collections.defaultdict(list)
|
||||
if not version_ids:
|
||||
return output
|
||||
|
||||
project_name = get_current_project_name()
|
||||
repre_entities = ayon_api.get_representations(
|
||||
project_name,
|
||||
representation_names={"fbx", "abc"},
|
||||
version_ids=version_ids,
|
||||
fields={"id", "versionId", "name"}
|
||||
)
|
||||
for repre_entity in repre_entities:
|
||||
version_id = repre_entity["versionId"]
|
||||
output[version_id].append(repre_entity)
|
||||
return output
|
||||
|
||||
def _process(self, lib_path, asset_dir, sequence, repr_loaded=None):
|
||||
ar = unreal.AssetRegistryHelpers.get_asset_registry()
|
||||
|
||||
with open(lib_path, "r") as fp:
|
||||
data = json.load(fp)
|
||||
|
||||
all_loaders = discover_loader_plugins()
|
||||
|
||||
if not repr_loaded:
|
||||
repr_loaded = []
|
||||
|
||||
path = Path(lib_path)
|
||||
|
||||
skeleton_dict = {}
|
||||
actors_dict = {}
|
||||
bindings_dict = {}
|
||||
|
||||
loaded_assets = []
|
||||
|
||||
repre_entities_by_version_id = self._get_repre_entities_by_version_id(
|
||||
data
|
||||
)
|
||||
for element in data:
|
||||
repre_id = None
|
||||
repr_format = None
|
||||
if element.get('representation'):
|
||||
version_id = element.get("version")
|
||||
repre_entities = repre_entities_by_version_id[version_id]
|
||||
if not repre_entities:
|
||||
self.log.error(
|
||||
f"No valid representation found for version"
|
||||
f" {version_id}")
|
||||
continue
|
||||
repre_entity = repre_entities[0]
|
||||
repre_id = repre_entity["id"]
|
||||
repr_format = repre_entity["name"]
|
||||
|
||||
# This is to keep compatibility with old versions of the
|
||||
# json format.
|
||||
elif element.get('reference_fbx'):
|
||||
repre_id = element.get('reference_fbx')
|
||||
repr_format = 'fbx'
|
||||
elif element.get('reference_abc'):
|
||||
repre_id = element.get('reference_abc')
|
||||
repr_format = 'abc'
|
||||
|
||||
# If reference is None, this element is skipped, as it cannot be
|
||||
# imported in Unreal
|
||||
if not repre_id:
|
||||
continue
|
||||
|
||||
instance_name = element.get('instance_name')
|
||||
|
||||
skeleton = None
|
||||
|
||||
if repre_id not in repr_loaded:
|
||||
repr_loaded.append(repre_id)
|
||||
|
||||
product_type = element.get("product_type")
|
||||
if product_type is None:
|
||||
product_type = element.get("family")
|
||||
loaders = loaders_from_representation(
|
||||
all_loaders, repre_id)
|
||||
|
||||
loader = None
|
||||
|
||||
if repr_format == 'fbx':
|
||||
loader = self._get_fbx_loader(loaders, product_type)
|
||||
elif repr_format == 'abc':
|
||||
loader = self._get_abc_loader(loaders, product_type)
|
||||
|
||||
if not loader:
|
||||
self.log.error(
|
||||
f"No valid loader found for {repre_id}")
|
||||
continue
|
||||
|
||||
options = {
|
||||
# "asset_dir": asset_dir
|
||||
}
|
||||
|
||||
assets = load_container(
|
||||
loader,
|
||||
repre_id,
|
||||
namespace=instance_name,
|
||||
options=options
|
||||
)
|
||||
|
||||
container = None
|
||||
|
||||
for asset in assets:
|
||||
obj = ar.get_asset_by_object_path(asset).get_asset()
|
||||
if obj.get_class().get_name() == 'AyonAssetContainer':
|
||||
container = obj
|
||||
if obj.get_class().get_name() == 'Skeleton':
|
||||
skeleton = obj
|
||||
|
||||
loaded_assets.append(container.get_path_name())
|
||||
|
||||
instances = [
|
||||
item for item in data
|
||||
if ((item.get('version') and
|
||||
item.get('version') == element.get('version')) or
|
||||
item.get('reference_fbx') == repre_id or
|
||||
item.get('reference_abc') == repre_id)]

                for instance in instances:
                    # transform = instance.get('transform')
                    transform = instance.get('transform_matrix')
                    basis = instance.get('basis')
                    inst = instance.get('instance_name')

                    actors = []

                    if product_type == 'model':
                        actors, _ = self._process_family(
                            assets, 'StaticMesh', transform, basis,
                            sequence, inst
                        )
                    elif product_type == 'rig':
                        actors, bindings = self._process_family(
                            assets, 'SkeletalMesh', transform, basis,
                            sequence, inst
                        )
                        actors_dict[inst] = actors
                        bindings_dict[inst] = bindings

                if skeleton:
                    skeleton_dict[repre_id] = skeleton
            else:
                skeleton = skeleton_dict.get(repre_id)

            animation_file = element.get('animation')

            if animation_file and skeleton:
                self._import_animation(
                    asset_dir, path, instance_name, skeleton, actors_dict,
                    animation_file, bindings_dict, sequence)

        return loaded_assets

    @staticmethod
    def _remove_family(assets, components, class_name, prop_name):
        ar = unreal.AssetRegistryHelpers.get_asset_registry()

        objects = []
        for a in assets:
            obj = ar.get_asset_by_object_path(a)
            if obj.get_asset().get_class().get_name() == class_name:
                objects.append(obj)
        for obj in objects:
            for comp in components:
                if comp.get_editor_property(prop_name) == obj.get_asset():
                    comp.get_owner().destroy_actor()

    def _remove_actors(self, path):
        asset_containers = self._get_asset_containers(path)

        # Get all the static and skeletal meshes components in the level
        components = EditorLevelLibrary.get_all_level_actors_components()
        static_meshes_comp = [
            c for c in components
            if c.get_class().get_name() == 'StaticMeshComponent']
        skel_meshes_comp = [
            c for c in components
            if c.get_class().get_name() == 'SkeletalMeshComponent']

        # For all the asset containers, get the static and skeletal meshes.
        # Then, check the components in the level and destroy the matching
        # actors.
        for asset_container in asset_containers:
            package_path = asset_container.get_editor_property('package_path')
            family = EditorAssetLibrary.get_metadata_tag(
                asset_container.get_asset(), "family")
            assets = EditorAssetLibrary.list_assets(
                str(package_path), recursive=False)
            if family == 'model':
                self._remove_family(
                    assets, static_meshes_comp, 'StaticMesh', 'static_mesh')
            elif family == 'rig':
                self._remove_family(
                    assets, skel_meshes_comp, 'SkeletalMesh', 'skeletal_mesh')

    def load(self, context, name, namespace, options):
        """Load and containerise representation into Content Browser.

        This is a two-step process. First, import the FBX to a temporary
        path, then call `containerise()` on it - this moves all content to a
        new directory, creates an AssetContainer there and imprints it with
        metadata. This marks the path as a container.

        Args:
            context (dict): application context
            name (str): Product name
            namespace (str): in Unreal this is basically path to container.
                This is not passed here, so the namespace is set by
                `containerise()`, because only then do we know the real path.
            options (dict): Data to be imprinted. Not used now; data are
                imprinted by `containerise()`.

        Returns:
            list(str): list of container content
        """
        data = get_current_project_settings()
        create_sequences = data["unreal"]["level_sequences_for_layouts"]

        # Create directory for asset and Ayon container
        folder_entity = context["folder"]
        folder_path = folder_entity["path"]
        hierarchy = folder_path.lstrip("/").split("/")
        # Remove folder name
        folder_name = hierarchy.pop(-1)
        root = self.ASSET_ROOT
        hierarchy_dir = root
        hierarchy_dir_list = []
        for h in hierarchy:
            hierarchy_dir = f"{hierarchy_dir}/{h}"
            hierarchy_dir_list.append(hierarchy_dir)
        suffix = "_CON"
        asset_name = f"{folder_name}_{name}" if folder_name else name

        tools = unreal.AssetToolsHelpers().get_asset_tools()
        asset_dir, container_name = tools.create_unique_asset_name(
            "{}/{}/{}".format(hierarchy_dir, folder_name, name),
            suffix=""
        )

        container_name += suffix

        EditorAssetLibrary.make_directory(asset_dir)

        master_level = None
        shot = None
        sequences = []

        level = f"{asset_dir}/{folder_name}_map.{folder_name}_map"
        EditorLevelLibrary.new_level(f"{asset_dir}/{folder_name}_map")

        if create_sequences:
            # Create map for the shot, and create hierarchy of map. If the
            # maps already exist, we will use them.
            if hierarchy:
                h_dir = hierarchy_dir_list[0]
                h_asset = hierarchy[0]
                master_level = f"{h_dir}/{h_asset}_map.{h_asset}_map"
                if not EditorAssetLibrary.does_asset_exist(master_level):
                    EditorLevelLibrary.new_level(f"{h_dir}/{h_asset}_map")

            if master_level:
                EditorLevelLibrary.load_level(master_level)
                EditorLevelUtils.add_level_to_world(
                    EditorLevelLibrary.get_editor_world(),
                    level,
                    unreal.LevelStreamingDynamic
                )
                EditorLevelLibrary.save_all_dirty_levels()
                EditorLevelLibrary.load_level(level)

            # Get all the sequences in the hierarchy. It will create them, if
            # they don't exist.
            frame_ranges = []
            for (h_dir, h) in zip(hierarchy_dir_list, hierarchy):
                root_content = EditorAssetLibrary.list_assets(
                    h_dir, recursive=False, include_folder=False)

                existing_sequences = [
                    EditorAssetLibrary.find_asset_data(asset)
                    for asset in root_content
                    if EditorAssetLibrary.find_asset_data(
                        asset).get_class().get_name() == 'LevelSequence'
                ]

                if not existing_sequences:
                    sequence, frame_range = generate_sequence(h, h_dir)

                    sequences.append(sequence)
                    frame_ranges.append(frame_range)
                else:
                    for e in existing_sequences:
                        sequences.append(e.get_asset())
                        frame_ranges.append((
                            e.get_asset().get_playback_start(),
                            e.get_asset().get_playback_end()))

            shot = tools.create_asset(
                asset_name=folder_name,
                package_path=asset_dir,
                asset_class=unreal.LevelSequence,
                factory=unreal.LevelSequenceFactoryNew()
            )

            # sequences and frame_ranges have the same length
            for i in range(0, len(sequences) - 1):
                set_sequence_hierarchy(
                    sequences[i], sequences[i + 1],
                    frame_ranges[i][1],
                    frame_ranges[i + 1][0], frame_ranges[i + 1][1],
                    [level])
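            # Sketch of the chaining above, with hypothetical names: given
            # sequences [episode_seq, sq010_seq], the loop links
            # episode_seq -> sq010_seq; the last sequence is linked to the
            # shot itself further below, so playback cascades top-down.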

            project_name = get_current_project_name()
            folder_attributes = (
                ayon_api.get_folder_by_path(project_name, folder_path)["attrib"]
            )
            shot.set_display_rate(
                unreal.FrameRate(folder_attributes.get("fps"), 1.0))
            shot.set_playback_start(0)
            shot.set_playback_end(
                folder_attributes.get('clipOut')
                - folder_attributes.get('clipIn')
                + 1
            )
            if sequences:
                set_sequence_hierarchy(
                    sequences[-1],
                    shot,
                    frame_ranges[-1][1],
                    folder_attributes.get('clipIn'),
                    folder_attributes.get('clipOut'),
                    [level])

            EditorLevelLibrary.load_level(level)

        path = self.filepath_from_context(context)
        loaded_assets = self._process(path, asset_dir, shot)

        for s in sequences:
            EditorAssetLibrary.save_asset(s.get_path_name())

        EditorLevelLibrary.save_current_level()

        # Create Asset Container
        create_container(
            container=container_name, path=asset_dir)

        data = {
            "schema": "ayon:container-2.0",
            "id": AYON_CONTAINER_ID,
            "asset": folder_name,
            "folder_path": folder_path,
            "namespace": asset_dir,
            "container_name": container_name,
            "asset_name": asset_name,
            "loader": str(self.__class__.__name__),
            "representation": context["representation"]["id"],
            "parent": context["representation"]["versionId"],
            "family": context["product"]["productType"],
            "loaded_assets": loaded_assets
        }
        imprint(
            "{}/{}".format(asset_dir, container_name), data)

        save_dir = hierarchy_dir_list[0] if create_sequences else asset_dir

        asset_content = EditorAssetLibrary.list_assets(
            save_dir, recursive=True, include_folder=False)

        for a in asset_content:
            EditorAssetLibrary.save_asset(a)

        if master_level:
            EditorLevelLibrary.load_level(master_level)

        return asset_content

    def update(self, container, context):
        data = get_current_project_settings()
        create_sequences = data["unreal"]["level_sequences_for_layouts"]

        ar = unreal.AssetRegistryHelpers.get_asset_registry()

        curr_level_sequence = LevelSequenceLib.get_current_level_sequence()
        curr_time = LevelSequenceLib.get_current_time()
        is_cam_lock = LevelSequenceLib.is_camera_cut_locked_to_viewport()

        editor_subsystem = unreal.UnrealEditorSubsystem()
        vp_loc, vp_rot = editor_subsystem.get_level_viewport_camera_info()

        root = "/Game/Ayon"

        asset_dir = container.get('namespace')

        folder_entity = context["folder"]
        repre_entity = context["representation"]

        hierarchy = folder_entity["path"].lstrip("/").split("/")
        first_parent_name = hierarchy[0]

        sequence = None
        master_level = None

        if create_sequences:
            h_dir = f"{root}/{first_parent_name}"
            h_asset = first_parent_name
            master_level = f"{h_dir}/{h_asset}_map.{h_asset}_map"

            _filter = unreal.ARFilter(
                class_names=["LevelSequence"],
                package_paths=[asset_dir],
                recursive_paths=False)
            sequences = ar.get_assets(_filter)
            sequence = sequences[0].get_asset()

        prev_level = None

        if not master_level:
            curr_level = unreal.LevelEditorSubsystem().get_current_level()
            curr_level_path = curr_level.get_outer().get_path_name()
            # If the level path does not start with "/Game/", the current
            # level is a temporary, unsaved level.
            if curr_level_path.startswith("/Game/"):
                prev_level = curr_level_path

        # Get layout level
        _filter = unreal.ARFilter(
            class_names=["World"],
            package_paths=[asset_dir],
            recursive_paths=False)
        levels = ar.get_assets(_filter)

        layout_level = levels[0].get_asset().get_path_name()

        EditorLevelLibrary.save_all_dirty_levels()
        EditorLevelLibrary.load_level(layout_level)

        # Delete all the actors in the level
        actors = unreal.EditorLevelLibrary.get_all_level_actors()
        for actor in actors:
            unreal.EditorLevelLibrary.destroy_actor(actor)

        if create_sequences:
            EditorLevelLibrary.save_current_level()

        EditorAssetLibrary.delete_directory(f"{asset_dir}/animations/")

        source_path = get_representation_path(repre_entity)

        loaded_assets = self._process(source_path, asset_dir, sequence)

        data = {
            "representation": repre_entity["id"],
            "parent": repre_entity["versionId"],
            "loaded_assets": loaded_assets,
        }
        imprint(
            "{}/{}".format(asset_dir, container.get('container_name')), data)

        EditorLevelLibrary.save_current_level()

        save_dir = (
            f"{root}/{first_parent_name}" if create_sequences else asset_dir)

        asset_content = EditorAssetLibrary.list_assets(
            save_dir, recursive=True, include_folder=False)

        for a in asset_content:
            EditorAssetLibrary.save_asset(a)

        if master_level:
            EditorLevelLibrary.load_level(master_level)
        elif prev_level:
            EditorLevelLibrary.load_level(prev_level)

        if curr_level_sequence:
            LevelSequenceLib.open_level_sequence(curr_level_sequence)
            LevelSequenceLib.set_current_time(curr_time)
            LevelSequenceLib.set_lock_camera_cut_to_viewport(is_cam_lock)

        editor_subsystem.set_level_viewport_camera_info(vp_loc, vp_rot)

    def remove(self, container):
        """Delete the layout.

        First, check if the assets loaded with the layout are used by other
        layouts. If not, delete the assets.
        """
        data = get_current_project_settings()
        create_sequences = data["unreal"]["level_sequences_for_layouts"]

        root = "/Game/Ayon"
        path = Path(container.get("namespace"))

        containers = ls()
        layout_containers = [
            c for c in containers
            if (c.get('asset_name') != container.get('asset_name') and
                c.get('family') == "layout")]

        # Check if the assets have been loaded by other layouts, and delete
        # them if they haven't.
        # NOTE: 'loaded_assets' is imprinted as the string representation of
        # a Python list, hence the eval() to turn it back into a list.
        for asset in eval(container.get('loaded_assets')):
            layouts = [
                lc for lc in layout_containers
                if asset in lc.get('loaded_assets')]

            if not layouts:
                EditorAssetLibrary.delete_directory(str(Path(asset).parent))

                # Delete the parent folder if there aren't any more
                # layouts in it.
                asset_content = EditorAssetLibrary.list_assets(
                    str(Path(asset).parent.parent), recursive=False,
                    include_folder=True
                )

                if len(asset_content) == 0:
                    EditorAssetLibrary.delete_directory(
                        str(Path(asset).parent.parent))

        master_sequence = None
        master_level = None
        sequences = []

        if create_sequences:
            # Remove the Level Sequence from the parent.
            # We need to traverse the hierarchy from the master sequence to
            # find the level sequence.
            namespace = container.get('namespace').replace(f"{root}/", "")
            ms_asset = namespace.split('/')[0]
            ar = unreal.AssetRegistryHelpers.get_asset_registry()
            _filter = unreal.ARFilter(
                class_names=["LevelSequence"],
                package_paths=[f"{root}/{ms_asset}"],
                recursive_paths=False)
            sequences = ar.get_assets(_filter)
            master_sequence = sequences[0].get_asset()
            _filter = unreal.ARFilter(
                class_names=["World"],
                package_paths=[f"{root}/{ms_asset}"],
                recursive_paths=False)
            levels = ar.get_assets(_filter)
            master_level = levels[0].get_asset().get_path_name()

            sequences = [master_sequence]

            parent = None
            for s in sequences:
                tracks = s.get_master_tracks()
                subscene_track = None
                visibility_track = None
                for t in tracks:
                    if t.get_class() == MovieSceneSubTrack.static_class():
                        subscene_track = t
                    if (t.get_class() ==
                            MovieSceneLevelVisibilityTrack.static_class()):
                        visibility_track = t
                if subscene_track:
                    sections = subscene_track.get_sections()
                    for ss in sections:
                        if (ss.get_sequence().get_name() ==
                                container.get('asset')):
                            parent = s
                            subscene_track.remove_section(ss)
                            break
                        sequences.append(ss.get_sequence())
                    # Update subscenes indexes.
                    i = 0
                    for ss in sections:
                        ss.set_row_index(i)
                        i += 1
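                    # An equivalent, more idiomatic reindex (sketch):
                    #     for i, ss in enumerate(sections):
                    #         ss.set_row_index(i)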

                if visibility_track:
                    sections = visibility_track.get_sections()
                    for ss in sections:
                        if (unreal.Name(f"{container.get('asset')}_map")
                                in ss.get_level_names()):
                            visibility_track.remove_section(ss)
                    # Update visibility sections indexes.
                    i = -1
                    prev_name = []
                    for ss in sections:
                        if prev_name != ss.get_level_names():
                            i += 1
                        ss.set_row_index(i)
                        prev_name = ss.get_level_names()
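                    # E.g. (hypothetical) sections with level names
                    # ["sh010_map"], ["sh010_map"], ["sh020_map"] end up on
                    # rows 0, 0 and 1: consecutive sections sharing level
                    # names stay grouped on one row.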
                if parent:
                    break

            assert parent, "Could not find the parent sequence"

        # Create a temporary level to delete the layout level.
        EditorLevelLibrary.save_all_dirty_levels()
        EditorAssetLibrary.make_directory(f"{root}/tmp")
        tmp_level = f"{root}/tmp/temp_map"
        if not EditorAssetLibrary.does_asset_exist(f"{tmp_level}.temp_map"):
            EditorLevelLibrary.new_level(tmp_level)
        else:
            EditorLevelLibrary.load_level(tmp_level)

        # Delete the layout directory.
        EditorAssetLibrary.delete_directory(str(path))

        if create_sequences:
            EditorLevelLibrary.load_level(master_level)
            EditorAssetLibrary.delete_directory(f"{root}/tmp")

        # Delete the parent folder if there aren't any more layouts in it.
        asset_content = EditorAssetLibrary.list_assets(
            str(path.parent), recursive=False, include_folder=True
        )

        if len(asset_content) == 0:
            EditorAssetLibrary.delete_directory(str(path.parent))

@@ -1,451 +0,0 @@
import json
from pathlib import Path

import unreal
from unreal import EditorLevelLibrary
import ayon_api

from ayon_core.pipeline import (
    discover_loader_plugins,
    loaders_from_representation,
    load_container,
    get_representation_path,
    AYON_CONTAINER_ID,
)
from ayon_unreal.api import plugin
from ayon_unreal.api import pipeline as upipeline


class ExistingLayoutLoader(plugin.Loader):
    """Load a layout for an existing scene, and match the existing assets."""

    product_types = {"layout"}
    representations = {"json"}

    label = "Load Layout on Existing Scene"
    icon = "code-fork"
    color = "orange"
    ASSET_ROOT = "/Game/Ayon"

    delete_unmatched_assets = True

    @classmethod
    def apply_settings(cls, project_settings):
        super(ExistingLayoutLoader, cls).apply_settings(
            project_settings
        )
        cls.delete_unmatched_assets = (
            project_settings["unreal"]["delete_unmatched_assets"]
        )

    @staticmethod
    def _create_container(
        asset_name,
        asset_dir,
        folder_path,
        representation,
        version_id,
        product_type
    ):
        container_name = f"{asset_name}_CON"

        if not unreal.EditorAssetLibrary.does_asset_exist(
            f"{asset_dir}/{container_name}"
        ):
            container = upipeline.create_container(container_name, asset_dir)
        else:
            ar = unreal.AssetRegistryHelpers.get_asset_registry()
            obj = ar.get_asset_by_object_path(
                f"{asset_dir}/{container_name}.{container_name}")
            container = obj.get_asset()

        data = {
            "schema": "ayon:container-2.0",
            "id": AYON_CONTAINER_ID,
            "folder_path": folder_path,
            "namespace": asset_dir,
            "container_name": container_name,
            "asset_name": asset_name,
            # "loader": str(self.__class__.__name__),
            "representation": representation,
            "parent": version_id,
            "product_type": product_type,
            # TODO these should probably be removed
            "asset": folder_path,
            "family": product_type,
        }

        upipeline.imprint(
            "{}/{}".format(asset_dir, container_name), data)

        return container.get_path_name()

    @staticmethod
    def _get_current_level():
        ue_version = unreal.SystemLibrary.get_engine_version().split('.')
        ue_major = ue_version[0]

        if ue_major == '4':
            return EditorLevelLibrary.get_editor_world()
        elif ue_major == '5':
            return unreal.LevelEditorSubsystem().get_current_level()

        raise NotImplementedError(
            f"Unreal version {ue_major} not supported")

    def _transform_from_basis(self, transform, basis):
        """Re-express a transform matrix in a new basis."""
        # Get the basis matrix
        basis_matrix = unreal.Matrix(
            basis[0],
            basis[1],
            basis[2],
            basis[3]
        )
        transform_matrix = unreal.Matrix(
            transform[0],
            transform[1],
            transform[2],
            transform[3]
        )

        new_transform = (
            basis_matrix.get_inverse() * transform_matrix * basis_matrix)

        return new_transform.transform()
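    # A minimal sketch of what `_transform_from_basis` computes, assuming
    # row-major 4x4 matrices passed as four row lists (values hypothetical):
    #
    #     identity = [
    #         [1, 0, 0, 0], [0, 1, 0, 0], [0, 0, 1, 0], [0, 0, 0, 1]]
    #     t = self._transform_from_basis(matrix_rows, identity)
    #
    # With the identity basis, the conjugation B^-1 * T * B leaves T
    # unchanged; with a non-trivial basis (e.g. a DCC-to-Unreal axis
    # conversion) it re-expresses T in Unreal's coordinate system.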

    def _spawn_actor(self, obj, lasset):
        actor = EditorLevelLibrary.spawn_actor_from_object(
            obj, unreal.Vector(0.0, 0.0, 0.0)
        )

        actor.set_actor_label(lasset.get('instance_name'))

        transform = lasset.get('transform_matrix')
        basis = lasset.get('basis')

        computed_transform = self._transform_from_basis(transform, basis)

        actor.set_actor_transform(computed_transform, False, True)

    @staticmethod
    def _get_fbx_loader(loaders, family):
        name = ""
        if family == 'rig':
            name = "SkeletalMeshFBXLoader"
        elif family == 'model' or family == 'staticMesh':
            name = "StaticMeshFBXLoader"
        elif family == 'camera':
            name = "CameraLoader"

        if name == "":
            return None

        for loader in loaders:
            if loader.__name__ == name:
                return loader

        return None

    @staticmethod
    def _get_abc_loader(loaders, family):
        name = ""
        if family == 'rig':
            name = "SkeletalMeshAlembicLoader"
        elif family == 'model':
            name = "StaticMeshAlembicLoader"

        if name == "":
            return None

        for loader in loaders:
            if loader.__name__ == name:
                return loader

        return None
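    # Both helpers are table lookups at heart; an equivalent, more compact
    # form (sketch, assuming the same loader class names) would be:
    #
    #     names = {'rig': "SkeletalMeshAlembicLoader",
    #              'model': "StaticMeshAlembicLoader"}
    #     name = names.get(family)
    #     return next(
    #         (ldr for ldr in loaders if ldr.__name__ == name), None)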

    def _load_asset(self, repr_data, representation, instance_name, family):
        repr_format = repr_data.get('name')

        all_loaders = discover_loader_plugins()
        loaders = loaders_from_representation(
            all_loaders, representation)

        loader = None

        if repr_format == 'fbx':
            loader = self._get_fbx_loader(loaders, family)
        elif repr_format == 'abc':
            loader = self._get_abc_loader(loaders, family)

        if not loader:
            self.log.error(f"No valid loader found for {representation}")
            return []

        # This option is necessary to avoid importing the assets with a
        # different conversion compared to the other assets. For ABC files,
        # it is in fact impossible to access the conversion settings. So,
        # we must assume that the Maya conversion settings have been applied.
        options = {
            "default_conversion": True
        }

        assets = load_container(
            loader,
            representation,
            namespace=instance_name,
            options=options
        )

        return assets

    def _get_valid_repre_entities(self, project_name, version_ids):
        valid_formats = ['fbx', 'abc']

        repre_entities = list(ayon_api.get_representations(
            project_name,
            representation_names=valid_formats,
            version_ids=version_ids
        ))
        repre_entities_by_version_id = {}
        for repre_entity in repre_entities:
            version_id = repre_entity["versionId"]
            repre_entities_by_version_id[version_id] = repre_entity
        return repre_entities_by_version_id
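    # Note: if a version has both an 'fbx' and an 'abc' representation, the
    # one listed last in the query result wins. The loop above is equivalent
    # to this comprehension (sketch):
    #
    #     return {r["versionId"]: r for r in repre_entities}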

    def _process(self, lib_path, project_name):
        ar = unreal.AssetRegistryHelpers.get_asset_registry()

        actors = EditorLevelLibrary.get_all_level_actors()

        with open(lib_path, "r") as fp:
            data = json.load(fp)

        elements = []
        repre_ids = set()
        # Get all the representations in the JSON from the database.
        for element in data:
            repre_id = element.get('representation')
            if repre_id:
                repre_ids.add(repre_id)
                elements.append(element)

        repre_entities = ayon_api.get_representations(
            project_name, representation_ids=repre_ids
        )
        repre_entities_by_id = {
            repre_entity["id"]: repre_entity
            for repre_entity in repre_entities
        }
        layout_data = []
        version_ids = set()
        for element in elements:
            repre_id = element.get("representation")
            repre_entity = repre_entities_by_id.get(repre_id)
            if not repre_entity:
                raise AssertionError("Representation not found")
            if not (
                repre_entity.get("attrib")
                and repre_entity["attrib"].get("path")
            ):
                raise AssertionError("Representation does not have path")
            if not repre_entity.get('context'):
                raise AssertionError("Representation does not have context")

            layout_data.append((repre_entity, element))
            version_ids.add(repre_entity["versionId"])

        repre_parents_by_id = ayon_api.get_representation_parents(
            project_name, repre_entities_by_id.keys()
        )

        # Prequery valid repre documents for all elements at once
        valid_repre_entities_by_version_id = self._get_valid_repre_entities(
            project_name, version_ids)
        containers = []
        actors_matched = []

        for (repre_entity, lasset) in layout_data:
            # For every actor in the scene, check if it has a representation
            # in those we got from the JSON. If so, create a container for
            # it. Otherwise, remove it from the scene.
            found = False
            repre_id = repre_entity["id"]
            repre_parents = repre_parents_by_id[repre_id]
            folder_path = repre_parents.folder["path"]
            folder_name = repre_parents.folder["name"]
            product_name = repre_parents.product["name"]
            product_type = repre_parents.product["productType"]

            for actor in actors:
                if actor.get_class().get_name() != 'StaticMeshActor':
                    continue
                if actor in actors_matched:
                    continue

                # Get the original path of the file from which the asset has
                # been imported.
                smc = actor.get_editor_property('static_mesh_component')
                mesh = smc.get_editor_property('static_mesh')
                import_data = mesh.get_editor_property('asset_import_data')
                filename = import_data.get_first_filename()
                path = Path(filename)

                if (not path.name or
                        path.name not in repre_entity["attrib"]["path"]):
                    continue
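                # Reaching this point means the actor was imported from the
                # same file as the representation's published path, so the
                # actor is adopted instead of importing a second copy.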

                actor.set_actor_label(lasset.get('instance_name'))

                mesh_path = Path(mesh.get_path_name()).parent.as_posix()

                # Create the container for the asset.
                container = self._create_container(
                    f"{folder_name}_{product_name}",
                    mesh_path,
                    folder_path,
                    repre_entity["id"],
                    repre_entity["versionId"],
                    product_type
                )
                containers.append(container)

                # Set the transform for the actor.
                transform = lasset.get('transform_matrix')
                basis = lasset.get('basis')

                computed_transform = self._transform_from_basis(
                    transform, basis)
                actor.set_actor_transform(computed_transform, False, True)

                actors_matched.append(actor)
                found = True
                break

            # If an actor has not been found for this representation,
            # we check if it has been loaded already by checking all the
            # loaded containers. If so, we add it to the scene. Otherwise,
            # we load it.
            if found:
                continue

            all_containers = upipeline.ls()

            loaded = False

            for container in all_containers:
                repre_id = container.get('representation')

                if repre_id != repre_entity["id"]:
                    continue

                asset_dir = container.get('namespace')

                arfilter = unreal.ARFilter(
                    class_names=["StaticMesh"],
                    package_paths=[asset_dir],
                    recursive_paths=False)
                assets = ar.get_assets(arfilter)

                for asset in assets:
                    obj = asset.get_asset()
                    self._spawn_actor(obj, lasset)

                loaded = True
                break

            # If the asset has not been loaded yet, load it.
            if loaded:
                continue

            version_id = lasset.get('version')
            assets = self._load_asset(
                valid_repre_entities_by_version_id.get(version_id),
                lasset.get('representation'),
                lasset.get('instance_name'),
                lasset.get('family')
            )

            for asset in assets:
                obj = ar.get_asset_by_object_path(asset).get_asset()
                if obj.get_class().get_name() != 'StaticMesh':
                    continue
                self._spawn_actor(obj, lasset)

                break

        # Check if an actor was not matched to a representation.
        # If so, remove it from the scene.
        for actor in actors:
            if actor.get_class().get_name() != 'StaticMeshActor':
                continue
            if actor not in actors_matched:
                self.log.warning(f"Actor {actor.get_name()} not matched.")
                if self.delete_unmatched_assets:
                    EditorLevelLibrary.destroy_actor(actor)

        return containers

    def load(self, context, name, namespace, options):
        print("Loading Layout and Match Assets")

        folder_name = context["folder"]["name"]
        folder_path = context["folder"]["path"]
        product_type = context["product"]["productType"]
        asset_name = f"{folder_name}_{name}" if folder_name else name
        container_name = f"{folder_name}_{name}_CON"

        curr_level = self._get_current_level()

        if not curr_level:
            raise AssertionError("Current level not saved")

        project_name = context["project"]["name"]
        path = self.filepath_from_context(context)
        containers = self._process(path, project_name)

        curr_level_path = Path(
            curr_level.get_outer().get_path_name()).parent.as_posix()

        if not unreal.EditorAssetLibrary.does_asset_exist(
            f"{curr_level_path}/{container_name}"
        ):
            upipeline.create_container(
                container=container_name, path=curr_level_path)

        data = {
            "schema": "ayon:container-2.0",
            "id": AYON_CONTAINER_ID,
            "folder_path": folder_path,
            "namespace": curr_level_path,
            "container_name": container_name,
            "asset_name": asset_name,
            "loader": str(self.__class__.__name__),
            "representation": context["representation"]["id"],
            "parent": context["representation"]["versionId"],
            "product_type": product_type,
            "loaded_assets": containers,
            # TODO these should probably be removed
            "asset": folder_path,
            "family": product_type,
        }
        upipeline.imprint(f"{curr_level_path}/{container_name}", data)

    def update(self, container, context):
        asset_dir = container.get('namespace')

        project_name = context["project"]["name"]
        repre_entity = context["representation"]

        source_path = get_representation_path(repre_entity)
        containers = self._process(source_path, project_name)

        data = {
            "representation": repre_entity["id"],
            "loaded_assets": containers,
            "parent": repre_entity["versionId"],
        }
        upipeline.imprint(
            "{}/{}".format(asset_dir, container.get('container_name')), data)

@@ -1,220 +0,0 @@
# -*- coding: utf-8 -*-
"""Load Skeletal Mesh alembics."""
import os

from ayon_core.pipeline import (
    get_representation_path,
    AYON_CONTAINER_ID
)
from ayon_unreal.api import plugin
from ayon_unreal.api.pipeline import (
    AYON_ASSET_DIR,
    create_container,
    imprint,
)
import unreal  # noqa


class SkeletalMeshAlembicLoader(plugin.Loader):
    """Load Unreal SkeletalMesh from Alembic"""

    product_types = {"pointcache", "skeletalMesh"}
    label = "Import Alembic Skeletal Mesh"
    representations = {"abc"}
    icon = "cube"
    color = "orange"

    root = AYON_ASSET_DIR

    @staticmethod
    def get_task(filename, asset_dir, asset_name, replace, default_conversion):
        task = unreal.AssetImportTask()
        options = unreal.AbcImportSettings()

        task.set_editor_property('filename', filename)
        task.set_editor_property('destination_path', asset_dir)
        task.set_editor_property('destination_name', asset_name)
        task.set_editor_property('replace_existing', replace)
        task.set_editor_property('automated', True)
        task.set_editor_property('save', True)

        options.set_editor_property(
            'import_type', unreal.AlembicImportType.SKELETAL)

        if not default_conversion:
            conversion_settings = unreal.AbcConversionSettings(
                preset=unreal.AbcConversionPreset.CUSTOM,
                flip_u=False, flip_v=False,
                rotation=[0.0, 0.0, 0.0],
                scale=[1.0, 1.0, 1.0])
            options.conversion_settings = conversion_settings

        task.options = options

        return task

    def import_and_containerize(
        self, filepath, asset_dir, asset_name, container_name,
        default_conversion=False
    ):
        unreal.EditorAssetLibrary.make_directory(asset_dir)

        task = self.get_task(
            filepath, asset_dir, asset_name, False, default_conversion)

        unreal.AssetToolsHelpers.get_asset_tools().import_asset_tasks([task])

        # Create Asset Container
        create_container(container=container_name, path=asset_dir)

    def imprint(
        self,
        folder_path,
        asset_dir,
        container_name,
        asset_name,
        representation,
        product_type
    ):
        data = {
            "schema": "ayon:container-2.0",
            "id": AYON_CONTAINER_ID,
            "folder_path": folder_path,
            "namespace": asset_dir,
            "container_name": container_name,
            "asset_name": asset_name,
            "loader": str(self.__class__.__name__),
            "representation": representation["id"],
            "parent": representation["versionId"],
            "product_type": product_type,
            # TODO these should probably be removed
            "asset": folder_path,
            "family": product_type,
        }
        imprint(f"{asset_dir}/{container_name}", data)

    def load(self, context, name, namespace, options):
        """Load and containerise representation into Content Browser.

        Args:
            context (dict): application context
            name (str): Product name
            namespace (str): in Unreal this is basically path to container.
                This is not passed here, so the namespace is set by
                `containerise()`, because only then do we know the real path.
            data (dict): Data to be imprinted.

        Returns:
            list(str): list of container content
        """
        # Create directory for asset and Ayon container
        folder_path = context["folder"]["path"]
        folder_name = context["folder"]["name"]
        suffix = "_CON"
        asset_name = f"{folder_name}_{name}" if folder_name else name
        version = context["version"]["version"]
        # Check if version is hero version and use different name
        if version < 0:
            name_version = f"{name}_hero"
        else:
            name_version = f"{name}_v{version:03d}"
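        # E.g. version 3 of a product named "rigMain" (hypothetical) yields
        # "rigMain_v003"; a hero version, stored with a negative number,
        # yields "rigMain_hero".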

        default_conversion = False
        if options.get("default_conversion"):
            default_conversion = options.get("default_conversion")

        tools = unreal.AssetToolsHelpers().get_asset_tools()
        asset_dir, container_name = tools.create_unique_asset_name(
            f"{self.root}/{folder_name}/{name_version}", suffix="")

        container_name += suffix

        if not unreal.EditorAssetLibrary.does_directory_exist(asset_dir):
            path = self.filepath_from_context(context)

            self.import_and_containerize(path, asset_dir, asset_name,
                                         container_name, default_conversion)

        product_type = context["product"]["productType"]
        self.imprint(
            folder_path,
            asset_dir,
            container_name,
            asset_name,
            context["representation"],
            product_type
        )

        asset_content = unreal.EditorAssetLibrary.list_assets(
            asset_dir, recursive=True, include_folder=True
        )

        for a in asset_content:
            unreal.EditorAssetLibrary.save_asset(a)

        return asset_content

    def update(self, container, context):
        folder_path = context["folder"]["path"]
        folder_name = context["folder"]["name"]
        product_name = context["product"]["name"]
        product_type = context["product"]["productType"]
        version = context["version"]["version"]
        repre_entity = context["representation"]

        # Create directory for folder and Ayon container
        suffix = "_CON"
        asset_name = product_name
        if folder_name:
            asset_name = f"{folder_name}_{product_name}"
        # Check if version is hero version and use different name
        if version < 0:
            name_version = f"{product_name}_hero"
        else:
            name_version = f"{product_name}_v{version:03d}"
        tools = unreal.AssetToolsHelpers().get_asset_tools()
        asset_dir, container_name = tools.create_unique_asset_name(
            f"{self.root}/{folder_name}/{name_version}", suffix="")

        container_name += suffix

        if not unreal.EditorAssetLibrary.does_directory_exist(asset_dir):
            path = get_representation_path(repre_entity)

            self.import_and_containerize(path, asset_dir, asset_name,
                                         container_name)

        self.imprint(
            folder_path,
            asset_dir,
            container_name,
            asset_name,
            repre_entity,
            product_type,
        )

        asset_content = unreal.EditorAssetLibrary.list_assets(
            asset_dir, recursive=True, include_folder=False
        )

        for a in asset_content:
            unreal.EditorAssetLibrary.save_asset(a)

    def remove(self, container):
        path = container["namespace"]
        parent_path = os.path.dirname(path)

        unreal.EditorAssetLibrary.delete_directory(path)

        asset_content = unreal.EditorAssetLibrary.list_assets(
            parent_path, recursive=False
        )

        if len(asset_content) == 0:
            unreal.EditorAssetLibrary.delete_directory(parent_path)

@@ -1,222 +0,0 @@
# -*- coding: utf-8 -*-
"""Load Skeletal Meshes from FBX."""
import os

from ayon_core.pipeline import (
    get_representation_path,
    AYON_CONTAINER_ID
)
from ayon_unreal.api import plugin
from ayon_unreal.api.pipeline import (
    AYON_ASSET_DIR,
    create_container,
    imprint,
)
import unreal  # noqa


class SkeletalMeshFBXLoader(plugin.Loader):
    """Load Unreal SkeletalMesh from FBX."""

    product_types = {"rig", "skeletalMesh"}
    label = "Import FBX Skeletal Mesh"
    representations = {"fbx"}
    icon = "cube"
    color = "orange"

    root = AYON_ASSET_DIR

    @staticmethod
    def get_task(filename, asset_dir, asset_name, replace):
        task = unreal.AssetImportTask()
        options = unreal.FbxImportUI()

        task.set_editor_property('filename', filename)
        task.set_editor_property('destination_path', asset_dir)
        task.set_editor_property('destination_name', asset_name)
        task.set_editor_property('replace_existing', replace)
        task.set_editor_property('automated', True)
        task.set_editor_property('save', True)

        options.set_editor_property(
            'automated_import_should_detect_type', False)
        options.set_editor_property('import_as_skeletal', True)
        options.set_editor_property('import_animations', False)
        options.set_editor_property('import_mesh', True)
        options.set_editor_property('import_materials', False)
        options.set_editor_property('import_textures', False)
        options.set_editor_property('skeleton', None)
        options.set_editor_property('create_physics_asset', False)

        options.set_editor_property(
            'mesh_type_to_import',
            unreal.FBXImportType.FBXIT_SKELETAL_MESH)

        options.skeletal_mesh_import_data.set_editor_property(
            'import_content_type',
            unreal.FBXImportContentType.FBXICT_ALL)

        options.skeletal_mesh_import_data.set_editor_property(
            'normal_import_method',
            unreal.FBXNormalImportMethod.FBXNIM_IMPORT_NORMALS)

        task.options = options

        return task

    def import_and_containerize(
        self, filepath, asset_dir, asset_name, container_name
    ):
        unreal.EditorAssetLibrary.make_directory(asset_dir)

        task = self.get_task(
            filepath, asset_dir, asset_name, False)

        unreal.AssetToolsHelpers.get_asset_tools().import_asset_tasks([task])

        # Create Asset Container
        create_container(container=container_name, path=asset_dir)

    def imprint(
        self,
        folder_path,
        asset_dir,
        container_name,
        asset_name,
        representation,
        product_type
    ):
        data = {
            "schema": "ayon:container-2.0",
            "id": AYON_CONTAINER_ID,
            "folder_path": folder_path,
            "namespace": asset_dir,
            "container_name": container_name,
            "asset_name": asset_name,
            "loader": str(self.__class__.__name__),
            "representation": representation["id"],
            "parent": representation["versionId"],
            "product_type": product_type,
            # TODO these should probably be removed
            "asset": folder_path,
            "family": product_type,
        }
        imprint(f"{asset_dir}/{container_name}", data)

    def load(self, context, name, namespace, options):
        """Load and containerise representation into Content Browser.

        Args:
            context (dict): application context
            name (str): Product name
            namespace (str): in Unreal this is basically path to container.
                This is not passed here, so the namespace is set by
                `containerise()`, because only then do we know the real path.
            data (dict): Data to be imprinted.

        Returns:
            list(str): list of container content
        """
        # Create directory for asset and Ayon container
        folder_path = context["folder"]["path"]
        folder_name = context["folder"]["name"]
        product_type = context["product"]["productType"]
        suffix = "_CON"
        asset_name = f"{folder_name}_{name}" if folder_name else name
        version_entity = context["version"]
        # Check if version is hero version and use different name
        version = version_entity["version"]
        if version < 0:
            name_version = f"{name}_hero"
        else:
            name_version = f"{name}_v{version:03d}"

        tools = unreal.AssetToolsHelpers().get_asset_tools()
        asset_dir, container_name = tools.create_unique_asset_name(
            f"{self.root}/{folder_name}/{name_version}", suffix=""
        )

        container_name += suffix

        if not unreal.EditorAssetLibrary.does_directory_exist(asset_dir):
            path = self.filepath_from_context(context)

            self.import_and_containerize(
                path, asset_dir, asset_name, container_name)

        self.imprint(
            folder_path,
            asset_dir,
            container_name,
            asset_name,
            context["representation"],
            product_type
        )

        asset_content = unreal.EditorAssetLibrary.list_assets(
            asset_dir, recursive=True, include_folder=True
        )

        for a in asset_content:
            unreal.EditorAssetLibrary.save_asset(a)

        return asset_content

    def update(self, container, context):
        folder_path = context["folder"]["path"]
        folder_name = context["folder"]["name"]
        product_name = context["product"]["name"]
        product_type = context["product"]["productType"]
        version = context["version"]["version"]
        repre_entity = context["representation"]

        # Create directory for asset and Ayon container
        suffix = "_CON"
        asset_name = product_name
        if folder_name:
            asset_name = f"{folder_name}_{product_name}"
        # Check if version is hero version and use different name
        if version < 0:
            name_version = f"{product_name}_hero"
        else:
            name_version = f"{product_name}_v{version:03d}"
        tools = unreal.AssetToolsHelpers().get_asset_tools()
        asset_dir, container_name = tools.create_unique_asset_name(
            f"{self.root}/{folder_name}/{name_version}", suffix="")

        container_name += suffix

        if not unreal.EditorAssetLibrary.does_directory_exist(asset_dir):
            path = get_representation_path(repre_entity)

            self.import_and_containerize(
                path, asset_dir, asset_name, container_name)

        self.imprint(
            folder_path,
            asset_dir,
            container_name,
            asset_name,
            repre_entity,
            product_type,
        )

        asset_content = unreal.EditorAssetLibrary.list_assets(
            asset_dir, recursive=True, include_folder=False
        )

        for a in asset_content:
            unreal.EditorAssetLibrary.save_asset(a)

    def remove(self, container):
        path = container["namespace"]
        parent_path = os.path.dirname(path)

        unreal.EditorAssetLibrary.delete_directory(path)

        asset_content = unreal.EditorAssetLibrary.list_assets(
            parent_path, recursive=False
        )

        if len(asset_content) == 0:
            unreal.EditorAssetLibrary.delete_directory(parent_path)

@@ -1,223 +0,0 @@
# -*- coding: utf-8 -*-
"""Loader for Static Mesh alembics."""
import os

from ayon_core.pipeline import (
    get_representation_path,
    AYON_CONTAINER_ID
)
from ayon_unreal.api import plugin
from ayon_unreal.api.pipeline import (
    AYON_ASSET_DIR,
    create_container,
    imprint,
)
import unreal  # noqa


class StaticMeshAlembicLoader(plugin.Loader):
    """Load Unreal StaticMesh from Alembic"""

    product_types = {"model", "staticMesh"}
    label = "Import Alembic Static Mesh"
    representations = {"abc"}
    icon = "cube"
    color = "orange"

    root = AYON_ASSET_DIR

    @staticmethod
    def get_task(filename, asset_dir, asset_name, replace, default_conversion):
        task = unreal.AssetImportTask()
        options = unreal.AbcImportSettings()
        sm_settings = unreal.AbcStaticMeshSettings()

        task.set_editor_property('filename', filename)
        task.set_editor_property('destination_path', asset_dir)
        task.set_editor_property('destination_name', asset_name)
        task.set_editor_property('replace_existing', replace)
        task.set_editor_property('automated', True)
        task.set_editor_property('save', True)

        # Set import options here.
        # Unreal 4.24 ignores the settings; it works with Unreal 4.26.
        options.set_editor_property(
            'import_type', unreal.AlembicImportType.STATIC_MESH)

        sm_settings.set_editor_property('merge_meshes', True)

        if not default_conversion:
            conversion_settings = unreal.AbcConversionSettings(
                preset=unreal.AbcConversionPreset.CUSTOM,
                flip_u=False, flip_v=False,
                rotation=[0.0, 0.0, 0.0],
                scale=[1.0, 1.0, 1.0])
            options.conversion_settings = conversion_settings

        options.static_mesh_settings = sm_settings
        task.options = options

        return task

    def import_and_containerize(
        self, filepath, asset_dir, asset_name, container_name,
        default_conversion=False
    ):
        unreal.EditorAssetLibrary.make_directory(asset_dir)

        task = self.get_task(
            filepath, asset_dir, asset_name, False, default_conversion)

        unreal.AssetToolsHelpers.get_asset_tools().import_asset_tasks([task])

        # Create Asset Container
        create_container(container=container_name, path=asset_dir)

    def imprint(
        self,
        folder_path,
        asset_dir,
        container_name,
        asset_name,
        representation,
        product_type,
    ):
        data = {
            "schema": "ayon:container-2.0",
            "id": AYON_CONTAINER_ID,
            "folder_path": folder_path,
            "namespace": asset_dir,
            "container_name": container_name,
            "asset_name": asset_name,
            "loader": str(self.__class__.__name__),
            "representation": representation["id"],
            "parent": representation["versionId"],
            "product_type": product_type,
            # TODO these should probably be removed
            "asset": folder_path,
            "family": product_type
        }
        imprint(f"{asset_dir}/{container_name}", data)

    def load(self, context, name, namespace, options):
        """Load and containerise representation into Content Browser.

        Args:
            context (dict): application context
            name (str): Product name
            namespace (str): in Unreal this is basically path to container.
                This is not passed here, so the namespace is set by
                `containerise()`, because only then do we know the real path.
            data (dict): Data to be imprinted.

        Returns:
            list(str): list of container content
        """
        # Create directory for asset and Ayon container
        folder_path = context["folder"]["path"]
        folder_name = context["folder"]["name"]

        suffix = "_CON"
        asset_name = f"{folder_name}_{name}" if folder_name else name
        version = context["version"]["version"]
        # Check if version is hero version and use different name
        if version < 0:
            name_version = f"{name}_hero"
        else:
            name_version = f"{name}_v{version:03d}"

        default_conversion = False
        if options.get("default_conversion"):
            default_conversion = options.get("default_conversion")

        tools = unreal.AssetToolsHelpers().get_asset_tools()
        asset_dir, container_name = tools.create_unique_asset_name(
            f"{self.root}/{folder_name}/{name_version}", suffix="")

        container_name += suffix

        if not unreal.EditorAssetLibrary.does_directory_exist(asset_dir):
            path = self.filepath_from_context(context)

            self.import_and_containerize(path, asset_dir, asset_name,
                                         container_name, default_conversion)

        product_type = context["product"]["productType"]
        self.imprint(
            folder_path,
            asset_dir,
            container_name,
            asset_name,
            context["representation"],
            product_type
        )

        asset_content = unreal.EditorAssetLibrary.list_assets(
            asset_dir, recursive=True, include_folder=False
        )

        for a in asset_content:
            unreal.EditorAssetLibrary.save_asset(a)

        return asset_content

    def update(self, container, context):
        folder_path = context["folder"]["path"]
        folder_name = context["folder"]["name"]
        product_name = context["product"]["name"]
        product_type = context["product"]["productType"]
        repre_entity = context["representation"]

        # Create directory for asset and Ayon container
        suffix = "_CON"
        asset_name = product_name
        if folder_name:
            asset_name = f"{folder_name}_{product_name}"
        version = context["version"]["version"]
        # Check if version is hero version and use different name
        if version < 0:
            name_version = f"{product_name}_hero"
        else:
            name_version = f"{product_name}_v{version:03d}"

        tools = unreal.AssetToolsHelpers().get_asset_tools()
        asset_dir, container_name = tools.create_unique_asset_name(
            f"{self.root}/{folder_name}/{name_version}", suffix="")

        container_name += suffix

        if not unreal.EditorAssetLibrary.does_directory_exist(asset_dir):
            path = get_representation_path(repre_entity)

            self.import_and_containerize(path, asset_dir, asset_name,
                                         container_name)

        self.imprint(
            folder_path,
            asset_dir,
            container_name,
            asset_name,
            repre_entity,
            product_type
        )

        asset_content = unreal.EditorAssetLibrary.list_assets(
            asset_dir, recursive=True, include_folder=False
        )

        for a in asset_content:
            unreal.EditorAssetLibrary.save_asset(a)

    def remove(self, container):
        path = container["namespace"]
        parent_path = os.path.dirname(path)

        unreal.EditorAssetLibrary.delete_directory(path)

        asset_content = unreal.EditorAssetLibrary.list_assets(
            parent_path, recursive=False
        )

        if len(asset_content) == 0:
            unreal.EditorAssetLibrary.delete_directory(parent_path)
@ -1,209 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""Load Static meshes form FBX."""
|
||||
import os
|
||||
|
||||
from ayon_core.pipeline import (
|
||||
get_representation_path,
|
||||
AYON_CONTAINER_ID
|
||||
)
|
||||
from ayon_unreal.api import plugin
|
||||
from ayon_unreal.api.pipeline import (
|
||||
AYON_ASSET_DIR,
|
||||
create_container,
|
||||
imprint,
|
||||
)
|
||||
import unreal # noqa
|
||||
|
||||
|
||||
class StaticMeshFBXLoader(plugin.Loader):
|
||||
"""Load Unreal StaticMesh from FBX."""
|
||||
|
||||
product_types = {"model", "staticMesh"}
|
||||
label = "Import FBX Static Mesh"
|
||||
representations = {"fbx"}
|
||||
icon = "cube"
|
||||
color = "orange"
|
||||
|
||||
root = AYON_ASSET_DIR
|
||||
|
||||
@staticmethod
|
||||
def get_task(filename, asset_dir, asset_name, replace):
|
||||
task = unreal.AssetImportTask()
|
||||
options = unreal.FbxImportUI()
|
||||
import_data = unreal.FbxStaticMeshImportData()
|
||||
|
||||
task.set_editor_property('filename', filename)
|
||||
task.set_editor_property('destination_path', asset_dir)
|
||||
task.set_editor_property('destination_name', asset_name)
|
||||
task.set_editor_property('replace_existing', replace)
|
||||
task.set_editor_property('automated', True)
|
||||
task.set_editor_property('save', True)
|
||||
|
||||
# set import options here
|
||||
options.set_editor_property(
|
||||
'automated_import_should_detect_type', False)
|
||||
options.set_editor_property('import_animations', False)
|
||||
|
||||
import_data.set_editor_property('combine_meshes', True)
|
||||
import_data.set_editor_property('remove_degenerates', False)
|
||||
|
||||
options.static_mesh_import_data = import_data
|
||||
task.options = options
|
||||
|
||||
return task
|
||||
|
||||
def import_and_containerize(
|
||||
self, filepath, asset_dir, asset_name, container_name
|
||||
):
|
||||
unreal.EditorAssetLibrary.make_directory(asset_dir)
|
||||
|
||||
task = self.get_task(
|
||||
filepath, asset_dir, asset_name, False)
|
||||
|
||||
unreal.AssetToolsHelpers.get_asset_tools().import_asset_tasks([task])
|
||||
|
||||
# Create Asset Container
|
||||
create_container(container=container_name, path=asset_dir)
|
    def imprint(
        self,
        folder_path,
        asset_dir,
        container_name,
        asset_name,
        repre_entity,
        product_type
    ):
        data = {
            "schema": "ayon:container-2.0",
            "id": AYON_CONTAINER_ID,
            "namespace": asset_dir,
            "folder_path": folder_path,
            "container_name": container_name,
            "asset_name": asset_name,
            "loader": str(self.__class__.__name__),
            "representation": repre_entity["id"],
            "parent": repre_entity["versionId"],
            "product_type": product_type,
            # TODO these should probably be removed
            "asset": folder_path,
            "family": product_type,
        }
        imprint(f"{asset_dir}/{container_name}", data)

    def load(self, context, name, namespace, options):
        """Load and containerise representation into Content Browser.

        Args:
            context (dict): application context
            name (str): Product name
            namespace (str): in Unreal this is basically path to container.
                This is not passed here, so namespace is set
                by `containerise()` because only then we know
                real path.
            options (dict): Those would be data to be imprinted.

        Returns:
            list(str): list of container content
        """
        # Create directory for asset and Ayon container
        folder_path = context["folder"]["path"]
        folder_name = context["folder"]["name"]
        suffix = "_CON"
        asset_name = f"{folder_name}_{name}" if folder_name else f"{name}"
        version = context["version"]["version"]
        # Check if version is hero version and use different name
        if version < 0:
            name_version = f"{name}_hero"
        else:
            name_version = f"{name}_v{version:03d}"

        tools = unreal.AssetToolsHelpers().get_asset_tools()
        asset_dir, container_name = tools.create_unique_asset_name(
            f"{self.root}/{folder_name}/{name_version}", suffix=""
        )

        container_name += suffix

        if not unreal.EditorAssetLibrary.does_directory_exist(asset_dir):
            path = self.filepath_from_context(context)

            self.import_and_containerize(
                path, asset_dir, asset_name, container_name)

            self.imprint(
                folder_path,
                asset_dir,
                container_name,
                asset_name,
                context["representation"],
                context["product"]["productType"]
            )

        asset_content = unreal.EditorAssetLibrary.list_assets(
            asset_dir, recursive=True, include_folder=True
        )

        for a in asset_content:
            unreal.EditorAssetLibrary.save_asset(a)

        return asset_content

    def update(self, container, context):
        folder_path = context["folder"]["path"]
        folder_name = context["folder"]["name"]
        product_name = context["product"]["name"]
        product_type = context["product"]["productType"]
        version = context["version"]["version"]
        repre_entity = context["representation"]

        # Create directory for asset and Ayon container
        suffix = "_CON"
        asset_name = product_name
        if folder_name:
            asset_name = f"{folder_name}_{product_name}"
        # Check if version is hero version and use different name
        if version < 0:
            name_version = f"{product_name}_hero"
        else:
            name_version = f"{product_name}_v{version:03d}"
        tools = unreal.AssetToolsHelpers().get_asset_tools()
        asset_dir, container_name = tools.create_unique_asset_name(
            f"{self.root}/{folder_name}/{name_version}", suffix="")

        container_name += suffix

        if not unreal.EditorAssetLibrary.does_directory_exist(asset_dir):
            path = get_representation_path(repre_entity)

            self.import_and_containerize(
                path, asset_dir, asset_name, container_name)

            self.imprint(
                folder_path,
                asset_dir,
                container_name,
                asset_name,
                repre_entity,
                product_type,
            )

        asset_content = unreal.EditorAssetLibrary.list_assets(
            asset_dir, recursive=True, include_folder=False
        )

        for a in asset_content:
            unreal.EditorAssetLibrary.save_asset(a)

    def remove(self, container):
        path = container["namespace"]
        parent_path = os.path.dirname(path)

        unreal.EditorAssetLibrary.delete_directory(path)

        asset_content = unreal.EditorAssetLibrary.list_assets(
            parent_path, recursive=False
        )

        if len(asset_content) == 0:
            unreal.EditorAssetLibrary.delete_directory(parent_path)
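Editor's note: a minimal sketch (not part of this diff) of how the metadata imprinted above can be read back from the container. `get_metadata_tag` mirrors its use in the ExtractLayout plugin later in this diff; the container path is illustrative:

import unreal

container_path = "/Game/Ayon/hero/rig_v003/rig_v003_CON"  # illustrative
container = unreal.EditorAssetLibrary.load_asset(container_path)
representation_id = unreal.EditorAssetLibrary.get_metadata_tag(
    container, "representation")
version_id = unreal.EditorAssetLibrary.get_metadata_tag(container, "parent")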
@ -1,171 +0,0 @@
# -*- coding: utf-8 -*-
"""Load UAsset."""
from pathlib import Path
import shutil

from ayon_core.pipeline import (
    get_representation_path,
    AYON_CONTAINER_ID
)
from ayon_unreal.api import plugin
from ayon_unreal.api import pipeline as unreal_pipeline
import unreal  # noqa


class UAssetLoader(plugin.Loader):
    """Load UAsset."""

    product_types = {"uasset"}
    label = "Load UAsset"
    representations = {"uasset"}
    icon = "cube"
    color = "orange"

    extension = "uasset"

    def load(self, context, name, namespace, options):
        """Load and containerise representation into Content Browser.

        Args:
            context (dict): application context
            name (str): Product name
            namespace (str): in Unreal this is basically path to container.
                This is not passed here, so namespace is set
                by `containerise()` because only then we know
                real path.
            options (dict): Those would be data to be imprinted. This is not
                used now, data are imprinted by `containerise()`.

        Returns:
            list(str): list of container content
        """
        # Create directory for asset and Ayon container
        root = unreal_pipeline.AYON_ASSET_DIR
        folder_path = context["folder"]["path"]
        folder_name = context["folder"]["name"]
        suffix = "_CON"
        asset_name = f"{folder_name}_{name}" if folder_name else f"{name}"
        tools = unreal.AssetToolsHelpers().get_asset_tools()
        asset_dir, container_name = tools.create_unique_asset_name(
            f"{root}/{folder_name}/{name}", suffix=""
        )

        unique_number = 1
        while unreal.EditorAssetLibrary.does_directory_exist(
            f"{asset_dir}_{unique_number:02}"
        ):
            unique_number += 1

        asset_dir = f"{asset_dir}_{unique_number:02}"
        container_name = f"{container_name}_{unique_number:02}{suffix}"

        unreal.EditorAssetLibrary.make_directory(asset_dir)

        destination_path = asset_dir.replace(
            "/Game", Path(unreal.Paths.project_content_dir()).as_posix(), 1)

        path = self.filepath_from_context(context)
        shutil.copy(
            path,
            f"{destination_path}/{name}_{unique_number:02}.{self.extension}")

        # Create Asset Container
        unreal_pipeline.create_container(
            container=container_name, path=asset_dir)

        product_type = context["product"]["productType"]
        data = {
            "schema": "ayon:container-2.0",
            "id": AYON_CONTAINER_ID,
            "namespace": asset_dir,
            "folder_path": folder_path,
            "container_name": container_name,
            "asset_name": asset_name,
            "loader": str(self.__class__.__name__),
            "representation": context["representation"]["id"],
            "parent": context["representation"]["versionId"],
            "product_type": product_type,
            # TODO these should probably be removed
            "asset": folder_path,
            "family": product_type,
        }
        unreal_pipeline.imprint(f"{asset_dir}/{container_name}", data)

        asset_content = unreal.EditorAssetLibrary.list_assets(
            asset_dir, recursive=True, include_folder=True
        )

        for a in asset_content:
            unreal.EditorAssetLibrary.save_asset(a)

        return asset_content

    def update(self, container, context):
        ar = unreal.AssetRegistryHelpers.get_asset_registry()

        asset_dir = container["namespace"]

        product_name = context["product"]["name"]
        repre_entity = context["representation"]

        # The container name is "<name>_<unique_number><suffix>" (see
        # `load()`), so the second-to-last "_" chunk is the unique number.
        unique_number = container["container_name"].split("_")[-2]

        destination_path = asset_dir.replace(
            "/Game", Path(unreal.Paths.project_content_dir()).as_posix(), 1)

        asset_content = unreal.EditorAssetLibrary.list_assets(
            asset_dir, recursive=False, include_folder=True
        )

        for asset in asset_content:
            obj = ar.get_asset_by_object_path(asset).get_asset()
            if obj.get_class().get_name() != "AyonAssetContainer":
                unreal.EditorAssetLibrary.delete_asset(asset)

        update_filepath = get_representation_path(repre_entity)

        shutil.copy(
            update_filepath,
            f"{destination_path}/{product_name}_{unique_number}.{self.extension}"
        )

        container_path = f'{container["namespace"]}/{container["objectName"]}'
        # update metadata
        unreal_pipeline.imprint(
            container_path,
            {
                "representation": repre_entity["id"],
                "parent": repre_entity["versionId"],
            }
        )

        asset_content = unreal.EditorAssetLibrary.list_assets(
            asset_dir, recursive=True, include_folder=True
        )

        for a in asset_content:
            unreal.EditorAssetLibrary.save_asset(a)

    def remove(self, container):
        path = container["namespace"]
        parent_path = Path(path).parent.as_posix()

        unreal.EditorAssetLibrary.delete_directory(path)

        asset_content = unreal.EditorAssetLibrary.list_assets(
            parent_path, recursive=False
        )

        if len(asset_content) == 0:
            unreal.EditorAssetLibrary.delete_directory(parent_path)


class UMapLoader(UAssetLoader):
    """Load Level."""

    product_types = {"uasset"}
    label = "Load Level"
    representations = {"umap"}

    extension = "umap"
@ -1,185 +0,0 @@
# -*- coding: utf-8 -*-
"""Loader for Yeti Cache."""
import os
import json

from ayon_core.pipeline import (
    get_representation_path,
    AYON_CONTAINER_ID
)
from ayon_unreal.api import plugin
from ayon_unreal.api import pipeline as unreal_pipeline
import unreal  # noqa


class YetiLoader(plugin.Loader):
    """Load Yeti Cache."""

    product_types = {"yeticacheUE"}
    label = "Import Yeti"
    representations = {"abc"}
    icon = "pagelines"
    color = "orange"

    @staticmethod
    def get_task(filename, asset_dir, asset_name, replace):
        task = unreal.AssetImportTask()
        options = unreal.AbcImportSettings()

        task.set_editor_property('filename', filename)
        task.set_editor_property('destination_path', asset_dir)
        task.set_editor_property('destination_name', asset_name)
        task.set_editor_property('replace_existing', replace)
        task.set_editor_property('automated', True)
        task.set_editor_property('save', True)

        task.options = options

        return task

    @staticmethod
    def is_groom_module_active():
        """Check if the Groom plugin is active.

        This is a workaround, because the Unreal Python API doesn't have
        any method to check whether a plugin is active.
        """
        prj_file = unreal.Paths.get_project_file_path()

        with open(prj_file, "r") as fp:
            data = json.load(fp)

        plugins = data.get("Plugins")

        if not plugins:
            return False

        plugin_names = [p.get("Name") for p in plugins]

        return "HairStrands" in plugin_names
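    # Editorial sketch (not in the original file): the .uproject JSON that
    # is_groom_module_active() parses looks roughly like this; only the
    # "Plugins" list and its "Name" keys are relied on by the check:
    #
    #     {
    #         "FileVersion": 3,
    #         "Plugins": [
    #             {"Name": "HairStrands", "Enabled": true}
    #         ]
    #     }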
    def load(self, context, name, namespace, options):
        """Load and containerise representation into Content Browser.

        This is a two-step process. First, import the cache to a temporary
        path and then call `containerise()` on it - this moves all content
        to a new directory, creates an AssetContainer there and imprints it
        with metadata. This marks the path as a container.

        Args:
            context (dict): application context
            name (str): Product name
            namespace (str): in Unreal this is basically path to container.
                This is not passed here, so namespace is set
                by `containerise()` because only then we know
                real path.
            data (dict): Those would be data to be imprinted. This is not
                used now, data are imprinted by `containerise()`.

        Returns:
            list(str): list of container content

        """
        # Check if Groom plugin is active
        if not self.is_groom_module_active():
            raise RuntimeError("Groom plugin is not activated.")

        # Create directory for asset and Ayon container
        root = unreal_pipeline.AYON_ASSET_DIR
        folder_path = context["folder"]["path"]
        folder_name = context["folder"]["name"]
        suffix = "_CON"
        asset_name = f"{folder_name}_{name}" if folder_name else f"{name}"

        tools = unreal.AssetToolsHelpers().get_asset_tools()
        asset_dir, container_name = tools.create_unique_asset_name(
            f"{root}/{folder_name}/{name}", suffix="")

        unique_number = 1
        while unreal.EditorAssetLibrary.does_directory_exist(
            f"{asset_dir}_{unique_number:02}"
        ):
            unique_number += 1

        asset_dir = f"{asset_dir}_{unique_number:02}"
        container_name = f"{container_name}_{unique_number:02}{suffix}"

        if not unreal.EditorAssetLibrary.does_directory_exist(asset_dir):
            unreal.EditorAssetLibrary.make_directory(asset_dir)

            path = self.filepath_from_context(context)
            task = self.get_task(path, asset_dir, asset_name, False)

            unreal.AssetToolsHelpers.get_asset_tools().import_asset_tasks([task])  # noqa: E501

            # Create Asset Container
            unreal_pipeline.create_container(
                container=container_name, path=asset_dir)

        product_type = context["product"]["productType"]
        data = {
            "schema": "ayon:container-2.0",
            "id": AYON_CONTAINER_ID,
            "namespace": asset_dir,
            "container_name": container_name,
            "folder_path": folder_path,
            "asset_name": asset_name,
            "loader": str(self.__class__.__name__),
            "representation": context["representation"]["id"],
            "parent": context["representation"]["versionId"],
            "product_type": product_type,
            # TODO these should probably be removed
            "asset": folder_path,
            "family": product_type,
        }
        unreal_pipeline.imprint(f"{asset_dir}/{container_name}", data)

        asset_content = unreal.EditorAssetLibrary.list_assets(
            asset_dir, recursive=True, include_folder=True
        )

        for a in asset_content:
            unreal.EditorAssetLibrary.save_asset(a)

        return asset_content

    def update(self, container, context):
        repre_entity = context["representation"]
        name = container["asset_name"]
        source_path = get_representation_path(repre_entity)
        destination_path = container["namespace"]

        task = self.get_task(source_path, destination_path, name, True)

        # import the Alembic cache and replace the existing data
        unreal.AssetToolsHelpers.get_asset_tools().import_asset_tasks([task])

        container_path = f'{container["namespace"]}/{container["objectName"]}'
        # update metadata
        unreal_pipeline.imprint(
            container_path,
            {
                "representation": repre_entity["id"],
                "parent": repre_entity["versionId"],
            })

        asset_content = unreal.EditorAssetLibrary.list_assets(
            destination_path, recursive=True, include_folder=True
        )

        for a in asset_content:
            unreal.EditorAssetLibrary.save_asset(a)

    def remove(self, container):
        path = container["namespace"]
        parent_path = os.path.dirname(path)

        unreal.EditorAssetLibrary.delete_directory(path)

        asset_content = unreal.EditorAssetLibrary.list_assets(
            parent_path, recursive=False
        )

        if len(asset_content) == 0:
            unreal.EditorAssetLibrary.delete_directory(parent_path)
@ -1,20 +0,0 @@
# -*- coding: utf-8 -*-
"""Collect current project path."""
import unreal  # noqa
import pyblish.api


class CollectUnrealCurrentFile(pyblish.api.ContextPlugin):
    """Inject the current working file into context."""

    order = pyblish.api.CollectorOrder - 0.5
    label = "Unreal Current File"
    hosts = ['unreal']

    def process(self, context):
        """Inject the current working file."""
        current_file = unreal.Paths.get_project_file_path()
        context.data['currentFile'] = current_file

        assert current_file != '', "Current file is empty. " \
            "Save the file before continuing."
@ -1,46 +0,0 @@
import unreal

import pyblish.api


class CollectInstanceMembers(pyblish.api.InstancePlugin):
    """Collect members of an instance.

    This collector gathers the assets, for the families that support
    including them as External Data, and adds them to the instance
    as members.
    """

    order = pyblish.api.CollectorOrder + 0.1
    hosts = ["unreal"]
    families = ["camera", "look", "unrealStaticMesh", "uasset"]
    label = "Collect Instance Members"

    def process(self, instance):
        """Collect members of instance."""
        self.log.info("Collecting instance members")

        ar = unreal.AssetRegistryHelpers.get_asset_registry()

        inst_path = instance.data.get('instance_path')
        inst_name = inst_path.split('/')[-1]

        pub_instance = ar.get_asset_by_object_path(
            f"{inst_path}.{inst_name}").get_asset()

        if not pub_instance:
            self.log.error(f"{inst_path}.{inst_name}")
            raise RuntimeError(f"Instance {instance} not found.")

        if not pub_instance.get_editor_property("add_external_assets"):
            # No external assets in the instance
            return

        assets = pub_instance.get_editor_property('asset_data_external')

        members = [asset.get_path_name() for asset in assets]

        self.log.debug(f"Members: {members}")

        instance.data["members"] = members
@ -1,24 +0,0 @@
import pyblish.api


class CollectRemoveMarked(pyblish.api.ContextPlugin):
    """Remove marked instances.

    Removes instances that have 'remove' set in their instance.data.
    """

    order = pyblish.api.CollectorOrder + 0.499
    label = 'Remove Marked Instances'

    def process(self, context):
        self.log.debug(context)
        instances_to_remove = []
        for instance in context:
            if instance.data.get('remove'):
                instances_to_remove.append(instance)

        for instance in instances_to_remove:
            context.remove(instance)
@ -1,116 +0,0 @@
from pathlib import Path

import unreal
import pyblish.api

from ayon_core.pipeline import get_current_project_name
from ayon_core.pipeline import Anatomy
from ayon_unreal.api import pipeline


class CollectRenderInstances(pyblish.api.InstancePlugin):
    """This collector will try to find all the rendered frames."""

    order = pyblish.api.CollectorOrder
    hosts = ["unreal"]
    families = ["render"]
    label = "Collect Render Instances"

    def process(self, instance):
        self.log.debug("Preparing Rendering Instances")

        context = instance.context

        data = instance.data
        data['remove'] = True

        ar = unreal.AssetRegistryHelpers.get_asset_registry()

        sequence = ar.get_asset_by_object_path(
            data.get('sequence')).get_asset()

        sequences = [{
            "sequence": sequence,
            "output": data.get('output'),
            "frame_range": (
                data.get('frameStart'), data.get('frameEnd'))
        }]

        # Note: appending to `sequences` while iterating it walks nested
        # subsequences breadth-first (a small illustration follows this
        # plugin).
        for s in sequences:
            self.log.debug(f"Processing: {s.get('sequence').get_name()}")
            subscenes = pipeline.get_subsequences(s.get('sequence'))

            if subscenes:
                for ss in subscenes:
                    sequences.append({
                        "sequence": ss.get_sequence(),
                        "output": (f"{s.get('output')}/"
                                   f"{ss.get_sequence().get_name()}"),
                        "frame_range": (
                            ss.get_start_frame(), ss.get_end_frame() - 1)
                    })
            else:
                # Avoid creating instances for camera sequences
                if "_camera" not in s.get('sequence').get_name():
                    seq = s.get('sequence')
                    seq_name = seq.get_name()

                    product_type = "render"
                    new_product_name = f"{data.get('productName')}_{seq_name}"
                    new_instance = context.create_instance(
                        new_product_name
                    )
                    new_instance[:] = seq_name

                    new_data = new_instance.data

                    new_data["folderPath"] = f"/{s.get('output')}"
                    new_data["setMembers"] = seq_name
                    new_data["productName"] = new_product_name
                    new_data["productType"] = product_type
                    new_data["family"] = product_type
                    new_data["families"] = [product_type, "review"]
                    new_data["parent"] = data.get("parent")
                    new_data["level"] = data.get("level")
                    new_data["output"] = s.get('output')
                    new_data["fps"] = seq.get_display_rate().numerator
                    new_data["frameStart"] = int(s.get('frame_range')[0])
                    new_data["frameEnd"] = int(s.get('frame_range')[1])
                    new_data["sequence"] = seq.get_path_name()
                    new_data["master_sequence"] = data["master_sequence"]
                    new_data["master_level"] = data["master_level"]

                    self.log.debug(f"new instance data: {new_data}")

                    try:
                        project = get_current_project_name()
                        anatomy = Anatomy(project)
                        root = anatomy.roots['renders']
                    except Exception as e:
                        raise Exception(
                            "Could not find render root "
                            "in anatomy settings.") from e

                    render_dir = f"{root}/{project}/{s.get('output')}"
                    render_path = Path(render_dir)

                    frames = []

                    for x in render_path.iterdir():
                        if x.is_file() and x.suffix == '.png':
                            frames.append(str(x.name))

                    if "representations" not in new_instance.data:
                        new_instance.data["representations"] = []

                    repr = {
                        'frameStart': instance.data["frameStart"],
                        'frameEnd': instance.data["frameEnd"],
                        'name': 'png',
                        'ext': 'png',
                        'files': frames,
                        'stagingDir': render_dir,
                        'tags': ['review']
                    }
                    new_instance.data["representations"].append(repr)
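Editor's note: the subsequence walk above appends to the list being iterated, which is a compact breadth-first traversal. A minimal, self-contained illustration of the pattern (not part of this diff):

# Walk a nested structure by appending children to the list being iterated.
tree = {"root": ["shot_a", "shot_b"], "shot_a": ["shot_a_sub"]}
queue = ["root"]
visited = []
for name in queue:  # the list grows while we iterate it
    visited.append(name)
    queue.extend(tree.get(name, []))
# visited == ["root", "shot_a", "shot_b", "shot_a_sub"]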
@ -1,88 +0,0 @@
# -*- coding: utf-8 -*-
"""Extract camera from Unreal."""
import os

import unreal

from ayon_core.pipeline import publish
from ayon_unreal.api.pipeline import UNREAL_VERSION


class ExtractCamera(publish.Extractor):
    """Extract a camera."""

    label = "Extract Camera"
    hosts = ["unreal"]
    families = ["camera"]
    optional = True

    def process(self, instance):
        ar = unreal.AssetRegistryHelpers.get_asset_registry()

        # Define extract output file path
        staging_dir = self.staging_dir(instance)
        fbx_filename = "{}.fbx".format(instance.name)

        # Perform extraction
        self.log.info("Performing extraction..")

        # Check if the loaded level is the same as the instance's
        if UNREAL_VERSION.major == 5:
            world = unreal.UnrealEditorSubsystem().get_editor_world()
        else:
            world = unreal.EditorLevelLibrary.get_editor_world()
        current_level = world.get_path_name()
        assert current_level == instance.data.get("level"), \
            "Wrong level loaded"

        for member in instance.data.get('members'):
            data = ar.get_asset_by_object_path(member)
            if UNREAL_VERSION.major == 5:
                is_level_sequence = (
                    data.asset_class_path.asset_name == "LevelSequence")
            else:
                is_level_sequence = (data.asset_class == "LevelSequence")

            if is_level_sequence:
                sequence = data.get_asset()
                if UNREAL_VERSION.major == 5 and UNREAL_VERSION.minor >= 1:
                    params = unreal.SequencerExportFBXParams(
                        world=world,
                        root_sequence=sequence,
                        sequence=sequence,
                        bindings=sequence.get_bindings(),
                        master_tracks=sequence.get_master_tracks(),
                        fbx_file_name=os.path.join(staging_dir, fbx_filename)
                    )
                    unreal.SequencerTools.export_level_sequence_fbx(params)
                elif UNREAL_VERSION.major == 4 and UNREAL_VERSION.minor == 26:
                    unreal.SequencerTools.export_fbx(
                        world,
                        sequence,
                        sequence.get_bindings(),
                        unreal.FbxExportOption(),
                        os.path.join(staging_dir, fbx_filename)
                    )
                else:
                    # Unreal 5.0 or 4.27
                    unreal.SequencerTools.export_level_sequence_fbx(
                        world,
                        sequence,
                        sequence.get_bindings(),
                        unreal.FbxExportOption(),
                        os.path.join(staging_dir, fbx_filename)
                    )

        if not os.path.isfile(os.path.join(staging_dir, fbx_filename)):
            raise RuntimeError("Failed to extract camera")

        if "representations" not in instance.data:
            instance.data["representations"] = []

        fbx_representation = {
            'name': 'fbx',
            'ext': 'fbx',
            'files': fbx_filename,
            "stagingDir": staging_dir,
        }
        instance.data["representations"].append(fbx_representation)
@ -1,112 +0,0 @@
# -*- coding: utf-8 -*-
import os
import json
import math

import unreal
from unreal import EditorLevelLibrary as ell
from unreal import EditorAssetLibrary as eal
import ayon_api

from ayon_core.pipeline import publish


class ExtractLayout(publish.Extractor):
    """Extract a layout."""

    label = "Extract Layout"
    hosts = ["unreal"]
    families = ["layout"]
    optional = True

    def process(self, instance):
        # Define extract output file path
        staging_dir = self.staging_dir(instance)

        # Perform extraction
        self.log.info("Performing extraction..")

        # Check if the loaded level is the same as the instance's
        current_level = ell.get_editor_world().get_path_name()
        assert current_level == instance.data.get("level"), \
            "Wrong level loaded"

        # One element per actor; an illustrative element follows this file
        json_data = []
        project_name = instance.context.data["projectName"]

        for member in instance[:]:
            actor = ell.get_actor_reference(member)
            mesh = None

            # Check the type of the mesh
            if actor.get_class().get_name() == 'SkeletalMeshActor':
                mesh = actor.skeletal_mesh_component.skeletal_mesh
            elif actor.get_class().get_name() == 'StaticMeshActor':
                mesh = actor.static_mesh_component.static_mesh

            if mesh:
                # Search the reference to the Asset Container for the object
                path = unreal.Paths.get_path(mesh.get_path_name())
                filter = unreal.ARFilter(
                    class_names=["AyonAssetContainer"], package_paths=[path])
                ar = unreal.AssetRegistryHelpers.get_asset_registry()
                try:
                    asset_container = ar.get_assets(filter)[0].get_asset()
                except IndexError:
                    self.log.error("AssetContainer not found.")
                    return

                parent_id = eal.get_metadata_tag(asset_container, "parent")
                family = eal.get_metadata_tag(asset_container, "family")

                self.log.info("Parent: {}".format(parent_id))
                blend = ayon_api.get_representation_by_name(
                    project_name, "blend", parent_id, fields={"id"}
                )
                blend_id = blend["id"]

                json_element = {}
                json_element["reference"] = str(blend_id)
                json_element["family"] = family
                json_element["product_type"] = family
                json_element["instance_name"] = actor.get_name()
                json_element["asset_name"] = mesh.get_name()
                import_data = mesh.get_editor_property("asset_import_data")
                json_element["file_path"] = import_data.get_first_filename()
                transform = actor.get_actor_transform()

                # The X translation and Z rotation are flipped, presumably
                # to convert Unreal's left-handed coordinates for the
                # right-handed consumer of this layout JSON.
                json_element["transform"] = {
                    "translation": {
                        "x": -transform.translation.x,
                        "y": transform.translation.y,
                        "z": transform.translation.z
                    },
                    "rotation": {
                        "x": math.radians(transform.rotation.euler().x),
                        "y": math.radians(transform.rotation.euler().y),
                        "z": math.radians(180.0 - transform.rotation.euler().z)
                    },
                    "scale": {
                        "x": transform.scale3d.x,
                        "y": transform.scale3d.y,
                        "z": transform.scale3d.z
                    }
                }
                json_data.append(json_element)

        json_filename = "{}.json".format(instance.name)
        json_path = os.path.join(staging_dir, json_filename)

        with open(json_path, "w+") as file:
            json.dump(json_data, fp=file, indent=2)

        if "representations" not in instance.data:
            instance.data["representations"] = []

        json_representation = {
            'name': 'json',
            'ext': 'json',
            'files': json_filename,
            "stagingDir": staging_dir,
        }
        instance.data["representations"].append(json_representation)
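Editor's note: a purely illustrative example (not part of this diff) of one element of the layout JSON the extractor above writes; the keys come from the code, the values are made up:

{
    "reference": "<blend representation id>",
    "family": "model",
    "product_type": "model",
    "instance_name": "tree_large_01",
    "asset_name": "tree_large",
    "file_path": "C:/resources/tree_large.fbx",
    "transform": {
        "translation": {"x": -120.0, "y": 45.0, "z": 0.0},
        "rotation": {"x": 0.0, "y": 0.0, "z": 3.1416},
        "scale": {"x": 1.0, "y": 1.0, "z": 1.0}
    }
}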
@ -1,121 +0,0 @@
# -*- coding: utf-8 -*-
import json
import os

import unreal
from unreal import MaterialEditingLibrary as mat_lib

from ayon_core.pipeline import publish


class ExtractLook(publish.Extractor):
    """Extract look."""

    label = "Extract Look"
    hosts = ["unreal"]
    families = ["look"]
    optional = True

    def process(self, instance):
        # Define extract output file path
        staging_dir = self.staging_dir(instance)
        resources_dir = instance.data["resourcesDir"]

        ar = unreal.AssetRegistryHelpers.get_asset_registry()

        transfers = []

        json_data = []

        for member in instance:
            asset = ar.get_asset_by_object_path(member)
            obj = asset.get_asset()

            name = asset.get_editor_property('asset_name')

            json_element = {'material': str(name)}

            material_obj = obj.get_editor_property('static_materials')[0]
            material = material_obj.material_interface

            base_color = mat_lib.get_material_property_input_node(
                material, unreal.MaterialProperty.MP_BASE_COLOR)

            base_color_name = base_color.get_editor_property('parameter_name')

            texture = mat_lib.get_material_default_texture_parameter_value(
                material, base_color_name)

            if texture:
                # Export Texture
                tga_filename = f"{instance.name}_{name}_texture.tga"

                tga_exporter = unreal.TextureExporterTGA()

                tga_export_task = unreal.AssetExportTask()

                tga_export_task.set_editor_property('exporter', tga_exporter)
                tga_export_task.set_editor_property('automated', True)
                tga_export_task.set_editor_property('object', texture)
                tga_export_task.set_editor_property(
                    'filename', f"{staging_dir}/{tga_filename}")
                tga_export_task.set_editor_property('prompt', False)
                tga_export_task.set_editor_property('selected', False)

                unreal.Exporter.run_asset_export_task(tga_export_task)

                json_element['tga_filename'] = tga_filename

                transfers.append((
                    f"{staging_dir}/{tga_filename}",
                    f"{resources_dir}/{tga_filename}"))

            fbx_filename = f"{instance.name}_{name}.fbx"

            fbx_exporter = unreal.StaticMeshExporterFBX()
            fbx_exporter.set_editor_property('text', False)

            options = unreal.FbxExportOption()
            options.set_editor_property('ascii', False)
            options.set_editor_property('collision', False)

            task = unreal.AssetExportTask()
            task.set_editor_property('exporter', fbx_exporter)
            task.set_editor_property('options', options)
            task.set_editor_property('automated', True)
            # Export the mesh asset itself (the original passed the `object`
            # builtin here, which was a bug)
            task.set_editor_property('object', obj)
            task.set_editor_property(
                'filename', f"{staging_dir}/{fbx_filename}")
            task.set_editor_property('prompt', False)
            task.set_editor_property('selected', False)

            unreal.Exporter.run_asset_export_task(task)

            json_element['fbx_filename'] = fbx_filename

            transfers.append((
                f"{staging_dir}/{fbx_filename}",
                f"{resources_dir}/{fbx_filename}"))

            json_data.append(json_element)

        json_filename = f"{instance.name}.json"
        json_path = os.path.join(staging_dir, json_filename)

        with open(json_path, "w+") as file:
            json.dump(json_data, fp=file, indent=2)

        if "transfers" not in instance.data:
            instance.data["transfers"] = []
        if "representations" not in instance.data:
            instance.data["representations"] = []

        json_representation = {
            'name': 'json',
            'ext': 'json',
            'files': json_filename,
            "stagingDir": staging_dir,
        }

        instance.data["representations"].append(json_representation)
        instance.data["transfers"].extend(transfers)
@ -1,50 +0,0 @@
from pathlib import Path
import shutil

import unreal

from ayon_core.pipeline import publish


class ExtractUAsset(publish.Extractor):
    """Extract a UAsset."""

    label = "Extract UAsset"
    hosts = ["unreal"]
    families = ["uasset", "umap"]
    optional = True

    def process(self, instance):
        extension = (
            "umap" if "umap" in instance.data.get("families") else "uasset")
        ar = unreal.AssetRegistryHelpers.get_asset_registry()

        self.log.debug("Performing extraction..")
        staging_dir = self.staging_dir(instance)

        members = instance.data.get("members", [])

        if not members:
            raise RuntimeError("No members found in instance.")

        # UAsset publishing supports only one member
        obj = members[0]

        asset = ar.get_asset_by_object_path(obj).get_asset()
        sys_path = unreal.SystemLibrary.get_system_path(asset)
        filename = Path(sys_path).name

        shutil.copy(sys_path, staging_dir)

        self.log.info(f"instance.data: {instance.data}")

        if "representations" not in instance.data:
            instance.data["representations"] = []

        representation = {
            "name": extension,
            "ext": extension,
            "files": filename,
            "stagingDir": staging_dir,
        }
        instance.data["representations"].append(representation)
@ -1,41 +0,0 @@
import unreal

import pyblish.api


class ValidateNoDependencies(pyblish.api.InstancePlugin):
    """Ensure that the uasset has no dependencies.

    The uasset is checked for dependencies. If there are any, the instance
    cannot be published.
    """

    order = pyblish.api.ValidatorOrder
    label = "Check no dependencies"
    families = ["uasset"]
    hosts = ["unreal"]
    optional = True

    def process(self, instance):
        ar = unreal.AssetRegistryHelpers.get_asset_registry()
        all_dependencies = []

        for obj in instance[:]:
            asset = ar.get_asset_by_object_path(obj)
            dependencies = ar.get_dependencies(
                asset.package_name,
                unreal.AssetRegistryDependencyOptions(
                    include_soft_package_references=False,
                    include_hard_package_references=True,
                    include_searchable_names=False,
                    include_soft_management_references=False,
                    include_hard_management_references=False
                ))
            if dependencies:
                for dep in dependencies:
                    if str(dep).startswith("/Game/"):
                        all_dependencies.append(str(dep))

        if all_dependencies:
            raise RuntimeError(
                f"Dependencies found: {all_dependencies}")
@ -1,83 +0,0 @@
import clique
import os
import re

import pyblish.api
from ayon_core.pipeline.publish import PublishValidationError


class ValidateSequenceFrames(pyblish.api.InstancePlugin):
    """Ensure the sequence of frames is complete.

    The files found in the folder are checked against the frameStart and
    frameEnd of the instance. If the first or last file does not
    correspond to the first or last frame, the instance is flagged as
    invalid.
    """

    order = pyblish.api.ValidatorOrder
    label = "Validate Sequence Frames"
    families = ["render"]
    hosts = ["unreal"]
    optional = True

    def process(self, instance):
        representations = instance.data.get("representations")
        folder_attributes = (
            instance.data
            .get("folderEntity", {})
            .get("attrib", {})
        )
        for repr in representations:
            repr_files = repr["files"]
            if isinstance(repr_files, str):
                continue

            ext = repr.get("ext")
            if not ext:
                _, ext = os.path.splitext(repr_files[0])
            elif not ext.startswith("."):
                ext = ".{}".format(ext)
            pattern = r"\D?(?P<index>(?P<padding>0*)\d+){}$".format(
                re.escape(ext))
            patterns = [pattern]

            # clique groups the file names into frame collections
            # (a sketch of its behaviour follows this file)
            collections, remainder = clique.assemble(
                repr["files"], minimum_items=1, patterns=patterns)

            if remainder:
                raise PublishValidationError(
                    "Some files have been found outside a sequence. "
                    f"Invalid files: {remainder}")
            if not collections:
                raise PublishValidationError(
                    "We have been unable to find a sequence in the "
                    "files. Please ensure the files are named "
                    "appropriately. "
                    f"Files: {repr_files}")
            if len(collections) > 1:
                raise PublishValidationError(
                    "Multiple collections detected. There should be a single "
                    "collection per representation. "
                    f"Collections identified: {collections}")

            collection = collections[0]
            frames = list(collection.indexes)

            if instance.data.get("slate"):
                # Slate is not part of the frame range
                frames = frames[1:]

            current_range = (frames[0], frames[-1])
            required_range = (folder_attributes["clipIn"],
                              folder_attributes["clipOut"])

            if current_range != required_range:
                raise PublishValidationError(
                    f"Invalid frame range: {current_range} - "
                    f"expected: {required_range}")

            missing = collection.holes().indexes
            if missing:
                raise PublishValidationError(
                    "Missing frames have been detected. "
                    f"Missing frames: {missing}")
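Editor's note: a minimal sketch (not part of this diff) of how `clique.assemble` behaves for the validator above; the filenames are made up:

import clique

collections, remainder = clique.assemble(
    ["render.0001.png", "render.0002.png", "render.0004.png", "notes.txt"],
    minimum_items=1,
)
# remainder -> ["notes.txt"]: files that fit no sequence pattern
collection = collections[0]
sorted(collection.indexes)          # [1, 2, 4]
sorted(collection.holes().indexes)  # [3]: the missing frame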
@ -1,434 +0,0 @@
import json
import os
import platform
import re
import subprocess
import tempfile
from distutils import dir_util
from distutils.dir_util import copy_tree
from pathlib import Path
from typing import List, Union

from qtpy import QtCore

import ayon_unreal.lib as ue_lib
from ayon_core.settings import get_project_settings


def parse_comp_progress(line: str, progress_signal: QtCore.Signal(int)):
    match = re.search(r"\[[1-9]+/[0-9]+]", line)
    if match is not None:
        split: list[str] = match.group().split("/")
        curr: float = float(split[0][1:])
        total: float = float(split[1][:-1])
        progress_signal.emit(int((curr / total) * 100.0))


def parse_prj_progress(line: str, progress_signal: QtCore.Signal(int)):
    match = re.search("@progress", line)
    if match is not None:
        percent_match = re.search(r"\d{1,3}", line)
        progress_signal.emit(int(percent_match.group()))


def retrieve_exit_code(line: str):
    match = re.search(r"ExitCode=\d+", line)
    if match is not None:
        split: list[str] = match.group().split("=")
        return int(split[1])

    return None
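# Editorial sketch (not in the original file): parse_comp_progress() expects
# build-tool output lines such as "[3/12] Compile MyModule.cpp":
#
#     >>> import re
#     >>> re.search(r"\[[1-9]+/[0-9]+]", "[3/12] Compile MyModule.cpp").group()
#     '[3/12]'
#
# which makes it emit int((3 / 12) * 100.0) == 25 on its progress signal.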
class UEWorker(QtCore.QObject):
    finished = QtCore.Signal(str)
    failed = QtCore.Signal(str, int)
    progress = QtCore.Signal(int)
    log = QtCore.Signal(str)

    engine_path: Path = None
    env = None

    def execute(self):
        raise NotImplementedError("Please implement this method!")

    def run(self):
        try:
            self.execute()
        except Exception as e:
            import traceback
            self.log.emit(str(e))
            self.log.emit(traceback.format_exc())
            self.failed.emit(str(e), 1)
            raise e


class UEProjectGenerationWorker(UEWorker):
    stage_begin = QtCore.Signal(str)

    ue_version: str = None
    project_name: str = None
    project_dir: Path = None
    dev_mode = False

    def setup(self, ue_version: str,
              project_name: str,
              unreal_project_name,
              engine_path: Path,
              project_dir: Path,
              dev_mode: bool = False,
              env: dict = None):
        """Set up the worker with the necessary parameters.

        Args:
            ue_version (str): Unreal Engine version.
            project_name (str): Name of the project in AYON.
            unreal_project_name (str): Name of the project in Unreal.
            engine_path (Path): Path to the Unreal Engine.
            project_dir (Path): Path to the project directory.
            dev_mode (bool, optional): Whether to run the project in dev mode.
                Defaults to False.
            env (dict, optional): Environment variables. Defaults to None.

        """
        self.ue_version = ue_version
        self.project_dir = project_dir
        self.env = env or os.environ

        preset = get_project_settings(project_name)["unreal"]["project_setup"]

        if dev_mode or preset["dev_mode"]:
            self.dev_mode = True

        self.project_name = unreal_project_name
        self.engine_path = engine_path

    def execute(self):
        # engine_path should be the location of the UE_X.X folder

        ue_editor_exe = ue_lib.get_editor_exe_path(self.engine_path,
                                                   self.ue_version)
        cmdlet_project = ue_lib.get_path_to_cmdlet_project(self.ue_version)
        project_file = self.project_dir / f"{self.project_name}.uproject"

        print("--- Generating a new project ...")
        # 1st stage
        stage_count = 2
        if self.dev_mode:
            stage_count = 4

        self.stage_begin.emit(
            ("Generating a new UE project ... 1 out of "
             f"{stage_count}"))

        # Need to copy the commandlet project to a temporary folder where
        # users don't need admin rights to write to.
        cmdlet_tmp = tempfile.TemporaryDirectory()
        cmdlet_filename = cmdlet_project.name
        cmdlet_dir = cmdlet_project.parent.as_posix()
        cmdlet_tmp_name = Path(cmdlet_tmp.name)
        cmdlet_tmp_file = cmdlet_tmp_name.joinpath(cmdlet_filename)
        copy_tree(
            cmdlet_dir,
            cmdlet_tmp_name.as_posix())

        commandlet_cmd = [
            f"{ue_editor_exe.as_posix()}",
            f"{cmdlet_tmp_file.as_posix()}",
            "-run=AyonGenerateProject",
            f"{project_file.resolve().as_posix()}",
        ]

        if self.dev_mode:
            commandlet_cmd.append("-GenerateCode")

        gen_process = subprocess.Popen(commandlet_cmd,
                                       stdout=subprocess.PIPE,
                                       stderr=subprocess.PIPE)

        for line in gen_process.stdout:
            decoded_line = line.decode(errors="replace")
            print(decoded_line, end="")
            self.log.emit(decoded_line)
        gen_process.stdout.close()
        return_code = gen_process.wait()

        cmdlet_tmp.cleanup()

        if return_code and return_code != 0:
            msg = (
                f"Failed to generate {self.project_name} "
                f"project! Exited with return code {return_code}"
            )
            self.failed.emit(msg, return_code)
            raise RuntimeError(msg)

        print("--- Project has been generated successfully.")
        self.stage_begin.emit(
            (f"Writing the Engine ID of the built UE ... 1"
             f" out of {stage_count}"))

        if not project_file.is_file():
            msg = ("Failed to write the Engine ID into the .uproject file! "
                   "Can not read!")
            # `failed` carries (message, return code)
            self.failed.emit(msg, 1)
            raise RuntimeError(msg)

        with open(project_file.as_posix(), mode="r+") as pf:
            pf_json = json.load(pf)
            pf_json["EngineAssociation"] = ue_lib.get_build_id(
                self.engine_path,
                self.ue_version
            )
            print(pf_json["EngineAssociation"])
            pf.seek(0)
            json.dump(pf_json, pf, indent=4)
            pf.truncate()
            print("--- Engine ID has been written into the project file")

        self.progress.emit(90)
        if self.dev_mode:
            # 2nd stage
            self.stage_begin.emit(
                (f"Generating project files ... 2 out of "
                 f"{stage_count}"))

            self.progress.emit(0)
            ubt_path = ue_lib.get_path_to_ubt(self.engine_path,
                                              self.ue_version)

            arch = "Win64"
            if platform.system().lower() == "windows":
                arch = "Win64"
            elif platform.system().lower() == "linux":
                arch = "Linux"
            elif platform.system().lower() == "darwin":
                # we need to test this out
                arch = "Mac"

            gen_prj_files_cmd = [ubt_path.as_posix(),
                                 "-projectfiles",
                                 f"-project={project_file}",
                                 "-progress"]
            gen_proc = subprocess.Popen(gen_prj_files_cmd,
                                        stdout=subprocess.PIPE,
                                        stderr=subprocess.PIPE)
            for line in gen_proc.stdout:
                decoded_line: str = line.decode(errors="replace")
                print(decoded_line, end="")
                self.log.emit(decoded_line)
                parse_prj_progress(decoded_line, self.progress)

            gen_proc.stdout.close()
            return_code = gen_proc.wait()

            if return_code and return_code != 0:
                msg = ("Failed to generate project files! "
                       f"Exited with return code {return_code}")
                self.failed.emit(msg, return_code)
                raise RuntimeError(msg)

            self.stage_begin.emit(
                f"Building the project ... 3 out of {stage_count}")
            self.progress.emit(0)
            # 3rd stage
            build_prj_cmd = [ubt_path.as_posix(),
                             f"-ModuleWithSuffix={self.project_name},3555",
                             arch,
                             "Development",
                             "-TargetType=Editor",
                             f"-Project={project_file}",
                             f"{project_file}",
                             "-IgnoreJunk"]

            build_prj_proc = subprocess.Popen(build_prj_cmd,
                                              stdout=subprocess.PIPE,
                                              stderr=subprocess.PIPE)
            for line in build_prj_proc.stdout:
                decoded_line: str = line.decode(errors="replace")
                print(decoded_line, end="")
                self.log.emit(decoded_line)
                parse_comp_progress(decoded_line, self.progress)

            build_prj_proc.stdout.close()
            return_code = build_prj_proc.wait()

            if return_code and return_code != 0:
                msg = ("Failed to build project! "
                       f"Exited with return code {return_code}")
                self.failed.emit(msg, return_code)
                raise RuntimeError(msg)

        # ensure we have PySide2/6 installed in the engine

        self.progress.emit(0)
        self.stage_begin.emit(
            (f"Checking Qt bindings installation ... {stage_count}"
             f" out of {stage_count}"))
        python_path = None
        if platform.system().lower() == "windows":
            python_path = self.engine_path / ("Engine/Binaries/ThirdParty/"
                                              "Python3/Win64/python.exe")

        if platform.system().lower() == "linux":
            python_path = self.engine_path / ("Engine/Binaries/ThirdParty/"
                                              "Python3/Linux/bin/python3")

        if platform.system().lower() == "darwin":
            python_path = self.engine_path / ("Engine/Binaries/ThirdParty/"
                                              "Python3/Mac/bin/python3")

        if not python_path:
            msg = "Unsupported platform"
            self.failed.emit(msg, 1)
            raise NotImplementedError(msg)
        if not python_path.exists():
            msg = f"Unreal Python not found at {python_path}"
            self.failed.emit(msg, 1)
            raise RuntimeError(msg)

        pyside_version = "PySide2"
        ue_version = self.ue_version.split(".")
        if int(ue_version[0]) == 5 and int(ue_version[1]) >= 4:
            # Use PySide6 6.6.3 because 6.7.0 had a bug
            # - 'QPushButton' can't be added to 'QBoxLayout'
            pyside_version = "PySide6==6.6.3"

        site_packages_prefix = python_path.parent.as_posix()

        pyside_cmd = [
            python_path.as_posix(),
            "-m", "pip",
            "install",
            "--ignore-installed",
            pyside_version,
        ]

        if platform.system().lower() == "windows":
            pyside_cmd += ["--target", site_packages_prefix]

        print(f"--- Installing {pyside_version} ...")
        print(" ".join(pyside_cmd))

        pyside_install = subprocess.Popen(pyside_cmd,
                                          stdout=subprocess.PIPE,
                                          stderr=subprocess.PIPE)

        for line in pyside_install.stdout:
            decoded_line: str = line.decode(errors="replace")
            print(decoded_line, end="")
            self.log.emit(decoded_line)

        pyside_install.stdout.close()
        return_code = pyside_install.wait()

        if return_code and return_code != 0:
            msg = (f"Failed to create the project! {return_code} "
                   f"The installation of {pyside_version} has failed!")
            self.failed.emit(msg, return_code)
            raise RuntimeError(msg)

        self.progress.emit(100)
        self.finished.emit("Project successfully built!")


class UEPluginInstallWorker(UEWorker):
    installing = QtCore.Signal(str)

    def setup(self, engine_path: Path, env: dict = None):
        self.engine_path = engine_path
        self.env = env or os.environ

    def _build_and_move_plugin(self, plugin_build_path: Path):
        uat_path: Path = ue_lib.get_path_to_uat(self.engine_path)
        src_plugin_dir = Path(self.env.get("AYON_UNREAL_PLUGIN", ""))

        if not os.path.isdir(src_plugin_dir):
            msg = "Path to the integration plugin is null!"
            self.failed.emit(msg, 1)
            raise RuntimeError(msg)

        if not uat_path.is_file():
            msg = "Building failed! Path to UAT is invalid!"
            self.failed.emit(msg, 1)
            raise RuntimeError(msg)

        temp_dir: Path = src_plugin_dir.parent / "Temp"
        temp_dir.mkdir(exist_ok=True)
        uplugin_path: Path = src_plugin_dir / "Ayon.uplugin"

        # in order to successfully build the plugin,
        # it must be built outside the Engine directory and then moved
        build_plugin_cmd: List[str] = [f"{uat_path.as_posix()}",
                                       "BuildPlugin",
                                       f"-Plugin={uplugin_path.as_posix()}",
                                       f"-Package={temp_dir.as_posix()}"]

        build_proc = subprocess.Popen(build_plugin_cmd,
                                      stdout=subprocess.PIPE,
                                      stderr=subprocess.PIPE)
        return_code: Union[None, int] = None
        for line in build_proc.stdout:
            decoded_line: str = line.decode(errors="replace")
            print(decoded_line, end="")
            self.log.emit(decoded_line)
            if return_code is None:
                return_code = retrieve_exit_code(decoded_line)
            parse_comp_progress(decoded_line, self.progress)

        build_proc.stdout.close()
        build_proc.wait()

        if return_code and return_code != 0:
            msg = ("Failed to build plugin"
                   f" project! Exited with return code {return_code}")
            dir_util.remove_tree(temp_dir.as_posix())
            self.failed.emit(msg, return_code)
            raise RuntimeError(msg)

        # Copy the contents of the 'Temp' dir into the
        # 'Ayon' directory in the engine
        dir_util.copy_tree(temp_dir.as_posix(),
                           plugin_build_path.as_posix())

        # We need to also copy the config folder.
        # The UAT doesn't include the Config folder in the build
        plugin_install_config_path: Path = plugin_build_path / "Config"
        src_plugin_config_path = src_plugin_dir / "Config"

        dir_util.copy_tree(src_plugin_config_path.as_posix(),
                           plugin_install_config_path.as_posix())

        dir_util.remove_tree(temp_dir.as_posix())

    def execute(self):
        src_plugin_dir = Path(self.env.get("AYON_UNREAL_PLUGIN", ""))

        if not os.path.isdir(src_plugin_dir):
            msg = "Path to the integration plugin is null!"
            self.failed.emit(msg, 1)
            raise RuntimeError(msg)

        # Create a path to the plugin in the engine
        op_plugin_path = self.engine_path / "Engine/Plugins/Marketplace" \
                                            "/Ayon"

        if not op_plugin_path.is_dir():
            self.installing.emit("Installing and building the plugin ...")
            op_plugin_path.mkdir(parents=True, exist_ok=True)

            engine_plugin_config_path = op_plugin_path / "Config"
            engine_plugin_config_path.mkdir(exist_ok=True)

            dir_util._path_created = {}

        if not (op_plugin_path / "Binaries").is_dir() \
                or not (op_plugin_path / "Intermediate").is_dir():
            self.installing.emit("Building the plugin ...")
            print("--- Building the plugin...")

            self._build_and_move_plugin(op_plugin_path)

        self.finished.emit("Plugin successfully installed")
@ -1,5 +0,0 @@
from .splash_screen import SplashScreen

__all__ = (
    "SplashScreen",
)
@ -1,262 +0,0 @@
from qtpy import QtWidgets, QtCore, QtGui
from ayon_core import style, resources


class SplashScreen(QtWidgets.QDialog):
    """Splash screen for executing a process on another thread. It is able
    to inform about the progress of the process and log given information.
    """

    splash_icon = None
    top_label = None
    show_log_btn: QtWidgets.QPushButton = None
    progress_bar = None
    log_text: QtWidgets.QLabel = None
    scroll_area: QtWidgets.QScrollArea = None
    close_btn: QtWidgets.QPushButton = None
    scroll_bar: QtWidgets.QScrollBar = None

    is_log_visible = False
    is_scroll_auto = True

    thread_return_code = None
    q_thread: QtCore.QThread = None

    def __init__(self,
                 window_title: str,
                 splash_icon=None,
                 window_icon=None):
        """
        Args:
            window_title (str): String which sets the window title
            splash_icon (str | bytes | None): A resource (pic) which is used
                for the splash icon
            window_icon (str | bytes | None): A resource (pic) which is used
                for the window's icon
        """
        super(SplashScreen, self).__init__()

        if splash_icon is None:
            splash_icon = resources.get_ayon_icon_filepath()

        if window_icon is None:
            window_icon = resources.get_ayon_icon_filepath()

        self.splash_icon = splash_icon
        self.setWindowIcon(QtGui.QIcon(window_icon))
        self.setWindowTitle(window_title)
        self.init_ui()

    def was_proc_successful(self) -> bool:
        return self.thread_return_code == 0

    def start_thread(self, q_thread: QtCore.QThread):
        """Saves the reference to this thread and starts it.

        Args:
            q_thread (QtCore.QThread): A QThread containing a given worker
                (QtCore.QObject)

        Returns:
            None
        """
        if not q_thread:
            raise RuntimeError("Failed to run a worker thread! "
                               "The thread is null!")

        self.q_thread = q_thread
        self.q_thread.start()

    @QtCore.Slot()
    def quit_and_close(self):
        """Quits the thread and closes the splash screen. Note that this means
        the thread has exited with the return code 0!

        Returns:
            None
        """
        self.thread_return_code = 0
        self.q_thread.quit()

        if not self.q_thread.wait(5000):
            raise RuntimeError("Failed to quit the QThread! "
                               "The deadline has been reached! The thread "
                               "has not finished its execution!")
        self.close()

    @QtCore.Slot()
    def toggle_log(self):
        if self.is_log_visible:
            self.scroll_area.hide()
            width = self.width()
            self.adjustSize()
            self.resize(width, self.height())
        else:
            self.scroll_area.show()
            self.scroll_bar.setValue(self.scroll_bar.maximum())
            self.resize(self.width(), 300)

        self.is_log_visible = not self.is_log_visible

    def show_ui(self):
        """Shows the splash screen. BEWARE THAT THIS FUNCTION IS BLOCKING
        (The execution of code can not proceed further beyond this function
        until the splash screen is closed!)

        Returns:
            None
        """
        self.show()
        self.exec_()

    def init_ui(self):
        self.resize(450, 100)
        self.setMinimumWidth(250)
        self.setStyleSheet(style.load_stylesheet())

        # Top Section
        self.top_label = QtWidgets.QLabel(self)
        self.top_label.setText("Starting process ...")
        self.top_label.setWordWrap(True)

        icon = QtWidgets.QLabel(self)
        icon.setPixmap(QtGui.QPixmap(self.splash_icon))
        icon.setFixedHeight(45)
        icon.setFixedWidth(45)
        icon.setScaledContents(True)

        self.close_btn = QtWidgets.QPushButton(self)
        self.close_btn.setText("Quit")
        self.close_btn.clicked.connect(self.close)
        self.close_btn.setFixedWidth(80)
        self.close_btn.hide()

        self.show_log_btn = QtWidgets.QPushButton(self)
        self.show_log_btn.setText("Show log")
        self.show_log_btn.setFixedWidth(80)
        self.show_log_btn.clicked.connect(self.toggle_log)

        button_layout = QtWidgets.QVBoxLayout()
        button_layout.addWidget(self.show_log_btn)
        button_layout.addWidget(self.close_btn)

        # Progress Bar
        self.progress_bar = QtWidgets.QProgressBar()
        self.progress_bar.setValue(0)
        self.progress_bar.setAlignment(QtCore.Qt.AlignTop)

        # Log Content
        self.scroll_area = QtWidgets.QScrollArea(self)
        self.scroll_area.hide()
        log_widget = QtWidgets.QWidget(self.scroll_area)
        self.scroll_area.setWidgetResizable(True)
        self.scroll_area.setHorizontalScrollBarPolicy(
            QtCore.Qt.ScrollBarAlwaysOn
        )
        self.scroll_area.setVerticalScrollBarPolicy(
            QtCore.Qt.ScrollBarAlwaysOn
        )
        self.scroll_area.setWidget(log_widget)

        self.scroll_bar = self.scroll_area.verticalScrollBar()
        self.scroll_bar.sliderMoved.connect(self.on_scroll)

        self.log_text = QtWidgets.QLabel(self)
        self.log_text.setText('')
        self.log_text.setAlignment(QtCore.Qt.AlignTop)

        log_layout = QtWidgets.QVBoxLayout(log_widget)
        log_layout.addWidget(self.log_text)

        top_layout = QtWidgets.QHBoxLayout()
        top_layout.setAlignment(QtCore.Qt.AlignTop)
        top_layout.addWidget(icon)
        top_layout.addSpacing(10)
        top_layout.addWidget(self.top_label)
        top_layout.addSpacing(10)
        top_layout.addLayout(button_layout)

        main_layout = QtWidgets.QVBoxLayout(self)
        main_layout.addLayout(top_layout)
        main_layout.addSpacing(10)
        main_layout.addWidget(self.progress_bar)
        main_layout.addSpacing(10)
        main_layout.addWidget(self.scroll_area)

        self.setWindowFlags(
            QtCore.Qt.Window
            | QtCore.Qt.CustomizeWindowHint
            | QtCore.Qt.WindowTitleHint
            | QtCore.Qt.WindowMinimizeButtonHint
        )

        desktop_rect = QtWidgets.QApplication.desktop().availableGeometry(self)
        center = desktop_rect.center()
        self.move(
            center.x() - (self.width() * 0.5),
            center.y() - (self.height() * 0.5)
        )

    @QtCore.Slot(int)
    def update_progress(self, value: int):
        self.progress_bar.setValue(value)

    @QtCore.Slot(str)
    def update_top_label_text(self, text: str):
        self.top_label.setText(text)

    @QtCore.Slot(str, str)
    def append_log(self, text: str, end: str = ''):
        """A slot used for receiving log info and appending it to the scroll
        area's content.

        Args:
            text (str): A log text that will be appended to the current one
                in the scroll area.
            end (str): end string which can be appended to the end of the
                given line (for ex. a line break).

        Returns:
            None
        """
        self.log_text.setText(self.log_text.text() + text + end)
        if self.is_scroll_auto:
            self.scroll_bar.setValue(self.scroll_bar.maximum())

    @QtCore.Slot(int)
    def on_scroll(self, position: int):
        """A slot for the vertical scroll bar's movement. This ensures the
        auto-scrolling feature of the scroll area when the scroll bar is at
        its maximum value.

        Args:
            position (int): Position value of the scroll bar.

        Returns:
            None
        """
        if self.scroll_bar.maximum() == position:
            self.is_scroll_auto = True
            return

        self.is_scroll_auto = False

    @QtCore.Slot(str, int)
    def fail(self, text: str, return_code: int = 1):
        """A slot used for signals which can emit when a worker (process)
        has failed. At this moment the splash screen doesn't close by
        itself; it has to be closed by the user.

        Args:
            text (str): A text which can be set to the top label.
            return_code (int): Return code of the thread's code.

        Returns:
            None
        """
        self.top_label.setText(text)
        self.close_btn.show()
        self.thread_return_code = return_code
        self.q_thread.exit(return_code)
        self.q_thread.wait()
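Editor's note: a minimal wiring sketch (not part of this diff) showing how the workers and the splash screen fit together; it uses only the signals and slots defined above, and the exact hook-up in the addon may differ:

from qtpy import QtCore

worker = UEPluginInstallWorker()
worker.setup(engine_path)  # engine_path: Path to the UE_X.X folder

q_thread = QtCore.QThread()
worker.moveToThread(q_thread)
q_thread.started.connect(worker.run)

splash = SplashScreen("Installing the Ayon plugin")
worker.installing.connect(splash.update_top_label_text)
worker.progress.connect(splash.update_progress)
worker.log.connect(splash.append_log)
worker.failed.connect(splash.fail)
worker.finished.connect(splash.quit_and_close)

splash.start_thread(q_thread)
splash.show_ui()  # blocks until the splash screen closes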
@ -1,3 +0,0 @@
# -*- coding: utf-8 -*-
"""Package declaring AYON addon 'unreal' version."""
__version__ = "0.2.0"
@ -1,10 +0,0 @@
name = "unreal"
title = "Unreal"
version = "0.2.0"

client_dir = "ayon_unreal"

ayon_required_addons = {
    "core": ">0.3.2",
}
ayon_compatible_addons = {}
Some files were not shown because too many files have changed in this diff.