Mirror of https://github.com/ynput/ayon-core.git (synced 2025-12-24 21:04:40 +01:00)

Merge branch 'develop' into enhancement/remove-hosts-addons-imports

Commit ad490b311e
72 changed files with 116 additions and 8633 deletions

.gitmodules (vendored) | 3
@@ -1,3 +0,0 @@
[submodule "client/ayon_core/hosts/unreal/integration"]
path = server_addon/unreal/client/ayon_unreal/integration
url = https://github.com/ynput/ayon-unreal-plugin.git
@@ -64,9 +64,10 @@ class Commands:
get_global_context,
)

# Register target and host
import ayon_api
import pyblish.util

# Register target and host
if not isinstance(path, str):
raise RuntimeError("Path to JSON must be a string.")

@@ -86,6 +87,19 @@ class Commands:

log = Logger.get_logger("CLI-publish")

# Make public ayon api behave as other user
# - this works only if public ayon api is using service user
username = os.environ.get("AYON_USERNAME")
if username:
# NOTE: ayon-python-api does not have public api function to find
# out if is used service user. So we need to have try > except
# block.
con = ayon_api.get_server_api_connection()
try:
con.set_default_service_username(username)
except ValueError:
pass

install_ayon_plugins()

manager = AddonsManager()
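
The block added above makes the CLI publish act on behalf of the artist only when the configured connection uses a service user. A minimal standalone sketch of that fallback, assuming a configured ayon_api connection and an AYON_USERNAME environment variable:

    import os
    import ayon_api

    def impersonate_if_service_user(con):
        # Act on behalf of the artist only when the connection is a service user.
        username = os.environ.get("AYON_USERNAME")
        if not username:
            return
        try:
            # Raises ValueError when the connection is not a service user.
            con.set_default_service_username(username)
        except ValueError:
            pass

    impersonate_if_service_user(ayon_api.get_server_api_connection())
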
@@ -281,7 +281,7 @@ class HiddenDef(AbstractAttrDef):
def __init__(self, key, default=None, **kwargs):
kwargs["default"] = default
kwargs["hidden"] = True
super(UnknownDef, self).__init__(key, **kwargs)
super(HiddenDef, self).__init__(key, **kwargs)

def convert_value(self, value):
return value
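
For context on the one-line fix above: calling super() with a class the instance does not derive from raises a TypeError at runtime. A small sketch with simplified stand-in classes (not the real AbstractAttrDef hierarchy):

    class Base:
        def __init__(self, key, **kwargs):
            self.key = key
            self.hidden = kwargs.get("hidden", False)

    class UnknownDef(Base):
        pass

    class HiddenDef(Base):
        def __init__(self, key, **kwargs):
            kwargs["hidden"] = True
            # super(UnknownDef, self) would raise TypeError here, because a
            # HiddenDef instance is not an instance of UnknownDef.
            super(HiddenDef, self).__init__(key, **kwargs)

    print(HiddenDef("review").hidden)  # True
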
@@ -313,7 +313,14 @@ class CollectAnatomyInstanceData(pyblish.api.ContextPlugin):

# Define version
version_number = None
if self.follow_workfile_version:

# Allow an instance to force enable or disable the version
# following of the current context
use_context_version = self.follow_workfile_version
if "followWorkfileVersion" in instance.data:
use_context_version = instance.data["followWorkfileVersion"]

if use_context_version:
version_number = context.data("version")

# Even if 'follow_workfile_version' is enabled, it may not be set
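
The new branch lets a single instance opt out of (or into) workfile-version following. A hedged sketch of how a collector could set that flag earlier in the pipeline; the "followWorkfileVersion" key comes from the diff, the surrounding plugin is illustrative:

    import pyblish.api

    class CollectForcedVersioning(pyblish.api.InstancePlugin):
        """Illustrative collector pinning one instance to its own versioning."""
        order = pyblish.api.CollectorOrder
        label = "Force Own Versioning"

        def process(self, instance):
            # This instance will ignore the workfile version even when the
            # project-wide 'follow_workfile_version' setting is enabled.
            instance.data["followWorkfileVersion"] = False
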
@@ -14,22 +14,20 @@ class CollectFarmTarget(pyblish.api.InstancePlugin):
if not instance.data.get("farm"):
return

context = instance.context
addons_manager = instance.context.data.get("ayonAddonsManager")

farm_name = ""
addons_manager = context.data.get("ayonAddonsManager")

for farm_renderer in ["deadline", "royalrender"]:
addon = addons_manager.get(farm_renderer, False)

if not addon:
self.log.error("Cannot find AYON addon '{0}'.".format(
farm_renderer))
elif addon.enabled:
farm_renderer_addons = ["deadline", "royalrender"]
for farm_renderer in farm_renderer_addons:
addon = addons_manager.get(farm_renderer)
if addon and addon.enabled:
farm_name = farm_renderer

if farm_name:
self.log.debug("Collected render target: {0}".format(farm_name))
instance.data["toBeRenderedOn"] = farm_name
break
else:
AssertionError("No AYON renderer addon found")
# No enabled farm render addon found, then report all farm
# addons that were searched for yet not found
for farm_renderer in farm_renderer_addons:
self.log.error(f"Cannot find AYON addon '{farm_renderer}'.")
raise RuntimeError("No AYON renderer addon found.")

self.log.debug("Collected render target: {0}".format(farm_name))
instance.data["toBeRenderedOn"] = farm_name
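
One detail worth noting in the removed lines: `AssertionError("No AYON renderer addon found")` only constructs the exception object and never raises it, so the old plugin silently continued. The replacement uses an explicit raise. A two-line illustration:

    # Creating an exception without raising it is a no-op:
    AssertionError("No AYON renderer addon found")  # nothing happens
    # Only an explicit raise actually stops the publish:
    raise RuntimeError("No AYON renderer addon found.")
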
@@ -28,7 +28,8 @@ class CollectSceneVersion(pyblish.api.ContextPlugin):
"photoshop",
"resolve",
"tvpaint",
"motionbuilder"
"motionbuilder",
"substancepainter"
]

# in some cases of headless publishing (for example webpublisher using PS)
@@ -202,43 +202,16 @@ class ExtractOIIOTranscode(publish.Extractor):
added_representations = True

if added_representations:
self._mark_original_repre_for_deletion(repre, profile,
added_review)
self._mark_original_repre_for_deletion(
repre, profile, added_review
)

for repre in tuple(instance.data["representations"]):
tags = repre.get("tags") or []
if "delete" in tags and "thumbnail" not in tags:
instance.data["representations"].remove(repre)

instance.data["representations"].extend(new_representations)

def _rename_in_representation(self, new_repre, files_to_convert,
output_name, output_extension):
"""Replace old extension with new one everywhere in representation.

Args:
new_repre (dict)
files_to_convert (list): of filenames from repre["files"],
standardized to always list
output_name (str): key of output definition from Settings,
if "<passthrough>" token used, keep original repre name
output_extension (str): extension from output definition
"""
if output_name != "passthrough":
new_repre["name"] = output_name
if not output_extension:
return

new_repre["ext"] = output_extension

renamed_files = []
for file_name in files_to_convert:
file_name, _ = os.path.splitext(file_name)
file_name = '{}.{}'.format(file_name,
output_extension)
renamed_files.append(file_name)
new_repre["files"] = renamed_files

def _rename_in_representation(self, new_repre, files_to_convert,
output_name, output_extension):
"""Replace old extension with new one everywhere in representation.

@@ -364,7 +337,7 @@ class ExtractOIIOTranscode(publish.Extractor):

if not repre.get("colorspaceData"):
self.log.debug("Representation '{}' has no colorspace data. "
"Skipped.")
"Skipped.".format(repre["name"]))
return False

return True
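
The `_rename_in_representation` helper shown above only swaps file extensions. A quick worked example of the splitext-based rename (the file names are made up):

    import os

    files_to_convert = ["shot010.0001.exr", "shot010.0002.exr"]
    output_extension = "jpg"

    renamed_files = []
    for file_name in files_to_convert:
        file_name, _ = os.path.splitext(file_name)
        renamed_files.append("{}.{}".format(file_name, output_extension))

    print(renamed_files)  # ['shot010.0001.jpg', 'shot010.0002.jpg']
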
@@ -380,29 +380,28 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
data = {
"families": get_instance_families(instance)
}
attribibutes = {}
attributes = {}

product_group = instance.data.get("productGroup")
if product_group:
attribibutes["productGroup"] = product_group
attributes["productGroup"] = product_group
elif existing_product_entity:
# Preserve previous product group if new version does not set it
product_group = existing_product_entity.get("attrib", {}).get(
"productGroup"
)
if product_group is not None:
attribibutes["productGroup"] = product_group
attributes["productGroup"] = product_group

product_id = None
if existing_product_entity:
product_id = existing_product_entity["id"]

product_entity = new_product_entity(
product_name,
product_type,
folder_entity["id"],
data=data,
attribs=attribibutes,
attribs=attributes,
entity_id=product_id
)

@@ -464,6 +463,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
version_number,
product_entity["id"],
task_id=task_id,
status=instance.data.get("status"),
data=version_data,
attribs=version_attributes,
entity_id=version_id,
@@ -70,7 +70,7 @@ class HierarchyPage(QtWidgets.QWidget):
main_layout.addWidget(content_body, 1)

btn_back.clicked.connect(self._on_back_clicked)
refresh_btn.clicked.connect(self._on_refreh_clicked)
refresh_btn.clicked.connect(self._on_refresh_clicked)
folders_filter_text.textChanged.connect(self._on_filter_text_changed)

self._is_visible = False

@@ -99,7 +99,7 @@ class HierarchyPage(QtWidgets.QWidget):
def _on_back_clicked(self):
self._controller.set_selected_project(None)

def _on_refreh_clicked(self):
def _on_refresh_clicked(self):
self._controller.refresh()

def _on_filter_text_changed(self, text):
@@ -17,3 +17,4 @@ Click = "^8"
OpenTimelineIO = "0.16.0"
opencolorio = "2.2.1"
Pillow = "9.5.0"
websocket-client = ">=0.40.0,<2"
@@ -78,7 +78,6 @@ unfixable = []
dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$"

exclude = [
"client/ayon_core/hosts/unreal/integration/*",
"client/ayon_core/modules/click_wrap.py",
"client/ayon_core/scripts/slates/__init__.py",
"server_addon/deadline/client/ayon_deadline/repository/custom/plugins/CelAction/*",

@@ -106,7 +105,7 @@ line-ending = "auto"

[tool.codespell]
# Ignore words that are not in the dictionary.
ignore-words-list = "ayon,ynput,parms,parm,hda,developpement,ue"
ignore-words-list = "ayon,ynput,parms,parm,hda,developpement"

# Ignore lines that contain this regex. This is hack for missing inline ignore.
# Remove with next codespell release (>2.2.6)
@@ -863,7 +863,8 @@ DEFAULT_PUBLISH_VALUES = {
"nuke",
"photoshop",
"resolve",
"tvpaint"
"tvpaint",
"substancepainter"
],
"skip_hosts_headless_publish": []
},

@@ -890,6 +891,7 @@ DEFAULT_PUBLISH_VALUES = {
"maya",
"nuke",
"photoshop",
"substancepainter"
],
"enabled": True,
"optional": False,
@@ -454,9 +454,7 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin,
self.plugin_info = self.get_plugin_info()
self.aux_files = self.get_aux_files()

auth = instance.data["deadline"]["auth"]
verify = instance.data["deadline"]["verify"]
job_id = self.process_submission(auth, verify)
job_id = self.process_submission()
self.log.info("Submitted job to Deadline: {}.".format(job_id))

# TODO: Find a way that's more generic and not render type specific

@@ -469,10 +467,12 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin,
job_info=render_job_info,
plugin_info=render_plugin_info
)
auth = instance.data["deadline"]["auth"]
verify = instance.data["deadline"]["verify"]
render_job_id = self.submit(payload, auth, verify)
self.log.info("Render job id: %s", render_job_id)

def process_submission(self, auth=None, verify=True):
def process_submission(self):
"""Process data for submission.

This takes Deadline JobInfo, PluginInfo, AuxFile, creates payload

@@ -483,6 +483,8 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin,

"""
payload = self.assemble_payload()
auth = self._instance.data["deadline"]["auth"]
verify = self._instance.data["deadline"]["verify"]
return self.submit(payload, auth, verify)

@abstractmethod
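
With the signature change above, subclasses no longer receive auth/verify as arguments; the credentials are read from the instance data instead. A sketch of a minimal override after this change (the subclass name is made up, the body mirrors the base implementation shown in the diff):

    class MyHostSubmitDeadline(AbstractSubmitDeadline):
        def process_submission(self):
            # Credentials are no longer passed in; they live on the instance.
            payload = self.assemble_payload()
            auth = self._instance.data["deadline"]["auth"]
            verify = self._instance.data["deadline"]["verify"]
            return self.submit(payload, auth, verify)
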
server_addon/deadline/client/ayon_deadline/lib.py (new file) | 10

@@ -0,0 +1,10 @@
# describes list of product typed used for plugin filtering for farm publishing
FARM_FAMILIES = [
"render", "render.farm", "render.frames_farm",
"prerender", "prerender.farm", "prerender.frames_farm",
"renderlayer", "imagesequence", "image",
"vrayscene", "maxrender",
"arnold_rop", "mantra_rop",
"karma_rop", "vray_rop", "redshift_rop",
"renderFarm", "usrender", "publish.hou"
]
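
The new module centralizes the family filter that the collectors below previously duplicated; a plugin now just imports the constant. Roughly (the plugin name and order value are illustrative):

    import pyblish.api

    from ayon_deadline.lib import FARM_FAMILIES


    class CollectSomethingForFarm(pyblish.api.InstancePlugin):
        """Illustrative collector filtered to the shared farm families."""
        order = pyblish.api.CollectorOrder + 0.25
        families = FARM_FAMILIES

        def process(self, instance):
            if not instance.data.get("farm"):
                return
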
@@ -8,6 +8,8 @@ attribute or using default server if that attribute doesn't exists.
import pyblish.api
from ayon_core.pipeline.publish import KnownPublishError

from ayon_deadline.lib import FARM_FAMILIES


class CollectDeadlineServerFromInstance(pyblish.api.InstancePlugin):
"""Collect Deadline Webservice URL from instance."""

@@ -16,20 +18,8 @@ class CollectDeadlineServerFromInstance(pyblish.api.InstancePlugin):
order = pyblish.api.CollectorOrder + 0.225
label = "Deadline Webservice from the Instance"
targets = ["local"]
families = ["render",
"rendering",
"render.farm",
"renderFarm",
"renderlayer",
"maxrender",
"usdrender",
"redshift_rop",
"arnold_rop",
"mantra_rop",
"karma_rop",
"vray_rop",
"publish.hou",
"image"] # for Fusion

families = FARM_FAMILIES

def process(self, instance):
if not instance.data.get("farm"):
@@ -3,6 +3,8 @@ import pyblish.api
from ayon_core.lib import TextDef
from ayon_core.pipeline.publish import AYONPyblishPluginMixin

from ayon_deadline.lib import FARM_FAMILIES


class CollectDeadlinePools(pyblish.api.InstancePlugin,
AYONPyblishPluginMixin):

@@ -36,22 +38,7 @@ class CollectDeadlinePools(pyblish.api.InstancePlugin,
"nuke",
]

families = [
"render",
"prerender",
"rendering",
"render.farm",
"renderFarm",
"renderlayer",
"maxrender",
"usdrender",
"redshift_rop",
"arnold_rop",
"mantra_rop",
"karma_rop",
"vray_rop",
"publish.hou",
]
families = FARM_FAMILIES

primary_pool = None
secondary_pool = None
@@ -14,6 +14,8 @@ import pyblish.api

from ayon_api import get_server_api_connection

from ayon_deadline.lib import FARM_FAMILIES


class CollectDeadlineUserCredentials(pyblish.api.InstancePlugin):
"""Collects user name and password for artist if DL requires authentication

@@ -31,19 +33,7 @@ class CollectDeadlineUserCredentials(pyblish.api.InstancePlugin):
"max",
"houdini"]

families = ["render",
"rendering",
"render.farm",
"renderFarm",
"renderlayer",
"maxrender",
"usdrender",
"redshift_rop",
"arnold_rop",
"mantra_rop",
"karma_rop",
"vray_rop",
"publish.hou"]
families = FARM_FAMILIES

def process(self, instance):
if not instance.data.get("farm"):
@@ -175,8 +175,8 @@ class BlenderSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline,
instance.data["toBeRenderedOn"] = "deadline"

payload = self.assemble_payload()
auth = instance.data["deadline"]["auth"]
verify = instance.data["deadline"]["verify"]
auth = self._instance.data["deadline"]["auth"]
verify = self._instance.data["deadline"]["verify"]
return self.submit(payload, auth=auth, verify=verify)

def from_published_scene(self):
@@ -182,8 +182,8 @@ class MaxSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline,

self.log.debug("Submitting 3dsMax render..")
project_settings = instance.context.data["project_settings"]
auth = instance.data["deadline"]["auth"]
verify = instance.data["deadline"]["verify"]
auth = self._instance.data["deadline"]["auth"]
verify = self._instance.data["deadline"]["verify"]
if instance.data.get("multiCamera"):
self.log.debug("Submitting jobs for multiple cameras..")
payload = self._use_published_name_for_multiples(
@@ -294,7 +294,7 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline,

return plugin_payload

def process_submission(self, auth=None, verify=True):
def process_submission(self):
from maya import cmds
instance = self._instance

@@ -331,10 +331,11 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline,
for x in ['vrayscene', 'assscene']), (
"Vray Scene and Ass Scene options are mutually exclusive")

auth = self._instance.data["deadline"]["auth"]
verify = self._instance.data["deadline"]["verify"]
if "vrayscene" in instance.data["families"]:
self.log.debug("Submitting V-Ray scene render..")
vray_export_payload = self._get_vray_export_payload(payload_data)

export_job = self.submit(vray_export_payload,
auth=auth,
verify=verify)

@@ -409,6 +410,9 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline,
files = exp
assembly_files = files

auth = instance.data["deadline"]["auth"]
verify = instance.data["deadline"]["verify"]

# Define frame tile jobs
frame_file_hash = {}
frame_payloads = {}

@@ -458,8 +462,8 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline,
# Submit frame tile jobs
frame_tile_job_id = {}
for frame, tile_job_payload in frame_payloads.items():
job_id = self.submit(tile_job_payload,
instance.data["deadline"]["auth"])
job_id = self.submit(
tile_job_payload, auth, verify)
frame_tile_job_id[frame] = job_id

# Define assembly payloads

@@ -562,8 +566,6 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline,
# Submit assembly jobs
assembly_job_ids = []
num_assemblies = len(assembly_payloads)
auth = instance.data["deadline"]["auth"]
verify = instance.data["deadline"]["verify"]
for i, payload in enumerate(assembly_payloads):
self.log.debug(
"submitting assembly job {} of {}".format(i + 1,
@@ -145,7 +145,27 @@ attributes "OpenPypeContext"
rt.saveMaxFile(dst_path)


def ls() -> list:
def parse_container(container):
"""Return the container node's full container data.

Args:
container (str): A container node name.

Returns:
dict: The container schema data for this container node.

"""
data = lib.read(container)

# Backwards compatibility pre-schemas for containers
data["schema"] = data.get("schema", "openpype:container-3.0")

# Append transient data
data["objectName"] = container.Name
return data


def ls():
"""Get all AYON containers."""
objs = rt.objects
containers = [

@@ -156,7 +176,7 @@ def ls() -> list:
]

for container in sorted(containers, key=attrgetter("name")):
yield lib.read(container)
yield parse_container(container)


def on_new():
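
With the new `parse_container` wrapper, `ls()` now yields full container data (schema plus the transient `objectName`) instead of raw `lib.read` output. Iterating it from a 3ds Max session might look like this sketch, assuming the generator above is in scope:

    for container_data in ls():
        # Each item is the dict returned by parse_container(), e.g.
        # {"schema": "openpype:container-3.0", "objectName": "...", ...}
        print(container_data["objectName"], container_data["schema"])
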
@@ -1,9 +0,0 @@
## Unreal Integration

Supported Unreal Engine version is 4.26+ (mainly because of major Python changes done there).

### Project naming
Unreal doesn't support project names starting with non-alphabetic character. So names like `123_myProject` are
invalid. If Ayon detects such name it automatically prepends letter **P** to make it valid name, so `123_myProject`
will become `P123_myProject`. There is also soft-limit on project name length to be shorter than 20 characters.
Longer names will issue warning in Unreal Editor that there might be possible side effects.
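
The removed README describes the project-name rule only in prose; a small sketch of that normalization (the helper name is made up, the behaviour follows the text above):

    def normalize_unreal_project_name(name, max_length=20):
        # Unreal project names must start with an alphabetic character,
        # so "123_myProject" becomes "P123_myProject".
        if not name[:1].isalpha():
            name = "P" + name
        if len(name) >= max_length:
            # Soft limit only: warn, but keep the name.
            print("Warning: project name longer than {} characters".format(max_length))
        return name

    print(normalize_unreal_project_name("123_myProject"))  # P123_myProject
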
@@ -1,10 +0,0 @@
from .version import __version__
from .addon import UNREAL_ADDON_ROOT, UnrealAddon


__all__ = (
"__version__",

"UNREAL_ADDON_ROOT",
"UnrealAddon",
)
@ -1,77 +0,0 @@
|
|||
import os
|
||||
import re
|
||||
from ayon_core.addon import AYONAddon, IHostAddon
|
||||
|
||||
from .version import __version__
|
||||
|
||||
UNREAL_ADDON_ROOT = os.path.dirname(os.path.abspath(__file__))
|
||||
|
||||
|
||||
class UnrealAddon(AYONAddon, IHostAddon):
|
||||
name = "unreal"
|
||||
version = __version__
|
||||
host_name = "unreal"
|
||||
|
||||
def get_global_environments(self):
|
||||
return {
|
||||
"AYON_UNREAL_ROOT": UNREAL_ADDON_ROOT,
|
||||
}
|
||||
|
||||
def add_implementation_envs(self, env, app):
|
||||
"""Modify environments to contain all required for implementation."""
|
||||
# Set AYON_UNREAL_PLUGIN required for Unreal implementation
|
||||
# Imports are in this method for Python 2 compatiblity of an addon
|
||||
from pathlib import Path
|
||||
|
||||
from .lib import get_compatible_integration
|
||||
|
||||
from ayon_core.tools.utils import show_message_dialog
|
||||
|
||||
pattern = re.compile(r'^\d+-\d+$')
|
||||
|
||||
if not pattern.match(app.name):
|
||||
msg = (
|
||||
"Unreal application key in the settings must be in format"
|
||||
"'5-0' or '5-1'"
|
||||
)
|
||||
show_message_dialog(
|
||||
parent=None,
|
||||
title="Unreal application name format",
|
||||
message=msg,
|
||||
level="critical")
|
||||
raise ValueError(msg)
|
||||
|
||||
ue_version = app.name.replace("-", ".")
|
||||
unreal_plugin_path = os.path.join(
|
||||
UNREAL_ADDON_ROOT, "integration", "UE_{}".format(ue_version), "Ayon"
|
||||
)
|
||||
if not Path(unreal_plugin_path).exists():
|
||||
compatible_versions = get_compatible_integration(
|
||||
ue_version, Path(UNREAL_ADDON_ROOT) / "integration"
|
||||
)
|
||||
if compatible_versions:
|
||||
unreal_plugin_path = compatible_versions[-1] / "Ayon"
|
||||
unreal_plugin_path = unreal_plugin_path.as_posix()
|
||||
|
||||
if not env.get("AYON_UNREAL_PLUGIN") or \
|
||||
env.get("AYON_UNREAL_PLUGIN") != unreal_plugin_path:
|
||||
env["AYON_UNREAL_PLUGIN"] = unreal_plugin_path
|
||||
|
||||
# Set default environments if are not set via settings
|
||||
defaults = {
|
||||
"AYON_LOG_NO_COLORS": "1",
|
||||
"UE_PYTHONPATH": os.environ.get("PYTHONPATH", ""),
|
||||
}
|
||||
for key, value in defaults.items():
|
||||
if not env.get(key):
|
||||
env[key] = value
|
||||
|
||||
def get_launch_hook_paths(self, app):
|
||||
if app.host_name != self.host_name:
|
||||
return []
|
||||
return [
|
||||
os.path.join(UNREAL_ADDON_ROOT, "hooks")
|
||||
]
|
||||
|
||||
def get_workfile_extensions(self):
|
||||
return [".uproject"]
|
||||
|
|
@ -1,51 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""Unreal Editor Ayon host API."""
|
||||
|
||||
from .plugin import (
|
||||
UnrealActorCreator,
|
||||
UnrealAssetCreator,
|
||||
Loader
|
||||
)
|
||||
|
||||
from .pipeline import (
|
||||
install,
|
||||
uninstall,
|
||||
ls,
|
||||
publish,
|
||||
containerise,
|
||||
show_creator,
|
||||
show_loader,
|
||||
show_publisher,
|
||||
show_manager,
|
||||
show_experimental_tools,
|
||||
show_tools_dialog,
|
||||
show_tools_popup,
|
||||
instantiate,
|
||||
UnrealHost,
|
||||
set_sequence_hierarchy,
|
||||
generate_sequence,
|
||||
maintained_selection
|
||||
)
|
||||
|
||||
__all__ = [
|
||||
"UnrealActorCreator",
|
||||
"UnrealAssetCreator",
|
||||
"Loader",
|
||||
"install",
|
||||
"uninstall",
|
||||
"ls",
|
||||
"publish",
|
||||
"containerise",
|
||||
"show_creator",
|
||||
"show_loader",
|
||||
"show_publisher",
|
||||
"show_manager",
|
||||
"show_experimental_tools",
|
||||
"show_tools_dialog",
|
||||
"show_tools_popup",
|
||||
"instantiate",
|
||||
"UnrealHost",
|
||||
"set_sequence_hierarchy",
|
||||
"generate_sequence",
|
||||
"maintained_selection"
|
||||
]
|
||||
|
|
@ -1,44 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
import unreal # noqa
|
||||
|
||||
|
||||
class AyonUnrealException(Exception):
|
||||
pass
|
||||
|
||||
|
||||
@unreal.uclass()
|
||||
class AyonHelpers(unreal.AyonLib):
|
||||
"""Class wrapping some useful functions for Ayon.
|
||||
|
||||
This class is extending native BP class in Ayon Integration Plugin.
|
||||
|
||||
"""
|
||||
|
||||
@unreal.ufunction(params=[str, unreal.LinearColor, bool])
|
||||
def set_folder_color(self, path: str, color: unreal.LinearColor) -> None:
|
||||
"""Set color on folder in Content Browser.
|
||||
|
||||
This method sets color on folder in Content Browser. Unfortunately
|
||||
there is no way to refresh Content Browser so new color isn't applied
|
||||
immediately. They are saved to config file and appears correctly
|
||||
only after Editor is restarted.
|
||||
|
||||
Args:
|
||||
path (str): Path to folder
|
||||
color (:class:`unreal.LinearColor`): Color of the folder
|
||||
|
||||
Example:
|
||||
|
||||
AyonHelpers().set_folder_color(
|
||||
"/Game/Path", unreal.LinearColor(a=1.0, r=1.0, g=0.5, b=0)
|
||||
)
|
||||
|
||||
Note:
|
||||
This will take effect only after Editor is restarted. I couldn't
|
||||
find a way to refresh it. Also, this saves the color definition
|
||||
into the project config, binding this path with color. So if you
|
||||
delete this path and later re-create, it will set this color
|
||||
again.
|
||||
|
||||
"""
|
||||
self.c_set_folder_color(path, color, False)
|
||||
|
|
@ -1,803 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
import os
|
||||
import json
|
||||
import logging
|
||||
from typing import List
|
||||
from contextlib import contextmanager
|
||||
import time
|
||||
|
||||
import semver
|
||||
import pyblish.api
|
||||
import ayon_api
|
||||
|
||||
from ayon_core.pipeline import (
|
||||
register_loader_plugin_path,
|
||||
register_creator_plugin_path,
|
||||
register_inventory_action_path,
|
||||
deregister_loader_plugin_path,
|
||||
deregister_creator_plugin_path,
|
||||
deregister_inventory_action_path,
|
||||
AYON_CONTAINER_ID,
|
||||
get_current_project_name,
|
||||
)
|
||||
from ayon_core.tools.utils import host_tools
|
||||
from ayon_core.host import HostBase, ILoadHost, IPublishHost
|
||||
from ayon_unreal import UNREAL_ADDON_ROOT
|
||||
|
||||
import unreal # noqa
|
||||
|
||||
# Rename to Ayon once parent module renames
|
||||
logger = logging.getLogger("ayon_core.hosts.unreal")
|
||||
|
||||
AYON_CONTAINERS = "AyonContainers"
|
||||
AYON_ASSET_DIR = "/Game/Ayon/Assets"
|
||||
CONTEXT_CONTAINER = "Ayon/context.json"
|
||||
UNREAL_VERSION = semver.VersionInfo(
|
||||
*os.getenv("AYON_UNREAL_VERSION").split(".")
|
||||
)
|
||||
|
||||
PLUGINS_DIR = os.path.join(UNREAL_ADDON_ROOT, "plugins")
|
||||
PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish")
|
||||
LOAD_PATH = os.path.join(PLUGINS_DIR, "load")
|
||||
CREATE_PATH = os.path.join(PLUGINS_DIR, "create")
|
||||
INVENTORY_PATH = os.path.join(PLUGINS_DIR, "inventory")
|
||||
|
||||
|
||||
class UnrealHost(HostBase, ILoadHost, IPublishHost):
|
||||
"""Unreal host implementation.
|
||||
|
||||
For some time this class will re-use functions from module based
|
||||
implementation for backwards compatibility of older unreal projects.
|
||||
"""
|
||||
|
||||
name = "unreal"
|
||||
|
||||
def install(self):
|
||||
install()
|
||||
|
||||
def get_containers(self):
|
||||
return ls()
|
||||
|
||||
@staticmethod
|
||||
def show_tools_popup():
|
||||
"""Show tools popup with actions leading to show other tools."""
|
||||
show_tools_popup()
|
||||
|
||||
@staticmethod
|
||||
def show_tools_dialog():
|
||||
"""Show tools dialog with actions leading to show other tools."""
|
||||
show_tools_dialog()
|
||||
|
||||
def update_context_data(self, data, changes):
|
||||
content_path = unreal.Paths.project_content_dir()
|
||||
op_ctx = content_path + CONTEXT_CONTAINER
|
||||
attempts = 3
|
||||
for i in range(attempts):
|
||||
try:
|
||||
with open(op_ctx, "w+") as f:
|
||||
json.dump(data, f)
|
||||
break
|
||||
except IOError as e:
|
||||
if i == attempts - 1:
|
||||
raise Exception(
|
||||
"Failed to write context data. Aborting.") from e
|
||||
unreal.log_warning("Failed to write context data. Retrying...")
|
||||
i += 1
|
||||
time.sleep(3)
|
||||
continue
|
||||
|
||||
def get_context_data(self):
|
||||
content_path = unreal.Paths.project_content_dir()
|
||||
op_ctx = content_path + CONTEXT_CONTAINER
|
||||
if not os.path.isfile(op_ctx):
|
||||
return {}
|
||||
with open(op_ctx, "r") as fp:
|
||||
data = json.load(fp)
|
||||
return data
|
||||
|
||||
|
||||
def install():
|
||||
"""Install Unreal configuration for AYON."""
|
||||
print("-=" * 40)
|
||||
logo = '''.
|
||||
.
|
||||
·
|
||||
│
|
||||
·∙/
|
||||
·-∙•∙-·
|
||||
/ \\ /∙· / \\
|
||||
∙ \\ │ / ∙
|
||||
\\ \\ · / /
|
||||
\\\\ ∙ ∙ //
|
||||
\\\\/ \\//
|
||||
___
|
||||
│ │
|
||||
│ │
|
||||
│ │
|
||||
│___│
|
||||
-·
|
||||
|
||||
·-─═─-∙ A Y O N ∙-─═─-·
|
||||
by YNPUT
|
||||
.
|
||||
'''
|
||||
print(logo)
|
||||
print("installing Ayon for Unreal ...")
|
||||
print("-=" * 40)
|
||||
logger.info("installing Ayon for Unreal")
|
||||
pyblish.api.register_host("unreal")
|
||||
pyblish.api.register_plugin_path(str(PUBLISH_PATH))
|
||||
register_loader_plugin_path(str(LOAD_PATH))
|
||||
register_creator_plugin_path(str(CREATE_PATH))
|
||||
register_inventory_action_path(str(INVENTORY_PATH))
|
||||
_register_callbacks()
|
||||
_register_events()
|
||||
|
||||
|
||||
def uninstall():
|
||||
"""Uninstall Unreal configuration for Ayon."""
|
||||
pyblish.api.deregister_plugin_path(str(PUBLISH_PATH))
|
||||
deregister_loader_plugin_path(str(LOAD_PATH))
|
||||
deregister_creator_plugin_path(str(CREATE_PATH))
|
||||
deregister_inventory_action_path(str(INVENTORY_PATH))
|
||||
|
||||
|
||||
def _register_callbacks():
|
||||
"""
|
||||
TODO: Implement callbacks if supported by UE
|
||||
"""
|
||||
pass
|
||||
|
||||
|
||||
def _register_events():
|
||||
"""
|
||||
TODO: Implement callbacks if supported by UE
|
||||
"""
|
||||
pass
|
||||
|
||||
|
||||
def ls():
|
||||
"""List all containers.
|
||||
|
||||
List all found in *Content Manager* of Unreal and return
|
||||
metadata from them. Adding `objectName` to set.
|
||||
|
||||
"""
|
||||
ar = unreal.AssetRegistryHelpers.get_asset_registry()
|
||||
# UE 5.1 changed how class name is specified
|
||||
class_name = ["/Script/Ayon", "AyonAssetContainer"] if UNREAL_VERSION.major == 5 and UNREAL_VERSION.minor > 0 else "AyonAssetContainer" # noqa
|
||||
ayon_containers = ar.get_assets_by_class(class_name, True)
|
||||
|
||||
# get_asset_by_class returns AssetData. To get all metadata we need to
|
||||
# load asset. get_tag_values() work only on metadata registered in
|
||||
# Asset Registry Project settings (and there is no way to set it with
|
||||
# python short of editing ini configuration file).
|
||||
for asset_data in ayon_containers:
|
||||
asset = asset_data.get_asset()
|
||||
data = unreal.EditorAssetLibrary.get_metadata_tag_values(asset)
|
||||
data["objectName"] = asset_data.asset_name
|
||||
yield cast_map_to_str_dict(data)
|
||||
|
||||
|
||||
def ls_inst():
|
||||
ar = unreal.AssetRegistryHelpers.get_asset_registry()
|
||||
# UE 5.1 changed how class name is specified
|
||||
class_name = [
|
||||
"/Script/Ayon",
|
||||
"AyonPublishInstance"
|
||||
] if (
|
||||
UNREAL_VERSION.major == 5
|
||||
and UNREAL_VERSION.minor > 0
|
||||
) else "AyonPublishInstance" # noqa
|
||||
instances = ar.get_assets_by_class(class_name, True)
|
||||
|
||||
# get_asset_by_class returns AssetData. To get all metadata we need to
|
||||
# load asset. get_tag_values() work only on metadata registered in
|
||||
# Asset Registry Project settings (and there is no way to set it with
|
||||
# python short of editing ini configuration file).
|
||||
for asset_data in instances:
|
||||
asset = asset_data.get_asset()
|
||||
data = unreal.EditorAssetLibrary.get_metadata_tag_values(asset)
|
||||
data["objectName"] = asset_data.asset_name
|
||||
yield cast_map_to_str_dict(data)
|
||||
|
||||
|
||||
def parse_container(container):
|
||||
"""To get data from container, AyonAssetContainer must be loaded.
|
||||
|
||||
Args:
|
||||
container(str): path to container
|
||||
|
||||
Returns:
|
||||
dict: metadata stored on container
|
||||
"""
|
||||
asset = unreal.EditorAssetLibrary.load_asset(container)
|
||||
data = unreal.EditorAssetLibrary.get_metadata_tag_values(asset)
|
||||
data["objectName"] = asset.get_name()
|
||||
data = cast_map_to_str_dict(data)
|
||||
|
||||
return data
|
||||
|
||||
|
||||
def publish():
|
||||
"""Shorthand to publish from within host."""
|
||||
import pyblish.util
|
||||
|
||||
return pyblish.util.publish()
|
||||
|
||||
|
||||
def containerise(name, namespace, nodes, context, loader=None, suffix="_CON"):
|
||||
"""Bundles *nodes* (assets) into a *container* and add metadata to it.
|
||||
|
||||
Unreal doesn't support *groups* of assets that you can add metadata to.
|
||||
But it does support folders that helps to organize asset. Unfortunately
|
||||
those folders are just that - you cannot add any additional information
|
||||
to them. Ayon Integration Plugin is providing way out - Implementing
|
||||
`AssetContainer` Blueprint class. This class when added to folder can
|
||||
handle metadata on it using standard
|
||||
:func:`unreal.EditorAssetLibrary.set_metadata_tag()` and
|
||||
:func:`unreal.EditorAssetLibrary.get_metadata_tag_values()`. It also
|
||||
stores and monitor all changes in assets in path where it resides. List of
|
||||
those assets is available as `assets` property.
|
||||
|
||||
This is list of strings starting with asset type and ending with its path:
|
||||
`Material /Game/Ayon/Test/TestMaterial.TestMaterial`
|
||||
|
||||
"""
|
||||
# 1 - create directory for container
|
||||
root = "/Game"
|
||||
container_name = f"{name}{suffix}"
|
||||
new_name = move_assets_to_path(root, container_name, nodes)
|
||||
|
||||
# 2 - create Asset Container there
|
||||
path = f"{root}/{new_name}"
|
||||
create_container(container=container_name, path=path)
|
||||
|
||||
namespace = path
|
||||
|
||||
data = {
|
||||
"schema": "ayon:container-2.0",
|
||||
"id": AYON_CONTAINER_ID,
|
||||
"name": new_name,
|
||||
"namespace": namespace,
|
||||
"loader": str(loader),
|
||||
"representation": context["representation"]["id"],
|
||||
}
|
||||
# 3 - imprint data
|
||||
imprint(f"{path}/{container_name}", data)
|
||||
return path
|
||||
|
||||
|
||||
def instantiate(root, name, data, assets=None, suffix="_INS"):
|
||||
"""Bundles *nodes* into *container*.
|
||||
|
||||
Marking it with metadata as publishable instance. If assets are provided,
|
||||
they are moved to new path where `AyonPublishInstance` class asset is
|
||||
created and imprinted with metadata.
|
||||
|
||||
This can then be collected for publishing by Pyblish for example.
|
||||
|
||||
Args:
|
||||
root (str): root path where to create instance container
|
||||
name (str): name of the container
|
||||
data (dict): data to imprint on container
|
||||
assets (list of str): list of asset paths to include in publish
|
||||
instance
|
||||
suffix (str): suffix string to append to instance name
|
||||
|
||||
"""
|
||||
container_name = f"{name}{suffix}"
|
||||
|
||||
# if we specify assets, create new folder and move them there. If not,
|
||||
# just create empty folder
|
||||
if assets:
|
||||
new_name = move_assets_to_path(root, container_name, assets)
|
||||
else:
|
||||
new_name = create_folder(root, name)
|
||||
|
||||
path = f"{root}/{new_name}"
|
||||
create_publish_instance(instance=container_name, path=path)
|
||||
|
||||
imprint(f"{path}/{container_name}", data)
|
||||
|
||||
|
||||
def imprint(node, data):
|
||||
loaded_asset = unreal.EditorAssetLibrary.load_asset(node)
|
||||
for key, value in data.items():
|
||||
# Support values evaluated at imprint
|
||||
if callable(value):
|
||||
value = value()
|
||||
# Unreal doesn't support NoneType in metadata values
|
||||
if value is None:
|
||||
value = ""
|
||||
unreal.EditorAssetLibrary.set_metadata_tag(
|
||||
loaded_asset, key, str(value)
|
||||
)
|
||||
|
||||
with unreal.ScopedEditorTransaction("Ayon containerising"):
|
||||
unreal.EditorAssetLibrary.save_asset(node)
|
||||
|
||||
|
||||
def show_tools_popup():
|
||||
"""Show popup with tools.
|
||||
|
||||
Popup will disappear on click or losing focus.
|
||||
"""
|
||||
from ayon_unreal.api import tools_ui
|
||||
|
||||
tools_ui.show_tools_popup()
|
||||
|
||||
|
||||
def show_tools_dialog():
|
||||
"""Show dialog with tools.
|
||||
|
||||
Dialog will stay visible.
|
||||
"""
|
||||
from ayon_unreal.api import tools_ui
|
||||
|
||||
tools_ui.show_tools_dialog()
|
||||
|
||||
|
||||
def show_creator():
|
||||
host_tools.show_creator()
|
||||
|
||||
|
||||
def show_loader():
|
||||
host_tools.show_loader(use_context=True)
|
||||
|
||||
|
||||
def show_publisher():
|
||||
host_tools.show_publish()
|
||||
|
||||
|
||||
def show_manager():
|
||||
host_tools.show_scene_inventory()
|
||||
|
||||
|
||||
def show_experimental_tools():
|
||||
host_tools.show_experimental_tools_dialog()
|
||||
|
||||
|
||||
def create_folder(root: str, name: str) -> str:
|
||||
"""Create new folder.
|
||||
|
||||
If folder exists, append number at the end and try again, incrementing
|
||||
if needed.
|
||||
|
||||
Args:
|
||||
root (str): path root
|
||||
name (str): folder name
|
||||
|
||||
Returns:
|
||||
str: folder name
|
||||
|
||||
Example:
|
||||
>>> create_folder("/Game/Foo")
|
||||
/Game/Foo
|
||||
>>> create_folder("/Game/Foo")
|
||||
/Game/Foo1
|
||||
|
||||
"""
|
||||
eal = unreal.EditorAssetLibrary
|
||||
index = 1
|
||||
while True:
|
||||
if eal.does_directory_exist(f"{root}/{name}"):
|
||||
name = f"{name}{index}"
|
||||
index += 1
|
||||
else:
|
||||
eal.make_directory(f"{root}/{name}")
|
||||
break
|
||||
|
||||
return name
|
||||
|
||||
|
||||
def move_assets_to_path(root: str, name: str, assets: List[str]) -> str:
|
||||
"""Moving (renaming) list of asset paths to new destination.
|
||||
|
||||
Args:
|
||||
root (str): root of the path (eg. `/Game`)
|
||||
name (str): name of destination directory (eg. `Foo` )
|
||||
assets (list of str): list of asset paths
|
||||
|
||||
Returns:
|
||||
str: folder name
|
||||
|
||||
Example:
|
||||
This will get paths of all assets under `/Game/Test` and move them
|
||||
to `/Game/NewTest`. If `/Game/NewTest` already exists, then resulting
|
||||
path will be `/Game/NewTest1`
|
||||
|
||||
>>> assets = unreal.EditorAssetLibrary.list_assets("/Game/Test")
|
||||
>>> move_assets_to_path("/Game", "NewTest", assets)
|
||||
NewTest
|
||||
|
||||
"""
|
||||
eal = unreal.EditorAssetLibrary
|
||||
name = create_folder(root, name)
|
||||
|
||||
unreal.log(assets)
|
||||
for asset in assets:
|
||||
loaded = eal.load_asset(asset)
|
||||
eal.rename_asset(asset, f"{root}/{name}/{loaded.get_name()}")
|
||||
|
||||
return name
|
||||
|
||||
|
||||
def create_container(container: str, path: str) -> unreal.Object:
|
||||
"""Helper function to create Asset Container class on given path.
|
||||
|
||||
This Asset Class helps to mark given path as Container
|
||||
and enable asset version control on it.
|
||||
|
||||
Args:
|
||||
container (str): Asset Container name
|
||||
path (str): Path where to create Asset Container. This path should
|
||||
point into container folder
|
||||
|
||||
Returns:
|
||||
:class:`unreal.Object`: instance of created asset
|
||||
|
||||
Example:
|
||||
|
||||
create_container(
|
||||
"/Game/modelingFooCharacter_CON",
|
||||
"modelingFooCharacter_CON"
|
||||
)
|
||||
|
||||
"""
|
||||
factory = unreal.AyonAssetContainerFactory()
|
||||
tools = unreal.AssetToolsHelpers().get_asset_tools()
|
||||
|
||||
return tools.create_asset(container, path, None, factory)
|
||||
|
||||
|
||||
def create_publish_instance(instance: str, path: str) -> unreal.Object:
|
||||
"""Helper function to create Ayon Publish Instance on given path.
|
||||
|
||||
This behaves similarly as :func:`create_ayon_container`.
|
||||
|
||||
Args:
|
||||
path (str): Path where to create Publish Instance.
|
||||
This path should point into container folder
|
||||
instance (str): Publish Instance name
|
||||
|
||||
Returns:
|
||||
:class:`unreal.Object`: instance of created asset
|
||||
|
||||
Example:
|
||||
|
||||
create_publish_instance(
|
||||
"/Game/modelingFooCharacter_INST",
|
||||
"modelingFooCharacter_INST"
|
||||
)
|
||||
|
||||
"""
|
||||
factory = unreal.AyonPublishInstanceFactory()
|
||||
tools = unreal.AssetToolsHelpers().get_asset_tools()
|
||||
return tools.create_asset(instance, path, None, factory)
|
||||
|
||||
|
||||
def cast_map_to_str_dict(umap) -> dict:
|
||||
"""Cast Unreal Map to dict.
|
||||
|
||||
Helper function to cast Unreal Map object to plain old python
|
||||
dict. This will also cast values and keys to str. Useful for
|
||||
metadata dicts.
|
||||
|
||||
Args:
|
||||
umap: Unreal Map object
|
||||
|
||||
Returns:
|
||||
dict
|
||||
|
||||
"""
|
||||
return {str(key): str(value) for (key, value) in umap.items()}
|
||||
|
||||
|
||||
def get_subsequences(sequence: unreal.LevelSequence):
|
||||
"""Get list of subsequences from sequence.
|
||||
|
||||
Args:
|
||||
sequence (unreal.LevelSequence): Sequence
|
||||
|
||||
Returns:
|
||||
list(unreal.LevelSequence): List of subsequences
|
||||
|
||||
"""
|
||||
tracks = sequence.get_master_tracks()
|
||||
subscene_track = next(
|
||||
(
|
||||
t
|
||||
for t in tracks
|
||||
if t.get_class() == unreal.MovieSceneSubTrack.static_class()
|
||||
),
|
||||
None,
|
||||
)
|
||||
if subscene_track is not None and subscene_track.get_sections():
|
||||
return subscene_track.get_sections()
|
||||
return []
|
||||
|
||||
|
||||
def set_sequence_hierarchy(
|
||||
seq_i, seq_j, max_frame_i, min_frame_j, max_frame_j, map_paths
|
||||
):
|
||||
# Get existing sequencer tracks or create them if they don't exist
|
||||
tracks = seq_i.get_master_tracks()
|
||||
subscene_track = None
|
||||
visibility_track = None
|
||||
for t in tracks:
|
||||
if t.get_class() == unreal.MovieSceneSubTrack.static_class():
|
||||
subscene_track = t
|
||||
if (t.get_class() ==
|
||||
unreal.MovieSceneLevelVisibilityTrack.static_class()):
|
||||
visibility_track = t
|
||||
if not subscene_track:
|
||||
subscene_track = seq_i.add_master_track(unreal.MovieSceneSubTrack)
|
||||
if not visibility_track:
|
||||
visibility_track = seq_i.add_master_track(
|
||||
unreal.MovieSceneLevelVisibilityTrack)
|
||||
|
||||
# Create the sub-scene section
|
||||
subscenes = subscene_track.get_sections()
|
||||
subscene = None
|
||||
for s in subscenes:
|
||||
if s.get_editor_property('sub_sequence') == seq_j:
|
||||
subscene = s
|
||||
break
|
||||
if not subscene:
|
||||
subscene = subscene_track.add_section()
|
||||
subscene.set_row_index(len(subscene_track.get_sections()))
|
||||
subscene.set_editor_property('sub_sequence', seq_j)
|
||||
subscene.set_range(
|
||||
min_frame_j,
|
||||
max_frame_j + 1)
|
||||
|
||||
# Create the visibility section
|
||||
ar = unreal.AssetRegistryHelpers.get_asset_registry()
|
||||
maps = []
|
||||
for m in map_paths:
|
||||
# Unreal requires to load the level to get the map name
|
||||
unreal.EditorLevelLibrary.save_all_dirty_levels()
|
||||
unreal.EditorLevelLibrary.load_level(m)
|
||||
maps.append(str(ar.get_asset_by_object_path(m).asset_name))
|
||||
|
||||
vis_section = visibility_track.add_section()
|
||||
index = len(visibility_track.get_sections())
|
||||
|
||||
vis_section.set_range(
|
||||
min_frame_j,
|
||||
max_frame_j + 1)
|
||||
vis_section.set_visibility(unreal.LevelVisibility.VISIBLE)
|
||||
vis_section.set_row_index(index)
|
||||
vis_section.set_level_names(maps)
|
||||
|
||||
if min_frame_j > 1:
|
||||
hid_section = visibility_track.add_section()
|
||||
hid_section.set_range(
|
||||
1,
|
||||
min_frame_j)
|
||||
hid_section.set_visibility(unreal.LevelVisibility.HIDDEN)
|
||||
hid_section.set_row_index(index)
|
||||
hid_section.set_level_names(maps)
|
||||
if max_frame_j < max_frame_i:
|
||||
hid_section = visibility_track.add_section()
|
||||
hid_section.set_range(
|
||||
max_frame_j + 1,
|
||||
max_frame_i + 1)
|
||||
hid_section.set_visibility(unreal.LevelVisibility.HIDDEN)
|
||||
hid_section.set_row_index(index)
|
||||
hid_section.set_level_names(maps)
|
||||
|
||||
|
||||
def generate_sequence(h, h_dir):
|
||||
tools = unreal.AssetToolsHelpers().get_asset_tools()
|
||||
|
||||
sequence = tools.create_asset(
|
||||
asset_name=h,
|
||||
package_path=h_dir,
|
||||
asset_class=unreal.LevelSequence,
|
||||
factory=unreal.LevelSequenceFactoryNew()
|
||||
)
|
||||
|
||||
project_name = get_current_project_name()
|
||||
# TODO Fix this does not return folder path
|
||||
folder_path = h_dir.split('/')[-1],
|
||||
folder_entity = ayon_api.get_folder_by_path(
|
||||
project_name,
|
||||
folder_path,
|
||||
fields={"id", "attrib.fps"}
|
||||
)
|
||||
|
||||
start_frames = []
|
||||
end_frames = []
|
||||
|
||||
elements = list(ayon_api.get_folders(
|
||||
project_name,
|
||||
parent_ids=[folder_entity["id"]],
|
||||
fields={"id", "attrib.clipIn", "attrib.clipOut"}
|
||||
))
|
||||
for e in elements:
|
||||
start_frames.append(e["attrib"].get("clipIn"))
|
||||
end_frames.append(e["attrib"].get("clipOut"))
|
||||
|
||||
elements.extend(ayon_api.get_folders(
|
||||
project_name,
|
||||
parent_ids=[e["id"]],
|
||||
fields={"id", "attrib.clipIn", "attrib.clipOut"}
|
||||
))
|
||||
|
||||
min_frame = min(start_frames)
|
||||
max_frame = max(end_frames)
|
||||
|
||||
fps = folder_entity["attrib"].get("fps")
|
||||
|
||||
sequence.set_display_rate(
|
||||
unreal.FrameRate(fps, 1.0))
|
||||
sequence.set_playback_start(min_frame)
|
||||
sequence.set_playback_end(max_frame)
|
||||
|
||||
sequence.set_work_range_start(min_frame / fps)
|
||||
sequence.set_work_range_end(max_frame / fps)
|
||||
sequence.set_view_range_start(min_frame / fps)
|
||||
sequence.set_view_range_end(max_frame / fps)
|
||||
|
||||
tracks = sequence.get_master_tracks()
|
||||
track = None
|
||||
for t in tracks:
|
||||
if (t.get_class() ==
|
||||
unreal.MovieSceneCameraCutTrack.static_class()):
|
||||
track = t
|
||||
break
|
||||
if not track:
|
||||
track = sequence.add_master_track(
|
||||
unreal.MovieSceneCameraCutTrack)
|
||||
|
||||
return sequence, (min_frame, max_frame)
|
||||
|
||||
|
||||
def _get_comps_and_assets(
|
||||
component_class, asset_class, old_assets, new_assets, selected
|
||||
):
|
||||
eas = unreal.get_editor_subsystem(unreal.EditorActorSubsystem)
|
||||
|
||||
components = []
|
||||
if selected:
|
||||
sel_actors = eas.get_selected_level_actors()
|
||||
for actor in sel_actors:
|
||||
comps = actor.get_components_by_class(component_class)
|
||||
components.extend(comps)
|
||||
else:
|
||||
comps = eas.get_all_level_actors_components()
|
||||
components = [
|
||||
c for c in comps if isinstance(c, component_class)
|
||||
]
|
||||
|
||||
# Get all the static meshes among the old assets in a dictionary with
|
||||
# the name as key
|
||||
selected_old_assets = {}
|
||||
for a in old_assets:
|
||||
asset = unreal.EditorAssetLibrary.load_asset(a)
|
||||
if isinstance(asset, asset_class):
|
||||
selected_old_assets[asset.get_name()] = asset
|
||||
|
||||
# Get all the static meshes among the new assets in a dictionary with
|
||||
# the name as key
|
||||
selected_new_assets = {}
|
||||
for a in new_assets:
|
||||
asset = unreal.EditorAssetLibrary.load_asset(a)
|
||||
if isinstance(asset, asset_class):
|
||||
selected_new_assets[asset.get_name()] = asset
|
||||
|
||||
return components, selected_old_assets, selected_new_assets
|
||||
|
||||
|
||||
def replace_static_mesh_actors(old_assets, new_assets, selected):
|
||||
smes = unreal.get_editor_subsystem(unreal.StaticMeshEditorSubsystem)
|
||||
|
||||
static_mesh_comps, old_meshes, new_meshes = _get_comps_and_assets(
|
||||
unreal.StaticMeshComponent,
|
||||
unreal.StaticMesh,
|
||||
old_assets,
|
||||
new_assets,
|
||||
selected
|
||||
)
|
||||
|
||||
for old_name, old_mesh in old_meshes.items():
|
||||
new_mesh = new_meshes.get(old_name)
|
||||
|
||||
if not new_mesh:
|
||||
continue
|
||||
|
||||
smes.replace_mesh_components_meshes(
|
||||
static_mesh_comps, old_mesh, new_mesh)
|
||||
|
||||
|
||||
def replace_skeletal_mesh_actors(old_assets, new_assets, selected):
|
||||
skeletal_mesh_comps, old_meshes, new_meshes = _get_comps_and_assets(
|
||||
unreal.SkeletalMeshComponent,
|
||||
unreal.SkeletalMesh,
|
||||
old_assets,
|
||||
new_assets,
|
||||
selected
|
||||
)
|
||||
|
||||
for old_name, old_mesh in old_meshes.items():
|
||||
new_mesh = new_meshes.get(old_name)
|
||||
|
||||
if not new_mesh:
|
||||
continue
|
||||
|
||||
for comp in skeletal_mesh_comps:
|
||||
if comp.get_skeletal_mesh_asset() == old_mesh:
|
||||
comp.set_skeletal_mesh_asset(new_mesh)
|
||||
|
||||
|
||||
def replace_geometry_cache_actors(old_assets, new_assets, selected):
|
||||
geometry_cache_comps, old_caches, new_caches = _get_comps_and_assets(
|
||||
unreal.GeometryCacheComponent,
|
||||
unreal.GeometryCache,
|
||||
old_assets,
|
||||
new_assets,
|
||||
selected
|
||||
)
|
||||
|
||||
for old_name, old_mesh in old_caches.items():
|
||||
new_mesh = new_caches.get(old_name)
|
||||
|
||||
if not new_mesh:
|
||||
continue
|
||||
|
||||
for comp in geometry_cache_comps:
|
||||
if comp.get_editor_property("geometry_cache") == old_mesh:
|
||||
comp.set_geometry_cache(new_mesh)
|
||||
|
||||
|
||||
def delete_asset_if_unused(container, asset_content):
|
||||
ar = unreal.AssetRegistryHelpers.get_asset_registry()
|
||||
|
||||
references = set()
|
||||
|
||||
for asset_path in asset_content:
|
||||
asset = ar.get_asset_by_object_path(asset_path)
|
||||
refs = ar.get_referencers(
|
||||
asset.package_name,
|
||||
unreal.AssetRegistryDependencyOptions(
|
||||
include_soft_package_references=False,
|
||||
include_hard_package_references=True,
|
||||
include_searchable_names=False,
|
||||
include_soft_management_references=False,
|
||||
include_hard_management_references=False
|
||||
))
|
||||
if not refs:
|
||||
continue
|
||||
references = references.union(set(refs))
|
||||
|
||||
# Filter out references that are in the Temp folder
|
||||
cleaned_references = {
|
||||
ref for ref in references if not str(ref).startswith("/Temp/")}
|
||||
|
||||
# Check which of the references are Levels
|
||||
for ref in cleaned_references:
|
||||
loaded_asset = unreal.EditorAssetLibrary.load_asset(ref)
|
||||
if isinstance(loaded_asset, unreal.World):
|
||||
# If there is at least a level, we can stop, we don't want to
|
||||
# delete the container
|
||||
return
|
||||
|
||||
unreal.log("Previous version unused, deleting...")
|
||||
|
||||
# No levels, delete the asset
|
||||
unreal.EditorAssetLibrary.delete_directory(container["namespace"])
|
||||
|
||||
|
||||
@contextmanager
|
||||
def maintained_selection():
|
||||
"""Stub to be either implemented or replaced.
|
||||
|
||||
This is needed for old publisher implementation, but
|
||||
it is not supported (yet) in UE.
|
||||
"""
|
||||
try:
|
||||
yield
|
||||
finally:
|
||||
pass
|
||||
|
|
@ -1,245 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
import ast
|
||||
import collections
|
||||
import sys
|
||||
import six
|
||||
from abc import (
|
||||
ABC,
|
||||
ABCMeta,
|
||||
)
|
||||
|
||||
import unreal
|
||||
|
||||
from .pipeline import (
|
||||
create_publish_instance,
|
||||
imprint,
|
||||
ls_inst,
|
||||
UNREAL_VERSION
|
||||
)
|
||||
from ayon_core.lib import (
|
||||
BoolDef,
|
||||
UILabelDef
|
||||
)
|
||||
from ayon_core.pipeline import (
|
||||
Creator,
|
||||
LoaderPlugin,
|
||||
CreatorError,
|
||||
CreatedInstance
|
||||
)
|
||||
|
||||
|
||||
@six.add_metaclass(ABCMeta)
|
||||
class UnrealBaseCreator(Creator):
|
||||
"""Base class for Unreal creator plugins."""
|
||||
root = "/Game/Ayon/AyonPublishInstances"
|
||||
suffix = "_INS"
|
||||
|
||||
@staticmethod
|
||||
def cache_instance_data(shared_data):
|
||||
"""Cache instances for Creators to shared data.
|
||||
|
||||
Create `unreal_cached_instances` key when needed in shared data and
|
||||
fill it with all collected instances from the scene under its
|
||||
respective creator identifiers.
|
||||
|
||||
If legacy instances are detected in the scene, create
|
||||
`unreal_cached_legacy_instances` there and fill it with
|
||||
all legacy products under family as a key.
|
||||
|
||||
Args:
|
||||
Dict[str, Any]: Shared data.
|
||||
|
||||
"""
|
||||
if "unreal_cached_instances" in shared_data:
|
||||
return
|
||||
|
||||
unreal_cached_instances = collections.defaultdict(list)
|
||||
unreal_cached_legacy_instances = collections.defaultdict(list)
|
||||
for instance in ls_inst():
|
||||
creator_id = instance.get("creator_identifier")
|
||||
if creator_id:
|
||||
unreal_cached_instances[creator_id].append(instance)
|
||||
else:
|
||||
family = instance.get("family")
|
||||
unreal_cached_legacy_instances[family].append(instance)
|
||||
|
||||
shared_data["unreal_cached_instances"] = unreal_cached_instances
|
||||
shared_data["unreal_cached_legacy_instances"] = (
|
||||
unreal_cached_legacy_instances
|
||||
)
|
||||
|
||||
def create(self, product_name, instance_data, pre_create_data):
|
||||
try:
|
||||
instance_name = f"{product_name}{self.suffix}"
|
||||
pub_instance = create_publish_instance(instance_name, self.root)
|
||||
|
||||
instance_data["productName"] = product_name
|
||||
instance_data["instance_path"] = f"{self.root}/{instance_name}"
|
||||
|
||||
instance = CreatedInstance(
|
||||
self.product_type,
|
||||
product_name,
|
||||
instance_data,
|
||||
self)
|
||||
self._add_instance_to_context(instance)
|
||||
|
||||
pub_instance.set_editor_property('add_external_assets', True)
|
||||
assets = pub_instance.get_editor_property('asset_data_external')
|
||||
|
||||
ar = unreal.AssetRegistryHelpers.get_asset_registry()
|
||||
|
||||
for member in pre_create_data.get("members", []):
|
||||
obj = ar.get_asset_by_object_path(member).get_asset()
|
||||
assets.add(obj)
|
||||
|
||||
imprint(f"{self.root}/{instance_name}", instance.data_to_store())
|
||||
|
||||
return instance
|
||||
|
||||
except Exception as er:
|
||||
six.reraise(
|
||||
CreatorError,
|
||||
CreatorError(f"Creator error: {er}"),
|
||||
sys.exc_info()[2])
|
||||
|
||||
def collect_instances(self):
|
||||
# cache instances if missing
|
||||
self.cache_instance_data(self.collection_shared_data)
|
||||
for instance in self.collection_shared_data[
|
||||
"unreal_cached_instances"].get(self.identifier, []):
|
||||
# Unreal saves metadata as string, so we need to convert it back
|
||||
instance['creator_attributes'] = ast.literal_eval(
|
||||
instance.get('creator_attributes', '{}'))
|
||||
instance['publish_attributes'] = ast.literal_eval(
|
||||
instance.get('publish_attributes', '{}'))
|
||||
created_instance = CreatedInstance.from_existing(instance, self)
|
||||
self._add_instance_to_context(created_instance)
|
||||
|
||||
def update_instances(self, update_list):
|
||||
for created_inst, changes in update_list:
|
||||
instance_node = created_inst.get("instance_path", "")
|
||||
|
||||
if not instance_node:
|
||||
unreal.log_warning(
|
||||
f"Instance node not found for {created_inst}")
|
||||
continue
|
||||
|
||||
new_values = {
|
||||
key: changes[key].new_value
|
||||
for key in changes.changed_keys
|
||||
}
|
||||
imprint(
|
||||
instance_node,
|
||||
new_values
|
||||
)
|
||||
|
||||
def remove_instances(self, instances):
|
||||
for instance in instances:
|
||||
instance_node = instance.data.get("instance_path", "")
|
||||
if instance_node:
|
||||
unreal.EditorAssetLibrary.delete_asset(instance_node)
|
||||
|
||||
self._remove_instance_from_context(instance)
|
||||
|
||||
|
||||
@six.add_metaclass(ABCMeta)
|
||||
class UnrealAssetCreator(UnrealBaseCreator):
|
||||
"""Base class for Unreal creator plugins based on assets."""
|
||||
|
||||
def create(self, product_name, instance_data, pre_create_data):
|
||||
"""Create instance of the asset.
|
||||
|
||||
Args:
|
||||
product_name (str): Name of the product.
|
||||
instance_data (dict): Data for the instance.
|
||||
pre_create_data (dict): Data for the instance.
|
||||
|
||||
Returns:
|
||||
CreatedInstance: Created instance.
|
||||
"""
|
||||
try:
|
||||
# Check if instance data has members, filled by the plugin.
|
||||
# If not, use selection.
|
||||
if not pre_create_data.get("members"):
|
||||
pre_create_data["members"] = []
|
||||
|
||||
if pre_create_data.get("use_selection"):
|
||||
utilib = unreal.EditorUtilityLibrary
|
||||
sel_objects = utilib.get_selected_assets()
|
||||
pre_create_data["members"] = [
|
||||
a.get_path_name() for a in sel_objects]
|
||||
|
||||
super(UnrealAssetCreator, self).create(
|
||||
product_name,
|
||||
instance_data,
|
||||
pre_create_data)
|
||||
|
||||
except Exception as er:
|
||||
six.reraise(
|
||||
CreatorError,
|
||||
CreatorError(f"Creator error: {er}"),
|
||||
sys.exc_info()[2])
|
||||
|
||||
def get_pre_create_attr_defs(self):
|
||||
return [
|
||||
BoolDef("use_selection", label="Use selection", default=True)
|
||||
]
|
||||
|
||||
|
||||
@six.add_metaclass(ABCMeta)
|
||||
class UnrealActorCreator(UnrealBaseCreator):
|
||||
"""Base class for Unreal creator plugins based on actors."""
|
||||
|
||||
def create(self, product_name, instance_data, pre_create_data):
|
||||
"""Create instance of the asset.
|
||||
|
||||
Args:
|
||||
product_name (str): Name of the product.
|
||||
instance_data (dict): Data for the instance.
|
||||
pre_create_data (dict): Data for the instance.
|
||||
|
||||
Returns:
|
||||
CreatedInstance: Created instance.
|
||||
"""
|
||||
try:
|
||||
if UNREAL_VERSION.major == 5:
|
||||
world = unreal.UnrealEditorSubsystem().get_editor_world()
|
||||
else:
|
||||
world = unreal.EditorLevelLibrary.get_editor_world()
|
||||
|
||||
# Check if the level is saved
|
||||
if world.get_path_name().startswith("/Temp/"):
|
||||
raise CreatorError(
|
||||
"Level must be saved before creating instances.")
|
||||
|
||||
# Check if instance data has members, filled by the plugin.
|
||||
# If not, use selection.
|
||||
if not instance_data.get("members"):
|
||||
actor_subsystem = unreal.EditorActorSubsystem()
|
||||
sel_actors = actor_subsystem.get_selected_level_actors()
|
||||
selection = [a.get_path_name() for a in sel_actors]
|
||||
|
||||
instance_data["members"] = selection
|
||||
|
||||
instance_data["level"] = world.get_path_name()
|
||||
|
||||
super(UnrealActorCreator, self).create(
|
||||
product_name,
|
||||
instance_data,
|
||||
pre_create_data)
|
||||
|
||||
except Exception as er:
|
||||
six.reraise(
|
||||
CreatorError,
|
||||
CreatorError(f"Creator error: {er}"),
|
||||
sys.exc_info()[2])
|
||||
|
||||
def get_pre_create_attr_defs(self):
|
||||
return [
|
||||
UILabelDef("Select actors to create instance from them.")
|
||||
]
|
||||
|
||||
|
||||
class Loader(LoaderPlugin, ABC):
|
||||
"""This serves as skeleton for future Ayon specific functionality"""
|
||||
pass
|
||||
|
|
@ -1,180 +0,0 @@
|
|||
import os
|
||||
|
||||
import unreal
|
||||
|
||||
from ayon_core.settings import get_project_settings
|
||||
from ayon_core.pipeline import Anatomy
|
||||
from ayon_core.tools.utils import show_message_dialog
|
||||
from ayon_unreal.api import pipeline
|
||||
|
||||
|
||||
queue = None
|
||||
executor = None
|
||||
|
||||
|
||||
def _queue_finish_callback(exec, success):
|
||||
unreal.log("Render completed. Success: " + str(success))
|
||||
|
||||
# Delete our reference so we don't keep it alive.
|
||||
global executor
|
||||
global queue
|
||||
del executor
|
||||
del queue
|
||||
|
||||
|
||||
def _job_finish_callback(job, success):
|
||||
# You can make any edits you want to the editor world here, and the world
|
||||
# will be duplicated when the next render happens. Make sure you undo your
|
||||
# edits in OnQueueFinishedCallback if you don't want to leak state changes
|
||||
# into the editor world.
|
||||
unreal.log("Individual job completed.")
|
||||
|
||||
|
||||
def start_rendering():
|
||||
"""
|
||||
Start the rendering process.
|
||||
"""
|
||||
unreal.log("Starting rendering...")
|
||||
|
||||
# Get selected sequences
|
||||
assets = unreal.EditorUtilityLibrary.get_selected_assets()
|
||||
|
||||
if not assets:
|
||||
show_message_dialog(
|
||||
title="No assets selected",
|
||||
message="No assets selected. Select a render instance.",
|
||||
level="warning")
|
||||
raise RuntimeError(
|
||||
"No assets selected. You need to select a render instance.")
|
||||
|
||||
# instances = pipeline.ls_inst()
|
||||
instances = [
|
||||
a for a in assets
|
||||
if a.get_class().get_name() == "AyonPublishInstance"]
|
||||
|
||||
inst_data = []
|
||||
|
||||
for i in instances:
|
||||
data = pipeline.parse_container(i.get_path_name())
|
||||
if data["productType"] == "render":
|
||||
inst_data.append(data)
|
||||
|
||||
try:
|
||||
project = os.environ.get("AYON_PROJECT_NAME")
|
||||
anatomy = Anatomy(project)
|
||||
root = anatomy.roots['renders']
|
||||
except Exception as e:
|
||||
raise Exception(
|
||||
"Could not find render root in anatomy settings.") from e
|
||||
|
||||
render_dir = f"{root}/{project}"
|
||||
|
||||
# subsystem = unreal.get_editor_subsystem(
|
||||
# unreal.MoviePipelineQueueSubsystem)
|
||||
# queue = subsystem.get_queue()
|
||||
global queue
|
||||
queue = unreal.MoviePipelineQueue()
|
||||
|
||||
ar = unreal.AssetRegistryHelpers.get_asset_registry()
|
||||
|
||||
data = get_project_settings(project)
|
||||
config = None
|
||||
config_path = str(data.get("unreal").get("render_config_path"))
|
||||
if config_path and unreal.EditorAssetLibrary.does_asset_exist(config_path):
|
||||
unreal.log("Found saved render configuration")
|
||||
config = ar.get_asset_by_object_path(config_path).get_asset()
|
||||
|
||||
for i in inst_data:
|
||||
sequence = ar.get_asset_by_object_path(i["sequence"]).get_asset()
|
||||
|
||||
sequences = [{
|
||||
"sequence": sequence,
|
||||
"output": f"{i['output']}",
|
||||
"frame_range": (
|
||||
int(float(i["frameStart"])),
|
||||
int(float(i["frameEnd"])) + 1)
|
||||
}]
|
||||
render_list = []
|
||||
|
||||
# Get all the sequences to render. If there are subsequences,
|
||||
# add them and their frame ranges to the render list. We also
|
||||
# use the names for the output paths.
|
||||
for seq in sequences:
|
||||
subscenes = pipeline.get_subsequences(seq.get('sequence'))
|
||||
|
||||
if subscenes:
|
||||
for sub_seq in subscenes:
|
||||
sequences.append({
|
||||
"sequence": sub_seq.get_sequence(),
|
||||
"output": (f"{seq.get('output')}/"
|
||||
f"{sub_seq.get_sequence().get_name()}"),
|
||||
"frame_range": (
|
||||
sub_seq.get_start_frame(), sub_seq.get_end_frame())
|
||||
})
|
||||
else:
|
||||
# Avoid rendering camera sequences
|
||||
if "_camera" not in seq.get('sequence').get_name():
|
||||
render_list.append(seq)
|
||||
|
||||
# Create the rendering jobs and add them to the queue.
|
||||
for render_setting in render_list:
|
||||
job = queue.allocate_new_job(unreal.MoviePipelineExecutorJob)
|
||||
job.sequence = unreal.SoftObjectPath(i["master_sequence"])
|
||||
job.map = unreal.SoftObjectPath(i["master_level"])
|
||||
job.author = "Ayon"
|
||||
|
||||
# If we have a saved configuration, copy it to the job.
|
||||
if config:
|
||||
job.get_configuration().copy_from(config)
|
||||
|
||||
# User data could be used to pass data to the job, that can be
|
||||
# read in the job's OnJobFinished callback. We could,
|
||||
# for instance, pass the AyonPublishInstance's path to the job.
|
||||
# job.user_data = ""
|
||||
|
||||
output_dir = render_setting.get('output')
|
||||
shot_name = render_setting.get('sequence').get_name()
|
||||
|
||||
settings = job.get_configuration().find_or_add_setting_by_class(
|
||||
unreal.MoviePipelineOutputSetting)
|
||||
settings.output_resolution = unreal.IntPoint(1920, 1080)
|
||||
settings.custom_start_frame = render_setting.get("frame_range")[0]
|
||||
settings.custom_end_frame = render_setting.get("frame_range")[1]
|
||||
settings.use_custom_playback_range = True
|
||||
settings.file_name_format = f"{shot_name}" + ".{frame_number}"
|
||||
settings.output_directory.path = f"{render_dir}/{output_dir}"
|
||||
|
||||
job.get_configuration().find_or_add_setting_by_class(
|
||||
unreal.MoviePipelineDeferredPassBase)
|
||||
|
||||
render_format = data.get("unreal").get("render_format", "png")
|
||||
|
||||
if render_format == "png":
|
||||
job.get_configuration().find_or_add_setting_by_class(
|
||||
unreal.MoviePipelineImageSequenceOutput_PNG)
|
||||
elif render_format == "exr":
|
||||
job.get_configuration().find_or_add_setting_by_class(
|
||||
unreal.MoviePipelineImageSequenceOutput_EXR)
|
||||
elif render_format == "jpg":
|
||||
job.get_configuration().find_or_add_setting_by_class(
|
||||
unreal.MoviePipelineImageSequenceOutput_JPG)
|
||||
elif render_format == "bmp":
|
||||
job.get_configuration().find_or_add_setting_by_class(
|
||||
unreal.MoviePipelineImageSequenceOutput_BMP)
|
||||
|
||||
# If there are jobs in the queue, start the rendering process.
|
||||
if queue.get_jobs():
|
||||
global executor
|
||||
executor = unreal.MoviePipelinePIEExecutor()
|
||||
|
||||
preroll_frames = data.get("unreal").get("preroll_frames", 0)
|
||||
|
||||
settings = unreal.MoviePipelinePIEExecutorSettings()
|
||||
settings.set_editor_property(
|
||||
"initial_delay_frame_count", preroll_frames)
|
||||
|
||||
executor.on_executor_finished_delegate.add_callable_unique(
|
||||
_queue_finish_callback)
|
||||
executor.on_individual_job_finished_delegate.add_callable_unique(
|
||||
_job_finish_callback) # Only available on PIE Executor
|
||||
executor.execute(queue)
|
||||
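# --- Minimal sketch (not part of the removed module) ------------------------
# Shows the MoviePipelineQueue / PIE executor pattern used by start_rendering()
# above with a single hard-coded job. The asset paths and output directory are
# hypothetical placeholders.
import unreal

def render_single_sequence(sequence_path, level_path, output_dir):
    """Queue one Level Sequence and render it in PIE."""
    queue = unreal.MoviePipelineQueue()

    job = queue.allocate_new_job(unreal.MoviePipelineExecutorJob)
    job.sequence = unreal.SoftObjectPath(sequence_path)
    job.map = unreal.SoftObjectPath(level_path)

    config = job.get_configuration()
    settings = config.find_or_add_setting_by_class(
        unreal.MoviePipelineOutputSetting)
    settings.output_directory.path = output_dir

    # Deferred render pass with a PNG image sequence, as in the module above.
    config.find_or_add_setting_by_class(unreal.MoviePipelineDeferredPassBase)
    config.find_or_add_setting_by_class(
        unreal.MoviePipelineImageSequenceOutput_PNG)

    # Keep a reference to the executor alive, as the module does with globals.
    executor = unreal.MoviePipelinePIEExecutor()
    executor.execute(queue)
    return executor

# Example call (placeholder paths):
# render_single_sequence(
#     "/Game/Ayon/Sequences/seq01.seq01",
#     "/Game/Ayon/Sequences/seq01_MasterLevel.seq01_MasterLevel",
#     "C:/renders/seq01")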
|
|
@ -1,162 +0,0 @@
|
|||
import sys
|
||||
from qtpy import QtWidgets, QtCore, QtGui
|
||||
|
||||
from ayon_core import (
|
||||
resources,
|
||||
style
|
||||
)
|
||||
from ayon_core.tools.utils import host_tools
|
||||
from ayon_core.tools.utils.lib import qt_app_context
|
||||
from ayon_unreal.api import rendering
|
||||
|
||||
|
||||
class ToolsBtnsWidget(QtWidgets.QWidget):
|
||||
"""Widget containing buttons which are clickable."""
|
||||
tool_required = QtCore.Signal(str)
|
||||
|
||||
def __init__(self, parent=None):
|
||||
super(ToolsBtnsWidget, self).__init__(parent)
|
||||
|
||||
load_btn = QtWidgets.QPushButton("Load...", self)
|
||||
publish_btn = QtWidgets.QPushButton("Publisher...", self)
|
||||
manage_btn = QtWidgets.QPushButton("Manage...", self)
|
||||
render_btn = QtWidgets.QPushButton("Render...", self)
|
||||
experimental_tools_btn = QtWidgets.QPushButton(
|
||||
"Experimental tools...", self
|
||||
)
|
||||
|
||||
layout = QtWidgets.QVBoxLayout(self)
|
||||
layout.setContentsMargins(0, 0, 0, 0)
|
||||
layout.addWidget(load_btn, 0)
|
||||
layout.addWidget(publish_btn, 0)
|
||||
layout.addWidget(manage_btn, 0)
|
||||
layout.addWidget(render_btn, 0)
|
||||
layout.addWidget(experimental_tools_btn, 0)
|
||||
layout.addStretch(1)
|
||||
|
||||
load_btn.clicked.connect(self._on_load)
|
||||
publish_btn.clicked.connect(self._on_publish)
|
||||
manage_btn.clicked.connect(self._on_manage)
|
||||
render_btn.clicked.connect(self._on_render)
|
||||
experimental_tools_btn.clicked.connect(self._on_experimental)
|
||||
|
||||
def _on_create(self):
|
||||
self.tool_required.emit("creator")
|
||||
|
||||
def _on_load(self):
|
||||
self.tool_required.emit("loader")
|
||||
|
||||
def _on_publish(self):
|
||||
self.tool_required.emit("publisher")
|
||||
|
||||
def _on_manage(self):
|
||||
self.tool_required.emit("sceneinventory")
|
||||
|
||||
def _on_render(self):
|
||||
rendering.start_rendering()
|
||||
|
||||
def _on_experimental(self):
|
||||
self.tool_required.emit("experimental_tools")
|
||||
|
||||
|
||||
class ToolsDialog(QtWidgets.QDialog):
|
||||
"""Dialog with tool buttons that will stay opened until user close it."""
|
||||
def __init__(self, *args, **kwargs):
|
||||
super(ToolsDialog, self).__init__(*args, **kwargs)
|
||||
|
||||
self.setWindowTitle("Ayon tools")
|
||||
icon = QtGui.QIcon(resources.get_ayon_icon_filepath())
|
||||
self.setWindowIcon(icon)
|
||||
|
||||
self.setWindowFlags(
|
||||
QtCore.Qt.Window
|
||||
| QtCore.Qt.WindowStaysOnTopHint
|
||||
)
|
||||
self.setFocusPolicy(QtCore.Qt.StrongFocus)
|
||||
|
||||
tools_widget = ToolsBtnsWidget(self)
|
||||
|
||||
layout = QtWidgets.QVBoxLayout(self)
|
||||
layout.addWidget(tools_widget)
|
||||
|
||||
tools_widget.tool_required.connect(self._on_tool_require)
|
||||
self._tools_widget = tools_widget
|
||||
|
||||
self._first_show = True
|
||||
|
||||
def sizeHint(self):
|
||||
result = super(ToolsDialog, self).sizeHint()
|
||||
result.setWidth(result.width() * 2)
|
||||
return result
|
||||
|
||||
def showEvent(self, event):
|
||||
super(ToolsDialog, self).showEvent(event)
|
||||
if self._first_show:
|
||||
self.setStyleSheet(style.load_stylesheet())
|
||||
self._first_show = False
|
||||
|
||||
def _on_tool_require(self, tool_name):
|
||||
host_tools.show_tool_by_name(tool_name, parent=self)
|
||||
|
||||
|
||||
class ToolsPopup(ToolsDialog):
|
||||
"""Popup with tool buttons that will close when loose focus."""
|
||||
def __init__(self, *args, **kwargs):
|
||||
super(ToolsPopup, self).__init__(*args, **kwargs)
|
||||
|
||||
self.setWindowFlags(
|
||||
QtCore.Qt.FramelessWindowHint
|
||||
| QtCore.Qt.Popup
|
||||
)
|
||||
|
||||
def showEvent(self, event):
|
||||
super(ToolsPopup, self).showEvent(event)
|
||||
app = QtWidgets.QApplication.instance()
|
||||
app.processEvents()
|
||||
pos = QtGui.QCursor.pos()
|
||||
self.move(pos)
|
||||
|
||||
|
||||
class WindowCache:
|
||||
"""Cached objects and methods to be used in global scope."""
|
||||
_dialog = None
|
||||
_popup = None
|
||||
_first_show = True
|
||||
|
||||
@classmethod
|
||||
def _before_show(cls):
|
||||
"""Create QApplication if does not exist yet."""
|
||||
if not cls._first_show:
|
||||
return
|
||||
|
||||
cls._first_show = False
|
||||
if not QtWidgets.QApplication.instance():
|
||||
QtWidgets.QApplication(sys.argv)
|
||||
|
||||
@classmethod
|
||||
def show_popup(cls):
|
||||
cls._before_show()
|
||||
with qt_app_context():
|
||||
if cls._popup is None:
|
||||
cls._popup = ToolsPopup()
|
||||
|
||||
cls._popup.show()
|
||||
|
||||
@classmethod
|
||||
def show_dialog(cls):
|
||||
cls._before_show()
|
||||
with qt_app_context():
|
||||
if cls._dialog is None:
|
||||
cls._dialog = ToolsDialog()
|
||||
|
||||
cls._dialog.show()
|
||||
cls._dialog.raise_()
|
||||
cls._dialog.activateWindow()
|
||||
|
||||
|
||||
def show_tools_popup():
|
||||
WindowCache.show_popup()
|
||||
|
||||
|
||||
def show_tools_dialog():
|
||||
WindowCache.show_dialog()
|
||||
|
|
@ -1,253 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""Hook to launch Unreal and prepare projects."""
|
||||
import os
|
||||
import copy
|
||||
import shutil
|
||||
import tempfile
|
||||
from pathlib import Path
|
||||
|
||||
from qtpy import QtCore
|
||||
|
||||
from ayon_core import resources
|
||||
from ayon_applications import (
|
||||
PreLaunchHook,
|
||||
ApplicationLaunchFailed,
|
||||
LaunchTypes,
|
||||
)
|
||||
from ayon_core.pipeline.workfile import get_workfile_template_key
|
||||
import ayon_unreal.lib as unreal_lib
|
||||
from ayon_unreal.ue_workers import (
|
||||
UEProjectGenerationWorker,
|
||||
UEPluginInstallWorker
|
||||
)
|
||||
from ayon_unreal.ui import SplashScreen
|
||||
|
||||
|
||||
class UnrealPrelaunchHook(PreLaunchHook):
|
||||
"""Hook to handle launching Unreal.
|
||||
|
||||
This hook will check if current workfile path has Unreal
|
||||
project inside. If not, it initializes it, and finally it passes the
|
||||
path to the project to the Unreal launcher shell script via an environment
|
||||
variable.
|
||||
|
||||
"""
|
||||
app_groups = {"unreal"}
|
||||
launch_types = {LaunchTypes.local}
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
|
||||
self.signature = f"( {self.__class__.__name__} )"
|
||||
|
||||
def _get_work_filename(self):
|
||||
# Use last workfile if was found
|
||||
if self.data.get("last_workfile_path"):
|
||||
last_workfile = Path(self.data.get("last_workfile_path"))
|
||||
if last_workfile and last_workfile.exists():
|
||||
return last_workfile.name
|
||||
|
||||
# Prepare data for fill data and for getting workfile template key
|
||||
anatomy = self.data["anatomy"]
|
||||
project_entity = self.data["project_entity"]
|
||||
|
||||
# Use already prepared workdir data
|
||||
workdir_data = copy.deepcopy(self.data["workdir_data"])
|
||||
task_type = workdir_data.get("task", {}).get("type")
|
||||
|
||||
# QUESTION raise exception if version is part of filename template?
|
||||
workdir_data["version"] = 1
|
||||
workdir_data["ext"] = "uproject"
|
||||
|
||||
# Get workfile template key for current context
|
||||
workfile_template_key = get_workfile_template_key(
|
||||
project_entity["name"],
|
||||
task_type,
|
||||
self.host_name,
|
||||
)
|
||||
# Fill templates
|
||||
template_obj = anatomy.get_template_item(
|
||||
"work", workfile_template_key, "file"
|
||||
)
|
||||
|
||||
# Return filename
|
||||
return template_obj.format_strict(workdir_data)
|
||||
|
||||
def exec_plugin_install(self, engine_path: Path, env: dict = None):
|
||||
# set up the QThread and worker with necessary signals
|
||||
env = env or os.environ
|
||||
q_thread = QtCore.QThread()
|
||||
ue_plugin_worker = UEPluginInstallWorker()
|
||||
|
||||
q_thread.started.connect(ue_plugin_worker.run)
|
||||
ue_plugin_worker.setup(engine_path, env)
|
||||
ue_plugin_worker.moveToThread(q_thread)
|
||||
|
||||
splash_screen = SplashScreen(
|
||||
"Installing plugin",
|
||||
resources.get_resource("app_icons", "ue4.png")
|
||||
)
|
||||
|
||||
# set up the splash screen with necessary triggers
|
||||
ue_plugin_worker.installing.connect(
|
||||
splash_screen.update_top_label_text
|
||||
)
|
||||
ue_plugin_worker.progress.connect(splash_screen.update_progress)
|
||||
ue_plugin_worker.log.connect(splash_screen.append_log)
|
||||
ue_plugin_worker.finished.connect(splash_screen.quit_and_close)
|
||||
ue_plugin_worker.failed.connect(splash_screen.fail)
|
||||
|
||||
splash_screen.start_thread(q_thread)
|
||||
splash_screen.show_ui()
|
||||
|
||||
if not splash_screen.was_proc_successful():
|
||||
raise ApplicationLaunchFailed("Couldn't run the application! "
|
||||
"Plugin failed to install!")
|
||||
|
||||
def exec_ue_project_gen(self,
|
||||
engine_version: str,
|
||||
unreal_project_name: str,
|
||||
engine_path: Path,
|
||||
project_dir: Path):
|
||||
self.log.info((
|
||||
f"{self.signature} Creating unreal "
|
||||
f"project [ {unreal_project_name} ]"
|
||||
))
|
||||
|
||||
q_thread = QtCore.QThread()
|
||||
ue_project_worker = UEProjectGenerationWorker()
|
||||
ue_project_worker.setup(
|
||||
engine_version,
|
||||
self.data["project_name"],
|
||||
unreal_project_name,
|
||||
engine_path,
|
||||
project_dir
|
||||
)
|
||||
ue_project_worker.moveToThread(q_thread)
|
||||
q_thread.started.connect(ue_project_worker.run)
|
||||
|
||||
splash_screen = SplashScreen(
|
||||
"Initializing UE project",
|
||||
resources.get_resource("app_icons", "ue4.png")
|
||||
)
|
||||
|
||||
ue_project_worker.stage_begin.connect(
|
||||
splash_screen.update_top_label_text
|
||||
)
|
||||
ue_project_worker.progress.connect(splash_screen.update_progress)
|
||||
ue_project_worker.log.connect(splash_screen.append_log)
|
||||
ue_project_worker.finished.connect(splash_screen.quit_and_close)
|
||||
ue_project_worker.failed.connect(splash_screen.fail)
|
||||
|
||||
splash_screen.start_thread(q_thread)
|
||||
splash_screen.show_ui()
|
||||
|
||||
if not splash_screen.was_proc_successful():
|
||||
raise ApplicationLaunchFailed("Couldn't run the application! "
|
||||
"Failed to generate the project!")
|
||||
|
||||
def execute(self):
|
||||
"""Hook entry method."""
|
||||
workdir = self.launch_context.env["AYON_WORKDIR"]
|
||||
executable = str(self.launch_context.executable)
|
||||
engine_version = self.app_name.split("/")[-1].replace("-", ".")
|
||||
try:
|
||||
if int(engine_version.split(".")[0]) < 4 and \
|
||||
int(engine_version.split(".")[1]) < 26:
|
||||
raise ApplicationLaunchFailed((
|
||||
f"{self.signature} Old unsupported version of UE "
|
||||
f"detected - {engine_version}"))
|
||||
except ValueError:
|
||||
# there can be a string in the minor version and in that case
|
||||
# int cast is failing. This probably happens only with
|
||||
# early access versions and is of no concern for this check
|
||||
# so let's keep it quiet.
|
||||
...
|
||||
|
||||
unreal_project_filename = self._get_work_filename()
|
||||
unreal_project_name = os.path.splitext(unreal_project_filename)[0]
|
||||
# Unreal is sensitive about project names longer than 20 chars
|
||||
if len(unreal_project_name) > 20:
|
||||
raise ApplicationLaunchFailed(
|
||||
f"Project name exceeds 20 characters ({unreal_project_name})!"
|
||||
)
|
||||
|
||||
# Unreal doesn't accept non-alphabetic characters at the start
|
||||
# of the project name. This is because project name is then used
|
||||
# in various places inside C++ code, where variable names cannot
|
||||
# start with non-alpha. We append 'P' before project name to solve it.
|
||||
# 😱
|
||||
if not unreal_project_name[:1].isalpha():
|
||||
self.log.warning((
|
||||
"Project name doesn't start with alphabet "
|
||||
f"character ({unreal_project_name}). Appending 'P'"
|
||||
))
|
||||
unreal_project_name = f"P{unreal_project_name}"
|
||||
unreal_project_filename = f'{unreal_project_name}.uproject'
|
||||
|
||||
project_path = Path(os.path.join(workdir, unreal_project_name))
|
||||
|
||||
self.log.info((
|
||||
f"{self.signature} requested UE version: "
|
||||
f"[ {engine_version} ]"
|
||||
))
|
||||
|
||||
project_path.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
# engine_path points to the specific Unreal Engine root
|
||||
# so, we are going up from the executable itself 3 levels.
|
||||
engine_path: Path = Path(executable).parents[3]
|
||||
|
||||
# Check if new env variable exists, and if it does, if the path
|
||||
# actually contains the plugin. If not, install it.
|
||||
|
||||
built_plugin_path = self.launch_context.env.get(
|
||||
"AYON_BUILT_UNREAL_PLUGIN", None)
|
||||
|
||||
if unreal_lib.check_built_plugin_existance(built_plugin_path):
|
||||
self.log.info((
|
||||
f"{self.signature} using existing built Ayon plugin from "
|
||||
f"{built_plugin_path}"
|
||||
))
|
||||
unreal_lib.copy_built_plugin(engine_path, Path(built_plugin_path))
|
||||
else:
|
||||
# Set "AYON_UNREAL_PLUGIN" to current process environment for
|
||||
# execution of `create_unreal_project`
|
||||
env_key = "AYON_UNREAL_PLUGIN"
|
||||
if self.launch_context.env.get(env_key):
|
||||
self.log.info((
|
||||
f"{self.signature} using Ayon plugin from "
|
||||
f"{self.launch_context.env.get(env_key)}"
|
||||
))
|
||||
if self.launch_context.env.get(env_key):
|
||||
os.environ[env_key] = self.launch_context.env[env_key]
|
||||
|
||||
if not unreal_lib.check_plugin_existence(engine_path):
|
||||
self.exec_plugin_install(engine_path)
|
||||
|
||||
project_file = project_path / unreal_project_filename
|
||||
|
||||
if not project_file.is_file():
|
||||
with tempfile.TemporaryDirectory() as temp_dir:
|
||||
self.exec_ue_project_gen(engine_version,
|
||||
unreal_project_name,
|
||||
engine_path,
|
||||
Path(temp_dir))
|
||||
try:
|
||||
self.log.info((
|
||||
f"Moving from {temp_dir} to "
|
||||
f"{project_path.as_posix()}"
|
||||
))
|
||||
shutil.copytree(
|
||||
temp_dir, project_path, dirs_exist_ok=True)
|
||||
|
||||
except shutil.Error as e:
|
||||
raise ApplicationLaunchFailed((
|
||||
f"{self.signature} Cannot copy directory {temp_dir} "
|
||||
f"to {project_path.as_posix()} - {e}"
|
||||
)) from e
|
||||
|
||||
self.launch_context.env["AYON_UNREAL_VERSION"] = engine_version
|
||||
# Append project file to launch arguments
|
||||
self.launch_context.launch_args.append(
|
||||
f"\"{project_file.as_posix()}\"")
|
||||
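# --- Hedged sketch (not part of the removed hook) ----------------------------
# Summarizes the project-name rules that execute() above enforces: at most
# 20 characters and an alphabetic first character (otherwise 'P' is prepended).
def sanitize_unreal_project_name(name: str) -> str:
    if len(name) > 20:
        raise ValueError(f"Project name exceeds 20 characters ({name})!")
    if not name[:1].isalpha():
        name = f"P{name}"
    return name

# sanitize_unreal_project_name("01_shot010")  -> "P01_shot010"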
|
|
@ -1 +0,0 @@
|
|||
Subproject commit 04b35dbf5fc42d905281fc30d3a22b139c1855e5
|
||||
|
|
@ -1,551 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""Unreal launching and project tools."""
|
||||
|
||||
import json
|
||||
import os
|
||||
import platform
|
||||
import re
|
||||
import subprocess
|
||||
from collections import OrderedDict
|
||||
from distutils import dir_util
|
||||
from pathlib import Path
|
||||
from typing import List
|
||||
|
||||
from ayon_core.settings import get_project_settings
|
||||
|
||||
|
||||
def get_engine_versions(env=None):
|
||||
"""Detect Unreal Engine versions.
|
||||
|
||||
This will try to detect location and versions of installed Unreal Engine.
|
||||
Location can be overridden by `UNREAL_ENGINE_LOCATION` environment
|
||||
variable.
|
||||
|
||||
.. deprecated:: 3.15.4
|
||||
|
||||
Args:
|
||||
env (dict, optional): Environment to use.
|
||||
|
||||
Returns:
|
||||
OrderedDict: dictionary with version as a key and dir as value.
|
||||
Entries are sorted by version in ascending order.
|
||||
|
||||
Example:
|
||||
>>> get_engine_versions()
|
||||
{
|
||||
"4.23": "C:/Epic Games/UE_4.23",
|
||||
"4.24": "C:/Epic Games/UE_4.24"
|
||||
}
|
||||
|
||||
"""
|
||||
env = env or os.environ
|
||||
engine_locations = {}
|
||||
try:
|
||||
root, dirs, _ = next(os.walk(env["UNREAL_ENGINE_LOCATION"]))
|
||||
|
||||
for directory in dirs:
|
||||
if directory.startswith("UE"):
|
||||
try:
|
||||
ver = re.split(r"[-_]", directory)[1]
|
||||
except IndexError:
|
||||
continue
|
||||
engine_locations[ver] = os.path.join(root, directory)
|
||||
except KeyError:
|
||||
# environment variable not set
|
||||
pass
|
||||
except OSError:
|
||||
# specified directory doesn't exist
|
||||
pass
|
||||
except StopIteration:
|
||||
# specified directory doesn't exist
|
||||
pass
|
||||
|
||||
# if we've got something, terminate auto-detection process
|
||||
if engine_locations:
|
||||
return OrderedDict(sorted(engine_locations.items()))
|
||||
|
||||
# else kick in platform specific detection
|
||||
if platform.system().lower() == "windows":
|
||||
return OrderedDict(sorted(_win_get_engine_versions().items()))
|
||||
if platform.system().lower() == "linux":
|
||||
# on linux, there is no installation and getting Unreal Engine involves
|
||||
# git clone. So we'll probably depend on `UNREAL_ENGINE_LOCATION`.
|
||||
pass
|
||||
if platform.system().lower() == "darwin":
|
||||
return OrderedDict(sorted(_darwin_get_engine_version().items()))
|
||||
|
||||
return OrderedDict()
|
||||
|
||||
|
||||
def get_editor_exe_path(engine_path: Path, engine_version: str) -> Path:
|
||||
"""Get UE Editor executable path."""
|
||||
ue_path = engine_path / "Engine/Binaries"
|
||||
|
||||
ue_name = "UnrealEditor"
|
||||
|
||||
# handle older versions of Unreal Engine
|
||||
if engine_version.split(".")[0] == "4":
|
||||
ue_name = "UE4Editor"
|
||||
|
||||
if platform.system().lower() == "windows":
|
||||
ue_path /= f"Win64/{ue_name}.exe"
|
||||
|
||||
elif platform.system().lower() == "linux":
|
||||
ue_path /= f"Linux/{ue_name}"
|
||||
|
||||
elif platform.system().lower() == "darwin":
|
||||
ue_path /= f"Mac/{ue_name}"
|
||||
|
||||
return ue_path
|
||||
|
||||
|
||||
def _win_get_engine_versions():
|
||||
"""Get Unreal Engine versions on Windows.
|
||||
|
||||
If engines are installed via Epic Games Launcher then there is:
|
||||
`%PROGRAMDATA%/Epic/UnrealEngineLauncher/LauncherInstalled.dat`
|
||||
This file is a JSON file listing installed software; Unreal Engine entries
|
||||
are marked with `"AppName" = "UE_X.XX"` like `UE_4.24`.
|
||||
|
||||
.. deprecated:: 3.15.4
|
||||
|
||||
Returns:
|
||||
dict: version as a key and path as a value.
|
||||
|
||||
"""
|
||||
install_json_path = os.path.join(
|
||||
os.getenv("PROGRAMDATA"),
|
||||
"Epic",
|
||||
"UnrealEngineLauncher",
|
||||
"LauncherInstalled.dat",
|
||||
)
|
||||
|
||||
return _parse_launcher_locations(install_json_path)
|
||||
|
||||
|
||||
def _darwin_get_engine_version() -> dict:
|
||||
"""Get Unreal Engine versions on MacOS.
|
||||
|
||||
It works the same as on Windows, just JSON file location is different.
|
||||
|
||||
.. deprecated:: 3.15.4
|
||||
|
||||
Returns:
|
||||
dict: version as a key and path as a value.
|
||||
|
||||
See Also:
|
||||
:func:`_win_get_engine_versions`.
|
||||
|
||||
"""
|
||||
install_json_path = os.path.join(
|
||||
os.getenv("HOME"),
|
||||
"Library",
|
||||
"Application Support",
|
||||
"Epic",
|
||||
"UnrealEngineLauncher",
|
||||
"LauncherInstalled.dat",
|
||||
)
|
||||
|
||||
return _parse_launcher_locations(install_json_path)
|
||||
|
||||
|
||||
def _parse_launcher_locations(install_json_path: str) -> dict:
|
||||
"""This will parse locations from json file.
|
||||
|
||||
.. deprecated:: 3.15.4
|
||||
|
||||
Args:
|
||||
install_json_path (str): Path to `LauncherInstalled.dat`.
|
||||
|
||||
Returns:
|
||||
dict: with unreal engine versions as keys and
|
||||
paths to those engine installations as value.
|
||||
|
||||
"""
|
||||
engine_locations = {}
|
||||
if os.path.isfile(install_json_path):
|
||||
with open(install_json_path, "r") as ilf:
|
||||
try:
|
||||
install_data = json.load(ilf)
|
||||
except json.JSONDecodeError as e:
|
||||
raise Exception(
|
||||
"Invalid `LauncherInstalled.dat file. `"
|
||||
"Cannot determine Unreal Engine location."
|
||||
) from e
|
||||
|
||||
for installation in install_data.get("InstallationList", []):
|
||||
if installation.get("AppName").startswith("UE_"):
|
||||
ver = installation.get("AppName").split("_")[1]
|
||||
engine_locations[ver] = installation.get("InstallLocation")
|
||||
|
||||
return engine_locations
|
||||
|
||||
|
||||
def create_unreal_project(project_name: str,
|
||||
unreal_project_name: str,
|
||||
ue_version: str,
|
||||
pr_dir: Path,
|
||||
engine_path: Path,
|
||||
dev_mode: bool = False,
|
||||
env: dict = None) -> None:
|
||||
"""This will create `.uproject` file at specified location.
|
||||
|
||||
As there is no way I know to create a project via command line, this is
|
||||
the easiest option. An Unreal project file is basically a JSON file. If we find
|
||||
the `AYON_UNREAL_PLUGIN` environment variable we assume this is the
|
||||
location of the Integration Plugin and we copy its content to the project
|
||||
folder and enable this plugin.
|
||||
|
||||
Args:
|
||||
project_name (str): Name of the project in AYON.
|
||||
unreal_project_name (str): Name of the project in Unreal.
|
||||
ue_version (str): Unreal engine version (like 4.23).
|
||||
pr_dir (Path): Path to directory where project will be created.
|
||||
engine_path (Path): Path to Unreal Engine installation.
|
||||
dev_mode (bool, optional): Flag to trigger C++ style Unreal project
|
||||
needing Visual Studio and other tools to compile plugins from
|
||||
sources. This will trigger automatically if `Binaries`
|
||||
directory is not found in plugin folders as this indicates
|
||||
this is only source distribution of the plugin. Dev mode
|
||||
is also set in Settings.
|
||||
env (dict, optional): Environment to use. If not set, `os.environ`.
|
||||
|
||||
Throws:
|
||||
NotImplementedError: For unsupported platforms.
|
||||
|
||||
Returns:
|
||||
None
|
||||
|
||||
Deprecated:
|
||||
since 3.16.0
|
||||
|
||||
"""
|
||||
|
||||
preset = get_project_settings(project_name)["unreal"]["project_setup"]
|
||||
# get unreal engine identifier
|
||||
# -------------------------------------------------------------------------
|
||||
# FIXME (antirotor): As of 4.26 this is a problem with UE4 built from
|
||||
# sources. In that case Engine ID is calculated per machine/user and not
|
||||
# from Engine files as this code then reads. This then prevents UE4
|
||||
# to directly open project as it will complain about project being
|
||||
# created in a different UE4 version. When a user converts such a project
|
||||
# to their UE4 version, the Engine ID is replaced in the uproject file. If some
|
||||
# other user tries to open it, it will present them with a similar error.
|
||||
|
||||
# engine_path should be the location of UE_X.X folder
|
||||
|
||||
ue_editor_exe: Path = get_editor_exe_path(engine_path, ue_version)
|
||||
cmdlet_project: Path = get_path_to_cmdlet_project(ue_version)
|
||||
|
||||
project_file = pr_dir / f"{unreal_project_name}.uproject"
|
||||
|
||||
print("--- Generating a new project ...")
|
||||
commandlet_cmd = [
|
||||
ue_editor_exe.as_posix(),
|
||||
cmdlet_project.as_posix(),
|
||||
"-run=AyonGenerateProject",
|
||||
project_file.resolve().as_posix()
|
||||
]
|
||||
|
||||
if dev_mode or preset["dev_mode"]:
|
||||
commandlet_cmd.append('-GenerateCode')
|
||||
|
||||
gen_process = subprocess.Popen(commandlet_cmd,
|
||||
stdout=subprocess.PIPE,
|
||||
stderr=subprocess.PIPE)
|
||||
|
||||
for line in gen_process.stdout:
|
||||
print(line.decode(), end='')
|
||||
gen_process.stdout.close()
|
||||
return_code = gen_process.wait()
|
||||
|
||||
if return_code and return_code != 0:
|
||||
raise RuntimeError(
|
||||
(f"Failed to generate '{unreal_project_name}' project! "
|
||||
f"Exited with return code {return_code}"))
|
||||
|
||||
print("--- Project has been generated successfully.")
|
||||
|
||||
with open(project_file.as_posix(), mode="r+") as pf:
|
||||
pf_json = json.load(pf)
|
||||
pf_json["EngineAssociation"] = get_build_id(engine_path, ue_version)
|
||||
pf.seek(0)
|
||||
json.dump(pf_json, pf, indent=4)
|
||||
pf.truncate()
|
||||
print("--- Engine ID has been written into the project file")
|
||||
|
||||
if dev_mode or preset["dev_mode"]:
|
||||
u_build_tool = get_path_to_ubt(engine_path, ue_version)
|
||||
|
||||
arch = "Win64"
|
||||
if platform.system().lower() == "windows":
|
||||
arch = "Win64"
|
||||
elif platform.system().lower() == "linux":
|
||||
arch = "Linux"
|
||||
elif platform.system().lower() == "darwin":
|
||||
# we need to test this out
|
||||
arch = "Mac"
|
||||
|
||||
command1 = [
|
||||
u_build_tool.as_posix(),
|
||||
"-projectfiles",
|
||||
f"-project={project_file}",
|
||||
"-progress"
|
||||
]
|
||||
|
||||
subprocess.run(command1)
|
||||
|
||||
command2 = [
|
||||
u_build_tool.as_posix(),
|
||||
f"-ModuleWithSuffix={unreal_project_name},3555",
|
||||
arch,
|
||||
"Development",
|
||||
"-TargetType=Editor",
|
||||
f"-Project={project_file}",
|
||||
project_file,
|
||||
"-IgnoreJunk"
|
||||
]
|
||||
|
||||
subprocess.run(command2)
|
||||
|
||||
# ensure we have PySide2 installed in engine
|
||||
python_path = None
|
||||
if platform.system().lower() == "windows":
|
||||
python_path = engine_path / ("Engine/Binaries/ThirdParty/"
|
||||
"Python3/Win64/python.exe")
|
||||
|
||||
if platform.system().lower() == "linux":
|
||||
python_path = engine_path / ("Engine/Binaries/ThirdParty/"
|
||||
"Python3/Linux/bin/python3")
|
||||
|
||||
if platform.system().lower() == "darwin":
|
||||
python_path = engine_path / ("Engine/Binaries/ThirdParty/"
|
||||
"Python3/Mac/bin/python3")
|
||||
|
||||
if not python_path:
|
||||
raise NotImplementedError("Unsupported platform")
|
||||
if not python_path.exists():
|
||||
raise RuntimeError(f"Unreal Python not found at {python_path}")
|
||||
subprocess.check_call(
|
||||
[python_path.as_posix(), "-m", "pip", "install", "pyside2"])
|
||||
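# --- Hypothetical usage sketch (not part of the removed module) -------------
# Demonstrates calling the deprecated helper above; every value below is a
# placeholder, not something taken from this repository.
from pathlib import Path

create_unreal_project(
    project_name="MyAyonProject",
    unreal_project_name="PMyAyonProject",
    ue_version="5.3",
    pr_dir=Path("C:/work/MyAyonProject"),
    engine_path=Path("C:/Epic Games/UE_5.3"),
    dev_mode=False,
)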
|
||||
|
||||
def get_path_to_uat(engine_path: Path) -> Path:
|
||||
if platform.system().lower() == "windows":
|
||||
return engine_path / "Engine/Build/BatchFiles/RunUAT.bat"
|
||||
|
||||
if platform.system().lower() in ["linux", "darwin"]:
|
||||
return engine_path / "Engine/Build/BatchFiles/RunUAT.sh"
|
||||
|
||||
|
||||
def get_compatible_integration(
|
||||
ue_version: str, integration_root: Path) -> List[Path]:
|
||||
"""Get path to compatible version of integration plugin.
|
||||
|
||||
This will try to get the closest compatible versions to the one
|
||||
specified, returned as a sorted list.
|
||||
|
||||
Args:
|
||||
ue_version (str): version of the current Unreal Engine.
|
||||
integration_root (Path): path to built-in integration plugins.
|
||||
|
||||
Returns:
|
||||
list of Path: Sorted list of paths closest to the specified
|
||||
version.
|
||||
|
||||
"""
|
||||
major, minor = ue_version.split(".")
|
||||
integration_paths = [p for p in integration_root.iterdir()
|
||||
if p.is_dir()]
|
||||
|
||||
compatible_versions = []
|
||||
for i in integration_paths:
|
||||
# parse version from path
|
||||
try:
|
||||
i_major, i_minor = re.search(
|
||||
r"(?P<major>\d+).(?P<minor>\d+)$", i.name).groups()
|
||||
except AttributeError:
|
||||
# in case there is no match, just skip to next
|
||||
continue
|
||||
|
||||
# consider versions with different major so different that they
|
||||
# are incompatible
|
||||
if int(major) != int(i_major):
|
||||
continue
|
||||
|
||||
compatible_versions.append(i)
|
||||
|
||||
compatible_versions = sorted(set(compatible_versions))
|
||||
return compatible_versions
|
||||
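# --- Illustrative example (not part of the removed module) ------------------
# How the helper above is consumed by get_path_to_cmdlet_project(): the last
# entry of the sorted result is treated as the closest compatible version.
# The integration root below is a placeholder path.
from pathlib import Path

candidates = get_compatible_integration("5.3", Path("integration"))
if candidates:
    closest = candidates[-1]
    print(f"Closest compatible integration: {closest}")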
|
||||
|
||||
def get_path_to_cmdlet_project(ue_version: str) -> Path:
|
||||
cmd_project = Path(
|
||||
os.path.dirname(os.path.abspath(__file__)))
|
||||
|
||||
# For now, only tested on Windows (For Linux and Mac
|
||||
# it has to be implemented)
|
||||
cmd_project /= f"integration/UE_{ue_version}"
|
||||
|
||||
# if the integration doesn't exist for current engine version
|
||||
# try to find the closest to it.
|
||||
if cmd_project.exists():
|
||||
return cmd_project / "CommandletProject/CommandletProject.uproject"
|
||||
|
||||
if compatible_versions := get_compatible_integration(
|
||||
ue_version, cmd_project.parent
|
||||
):
|
||||
return compatible_versions[-1] / "CommandletProject/CommandletProject.uproject" # noqa: E501
|
||||
else:
|
||||
raise RuntimeError(
|
||||
("There are no compatible versions of Unreal "
|
||||
"integration plugin compatible with running version "
|
||||
f"of Unreal Engine {ue_version}"))
|
||||
|
||||
|
||||
def get_path_to_ubt(engine_path: Path, ue_version: str) -> Path:
|
||||
u_build_tool_path = engine_path / "Engine/Binaries/DotNET"
|
||||
|
||||
if ue_version.split(".")[0] == "4":
|
||||
u_build_tool_path /= "UnrealBuildTool.exe"
|
||||
elif ue_version.split(".")[0] == "5":
|
||||
u_build_tool_path /= "UnrealBuildTool/UnrealBuildTool.exe"
|
||||
|
||||
return Path(u_build_tool_path)
|
||||
|
||||
|
||||
def get_build_id(engine_path: Path, ue_version: str) -> str:
|
||||
ue_modules = Path()
|
||||
if platform.system().lower() == "windows":
|
||||
ue_modules_path = engine_path / "Engine/Binaries/Win64"
|
||||
if ue_version.split(".")[0] == "4":
|
||||
ue_modules_path /= "UE4Editor.modules"
|
||||
elif ue_version.split(".")[0] == "5":
|
||||
ue_modules_path /= "UnrealEditor.modules"
|
||||
ue_modules = Path(ue_modules_path)
|
||||
|
||||
if platform.system().lower() == "linux":
|
||||
ue_modules = Path(os.path.join(engine_path, "Engine", "Binaries",
|
||||
"Linux", "UE4Editor.modules"))
|
||||
|
||||
if platform.system().lower() == "darwin":
|
||||
ue_modules = Path(os.path.join(engine_path, "Engine", "Binaries",
|
||||
"Mac", "UE4Editor.modules"))
|
||||
|
||||
if ue_modules.exists():
|
||||
print("--- Loading Engine ID from modules file ...")
|
||||
with open(ue_modules, "r") as mp:
|
||||
loaded_modules = json.load(mp)
|
||||
|
||||
if loaded_modules.get("BuildId"):
|
||||
return "{" + loaded_modules.get("BuildId") + "}"
|
||||
|
||||
|
||||
def check_built_plugin_existance(plugin_path) -> bool:
|
||||
if not plugin_path:
|
||||
return False
|
||||
|
||||
integration_plugin_path = Path(plugin_path)
|
||||
|
||||
if not integration_plugin_path.is_dir():
|
||||
raise RuntimeError("Path to the integration plugin is null!")
|
||||
|
||||
if not (integration_plugin_path / "Binaries").is_dir() \
|
||||
or not (integration_plugin_path / "Intermediate").is_dir():
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
|
||||
def copy_built_plugin(engine_path: Path, plugin_path: Path) -> None:
|
||||
ayon_plugin_path: Path = engine_path / "Engine/Plugins/Marketplace/Ayon"
|
||||
|
||||
if not ayon_plugin_path.is_dir():
|
||||
ayon_plugin_path.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
engine_plugin_config_path: Path = ayon_plugin_path / "Config"
|
||||
engine_plugin_config_path.mkdir(exist_ok=True)
|
||||
|
||||
dir_util._path_created = {}
|
||||
|
||||
dir_util.copy_tree(plugin_path.as_posix(), ayon_plugin_path.as_posix())
|
||||
|
||||
|
||||
def check_plugin_existence(engine_path: Path, env: dict = None) -> bool:
|
||||
env = env or os.environ
|
||||
integration_plugin_path: Path = Path(env.get("AYON_UNREAL_PLUGIN", ""))
|
||||
|
||||
if not os.path.isdir(integration_plugin_path):
|
||||
raise RuntimeError("Path to the integration plugin is null!")
|
||||
|
||||
# Create a path to the plugin in the engine
|
||||
op_plugin_path: Path = engine_path / "Engine/Plugins/Marketplace/Ayon"
|
||||
|
||||
if not op_plugin_path.is_dir():
|
||||
return False
|
||||
|
||||
if not (op_plugin_path / "Binaries").is_dir() \
|
||||
or not (op_plugin_path / "Intermediate").is_dir():
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
|
||||
def try_installing_plugin(engine_path: Path, env: dict = None) -> None:
|
||||
env = env or os.environ
|
||||
|
||||
integration_plugin_path: Path = Path(env.get("AYON_UNREAL_PLUGIN", ""))
|
||||
|
||||
if not os.path.isdir(integration_plugin_path):
|
||||
raise RuntimeError("Path to the integration plugin is null!")
|
||||
|
||||
# Create a path to the plugin in the engine
|
||||
op_plugin_path: Path = engine_path / "Engine/Plugins/Marketplace/Ayon"
|
||||
|
||||
if not op_plugin_path.is_dir():
|
||||
op_plugin_path.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
engine_plugin_config_path: Path = op_plugin_path / "Config"
|
||||
engine_plugin_config_path.mkdir(exist_ok=True)
|
||||
|
||||
dir_util._path_created = {}
|
||||
|
||||
if not (op_plugin_path / "Binaries").is_dir() \
|
||||
or not (op_plugin_path / "Intermediate").is_dir():
|
||||
_build_and_move_plugin(engine_path, op_plugin_path, env)
|
||||
|
||||
|
||||
def _build_and_move_plugin(engine_path: Path,
|
||||
plugin_build_path: Path,
|
||||
env: dict = None) -> None:
|
||||
uat_path: Path = get_path_to_uat(engine_path)
|
||||
|
||||
env = env or os.environ
|
||||
integration_plugin_path: Path = Path(env.get("AYON_UNREAL_PLUGIN", ""))
|
||||
|
||||
if uat_path.is_file():
|
||||
temp_dir: Path = integration_plugin_path.parent / "Temp"
|
||||
temp_dir.mkdir(exist_ok=True)
|
||||
uplugin_path: Path = integration_plugin_path / "Ayon.uplugin"
|
||||
|
||||
# in order to successfully build the plugin,
|
||||
# It must be built outside the Engine directory and then moved
|
||||
build_plugin_cmd: List[str] = [f'{uat_path.as_posix()}',
|
||||
'BuildPlugin',
|
||||
f'-Plugin={uplugin_path.as_posix()}',
|
||||
f'-Package={temp_dir.as_posix()}']
|
||||
subprocess.run(build_plugin_cmd)
|
||||
|
||||
# Copy the contents of the 'Temp' dir into the
|
||||
# 'Ayon' directory in the engine
|
||||
dir_util.copy_tree(temp_dir.as_posix(), plugin_build_path.as_posix())
|
||||
|
||||
# We need to also copy the config folder.
|
||||
# The UAT doesn't include the Config folder in the build
|
||||
plugin_install_config_path: Path = plugin_build_path / "Config"
|
||||
integration_plugin_config_path = integration_plugin_path / "Config"
|
||||
|
||||
dir_util.copy_tree(integration_plugin_config_path.as_posix(),
|
||||
plugin_install_config_path.as_posix())
|
||||
|
||||
dir_util.remove_tree(temp_dir.as_posix())
|
||||
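# --- Hedged sketch (not part of the removed module) --------------------------
# The UAT invocation assembled by _build_and_move_plugin() above, shown as a
# standalone command for manual reproduction. Paths are placeholders.
import subprocess
from pathlib import Path

engine_path = Path("C:/Epic Games/UE_5.3")
uplugin_path = Path("C:/dev/ayon-unreal-plugin/Ayon.uplugin")
package_dir = Path("C:/dev/Temp")

subprocess.run([
    get_path_to_uat(engine_path).as_posix(),
    "BuildPlugin",
    f"-Plugin={uplugin_path.as_posix()}",
    f"-Package={package_dir.as_posix()}",
])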
|
|
@ -1,38 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
import unreal
|
||||
|
||||
from ayon_core.pipeline import CreatorError
|
||||
from ayon_unreal.api.pipeline import UNREAL_VERSION
|
||||
from ayon_unreal.api.plugin import (
|
||||
UnrealAssetCreator,
|
||||
)
|
||||
|
||||
|
||||
class CreateCamera(UnrealAssetCreator):
|
||||
"""Create Camera."""
|
||||
|
||||
identifier = "io.ayon.creators.unreal.camera"
|
||||
label = "Camera"
|
||||
product_type = "camera"
|
||||
icon = "fa.camera"
|
||||
|
||||
def create(self, product_name, instance_data, pre_create_data):
|
||||
if pre_create_data.get("use_selection"):
|
||||
sel_objects = unreal.EditorUtilityLibrary.get_selected_assets()
|
||||
selection = [a.get_path_name() for a in sel_objects]
|
||||
|
||||
if len(selection) != 1:
|
||||
raise CreatorError("Please select only one object.")
|
||||
|
||||
# Add the current level path to the metadata
|
||||
if UNREAL_VERSION.major == 5:
|
||||
world = unreal.UnrealEditorSubsystem().get_editor_world()
|
||||
else:
|
||||
world = unreal.EditorLevelLibrary.get_editor_world()
|
||||
|
||||
instance_data["level"] = world.get_path_name()
|
||||
|
||||
super(CreateCamera, self).create(
|
||||
product_name,
|
||||
instance_data,
|
||||
pre_create_data)
|
||||
|
|
@ -1,13 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
from ayon_unreal.api.plugin import (
|
||||
UnrealActorCreator,
|
||||
)
|
||||
|
||||
|
||||
class CreateLayout(UnrealActorCreator):
|
||||
"""Layout output for character rigs."""
|
||||
|
||||
identifier = "io.ayon.creators.unreal.layout"
|
||||
label = "Layout"
|
||||
product_type = "layout"
|
||||
icon = "cubes"
|
||||
|
|
@ -1,77 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
import unreal
|
||||
|
||||
from ayon_core.pipeline import CreatorError
|
||||
from ayon_unreal.api.pipeline import (
|
||||
create_folder
|
||||
)
|
||||
from ayon_unreal.api.plugin import (
|
||||
UnrealAssetCreator
|
||||
)
|
||||
from ayon_core.lib import UILabelDef
|
||||
|
||||
|
||||
class CreateLook(UnrealAssetCreator):
|
||||
"""Shader connections defining shape look."""
|
||||
|
||||
identifier = "io.ayon.creators.unreal.look"
|
||||
label = "Look"
|
||||
product_type = "look"
|
||||
icon = "paint-brush"
|
||||
|
||||
def create(self, product_name, instance_data, pre_create_data):
|
||||
# We need to set this to True for the parent class to work
|
||||
pre_create_data["use_selection"] = True
|
||||
sel_objects = unreal.EditorUtilityLibrary.get_selected_assets()
|
||||
selection = [a.get_path_name() for a in sel_objects]
|
||||
|
||||
if len(selection) != 1:
|
||||
raise CreatorError("Please select only one asset.")
|
||||
|
||||
selected_asset = selection[0]
|
||||
|
||||
look_directory = "/Game/Ayon/Looks"
|
||||
|
||||
# Create the folder
|
||||
folder_name = create_folder(look_directory, product_name)
|
||||
path = f"{look_directory}/{folder_name}"
|
||||
|
||||
instance_data["look"] = path
|
||||
|
||||
# Create a new cube static mesh
|
||||
ar = unreal.AssetRegistryHelpers.get_asset_registry()
|
||||
cube = ar.get_asset_by_object_path("/Engine/BasicShapes/Cube.Cube")
|
||||
|
||||
# Get the mesh of the selected object
|
||||
original_mesh = ar.get_asset_by_object_path(selected_asset).get_asset()
|
||||
materials = original_mesh.get_editor_property('static_materials')
|
||||
|
||||
pre_create_data["members"] = []
|
||||
|
||||
# Add the materials to the cube
|
||||
for material in materials:
|
||||
mat_name = material.get_editor_property('material_slot_name')
|
||||
object_path = f"{path}/{mat_name}.{mat_name}"
|
||||
unreal_object = unreal.EditorAssetLibrary.duplicate_loaded_asset(
|
||||
cube.get_asset(), object_path
|
||||
)
|
||||
|
||||
# Remove the default material of the cube object
|
||||
unreal_object.get_editor_property('static_materials').pop()
|
||||
|
||||
unreal_object.add_material(
|
||||
material.get_editor_property('material_interface'))
|
||||
|
||||
pre_create_data["members"].append(object_path)
|
||||
|
||||
unreal.EditorAssetLibrary.save_asset(object_path)
|
||||
|
||||
super(CreateLook, self).create(
|
||||
product_name,
|
||||
instance_data,
|
||||
pre_create_data)
|
||||
|
||||
def get_pre_create_attr_defs(self):
|
||||
return [
|
||||
UILabelDef("Select the asset from which to create the look.")
|
||||
]
|
||||
|
|
@ -1,276 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
from pathlib import Path
|
||||
|
||||
import unreal
|
||||
|
||||
from ayon_unreal.api.pipeline import (
|
||||
UNREAL_VERSION,
|
||||
create_folder,
|
||||
get_subsequences,
|
||||
)
|
||||
from ayon_unreal.api.plugin import (
|
||||
UnrealAssetCreator
|
||||
)
|
||||
from ayon_core.lib import (
|
||||
UILabelDef,
|
||||
UISeparatorDef,
|
||||
BoolDef,
|
||||
NumberDef
|
||||
)
|
||||
|
||||
|
||||
class CreateRender(UnrealAssetCreator):
|
||||
"""Create instance for sequence for rendering"""
|
||||
|
||||
identifier = "io.ayon.creators.unreal.render"
|
||||
label = "Render"
|
||||
product_type = "render"
|
||||
icon = "eye"
|
||||
|
||||
def create_instance(
|
||||
self, instance_data, product_name, pre_create_data,
|
||||
selected_asset_path, master_seq, master_lvl, seq_data
|
||||
):
|
||||
instance_data["members"] = [selected_asset_path]
|
||||
instance_data["sequence"] = selected_asset_path
|
||||
instance_data["master_sequence"] = master_seq
|
||||
instance_data["master_level"] = master_lvl
|
||||
instance_data["output"] = seq_data.get('output')
|
||||
instance_data["frameStart"] = seq_data.get('frame_range')[0]
|
||||
instance_data["frameEnd"] = seq_data.get('frame_range')[1]
|
||||
|
||||
super(CreateRender, self).create(
|
||||
product_name,
|
||||
instance_data,
|
||||
pre_create_data)
|
||||
|
||||
def create_with_new_sequence(
|
||||
self, product_name, instance_data, pre_create_data
|
||||
):
|
||||
# If the option to create a new level sequence is selected,
|
||||
# create a new level sequence and a master level.
|
||||
|
||||
root = "/Game/Ayon/Sequences"
|
||||
|
||||
# Create a new folder for the sequence in root
|
||||
sequence_dir_name = create_folder(root, product_name)
|
||||
sequence_dir = f"{root}/{sequence_dir_name}"
|
||||
|
||||
unreal.log_warning(f"sequence_dir: {sequence_dir}")
|
||||
|
||||
# Create the level sequence
|
||||
asset_tools = unreal.AssetToolsHelpers.get_asset_tools()
|
||||
seq = asset_tools.create_asset(
|
||||
asset_name=product_name,
|
||||
package_path=sequence_dir,
|
||||
asset_class=unreal.LevelSequence,
|
||||
factory=unreal.LevelSequenceFactoryNew())
|
||||
|
||||
seq.set_playback_start(pre_create_data.get("start_frame"))
|
||||
seq.set_playback_end(pre_create_data.get("end_frame"))
|
||||
|
||||
pre_create_data["members"] = [seq.get_path_name()]
|
||||
|
||||
unreal.EditorAssetLibrary.save_asset(seq.get_path_name())
|
||||
|
||||
# Create the master level
|
||||
if UNREAL_VERSION.major >= 5:
|
||||
curr_level = unreal.LevelEditorSubsystem().get_current_level()
|
||||
else:
|
||||
world = unreal.EditorLevelLibrary.get_editor_world()
|
||||
levels = unreal.EditorLevelUtils.get_levels(world)
|
||||
curr_level = levels[0] if len(levels) else None
|
||||
if not curr_level:
|
||||
raise RuntimeError("No level loaded.")
|
||||
curr_level_path = curr_level.get_outer().get_path_name()
|
||||
|
||||
# If the level path does not start with "/Game/", the current
|
||||
# level is a temporary, unsaved level.
|
||||
if curr_level_path.startswith("/Game/"):
|
||||
if UNREAL_VERSION.major >= 5:
|
||||
unreal.LevelEditorSubsystem().save_current_level()
|
||||
else:
|
||||
unreal.EditorLevelLibrary.save_current_level()
|
||||
|
||||
ml_path = f"{sequence_dir}/{product_name}_MasterLevel"
|
||||
|
||||
if UNREAL_VERSION.major >= 5:
|
||||
unreal.LevelEditorSubsystem().new_level(ml_path)
|
||||
else:
|
||||
unreal.EditorLevelLibrary.new_level(ml_path)
|
||||
|
||||
seq_data = {
|
||||
"sequence": seq,
|
||||
"output": f"{seq.get_name()}",
|
||||
"frame_range": (
|
||||
seq.get_playback_start(),
|
||||
seq.get_playback_end())}
|
||||
|
||||
self.create_instance(
|
||||
instance_data, product_name, pre_create_data,
|
||||
seq.get_path_name(), seq.get_path_name(), ml_path, seq_data)
|
||||
|
||||
def create_from_existing_sequence(
|
||||
self, product_name, instance_data, pre_create_data
|
||||
):
|
||||
ar = unreal.AssetRegistryHelpers.get_asset_registry()
|
||||
|
||||
sel_objects = unreal.EditorUtilityLibrary.get_selected_assets()
|
||||
selection = [
|
||||
a.get_path_name() for a in sel_objects
|
||||
if a.get_class().get_name() == "LevelSequence"]
|
||||
|
||||
if len(selection) == 0:
|
||||
raise RuntimeError("Please select at least one Level Sequence.")
|
||||
|
||||
seq_data = None
|
||||
|
||||
for sel in selection:
|
||||
selected_asset = ar.get_asset_by_object_path(sel).get_asset()
|
||||
selected_asset_path = selected_asset.get_path_name()
|
||||
|
||||
# Check if the selected asset is a level sequence asset.
|
||||
if selected_asset.get_class().get_name() != "LevelSequence":
|
||||
unreal.log_warning(
|
||||
f"Skipping {selected_asset.get_name()}. It isn't a Level "
|
||||
"Sequence.")
|
||||
|
||||
if pre_create_data.get("use_hierarchy"):
|
||||
# The asset name is the third element of the path which
|
||||
# contains the map.
|
||||
# To take the asset name, we remove from the path the prefix
|
||||
# "/Game/OpenPype/" and then we split the path by "/".
|
||||
sel_path = selected_asset_path
|
||||
asset_name = sel_path.replace(
|
||||
"/Game/Ayon/", "").split("/")[0]
|
||||
|
||||
search_path = f"/Game/Ayon/{asset_name}"
|
||||
else:
|
||||
search_path = Path(selected_asset_path).parent.as_posix()
|
||||
|
||||
# Get the master sequence and the master level.
|
||||
# There should be only one sequence and one level in the directory.
|
||||
try:
|
||||
ar_filter = unreal.ARFilter(
|
||||
class_names=["LevelSequence"],
|
||||
package_paths=[search_path],
|
||||
recursive_paths=False)
|
||||
sequences = ar.get_assets(ar_filter)
|
||||
master_seq = sequences[0].get_asset().get_path_name()
|
||||
master_seq_obj = sequences[0].get_asset()
|
||||
ar_filter = unreal.ARFilter(
|
||||
class_names=["World"],
|
||||
package_paths=[search_path],
|
||||
recursive_paths=False)
|
||||
levels = ar.get_assets(ar_filter)
|
||||
master_lvl = levels[0].get_asset().get_path_name()
|
||||
except IndexError:
|
||||
raise RuntimeError(
|
||||
"Could not find the hierarchy for the selected sequence.")
|
||||
|
||||
# If the selected asset is the master sequence, we get its data
|
||||
# and then we create the instance for the master sequence.
|
||||
# Otherwise, we cycle from the master sequence to find the selected
|
||||
# sequence and we get its data. This data will be used to create
|
||||
# the instance for the selected sequence. In particular,
|
||||
# we get the frame range of the selected sequence and its final
|
||||
# output path.
|
||||
master_seq_data = {
|
||||
"sequence": master_seq_obj,
|
||||
"output": f"{master_seq_obj.get_name()}",
|
||||
"frame_range": (
|
||||
master_seq_obj.get_playback_start(),
|
||||
master_seq_obj.get_playback_end())}
|
||||
|
||||
if (selected_asset_path == master_seq or
|
||||
pre_create_data.get("use_hierarchy")):
|
||||
seq_data = master_seq_data
|
||||
else:
|
||||
seq_data_list = [master_seq_data]
|
||||
|
||||
for seq in seq_data_list:
|
||||
subscenes = get_subsequences(seq.get('sequence'))
|
||||
|
||||
for sub_seq in subscenes:
|
||||
sub_seq_obj = sub_seq.get_sequence()
|
||||
curr_data = {
|
||||
"sequence": sub_seq_obj,
|
||||
"output": (f"{seq.get('output')}/"
|
||||
f"{sub_seq_obj.get_name()}"),
|
||||
"frame_range": (
|
||||
sub_seq.get_start_frame(),
|
||||
sub_seq.get_end_frame() - 1)}
|
||||
|
||||
# If the selected asset is the current sub-sequence,
|
||||
# we get its data and we break the loop.
|
||||
# Otherwise, we add the current sub-sequence data to
|
||||
# the list of sequences to check.
|
||||
if sub_seq_obj.get_path_name() == selected_asset_path:
|
||||
seq_data = curr_data
|
||||
break
|
||||
|
||||
seq_data_list.append(curr_data)
|
||||
|
||||
# If we found the selected asset, we break the loop.
|
||||
if seq_data is not None:
|
||||
break
|
||||
|
||||
# If we didn't find the selected asset, we don't create the
|
||||
# instance.
|
||||
if not seq_data:
|
||||
unreal.log_warning(
|
||||
f"Skipping {selected_asset.get_name()}. It isn't a "
|
||||
"sub-sequence of the master sequence.")
|
||||
continue
|
||||
|
||||
self.create_instance(
|
||||
instance_data, product_name, pre_create_data,
|
||||
selected_asset_path, master_seq, master_lvl, seq_data)
|
||||
|
||||
def create(self, product_name, instance_data, pre_create_data):
|
||||
if pre_create_data.get("create_seq"):
|
||||
self.create_with_new_sequence(
|
||||
product_name, instance_data, pre_create_data)
|
||||
else:
|
||||
self.create_from_existing_sequence(
|
||||
product_name, instance_data, pre_create_data)
|
||||
|
||||
def get_pre_create_attr_defs(self):
|
||||
return [
|
||||
UILabelDef(
|
||||
"Select a Level Sequence to render or create a new one."
|
||||
),
|
||||
BoolDef(
|
||||
"create_seq",
|
||||
label="Create a new Level Sequence",
|
||||
default=False
|
||||
),
|
||||
UILabelDef(
|
||||
"WARNING: If you create a new Level Sequence, the current\n"
|
||||
"level will be saved and a new Master Level will be created."
|
||||
),
|
||||
NumberDef(
|
||||
"start_frame",
|
||||
label="Start Frame",
|
||||
default=0,
|
||||
minimum=-999999,
|
||||
maximum=999999
|
||||
),
|
||||
NumberDef(
|
||||
"end_frame",
|
||||
label="Start Frame",
|
||||
default=150,
|
||||
minimum=-999999,
|
||||
maximum=999999
|
||||
),
|
||||
UISeparatorDef(),
|
||||
UILabelDef(
|
||||
"The following settings are valid only if you are not\n"
|
||||
"creating a new sequence."
|
||||
),
|
||||
BoolDef(
|
||||
"use_hierarchy",
|
||||
label="Use Hierarchy",
|
||||
default=False
|
||||
),
|
||||
]
|
||||
|
|
@ -1,13 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
from ayon_unreal.api.plugin import (
|
||||
UnrealAssetCreator,
|
||||
)
|
||||
|
||||
|
||||
class CreateStaticMeshFBX(UnrealAssetCreator):
|
||||
"""Create Static Meshes as FBX geometry."""
|
||||
|
||||
identifier = "io.ayon.creators.unreal.staticmeshfbx"
|
||||
label = "Static Mesh (FBX)"
|
||||
product_type = "unrealStaticMesh"
|
||||
icon = "cube"
|
||||
|
|
@ -1,66 +0,0 @@
# -*- coding: utf-8 -*-
from pathlib import Path

import unreal

from ayon_core.pipeline import CreatorError
from ayon_unreal.api.plugin import (
    UnrealAssetCreator,
)


class CreateUAsset(UnrealAssetCreator):
    """Create UAsset."""

    identifier = "io.ayon.creators.unreal.uasset"
    label = "UAsset"
    product_type = "uasset"
    icon = "cube"

    extension = ".uasset"

    def create(self, product_name, instance_data, pre_create_data):
        if pre_create_data.get("use_selection"):
            ar = unreal.AssetRegistryHelpers.get_asset_registry()

            sel_objects = unreal.EditorUtilityLibrary.get_selected_assets()
            selection = [a.get_path_name() for a in sel_objects]

            if len(selection) != 1:
                raise CreatorError("Please select only one object.")

            obj = selection[0]

            asset = ar.get_asset_by_object_path(obj).get_asset()
            sys_path = unreal.SystemLibrary.get_system_path(asset)

            if not sys_path:
                raise CreatorError(
                    f"{Path(obj).name} is not on the disk. Likely it needs to "
                    "be saved first.")

            if Path(sys_path).suffix != self.extension:
                raise CreatorError(
                    f"{Path(sys_path).name} is not a {self.label}.")

        super(CreateUAsset, self).create(
            product_name,
            instance_data,
            pre_create_data)


class CreateUMap(CreateUAsset):
    """Create Level."""

    identifier = "io.ayon.creators.unreal.umap"
    label = "Level"
    product_type = "uasset"
    extension = ".umap"

    def create(self, product_name, instance_data, pre_create_data):
        instance_data["families"] = ["umap"]

        super(CreateUMap, self).create(
            product_name,
            instance_data,
            pre_create_data)
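# Note (added commentary, not part of the removed file): CreateUAsset relies on
# unreal.SystemLibrary.get_system_path() returning an empty string for assets
# that only exist in memory, which is what the "needs to be saved first" error
# guards against. A hypothetical standalone version of that check:
import unreal

def is_saved_to_disk(asset_path):
    """Return True if the asset at 'asset_path' has a file on disk."""
    ar = unreal.AssetRegistryHelpers.get_asset_registry()
    asset = ar.get_asset_by_object_path(asset_path).get_asset()
    return bool(unreal.SystemLibrary.get_system_path(asset))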
@ -1,66 +0,0 @@
import unreal

from ayon_unreal.api.tools_ui import qt_app_context
from ayon_unreal.api.pipeline import delete_asset_if_unused
from ayon_core.pipeline import InventoryAction


class DeleteUnusedAssets(InventoryAction):
    """Delete all the assets that are not used in any level.
    """

    label = "Delete Unused Assets"
    icon = "trash"
    color = "red"
    order = 1

    dialog = None

    def _delete_unused_assets(self, containers):
        allowed_families = ["model", "rig"]

        for container in containers:
            container_dir = container.get("namespace")
            if container.get("family") not in allowed_families:
                unreal.log_warning(
                    f"Container {container_dir} is not supported.")
                continue

            asset_content = unreal.EditorAssetLibrary.list_assets(
                container_dir, recursive=True, include_folder=False
            )

            delete_asset_if_unused(container, asset_content)

    def _show_confirmation_dialog(self, containers):
        from qtpy import QtCore
        from ayon_core.tools.utils import SimplePopup
        from ayon_core.style import load_stylesheet

        dialog = SimplePopup()
        dialog.setWindowFlags(
            QtCore.Qt.Window
            | QtCore.Qt.WindowStaysOnTopHint
        )
        dialog.setFocusPolicy(QtCore.Qt.StrongFocus)
        dialog.setWindowTitle("Delete all unused assets")
        dialog.set_message(
            "You are about to delete all the assets in the project that \n"
            "are not used in any level. Are you sure you want to continue?"
        )
        dialog.set_button_text("Delete")

        dialog.on_clicked.connect(
            lambda: self._delete_unused_assets(containers)
        )

        dialog.show()
        dialog.raise_()
        dialog.activateWindow()
        dialog.setStyleSheet(load_stylesheet())

        self.dialog = dialog

    def process(self, containers):
        with qt_app_context():
            self._show_confirmation_dialog(containers)
@ -1,84 +0,0 @@
import unreal

from ayon_unreal.api.pipeline import (
    ls,
    replace_static_mesh_actors,
    replace_skeletal_mesh_actors,
    replace_geometry_cache_actors,
)
from ayon_core.pipeline import InventoryAction


def update_assets(containers, selected):
    allowed_families = ["model", "rig"]

    # Get all the containers in the Unreal Project
    all_containers = ls()

    for container in containers:
        container_dir = container.get("namespace")
        if container.get("family") not in allowed_families:
            unreal.log_warning(
                f"Container {container_dir} is not supported.")
            continue

        # Get all containers with same asset_name but different objectName.
        # These are the containers that need to be updated in the level.
        sa_containers = [
            i
            for i in all_containers
            if (
                i.get("asset_name") == container.get("asset_name") and
                i.get("objectName") != container.get("objectName")
            )
        ]

        asset_content = unreal.EditorAssetLibrary.list_assets(
            container_dir, recursive=True, include_folder=False
        )

        # Update all actors in level
        for sa_cont in sa_containers:
            sa_dir = sa_cont.get("namespace")
            old_content = unreal.EditorAssetLibrary.list_assets(
                sa_dir, recursive=True, include_folder=False
            )

            if container.get("family") == "rig":
                replace_skeletal_mesh_actors(
                    old_content, asset_content, selected)
                replace_static_mesh_actors(
                    old_content, asset_content, selected)
            elif container.get("family") == "model":
                if container.get("loader") == "PointCacheAlembicLoader":
                    replace_geometry_cache_actors(
                        old_content, asset_content, selected)
                else:
                    replace_static_mesh_actors(
                        old_content, asset_content, selected)

    unreal.EditorLevelLibrary.save_current_level()


class UpdateAllActors(InventoryAction):
    """Update all the Actors in the current level to the version of the asset
    selected in the scene manager.
    """

    label = "Replace all Actors in level to this version"
    icon = "arrow-up"

    def process(self, containers):
        update_assets(containers, False)


class UpdateSelectedActors(InventoryAction):
    """Update only the selected Actors in the current level to the version
    of the asset selected in the scene manager.
    """

    label = "Replace selected Actors in level to this version"
    icon = "arrow-up"

    def process(self, containers):
        update_assets(containers, True)
@ -1,176 +0,0 @@
# -*- coding: utf-8 -*-
"""Load Alembic Animation."""
import os

from ayon_core.pipeline import (
    get_representation_path,
    AYON_CONTAINER_ID
)
from ayon_unreal.api import plugin
from ayon_unreal.api import pipeline as unreal_pipeline
import unreal  # noqa


class AnimationAlembicLoader(plugin.Loader):
    """Load Unreal SkeletalMesh from Alembic"""

    product_types = {"animation"}
    label = "Import Alembic Animation"
    representations = {"abc"}
    icon = "cube"
    color = "orange"

    def get_task(self, filename, asset_dir, asset_name, replace):
        task = unreal.AssetImportTask()
        options = unreal.AbcImportSettings()
        sm_settings = unreal.AbcStaticMeshSettings()
        conversion_settings = unreal.AbcConversionSettings(
            preset=unreal.AbcConversionPreset.CUSTOM,
            flip_u=False, flip_v=False,
            rotation=[0.0, 0.0, 0.0],
            scale=[1.0, 1.0, -1.0])

        task.set_editor_property('filename', filename)
        task.set_editor_property('destination_path', asset_dir)
        task.set_editor_property('destination_name', asset_name)
        task.set_editor_property('replace_existing', replace)
        task.set_editor_property('automated', True)
        task.set_editor_property('save', True)

        options.set_editor_property(
            'import_type', unreal.AlembicImportType.SKELETAL)

        options.static_mesh_settings = sm_settings
        options.conversion_settings = conversion_settings
        task.options = options

        return task

    def load(self, context, name, namespace, data):
        """Load and containerise representation into Content Browser.

        This is two step process. First, import FBX to temporary path and
        then call `containerise()` on it - this moves all content to new
        directory and then it will create AssetContainer there and imprint it
        with metadata. This will mark this path as container.

        Args:
            context (dict): application context
            name (str): Product name
            namespace (str): in Unreal this is basically path to container.
                             This is not passed here, so namespace is set
                             by `containerise()` because only then we know
                             real path.
            data (dict): Those would be data to be imprinted. This is not used
                         now, data are imprinted by `containerise()`.

        Returns:
            list(str): list of container content
        """

        # Create directory for asset and ayon container
        root = unreal_pipeline.AYON_ASSET_DIR
        folder_name = context["folder"]["name"]
        folder_path = context["folder"]["path"]
        product_type = context["product"]["productType"]
        suffix = "_CON"
        if folder_name:
            asset_name = "{}_{}".format(folder_name, name)
        else:
            asset_name = "{}".format(name)
        version = context["version"]["version"]
        # Check if version is hero version and use different name
        if version < 0:
            name_version = f"{name}_hero"
        else:
            name_version = f"{name}_v{version:03d}"

        tools = unreal.AssetToolsHelpers().get_asset_tools()
        asset_dir, container_name = tools.create_unique_asset_name(
            f"{root}/{folder_name}/{name_version}", suffix="")

        container_name += suffix

        if not unreal.EditorAssetLibrary.does_directory_exist(asset_dir):
            unreal.EditorAssetLibrary.make_directory(asset_dir)

        path = self.filepath_from_context(context)
        task = self.get_task(path, asset_dir, asset_name, False)

        asset_tools = unreal.AssetToolsHelpers.get_asset_tools()
        asset_tools.import_asset_tasks([task])

        # Create Asset Container
        unreal_pipeline.create_container(
            container=container_name, path=asset_dir)

        data = {
            "schema": "ayon:container-2.0",
            "id": AYON_CONTAINER_ID,
            "folder_path": folder_path,
            "namespace": asset_dir,
            "container_name": container_name,
            "asset_name": asset_name,
            "loader": str(self.__class__.__name__),
            "representation": context["representation"]["id"],
            "parent": context["representation"]["versionId"],
            "product_type": product_type,
            # TODO these should be probably removed
            "asset": folder_path,
            "family": product_type,
        }
        unreal_pipeline.imprint(
            f"{asset_dir}/{container_name}", data)

        asset_content = unreal.EditorAssetLibrary.list_assets(
            asset_dir, recursive=True, include_folder=True
        )

        for a in asset_content:
            unreal.EditorAssetLibrary.save_asset(a)

        return asset_content

    def update(self, container, context):
        folder_name = container["asset_name"]
        repre_entity = context["representation"]
        source_path = get_representation_path(repre_entity)
        destination_path = container["namespace"]

        task = self.get_task(
            source_path, destination_path, folder_name, True
        )

        # do import fbx and replace existing data
        asset_tools = unreal.AssetToolsHelpers.get_asset_tools()
        asset_tools.import_asset_tasks([task])

        container_path = f"{container['namespace']}/{container['objectName']}"

        # update metadata
        unreal_pipeline.imprint(
            container_path,
            {
                "representation": repre_entity["id"],
                "parent": repre_entity["versionId"],
            })

        asset_content = unreal.EditorAssetLibrary.list_assets(
            destination_path, recursive=True, include_folder=True
        )

        for a in asset_content:
            unreal.EditorAssetLibrary.save_asset(a)

    def remove(self, container):
        path = container["namespace"]
        parent_path = os.path.dirname(path)

        unreal.EditorAssetLibrary.delete_directory(path)

        asset_content = unreal.EditorAssetLibrary.list_assets(
            parent_path, recursive=False
        )

        if len(asset_content) == 0:
            unreal.EditorAssetLibrary.delete_directory(parent_path)
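# Note (added commentary, not part of the removed file): the loaders above
# derive the destination folder name from the product name and version, with a
# "_hero" suffix for hero (negative) versions. A standalone sketch of that
# naming convention:
def make_name_version(name, version):
    """Return the folder name used for one imported version of a product."""
    return f"{name}_hero" if version < 0 else f"{name}_v{version:03d}"

# make_name_version("animationMain", 7)  -> "animationMain_v007"
# make_name_version("animationMain", -1) -> "animationMain_hero"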
@ -1,337 +0,0 @@
# -*- coding: utf-8 -*-
"""Load FBX with animations."""
import os
import json

import unreal
from unreal import EditorAssetLibrary
from unreal import MovieSceneSkeletalAnimationTrack
from unreal import MovieSceneSkeletalAnimationSection

from ayon_core.pipeline.context_tools import get_current_folder_entity
from ayon_core.pipeline import (
    get_representation_path,
    AYON_CONTAINER_ID
)
from ayon_unreal.api import plugin
from ayon_unreal.api import pipeline as unreal_pipeline


class AnimationFBXLoader(plugin.Loader):
    """Load Unreal SkeletalMesh from FBX."""

    product_types = {"animation"}
    label = "Import FBX Animation"
    representations = {"fbx"}
    icon = "cube"
    color = "orange"

    def _process(self, path, asset_dir, asset_name, instance_name):
        automated = False
        actor = None

        task = unreal.AssetImportTask()
        task.options = unreal.FbxImportUI()

        if instance_name:
            automated = True
            # Old method to get the actor
            # actor_name = 'PersistentLevel.' + instance_name
            # actor = unreal.EditorLevelLibrary.get_actor_reference(actor_name)
            actors = unreal.EditorLevelLibrary.get_all_level_actors()
            for a in actors:
                if a.get_class().get_name() != "SkeletalMeshActor":
                    continue
                if a.get_actor_label() == instance_name:
                    actor = a
                    break
            if not actor:
                raise Exception(f"Could not find actor {instance_name}")
            skeleton = actor.skeletal_mesh_component.skeletal_mesh.skeleton
            task.options.set_editor_property('skeleton', skeleton)

        if not actor:
            return None

        folder_entity = get_current_folder_entity(fields=["attrib.fps"])

        task.set_editor_property('filename', path)
        task.set_editor_property('destination_path', asset_dir)
        task.set_editor_property('destination_name', asset_name)
        task.set_editor_property('replace_existing', False)
        task.set_editor_property('automated', automated)
        task.set_editor_property('save', False)

        # set import options here
        task.options.set_editor_property(
            'automated_import_should_detect_type', False)
        task.options.set_editor_property(
            'original_import_type', unreal.FBXImportType.FBXIT_SKELETAL_MESH)
        task.options.set_editor_property(
            'mesh_type_to_import', unreal.FBXImportType.FBXIT_ANIMATION)
        task.options.set_editor_property('import_mesh', False)
        task.options.set_editor_property('import_animations', True)
        task.options.set_editor_property('override_full_name', True)

        task.options.anim_sequence_import_data.set_editor_property(
            'animation_length',
            unreal.FBXAnimationLengthImportType.FBXALIT_EXPORTED_TIME
        )
        task.options.anim_sequence_import_data.set_editor_property(
            'import_meshes_in_bone_hierarchy', False)
        task.options.anim_sequence_import_data.set_editor_property(
            'use_default_sample_rate', False)
        task.options.anim_sequence_import_data.set_editor_property(
            'custom_sample_rate', folder_entity.get("attrib", {}).get("fps"))
        task.options.anim_sequence_import_data.set_editor_property(
            'import_custom_attribute', True)
        task.options.anim_sequence_import_data.set_editor_property(
            'import_bone_tracks', True)
        task.options.anim_sequence_import_data.set_editor_property(
            'remove_redundant_keys', False)
        task.options.anim_sequence_import_data.set_editor_property(
            'convert_scene', True)

        unreal.AssetToolsHelpers.get_asset_tools().import_asset_tasks([task])

        asset_content = EditorAssetLibrary.list_assets(
            asset_dir, recursive=True, include_folder=True
        )

        animation = None

        for a in asset_content:
            imported_asset_data = EditorAssetLibrary.find_asset_data(a)
            imported_asset = unreal.AssetRegistryHelpers.get_asset(
                imported_asset_data)
            if imported_asset.__class__ == unreal.AnimSequence:
                animation = imported_asset
                break

        if animation:
            animation.set_editor_property('enable_root_motion', True)
            actor.skeletal_mesh_component.set_editor_property(
                'animation_mode', unreal.AnimationMode.ANIMATION_SINGLE_NODE)
            actor.skeletal_mesh_component.animation_data.set_editor_property(
                'anim_to_play', animation)

        return animation

    def load(self, context, name, namespace, options=None):
        """
        Load and containerise representation into Content Browser.

        This is two step process. First, import FBX to temporary path and
        then call `containerise()` on it - this moves all content to new
        directory and then it will create AssetContainer there and imprint it
        with metadata. This will mark this path as container.

        Args:
            context (dict): application context
            name (str): Product name
            namespace (str): in Unreal this is basically path to container.
                             This is not passed here, so namespace is set
                             by `containerise()` because only then we know
                             real path.
            data (dict): Those would be data to be imprinted. This is not used
                         now, data are imprinted by `containerise()`.

        Returns:
            list(str): list of container content
        """
        # Create directory for asset and Ayon container
        root = "/Game/Ayon"
        folder_path = context["folder"]["path"]
        hierarchy = folder_path.lstrip("/").split("/")
        folder_name = hierarchy.pop(-1)
        product_type = context["product"]["productType"]

        suffix = "_CON"
        asset_name = f"{folder_name}_{name}" if folder_name else f"{name}"
        tools = unreal.AssetToolsHelpers().get_asset_tools()
        asset_dir, container_name = tools.create_unique_asset_name(
            f"{root}/Animations/{folder_name}/{name}", suffix="")

        ar = unreal.AssetRegistryHelpers.get_asset_registry()

        _filter = unreal.ARFilter(
            class_names=["World"],
            package_paths=[f"{root}/{hierarchy[0]}"],
            recursive_paths=False)
        levels = ar.get_assets(_filter)
        master_level = levels[0].get_asset().get_path_name()

        hierarchy_dir = root
        for h in hierarchy:
            hierarchy_dir = f"{hierarchy_dir}/{h}"
        hierarchy_dir = f"{hierarchy_dir}/{folder_name}"

        _filter = unreal.ARFilter(
            class_names=["World"],
            package_paths=[f"{hierarchy_dir}/"],
            recursive_paths=True)
        levels = ar.get_assets(_filter)
        level = levels[0].get_asset().get_path_name()

        unreal.EditorLevelLibrary.save_all_dirty_levels()
        unreal.EditorLevelLibrary.load_level(level)

        container_name += suffix

        EditorAssetLibrary.make_directory(asset_dir)

        path = self.filepath_from_context(context)
        libpath = path.replace(".fbx", ".json")

        with open(libpath, "r") as fp:
            data = json.load(fp)

        instance_name = data.get("instance_name")

        animation = self._process(path, asset_dir, asset_name, instance_name)

        asset_content = EditorAssetLibrary.list_assets(
            hierarchy_dir, recursive=True, include_folder=False)

        # Get the sequence for the layout, excluding the camera one.
        sequences = [a for a in asset_content
                     if (EditorAssetLibrary.find_asset_data(a).get_class() ==
                         unreal.LevelSequence.static_class() and
                         "_camera" not in a.split("/")[-1])]

        ar = unreal.AssetRegistryHelpers.get_asset_registry()

        for s in sequences:
            sequence = ar.get_asset_by_object_path(s).get_asset()
            possessables = [
                p for p in sequence.get_possessables()
                if p.get_display_name() == instance_name]

            for p in possessables:
                tracks = [
                    t for t in p.get_tracks()
                    if (t.get_class() ==
                        MovieSceneSkeletalAnimationTrack.static_class())]

                for t in tracks:
                    sections = [
                        s for s in t.get_sections()
                        if (s.get_class() ==
                            MovieSceneSkeletalAnimationSection.static_class())]

                    for s in sections:
                        s.params.set_editor_property('animation', animation)

        # Create Asset Container
        unreal_pipeline.create_container(
            container=container_name, path=asset_dir)

        data = {
            "schema": "ayon:container-2.0",
            "id": AYON_CONTAINER_ID,
            "namespace": asset_dir,
            "container_name": container_name,
            "asset_name": asset_name,
            "loader": str(self.__class__.__name__),
            "representation": context["representation"]["id"],
            "parent": context["representation"]["versionId"],
            "folder_path": folder_path,
            "product_type": product_type,
            # TODO these should be probably removed
            "asset": folder_path,
            "family": product_type
        }
        unreal_pipeline.imprint(f"{asset_dir}/{container_name}", data)

        imported_content = EditorAssetLibrary.list_assets(
            asset_dir, recursive=True, include_folder=False)

        for a in imported_content:
            EditorAssetLibrary.save_asset(a)

        unreal.EditorLevelLibrary.save_current_level()
        unreal.EditorLevelLibrary.load_level(master_level)

    def update(self, container, context):
        repre_entity = context["representation"]
        folder_name = container["asset_name"]
        source_path = get_representation_path(repre_entity)
        folder_entity = get_current_folder_entity(fields=["attrib.fps"])
        destination_path = container["namespace"]

        task = unreal.AssetImportTask()
        task.options = unreal.FbxImportUI()

        task.set_editor_property('filename', source_path)
        task.set_editor_property('destination_path', destination_path)
        # strip suffix
        task.set_editor_property('destination_name', folder_name)
        task.set_editor_property('replace_existing', True)
        task.set_editor_property('automated', True)
        task.set_editor_property('save', True)

        # set import options here
        task.options.set_editor_property(
            'automated_import_should_detect_type', False)
        task.options.set_editor_property(
            'original_import_type', unreal.FBXImportType.FBXIT_SKELETAL_MESH)
        task.options.set_editor_property(
            'mesh_type_to_import', unreal.FBXImportType.FBXIT_ANIMATION)
        task.options.set_editor_property('import_mesh', False)
        task.options.set_editor_property('import_animations', True)
        task.options.set_editor_property('override_full_name', True)

        task.options.anim_sequence_import_data.set_editor_property(
            'animation_length',
            unreal.FBXAnimationLengthImportType.FBXALIT_EXPORTED_TIME
        )
        task.options.anim_sequence_import_data.set_editor_property(
            'import_meshes_in_bone_hierarchy', False)
        task.options.anim_sequence_import_data.set_editor_property(
            'use_default_sample_rate', False)
        task.options.anim_sequence_import_data.set_editor_property(
            'custom_sample_rate', folder_entity.get("attrib", {}).get("fps"))
        task.options.anim_sequence_import_data.set_editor_property(
            'import_custom_attribute', True)
        task.options.anim_sequence_import_data.set_editor_property(
            'import_bone_tracks', True)
        task.options.anim_sequence_import_data.set_editor_property(
            'remove_redundant_keys', False)
        task.options.anim_sequence_import_data.set_editor_property(
            'convert_scene', True)

        skeletal_mesh = EditorAssetLibrary.load_asset(
            container.get('namespace') + "/" + container.get('asset_name'))
        skeleton = skeletal_mesh.get_editor_property('skeleton')
        task.options.set_editor_property('skeleton', skeleton)

        # do import fbx and replace existing data
        unreal.AssetToolsHelpers.get_asset_tools().import_asset_tasks([task])
        container_path = f'{container["namespace"]}/{container["objectName"]}'
        # update metadata
        unreal_pipeline.imprint(
            container_path,
            {
                "representation": repre_entity["id"],
                "parent": repre_entity["versionId"],
            })

        asset_content = EditorAssetLibrary.list_assets(
            destination_path, recursive=True, include_folder=True
        )

        for a in asset_content:
            EditorAssetLibrary.save_asset(a)

    def remove(self, container):
        path = container["namespace"]
        parent_path = os.path.dirname(path)

        EditorAssetLibrary.delete_directory(path)

        asset_content = EditorAssetLibrary.list_assets(
            parent_path, recursive=False, include_folder=True
        )

        if len(asset_content) == 0:
            EditorAssetLibrary.delete_directory(parent_path)
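# Note (added commentary, not part of the removed file): load() expects a JSON
# sidecar next to the published FBX (same basename, ".json" extension) and only
# reads its "instance_name" key to find the matching SkeletalMeshActor in the
# level. The file name and value below are hypothetical.
example_sidecar = {
    "instance_name": "characterA_rigMain_01",
}
# e.g. animationMain_v003.fbx would be paired with animationMain_v003.json,
# read with json.load() in AnimationFBXLoader.load().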
@ -1,591 +0,0 @@
# -*- coding: utf-8 -*-
"""Load camera from FBX."""
from pathlib import Path

import ayon_api

import unreal
from unreal import (
    EditorAssetLibrary,
    EditorLevelLibrary,
    EditorLevelUtils,
    LevelSequenceEditorBlueprintLibrary as LevelSequenceLib,
)
from ayon_core.pipeline import (
    AYON_CONTAINER_ID,
    get_current_project_name,
    get_representation_path,
)
from ayon_unreal.api import plugin
from ayon_unreal.api.pipeline import (
    generate_sequence,
    set_sequence_hierarchy,
    create_container,
    imprint,
)


class CameraLoader(plugin.Loader):
    """Load Unreal StaticMesh from FBX"""

    product_types = {"camera"}
    label = "Load Camera"
    representations = {"fbx"}
    icon = "cube"
    color = "orange"

    def _import_camera(
        self, world, sequence, bindings, import_fbx_settings, import_filename
    ):
        ue_version = unreal.SystemLibrary.get_engine_version().split('.')
        ue_major = int(ue_version[0])
        ue_minor = int(ue_version[1])

        if ue_major == 4 and ue_minor <= 26:
            unreal.SequencerTools.import_fbx(
                world,
                sequence,
                bindings,
                import_fbx_settings,
                import_filename
            )
        elif (ue_major == 4 and ue_minor >= 27) or ue_major == 5:
            unreal.SequencerTools.import_level_sequence_fbx(
                world,
                sequence,
                bindings,
                import_fbx_settings,
                import_filename
            )
        else:
            raise NotImplementedError(
                f"Unreal version {ue_major} not supported")

    def load(self, context, name, namespace, data):
        """
        Load and containerise representation into Content Browser.

        This is two step process. First, import FBX to temporary path and
        then call `containerise()` on it - this moves all content to new
        directory and then it will create AssetContainer there and imprint it
        with metadata. This will mark this path as container.

        Args:
            context (dict): application context
            name (str): Product name
            namespace (str): in Unreal this is basically path to container.
                             This is not passed here, so namespace is set
                             by `containerise()` because only then we know
                             real path.
            data (dict): Those would be data to be imprinted. This is not used
                         now, data are imprinted by `containerise()`.

        Returns:
            list(str): list of container content
        """

        # Create directory for asset and Ayon container
        folder_entity = context["folder"]
        folder_attributes = folder_entity["attrib"]
        folder_path = folder_entity["path"]
        hierarchy_parts = folder_path.split("/")
        # Remove empty string
        hierarchy_parts.pop(0)
        # Pop folder name
        folder_name = hierarchy_parts.pop(-1)

        root = "/Game/Ayon"
        hierarchy_dir = root
        hierarchy_dir_list = []
        for h in hierarchy_parts:
            hierarchy_dir = f"{hierarchy_dir}/{h}"
            hierarchy_dir_list.append(hierarchy_dir)
        suffix = "_CON"
        asset_name = f"{folder_name}_{name}" if folder_name else name

        tools = unreal.AssetToolsHelpers().get_asset_tools()

        # Create a unique name for the camera directory
        unique_number = 1
        if EditorAssetLibrary.does_directory_exist(
            f"{hierarchy_dir}/{folder_name}"
        ):
            asset_content = EditorAssetLibrary.list_assets(
                f"{root}/{folder_name}", recursive=False, include_folder=True
            )

            # Get highest number to make a unique name
            folders = [a for a in asset_content
                       if a[-1] == "/" and f"{name}_" in a]
            # Get number from folder name. Splits the string by "_" and
            # removes the last element (which is a "/").
            f_numbers = [int(f.split("_")[-1][:-1]) for f in folders]
            f_numbers.sort()
            unique_number = f_numbers[-1] + 1 if f_numbers else 1

        asset_dir, container_name = tools.create_unique_asset_name(
            f"{hierarchy_dir}/{folder_name}/{name}_{unique_number:02d}", suffix="")

        container_name += suffix

        EditorAssetLibrary.make_directory(asset_dir)

        # Create map for the shot, and create hierarchy of map. If the maps
        # already exist, we will use them.
        h_dir = hierarchy_dir_list[0]
        h_asset = hierarchy_parts[0]
        master_level = f"{h_dir}/{h_asset}_map.{h_asset}_map"
        if not EditorAssetLibrary.does_asset_exist(master_level):
            EditorLevelLibrary.new_level(f"{h_dir}/{h_asset}_map")

        level = (
            f"{asset_dir}/{folder_name}_map_camera.{folder_name}_map_camera"
        )
        if not EditorAssetLibrary.does_asset_exist(level):
            EditorLevelLibrary.new_level(
                f"{asset_dir}/{folder_name}_map_camera"
            )

        EditorLevelLibrary.load_level(master_level)
        EditorLevelUtils.add_level_to_world(
            EditorLevelLibrary.get_editor_world(),
            level,
            unreal.LevelStreamingDynamic
        )
        EditorLevelLibrary.save_all_dirty_levels()
        EditorLevelLibrary.load_level(level)

        # Get all the sequences in the hierarchy. It will create them, if
        # they don't exist.
        frame_ranges = []
        sequences = []
        for (h_dir, h) in zip(hierarchy_dir_list, hierarchy_parts):
            root_content = EditorAssetLibrary.list_assets(
                h_dir, recursive=False, include_folder=False)

            existing_sequences = [
                EditorAssetLibrary.find_asset_data(asset)
                for asset in root_content
                if EditorAssetLibrary.find_asset_data(
                    asset).get_class().get_name() == 'LevelSequence'
            ]

            if existing_sequences:
                for seq in existing_sequences:
                    sequences.append(seq.get_asset())
                    frame_ranges.append((
                        seq.get_asset().get_playback_start(),
                        seq.get_asset().get_playback_end()))
            else:
                sequence, frame_range = generate_sequence(h, h_dir)

                sequences.append(sequence)
                frame_ranges.append(frame_range)

        EditorAssetLibrary.make_directory(asset_dir)

        cam_seq = tools.create_asset(
            asset_name=f"{folder_name}_camera",
            package_path=asset_dir,
            asset_class=unreal.LevelSequence,
            factory=unreal.LevelSequenceFactoryNew()
        )

        # Add sequences data to hierarchy
        for i in range(len(sequences) - 1):
            set_sequence_hierarchy(
                sequences[i], sequences[i + 1],
                frame_ranges[i][1],
                frame_ranges[i + 1][0], frame_ranges[i + 1][1],
                [level])

        clip_in = folder_attributes.get("clipIn")
        clip_out = folder_attributes.get("clipOut")

        cam_seq.set_display_rate(
            unreal.FrameRate(folder_attributes.get("fps"), 1.0))
        cam_seq.set_playback_start(clip_in)
        cam_seq.set_playback_end(clip_out + 1)
        set_sequence_hierarchy(
            sequences[-1], cam_seq,
            frame_ranges[-1][1],
            clip_in, clip_out,
            [level])

        settings = unreal.MovieSceneUserImportFBXSettings()
        settings.set_editor_property('reduce_keys', False)

        if cam_seq:
            path = self.filepath_from_context(context)
            self._import_camera(
                EditorLevelLibrary.get_editor_world(),
                cam_seq,
                cam_seq.get_bindings(),
                settings,
                path
            )

        # Set range of all sections
        # Changing the range of the section is not enough. We need to change
        # the frame of all the keys in the section.
        for possessable in cam_seq.get_possessables():
            for tracks in possessable.get_tracks():
                for section in tracks.get_sections():
                    section.set_range(clip_in, clip_out + 1)
                    for channel in section.get_all_channels():
                        for key in channel.get_keys():
                            old_time = key.get_time().get_editor_property(
                                'frame_number')
                            old_time_value = old_time.get_editor_property(
                                'value')
                            new_time = old_time_value + (
                                clip_in - folder_attributes.get('frameStart')
                            )
                            key.set_time(unreal.FrameNumber(value=new_time))

        # Create Asset Container
        create_container(
            container=container_name, path=asset_dir)

        product_type = context["product"]["productType"]
        data = {
            "schema": "ayon:container-2.0",
            "id": AYON_CONTAINER_ID,
            "folder_path": folder_path,
            "namespace": asset_dir,
            "container_name": container_name,
            "asset_name": asset_name,
            "loader": str(self.__class__.__name__),
            "representation": context["representation"]["id"],
            "parent": context["representation"]["versionId"],
            "product_type": product_type,
            # TODO these should be probably removed
            "asset": folder_name,
            "family": product_type,
        }
        imprint(f"{asset_dir}/{container_name}", data)

        EditorLevelLibrary.save_all_dirty_levels()
        EditorLevelLibrary.load_level(master_level)

        # Save all assets in the hierarchy
        asset_content = EditorAssetLibrary.list_assets(
            hierarchy_dir_list[0], recursive=True, include_folder=False
        )

        for a in asset_content:
            EditorAssetLibrary.save_asset(a)

        return asset_content

    def update(self, container, context):
        ar = unreal.AssetRegistryHelpers.get_asset_registry()

        curr_level_sequence = LevelSequenceLib.get_current_level_sequence()
        curr_time = LevelSequenceLib.get_current_time()
        is_cam_lock = LevelSequenceLib.is_camera_cut_locked_to_viewport()

        editor_subsystem = unreal.UnrealEditorSubsystem()
        vp_loc, vp_rot = editor_subsystem.get_level_viewport_camera_info()

        asset_dir = container.get('namespace')

        EditorLevelLibrary.save_current_level()

        _filter = unreal.ARFilter(
            class_names=["LevelSequence"],
            package_paths=[asset_dir],
            recursive_paths=False)
        sequences = ar.get_assets(_filter)
        _filter = unreal.ARFilter(
            class_names=["World"],
            package_paths=[asset_dir],
            recursive_paths=True)
        maps = ar.get_assets(_filter)

        # There should be only one map in the list
        EditorLevelLibrary.load_level(maps[0].get_asset().get_path_name())

        level_sequence = sequences[0].get_asset()

        display_rate = level_sequence.get_display_rate()
        playback_start = level_sequence.get_playback_start()
        playback_end = level_sequence.get_playback_end()

        sequence_name = f"{container.get('asset')}_camera"

        # Get the actors in the level sequence.
        objs = unreal.SequencerTools.get_bound_objects(
            unreal.EditorLevelLibrary.get_editor_world(),
            level_sequence,
            level_sequence.get_bindings(),
            unreal.SequencerScriptingRange(
                has_start_value=True,
                has_end_value=True,
                inclusive_start=level_sequence.get_playback_start(),
                exclusive_end=level_sequence.get_playback_end()
            )
        )

        # Delete actors from the map
        for o in objs:
            if o.bound_objects[0].get_class().get_name() == "CineCameraActor":
                actor_path = o.bound_objects[0].get_path_name().split(":")[-1]
                actor = EditorLevelLibrary.get_actor_reference(actor_path)
                EditorLevelLibrary.destroy_actor(actor)

        # Remove the Level Sequence from the parent.
        # We need to traverse the hierarchy from the master sequence to find
        # the level sequence.
        root = "/Game/Ayon"
        namespace = container.get('namespace').replace(f"{root}/", "")
        ms_asset = namespace.split('/')[0]
        _filter = unreal.ARFilter(
            class_names=["LevelSequence"],
            package_paths=[f"{root}/{ms_asset}"],
            recursive_paths=False)
        sequences = ar.get_assets(_filter)
        master_sequence = sequences[0].get_asset()
        _filter = unreal.ARFilter(
            class_names=["World"],
            package_paths=[f"{root}/{ms_asset}"],
            recursive_paths=False)
        levels = ar.get_assets(_filter)
        master_level = levels[0].get_asset().get_path_name()

        sequences = [master_sequence]

        parent = None
        sub_scene = None
        for s in sequences:
            tracks = s.get_master_tracks()
            subscene_track = None
            for t in tracks:
                if t.get_class() == unreal.MovieSceneSubTrack.static_class():
                    subscene_track = t
            if subscene_track:
                sections = subscene_track.get_sections()
                for ss in sections:
                    if ss.get_sequence().get_name() == sequence_name:
                        parent = s
                        sub_scene = ss
                        break
                    sequences.append(ss.get_sequence())
                for i, ss in enumerate(sections):
                    ss.set_row_index(i)
            if parent:
                break

        assert parent, "Could not find the parent sequence"

        EditorAssetLibrary.delete_asset(level_sequence.get_path_name())

        settings = unreal.MovieSceneUserImportFBXSettings()
        settings.set_editor_property('reduce_keys', False)

        tools = unreal.AssetToolsHelpers().get_asset_tools()
        new_sequence = tools.create_asset(
            asset_name=sequence_name,
            package_path=asset_dir,
            asset_class=unreal.LevelSequence,
            factory=unreal.LevelSequenceFactoryNew()
        )

        new_sequence.set_display_rate(display_rate)
        new_sequence.set_playback_start(playback_start)
        new_sequence.set_playback_end(playback_end)

        sub_scene.set_sequence(new_sequence)

        repre_entity = context["representation"]
        repre_path = get_representation_path(repre_entity)
        self._import_camera(
            EditorLevelLibrary.get_editor_world(),
            new_sequence,
            new_sequence.get_bindings(),
            settings,
            repre_path
        )

        # Set range of all sections
        # Changing the range of the section is not enough. We need to change
        # the frame of all the keys in the section.
        project_name = get_current_project_name()
        folder_path = container.get("folder_path")
        if folder_path is None:
            folder_path = container.get("asset")
        folder_entity = ayon_api.get_folder_by_path(project_name, folder_path)
        folder_attributes = folder_entity["attrib"]

        clip_in = folder_attributes["clipIn"]
        clip_out = folder_attributes["clipOut"]
        frame_start = folder_attributes["frameStart"]
        for possessable in new_sequence.get_possessables():
            for tracks in possessable.get_tracks():
                for section in tracks.get_sections():
                    section.set_range(clip_in, clip_out + 1)
                    for channel in section.get_all_channels():
                        for key in channel.get_keys():
                            old_time = key.get_time().get_editor_property(
                                'frame_number')
                            old_time_value = old_time.get_editor_property(
                                'value')
                            new_time = old_time_value + (
                                clip_in - frame_start
                            )
                            key.set_time(unreal.FrameNumber(value=new_time))

        data = {
            "representation": repre_entity["id"],
            "parent": repre_entity["versionId"],
        }
        imprint(f"{asset_dir}/{container.get('container_name')}", data)

        EditorLevelLibrary.save_current_level()

        asset_content = EditorAssetLibrary.list_assets(
            f"{root}/{ms_asset}", recursive=True, include_folder=False)

        for a in asset_content:
            EditorAssetLibrary.save_asset(a)

        EditorLevelLibrary.load_level(master_level)

        if curr_level_sequence:
            LevelSequenceLib.open_level_sequence(curr_level_sequence)
            LevelSequenceLib.set_current_time(curr_time)
            LevelSequenceLib.set_lock_camera_cut_to_viewport(is_cam_lock)

        editor_subsystem.set_level_viewport_camera_info(vp_loc, vp_rot)

    def remove(self, container):
        asset_dir = container.get('namespace')
        path = Path(asset_dir)

        ar = unreal.AssetRegistryHelpers.get_asset_registry()
        _filter = unreal.ARFilter(
            class_names=["LevelSequence"],
            package_paths=[asset_dir],
            recursive_paths=False)
        sequences = ar.get_assets(_filter)

        if not sequences:
            raise Exception("Could not find sequence.")

        world = ar.get_asset_by_object_path(
            EditorLevelLibrary.get_editor_world().get_path_name())

        _filter = unreal.ARFilter(
            class_names=["World"],
            package_paths=[asset_dir],
            recursive_paths=True)
        maps = ar.get_assets(_filter)

        # There should be only one map in the list
        if not maps:
            raise Exception("Could not find map.")

        map = maps[0]

        EditorLevelLibrary.save_all_dirty_levels()
        EditorLevelLibrary.load_level(map.get_asset().get_path_name())

        # Remove the camera from the level.
        actors = EditorLevelLibrary.get_all_level_actors()

        for a in actors:
            if a.__class__ == unreal.CineCameraActor:
                EditorLevelLibrary.destroy_actor(a)

        EditorLevelLibrary.save_all_dirty_levels()
        EditorLevelLibrary.load_level(world.get_asset().get_path_name())

        # There should be only one sequence in the path.
        sequence_name = sequences[0].asset_name

        # Remove the Level Sequence from the parent.
        # We need to traverse the hierarchy from the master sequence to find
        # the level sequence.
        root = "/Game/Ayon"
        namespace = container.get('namespace').replace(f"{root}/", "")
        ms_asset = namespace.split('/')[0]
        _filter = unreal.ARFilter(
            class_names=["LevelSequence"],
            package_paths=[f"{root}/{ms_asset}"],
            recursive_paths=False)
        sequences = ar.get_assets(_filter)
        master_sequence = sequences[0].get_asset()
        _filter = unreal.ARFilter(
            class_names=["World"],
            package_paths=[f"{root}/{ms_asset}"],
            recursive_paths=False)
        levels = ar.get_assets(_filter)
        master_level = levels[0].get_full_name()

        sequences = [master_sequence]

        parent = None
        for s in sequences:
            tracks = s.get_master_tracks()
            subscene_track = None
            visibility_track = None
            for t in tracks:
                if t.get_class() == unreal.MovieSceneSubTrack.static_class():
                    subscene_track = t
                if (t.get_class() ==
                        unreal.MovieSceneLevelVisibilityTrack.static_class()):
                    visibility_track = t
            if subscene_track:
                sections = subscene_track.get_sections()
                for ss in sections:
                    if ss.get_sequence().get_name() == sequence_name:
                        parent = s
                        subscene_track.remove_section(ss)
                        break
                    sequences.append(ss.get_sequence())
                # Update subscenes indexes.
                for i, ss in enumerate(sections):
                    ss.set_row_index(i)

            if visibility_track:
                sections = visibility_track.get_sections()
                for ss in sections:
                    if (unreal.Name(f"{container.get('asset')}_map_camera")
                            in ss.get_level_names()):
                        visibility_track.remove_section(ss)
                # Update visibility sections indexes.
                i = -1
                prev_name = []
                for ss in sections:
                    if prev_name != ss.get_level_names():
                        i += 1
                    ss.set_row_index(i)
                    prev_name = ss.get_level_names()
            if parent:
                break

        assert parent, "Could not find the parent sequence"

        # Create a temporary level to delete the layout level.
        EditorLevelLibrary.save_all_dirty_levels()
        EditorAssetLibrary.make_directory(f"{root}/tmp")
        tmp_level = f"{root}/tmp/temp_map"
        if not EditorAssetLibrary.does_asset_exist(f"{tmp_level}.temp_map"):
            EditorLevelLibrary.new_level(tmp_level)
        else:
            EditorLevelLibrary.load_level(tmp_level)

        # Delete the layout directory.
        EditorAssetLibrary.delete_directory(asset_dir)

        EditorLevelLibrary.load_level(master_level)
        EditorAssetLibrary.delete_directory(f"{root}/tmp")

        # Check if there aren't any more assets in the parent folder, and
        # delete it if not.
        asset_content = EditorAssetLibrary.list_assets(
            path.parent.as_posix(), recursive=False, include_folder=True
        )

        if len(asset_content) == 0:
            EditorAssetLibrary.delete_directory(path.parent.as_posix())
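# Note (added commentary, not part of the removed file): a worked example of
# the key retiming used by CameraLoader above. Every key is shifted by
# (clip_in - frameStart) so that keys exported relative to the shot's
# frameStart land on the editorial clip range. Values are illustrative.
clip_in = 1009       # folder attribute "clipIn"
frame_start = 1001   # folder attribute "frameStart"
old_key_frame = 1001
new_key_frame = old_key_frame + (clip_in - frame_start)  # -> 1009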
@ -1,251 +0,0 @@
# -*- coding: utf-8 -*-
"""Loader for published alembics."""
import os

from ayon_core.pipeline import (
    get_representation_path,
    AYON_CONTAINER_ID
)
from ayon_unreal.api import plugin
from ayon_unreal.api.pipeline import (
    AYON_ASSET_DIR,
    create_container,
    imprint,
)

import unreal  # noqa


class PointCacheAlembicLoader(plugin.Loader):
    """Load Point Cache from Alembic"""

    product_types = {"model", "pointcache"}
    label = "Import Alembic Point Cache"
    representations = {"abc"}
    icon = "cube"
    color = "orange"

    root = AYON_ASSET_DIR

    @staticmethod
    def get_task(
        filename, asset_dir, asset_name, replace,
        frame_start=None, frame_end=None
    ):
        task = unreal.AssetImportTask()
        options = unreal.AbcImportSettings()
        gc_settings = unreal.AbcGeometryCacheSettings()
        conversion_settings = unreal.AbcConversionSettings()
        sampling_settings = unreal.AbcSamplingSettings()

        task.set_editor_property('filename', filename)
        task.set_editor_property('destination_path', asset_dir)
        task.set_editor_property('destination_name', asset_name)
        task.set_editor_property('replace_existing', replace)
        task.set_editor_property('automated', True)
        task.set_editor_property('save', True)

        options.set_editor_property(
            'import_type', unreal.AlembicImportType.GEOMETRY_CACHE)

        gc_settings.set_editor_property('flatten_tracks', False)

        conversion_settings.set_editor_property('flip_u', False)
        conversion_settings.set_editor_property('flip_v', True)
        conversion_settings.set_editor_property(
            'scale', unreal.Vector(x=100.0, y=100.0, z=100.0))
        conversion_settings.set_editor_property(
            'rotation', unreal.Vector(x=-90.0, y=0.0, z=180.0))

        if frame_start is not None:
            sampling_settings.set_editor_property('frame_start', frame_start)
        if frame_end is not None:
            sampling_settings.set_editor_property('frame_end', frame_end)

        options.geometry_cache_settings = gc_settings
        options.conversion_settings = conversion_settings
        options.sampling_settings = sampling_settings
        task.options = options

        return task

    def import_and_containerize(
        self, filepath, asset_dir, asset_name, container_name,
        frame_start, frame_end
    ):
        unreal.EditorAssetLibrary.make_directory(asset_dir)

        task = self.get_task(
            filepath, asset_dir, asset_name, False, frame_start, frame_end)

        unreal.AssetToolsHelpers.get_asset_tools().import_asset_tasks([task])

        # Create Asset Container
        create_container(container=container_name, path=asset_dir)

    def imprint(
        self,
        folder_path,
        asset_dir,
        container_name,
        asset_name,
        representation,
        frame_start,
        frame_end,
        product_type,
    ):
        data = {
            "schema": "ayon:container-2.0",
            "id": AYON_CONTAINER_ID,
            "namespace": asset_dir,
            "container_name": container_name,
            "asset_name": asset_name,
            "loader": str(self.__class__.__name__),
            "representation": representation["id"],
            "parent": representation["versionId"],
            "frame_start": frame_start,
            "frame_end": frame_end,
            "product_type": product_type,
            "folder_path": folder_path,
            # TODO these should be probably removed
            "family": product_type,
            "asset": folder_path,
        }
        imprint(f"{asset_dir}/{container_name}", data)

    def load(self, context, name, namespace, options):
        """Load and containerise representation into Content Browser.

        Args:
            context (dict): application context
            name (str): Product name
            namespace (str): in Unreal this is basically path to container.
                             This is not passed here, so namespace is set
                             by `containerise()` because only then we know
                             real path.
            data (dict): Those would be data to be imprinted.

        Returns:
            list(str): list of container content
        """
        # Create directory for asset and Ayon container
        folder_entity = context["folder"]
        folder_path = folder_entity["path"]
        folder_name = folder_entity["name"]
        folder_attributes = folder_entity["attrib"]

        suffix = "_CON"
        asset_name = f"{folder_name}_{name}" if folder_name else f"{name}"
        version = context["version"]["version"]
        # Check if version is hero version and use different name
        if version < 0:
            name_version = f"{name}_hero"
        else:
            name_version = f"{name}_v{version:03d}"

        tools = unreal.AssetToolsHelpers().get_asset_tools()
        asset_dir, container_name = tools.create_unique_asset_name(
            f"{self.root}/{folder_name}/{name_version}", suffix="")

        container_name += suffix

        frame_start = folder_attributes.get("frameStart")
        frame_end = folder_attributes.get("frameEnd")

        # If frame start and end are the same, we increase the end frame by
        # one, otherwise Unreal will not import it
        if frame_start == frame_end:
            frame_end += 1

        if not unreal.EditorAssetLibrary.does_directory_exist(asset_dir):
            path = self.filepath_from_context(context)

            self.import_and_containerize(
                path, asset_dir, asset_name, container_name,
                frame_start, frame_end)

        self.imprint(
            folder_path,
            asset_dir,
            container_name,
            asset_name,
            context["representation"],
            frame_start,
            frame_end,
            context["product"]["productType"]
        )

        asset_content = unreal.EditorAssetLibrary.list_assets(
            asset_dir, recursive=True, include_folder=True
        )

        for a in asset_content:
            unreal.EditorAssetLibrary.save_asset(a)

        return asset_content

    def update(self, container, context):
        # Create directory for folder and Ayon container
        folder_path = context["folder"]["path"]
        folder_name = context["folder"]["name"]
        product_name = context["product"]["name"]
        product_type = context["product"]["productType"]
        version = context["version"]["version"]
        repre_entity = context["representation"]

        suffix = "_CON"
        asset_name = product_name
        if folder_name:
            asset_name = f"{folder_name}_{product_name}"

        # Check if version is hero version and use different name
        if version < 0:
            name_version = f"{product_name}_hero"
        else:
            name_version = f"{product_name}_v{version:03d}"
        tools = unreal.AssetToolsHelpers().get_asset_tools()
        asset_dir, container_name = tools.create_unique_asset_name(
            f"{self.root}/{folder_name}/{name_version}", suffix="")

        container_name += suffix

        frame_start = int(container.get("frame_start"))
        frame_end = int(container.get("frame_end"))

        if not unreal.EditorAssetLibrary.does_directory_exist(asset_dir):
            path = get_representation_path(repre_entity)

            self.import_and_containerize(
                path, asset_dir, asset_name, container_name,
                frame_start, frame_end)

        self.imprint(
            folder_path,
            asset_dir,
            container_name,
            asset_name,
            repre_entity,
            frame_start,
            frame_end,
            product_type
        )

        asset_content = unreal.EditorAssetLibrary.list_assets(
            asset_dir, recursive=True, include_folder=False
        )

        for a in asset_content:
            unreal.EditorAssetLibrary.save_asset(a)

    def remove(self, container):
        path = container["namespace"]
        parent_path = os.path.dirname(path)

        unreal.EditorAssetLibrary.delete_directory(path)

        asset_content = unreal.EditorAssetLibrary.list_assets(
            parent_path, recursive=False
        )

        if len(asset_content) == 0:
            unreal.EditorAssetLibrary.delete_directory(parent_path)
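# Note (added commentary, not part of the removed file): the point-cache loader
# bumps the end frame when a product spans a single frame, because the Unreal
# Alembic importer rejects an empty sampling range. Illustrative values:
frame_start, frame_end = 1001, 1001
if frame_start == frame_end:
    frame_end += 1   # imported range becomes 1001-1002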
|
@ -1,916 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""Loader for layouts."""
|
||||
import json
|
||||
import collections
|
||||
from pathlib import Path
|
||||
|
||||
import unreal
|
||||
from unreal import (
|
||||
EditorAssetLibrary,
|
||||
EditorLevelLibrary,
|
||||
EditorLevelUtils,
|
||||
AssetToolsHelpers,
|
||||
FBXImportType,
|
||||
MovieSceneLevelVisibilityTrack,
|
||||
MovieSceneSubTrack,
|
||||
LevelSequenceEditorBlueprintLibrary as LevelSequenceLib,
|
||||
)
|
||||
import ayon_api
|
||||
|
||||
from ayon_core.pipeline import (
|
||||
discover_loader_plugins,
|
||||
loaders_from_representation,
|
||||
load_container,
|
||||
get_representation_path,
|
||||
AYON_CONTAINER_ID,
|
||||
get_current_project_name,
|
||||
)
|
||||
from ayon_core.pipeline.context_tools import get_current_folder_entity
|
||||
from ayon_core.settings import get_current_project_settings
|
||||
from ayon_unreal.api import plugin
|
||||
from ayon_unreal.api.pipeline import (
|
||||
generate_sequence,
|
||||
set_sequence_hierarchy,
|
||||
create_container,
|
||||
imprint,
|
||||
ls,
|
||||
)
|
||||
|
||||
|
||||
class LayoutLoader(plugin.Loader):
|
||||
"""Load Layout from a JSON file"""
|
||||
|
||||
product_types = {"layout"}
|
||||
representations = {"json"}
|
||||
|
||||
label = "Load Layout"
|
||||
icon = "code-fork"
|
||||
color = "orange"
|
||||
ASSET_ROOT = "/Game/Ayon"
|
||||
|
||||
def _get_asset_containers(self, path):
|
||||
ar = unreal.AssetRegistryHelpers.get_asset_registry()
|
||||
|
||||
asset_content = EditorAssetLibrary.list_assets(
|
||||
path, recursive=True)
|
||||
|
||||
asset_containers = []
|
||||
|
||||
# Get all the asset containers
|
||||
for a in asset_content:
|
||||
obj = ar.get_asset_by_object_path(a)
|
||||
if obj.get_asset().get_class().get_name() == 'AyonAssetContainer':
|
||||
asset_containers.append(obj)
|
||||
|
||||
return asset_containers
|
||||
|
||||
@staticmethod
|
||||
def _get_fbx_loader(loaders, family):
|
||||
name = ""
|
||||
if family == 'rig':
|
||||
name = "SkeletalMeshFBXLoader"
|
||||
elif family == 'model':
|
||||
name = "StaticMeshFBXLoader"
|
||||
elif family == 'camera':
|
||||
name = "CameraLoader"
|
||||
|
||||
if name == "":
|
||||
return None
|
||||
|
||||
for loader in loaders:
|
||||
if loader.__name__ == name:
|
||||
return loader
|
||||
|
||||
return None
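# NOTE (illustration, not part of the original plugin): the if/elif
# chains in _get_fbx_loader and _get_abc_loader amount to a simple
# family-to-loader-name lookup; an equivalent sketch:
#
#     FBX_LOADER_BY_FAMILY = {
#         "rig": "SkeletalMeshFBXLoader",
#         "model": "StaticMeshFBXLoader",
#         "camera": "CameraLoader",
#     }
#     name = FBX_LOADER_BY_FAMILY.get(family, "")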
|
||||
|
||||
@staticmethod
|
||||
def _get_abc_loader(loaders, family):
|
||||
name = ""
|
||||
if family == 'rig':
|
||||
name = "SkeletalMeshAlembicLoader"
|
||||
elif family == 'model':
|
||||
name = "StaticMeshAlembicLoader"
|
||||
|
||||
if name == "":
|
||||
return None
|
||||
|
||||
for loader in loaders:
|
||||
if loader.__name__ == name:
|
||||
return loader
|
||||
|
||||
return None
|
||||
|
||||
def _transform_from_basis(self, transform, basis):
|
||||
"""Transform a transform from a basis to a new basis."""
|
||||
# Get the basis matrix
|
||||
basis_matrix = unreal.Matrix(
|
||||
basis[0],
|
||||
basis[1],
|
||||
basis[2],
|
||||
basis[3]
|
||||
)
|
||||
transform_matrix = unreal.Matrix(
|
||||
transform[0],
|
||||
transform[1],
|
||||
transform[2],
|
||||
transform[3]
|
||||
)
|
||||
|
||||
new_transform = (
|
||||
basis_matrix.get_inverse() * transform_matrix * basis_matrix)
|
||||
|
||||
return new_transform.transform()
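# NOTE (illustration, not part of the original plugin): the method above
# conjugates the stored transform by the basis matrix, i.e. it computes
# basis^-1 * transform * basis and converts the result to an
# unreal.Transform. Assuming `transform` and `basis` are the 4x4 row
# lists stored in the layout JSON and the same multiplication order as
# unreal.Matrix, the change of basis can be sketched with numpy (numpy
# is not a dependency of this module):
def _transform_from_basis_np(transform, basis):
    import numpy as np

    basis_m = np.array(basis, dtype=float)
    transform_m = np.array(transform, dtype=float)
    # Conjugation: express the transform in the target basis.
    return np.linalg.inv(basis_m) @ transform_m @ basis_m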
|
||||
|
||||
def _process_family(
|
||||
self, assets, class_name, transform, basis, sequence, inst_name=None
|
||||
):
|
||||
ar = unreal.AssetRegistryHelpers.get_asset_registry()
|
||||
|
||||
actors = []
|
||||
bindings = []
|
||||
|
||||
for asset in assets:
|
||||
obj = ar.get_asset_by_object_path(asset).get_asset()
|
||||
if obj.get_class().get_name() == class_name:
|
||||
t = self._transform_from_basis(transform, basis)
|
||||
actor = EditorLevelLibrary.spawn_actor_from_object(
|
||||
obj, t.translation
|
||||
)
|
||||
actor.set_actor_rotation(t.rotation.rotator(), False)
|
||||
actor.set_actor_scale3d(t.scale3d)
|
||||
|
||||
if class_name == 'SkeletalMesh':
|
||||
skm_comp = actor.get_editor_property(
|
||||
'skeletal_mesh_component')
|
||||
skm_comp.set_bounds_scale(10.0)
|
||||
|
||||
actors.append(actor)
|
||||
|
||||
if sequence:
|
||||
binding = None
|
||||
for p in sequence.get_possessables():
|
||||
if p.get_name() == actor.get_name():
|
||||
binding = p
|
||||
break
|
||||
|
||||
if not binding:
|
||||
binding = sequence.add_possessable(actor)
|
||||
|
||||
bindings.append(binding)
|
||||
|
||||
return actors, bindings
|
||||
|
||||
def _import_animation(
|
||||
self, asset_dir, path, instance_name, skeleton, actors_dict,
|
||||
animation_file, bindings_dict, sequence
|
||||
):
|
||||
anim_file = Path(animation_file)
|
||||
anim_file_name = anim_file.with_suffix('')
|
||||
|
||||
anim_path = f"{asset_dir}/animations/{anim_file_name}"
|
||||
|
||||
folder_entity = get_current_folder_entity()
|
||||
# Import animation
|
||||
task = unreal.AssetImportTask()
|
||||
task.options = unreal.FbxImportUI()
|
||||
|
||||
task.set_editor_property(
|
||||
'filename', str(path.with_suffix(f".{animation_file}")))
|
||||
task.set_editor_property('destination_path', anim_path)
|
||||
task.set_editor_property(
|
||||
'destination_name', f"{instance_name}_animation")
|
||||
task.set_editor_property('replace_existing', False)
|
||||
task.set_editor_property('automated', True)
|
||||
task.set_editor_property('save', False)
|
||||
|
||||
# set import options here
|
||||
task.options.set_editor_property(
|
||||
'automated_import_should_detect_type', False)
|
||||
task.options.set_editor_property(
|
||||
'original_import_type', FBXImportType.FBXIT_SKELETAL_MESH)
|
||||
task.options.set_editor_property(
|
||||
'mesh_type_to_import', FBXImportType.FBXIT_ANIMATION)
|
||||
task.options.set_editor_property('import_mesh', False)
|
||||
task.options.set_editor_property('import_animations', True)
|
||||
task.options.set_editor_property('override_full_name', True)
|
||||
task.options.set_editor_property('skeleton', skeleton)
|
||||
|
||||
task.options.anim_sequence_import_data.set_editor_property(
|
||||
'animation_length',
|
||||
unreal.FBXAnimationLengthImportType.FBXALIT_EXPORTED_TIME
|
||||
)
|
||||
task.options.anim_sequence_import_data.set_editor_property(
|
||||
'import_meshes_in_bone_hierarchy', False)
|
||||
task.options.anim_sequence_import_data.set_editor_property(
|
||||
'use_default_sample_rate', False)
|
||||
task.options.anim_sequence_import_data.set_editor_property(
|
||||
'custom_sample_rate', folder_entity.get("attrib", {}).get("fps"))
|
||||
task.options.anim_sequence_import_data.set_editor_property(
|
||||
'import_custom_attribute', True)
|
||||
task.options.anim_sequence_import_data.set_editor_property(
|
||||
'import_bone_tracks', True)
|
||||
task.options.anim_sequence_import_data.set_editor_property(
|
||||
'remove_redundant_keys', False)
|
||||
task.options.anim_sequence_import_data.set_editor_property(
|
||||
'convert_scene', True)
|
||||
|
||||
AssetToolsHelpers.get_asset_tools().import_asset_tasks([task])
|
||||
|
||||
asset_content = unreal.EditorAssetLibrary.list_assets(
|
||||
anim_path, recursive=False, include_folder=False
|
||||
)
|
||||
|
||||
animation = None
|
||||
|
||||
for a in asset_content:
|
||||
unreal.EditorAssetLibrary.save_asset(a)
|
||||
imported_asset_data = unreal.EditorAssetLibrary.find_asset_data(a)
|
||||
imported_asset = unreal.AssetRegistryHelpers.get_asset(
|
||||
imported_asset_data)
|
||||
if imported_asset.__class__ == unreal.AnimSequence:
|
||||
animation = imported_asset
|
||||
break
|
||||
|
||||
if animation:
|
||||
actor = None
|
||||
if actors_dict.get(instance_name):
|
||||
for a in actors_dict.get(instance_name):
|
||||
if a.get_class().get_name() == 'SkeletalMeshActor':
|
||||
actor = a
|
||||
break
|
||||
|
||||
animation.set_editor_property('enable_root_motion', True)
|
||||
actor.skeletal_mesh_component.set_editor_property(
|
||||
'animation_mode', unreal.AnimationMode.ANIMATION_SINGLE_NODE)
|
||||
actor.skeletal_mesh_component.animation_data.set_editor_property(
|
||||
'anim_to_play', animation)
|
||||
|
||||
if sequence:
|
||||
# Add animation to the sequencer
|
||||
bindings = bindings_dict.get(instance_name)
|
||||
|
||||
ar = unreal.AssetRegistryHelpers.get_asset_registry()
|
||||
|
||||
for binding in bindings:
|
||||
tracks = binding.get_tracks()
|
||||
track = None
|
||||
track = tracks[0] if tracks else binding.add_track(
|
||||
unreal.MovieSceneSkeletalAnimationTrack)
|
||||
|
||||
sections = track.get_sections()
|
||||
section = None
|
||||
if not sections:
|
||||
section = track.add_section()
|
||||
else:
|
||||
section = sections[0]
|
||||
|
||||
sec_params = section.get_editor_property('params')
|
||||
curr_anim = sec_params.get_editor_property('animation')
|
||||
|
||||
if curr_anim:
|
||||
# Checks if the animation path has a container.
|
||||
# If it does, it means that the animation is
|
||||
# already in the sequencer.
|
||||
anim_path = str(Path(
|
||||
curr_anim.get_path_name()).parent
|
||||
).replace('\\', '/')
|
||||
|
||||
_filter = unreal.ARFilter(
|
||||
class_names=["AyonAssetContainer"],
|
||||
package_paths=[anim_path],
|
||||
recursive_paths=False)
|
||||
containers = ar.get_assets(_filter)
|
||||
|
||||
if len(containers) > 0:
|
||||
return
|
||||
|
||||
section.set_range(
|
||||
sequence.get_playback_start(),
|
||||
sequence.get_playback_end())
|
||||
sec_params = section.get_editor_property('params')
|
||||
sec_params.set_editor_property('animation', animation)
|
||||
|
||||
def _get_repre_entities_by_version_id(self, data):
|
||||
version_ids = {
|
||||
element.get("version")
|
||||
for element in data
|
||||
if element.get("representation")
|
||||
}
|
||||
version_ids.discard(None)
|
||||
|
||||
output = collections.defaultdict(list)
|
||||
if not version_ids:
|
||||
return output
|
||||
|
||||
project_name = get_current_project_name()
|
||||
repre_entities = ayon_api.get_representations(
|
||||
project_name,
|
||||
representation_names={"fbx", "abc"},
|
||||
version_ids=version_ids,
|
||||
fields={"id", "versionId", "name"}
|
||||
)
|
||||
for repre_entity in repre_entities:
|
||||
version_id = repre_entity["versionId"]
|
||||
output[version_id].append(repre_entity)
|
||||
return output
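# NOTE (illustration, not part of the original plugin): the returned
# mapping groups fbx/abc representation entities by their version id,
# e.g. (illustrative ids only):
#     {
#         "<version-id>": [
#             {"id": "...", "versionId": "<version-id>", "name": "fbx"},
#             {"id": "...", "versionId": "<version-id>", "name": "abc"},
#         ]
#     }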
|
||||
|
||||
def _process(self, lib_path, asset_dir, sequence, repr_loaded=None):
|
||||
ar = unreal.AssetRegistryHelpers.get_asset_registry()
|
||||
|
||||
with open(lib_path, "r") as fp:
|
||||
data = json.load(fp)
|
||||
|
||||
all_loaders = discover_loader_plugins()
|
||||
|
||||
if not repr_loaded:
|
||||
repr_loaded = []
|
||||
|
||||
path = Path(lib_path)
|
||||
|
||||
skeleton_dict = {}
|
||||
actors_dict = {}
|
||||
bindings_dict = {}
|
||||
|
||||
loaded_assets = []
|
||||
|
||||
repre_entities_by_version_id = self._get_repre_entities_by_version_id(
|
||||
data
|
||||
)
|
||||
for element in data:
|
||||
repre_id = None
|
||||
repr_format = None
|
||||
if element.get('representation'):
|
||||
version_id = element.get("version")
|
||||
repre_entities = repre_entities_by_version_id[version_id]
|
||||
if not repre_entities:
|
||||
self.log.error(
|
||||
f"No valid representation found for version"
|
||||
f" {version_id}")
|
||||
continue
|
||||
repre_entity = repre_entities[0]
|
||||
repre_id = repre_entity["id"]
|
||||
repr_format = repre_entity["name"]
|
||||
|
||||
# This is to keep compatibility with old versions of the
|
||||
# json format.
|
||||
elif element.get('reference_fbx'):
|
||||
repre_id = element.get('reference_fbx')
|
||||
repr_format = 'fbx'
|
||||
elif element.get('reference_abc'):
|
||||
repre_id = element.get('reference_abc')
|
||||
repr_format = 'abc'
|
||||
|
||||
# If reference is None, this element is skipped, as it cannot be
|
||||
# imported in Unreal
|
||||
if not repre_id:
|
||||
continue
|
||||
|
||||
instance_name = element.get('instance_name')
|
||||
|
||||
skeleton = None
|
||||
|
||||
if repre_id not in repr_loaded:
|
||||
repr_loaded.append(repre_id)
|
||||
|
||||
product_type = element.get("product_type")
|
||||
if product_type is None:
|
||||
product_type = element.get("family")
|
||||
loaders = loaders_from_representation(
|
||||
all_loaders, repre_id)
|
||||
|
||||
loader = None
|
||||
|
||||
if repr_format == 'fbx':
|
||||
loader = self._get_fbx_loader(loaders, product_type)
|
||||
elif repr_format == 'abc':
|
||||
loader = self._get_abc_loader(loaders, product_type)
|
||||
|
||||
if not loader:
|
||||
self.log.error(
|
||||
f"No valid loader found for {repre_id}")
|
||||
continue
|
||||
|
||||
options = {
|
||||
# "asset_dir": asset_dir
|
||||
}
|
||||
|
||||
assets = load_container(
|
||||
loader,
|
||||
repre_id,
|
||||
namespace=instance_name,
|
||||
options=options
|
||||
)
|
||||
|
||||
container = None
|
||||
|
||||
for asset in assets:
|
||||
obj = ar.get_asset_by_object_path(asset).get_asset()
|
||||
if obj.get_class().get_name() == 'AyonAssetContainer':
|
||||
container = obj
|
||||
if obj.get_class().get_name() == 'Skeleton':
|
||||
skeleton = obj
|
||||
|
||||
loaded_assets.append(container.get_path_name())
|
||||
|
||||
instances = [
|
||||
item for item in data
|
||||
if ((item.get('version') and
|
||||
item.get('version') == element.get('version')) or
|
||||
item.get('reference_fbx') == repre_id or
|
||||
item.get('reference_abc') == repre_id)]
|
||||
|
||||
for instance in instances:
|
||||
# transform = instance.get('transform')
|
||||
transform = instance.get('transform_matrix')
|
||||
basis = instance.get('basis')
|
||||
inst = instance.get('instance_name')
|
||||
|
||||
actors = []
|
||||
|
||||
if product_type == 'model':
|
||||
actors, _ = self._process_family(
|
||||
assets, 'StaticMesh', transform, basis,
|
||||
sequence, inst
|
||||
)
|
||||
elif product_type == 'rig':
|
||||
actors, bindings = self._process_family(
|
||||
assets, 'SkeletalMesh', transform, basis,
|
||||
sequence, inst
|
||||
)
|
||||
actors_dict[inst] = actors
|
||||
bindings_dict[inst] = bindings
|
||||
|
||||
if skeleton:
|
||||
skeleton_dict[repre_id] = skeleton
|
||||
else:
|
||||
skeleton = skeleton_dict.get(repre_id)
|
||||
|
||||
animation_file = element.get('animation')
|
||||
|
||||
if animation_file and skeleton:
|
||||
self._import_animation(
|
||||
asset_dir, path, instance_name, skeleton, actors_dict,
|
||||
animation_file, bindings_dict, sequence)
|
||||
|
||||
return loaded_assets
|
||||
|
||||
@staticmethod
|
||||
def _remove_family(assets, components, class_name, prop_name):
|
||||
ar = unreal.AssetRegistryHelpers.get_asset_registry()
|
||||
|
||||
objects = []
|
||||
for a in assets:
|
||||
obj = ar.get_asset_by_object_path(a)
|
||||
if obj.get_asset().get_class().get_name() == class_name:
|
||||
objects.append(obj)
|
||||
for obj in objects:
|
||||
for comp in components:
|
||||
if comp.get_editor_property(prop_name) == obj.get_asset():
|
||||
comp.get_owner().destroy_actor()
|
||||
|
||||
def _remove_actors(self, path):
|
||||
asset_containers = self._get_asset_containers(path)
|
||||
|
||||
# Get all the static and skeletal meshes components in the level
|
||||
components = EditorLevelLibrary.get_all_level_actors_components()
|
||||
static_meshes_comp = [
|
||||
c for c in components
|
||||
if c.get_class().get_name() == 'StaticMeshComponent']
|
||||
skel_meshes_comp = [
|
||||
c for c in components
|
||||
if c.get_class().get_name() == 'SkeletalMeshComponent']
|
||||
|
||||
# For all the asset containers, get the static and skeletal meshes.
|
||||
# Then, check the components in the level and destroy the matching
|
||||
# actors.
|
||||
for asset_container in asset_containers:
|
||||
package_path = asset_container.get_editor_property('package_path')
|
||||
family = EditorAssetLibrary.get_metadata_tag(
|
||||
asset_container.get_asset(), "family")
|
||||
assets = EditorAssetLibrary.list_assets(
|
||||
str(package_path), recursive=False)
|
||||
if family == 'model':
|
||||
self._remove_family(
|
||||
assets, static_meshes_comp, 'StaticMesh', 'static_mesh')
|
||||
elif family == 'rig':
|
||||
self._remove_family(
|
||||
assets, skel_meshes_comp, 'SkeletalMesh', 'skeletal_mesh')
|
||||
|
||||
def load(self, context, name, namespace, options):
|
||||
"""Load and containerise representation into Content Browser.
|
||||
|
||||
This is two step process. First, import FBX to temporary path and
|
||||
then call `containerise()` on it - this moves all content to new
|
||||
directory and then it will create AssetContainer there and imprint it
|
||||
with metadata. This will mark this path as container.
|
||||
|
||||
Args:
|
||||
context (dict): application context
|
||||
name (str): Product name
|
||||
namespace (str): in Unreal this is basically path to container.
|
||||
This is not passed here, so namespace is set
|
||||
by `containerise()` because only then we know
|
||||
real path.
|
||||
options (dict): Those would be data to be imprinted. This is not
|
||||
used now, data are imprinted by `containerise()`.
|
||||
|
||||
Returns:
|
||||
list(str): list of container content
|
||||
"""
|
||||
data = get_current_project_settings()
|
||||
create_sequences = data["unreal"]["level_sequences_for_layouts"]
|
||||
|
||||
# Create directory for asset and Ayon container
|
||||
folder_entity = context["folder"]
|
||||
folder_path = folder_entity["path"]
|
||||
hierarchy = folder_path.lstrip("/").split("/")
|
||||
# Remove folder name
|
||||
folder_name = hierarchy.pop(-1)
|
||||
root = self.ASSET_ROOT
|
||||
hierarchy_dir = root
|
||||
hierarchy_dir_list = []
|
||||
for h in hierarchy:
|
||||
hierarchy_dir = f"{hierarchy_dir}/{h}"
|
||||
hierarchy_dir_list.append(hierarchy_dir)
|
||||
suffix = "_CON"
|
||||
asset_name = f"{folder_name}_{name}" if folder_name else name
|
||||
|
||||
tools = unreal.AssetToolsHelpers().get_asset_tools()
|
||||
asset_dir, container_name = tools.create_unique_asset_name(
|
||||
"{}/{}/{}".format(hierarchy_dir, folder_name, name),
|
||||
suffix=""
|
||||
)
|
||||
|
||||
container_name += suffix
|
||||
|
||||
EditorAssetLibrary.make_directory(asset_dir)
|
||||
|
||||
master_level = None
|
||||
shot = None
|
||||
sequences = []
|
||||
|
||||
level = f"{asset_dir}/{folder_name}_map.{folder_name}_map"
|
||||
EditorLevelLibrary.new_level(f"{asset_dir}/{folder_name}_map")
|
||||
|
||||
if create_sequences:
|
||||
# Create the map for the shot, and create the hierarchy of maps. If
# the maps already exist, we will use them.
|
||||
if hierarchy:
|
||||
h_dir = hierarchy_dir_list[0]
|
||||
h_asset = hierarchy[0]
|
||||
master_level = f"{h_dir}/{h_asset}_map.{h_asset}_map"
|
||||
if not EditorAssetLibrary.does_asset_exist(master_level):
|
||||
EditorLevelLibrary.new_level(f"{h_dir}/{h_asset}_map")
|
||||
|
||||
if master_level:
|
||||
EditorLevelLibrary.load_level(master_level)
|
||||
EditorLevelUtils.add_level_to_world(
|
||||
EditorLevelLibrary.get_editor_world(),
|
||||
level,
|
||||
unreal.LevelStreamingDynamic
|
||||
)
|
||||
EditorLevelLibrary.save_all_dirty_levels()
|
||||
EditorLevelLibrary.load_level(level)
|
||||
|
||||
# Get all the sequences in the hierarchy. They will be created if
# they don't exist.
|
||||
frame_ranges = []
|
||||
for (h_dir, h) in zip(hierarchy_dir_list, hierarchy):
|
||||
root_content = EditorAssetLibrary.list_assets(
|
||||
h_dir, recursive=False, include_folder=False)
|
||||
|
||||
existing_sequences = [
|
||||
EditorAssetLibrary.find_asset_data(asset)
|
||||
for asset in root_content
|
||||
if EditorAssetLibrary.find_asset_data(
|
||||
asset).get_class().get_name() == 'LevelSequence'
|
||||
]
|
||||
|
||||
if not existing_sequences:
|
||||
sequence, frame_range = generate_sequence(h, h_dir)
|
||||
|
||||
sequences.append(sequence)
|
||||
frame_ranges.append(frame_range)
|
||||
else:
|
||||
for e in existing_sequences:
|
||||
sequences.append(e.get_asset())
|
||||
frame_ranges.append((
|
||||
e.get_asset().get_playback_start(),
|
||||
e.get_asset().get_playback_end()))
|
||||
|
||||
shot = tools.create_asset(
|
||||
asset_name=folder_name,
|
||||
package_path=asset_dir,
|
||||
asset_class=unreal.LevelSequence,
|
||||
factory=unreal.LevelSequenceFactoryNew()
|
||||
)
|
||||
|
||||
# sequences and frame_ranges have the same length
|
||||
for i in range(0, len(sequences) - 1):
|
||||
set_sequence_hierarchy(
|
||||
sequences[i], sequences[i + 1],
|
||||
frame_ranges[i][1],
|
||||
frame_ranges[i + 1][0], frame_ranges[i + 1][1],
|
||||
[level])
|
||||
|
||||
project_name = get_current_project_name()
|
||||
folder_attributes = (
|
||||
ayon_api.get_folder_by_path(project_name, folder_path)["attrib"]
|
||||
)
|
||||
shot.set_display_rate(
|
||||
unreal.FrameRate(folder_attributes.get("fps"), 1.0))
|
||||
shot.set_playback_start(0)
|
||||
shot.set_playback_end(
|
||||
folder_attributes.get('clipOut')
|
||||
- folder_attributes.get('clipIn')
|
||||
+ 1
|
||||
)
|
||||
if sequences:
|
||||
set_sequence_hierarchy(
|
||||
sequences[-1],
|
||||
shot,
|
||||
frame_ranges[-1][1],
|
||||
folder_attributes.get('clipIn'),
|
||||
folder_attributes.get('clipOut'),
|
||||
[level])
|
||||
|
||||
EditorLevelLibrary.load_level(level)
|
||||
|
||||
path = self.filepath_from_context(context)
|
||||
loaded_assets = self._process(path, asset_dir, shot)
|
||||
|
||||
for s in sequences:
|
||||
EditorAssetLibrary.save_asset(s.get_path_name())
|
||||
|
||||
EditorLevelLibrary.save_current_level()
|
||||
|
||||
# Create Asset Container
|
||||
create_container(
|
||||
container=container_name, path=asset_dir)
|
||||
|
||||
data = {
|
||||
"schema": "ayon:container-2.0",
|
||||
"id": AYON_CONTAINER_ID,
|
||||
"asset": folder_name,
|
||||
"folder_path": folder_path,
|
||||
"namespace": asset_dir,
|
||||
"container_name": container_name,
|
||||
"asset_name": asset_name,
|
||||
"loader": str(self.__class__.__name__),
|
||||
"representation": context["representation"]["id"],
|
||||
"parent": context["representation"]["versionId"],
|
||||
"family": context["product"]["productType"],
|
||||
"loaded_assets": loaded_assets
|
||||
}
|
||||
imprint(
|
||||
"{}/{}".format(asset_dir, container_name), data)
|
||||
|
||||
save_dir = hierarchy_dir_list[0] if create_sequences else asset_dir
|
||||
|
||||
asset_content = EditorAssetLibrary.list_assets(
|
||||
save_dir, recursive=True, include_folder=False)
|
||||
|
||||
for a in asset_content:
|
||||
EditorAssetLibrary.save_asset(a)
|
||||
|
||||
if master_level:
|
||||
EditorLevelLibrary.load_level(master_level)
|
||||
|
||||
return asset_content
|
||||
|
||||
def update(self, container, context):
|
||||
data = get_current_project_settings()
|
||||
create_sequences = data["unreal"]["level_sequences_for_layouts"]
|
||||
|
||||
ar = unreal.AssetRegistryHelpers.get_asset_registry()
|
||||
|
||||
curr_level_sequence = LevelSequenceLib.get_current_level_sequence()
|
||||
curr_time = LevelSequenceLib.get_current_time()
|
||||
is_cam_lock = LevelSequenceLib.is_camera_cut_locked_to_viewport()
|
||||
|
||||
editor_subsystem = unreal.UnrealEditorSubsystem()
|
||||
vp_loc, vp_rot = editor_subsystem.get_level_viewport_camera_info()
|
||||
|
||||
root = "/Game/Ayon"
|
||||
|
||||
asset_dir = container.get('namespace')
|
||||
|
||||
folder_entity = context["folder"]
|
||||
repre_entity = context["representation"]
|
||||
|
||||
hierarchy = folder_entity["path"].lstrip("/").split("/")
|
||||
first_parent_name = hierarchy[0]
|
||||
|
||||
sequence = None
|
||||
master_level = None
|
||||
|
||||
if create_sequences:
|
||||
h_dir = f"{root}/{first_parent_name}"
|
||||
h_asset = first_parent_name
|
||||
master_level = f"{h_dir}/{h_asset}_map.{h_asset}_map"
|
||||
|
||||
filter = unreal.ARFilter(
|
||||
class_names=["LevelSequence"],
|
||||
package_paths=[asset_dir],
|
||||
recursive_paths=False)
|
||||
sequences = ar.get_assets(filter)
|
||||
sequence = sequences[0].get_asset()
|
||||
|
||||
prev_level = None
|
||||
|
||||
if not master_level:
|
||||
curr_level = unreal.LevelEditorSubsystem().get_current_level()
|
||||
curr_level_path = curr_level.get_outer().get_path_name()
|
||||
# If the level path does not start with "/Game/", the current
|
||||
# level is a temporary, unsaved level.
|
||||
if curr_level_path.startswith("/Game/"):
|
||||
prev_level = curr_level_path
|
||||
|
||||
# Get layout level
|
||||
filter = unreal.ARFilter(
|
||||
class_names=["World"],
|
||||
package_paths=[asset_dir],
|
||||
recursive_paths=False)
|
||||
levels = ar.get_assets(filter)
|
||||
|
||||
layout_level = levels[0].get_asset().get_path_name()
|
||||
|
||||
EditorLevelLibrary.save_all_dirty_levels()
|
||||
EditorLevelLibrary.load_level(layout_level)
|
||||
|
||||
# Delete all the actors in the level
|
||||
actors = unreal.EditorLevelLibrary.get_all_level_actors()
|
||||
for actor in actors:
|
||||
unreal.EditorLevelLibrary.destroy_actor(actor)
|
||||
|
||||
if create_sequences:
|
||||
EditorLevelLibrary.save_current_level()
|
||||
|
||||
EditorAssetLibrary.delete_directory(f"{asset_dir}/animations/")
|
||||
|
||||
source_path = get_representation_path(repre_entity)
|
||||
|
||||
loaded_assets = self._process(source_path, asset_dir, sequence)
|
||||
|
||||
data = {
|
||||
"representation": repre_entity["id"],
|
||||
"parent": repre_entity["versionId"],
|
||||
"loaded_assets": loaded_assets,
|
||||
}
|
||||
imprint(
|
||||
"{}/{}".format(asset_dir, container.get('container_name')), data)
|
||||
|
||||
EditorLevelLibrary.save_current_level()
|
||||
|
||||
save_dir = f"{root}/{first_parent_name}" if create_sequences else asset_dir
|
||||
|
||||
asset_content = EditorAssetLibrary.list_assets(
|
||||
save_dir, recursive=True, include_folder=False)
|
||||
|
||||
for a in asset_content:
|
||||
EditorAssetLibrary.save_asset(a)
|
||||
|
||||
if master_level:
|
||||
EditorLevelLibrary.load_level(master_level)
|
||||
elif prev_level:
|
||||
EditorLevelLibrary.load_level(prev_level)
|
||||
|
||||
if curr_level_sequence:
|
||||
LevelSequenceLib.open_level_sequence(curr_level_sequence)
|
||||
LevelSequenceLib.set_current_time(curr_time)
|
||||
LevelSequenceLib.set_lock_camera_cut_to_viewport(is_cam_lock)
|
||||
|
||||
editor_subsystem.set_level_viewport_camera_info(vp_loc, vp_rot)
|
||||
|
||||
def remove(self, container):
|
||||
"""
|
||||
Delete the layout. First, check if the assets loaded with the layout
|
||||
are used by other layouts. If not, delete the assets.
|
||||
"""
|
||||
data = get_current_project_settings()
|
||||
create_sequences = data["unreal"]["level_sequences_for_layouts"]
|
||||
|
||||
root = "/Game/Ayon"
|
||||
path = Path(container.get("namespace"))
|
||||
|
||||
containers = ls()
|
||||
layout_containers = [
|
||||
c for c in containers
|
||||
if (c.get('asset_name') != container.get('asset_name') and
|
||||
c.get('family') == "layout")]
|
||||
|
||||
# Check if the assets have been loaded by other layouts, and delete
# them if they haven't.
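# NOTE (illustration, not part of the original plugin): 'loaded_assets'
# comes back from the imprinted container metadata as the string
# representation of a list, which is why eval() is used below to turn
# it back into a Python list.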
|
||||
for asset in eval(container.get('loaded_assets')):
|
||||
layouts = [
|
||||
lc for lc in layout_containers
|
||||
if asset in lc.get('loaded_assets')]
|
||||
|
||||
if not layouts:
|
||||
EditorAssetLibrary.delete_directory(str(Path(asset).parent))
|
||||
|
||||
# Delete the parent folder if there aren't any more
|
||||
# layouts in it.
|
||||
asset_content = EditorAssetLibrary.list_assets(
|
||||
str(Path(asset).parent.parent), recursive=False,
|
||||
include_folder=True
|
||||
)
|
||||
|
||||
if len(asset_content) == 0:
|
||||
EditorAssetLibrary.delete_directory(
|
||||
str(Path(asset).parent.parent))
|
||||
|
||||
master_sequence = None
|
||||
master_level = None
|
||||
sequences = []
|
||||
|
||||
if create_sequences:
|
||||
# Remove the Level Sequence from the parent.
|
||||
# We need to traverse the hierarchy from the master sequence to
|
||||
# find the level sequence.
|
||||
namespace = container.get('namespace').replace(f"{root}/", "")
|
||||
ms_asset = namespace.split('/')[0]
|
||||
ar = unreal.AssetRegistryHelpers.get_asset_registry()
|
||||
_filter = unreal.ARFilter(
|
||||
class_names=["LevelSequence"],
|
||||
package_paths=[f"{root}/{ms_asset}"],
|
||||
recursive_paths=False)
|
||||
sequences = ar.get_assets(_filter)
|
||||
master_sequence = sequences[0].get_asset()
|
||||
_filter = unreal.ARFilter(
|
||||
class_names=["World"],
|
||||
package_paths=[f"{root}/{ms_asset}"],
|
||||
recursive_paths=False)
|
||||
levels = ar.get_assets(_filter)
|
||||
master_level = levels[0].get_asset().get_path_name()
|
||||
|
||||
sequences = [master_sequence]
|
||||
|
||||
parent = None
|
||||
for s in sequences:
|
||||
tracks = s.get_master_tracks()
|
||||
subscene_track = None
|
||||
visibility_track = None
|
||||
for t in tracks:
|
||||
if t.get_class() == MovieSceneSubTrack.static_class():
|
||||
subscene_track = t
|
||||
if (t.get_class() ==
|
||||
MovieSceneLevelVisibilityTrack.static_class()):
|
||||
visibility_track = t
|
||||
if subscene_track:
|
||||
sections = subscene_track.get_sections()
|
||||
for ss in sections:
|
||||
if (ss.get_sequence().get_name() ==
|
||||
container.get('asset')):
|
||||
parent = s
|
||||
subscene_track.remove_section(ss)
|
||||
break
|
||||
sequences.append(ss.get_sequence())
|
||||
# Update subscenes indexes.
|
||||
i = 0
|
||||
for ss in sections:
|
||||
ss.set_row_index(i)
|
||||
i += 1
|
||||
|
||||
if visibility_track:
|
||||
sections = visibility_track.get_sections()
|
||||
for ss in sections:
|
||||
if (unreal.Name(f"{container.get('asset')}_map")
|
||||
in ss.get_level_names()):
|
||||
visibility_track.remove_section(ss)
|
||||
# Update visibility sections indexes.
|
||||
i = -1
|
||||
prev_name = []
|
||||
for ss in sections:
|
||||
if prev_name != ss.get_level_names():
|
||||
i += 1
|
||||
ss.set_row_index(i)
|
||||
prev_name = ss.get_level_names()
|
||||
if parent:
|
||||
break
|
||||
|
||||
assert parent, "Could not find the parent sequence"
|
||||
|
||||
# Create a temporary level to delete the layout level.
|
||||
EditorLevelLibrary.save_all_dirty_levels()
|
||||
EditorAssetLibrary.make_directory(f"{root}/tmp")
|
||||
tmp_level = f"{root}/tmp/temp_map"
|
||||
if not EditorAssetLibrary.does_asset_exist(f"{tmp_level}.temp_map"):
|
||||
EditorLevelLibrary.new_level(tmp_level)
|
||||
else:
|
||||
EditorLevelLibrary.load_level(tmp_level)
|
||||
|
||||
# Delete the layout directory.
|
||||
EditorAssetLibrary.delete_directory(str(path))
|
||||
|
||||
if create_sequences:
|
||||
EditorLevelLibrary.load_level(master_level)
|
||||
EditorAssetLibrary.delete_directory(f"{root}/tmp")
|
||||
|
||||
# Delete the parent folder if there aren't any more layouts in it.
|
||||
asset_content = EditorAssetLibrary.list_assets(
|
||||
str(path.parent), recursive=False, include_folder=True
|
||||
)
|
||||
|
||||
if len(asset_content) == 0:
|
||||
EditorAssetLibrary.delete_directory(str(path.parent))
|
||||
@@ -1,451 +0,0 @@
import json
|
||||
from pathlib import Path
|
||||
|
||||
import unreal
|
||||
from unreal import EditorLevelLibrary
|
||||
import ayon_api
|
||||
|
||||
from ayon_core.pipeline import (
|
||||
discover_loader_plugins,
|
||||
loaders_from_representation,
|
||||
load_container,
|
||||
get_representation_path,
|
||||
AYON_CONTAINER_ID,
|
||||
)
|
||||
from ayon_unreal.api import plugin
|
||||
from ayon_unreal.api import pipeline as upipeline
|
||||
|
||||
|
||||
class ExistingLayoutLoader(plugin.Loader):
|
||||
"""
|
||||
Load Layout for an existing scene, and match the existing assets.
|
||||
"""
|
||||
|
||||
product_types = {"layout"}
|
||||
representations = {"json"}
|
||||
|
||||
label = "Load Layout on Existing Scene"
|
||||
icon = "code-fork"
|
||||
color = "orange"
|
||||
ASSET_ROOT = "/Game/Ayon"
|
||||
|
||||
delete_unmatched_assets = True
|
||||
|
||||
@classmethod
|
||||
def apply_settings(cls, project_settings):
|
||||
super(ExistingLayoutLoader, cls).apply_settings(
|
||||
project_settings
|
||||
)
|
||||
cls.delete_unmatched_assets = (
|
||||
project_settings["unreal"]["delete_unmatched_assets"]
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def _create_container(
|
||||
asset_name,
|
||||
asset_dir,
|
||||
folder_path,
|
||||
representation,
|
||||
version_id,
|
||||
product_type
|
||||
):
|
||||
container_name = f"{asset_name}_CON"
|
||||
|
||||
if not unreal.EditorAssetLibrary.does_asset_exist(
|
||||
f"{asset_dir}/{container_name}"
|
||||
):
|
||||
container = upipeline.create_container(container_name, asset_dir)
|
||||
else:
|
||||
ar = unreal.AssetRegistryHelpers.get_asset_registry()
|
||||
obj = ar.get_asset_by_object_path(
|
||||
f"{asset_dir}/{container_name}.{container_name}")
|
||||
container = obj.get_asset()
|
||||
|
||||
data = {
|
||||
"schema": "ayon:container-2.0",
|
||||
"id": AYON_CONTAINER_ID,
|
||||
"folder_path": folder_path,
|
||||
"namespace": asset_dir,
|
||||
"container_name": container_name,
|
||||
"asset_name": asset_name,
|
||||
# "loader": str(self.__class__.__name__),
|
||||
"representation": representation,
|
||||
"parent": version_id,
|
||||
"product_type": product_type,
|
||||
# TODO these should probably be removed
|
||||
"asset": folder_path,
|
||||
"family": product_type,
|
||||
}
|
||||
|
||||
upipeline.imprint(
|
||||
"{}/{}".format(asset_dir, container_name), data)
|
||||
|
||||
return container.get_path_name()
|
||||
|
||||
@staticmethod
|
||||
def _get_current_level():
|
||||
ue_version = unreal.SystemLibrary.get_engine_version().split('.')
|
||||
ue_major = ue_version[0]
|
||||
|
||||
if ue_major == '4':
|
||||
return EditorLevelLibrary.get_editor_world()
|
||||
elif ue_major == '5':
|
||||
return unreal.LevelEditorSubsystem().get_current_level()
|
||||
|
||||
raise NotImplementedError(
|
||||
f"Unreal version {ue_major} not supported")
|
||||
|
||||
def _transform_from_basis(self, transform, basis):
|
||||
"""Transform a transform from a basis to a new basis."""
|
||||
# Get the basis matrix
|
||||
basis_matrix = unreal.Matrix(
|
||||
basis[0],
|
||||
basis[1],
|
||||
basis[2],
|
||||
basis[3]
|
||||
)
|
||||
transform_matrix = unreal.Matrix(
|
||||
transform[0],
|
||||
transform[1],
|
||||
transform[2],
|
||||
transform[3]
|
||||
)
|
||||
|
||||
new_transform = (
|
||||
basis_matrix.get_inverse() * transform_matrix * basis_matrix)
|
||||
|
||||
return new_transform.transform()
|
||||
|
||||
def _spawn_actor(self, obj, lasset):
|
||||
actor = EditorLevelLibrary.spawn_actor_from_object(
|
||||
obj, unreal.Vector(0.0, 0.0, 0.0)
|
||||
)
|
||||
|
||||
actor.set_actor_label(lasset.get('instance_name'))
|
||||
|
||||
transform = lasset.get('transform_matrix')
|
||||
basis = lasset.get('basis')
|
||||
|
||||
computed_transform = self._transform_from_basis(transform, basis)
|
||||
|
||||
actor.set_actor_transform(computed_transform, False, True)
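# NOTE (illustration, not part of the original plugin): the actor is
# spawned at the world origin first, and the full transform (translation,
# rotation and scale) is applied afterwards through set_actor_transform()
# once it has been converted from the layout's basis.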
|
||||
|
||||
@staticmethod
|
||||
def _get_fbx_loader(loaders, family):
|
||||
name = ""
|
||||
if family == 'rig':
|
||||
name = "SkeletalMeshFBXLoader"
|
||||
elif family == 'model' or family == 'staticMesh':
|
||||
name = "StaticMeshFBXLoader"
|
||||
elif family == 'camera':
|
||||
name = "CameraLoader"
|
||||
|
||||
if name == "":
|
||||
return None
|
||||
|
||||
for loader in loaders:
|
||||
if loader.__name__ == name:
|
||||
return loader
|
||||
|
||||
return None
|
||||
|
||||
@staticmethod
|
||||
def _get_abc_loader(loaders, family):
|
||||
name = ""
|
||||
if family == 'rig':
|
||||
name = "SkeletalMeshAlembicLoader"
|
||||
elif family == 'model':
|
||||
name = "StaticMeshAlembicLoader"
|
||||
|
||||
if name == "":
|
||||
return None
|
||||
|
||||
for loader in loaders:
|
||||
if loader.__name__ == name:
|
||||
return loader
|
||||
|
||||
return None
|
||||
|
||||
def _load_asset(self, repr_data, representation, instance_name, family):
|
||||
repr_format = repr_data.get('name')
|
||||
|
||||
all_loaders = discover_loader_plugins()
|
||||
loaders = loaders_from_representation(
|
||||
all_loaders, representation)
|
||||
|
||||
loader = None
|
||||
|
||||
if repr_format == 'fbx':
|
||||
loader = self._get_fbx_loader(loaders, family)
|
||||
elif repr_format == 'abc':
|
||||
loader = self._get_abc_loader(loaders, family)
|
||||
|
||||
if not loader:
|
||||
self.log.error(f"No valid loader found for {representation}")
|
||||
return []
|
||||
|
||||
# This option is necessary to avoid importing the assets with a
|
||||
# different conversion compared to the other assets. For ABC files,
|
||||
# it is in fact impossible to access the conversion settings. So,
|
||||
# we must assume that the Maya conversion settings have been applied.
|
||||
options = {
|
||||
"default_conversion": True
|
||||
}
|
||||
|
||||
assets = load_container(
|
||||
loader,
|
||||
representation,
|
||||
namespace=instance_name,
|
||||
options=options
|
||||
)
|
||||
|
||||
return assets
|
||||
|
||||
def _get_valid_repre_entities(self, project_name, version_ids):
|
||||
valid_formats = ['fbx', 'abc']
|
||||
|
||||
repre_entities = list(ayon_api.get_representations(
|
||||
project_name,
|
||||
representation_names=valid_formats,
|
||||
version_ids=version_ids
|
||||
))
|
||||
repre_entities_by_version_id = {}
|
||||
for repre_entity in repre_entities:
|
||||
version_id = repre_entity["versionId"]
|
||||
repre_entities_by_version_id[version_id] = repre_entity
|
||||
return repre_entities_by_version_id
|
||||
|
||||
def _process(self, lib_path, project_name):
|
||||
ar = unreal.AssetRegistryHelpers.get_asset_registry()
|
||||
|
||||
actors = EditorLevelLibrary.get_all_level_actors()
|
||||
|
||||
with open(lib_path, "r") as fp:
|
||||
data = json.load(fp)
|
||||
|
||||
elements = []
|
||||
repre_ids = set()
|
||||
# Get all the representations in the JSON from the database.
|
||||
for element in data:
|
||||
repre_id = element.get('representation')
|
||||
if repre_id:
|
||||
repre_ids.add(repre_id)
|
||||
elements.append(element)
|
||||
|
||||
repre_entities = ayon_api.get_representations(
|
||||
project_name, representation_ids=repre_ids
|
||||
)
|
||||
repre_entities_by_id = {
|
||||
repre_entity["id"]: repre_entity
|
||||
for repre_entity in repre_entities
|
||||
}
|
||||
layout_data = []
|
||||
version_ids = set()
|
||||
for element in elements:
|
||||
repre_id = element.get("representation")
|
||||
repre_entity = repre_entities_by_id.get(repre_id)
|
||||
if not repre_entity:
|
||||
raise AssertionError("Representation not found")
|
||||
if not (
|
||||
repre_entity.get("attrib")
|
||||
or repre_entity["attrib"].get("path")
|
||||
):
|
||||
raise AssertionError("Representation does not have path")
|
||||
if not repre_entity.get('context'):
|
||||
raise AssertionError("Representation does not have context")
|
||||
|
||||
layout_data.append((repre_entity, element))
|
||||
version_ids.add(repre_entity["versionId"])
|
||||
|
||||
repre_parents_by_id = ayon_api.get_representation_parents(
|
||||
project_name, repre_entities_by_id.keys()
|
||||
)
|
||||
|
||||
# Prequery valid repre documents for all elements at once
|
||||
valid_repre_entities_by_version_id = self._get_valid_repre_entities(
|
||||
project_name, version_ids)
|
||||
containers = []
|
||||
actors_matched = []
|
||||
|
||||
for (repre_entity, lasset) in layout_data:
|
||||
# For every actor in the scene, check if it has a representation in
|
||||
# those we got from the JSON. If so, create a container for it.
|
||||
# Otherwise, remove it from the scene.
|
||||
found = False
|
||||
repre_id = repre_entity["id"]
|
||||
repre_parents = repre_parents_by_id[repre_id]
|
||||
folder_path = repre_parents.folder["path"]
|
||||
folder_name = repre_parents.folder["name"]
|
||||
product_name = repre_parents.product["name"]
|
||||
product_type = repre_parents.product["productType"]
|
||||
|
||||
for actor in actors:
|
||||
if not actor.get_class().get_name() == 'StaticMeshActor':
|
||||
continue
|
||||
if actor in actors_matched:
|
||||
continue
|
||||
|
||||
# Get the original path of the file from which the asset has
|
||||
# been imported.
|
||||
smc = actor.get_editor_property('static_mesh_component')
|
||||
mesh = smc.get_editor_property('static_mesh')
|
||||
import_data = mesh.get_editor_property('asset_import_data')
|
||||
filename = import_data.get_first_filename()
|
||||
path = Path(filename)
|
||||
|
||||
if (not path.name or
|
||||
path.name not in repre_entity["attrib"]["path"]):
|
||||
continue
|
||||
|
||||
actor.set_actor_label(lasset.get('instance_name'))
|
||||
|
||||
mesh_path = Path(mesh.get_path_name()).parent.as_posix()
|
||||
|
||||
# Create the container for the asset.
|
||||
container = self._create_container(
|
||||
f"{folder_name}_{product_name}",
|
||||
mesh_path,
|
||||
folder_path,
|
||||
repre_entity["id"],
|
||||
repre_entity["versionId"],
|
||||
product_type
|
||||
)
|
||||
containers.append(container)
|
||||
|
||||
# Set the transform for the actor.
|
||||
transform = lasset.get('transform_matrix')
|
||||
basis = lasset.get('basis')
|
||||
|
||||
computed_transform = self._transform_from_basis(
|
||||
transform, basis)
|
||||
actor.set_actor_transform(computed_transform, False, True)
|
||||
|
||||
actors_matched.append(actor)
|
||||
found = True
|
||||
break
|
||||
|
||||
# If an actor has not been found for this representation,
|
||||
# we check if it has been loaded already by checking all the
|
||||
# loaded containers. If so, we add it to the scene. Otherwise,
|
||||
# we load it.
|
||||
if found:
|
||||
continue
|
||||
|
||||
all_containers = upipeline.ls()
|
||||
|
||||
loaded = False
|
||||
|
||||
for container in all_containers:
|
||||
repre_id = container.get('representation')
|
||||
|
||||
if not repre_id == repre_entity["id"]:
|
||||
continue
|
||||
|
||||
asset_dir = container.get('namespace')
|
||||
|
||||
arfilter = unreal.ARFilter(
|
||||
class_names=["StaticMesh"],
|
||||
package_paths=[asset_dir],
|
||||
recursive_paths=False)
|
||||
assets = ar.get_assets(arfilter)
|
||||
|
||||
for asset in assets:
|
||||
obj = asset.get_asset()
|
||||
self._spawn_actor(obj, lasset)
|
||||
|
||||
loaded = True
|
||||
break
|
||||
|
||||
# If the asset has not been loaded yet, we load it.
|
||||
if loaded:
|
||||
continue
|
||||
|
||||
version_id = lasset.get('version')
|
||||
assets = self._load_asset(
|
||||
valid_repre_entities_by_version_id.get(version_id),
|
||||
lasset.get('representation'),
|
||||
lasset.get('instance_name'),
|
||||
lasset.get('family')
|
||||
)
|
||||
|
||||
for asset in assets:
|
||||
obj = ar.get_asset_by_object_path(asset).get_asset()
|
||||
if not obj.get_class().get_name() == 'StaticMesh':
|
||||
continue
|
||||
self._spawn_actor(obj, lasset)
|
||||
|
||||
break
|
||||
|
||||
# Check if an actor was not matched to a representation.
|
||||
# If so, remove it from the scene.
|
||||
for actor in actors:
|
||||
if not actor.get_class().get_name() == 'StaticMeshActor':
|
||||
continue
|
||||
if actor not in actors_matched:
|
||||
self.log.warning(f"Actor {actor.get_name()} not matched.")
|
||||
if self.delete_unmatched_assets:
|
||||
EditorLevelLibrary.destroy_actor(actor)
|
||||
|
||||
return containers
|
||||
|
||||
def load(self, context, name, namespace, options):
|
||||
print("Loading Layout and Match Assets")
|
||||
|
||||
folder_name = context["folder"]["name"]
|
||||
folder_path = context["folder"]["path"]
|
||||
product_type = context["product"]["productType"]
|
||||
asset_name = f"{folder_name}_{name}" if folder_name else name
|
||||
container_name = f"{folder_name}_{name}_CON"
|
||||
|
||||
curr_level = self._get_current_level()
|
||||
|
||||
if not curr_level:
|
||||
raise AssertionError("Current level not saved")
|
||||
|
||||
project_name = context["project"]["name"]
|
||||
path = self.filepath_from_context(context)
|
||||
containers = self._process(path, project_name)
|
||||
|
||||
curr_level_path = Path(
|
||||
curr_level.get_outer().get_path_name()).parent.as_posix()
|
||||
|
||||
if not unreal.EditorAssetLibrary.does_asset_exist(
|
||||
f"{curr_level_path}/{container_name}"
|
||||
):
|
||||
upipeline.create_container(
|
||||
container=container_name, path=curr_level_path)
|
||||
|
||||
data = {
|
||||
"schema": "ayon:container-2.0",
|
||||
"id": AYON_CONTAINER_ID,
|
||||
"folder_path": folder_path,
|
||||
"namespace": curr_level_path,
|
||||
"container_name": container_name,
|
||||
"asset_name": asset_name,
|
||||
"loader": str(self.__class__.__name__),
|
||||
"representation": context["representation"]["id"],
|
||||
"parent": context["representation"]["versionId"],
|
||||
"product_type": product_type,
|
||||
"loaded_assets": containers,
|
||||
# TODO these should probably be removed
|
||||
"asset": folder_path,
|
||||
"family": product_type,
|
||||
}
|
||||
upipeline.imprint(f"{curr_level_path}/{container_name}", data)
|
||||
|
||||
def update(self, container, context):
|
||||
asset_dir = container.get('namespace')
|
||||
|
||||
project_name = context["project"]["name"]
|
||||
repre_entity = context["representation"]
|
||||
|
||||
source_path = get_representation_path(repre_entity)
|
||||
containers = self._process(source_path, project_name)
|
||||
|
||||
data = {
|
||||
"representation": repre_entity["id"],
|
||||
"loaded_assets": containers,
|
||||
"parent": repre_entity["versionId"],
|
||||
}
|
||||
upipeline.imprint(
|
||||
"{}/{}".format(asset_dir, container.get('container_name')), data)
|
||||
@@ -1,220 +0,0 @@
# -*- coding: utf-8 -*-
|
||||
"""Load Skeletal Mesh alembics."""
|
||||
import os
|
||||
|
||||
from ayon_core.pipeline import (
|
||||
get_representation_path,
|
||||
AYON_CONTAINER_ID
|
||||
)
|
||||
from ayon_unreal.api import plugin
|
||||
from ayon_unreal.api.pipeline import (
|
||||
AYON_ASSET_DIR,
|
||||
create_container,
|
||||
imprint,
|
||||
)
|
||||
import unreal # noqa
|
||||
|
||||
|
||||
class SkeletalMeshAlembicLoader(plugin.Loader):
|
||||
"""Load Unreal SkeletalMesh from Alembic"""
|
||||
|
||||
product_types = {"pointcache", "skeletalMesh"}
|
||||
label = "Import Alembic Skeletal Mesh"
|
||||
representations = {"abc"}
|
||||
icon = "cube"
|
||||
color = "orange"
|
||||
|
||||
root = AYON_ASSET_DIR
|
||||
|
||||
@staticmethod
|
||||
def get_task(filename, asset_dir, asset_name, replace, default_conversion):
|
||||
task = unreal.AssetImportTask()
|
||||
options = unreal.AbcImportSettings()
|
||||
conversion_settings = unreal.AbcConversionSettings(
|
||||
preset=unreal.AbcConversionPreset.CUSTOM,
|
||||
flip_u=False, flip_v=False,
|
||||
rotation=[0.0, 0.0, 0.0],
|
||||
scale=[1.0, 1.0, 1.0])
|
||||
|
||||
task.set_editor_property('filename', filename)
|
||||
task.set_editor_property('destination_path', asset_dir)
|
||||
task.set_editor_property('destination_name', asset_name)
|
||||
task.set_editor_property('replace_existing', replace)
|
||||
task.set_editor_property('automated', True)
|
||||
task.set_editor_property('save', True)
|
||||
|
||||
options.set_editor_property(
|
||||
'import_type', unreal.AlembicImportType.SKELETAL)
|
||||
|
||||
if not default_conversion:
|
||||
conversion_settings = unreal.AbcConversionSettings(
|
||||
preset=unreal.AbcConversionPreset.CUSTOM,
|
||||
flip_u=False, flip_v=False,
|
||||
rotation=[0.0, 0.0, 0.0],
|
||||
scale=[1.0, 1.0, 1.0])
|
||||
options.conversion_settings = conversion_settings
|
||||
|
||||
task.options = options
|
||||
|
||||
return task
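# NOTE (illustration, not part of the original plugin): a minimal sketch
# of how a task produced by get_task() is typically executed; the
# function name, file path and asset names are placeholders:
def _example_import_pointcache(filepath, asset_dir, asset_name):
    task = SkeletalMeshAlembicLoader.get_task(
        filepath, asset_dir, asset_name, False, False)
    # Run the import and return the resulting assets in the target folder.
    unreal.AssetToolsHelpers.get_asset_tools().import_asset_tasks([task])
    return unreal.EditorAssetLibrary.list_assets(asset_dir, recursive=True)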
|
||||
|
||||
def import_and_containerize(
|
||||
self, filepath, asset_dir, asset_name, container_name,
|
||||
default_conversion=False
|
||||
):
|
||||
unreal.EditorAssetLibrary.make_directory(asset_dir)
|
||||
|
||||
task = self.get_task(
|
||||
filepath, asset_dir, asset_name, False, default_conversion)
|
||||
|
||||
unreal.AssetToolsHelpers.get_asset_tools().import_asset_tasks([task])
|
||||
|
||||
# Create Asset Container
|
||||
create_container(container=container_name, path=asset_dir)
|
||||
|
||||
def imprint(
|
||||
self,
|
||||
folder_path,
|
||||
asset_dir,
|
||||
container_name,
|
||||
asset_name,
|
||||
representation,
|
||||
product_type
|
||||
):
|
||||
data = {
|
||||
"schema": "ayon:container-2.0",
|
||||
"id": AYON_CONTAINER_ID,
|
||||
"folder_path": folder_path,
|
||||
"namespace": asset_dir,
|
||||
"container_name": container_name,
|
||||
"asset_name": asset_name,
|
||||
"loader": str(self.__class__.__name__),
|
||||
"representation": representation["id"],
|
||||
"parent": representation["versionId"],
|
||||
"product_type": product_type,
|
||||
# TODO these should probably be removed
|
||||
"asset": folder_path,
|
||||
"family": product_type,
|
||||
}
|
||||
imprint(f"{asset_dir}/{container_name}", data)
|
||||
|
||||
def load(self, context, name, namespace, options):
|
||||
"""Load and containerise representation into Content Browser.
|
||||
|
||||
Args:
|
||||
context (dict): application context
|
||||
name (str): Product name
|
||||
namespace (str): in Unreal this is basically path to container.
|
||||
This is not passed here, so namespace is set
|
||||
by `containerise()` because only then we know
|
||||
real path.
|
||||
data (dict): Those would be data to be imprinted.
|
||||
|
||||
Returns:
|
||||
list(str): list of container content
|
||||
"""
|
||||
# Create directory for asset and ayon container
|
||||
folder_path = context["folder"]["path"]
|
||||
folder_name = context["folder"]["name"]
|
||||
suffix = "_CON"
|
||||
asset_name = f"{folder_name}_{name}" if folder_name else f"{name}"
|
||||
version = context["version"]["version"]
|
||||
# Check if version is hero version and use different name
|
||||
if version < 0:
|
||||
name_version = f"{name}_hero"
|
||||
else:
|
||||
name_version = f"{name}_v{version:03d}"
|
||||
|
||||
default_conversion = False
|
||||
if options.get("default_conversion"):
|
||||
default_conversion = options.get("default_conversion")
|
||||
|
||||
tools = unreal.AssetToolsHelpers().get_asset_tools()
|
||||
asset_dir, container_name = tools.create_unique_asset_name(
|
||||
f"{self.root}/{folder_name}/{name_version}", suffix="")
|
||||
|
||||
container_name += suffix
|
||||
|
||||
if not unreal.EditorAssetLibrary.does_directory_exist(asset_dir):
|
||||
path = self.filepath_from_context(context)
|
||||
|
||||
self.import_and_containerize(path, asset_dir, asset_name,
|
||||
container_name, default_conversion)
|
||||
|
||||
product_type = context["product"]["productType"]
|
||||
self.imprint(
|
||||
folder_path,
|
||||
asset_dir,
|
||||
container_name,
|
||||
asset_name,
|
||||
context["representation"],
|
||||
product_type
|
||||
)
|
||||
|
||||
asset_content = unreal.EditorAssetLibrary.list_assets(
|
||||
asset_dir, recursive=True, include_folder=True
|
||||
)
|
||||
|
||||
for a in asset_content:
|
||||
unreal.EditorAssetLibrary.save_asset(a)
|
||||
|
||||
return asset_content
|
||||
|
||||
def update(self, container, context):
|
||||
folder_path = context["folder"]["path"]
|
||||
folder_name = context["folder"]["name"]
|
||||
product_name = context["product"]["name"]
|
||||
product_type = context["product"]["productType"]
|
||||
version = context["version"]["version"]
|
||||
repre_entity = context["representation"]
|
||||
|
||||
# Create directory for folder and Ayon container
|
||||
suffix = "_CON"
|
||||
asset_name = product_name
|
||||
if folder_name:
|
||||
asset_name = f"{folder_name}_{product_name}"
|
||||
# Check if version is hero version and use different name
|
||||
if version < 0:
|
||||
name_version = f"{product_name}_hero"
|
||||
else:
|
||||
name_version = f"{product_name}_v{version:03d}"
|
||||
tools = unreal.AssetToolsHelpers().get_asset_tools()
|
||||
asset_dir, container_name = tools.create_unique_asset_name(
|
||||
f"{self.root}/{folder_name}/{name_version}", suffix="")
|
||||
|
||||
container_name += suffix
|
||||
|
||||
if not unreal.EditorAssetLibrary.does_directory_exist(asset_dir):
|
||||
path = get_representation_path(repre_entity)
|
||||
|
||||
self.import_and_containerize(path, asset_dir, asset_name,
|
||||
container_name)
|
||||
|
||||
self.imprint(
|
||||
folder_path,
|
||||
asset_dir,
|
||||
container_name,
|
||||
asset_name,
|
||||
repre_entity,
|
||||
product_type,
|
||||
)
|
||||
|
||||
asset_content = unreal.EditorAssetLibrary.list_assets(
|
||||
asset_dir, recursive=True, include_folder=False
|
||||
)
|
||||
|
||||
for a in asset_content:
|
||||
unreal.EditorAssetLibrary.save_asset(a)
|
||||
|
||||
def remove(self, container):
|
||||
path = container["namespace"]
|
||||
parent_path = os.path.dirname(path)
|
||||
|
||||
unreal.EditorAssetLibrary.delete_directory(path)
|
||||
|
||||
asset_content = unreal.EditorAssetLibrary.list_assets(
|
||||
parent_path, recursive=False
|
||||
)
|
||||
|
||||
if len(asset_content) == 0:
|
||||
unreal.EditorAssetLibrary.delete_directory(parent_path)
|
||||
@@ -1,222 +0,0 @@
# -*- coding: utf-8 -*-
|
||||
"""Load Skeletal Meshes form FBX."""
|
||||
import os
|
||||
|
||||
from ayon_core.pipeline import (
|
||||
get_representation_path,
|
||||
AYON_CONTAINER_ID
|
||||
)
|
||||
from ayon_unreal.api import plugin
|
||||
from ayon_unreal.api.pipeline import (
|
||||
AYON_ASSET_DIR,
|
||||
create_container,
|
||||
imprint,
|
||||
)
|
||||
import unreal # noqa
|
||||
|
||||
|
||||
class SkeletalMeshFBXLoader(plugin.Loader):
|
||||
"""Load Unreal SkeletalMesh from FBX."""
|
||||
|
||||
product_types = {"rig", "skeletalMesh"}
|
||||
label = "Import FBX Skeletal Mesh"
|
||||
representations = {"fbx"}
|
||||
icon = "cube"
|
||||
color = "orange"
|
||||
|
||||
root = AYON_ASSET_DIR
|
||||
|
||||
@staticmethod
|
||||
def get_task(filename, asset_dir, asset_name, replace):
|
||||
task = unreal.AssetImportTask()
|
||||
options = unreal.FbxImportUI()
|
||||
|
||||
task.set_editor_property('filename', filename)
|
||||
task.set_editor_property('destination_path', asset_dir)
|
||||
task.set_editor_property('destination_name', asset_name)
|
||||
task.set_editor_property('replace_existing', replace)
|
||||
task.set_editor_property('automated', True)
|
||||
task.set_editor_property('save', True)
|
||||
|
||||
options.set_editor_property(
|
||||
'automated_import_should_detect_type', False)
|
||||
options.set_editor_property('import_as_skeletal', True)
|
||||
options.set_editor_property('import_animations', False)
|
||||
options.set_editor_property('import_mesh', True)
|
||||
options.set_editor_property('import_materials', False)
|
||||
options.set_editor_property('import_textures', False)
|
||||
options.set_editor_property('skeleton', None)
|
||||
options.set_editor_property('create_physics_asset', False)
|
||||
|
||||
options.set_editor_property(
|
||||
'mesh_type_to_import',
|
||||
unreal.FBXImportType.FBXIT_SKELETAL_MESH)
|
||||
|
||||
options.skeletal_mesh_import_data.set_editor_property(
|
||||
'import_content_type',
|
||||
unreal.FBXImportContentType.FBXICT_ALL)
|
||||
|
||||
options.skeletal_mesh_import_data.set_editor_property(
|
||||
'normal_import_method',
|
||||
unreal.FBXNormalImportMethod.FBXNIM_IMPORT_NORMALS)
|
||||
|
||||
task.options = options
|
||||
|
||||
return task
|
||||
|
||||
def import_and_containerize(
|
||||
self, filepath, asset_dir, asset_name, container_name
|
||||
):
|
||||
unreal.EditorAssetLibrary.make_directory(asset_dir)
|
||||
|
||||
task = self.get_task(
|
||||
filepath, asset_dir, asset_name, False)
|
||||
|
||||
unreal.AssetToolsHelpers.get_asset_tools().import_asset_tasks([task])
|
||||
|
||||
# Create Asset Container
|
||||
create_container(container=container_name, path=asset_dir)
|
||||
|
||||
def imprint(
|
||||
self,
|
||||
folder_path,
|
||||
asset_dir,
|
||||
container_name,
|
||||
asset_name,
|
||||
representation,
|
||||
product_type
|
||||
):
|
||||
data = {
|
||||
"schema": "ayon:container-2.0",
|
||||
"id": AYON_CONTAINER_ID,
|
||||
"folder_path": folder_path,
|
||||
"namespace": asset_dir,
|
||||
"container_name": container_name,
|
||||
"asset_name": asset_name,
|
||||
"loader": str(self.__class__.__name__),
|
||||
"representation": representation["id"],
|
||||
"parent": representation["versionId"],
|
||||
"product_type": product_type,
|
||||
# TODO these should probably be removed
|
||||
"asset": folder_path,
|
||||
"family": product_type,
|
||||
}
|
||||
imprint(f"{asset_dir}/{container_name}", data)
|
||||
|
||||
def load(self, context, name, namespace, options):
|
||||
"""Load and containerise representation into Content Browser.
|
||||
|
||||
Args:
|
||||
context (dict): application context
|
||||
name (str): Product name
|
||||
namespace (str): in Unreal this is basically path to container.
|
||||
This is not passed here, so namespace is set
|
||||
by `containerise()` because only then we know
|
||||
real path.
|
||||
data (dict): Those would be data to be imprinted.
|
||||
|
||||
Returns:
|
||||
list(str): list of container content
|
||||
"""
|
||||
# Create directory for asset and Ayon container
|
||||
folder_name = context["folder"]["name"]
|
||||
product_type = context["product"]["productType"]
|
||||
suffix = "_CON"
|
||||
asset_name = f"{folder_name}_{name}" if folder_name else f"{name}"
|
||||
version_entity = context["version"]
|
||||
# Check if version is hero version and use different name
|
||||
version = version_entity["version"]
|
||||
if version < 0:
|
||||
name_version = f"{name}_hero"
|
||||
else:
|
||||
name_version = f"{name}_v{version:03d}"
|
||||
|
||||
tools = unreal.AssetToolsHelpers().get_asset_tools()
|
||||
asset_dir, container_name = tools.create_unique_asset_name(
|
||||
f"{self.root}/{folder_name}/{name_version}", suffix=""
|
||||
)
|
||||
|
||||
container_name += suffix
|
||||
|
||||
if not unreal.EditorAssetLibrary.does_directory_exist(asset_dir):
|
||||
path = self.filepath_from_context(context)
|
||||
|
||||
self.import_and_containerize(
|
||||
path, asset_dir, asset_name, container_name)
|
||||
|
||||
self.imprint(
|
||||
folder_name,
|
||||
asset_dir,
|
||||
container_name,
|
||||
asset_name,
|
||||
context["representation"],
|
||||
product_type
|
||||
)
|
||||
|
||||
asset_content = unreal.EditorAssetLibrary.list_assets(
|
||||
asset_dir, recursive=True, include_folder=True
|
||||
)
|
||||
|
||||
for a in asset_content:
|
||||
unreal.EditorAssetLibrary.save_asset(a)
|
||||
|
||||
return asset_content
|
||||
|
||||
def update(self, container, context):
|
||||
folder_path = context["folder"]["path"]
|
||||
folder_name = context["folder"]["name"]
|
||||
product_name = context["product"]["name"]
|
||||
product_type = context["product"]["productType"]
|
||||
version = context["version"]["version"]
|
||||
repre_entity = context["representation"]
|
||||
|
||||
# Create directory for asset and Ayon container
|
||||
suffix = "_CON"
|
||||
asset_name = product_name
|
||||
if folder_name:
|
||||
asset_name = f"{folder_name}_{product_name}"
|
||||
# Check if version is hero version and use different name
|
||||
if version < 0:
|
||||
name_version = f"{product_name}_hero"
|
||||
else:
|
||||
name_version = f"{product_name}_v{version:03d}"
|
||||
tools = unreal.AssetToolsHelpers().get_asset_tools()
|
||||
asset_dir, container_name = tools.create_unique_asset_name(
|
||||
f"{self.root}/{folder_name}/{name_version}", suffix="")
|
||||
|
||||
container_name += suffix
|
||||
|
||||
if not unreal.EditorAssetLibrary.does_directory_exist(asset_dir):
|
||||
path = get_representation_path(repre_entity)
|
||||
|
||||
self.import_and_containerize(
|
||||
path, asset_dir, asset_name, container_name)
|
||||
|
||||
self.imprint(
|
||||
folder_path,
|
||||
asset_dir,
|
||||
container_name,
|
||||
asset_name,
|
||||
repre_entity,
|
||||
product_type
|
||||
)
|
||||
|
||||
asset_content = unreal.EditorAssetLibrary.list_assets(
|
||||
asset_dir, recursive=True, include_folder=False
|
||||
)
|
||||
|
||||
for a in asset_content:
|
||||
unreal.EditorAssetLibrary.save_asset(a)
|
||||
|
||||
def remove(self, container):
|
||||
path = container["namespace"]
|
||||
parent_path = os.path.dirname(path)
|
||||
|
||||
unreal.EditorAssetLibrary.delete_directory(path)
|
||||
|
||||
asset_content = unreal.EditorAssetLibrary.list_assets(
|
||||
parent_path, recursive=False
|
||||
)
|
||||
|
||||
if len(asset_content) == 0:
|
||||
unreal.EditorAssetLibrary.delete_directory(parent_path)
|
||||
|
|
@ -1,223 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""Loader for Static Mesh alembics."""
|
||||
import os
|
||||
|
||||
from ayon_core.pipeline import (
|
||||
get_representation_path,
|
||||
AYON_CONTAINER_ID
|
||||
)
|
||||
from ayon_unreal.api import plugin
|
||||
from ayon_unreal.api.pipeline import (
|
||||
AYON_ASSET_DIR,
|
||||
create_container,
|
||||
imprint,
|
||||
)
|
||||
import unreal # noqa
|
||||
|
||||
|
||||
class StaticMeshAlembicLoader(plugin.Loader):
|
||||
"""Load Unreal StaticMesh from Alembic"""
|
||||
|
||||
product_types = {"model", "staticMesh"}
|
||||
label = "Import Alembic Static Mesh"
|
||||
representations = {"abc"}
|
||||
icon = "cube"
|
||||
color = "orange"
|
||||
|
||||
root = AYON_ASSET_DIR
|
||||
|
||||
@staticmethod
|
||||
def get_task(filename, asset_dir, asset_name, replace, default_conversion):
|
||||
task = unreal.AssetImportTask()
|
||||
options = unreal.AbcImportSettings()
|
||||
sm_settings = unreal.AbcStaticMeshSettings()
|
||||
|
||||
task.set_editor_property('filename', filename)
|
||||
task.set_editor_property('destination_path', asset_dir)
|
||||
task.set_editor_property('destination_name', asset_name)
|
||||
task.set_editor_property('replace_existing', replace)
|
||||
task.set_editor_property('automated', True)
|
||||
task.set_editor_property('save', True)
|
||||
|
||||
# set import options here
|
||||
# Unreal 4.24 ignores the settings. It works with Unreal 4.26
|
||||
options.set_editor_property(
|
||||
'import_type', unreal.AlembicImportType.STATIC_MESH)
|
||||
|
||||
sm_settings.set_editor_property('merge_meshes', True)
|
||||
|
||||
if not default_conversion:
|
||||
conversion_settings = unreal.AbcConversionSettings(
|
||||
preset=unreal.AbcConversionPreset.CUSTOM,
|
||||
flip_u=False, flip_v=False,
|
||||
rotation=[0.0, 0.0, 0.0],
|
||||
scale=[1.0, 1.0, 1.0])
|
||||
options.conversion_settings = conversion_settings
|
||||
|
||||
options.static_mesh_settings = sm_settings
|
||||
task.options = options
|
||||
|
||||
return task
|
||||
|
||||
def import_and_containerize(
|
||||
self, filepath, asset_dir, asset_name, container_name,
|
||||
default_conversion=False
|
||||
):
|
||||
unreal.EditorAssetLibrary.make_directory(asset_dir)
|
||||
|
||||
task = self.get_task(
|
||||
filepath, asset_dir, asset_name, False, default_conversion)
|
||||
|
||||
unreal.AssetToolsHelpers.get_asset_tools().import_asset_tasks([task])
|
||||
|
||||
# Create Asset Container
|
||||
create_container(container=container_name, path=asset_dir)
|
||||
|
||||
def imprint(
|
||||
self,
|
||||
folder_path,
|
||||
asset_dir,
|
||||
container_name,
|
||||
asset_name,
|
||||
representation,
|
||||
product_type,
|
||||
):
|
||||
data = {
|
||||
"schema": "ayon:container-2.0",
|
||||
"id": AYON_CONTAINER_ID,
|
||||
"folder_path": folder_path,
|
||||
"namespace": asset_dir,
|
||||
"container_name": container_name,
|
||||
"asset_name": asset_name,
|
||||
"loader": str(self.__class__.__name__),
|
||||
"representation": representation["id"],
|
||||
"parent": representation["versionId"],
|
||||
"product_type": product_type,
|
||||
# TODO these should be probably removed
|
||||
"asset": folder_path,
|
||||
"family": product_type
|
||||
}
|
||||
imprint(f"{asset_dir}/{container_name}", data)
|
||||
|
||||
def load(self, context, name, namespace, options):
|
||||
"""Load and containerise representation into Content Browser.
|
||||
|
||||
Args:
|
||||
context (dict): application context
|
||||
name (str): Product name
|
||||
namespace (str): in Unreal this is basically path to container.
|
||||
This is not passed here, so namespace is set
|
||||
by `containerise()` because only then we know
|
||||
real path.
|
||||
options (dict): Those would be data to be imprinted.
|
||||
|
||||
Returns:
|
||||
list(str): list of container content
|
||||
"""
|
||||
# Create directory for asset and Ayon container
|
||||
folder_path = context["folder"]["path"]
|
||||
folder_name = context["folder"]["name"]
|
||||
|
||||
suffix = "_CON"
|
||||
asset_name = f"{folder_name}_{name}" if folder_name else f"{name}"
|
||||
version = context["version"]["version"]
|
||||
# Check if version is hero version and use different name
|
||||
if version < 0:
|
||||
name_version = f"{name}_hero"
|
||||
else:
|
||||
name_version = f"{name}_v{version:03d}"
|
||||
|
||||
default_conversion = False
|
||||
if options.get("default_conversion"):
|
||||
default_conversion = options.get("default_conversion")
|
||||
|
||||
tools = unreal.AssetToolsHelpers().get_asset_tools()
|
||||
asset_dir, container_name = tools.create_unique_asset_name(
|
||||
f"{self.root}/{folder_name}/{name_version}", suffix="")
|
||||
|
||||
container_name += suffix
|
||||
|
||||
if not unreal.EditorAssetLibrary.does_directory_exist(asset_dir):
|
||||
path = self.filepath_from_context(context)
|
||||
|
||||
self.import_and_containerize(path, asset_dir, asset_name,
|
||||
container_name, default_conversion)
|
||||
|
||||
product_type = context["product"]["productType"]
|
||||
self.imprint(
|
||||
folder_path,
|
||||
asset_dir,
|
||||
container_name,
|
||||
asset_name,
|
||||
context["representation"],
|
||||
product_type
|
||||
)
|
||||
|
||||
asset_content = unreal.EditorAssetLibrary.list_assets(
|
||||
asset_dir, recursive=True, include_folder=False
|
||||
)
|
||||
|
||||
for a in asset_content:
|
||||
unreal.EditorAssetLibrary.save_asset(a)
|
||||
|
||||
return asset_content
|
||||
|
||||
def update(self, container, context):
|
||||
folder_path = context["folder"]["path"]
|
||||
folder_name = context["folder"]["name"]
|
||||
product_name = context["product"]["name"]
|
||||
product_type = context["product"]["productType"]
|
||||
repre_entity = context["representation"]
|
||||
|
||||
# Create directory for asset and Ayon container
|
||||
suffix = "_CON"
|
||||
asset_name = product_name
|
||||
if folder_name:
|
||||
asset_name = f"{folder_name}_{product_name}"
|
||||
version = context["version"]["version"]
|
||||
# Check if version is hero version and use different name
|
||||
if version < 0:
|
||||
name_version = f"{product_name}_hero"
|
||||
else:
|
||||
name_version = f"{product_name}_v{version:03d}"
|
||||
|
||||
tools = unreal.AssetToolsHelpers().get_asset_tools()
|
||||
asset_dir, container_name = tools.create_unique_asset_name(
|
||||
f"{self.root}/{folder_name}/{name_version}", suffix="")
|
||||
|
||||
container_name += suffix
|
||||
|
||||
if not unreal.EditorAssetLibrary.does_directory_exist(asset_dir):
|
||||
path = get_representation_path(repre_entity)
|
||||
|
||||
self.import_and_containerize(path, asset_dir, asset_name,
|
||||
container_name)
|
||||
|
||||
self.imprint(
|
||||
folder_path,
|
||||
asset_dir,
|
||||
container_name,
|
||||
asset_name,
|
||||
repre_entity,
|
||||
product_type
|
||||
)
|
||||
|
||||
asset_content = unreal.EditorAssetLibrary.list_assets(
|
||||
asset_dir, recursive=True, include_folder=False
|
||||
)
|
||||
|
||||
for a in asset_content:
|
||||
unreal.EditorAssetLibrary.save_asset(a)
|
||||
|
||||
def remove(self, container):
|
||||
path = container["namespace"]
|
||||
parent_path = os.path.dirname(path)
|
||||
|
||||
unreal.EditorAssetLibrary.delete_directory(path)
|
||||
|
||||
asset_content = unreal.EditorAssetLibrary.list_assets(
|
||||
parent_path, recursive=False
|
||||
)
|
||||
|
||||
if len(asset_content) == 0:
|
||||
unreal.EditorAssetLibrary.delete_directory(parent_path)
|
||||
|
|
@ -1,209 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""Load Static meshes form FBX."""
|
||||
import os
|
||||
|
||||
from ayon_core.pipeline import (
|
||||
get_representation_path,
|
||||
AYON_CONTAINER_ID
|
||||
)
|
||||
from ayon_unreal.api import plugin
|
||||
from ayon_unreal.api.pipeline import (
|
||||
AYON_ASSET_DIR,
|
||||
create_container,
|
||||
imprint,
|
||||
)
|
||||
import unreal # noqa
|
||||
|
||||
|
||||
class StaticMeshFBXLoader(plugin.Loader):
|
||||
"""Load Unreal StaticMesh from FBX."""
|
||||
|
||||
product_types = {"model", "staticMesh"}
|
||||
label = "Import FBX Static Mesh"
|
||||
representations = {"fbx"}
|
||||
icon = "cube"
|
||||
color = "orange"
|
||||
|
||||
root = AYON_ASSET_DIR
|
||||
|
||||
@staticmethod
|
||||
def get_task(filename, asset_dir, asset_name, replace):
|
||||
task = unreal.AssetImportTask()
|
||||
options = unreal.FbxImportUI()
|
||||
import_data = unreal.FbxStaticMeshImportData()
|
||||
|
||||
task.set_editor_property('filename', filename)
|
||||
task.set_editor_property('destination_path', asset_dir)
|
||||
task.set_editor_property('destination_name', asset_name)
|
||||
task.set_editor_property('replace_existing', replace)
|
||||
task.set_editor_property('automated', True)
|
||||
task.set_editor_property('save', True)
|
||||
|
||||
# set import options here
|
||||
options.set_editor_property(
|
||||
'automated_import_should_detect_type', False)
|
||||
options.set_editor_property('import_animations', False)
|
||||
|
||||
import_data.set_editor_property('combine_meshes', True)
|
||||
import_data.set_editor_property('remove_degenerates', False)
|
||||
|
||||
options.static_mesh_import_data = import_data
|
||||
task.options = options
|
||||
|
||||
return task
|
||||
|
||||
def import_and_containerize(
|
||||
self, filepath, asset_dir, asset_name, container_name
|
||||
):
|
||||
unreal.EditorAssetLibrary.make_directory(asset_dir)
|
||||
|
||||
task = self.get_task(
|
||||
filepath, asset_dir, asset_name, False)
|
||||
|
||||
unreal.AssetToolsHelpers.get_asset_tools().import_asset_tasks([task])
|
||||
|
||||
# Create Asset Container
|
||||
create_container(container=container_name, path=asset_dir)
|
||||
|
||||
def imprint(
|
||||
self,
|
||||
folder_path,
|
||||
asset_dir,
|
||||
container_name,
|
||||
asset_name,
|
||||
repre_entity,
|
||||
product_type
|
||||
):
|
||||
data = {
|
||||
"schema": "ayon:container-2.0",
|
||||
"id": AYON_CONTAINER_ID,
|
||||
"namespace": asset_dir,
|
||||
"folder_path": folder_path,
|
||||
"container_name": container_name,
|
||||
"asset_name": asset_name,
|
||||
"loader": str(self.__class__.__name__),
|
||||
"representation": repre_entity["id"],
|
||||
"parent": repre_entity["versionId"],
|
||||
"product_type": product_type,
|
||||
# TODO these should be probably removed
|
||||
"asset": folder_path,
|
||||
"family": product_type,
|
||||
}
|
||||
imprint(f"{asset_dir}/{container_name}", data)
|
||||
|
||||
def load(self, context, name, namespace, options):
|
||||
"""Load and containerise representation into Content Browser.
|
||||
|
||||
Args:
|
||||
context (dict): application context
|
||||
name (str): Product name
|
||||
namespace (str): in Unreal this is basically path to container.
|
||||
This is not passed here, so namespace is set
|
||||
by `containerise()` because only then we know
|
||||
real path.
|
||||
options (dict): Those would be data to be imprinted.
|
||||
|
||||
Returns:
|
||||
list(str): list of container content
|
||||
"""
|
||||
# Create directory for asset and Ayon container
|
||||
folder_path = context["folder"]["path"]
|
||||
folder_name = context["folder"]["name"]
|
||||
suffix = "_CON"
|
||||
asset_name = f"{folder_name}_{name}" if folder_name else f"{name}"
|
||||
version = context["version"]["version"]
|
||||
# Check if version is hero version and use different name
|
||||
if version < 0:
|
||||
name_version = f"{name}_hero"
|
||||
else:
|
||||
name_version = f"{name}_v{version:03d}"
|
||||
|
||||
tools = unreal.AssetToolsHelpers().get_asset_tools()
|
||||
asset_dir, container_name = tools.create_unique_asset_name(
|
||||
f"{self.root}/{folder_name}/{name_version}", suffix=""
|
||||
)
|
||||
|
||||
container_name += suffix
|
||||
|
||||
if not unreal.EditorAssetLibrary.does_directory_exist(asset_dir):
|
||||
path = self.filepath_from_context(context)
|
||||
|
||||
self.import_and_containerize(
|
||||
path, asset_dir, asset_name, container_name)
|
||||
|
||||
self.imprint(
|
||||
folder_path,
|
||||
asset_dir,
|
||||
container_name,
|
||||
asset_name,
|
||||
context["representation"],
|
||||
context["product"]["productType"]
|
||||
)
|
||||
|
||||
asset_content = unreal.EditorAssetLibrary.list_assets(
|
||||
asset_dir, recursive=True, include_folder=True
|
||||
)
|
||||
|
||||
for a in asset_content:
|
||||
unreal.EditorAssetLibrary.save_asset(a)
|
||||
|
||||
return asset_content
|
||||
|
||||
def update(self, container, context):
|
||||
folder_path = context["folder"]["path"]
|
||||
folder_name = context["folder"]["name"]
|
||||
product_name = context["product"]["name"]
|
||||
product_type = context["product"]["productType"]
|
||||
version = context["version"]["version"]
|
||||
repre_entity = context["representation"]
|
||||
|
||||
# Create directory for asset and Ayon container
|
||||
suffix = "_CON"
|
||||
asset_name = product_name
|
||||
if folder_name:
|
||||
asset_name = f"{folder_name}_{product_name}"
|
||||
# Check if version is hero version and use different name
|
||||
if version < 0:
|
||||
name_version = f"{product_name}_hero"
|
||||
else:
|
||||
name_version = f"{product_name}_v{version:03d}"
|
||||
tools = unreal.AssetToolsHelpers().get_asset_tools()
|
||||
asset_dir, container_name = tools.create_unique_asset_name(
|
||||
f"{self.root}/{folder_name}/{name_version}", suffix="")
|
||||
|
||||
container_name += suffix
|
||||
|
||||
if not unreal.EditorAssetLibrary.does_directory_exist(asset_dir):
|
||||
path = get_representation_path(repre_entity)
|
||||
|
||||
self.import_and_containerize(
|
||||
path, asset_dir, asset_name, container_name)
|
||||
|
||||
self.imprint(
|
||||
folder_path,
|
||||
asset_dir,
|
||||
container_name,
|
||||
asset_name,
|
||||
repre_entity,
|
||||
product_type,
|
||||
)
|
||||
|
||||
asset_content = unreal.EditorAssetLibrary.list_assets(
|
||||
asset_dir, recursive=True, include_folder=False
|
||||
)
|
||||
|
||||
for a in asset_content:
|
||||
unreal.EditorAssetLibrary.save_asset(a)
|
||||
|
||||
def remove(self, container):
|
||||
path = container["namespace"]
|
||||
parent_path = os.path.dirname(path)
|
||||
|
||||
unreal.EditorAssetLibrary.delete_directory(path)
|
||||
|
||||
asset_content = unreal.EditorAssetLibrary.list_assets(
|
||||
parent_path, recursive=False
|
||||
)
|
||||
|
||||
if len(asset_content) == 0:
|
||||
unreal.EditorAssetLibrary.delete_directory(parent_path)
|
||||
|
|
@ -1,171 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""Load UAsset."""
|
||||
from pathlib import Path
|
||||
import shutil
|
||||
|
||||
from ayon_core.pipeline import (
|
||||
get_representation_path,
|
||||
AYON_CONTAINER_ID
|
||||
)
|
||||
from ayon_unreal.api import plugin
|
||||
from ayon_unreal.api import pipeline as unreal_pipeline
|
||||
import unreal # noqa
|
||||
|
||||
|
||||
class UAssetLoader(plugin.Loader):
|
||||
"""Load UAsset."""
|
||||
|
||||
product_types = {"uasset"}
|
||||
label = "Load UAsset"
|
||||
representations = {"uasset"}
|
||||
icon = "cube"
|
||||
color = "orange"
|
||||
|
||||
extension = "uasset"
|
||||
|
||||
def load(self, context, name, namespace, options):
|
||||
"""Load and containerise representation into Content Browser.
|
||||
|
||||
Args:
|
||||
context (dict): application context
|
||||
name (str): Product name
|
||||
namespace (str): in Unreal this is basically path to container.
|
||||
This is not passed here, so namespace is set
|
||||
by `containerise()` because only then we know
|
||||
real path.
|
||||
options (dict): Those would be data to be imprinted. This is not
|
||||
used now, data are imprinted by `containerise()`.
|
||||
|
||||
Returns:
|
||||
list(str): list of container content
|
||||
"""
|
||||
|
||||
# Create directory for asset and Ayon container
|
||||
root = unreal_pipeline.AYON_ASSET_DIR
|
||||
folder_path = context["folder"]["path"]
|
||||
folder_name = context["folder"]["name"]
|
||||
suffix = "_CON"
|
||||
asset_name = f"{folder_name}_{name}" if folder_name else f"{name}"
|
||||
tools = unreal.AssetToolsHelpers().get_asset_tools()
|
||||
asset_dir, container_name = tools.create_unique_asset_name(
|
||||
f"{root}/{folder_name}/{name}", suffix=""
|
||||
)
|
||||
|
||||
unique_number = 1
|
||||
while unreal.EditorAssetLibrary.does_directory_exist(
|
||||
f"{asset_dir}_{unique_number:02}"
|
||||
):
|
||||
unique_number += 1
|
||||
|
||||
asset_dir = f"{asset_dir}_{unique_number:02}"
|
||||
container_name = f"{container_name}_{unique_number:02}{suffix}"
|
||||
|
||||
unreal.EditorAssetLibrary.make_directory(asset_dir)
|
||||
|
||||
destination_path = asset_dir.replace(
|
||||
"/Game", Path(unreal.Paths.project_content_dir()).as_posix(), 1)
|
||||
|
||||
path = self.filepath_from_context(context)
|
||||
shutil.copy(
|
||||
path,
|
||||
f"{destination_path}/{name}_{unique_number:02}.{self.extension}")
|
||||
|
||||
# Create Asset Container
|
||||
unreal_pipeline.create_container(
|
||||
container=container_name, path=asset_dir)
|
||||
|
||||
product_type = context["product"]["productType"]
|
||||
data = {
|
||||
"schema": "ayon:container-2.0",
|
||||
"id": AYON_CONTAINER_ID,
|
||||
"namespace": asset_dir,
|
||||
"folder_path": folder_path,
|
||||
"container_name": container_name,
|
||||
"asset_name": asset_name,
|
||||
"loader": str(self.__class__.__name__),
|
||||
"representation": context["representation"]["id"],
|
||||
"parent": context["representation"]["versionId"],
|
||||
"product_type": product_type,
|
||||
# TODO these should be probably removed
|
||||
"asset": folder_path,
|
||||
"family": product_type,
|
||||
}
|
||||
unreal_pipeline.imprint(f"{asset_dir}/{container_name}", data)
|
||||
|
||||
asset_content = unreal.EditorAssetLibrary.list_assets(
|
||||
asset_dir, recursive=True, include_folder=True
|
||||
)
|
||||
|
||||
for a in asset_content:
|
||||
unreal.EditorAssetLibrary.save_asset(a)
|
||||
|
||||
return asset_content
|
||||
|
||||
def update(self, container, context):
|
||||
ar = unreal.AssetRegistryHelpers.get_asset_registry()
|
||||
|
||||
asset_dir = container["namespace"]
|
||||
|
||||
product_name = context["product"]["name"]
|
||||
repre_entity = context["representation"]
|
||||
|
||||
unique_number = container["container_name"].split("_")[-2]
|
||||
|
||||
destination_path = asset_dir.replace(
|
||||
"/Game", Path(unreal.Paths.project_content_dir()).as_posix(), 1)
|
||||
|
||||
asset_content = unreal.EditorAssetLibrary.list_assets(
|
||||
asset_dir, recursive=False, include_folder=True
|
||||
)
|
||||
|
||||
for asset in asset_content:
|
||||
obj = ar.get_asset_by_object_path(asset).get_asset()
|
||||
if obj.get_class().get_name() != "AyonAssetContainer":
|
||||
unreal.EditorAssetLibrary.delete_asset(asset)
|
||||
|
||||
update_filepath = get_representation_path(repre_entity)
|
||||
|
||||
shutil.copy(
|
||||
update_filepath,
|
||||
f"{destination_path}/{product_name}_{unique_number}.{self.extension}"
|
||||
)
|
||||
|
||||
container_path = f'{container["namespace"]}/{container["objectName"]}'
|
||||
# update metadata
|
||||
unreal_pipeline.imprint(
|
||||
container_path,
|
||||
{
|
||||
"representation": repre_entity["id"],
|
||||
"parent": repre_entity["versionId"],
|
||||
}
|
||||
)
|
||||
|
||||
asset_content = unreal.EditorAssetLibrary.list_assets(
|
||||
asset_dir, recursive=True, include_folder=True
|
||||
)
|
||||
|
||||
for a in asset_content:
|
||||
unreal.EditorAssetLibrary.save_asset(a)
|
||||
|
||||
def remove(self, container):
|
||||
path = container["namespace"]
|
||||
parent_path = Path(path).parent.as_posix()
|
||||
|
||||
unreal.EditorAssetLibrary.delete_directory(path)
|
||||
|
||||
asset_content = unreal.EditorAssetLibrary.list_assets(
|
||||
parent_path, recursive=False
|
||||
)
|
||||
|
||||
if len(asset_content) == 0:
|
||||
unreal.EditorAssetLibrary.delete_directory(parent_path)
|
||||
|
||||
|
||||
class UMapLoader(UAssetLoader):
|
||||
"""Load Level."""
|
||||
|
||||
product_types = {"uasset"}
|
||||
label = "Load Level"
|
||||
representations = {"umap"}
|
||||
|
||||
extension = "umap"
|
||||
|
|
@ -1,185 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""Loader for Yeti Cache."""
|
||||
import os
|
||||
import json
|
||||
|
||||
from ayon_core.pipeline import (
|
||||
get_representation_path,
|
||||
AYON_CONTAINER_ID
|
||||
)
|
||||
from ayon_unreal.api import plugin
|
||||
from ayon_unreal.api import pipeline as unreal_pipeline
|
||||
import unreal # noqa
|
||||
|
||||
|
||||
class YetiLoader(plugin.Loader):
|
||||
"""Load Yeti Cache"""
|
||||
|
||||
product_types = {"yeticacheUE"}
|
||||
label = "Import Yeti"
|
||||
representations = {"abc"}
|
||||
icon = "pagelines"
|
||||
color = "orange"
|
||||
|
||||
@staticmethod
|
||||
def get_task(filename, asset_dir, asset_name, replace):
|
||||
task = unreal.AssetImportTask()
|
||||
options = unreal.AbcImportSettings()
|
||||
|
||||
task.set_editor_property('filename', filename)
|
||||
task.set_editor_property('destination_path', asset_dir)
|
||||
task.set_editor_property('destination_name', asset_name)
|
||||
task.set_editor_property('replace_existing', replace)
|
||||
task.set_editor_property('automated', True)
|
||||
task.set_editor_property('save', True)
|
||||
|
||||
task.options = options
|
||||
|
||||
return task
|
||||
|
||||
@staticmethod
|
||||
def is_groom_module_active():
|
||||
"""
|
||||
Check if Groom plugin is active.
|
||||
|
||||
This is a workaround, because the Unreal Python API does not have
any method to check if a plugin is active.
|
||||
"""
|
||||
prj_file = unreal.Paths.get_project_file_path()
|
||||
|
||||
with open(prj_file, "r") as fp:
|
||||
data = json.load(fp)
|
||||
|
||||
plugins = data.get("Plugins")
|
||||
|
||||
if not plugins:
|
||||
return False
|
||||
|
||||
plugin_names = [p.get("Name") for p in plugins]
|
||||
|
||||
return "HairStrands" in plugin_names
|
||||
|
||||
def load(self, context, name, namespace, options):
|
||||
"""Load and containerise representation into Content Browser.
|
||||
|
||||
This is two step process. First, import FBX to temporary path and
|
||||
then call `containerise()` on it - this moves all content to new
|
||||
directory and then it will create AssetContainer there and imprint it
|
||||
with metadata. This will mark this path as container.
|
||||
|
||||
Args:
|
||||
context (dict): application context
|
||||
name (str): Product name
|
||||
namespace (str): in Unreal this is basically path to container.
|
||||
This is not passed here, so namespace is set
|
||||
by `containerise()` because only then we know
|
||||
real path.
|
||||
options (dict): Those would be data to be imprinted. This is not used
|
||||
now, data are imprinted by `containerise()`.
|
||||
|
||||
Returns:
|
||||
list(str): list of container content
|
||||
|
||||
"""
|
||||
# Check if Groom plugin is active
|
||||
if not self.is_groom_module_active():
|
||||
raise RuntimeError("Groom plugin is not activated.")
|
||||
|
||||
# Create directory for asset and Ayon container
|
||||
root = unreal_pipeline.AYON_ASSET_DIR
|
||||
folder_path = context["folder"]["path"]
|
||||
folder_name = context["folder"]["name"]
|
||||
suffix = "_CON"
|
||||
asset_name = f"{folder_name}_{name}" if folder_name else f"{name}"
|
||||
|
||||
tools = unreal.AssetToolsHelpers().get_asset_tools()
|
||||
asset_dir, container_name = tools.create_unique_asset_name(
|
||||
f"{root}/{folder_name}/{name}", suffix="")
|
||||
|
||||
unique_number = 1
|
||||
while unreal.EditorAssetLibrary.does_directory_exist(
|
||||
f"{asset_dir}_{unique_number:02}"
|
||||
):
|
||||
unique_number += 1
|
||||
|
||||
asset_dir = f"{asset_dir}_{unique_number:02}"
|
||||
container_name = f"{container_name}_{unique_number:02}{suffix}"
|
||||
|
||||
if not unreal.EditorAssetLibrary.does_directory_exist(asset_dir):
|
||||
unreal.EditorAssetLibrary.make_directory(asset_dir)
|
||||
|
||||
path = self.filepath_from_context(context)
|
||||
task = self.get_task(path, asset_dir, asset_name, False)
|
||||
|
||||
unreal.AssetToolsHelpers.get_asset_tools().import_asset_tasks([task]) # noqa: E501
|
||||
|
||||
# Create Asset Container
|
||||
unreal_pipeline.create_container(
|
||||
container=container_name, path=asset_dir)
|
||||
|
||||
product_type = context["product"]["productType"]
|
||||
data = {
|
||||
"schema": "ayon:container-2.0",
|
||||
"id": AYON_CONTAINER_ID,
|
||||
"namespace": asset_dir,
|
||||
"container_name": container_name,
|
||||
"folder_path": folder_path,
|
||||
"asset_name": asset_name,
|
||||
"loader": str(self.__class__.__name__),
|
||||
"representation": context["representation"]["id"],
|
||||
"parent": context["representation"]["versionId"],
|
||||
"product_type": product_type,
|
||||
# TODO these should be probably removed
|
||||
"asset": folder_path,
|
||||
"family": product_type,
|
||||
}
|
||||
unreal_pipeline.imprint(f"{asset_dir}/{container_name}", data)
|
||||
|
||||
asset_content = unreal.EditorAssetLibrary.list_assets(
|
||||
asset_dir, recursive=True, include_folder=True
|
||||
)
|
||||
|
||||
for a in asset_content:
|
||||
unreal.EditorAssetLibrary.save_asset(a)
|
||||
|
||||
return asset_content
|
||||
|
||||
def update(self, container, context):
|
||||
repre_entity = context["representation"]
|
||||
name = container["asset_name"]
|
||||
source_path = get_representation_path(repre_entity)
|
||||
destination_path = container["namespace"]
|
||||
|
||||
task = self.get_task(source_path, destination_path, name, True)
|
||||
|
||||
# import the Alembic cache and replace existing data
|
||||
unreal.AssetToolsHelpers.get_asset_tools().import_asset_tasks([task])
|
||||
|
||||
container_path = f'{container["namespace"]}/{container["objectName"]}'
|
||||
# update metadata
|
||||
unreal_pipeline.imprint(
|
||||
container_path,
|
||||
{
|
||||
"representation": repre_entity["id"],
|
||||
"parent": repre_entity["versionId"],
|
||||
})
|
||||
|
||||
asset_content = unreal.EditorAssetLibrary.list_assets(
|
||||
destination_path, recursive=True, include_folder=True
|
||||
)
|
||||
|
||||
for a in asset_content:
|
||||
unreal.EditorAssetLibrary.save_asset(a)
|
||||
|
||||
def remove(self, container):
|
||||
path = container["namespace"]
|
||||
parent_path = os.path.dirname(path)
|
||||
|
||||
unreal.EditorAssetLibrary.delete_directory(path)
|
||||
|
||||
asset_content = unreal.EditorAssetLibrary.list_assets(
|
||||
parent_path, recursive=False
|
||||
)
|
||||
|
||||
if len(asset_content) == 0:
|
||||
unreal.EditorAssetLibrary.delete_directory(parent_path)
|
||||
|
|
@ -1,20 +0,0 @@
# -*- coding: utf-8 -*-
"""Collect current project path."""
import unreal  # noqa
import pyblish.api


class CollectUnrealCurrentFile(pyblish.api.ContextPlugin):
    """Inject the current working file into context."""

    order = pyblish.api.CollectorOrder - 0.5
    label = "Unreal Current File"
    hosts = ['unreal']

    def process(self, context):
        """Inject the current working file."""
        current_file = unreal.Paths.get_project_file_path()
        context.data['currentFile'] = current_file

        assert current_file != '', "Current file is empty. " \
            "Save the file before continuing."
|
@ -1,46 +0,0 @@
|
|||
import unreal

import pyblish.api


class CollectInstanceMembers(pyblish.api.InstancePlugin):
    """
    Collect members of instance.

    This collector collects the assets for the families that support
    including them as External Data, and adds them to the instance
    as members.
    """

    order = pyblish.api.CollectorOrder + 0.1
    hosts = ["unreal"]
    families = ["camera", "look", "unrealStaticMesh", "uasset"]
    label = "Collect Instance Members"

    def process(self, instance):
        """Collect members of instance."""
        self.log.info("Collecting instance members")

        ar = unreal.AssetRegistryHelpers.get_asset_registry()

        inst_path = instance.data.get('instance_path')
        inst_name = inst_path.split('/')[-1]

        pub_instance = ar.get_asset_by_object_path(
            f"{inst_path}.{inst_name}").get_asset()

        if not pub_instance:
            self.log.error(f"{inst_path}.{inst_name}")
            raise RuntimeError(f"Instance {instance} not found.")

        if not pub_instance.get_editor_property("add_external_assets"):
            # No external assets in the instance
            return

        assets = pub_instance.get_editor_property('asset_data_external')

        members = [asset.get_path_name() for asset in assets]

        self.log.debug(f"Members: {members}")

        instance.data["members"] = members
|
@ -1,24 +0,0 @@
|
|||
import pyblish.api


class CollectRemoveMarked(pyblish.api.ContextPlugin):
    """Remove marked data

    Remove instances that have 'remove' in their instance.data

    """

    order = pyblish.api.CollectorOrder + 0.499
    label = 'Remove Marked Instances'

    def process(self, context):

        self.log.debug(context)
        # make ftrack publishable
        instances_to_remove = []
        for instance in context:
            if instance.data.get('remove'):
                instances_to_remove.append(instance)

        for instance in instances_to_remove:
            context.remove(instance)
|
@ -1,116 +0,0 @@
|
|||
from pathlib import Path
|
||||
|
||||
import unreal
|
||||
import pyblish.api
|
||||
|
||||
from ayon_core.pipeline import get_current_project_name
|
||||
from ayon_core.pipeline import Anatomy
|
||||
from ayon_unreal.api import pipeline
|
||||
|
||||
|
||||
class CollectRenderInstances(pyblish.api.InstancePlugin):
|
||||
""" This collector will try to find all the rendered frames.
|
||||
|
||||
"""
|
||||
order = pyblish.api.CollectorOrder
|
||||
hosts = ["unreal"]
|
||||
families = ["render"]
|
||||
label = "Collect Render Instances"
|
||||
|
||||
def process(self, instance):
|
||||
self.log.debug("Preparing Rendering Instances")
|
||||
|
||||
context = instance.context
|
||||
|
||||
data = instance.data
|
||||
data['remove'] = True
|
||||
|
||||
ar = unreal.AssetRegistryHelpers.get_asset_registry()
|
||||
|
||||
sequence = ar.get_asset_by_object_path(
|
||||
data.get('sequence')).get_asset()
|
||||
|
||||
sequences = [{
|
||||
"sequence": sequence,
|
||||
"output": data.get('output'),
|
||||
"frame_range": (
|
||||
data.get('frameStart'), data.get('frameEnd'))
|
||||
}]
|
||||
|
||||
for s in sequences:
|
||||
self.log.debug(f"Processing: {s.get('sequence').get_name()}")
|
||||
subscenes = pipeline.get_subsequences(s.get('sequence'))
|
||||
|
||||
if subscenes:
|
||||
for ss in subscenes:
|
||||
sequences.append({
|
||||
"sequence": ss.get_sequence(),
|
||||
"output": (f"{s.get('output')}/"
|
||||
f"{ss.get_sequence().get_name()}"),
|
||||
"frame_range": (
|
||||
ss.get_start_frame(), ss.get_end_frame() - 1)
|
||||
})
|
||||
else:
|
||||
# Avoid creating instances for camera sequences
|
||||
if "_camera" not in s.get('sequence').get_name():
|
||||
seq = s.get('sequence')
|
||||
seq_name = seq.get_name()
|
||||
|
||||
product_type = "render"
|
||||
new_product_name = f"{data.get('productName')}_{seq_name}"
|
||||
new_instance = context.create_instance(
|
||||
new_product_name
|
||||
)
|
||||
new_instance[:] = seq_name
|
||||
|
||||
new_data = new_instance.data
|
||||
|
||||
new_data["folderPath"] = f"/{s.get('output')}"
|
||||
new_data["setMembers"] = seq_name
|
||||
new_data["productName"] = new_product_name
|
||||
new_data["productType"] = product_type
|
||||
new_data["family"] = product_type
|
||||
new_data["families"] = [product_type, "review"]
|
||||
new_data["parent"] = data.get("parent")
|
||||
new_data["level"] = data.get("level")
|
||||
new_data["output"] = s.get('output')
|
||||
new_data["fps"] = seq.get_display_rate().numerator
|
||||
new_data["frameStart"] = int(s.get('frame_range')[0])
|
||||
new_data["frameEnd"] = int(s.get('frame_range')[1])
|
||||
new_data["sequence"] = seq.get_path_name()
|
||||
new_data["master_sequence"] = data["master_sequence"]
|
||||
new_data["master_level"] = data["master_level"]
|
||||
|
||||
self.log.debug(f"new instance data: {new_data}")
|
||||
|
||||
try:
|
||||
project = get_current_project_name()
|
||||
anatomy = Anatomy(project)
|
||||
root = anatomy.roots['renders']
|
||||
except Exception as e:
|
||||
raise Exception((
|
||||
"Could not find render root "
|
||||
"in anatomy settings.")) from e
|
||||
|
||||
render_dir = f"{root}/{project}/{s.get('output')}"
|
||||
render_path = Path(render_dir)
|
||||
|
||||
frames = []
|
||||
|
||||
for x in render_path.iterdir():
|
||||
if x.is_file() and x.suffix == '.png':
|
||||
frames.append(str(x.name))
|
||||
|
||||
if "representations" not in new_instance.data:
|
||||
new_instance.data["representations"] = []
|
||||
|
||||
repr = {
|
||||
'frameStart': instance.data["frameStart"],
|
||||
'frameEnd': instance.data["frameEnd"],
|
||||
'name': 'png',
|
||||
'ext': 'png',
|
||||
'files': frames,
|
||||
'stagingDir': render_dir,
|
||||
'tags': ['review']
|
||||
}
|
||||
new_instance.data["representations"].append(repr)
|
||||
|
|
@ -1,88 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
"""Extract camera from Unreal."""
import os

import unreal

from ayon_core.pipeline import publish
from ayon_unreal.api.pipeline import UNREAL_VERSION


class ExtractCamera(publish.Extractor):
    """Extract a camera."""

    label = "Extract Camera"
    hosts = ["unreal"]
    families = ["camera"]
    optional = True

    def process(self, instance):
        ar = unreal.AssetRegistryHelpers.get_asset_registry()

        # Define extract output file path
        staging_dir = self.staging_dir(instance)
        fbx_filename = "{}.fbx".format(instance.name)

        # Perform extraction
        self.log.info("Performing extraction..")

        # Check if the loaded level is the same as the instance's level
        if UNREAL_VERSION.major == 5:
            world = unreal.UnrealEditorSubsystem().get_editor_world()
        else:
            world = unreal.EditorLevelLibrary.get_editor_world()
        current_level = world.get_path_name()
        assert current_level == instance.data.get("level"), \
            "Wrong level loaded"

        for member in instance.data.get('members'):
            data = ar.get_asset_by_object_path(member)
            if UNREAL_VERSION.major == 5:
                is_level_sequence = (
                    data.asset_class_path.asset_name == "LevelSequence")
            else:
                is_level_sequence = (data.asset_class == "LevelSequence")

            if is_level_sequence:
                sequence = data.get_asset()
                if UNREAL_VERSION.major == 5 and UNREAL_VERSION.minor >= 1:
                    params = unreal.SequencerExportFBXParams(
                        world=world,
                        root_sequence=sequence,
                        sequence=sequence,
                        bindings=sequence.get_bindings(),
                        master_tracks=sequence.get_master_tracks(),
                        fbx_file_name=os.path.join(staging_dir, fbx_filename)
                    )
                    unreal.SequencerTools.export_level_sequence_fbx(params)
                elif UNREAL_VERSION.major == 4 and UNREAL_VERSION.minor == 26:
                    unreal.SequencerTools.export_fbx(
                        world,
                        sequence,
                        sequence.get_bindings(),
                        unreal.FbxExportOption(),
                        os.path.join(staging_dir, fbx_filename)
                    )
                else:
                    # Unreal 5.0 or 4.27
                    unreal.SequencerTools.export_level_sequence_fbx(
                        world,
                        sequence,
                        sequence.get_bindings(),
                        unreal.FbxExportOption(),
                        os.path.join(staging_dir, fbx_filename)
                    )

        if not os.path.isfile(os.path.join(staging_dir, fbx_filename)):
            raise RuntimeError("Failed to extract camera")

        if "representations" not in instance.data:
            instance.data["representations"] = []

        fbx_representation = {
            'name': 'fbx',
            'ext': 'fbx',
            'files': fbx_filename,
            "stagingDir": staging_dir,
        }
        instance.data["representations"].append(fbx_representation)
|
||||
|
|
@ -1,112 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
import os
|
||||
import json
|
||||
import math
|
||||
|
||||
import unreal
|
||||
from unreal import EditorLevelLibrary as ell
|
||||
from unreal import EditorAssetLibrary as eal
|
||||
import ayon_api
|
||||
|
||||
from ayon_core.pipeline import publish
|
||||
|
||||
|
||||
class ExtractLayout(publish.Extractor):
|
||||
"""Extract a layout."""
|
||||
|
||||
label = "Extract Layout"
|
||||
hosts = ["unreal"]
|
||||
families = ["layout"]
|
||||
optional = True
|
||||
|
||||
def process(self, instance):
|
||||
# Define extract output file path
|
||||
staging_dir = self.staging_dir(instance)
|
||||
|
||||
# Perform extraction
|
||||
self.log.info("Performing extraction..")
|
||||
|
||||
# Check if the loaded level is the same as the instance's level
|
||||
current_level = ell.get_editor_world().get_path_name()
|
||||
assert current_level == instance.data.get("level"), \
|
||||
"Wrong level loaded"
|
||||
|
||||
json_data = []
|
||||
project_name = instance.context.data["projectName"]
|
||||
|
||||
for member in instance[:]:
|
||||
actor = ell.get_actor_reference(member)
|
||||
mesh = None
|
||||
|
||||
# Check the type of mesh
|
||||
if actor.get_class().get_name() == 'SkeletalMeshActor':
|
||||
mesh = actor.skeletal_mesh_component.skeletal_mesh
|
||||
elif actor.get_class().get_name() == 'StaticMeshActor':
|
||||
mesh = actor.static_mesh_component.static_mesh
|
||||
|
||||
if mesh:
|
||||
# Search the reference to the Asset Container for the object
|
||||
path = unreal.Paths.get_path(mesh.get_path_name())
|
||||
filter = unreal.ARFilter(
|
||||
class_names=["AyonAssetContainer"], package_paths=[path])
|
||||
ar = unreal.AssetRegistryHelpers.get_asset_registry()
|
||||
try:
|
||||
asset_container = ar.get_assets(filter)[0].get_asset()
|
||||
except IndexError:
|
||||
self.log.error("AssetContainer not found.")
|
||||
return
|
||||
|
||||
parent_id = eal.get_metadata_tag(asset_container, "parent")
|
||||
family = eal.get_metadata_tag(asset_container, "family")
|
||||
|
||||
self.log.info("Parent: {}".format(parent_id))
|
||||
blend = ayon_api.get_representation_by_name(
|
||||
project_name, "blend", parent_id, fields={"id"}
|
||||
)
|
||||
blend_id = blend["id"]
|
||||
|
||||
json_element = {}
|
||||
json_element["reference"] = str(blend_id)
|
||||
json_element["family"] = family
|
||||
json_element["product_type"] = family
|
||||
json_element["instance_name"] = actor.get_name()
|
||||
json_element["asset_name"] = mesh.get_name()
|
||||
import_data = mesh.get_editor_property("asset_import_data")
|
||||
json_element["file_path"] = import_data.get_first_filename()
|
||||
transform = actor.get_actor_transform()
|
||||
|
||||
json_element["transform"] = {
|
||||
"translation": {
|
||||
"x": -transform.translation.x,
|
||||
"y": transform.translation.y,
|
||||
"z": transform.translation.z
|
||||
},
|
||||
"rotation": {
|
||||
"x": math.radians(transform.rotation.euler().x),
|
||||
"y": math.radians(transform.rotation.euler().y),
|
||||
"z": math.radians(180.0 - transform.rotation.euler().z)
|
||||
},
|
||||
"scale": {
|
||||
"x": transform.scale3d.x,
|
||||
"y": transform.scale3d.y,
|
||||
"z": transform.scale3d.z
|
||||
}
|
||||
}
|
||||
json_data.append(json_element)
|
||||
|
||||
json_filename = "{}.json".format(instance.name)
|
||||
json_path = os.path.join(staging_dir, json_filename)
|
||||
|
||||
with open(json_path, "w+") as file:
|
||||
json.dump(json_data, fp=file, indent=2)
|
||||
|
||||
if "representations" not in instance.data:
|
||||
instance.data["representations"] = []
|
||||
|
||||
json_representation = {
|
||||
'name': 'json',
|
||||
'ext': 'json',
|
||||
'files': json_filename,
|
||||
"stagingDir": staging_dir,
|
||||
}
|
||||
instance.data["representations"].append(json_representation)
|
||||
|
|
@ -1,121 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
import json
|
||||
import os
|
||||
|
||||
import unreal
|
||||
from unreal import MaterialEditingLibrary as mat_lib
|
||||
|
||||
from ayon_core.pipeline import publish
|
||||
|
||||
|
||||
class ExtractLook(publish.Extractor):
|
||||
"""Extract look."""
|
||||
|
||||
label = "Extract Look"
|
||||
hosts = ["unreal"]
|
||||
families = ["look"]
|
||||
optional = True
|
||||
|
||||
def process(self, instance):
|
||||
# Define extract output file path
|
||||
staging_dir = self.staging_dir(instance)
|
||||
resources_dir = instance.data["resourcesDir"]
|
||||
|
||||
ar = unreal.AssetRegistryHelpers.get_asset_registry()
|
||||
|
||||
transfers = []
|
||||
|
||||
json_data = []
|
||||
|
||||
for member in instance:
|
||||
asset = ar.get_asset_by_object_path(member)
|
||||
obj = asset.get_asset()
|
||||
|
||||
name = asset.get_editor_property('asset_name')
|
||||
|
||||
json_element = {'material': str(name)}
|
||||
|
||||
material_obj = obj.get_editor_property('static_materials')[0]
|
||||
material = material_obj.material_interface
|
||||
|
||||
base_color = mat_lib.get_material_property_input_node(
|
||||
material, unreal.MaterialProperty.MP_BASE_COLOR)
|
||||
|
||||
base_color_name = base_color.get_editor_property('parameter_name')
|
||||
|
||||
texture = mat_lib.get_material_default_texture_parameter_value(
|
||||
material, base_color_name)
|
||||
|
||||
if texture:
|
||||
# Export Texture
|
||||
tga_filename = f"{instance.name}_{name}_texture.tga"
|
||||
|
||||
tga_exporter = unreal.TextureExporterTGA()
|
||||
|
||||
tga_export_task = unreal.AssetExportTask()
|
||||
|
||||
tga_export_task.set_editor_property('exporter', tga_exporter)
|
||||
tga_export_task.set_editor_property('automated', True)
|
||||
tga_export_task.set_editor_property('object', texture)
|
||||
tga_export_task.set_editor_property(
|
||||
'filename', f"{staging_dir}/{tga_filename}")
|
||||
tga_export_task.set_editor_property('prompt', False)
|
||||
tga_export_task.set_editor_property('selected', False)
|
||||
|
||||
unreal.Exporter.run_asset_export_task(tga_export_task)
|
||||
|
||||
json_element['tga_filename'] = tga_filename
|
||||
|
||||
transfers.append((
|
||||
f"{staging_dir}/{tga_filename}",
|
||||
f"{resources_dir}/{tga_filename}"))
|
||||
|
||||
fbx_filename = f"{instance.name}_{name}.fbx"
|
||||
|
||||
fbx_exporter = unreal.StaticMeshExporterFBX()
|
||||
fbx_exporter.set_editor_property('text', False)
|
||||
|
||||
options = unreal.FbxExportOption()
|
||||
options.set_editor_property('ascii', False)
|
||||
options.set_editor_property('collision', False)
|
||||
|
||||
task = unreal.AssetExportTask()
|
||||
task.set_editor_property('exporter', fbx_exporter)
|
||||
task.set_editor_property('options', options)
|
||||
task.set_editor_property('automated', True)
|
||||
task.set_editor_property('object', obj)
|
||||
task.set_editor_property(
|
||||
'filename', f"{staging_dir}/{fbx_filename}")
|
||||
task.set_editor_property('prompt', False)
|
||||
task.set_editor_property('selected', False)
|
||||
|
||||
unreal.Exporter.run_asset_export_task(task)
|
||||
|
||||
json_element['fbx_filename'] = fbx_filename
|
||||
|
||||
transfers.append((
|
||||
f"{staging_dir}/{fbx_filename}",
|
||||
f"{resources_dir}/{fbx_filename}"))
|
||||
|
||||
json_data.append(json_element)
|
||||
|
||||
json_filename = f"{instance.name}.json"
|
||||
json_path = os.path.join(staging_dir, json_filename)
|
||||
|
||||
with open(json_path, "w+") as file:
|
||||
json.dump(json_data, fp=file, indent=2)
|
||||
|
||||
if "transfers" not in instance.data:
|
||||
instance.data["transfers"] = []
|
||||
if "representations" not in instance.data:
|
||||
instance.data["representations"] = []
|
||||
|
||||
json_representation = {
|
||||
'name': 'json',
|
||||
'ext': 'json',
|
||||
'files': json_filename,
|
||||
"stagingDir": staging_dir,
|
||||
}
|
||||
|
||||
instance.data["representations"].append(json_representation)
|
||||
instance.data["transfers"].extend(transfers)
|
||||
|
|
@ -1,50 +0,0 @@
|
|||
from pathlib import Path
import shutil

import unreal

from ayon_core.pipeline import publish


class ExtractUAsset(publish.Extractor):
    """Extract a UAsset."""

    label = "Extract UAsset"
    hosts = ["unreal"]
    families = ["uasset", "umap"]
    optional = True

    def process(self, instance):
        extension = (
            "umap" if "umap" in instance.data.get("families") else "uasset")
        ar = unreal.AssetRegistryHelpers.get_asset_registry()

        self.log.debug("Performing extraction..")
        staging_dir = self.staging_dir(instance)

        members = instance.data.get("members", [])

        if not members:
            raise RuntimeError("No members found in instance.")

        # UAsset publishing supports only one member
        obj = members[0]

        asset = ar.get_asset_by_object_path(obj).get_asset()
        sys_path = unreal.SystemLibrary.get_system_path(asset)
        filename = Path(sys_path).name

        shutil.copy(sys_path, staging_dir)

        self.log.info(f"instance.data: {instance.data}")

        if "representations" not in instance.data:
            instance.data["representations"] = []

        representation = {
            "name": extension,
            "ext": extension,
            "files": filename,
            "stagingDir": staging_dir,
        }
        instance.data["representations"].append(representation)
|
||||
|
|
@ -1,41 +0,0 @@
|
|||
import unreal

import pyblish.api


class ValidateNoDependencies(pyblish.api.InstancePlugin):
    """Ensure that the uasset has no dependencies

    The uasset is checked for dependencies. If there are any, the instance
    cannot be published.
    """

    order = pyblish.api.ValidatorOrder
    label = "Check no dependencies"
    families = ["uasset"]
    hosts = ["unreal"]
    optional = True

    def process(self, instance):
        ar = unreal.AssetRegistryHelpers.get_asset_registry()
        all_dependencies = []

        for obj in instance[:]:
            asset = ar.get_asset_by_object_path(obj)
            dependencies = ar.get_dependencies(
                asset.package_name,
                unreal.AssetRegistryDependencyOptions(
                    include_soft_package_references=False,
                    include_hard_package_references=True,
                    include_searchable_names=False,
                    include_soft_management_references=False,
                    include_hard_management_references=False
                ))
            if dependencies:
                for dep in dependencies:
                    if str(dep).startswith("/Game/"):
                        all_dependencies.append(str(dep))

        if all_dependencies:
            raise RuntimeError(
                f"Dependencies found: {all_dependencies}")
|
||||
|
|
@ -1,83 +0,0 @@
|
|||
import clique
import os
import re

import pyblish.api
from ayon_core.pipeline.publish import PublishValidationError


class ValidateSequenceFrames(pyblish.api.InstancePlugin):
    """Ensure the sequence of frames is complete

    The files found in the folder are checked against the frameStart and
    frameEnd of the instance. If the first or last file is not
    corresponding with the first or last frame it is flagged as invalid.
    """

    order = pyblish.api.ValidatorOrder
    label = "Validate Sequence Frames"
    families = ["render"]
    hosts = ["unreal"]
    optional = True

    def process(self, instance):
        representations = instance.data.get("representations")
        folder_attributes = (
            instance.data
            .get("folderEntity", {})
            .get("attrib", {})
        )
        for repr in representations:
            repr_files = repr["files"]
            if isinstance(repr_files, str):
                continue

            ext = repr.get("ext")
            if not ext:
                _, ext = os.path.splitext(repr_files[0])
            elif not ext.startswith("."):
                ext = ".{}".format(ext)
            pattern = r"\D?(?P<index>(?P<padding>0*)\d+){}$".format(
                re.escape(ext))
            patterns = [pattern]

            collections, remainder = clique.assemble(
                repr["files"], minimum_items=1, patterns=patterns)

            if remainder:
                raise PublishValidationError(
                    "Some files have been found outside a sequence. "
                    f"Invalid files: {remainder}")
            if not collections:
                raise PublishValidationError(
                    "We have been unable to find a sequence in the "
                    "files. Please ensure the files are named "
                    "appropriately. "
                    f"Files: {repr_files}")
            if len(collections) > 1:
                raise PublishValidationError(
                    "Multiple collections detected. There should be a single "
                    "collection per representation. "
                    f"Collections identified: {collections}")

            collection = collections[0]
            frames = list(collection.indexes)

            if instance.data.get("slate"):
                # Slate is not part of the frame range
                frames = frames[1:]

            current_range = (frames[0], frames[-1])
            required_range = (folder_attributes["clipIn"],
                              folder_attributes["clipOut"])

            if current_range != required_range:
                raise PublishValidationError(
                    f"Invalid frame range: {current_range} - "
                    f"expected: {required_range}")

            missing = collection.holes().indexes
            if missing:
                raise PublishValidationError(
                    "Missing frames have been detected. "
                    f"Missing frames: {missing}")
|
||||
|
|
@ -1,434 +0,0 @@
|
|||
import json
|
||||
import os
|
||||
import platform
|
||||
import re
|
||||
import subprocess
|
||||
import tempfile
|
||||
from distutils import dir_util
|
||||
from distutils.dir_util import copy_tree
|
||||
from pathlib import Path
|
||||
from typing import List, Union
|
||||
|
||||
from qtpy import QtCore
|
||||
|
||||
import ayon_unreal.lib as ue_lib
|
||||
from ayon_core.settings import get_project_settings
|
||||
|
||||
|
||||
def parse_comp_progress(line: str, progress_signal: QtCore.Signal(int)):
|
||||
match = re.search(r"\[[1-9]+/[0-9]+]", line)
|
||||
if match is not None:
|
||||
split: list[str] = match.group().split("/")
|
||||
curr: float = float(split[0][1:])
|
||||
total: float = float(split[1][:-1])
|
||||
progress_signal.emit(int((curr / total) * 100.0))
|
||||
|
||||
|
||||
def parse_prj_progress(line: str, progress_signal: QtCore.Signal(int)):
|
||||
match = re.search("@progress", line)
|
||||
if match is not None:
|
||||
percent_match = re.search(r"\d{1,3}", line)
|
||||
progress_signal.emit(int(percent_match.group()))
|
||||
|
||||
|
||||
def retrieve_exit_code(line: str):
|
||||
match = re.search(r"ExitCode=\d+", line)
|
||||
if match is not None:
|
||||
split: list[str] = match.group().split("=")
|
||||
return int(split[1])
|
||||
|
||||
return None
|
||||
|
||||
|
||||
class UEWorker(QtCore.QObject):
|
||||
finished = QtCore.Signal(str)
|
||||
failed = QtCore.Signal(str, int)
|
||||
progress = QtCore.Signal(int)
|
||||
log = QtCore.Signal(str)
|
||||
|
||||
engine_path: Path = None
|
||||
env = None
|
||||
|
||||
def execute(self):
|
||||
raise NotImplementedError("Please implement this method!")
|
||||
|
||||
def run(self):
|
||||
try:
|
||||
self.execute()
|
||||
except Exception as e:
|
||||
import traceback
|
||||
self.log.emit(str(e))
|
||||
self.log.emit(traceback.format_exc())
|
||||
self.failed.emit(str(e), 1)
|
||||
raise e
|
||||
|
||||
|
||||
class UEProjectGenerationWorker(UEWorker):
|
||||
stage_begin = QtCore.Signal(str)
|
||||
|
||||
ue_version: str = None
|
||||
project_name: str = None
|
||||
project_dir: Path = None
|
||||
dev_mode = False
|
||||
|
||||
def setup(self, ue_version: str,
|
||||
project_name: str,
|
||||
unreal_project_name,
|
||||
engine_path: Path,
|
||||
project_dir: Path,
|
||||
dev_mode: bool = False,
|
||||
env: dict = None):
|
||||
"""Set the worker with necessary parameters.
|
||||
|
||||
Args:
|
||||
ue_version (str): Unreal Engine version.
|
||||
project_name (str): Name of the project in AYON.
|
||||
unreal_project_name (str): Name of the project in Unreal.
|
||||
engine_path (Path): Path to the Unreal Engine.
|
||||
project_dir (Path): Path to the project directory.
|
||||
dev_mode (bool, optional): Whether to run the project in dev mode.
|
||||
Defaults to False.
|
||||
env (dict, optional): Environment variables. Defaults to None.
|
||||
|
||||
"""
|
||||
|
||||
self.ue_version = ue_version
|
||||
self.project_dir = project_dir
|
||||
self.env = env or os.environ
|
||||
|
||||
preset = get_project_settings(project_name)["unreal"]["project_setup"]
|
||||
|
||||
if dev_mode or preset["dev_mode"]:
|
||||
self.dev_mode = True
|
||||
|
||||
self.project_name = unreal_project_name
|
||||
self.engine_path = engine_path
|
||||
|
||||
def execute(self):
|
||||
# engine_path should be the location of UE_X.X folder
|
||||
|
||||
ue_editor_exe = ue_lib.get_editor_exe_path(self.engine_path,
|
||||
self.ue_version)
|
||||
cmdlet_project = ue_lib.get_path_to_cmdlet_project(self.ue_version)
|
||||
project_file = self.project_dir / f"{self.project_name}.uproject"
|
||||
|
||||
print("--- Generating a new project ...")
|
||||
# 1st stage
|
||||
stage_count = 2
|
||||
if self.dev_mode:
|
||||
stage_count = 4
|
||||
|
||||
self.stage_begin.emit(
|
||||
("Generating a new UE project ... 1 out of "
|
||||
f"{stage_count}"))
|
||||
|
||||
# Need to copy the commandlet project to a temporary folder where
|
||||
# users don't need admin rights to write to.
|
||||
cmdlet_tmp = tempfile.TemporaryDirectory()
|
||||
cmdlet_filename = cmdlet_project.name
|
||||
cmdlet_dir = cmdlet_project.parent.as_posix()
|
||||
cmdlet_tmp_name = Path(cmdlet_tmp.name)
|
||||
cmdlet_tmp_file = cmdlet_tmp_name.joinpath(cmdlet_filename)
|
||||
copy_tree(
|
||||
cmdlet_dir,
|
||||
cmdlet_tmp_name.as_posix())
|
||||
|
||||
commandlet_cmd = [
|
||||
f"{ue_editor_exe.as_posix()}",
|
||||
f"{cmdlet_tmp_file.as_posix()}",
|
||||
"-run=AyonGenerateProject",
|
||||
f"{project_file.resolve().as_posix()}",
|
||||
]
|
||||
|
||||
if self.dev_mode:
|
||||
commandlet_cmd.append("-GenerateCode")
|
||||
|
||||
gen_process = subprocess.Popen(commandlet_cmd,
|
||||
stdout=subprocess.PIPE,
|
||||
stderr=subprocess.PIPE)
|
||||
|
||||
for line in gen_process.stdout:
|
||||
decoded_line = line.decode(errors="replace")
|
||||
print(decoded_line, end="")
|
||||
self.log.emit(decoded_line)
|
||||
gen_process.stdout.close()
|
||||
return_code = gen_process.wait()
|
||||
|
||||
cmdlet_tmp.cleanup()
|
||||
|
||||
if return_code and return_code != 0:
|
||||
msg = (
|
||||
f"Failed to generate {self.project_name} "
|
||||
f"project! Exited with return code {return_code}"
|
||||
)
|
||||
self.failed.emit(msg, return_code)
|
||||
raise RuntimeError(msg)
|
||||
|
||||
print("--- Project has been generated successfully.")
|
||||
self.stage_begin.emit(
|
||||
(f"Writing the Engine ID of the build UE ... 1"
|
||||
f" out of {stage_count}"))
|
||||
|
||||
if not project_file.is_file():
|
||||
msg = ("Failed to write the Engine ID into .uproject file! Can "
|
||||
"not read!")
|
||||
self.failed.emit(msg)
|
||||
raise RuntimeError(msg)
|
||||
|
||||
with open(project_file.as_posix(), mode="r+") as pf:
|
||||
pf_json = json.load(pf)
|
||||
pf_json["EngineAssociation"] = ue_lib.get_build_id(
|
||||
self.engine_path,
|
||||
self.ue_version
|
||||
)
|
||||
print(pf_json["EngineAssociation"])
|
||||
pf.seek(0)
|
||||
json.dump(pf_json, pf, indent=4)
|
||||
pf.truncate()
|
||||
print("--- Engine ID has been written into the project file")
|
||||
|
||||
self.progress.emit(90)
|
||||
if self.dev_mode:
|
||||
# 2nd stage
|
||||
self.stage_begin.emit(
|
||||
(f"Generating project files ... 2 out of "
|
||||
f"{stage_count}"))
|
||||
|
||||
self.progress.emit(0)
|
||||
ubt_path = ue_lib.get_path_to_ubt(self.engine_path,
|
||||
self.ue_version)
|
||||
|
||||
arch = "Win64"
|
||||
if platform.system().lower() == "windows":
|
||||
arch = "Win64"
|
||||
elif platform.system().lower() == "linux":
|
||||
arch = "Linux"
|
||||
elif platform.system().lower() == "darwin":
|
||||
# we need to test this out
|
||||
arch = "Mac"
|
||||
|
||||
gen_prj_files_cmd = [ubt_path.as_posix(),
|
||||
"-projectfiles",
|
||||
f"-project={project_file}",
|
||||
"-progress"]
|
||||
gen_proc = subprocess.Popen(gen_prj_files_cmd,
|
||||
stdout=subprocess.PIPE,
|
||||
stderr=subprocess.PIPE)
|
||||
for line in gen_proc.stdout:
|
||||
decoded_line: str = line.decode(errors="replace")
|
||||
print(decoded_line, end="")
|
||||
self.log.emit(decoded_line)
|
||||
parse_prj_progress(decoded_line, self.progress)
|
||||
|
||||
gen_proc.stdout.close()
|
||||
return_code = gen_proc.wait()
|
||||
|
||||
if return_code and return_code != 0:
|
||||
msg = ("Failed to generate project files! "
|
||||
f"Exited with return code {return_code}")
|
||||
self.failed.emit(msg, return_code)
|
||||
raise RuntimeError(msg)
|
||||
|
||||
self.stage_begin.emit(
|
||||
f"Building the project ... 3 out of {stage_count}")
|
||||
self.progress.emit(0)
|
||||
# 3rd stage
|
||||
build_prj_cmd = [ubt_path.as_posix(),
|
||||
f"-ModuleWithSuffix={self.project_name},3555",
|
||||
arch,
|
||||
"Development",
|
||||
"-TargetType=Editor",
|
||||
f"-Project={project_file}",
|
||||
f"{project_file}",
|
||||
"-IgnoreJunk"]
|
||||
|
||||
build_prj_proc = subprocess.Popen(build_prj_cmd,
|
||||
stdout=subprocess.PIPE,
|
||||
stderr=subprocess.PIPE)
|
||||
for line in build_prj_proc.stdout:
|
||||
decoded_line: str = line.decode(errors="replace")
|
||||
print(decoded_line, end="")
|
||||
self.log.emit(decoded_line)
|
||||
parse_comp_progress(decoded_line, self.progress)
|
||||
|
||||
build_prj_proc.stdout.close()
|
||||
return_code = build_prj_proc.wait()
|
||||
|
||||
if return_code and return_code != 0:
|
||||
msg = ("Failed to build project! "
|
||||
f"Exited with return code {return_code}")
|
||||
self.failed.emit(msg, return_code)
|
||||
raise RuntimeError(msg)
|
||||
|
||||
# ensure we have PySide2/6 installed in engine
|
||||
|
||||
self.progress.emit(0)
|
||||
self.stage_begin.emit(
|
||||
(f"Checking Qt bindings installation... {stage_count} "
|
||||
f" out of {stage_count}"))
|
||||
python_path = None
|
||||
if platform.system().lower() == "windows":
|
||||
python_path = self.engine_path / ("Engine/Binaries/ThirdParty/"
|
||||
"Python3/Win64/python.exe")
|
||||
|
||||
if platform.system().lower() == "linux":
|
||||
python_path = self.engine_path / ("Engine/Binaries/ThirdParty/"
|
||||
"Python3/Linux/bin/python3")
|
||||
|
||||
if platform.system().lower() == "darwin":
|
||||
python_path = self.engine_path / ("Engine/Binaries/ThirdParty/"
|
||||
"Python3/Mac/bin/python3")
|
||||
|
||||
if not python_path:
|
||||
msg = "Unsupported platform"
|
||||
self.failed.emit(msg, 1)
|
||||
raise NotImplementedError(msg)
|
||||
if not python_path.exists():
|
||||
msg = f"Unreal Python not found at {python_path}"
|
||||
self.failed.emit(msg, 1)
|
||||
raise RuntimeError(msg)
|
||||
|
||||
pyside_version = "PySide2"
|
||||
ue_version = self.ue_version.split(".")
|
||||
if int(ue_version[0]) == 5 and int(ue_version[1]) >= 4:
|
||||
# Use PySide6 6.6.3 because 6.7.0 had a bug
|
||||
# - 'QPushButton' can't be added to 'QBoxLayout'
|
||||
pyside_version = "PySide6==6.6.3"
|
||||
|
||||
site_packages_prefix = python_path.parent.as_posix()
|
||||
|
||||
pyside_cmd = [
|
||||
python_path.as_posix(),
|
||||
"-m", "pip",
|
||||
"install",
|
||||
"--ignore-installed",
|
||||
pyside_version,
|
||||
|
||||
]
|
||||
|
||||
if platform.system().lower() == "windows":
|
||||
pyside_cmd += ["--target", site_packages_prefix]
|
||||
|
||||
print(f"--- Installing {pyside_version} ...")
|
||||
print(" ".join(pyside_cmd))
|
||||
|
||||
pyside_install = subprocess.Popen(pyside_cmd,
|
||||
stdout=subprocess.PIPE,
|
||||
stderr=subprocess.PIPE)
|
||||
|
||||
for line in pyside_install.stdout:
|
||||
decoded_line: str = line.decode(errors="replace")
|
||||
print(decoded_line, end="")
|
||||
self.log.emit(decoded_line)
|
||||
|
||||
pyside_install.stdout.close()
|
||||
return_code = pyside_install.wait()
|
||||
|
||||
if return_code and return_code != 0:
|
||||
msg = (f"Failed to create the project! {return_code} "
|
||||
f"The installation of {pyside_version} has failed!: {pyside_install}")
|
||||
self.failed.emit(msg, return_code)
|
||||
raise RuntimeError(msg)
|
||||
|
||||
self.progress.emit(100)
|
||||
self.finished.emit("Project successfully built!")
|
||||
|
||||
|
||||
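# For reference, the EngineAssociation update in execute() above is a plain
# in-place JSON rewrite of the .uproject file. A minimal standalone sketch of
# that read-modify-write pattern (illustrative only; the helper name and its
# arguments are hypothetical and not part of this module):
def _example_set_engine_association(uproject_path, build_id):
    """Illustrative sketch: set "EngineAssociation" in a .uproject file."""
    import json

    with open(uproject_path, mode="r+") as pf:
        data = json.load(pf)
        data["EngineAssociation"] = build_id
        pf.seek(0)                     # rewind before rewriting the content
        json.dump(data, pf, indent=4)
        pf.truncate()                  # drop leftover bytes of the old file

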
class UEPluginInstallWorker(UEWorker):
    installing = QtCore.Signal(str)

    def setup(self, engine_path: Path, env: dict = None, ):
        self.engine_path = engine_path
        self.env = env or os.environ

    def _build_and_move_plugin(self, plugin_build_path: Path):
        uat_path: Path = ue_lib.get_path_to_uat(self.engine_path)
        src_plugin_dir = Path(self.env.get("AYON_UNREAL_PLUGIN", ""))

        if not os.path.isdir(src_plugin_dir):
            msg = "Path to the integration plugin is null!"
            self.failed.emit(msg, 1)
            raise RuntimeError(msg)

        if not uat_path.is_file():
            msg = "Building failed! Path to UAT is invalid!"
            self.failed.emit(msg, 1)
            raise RuntimeError(msg)

        temp_dir: Path = src_plugin_dir.parent / "Temp"
        temp_dir.mkdir(exist_ok=True)
        uplugin_path: Path = src_plugin_dir / "Ayon.uplugin"

        # in order to successfully build the plugin,
        # It must be built outside the Engine directory and then moved
        build_plugin_cmd: List[str] = [f"{uat_path.as_posix()}",
                                       "BuildPlugin",
                                       f"-Plugin={uplugin_path.as_posix()}",
                                       f"-Package={temp_dir.as_posix()}"]

        build_proc = subprocess.Popen(build_plugin_cmd,
                                      stdout=subprocess.PIPE,
                                      stderr=subprocess.PIPE)
        return_code: Union[None, int] = None
        for line in build_proc.stdout:
            decoded_line: str = line.decode(errors="replace")
            print(decoded_line, end="")
            self.log.emit(decoded_line)
            if return_code is None:
                return_code = retrieve_exit_code(decoded_line)
            parse_comp_progress(decoded_line, self.progress)

        build_proc.stdout.close()
        build_proc.wait()

        if return_code and return_code != 0:
            msg = ("Failed to build plugin"
                   f" project! Exited with return code {return_code}")
            dir_util.remove_tree(temp_dir.as_posix())
            self.failed.emit(msg, return_code)
            raise RuntimeError(msg)

        # Copy the contents of the 'Temp' dir into the
        # 'Ayon' directory in the engine
        dir_util.copy_tree(temp_dir.as_posix(),
                           plugin_build_path.as_posix())

        # We need to also copy the config folder.
        # The UAT doesn't include the Config folder in the build
        plugin_install_config_path: Path = plugin_build_path / "Config"
        src_plugin_config_path = src_plugin_dir / "Config"

        dir_util.copy_tree(src_plugin_config_path.as_posix(),
                           plugin_install_config_path.as_posix())

        dir_util.remove_tree(temp_dir.as_posix())

    def execute(self):
        src_plugin_dir = Path(self.env.get("AYON_UNREAL_PLUGIN", ""))

        if not os.path.isdir(src_plugin_dir):
            msg = "Path to the integration plugin is null!"
            self.failed.emit(msg, 1)
            raise RuntimeError(msg)

        # Create a path to the plugin in the engine
        op_plugin_path = self.engine_path / "Engine/Plugins/Marketplace" \
                                            "/Ayon"

        if not op_plugin_path.is_dir():
            self.installing.emit("Installing and building the plugin ...")
            op_plugin_path.mkdir(parents=True, exist_ok=True)

            engine_plugin_config_path = op_plugin_path / "Config"
            engine_plugin_config_path.mkdir(exist_ok=True)

            dir_util._path_created = {}

        if not (op_plugin_path / "Binaries").is_dir() \
                or not (op_plugin_path / "Intermediate").is_dir():
            self.installing.emit("Building the plugin ...")
            print("--- Building the plugin...")

            self._build_and_move_plugin(op_plugin_path)

        self.finished.emit("Plugin successfully installed")
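
Both workers above drive external Unreal tools the same way: spawn the process with subprocess.Popen, stream its stdout line by line into the log signal (optionally parsing progress from it), then inspect the exit code. A condensed sketch of that pattern outside the worker classes (the helper, command, and callbacks below are hypothetical; stderr is merged into stdout here to keep the example to a single stream, whereas the workers keep separate pipes):

import subprocess

def run_and_stream(cmd, on_line, on_progress=None):
    """Illustrative helper: run `cmd`, forward each output line, return exit code."""
    proc = subprocess.Popen(
        cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    for raw_line in proc.stdout:
        line = raw_line.decode(errors="replace")
        on_line(line)
        if on_progress is not None:
            on_progress(line)
    proc.stdout.close()
    return proc.wait()

# Usage sketch with a made-up command:
# code = run_and_stream(["UnrealEditor-Cmd", "-run=AyonGenerateProject"], print)
# if code != 0:
#     raise RuntimeError(f"Tool exited with return code {code}")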

@@ -1,5 +0,0 @@
from .splash_screen import SplashScreen

__all__ = (
    "SplashScreen",
)

@@ -1,262 +0,0 @@
from qtpy import QtWidgets, QtCore, QtGui
from ayon_core import style, resources


class SplashScreen(QtWidgets.QDialog):
    """Splash screen for executing a process on another thread. It is able
    to inform about the progress of the process and log given information.
    """

    splash_icon = None
    top_label = None
    show_log_btn: QtWidgets.QPushButton = None
    progress_bar = None
    log_text: QtWidgets.QLabel = None
    scroll_area: QtWidgets.QScrollArea = None
    close_btn: QtWidgets.QPushButton = None
    scroll_bar: QtWidgets.QScrollBar = None

    is_log_visible = False
    is_scroll_auto = True

    thread_return_code = None
    q_thread: QtCore.QThread = None

    def __init__(self,
                 window_title: str,
                 splash_icon=None,
                 window_icon=None):
        """
        Args:
            window_title (str): String which sets the window title
            splash_icon (str | bytes | None): A resource (pic) which is used
                for the splash icon
            window_icon (str | bytes | None): A resource (pic) which is used
                for the window's icon
        """
        super(SplashScreen, self).__init__()

        if splash_icon is None:
            splash_icon = resources.get_ayon_icon_filepath()

        if window_icon is None:
            window_icon = resources.get_ayon_icon_filepath()

        self.splash_icon = splash_icon
        self.setWindowIcon(QtGui.QIcon(window_icon))
        self.setWindowTitle(window_title)
        self.init_ui()

    def was_proc_successful(self) -> bool:
        return self.thread_return_code == 0

    def start_thread(self, q_thread: QtCore.QThread):
        """Saves the reference to this thread and starts it.

        Args:
            q_thread (QtCore.QThread): A QThread containing a given worker
                (QtCore.QObject)

        Returns:
            None
        """
        if not q_thread:
            raise RuntimeError("Failed to run a worker thread! "
                               "The thread is null!")

        self.q_thread = q_thread
        self.q_thread.start()

    @QtCore.Slot()
    def quit_and_close(self):
        """Quits the thread and closes the splash screen. Note that this means
        the thread has exited with the return code 0!

        Returns:
            None
        """
        self.thread_return_code = 0
        self.q_thread.quit()

        if not self.q_thread.wait(5000):
            raise RuntimeError("Failed to quit the QThread! "
                               "The deadline has been reached! The thread "
                               "has not finished its execution!")
        self.close()

    @QtCore.Slot()
    def toggle_log(self):
        if self.is_log_visible:
            self.scroll_area.hide()
            width = self.width()
            self.adjustSize()
            self.resize(width, self.height())
        else:
            self.scroll_area.show()
            self.scroll_bar.setValue(self.scroll_bar.maximum())
            self.resize(self.width(), 300)

        self.is_log_visible = not self.is_log_visible

    def show_ui(self):
        """Shows the splash screen. BEWARE THAT THIS FUNCTION IS BLOCKING
        (The execution of code can not proceed further beyond this function
        until the splash screen is closed!)

        Returns:
            None
        """
        self.show()
        self.exec_()

    def init_ui(self):
        self.resize(450, 100)
        self.setMinimumWidth(250)
        self.setStyleSheet(style.load_stylesheet())

        # Top Section
        self.top_label = QtWidgets.QLabel(self)
        self.top_label.setText("Starting process ...")
        self.top_label.setWordWrap(True)

        icon = QtWidgets.QLabel(self)
        icon.setPixmap(QtGui.QPixmap(self.splash_icon))
        icon.setFixedHeight(45)
        icon.setFixedWidth(45)
        icon.setScaledContents(True)

        self.close_btn = QtWidgets.QPushButton(self)
        self.close_btn.setText("Quit")
        self.close_btn.clicked.connect(self.close)
        self.close_btn.setFixedWidth(80)
        self.close_btn.hide()

        self.show_log_btn = QtWidgets.QPushButton(self)
        self.show_log_btn.setText("Show log")
        self.show_log_btn.setFixedWidth(80)
        self.show_log_btn.clicked.connect(self.toggle_log)

        button_layout = QtWidgets.QVBoxLayout()
        button_layout.addWidget(self.show_log_btn)
        button_layout.addWidget(self.close_btn)

        # Progress Bar
        self.progress_bar = QtWidgets.QProgressBar()
        self.progress_bar.setValue(0)
        self.progress_bar.setAlignment(QtCore.Qt.AlignTop)

        # Log Content
        self.scroll_area = QtWidgets.QScrollArea(self)
        self.scroll_area.hide()
        log_widget = QtWidgets.QWidget(self.scroll_area)
        self.scroll_area.setWidgetResizable(True)
        self.scroll_area.setHorizontalScrollBarPolicy(
            QtCore.Qt.ScrollBarAlwaysOn
        )
        self.scroll_area.setVerticalScrollBarPolicy(
            QtCore.Qt.ScrollBarAlwaysOn
        )
        self.scroll_area.setWidget(log_widget)

        self.scroll_bar = self.scroll_area.verticalScrollBar()
        self.scroll_bar.sliderMoved.connect(self.on_scroll)

        self.log_text = QtWidgets.QLabel(self)
        self.log_text.setText('')
        self.log_text.setAlignment(QtCore.Qt.AlignTop)

        log_layout = QtWidgets.QVBoxLayout(log_widget)
        log_layout.addWidget(self.log_text)

        top_layout = QtWidgets.QHBoxLayout()
        top_layout.setAlignment(QtCore.Qt.AlignTop)
        top_layout.addWidget(icon)
        top_layout.addSpacing(10)
        top_layout.addWidget(self.top_label)
        top_layout.addSpacing(10)
        top_layout.addLayout(button_layout)

        main_layout = QtWidgets.QVBoxLayout(self)
        main_layout.addLayout(top_layout)
        main_layout.addSpacing(10)
        main_layout.addWidget(self.progress_bar)
        main_layout.addSpacing(10)
        main_layout.addWidget(self.scroll_area)

        self.setWindowFlags(
            QtCore.Qt.Window
            | QtCore.Qt.CustomizeWindowHint
            | QtCore.Qt.WindowTitleHint
            | QtCore.Qt.WindowMinimizeButtonHint
        )

        desktop_rect = QtWidgets.QApplication.desktop().availableGeometry(self)
        center = desktop_rect.center()
        self.move(
            center.x() - (self.width() * 0.5),
            center.y() - (self.height() * 0.5)
        )

    @QtCore.Slot(int)
    def update_progress(self, value: int):
        self.progress_bar.setValue(value)

    @QtCore.Slot(str)
    def update_top_label_text(self, text: str):
        self.top_label.setText(text)

    @QtCore.Slot(str, str)
    def append_log(self, text: str, end: str = ''):
        """A slot used for receiving log info and appending it to scroll area's
        content.
        Args:
            text (str): A log text that will append to the current one in the
                scroll area.
            end (str): end string which can be appended to the end of the given
                line (for ex. a line break).

        Returns:
            None
        """
        self.log_text.setText(self.log_text.text() + text + end)
        if self.is_scroll_auto:
            self.scroll_bar.setValue(self.scroll_bar.maximum())

    @QtCore.Slot(int)
    def on_scroll(self, position: int):
        """
        A slot for the vertical scroll bar's movement. This ensures the
        auto-scrolling feature of the scroll area when the scroll bar is at its
        maximum value.

        Args:
            position (int): Position value of the scroll bar.

        Returns:
            None
        """
        if self.scroll_bar.maximum() == position:
            self.is_scroll_auto = True
            return

        self.is_scroll_auto = False

    @QtCore.Slot(str, int)
    def fail(self, text: str, return_code: int = 1):
        """
        A slot used for signals which can emit when a worker (process) has
        failed. At this moment the splash screen doesn't close by itself;
        it has to be closed by the user.

        Args:
            text (str): A text which can be set to the top label.
            return_code (int): Return code of the thread's work.

        Returns:
            None
        """
        self.top_label.setText(text)
        self.close_btn.show()
        self.thread_return_code = return_code
        self.q_thread.exit(return_code)
        self.q_thread.wait()
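
The splash screen is designed to be driven purely through Qt signals from a worker running on a QThread: progress, log, failed, and finished map onto update_progress, append_log, fail, and quit_and_close. A rough usage sketch, assuming the UEWorker base declares those signals (as their use in the workers above suggests) and that the worker is moved onto the thread with moveToThread (an assumption, not a copy of the addon's own wiring; the engine path is made up):

worker = UEPluginInstallWorker()
worker.setup(engine_path=Path("C:/Epic/UE_5.3"))

q_thread = QtCore.QThread()
worker.moveToThread(q_thread)
q_thread.started.connect(worker.execute)

splash = SplashScreen("Installing the Unreal integration plugin")
worker.installing.connect(splash.update_top_label_text)
worker.progress.connect(splash.update_progress)
worker.log.connect(splash.append_log)
worker.failed.connect(splash.fail)
worker.finished.connect(splash.quit_and_close)

splash.start_thread(q_thread)
splash.show_ui()  # blocks until the splash screen is closed

if not splash.was_proc_successful():
    raise RuntimeError("Plugin installation failed")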

@@ -1,3 +0,0 @@
# -*- coding: utf-8 -*-
"""Package declaring AYON addon 'unreal' version."""
__version__ = "0.2.0"

@@ -1,10 +0,0 @@
name = "unreal"
title = "Unreal"
version = "0.2.0"

client_dir = "ayon_unreal"

ayon_required_addons = {
    "core": ">0.3.2",
}
ayon_compatible_addons = {}

@@ -1,13 +0,0 @@
from typing import Type

from ayon_server.addons import BaseServerAddon

from .settings import UnrealSettings, DEFAULT_VALUES


class UnrealAddon(BaseServerAddon):
    settings_model: Type[UnrealSettings] = UnrealSettings

    async def get_default_settings(self):
        settings_model_cls = self.get_settings_model()
        return settings_model_cls(**DEFAULT_VALUES)

@@ -1,48 +0,0 @@
from pydantic import validator
from ayon_server.settings import BaseSettingsModel, SettingsField
from ayon_server.settings.validators import ensure_unique_names


class ImageIOConfigModel(BaseSettingsModel):
    override_global_config: bool = SettingsField(
        False,
        title="Override global OCIO config"
    )
    filepath: list[str] = SettingsField(
        default_factory=list,
        title="Config path"
    )


class ImageIOFileRuleModel(BaseSettingsModel):
    name: str = SettingsField("", title="Rule name")
    pattern: str = SettingsField("", title="Regex pattern")
    colorspace: str = SettingsField("", title="Colorspace name")
    ext: str = SettingsField("", title="File extension")


class ImageIOFileRulesModel(BaseSettingsModel):
    activate_host_rules: bool = SettingsField(False)
    rules: list[ImageIOFileRuleModel] = SettingsField(
        default_factory=list,
        title="Rules"
    )

    @validator("rules")
    def validate_unique_outputs(cls, value):
        ensure_unique_names(value)
        return value


class UnrealImageIOModel(BaseSettingsModel):
    activate_host_color_management: bool = SettingsField(
        True, title="Enable Color Management"
    )
    ocio_config: ImageIOConfigModel = SettingsField(
        default_factory=ImageIOConfigModel,
        title="OCIO config"
    )
    file_rules: ImageIOFileRulesModel = SettingsField(
        default_factory=ImageIOFileRulesModel,
        title="File Rules"
    )
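
The validate_unique_outputs validator above delegates to ensure_unique_names, so two file rules sharing a name should be rejected when the settings are validated. A small illustration, assuming ensure_unique_names raises on duplicate names (pydantic surfaces that as a validation error):

rule = ImageIOFileRuleModel(
    name="exr_aces", pattern=".*", colorspace="ACEScg", ext="exr")
try:
    ImageIOFileRulesModel(activate_host_rules=True, rules=[rule, rule])
except ValueError as exc:
    print(exc)  # duplicate rule names are reported here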

@@ -1,63 +0,0 @@
from ayon_server.settings import BaseSettingsModel, SettingsField

from .imageio import UnrealImageIOModel


class ProjectSetup(BaseSettingsModel):
    dev_mode: bool = SettingsField(
        False,
        title="Dev mode"
    )


def _render_format_enum():
    return [
        {"value": "png", "label": "PNG"},
        {"value": "exr", "label": "EXR"},
        {"value": "jpg", "label": "JPG"},
        {"value": "bmp", "label": "BMP"}
    ]


class UnrealSettings(BaseSettingsModel):
    imageio: UnrealImageIOModel = SettingsField(
        default_factory=UnrealImageIOModel,
        title="Color Management (ImageIO)"
    )
    level_sequences_for_layouts: bool = SettingsField(
        False,
        title="Generate level sequences when loading layouts"
    )
    delete_unmatched_assets: bool = SettingsField(
        False,
        title="Delete assets that are not matched"
    )
    render_config_path: str = SettingsField(
        "",
        title="Render Config Path"
    )
    preroll_frames: int = SettingsField(
        0,
        title="Pre-roll frames"
    )
    render_format: str = SettingsField(
        "png",
        title="Render format",
        enum_resolver=_render_format_enum
    )
    project_setup: ProjectSetup = SettingsField(
        default_factory=ProjectSetup,
        title="Project Setup",
    )


DEFAULT_VALUES = {
    "level_sequences_for_layouts": True,
    "delete_unmatched_assets": False,
    "render_config_path": "",
    "preroll_frames": 0,
    "render_format": "exr",
    "project_setup": {
        "dev_mode": False
    }
}
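
Because UnrealSettings is an ordinary settings model, DEFAULT_VALUES can be loaded straight into it, which is a quick way to sanity-check the defaults above (illustrative only; it assumes an environment where ayon_server is importable):

settings = UnrealSettings(**DEFAULT_VALUES)
assert settings.render_format == "exr"
assert settings.preroll_frames == 0
assert settings.project_setup.dev_mode is False
assert settings.imageio.activate_host_color_management is True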