Mirror of https://github.com/ynput/ayon-core.git (synced 2025-12-25 05:14:40 +01:00)

Commit 73d39f027c
Merge branch 'develop' into enhancement/OP-3095_TrayPublisher-Simple-families-from-settings

25 changed files with 1238 additions and 231 deletions

.gitmodules (vendored): 0 changes
@@ -47,6 +47,7 @@ def install():
    print("installing OpenPype for Unreal ...")
    print("-=" * 40)
    logger.info("installing OpenPype for Unreal")
    pyblish.api.register_host("unreal")
    pyblish.api.register_plugin_path(str(PUBLISH_PATH))
    register_loader_plugin_path(str(LOAD_PATH))
    register_creator_plugin_path(str(CREATE_PATH))

@@ -392,3 +393,24 @@ def cast_map_to_str_dict(umap) -> dict:

    """
    return {str(key): str(value) for (key, value) in umap.items()}


def get_subsequences(sequence: unreal.LevelSequence):
    """Get list of subsequences from sequence.

    Args:
        sequence (unreal.LevelSequence): Sequence

    Returns:
        list(unreal.LevelSequence): List of subsequences

    """
    tracks = sequence.get_master_tracks()
    subscene_track = None
    for t in tracks:
        if t.get_class() == unreal.MovieSceneSubTrack.static_class():
            subscene_track = t
            break
    if subscene_track is not None and subscene_track.get_sections():
        return subscene_track.get_sections()
    return []
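Note: get_subsequences() returns the sections of the sequence's sub-scene track, and the rendering and creator code added later in this commit walks it while extending its work list. A minimal sketch of that traversal follows; the function and variable names here are illustrative only and are not part of this commit.

import unreal

from openpype.hosts.unreal.api import pipeline


def collect_all_sequences(master_sequence):
    """Gather a LevelSequence and every nested sub-sequence."""
    to_visit = [master_sequence]
    collected = []
    while to_visit:
        seq = to_visit.pop()
        collected.append(seq)
        for section in pipeline.get_subsequences(seq):
            # Each section is a sub-scene section; get_sequence() returns
            # the LevelSequence asset it references.
            to_visit.append(section.get_sequence())
    return collected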
openpype/hosts/unreal/api/rendering.py (new file, 125 lines)
@@ -0,0 +1,125 @@
import unreal

from openpype.hosts.unreal.api import pipeline


queue = None
executor = None


def _queue_finish_callback(exec, success):
    unreal.log("Render completed. Success: " + str(success))

    # Delete our reference so we don't keep it alive.
    global executor
    global queue
    del executor
    del queue


def _job_finish_callback(job, success):
    # You can make any edits you want to the editor world here, and the world
    # will be duplicated when the next render happens. Make sure you undo your
    # edits in OnQueueFinishedCallback if you don't want to leak state changes
    # into the editor world.
    unreal.log("Individual job completed.")


def start_rendering():
    """
    Start the rendering process.
    """
    print("Starting rendering...")

    # Get selected sequences
    assets = unreal.EditorUtilityLibrary.get_selected_assets()

    # instances = pipeline.ls_inst()
    instances = [
        a for a in assets
        if a.get_class().get_name() == "OpenPypePublishInstance"]

    inst_data = []

    for i in instances:
        data = pipeline.parse_container(i.get_path_name())
        if data["family"] == "render":
            inst_data.append(data)

    # subsystem = unreal.get_editor_subsystem(
    #     unreal.MoviePipelineQueueSubsystem)
    # queue = subsystem.get_queue()
    global queue
    queue = unreal.MoviePipelineQueue()

    ar = unreal.AssetRegistryHelpers.get_asset_registry()

    for i in inst_data:
        sequence = ar.get_asset_by_object_path(i["sequence"]).get_asset()

        sequences = [{
            "sequence": sequence,
            "output": f"{i['output']}",
            "frame_range": (
                int(float(i["frameStart"])),
                int(float(i["frameEnd"])) + 1)
        }]
        render_list = []

        # Get all the sequences to render. If there are subsequences,
        # add them and their frame ranges to the render list. We also
        # use the names for the output paths.
        for s in sequences:
            subscenes = pipeline.get_subsequences(s.get('sequence'))

            if subscenes:
                for ss in subscenes:
                    sequences.append({
                        "sequence": ss.get_sequence(),
                        "output": (f"{s.get('output')}/"
                                   f"{ss.get_sequence().get_name()}"),
                        "frame_range": (
                            ss.get_start_frame(), ss.get_end_frame())
                    })
            else:
                # Avoid rendering camera sequences
                if "_camera" not in s.get('sequence').get_name():
                    render_list.append(s)

        # Create the rendering jobs and add them to the queue.
        for r in render_list:
            job = queue.allocate_new_job(unreal.MoviePipelineExecutorJob)
            job.sequence = unreal.SoftObjectPath(i["master_sequence"])
            job.map = unreal.SoftObjectPath(i["master_level"])
            job.author = "OpenPype"

            # User data could be used to pass data to the job, that can be
            # read in the job's OnJobFinished callback. We could,
            # for instance, pass the AvalonPublishInstance's path to the job.
            # job.user_data = ""

            settings = job.get_configuration().find_or_add_setting_by_class(
                unreal.MoviePipelineOutputSetting)
            settings.output_resolution = unreal.IntPoint(1920, 1080)
            settings.custom_start_frame = r.get("frame_range")[0]
            settings.custom_end_frame = r.get("frame_range")[1]
            settings.use_custom_playback_range = True
            settings.file_name_format = "{sequence_name}.{frame_number}"
            settings.output_directory.path += r.get('output')

            renderPass = job.get_configuration().find_or_add_setting_by_class(
                unreal.MoviePipelineDeferredPassBase)
            renderPass.disable_multisample_effects = True

            job.get_configuration().find_or_add_setting_by_class(
                unreal.MoviePipelineImageSequenceOutput_PNG)

    # If there are jobs in the queue, start the rendering process.
    if queue.get_jobs():
        global executor
        executor = unreal.MoviePipelinePIEExecutor()
        executor.on_executor_finished_delegate.add_callable_unique(
            _queue_finish_callback)
        executor.on_individual_job_finished_delegate.add_callable_unique(
            _job_finish_callback)  # Only available on PIE Executor
        executor.execute(queue)
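The "Render..." button added to the host tools widget later in this commit simply calls rendering.start_rendering(). The same entry point could be triggered by hand from Unreal's Python console; a sketch, assuming the render publish instances are selected in the Content Browser first:

from openpype.hosts.unreal.api import rendering

# Select the OpenPypePublishInstance assets whose family is "render"
# in the Content Browser; start_rendering() reads the selection.
rendering.start_rendering()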
@@ -7,6 +7,7 @@ from openpype import (
)
from openpype.tools.utils import host_tools
from openpype.tools.utils.lib import qt_app_context
from openpype.hosts.unreal.api import rendering


class ToolsBtnsWidget(QtWidgets.QWidget):

@@ -20,6 +21,7 @@ class ToolsBtnsWidget(QtWidgets.QWidget):
        load_btn = QtWidgets.QPushButton("Load...", self)
        publish_btn = QtWidgets.QPushButton("Publish...", self)
        manage_btn = QtWidgets.QPushButton("Manage...", self)
        render_btn = QtWidgets.QPushButton("Render...", self)
        experimental_tools_btn = QtWidgets.QPushButton(
            "Experimental tools...", self
        )

@@ -30,6 +32,7 @@ class ToolsBtnsWidget(QtWidgets.QWidget):
        layout.addWidget(load_btn, 0)
        layout.addWidget(publish_btn, 0)
        layout.addWidget(manage_btn, 0)
        layout.addWidget(render_btn, 0)
        layout.addWidget(experimental_tools_btn, 0)
        layout.addStretch(1)

@@ -37,6 +40,7 @@ class ToolsBtnsWidget(QtWidgets.QWidget):
        load_btn.clicked.connect(self._on_load)
        publish_btn.clicked.connect(self._on_publish)
        manage_btn.clicked.connect(self._on_manage)
        render_btn.clicked.connect(self._on_render)
        experimental_tools_btn.clicked.connect(self._on_experimental)

    def _on_create(self):

@@ -51,6 +55,9 @@ class ToolsBtnsWidget(QtWidgets.QWidget):
    def _on_manage(self):
        self.tool_required.emit("sceneinventory")

    def _on_render(self):
        rendering.start_rendering()

    def _on_experimental(self):
        self.tool_required.emit("experimental_tools")
@@ -254,6 +254,7 @@ def create_unreal_project(project_name: str,
            {"Name": "PythonScriptPlugin", "Enabled": True},
            {"Name": "EditorScriptingUtilities", "Enabled": True},
            {"Name": "SequencerScripting", "Enabled": True},
            {"Name": "MovieRenderPipeline", "Enabled": True},
            {"Name": "OpenPype", "Enabled": True}
        ]
    }
@@ -1,5 +1,6 @@
# -*- coding: utf-8 -*-
from unreal import EditorLevelLibrary as ell
from unreal import EditorLevelLibrary

from openpype.hosts.unreal.api import plugin
from openpype.hosts.unreal.api.pipeline import instantiate

@@ -28,13 +29,13 @@ class CreateLayout(plugin.Creator):
        # sel_objects = unreal.EditorUtilityLibrary.get_selected_assets()
        # selection = [a.get_path_name() for a in sel_objects]

        data["level"] = ell.get_editor_world().get_path_name()
        data["level"] = EditorLevelLibrary.get_editor_world().get_path_name()

        data["members"] = []

        if (self.options or {}).get("useSelection"):
            # Set as members the selected actors
            for actor in ell.get_selected_level_actors():
            for actor in EditorLevelLibrary.get_selected_level_actors():
                data["members"].append("{}.{}".format(
                    actor.get_outer().get_name(), actor.get_name()))
openpype/hosts/unreal/plugins/create/create_render.py (new file, 111 lines)
@@ -0,0 +1,111 @@
import unreal

from openpype.pipeline import legacy_io
from openpype.hosts.unreal.api import pipeline
from openpype.hosts.unreal.api.plugin import Creator


class CreateRender(Creator):
    """Create instance for sequence for rendering"""

    name = "unrealRender"
    label = "Unreal - Render"
    family = "render"
    icon = "cube"
    asset_types = ["LevelSequence"]

    root = "/Game/OpenPype/PublishInstances"
    suffix = "_INS"

    def process(self):
        subset = self.data["subset"]

        ar = unreal.AssetRegistryHelpers.get_asset_registry()

        # Get the master sequence and the master level.
        # There should be only one sequence and one level in the directory.
        filter = unreal.ARFilter(
            class_names=["LevelSequence"],
            package_paths=[f"/Game/OpenPype/{self.data['asset']}"],
            recursive_paths=False)
        sequences = ar.get_assets(filter)
        ms = sequences[0].get_editor_property('object_path')
        filter = unreal.ARFilter(
            class_names=["World"],
            package_paths=[f"/Game/OpenPype/{self.data['asset']}"],
            recursive_paths=False)
        levels = ar.get_assets(filter)
        ml = levels[0].get_editor_property('object_path')

        selection = []
        if (self.options or {}).get("useSelection"):
            sel_objects = unreal.EditorUtilityLibrary.get_selected_assets()
            selection = [
                a.get_path_name() for a in sel_objects
                if a.get_class().get_name() in self.asset_types]
        else:
            selection.append(self.data['sequence'])

        unreal.log(f"selection: {selection}")

        path = f"{self.root}"
        unreal.EditorAssetLibrary.make_directory(path)

        ar = unreal.AssetRegistryHelpers.get_asset_registry()

        for a in selection:
            ms_obj = ar.get_asset_by_object_path(ms).get_asset()

            seq_data = None

            if a == ms:
                seq_data = {
                    "sequence": ms_obj,
                    "output": f"{ms_obj.get_name()}",
                    "frame_range": (
                        ms_obj.get_playback_start(), ms_obj.get_playback_end())
                }
            else:
                seq_data_list = [{
                    "sequence": ms_obj,
                    "output": f"{ms_obj.get_name()}",
                    "frame_range": (
                        ms_obj.get_playback_start(), ms_obj.get_playback_end())
                }]

                for s in seq_data_list:
                    subscenes = pipeline.get_subsequences(s.get('sequence'))

                    for ss in subscenes:
                        curr_data = {
                            "sequence": ss.get_sequence(),
                            "output": (f"{s.get('output')}/"
                                       f"{ss.get_sequence().get_name()}"),
                            "frame_range": (
                                ss.get_start_frame(), ss.get_end_frame() - 1)
                        }

                        if ss.get_sequence().get_path_name() == a:
                            seq_data = curr_data
                            break
                        seq_data_list.append(curr_data)

                    if seq_data is not None:
                        break

            if not seq_data:
                continue

            d = self.data.copy()
            d["members"] = [a]
            d["sequence"] = a
            d["master_sequence"] = ms
            d["master_level"] = ml
            d["output"] = seq_data.get('output')
            d["frameStart"] = seq_data.get('frame_range')[0]
            d["frameEnd"] = seq_data.get('frame_range')[1]

            container_name = f"{subset}{self.suffix}"
            pipeline.create_publish_instance(
                instance=container_name, path=path)
            pipeline.imprint(f"{path}/{container_name}", d)
@ -3,13 +3,17 @@
|
|||
import os
|
||||
import json
|
||||
|
||||
import unreal
|
||||
from unreal import EditorAssetLibrary
|
||||
from unreal import MovieSceneSkeletalAnimationTrack
|
||||
from unreal import MovieSceneSkeletalAnimationSection
|
||||
|
||||
from openpype.pipeline import (
|
||||
get_representation_path,
|
||||
AVALON_CONTAINER_ID
|
||||
)
|
||||
from openpype.hosts.unreal.api import plugin
|
||||
from openpype.hosts.unreal.api import pipeline as unreal_pipeline
|
||||
import unreal # noqa
|
||||
|
||||
|
||||
class AnimationFBXLoader(plugin.Loader):
|
||||
|
|
@ -21,59 +25,13 @@ class AnimationFBXLoader(plugin.Loader):
|
|||
icon = "cube"
|
||||
color = "orange"
|
||||
|
||||
def load(self, context, name, namespace, options=None):
|
||||
"""
|
||||
Load and containerise representation into Content Browser.
|
||||
|
||||
This is two step process. First, import FBX to temporary path and
|
||||
then call `containerise()` on it - this moves all content to new
|
||||
directory and then it will create AssetContainer there and imprint it
|
||||
with metadata. This will mark this path as container.
|
||||
|
||||
Args:
|
||||
context (dict): application context
|
||||
name (str): subset name
|
||||
namespace (str): in Unreal this is basically path to container.
|
||||
This is not passed here, so namespace is set
|
||||
by `containerise()` because only then we know
|
||||
real path.
|
||||
data (dict): Those would be data to be imprinted. This is not used
|
||||
now, data are imprinted by `containerise()`.
|
||||
|
||||
Returns:
|
||||
list(str): list of container content
|
||||
|
||||
"""
|
||||
# Create directory for asset and OpenPype container
|
||||
root = "/Game/OpenPype/Assets"
|
||||
asset = context.get('asset').get('name')
|
||||
suffix = "_CON"
|
||||
if asset:
|
||||
asset_name = "{}_{}".format(asset, name)
|
||||
else:
|
||||
asset_name = "{}".format(name)
|
||||
|
||||
tools = unreal.AssetToolsHelpers().get_asset_tools()
|
||||
asset_dir, container_name = tools.create_unique_asset_name(
|
||||
"{}/{}/{}".format(root, asset, name), suffix="")
|
||||
|
||||
container_name += suffix
|
||||
|
||||
unreal.EditorAssetLibrary.make_directory(asset_dir)
|
||||
|
||||
def _process(self, asset_dir, asset_name, instance_name):
|
||||
automated = False
|
||||
actor = None
|
||||
|
||||
task = unreal.AssetImportTask()
|
||||
task.options = unreal.FbxImportUI()
|
||||
|
||||
lib_path = self.fname.replace("fbx", "json")
|
||||
|
||||
with open(lib_path, "r") as fp:
|
||||
data = json.load(fp)
|
||||
|
||||
instance_name = data.get("instance_name")
|
||||
|
||||
if instance_name:
|
||||
automated = True
|
||||
# Old method to get the actor
|
||||
|
|
@ -131,6 +89,116 @@ class AnimationFBXLoader(plugin.Loader):
|
|||
|
||||
unreal.AssetToolsHelpers.get_asset_tools().import_asset_tasks([task])
|
||||
|
||||
asset_content = EditorAssetLibrary.list_assets(
|
||||
asset_dir, recursive=True, include_folder=True
|
||||
)
|
||||
|
||||
animation = None
|
||||
|
||||
for a in asset_content:
|
||||
imported_asset_data = EditorAssetLibrary.find_asset_data(a)
|
||||
imported_asset = unreal.AssetRegistryHelpers.get_asset(
|
||||
imported_asset_data)
|
||||
if imported_asset.__class__ == unreal.AnimSequence:
|
||||
animation = imported_asset
|
||||
break
|
||||
|
||||
if animation:
|
||||
animation.set_editor_property('enable_root_motion', True)
|
||||
actor.skeletal_mesh_component.set_editor_property(
|
||||
'animation_mode', unreal.AnimationMode.ANIMATION_SINGLE_NODE)
|
||||
actor.skeletal_mesh_component.animation_data.set_editor_property(
|
||||
'anim_to_play', animation)
|
||||
|
||||
return animation
|
||||
|
||||
def load(self, context, name, namespace, options=None):
|
||||
"""
|
||||
Load and containerise representation into Content Browser.
|
||||
|
||||
This is two step process. First, import FBX to temporary path and
|
||||
then call `containerise()` on it - this moves all content to new
|
||||
directory and then it will create AssetContainer there and imprint it
|
||||
with metadata. This will mark this path as container.
|
||||
|
||||
Args:
|
||||
context (dict): application context
|
||||
name (str): subset name
|
||||
namespace (str): in Unreal this is basically path to container.
|
||||
This is not passed here, so namespace is set
|
||||
by `containerise()` because only then we know
|
||||
real path.
|
||||
data (dict): Those would be data to be imprinted. This is not used
|
||||
now, data are imprinted by `containerise()`.
|
||||
|
||||
Returns:
|
||||
list(str): list of container content
|
||||
"""
|
||||
|
||||
# Create directory for asset and avalon container
|
||||
hierarchy = context.get('asset').get('data').get('parents')
|
||||
root = "/Game/OpenPype"
|
||||
asset = context.get('asset').get('name')
|
||||
suffix = "_CON"
|
||||
if asset:
|
||||
asset_name = "{}_{}".format(asset, name)
|
||||
else:
|
||||
asset_name = "{}".format(name)
|
||||
|
||||
tools = unreal.AssetToolsHelpers().get_asset_tools()
|
||||
asset_dir, container_name = tools.create_unique_asset_name(
|
||||
f"{root}/Animations/{asset}/{name}", suffix="")
|
||||
|
||||
hierarchy_dir = root
|
||||
for h in hierarchy:
|
||||
hierarchy_dir = f"{hierarchy_dir}/{h}"
|
||||
hierarchy_dir = f"{hierarchy_dir}/{asset}"
|
||||
|
||||
container_name += suffix
|
||||
|
||||
EditorAssetLibrary.make_directory(asset_dir)
|
||||
|
||||
libpath = self.fname.replace("fbx", "json")
|
||||
|
||||
with open(libpath, "r") as fp:
|
||||
data = json.load(fp)
|
||||
|
||||
instance_name = data.get("instance_name")
|
||||
|
||||
animation = self._process(asset_dir, container_name, instance_name)
|
||||
|
||||
asset_content = EditorAssetLibrary.list_assets(
|
||||
hierarchy_dir, recursive=True, include_folder=False)
|
||||
|
||||
# Get the sequence for the layout, excluding the camera one.
|
||||
sequences = [a for a in asset_content
|
||||
if (EditorAssetLibrary.find_asset_data(a).get_class() ==
|
||||
unreal.LevelSequence.static_class() and
|
||||
"_camera" not in a.split("/")[-1])]
|
||||
|
||||
ar = unreal.AssetRegistryHelpers.get_asset_registry()
|
||||
|
||||
for s in sequences:
|
||||
sequence = ar.get_asset_by_object_path(s).get_asset()
|
||||
possessables = [
|
||||
p for p in sequence.get_possessables()
|
||||
if p.get_display_name() == instance_name]
|
||||
|
||||
for p in possessables:
|
||||
tracks = [
|
||||
t for t in p.get_tracks()
|
||||
if (t.get_class() ==
|
||||
MovieSceneSkeletalAnimationTrack.static_class())]
|
||||
|
||||
for t in tracks:
|
||||
sections = [
|
||||
s for s in t.get_sections()
|
||||
if (s.get_class() ==
|
||||
MovieSceneSkeletalAnimationSection.static_class())]
|
||||
|
||||
for s in sections:
|
||||
s.params.set_editor_property('animation', animation)
|
||||
|
||||
# Create Asset Container
|
||||
unreal_pipeline.create_container(
|
||||
container=container_name, path=asset_dir)
|
||||
|
|
@ -150,29 +218,11 @@ class AnimationFBXLoader(plugin.Loader):
|
|||
unreal_pipeline.imprint(
|
||||
"{}/{}".format(asset_dir, container_name), data)
|
||||
|
||||
asset_content = unreal.EditorAssetLibrary.list_assets(
|
||||
asset_dir, recursive=True, include_folder=True
|
||||
)
|
||||
imported_content = EditorAssetLibrary.list_assets(
|
||||
asset_dir, recursive=True, include_folder=False)
|
||||
|
||||
animation = None
|
||||
|
||||
for a in asset_content:
|
||||
unreal.EditorAssetLibrary.save_asset(a)
|
||||
imported_asset_data = unreal.EditorAssetLibrary.find_asset_data(a)
|
||||
imported_asset = unreal.AssetRegistryHelpers.get_asset(
|
||||
imported_asset_data)
|
||||
if imported_asset.__class__ == unreal.AnimSequence:
|
||||
animation = imported_asset
|
||||
break
|
||||
|
||||
if animation:
|
||||
animation.set_editor_property('enable_root_motion', True)
|
||||
actor.skeletal_mesh_component.set_editor_property(
|
||||
'animation_mode', unreal.AnimationMode.ANIMATION_SINGLE_NODE)
|
||||
actor.skeletal_mesh_component.animation_data.set_editor_property(
|
||||
'anim_to_play', animation)
|
||||
|
||||
return asset_content
|
||||
for a in imported_content:
|
||||
EditorAssetLibrary.save_asset(a)
|
||||
|
||||
def update(self, container, representation):
|
||||
name = container["asset_name"]
|
||||
|
|
@ -218,7 +268,7 @@ class AnimationFBXLoader(plugin.Loader):
|
|||
task.options.anim_sequence_import_data.set_editor_property(
|
||||
'convert_scene', True)
|
||||
|
||||
skeletal_mesh = unreal.EditorAssetLibrary.load_asset(
|
||||
skeletal_mesh = EditorAssetLibrary.load_asset(
|
||||
container.get('namespace') + "/" + container.get('asset_name'))
|
||||
skeleton = skeletal_mesh.get_editor_property('skeleton')
|
||||
task.options.set_editor_property('skeleton', skeleton)
|
||||
|
|
@ -235,22 +285,22 @@ class AnimationFBXLoader(plugin.Loader):
|
|||
"parent": str(representation["parent"])
|
||||
})
|
||||
|
||||
asset_content = unreal.EditorAssetLibrary.list_assets(
|
||||
asset_content = EditorAssetLibrary.list_assets(
|
||||
destination_path, recursive=True, include_folder=True
|
||||
)
|
||||
|
||||
for a in asset_content:
|
||||
unreal.EditorAssetLibrary.save_asset(a)
|
||||
EditorAssetLibrary.save_asset(a)
|
||||
|
||||
def remove(self, container):
|
||||
path = container["namespace"]
|
||||
parent_path = os.path.dirname(path)
|
||||
|
||||
unreal.EditorAssetLibrary.delete_directory(path)
|
||||
EditorAssetLibrary.delete_directory(path)
|
||||
|
||||
asset_content = unreal.EditorAssetLibrary.list_assets(
|
||||
asset_content = EditorAssetLibrary.list_assets(
|
||||
parent_path, recursive=False, include_folder=True
|
||||
)
|
||||
|
||||
if len(asset_content) == 0:
|
||||
unreal.EditorAssetLibrary.delete_directory(parent_path)
|
||||
EditorAssetLibrary.delete_directory(parent_path)
|
||||
|
|
|
|||
|
|
@ -2,13 +2,16 @@
|
|||
"""Load camera from FBX."""
|
||||
import os
|
||||
|
||||
import unreal
|
||||
from unreal import EditorAssetLibrary
|
||||
from unreal import EditorLevelLibrary
|
||||
|
||||
from openpype.pipeline import (
|
||||
AVALON_CONTAINER_ID,
|
||||
legacy_io,
|
||||
)
|
||||
from openpype.hosts.unreal.api import plugin
|
||||
from openpype.hosts.unreal.api import pipeline as unreal_pipeline
|
||||
import unreal # noqa
|
||||
|
||||
|
||||
class CameraLoader(plugin.Loader):
|
||||
|
|
@ -20,6 +23,40 @@ class CameraLoader(plugin.Loader):
|
|||
icon = "cube"
|
||||
color = "orange"
|
||||
|
||||
def _get_data(self, asset_name):
|
||||
asset_doc = legacy_io.find_one({
|
||||
"type": "asset",
|
||||
"name": asset_name
|
||||
})
|
||||
|
||||
return asset_doc.get("data")
|
||||
|
||||
def _set_sequence_hierarchy(
|
||||
self, seq_i, seq_j, min_frame_j, max_frame_j
|
||||
):
|
||||
tracks = seq_i.get_master_tracks()
|
||||
track = None
|
||||
for t in tracks:
|
||||
if t.get_class() == unreal.MovieSceneSubTrack.static_class():
|
||||
track = t
|
||||
break
|
||||
if not track:
|
||||
track = seq_i.add_master_track(unreal.MovieSceneSubTrack)
|
||||
|
||||
subscenes = track.get_sections()
|
||||
subscene = None
|
||||
for s in subscenes:
|
||||
if s.get_editor_property('sub_sequence') == seq_j:
|
||||
subscene = s
|
||||
break
|
||||
if not subscene:
|
||||
subscene = track.add_section()
|
||||
subscene.set_row_index(len(track.get_sections()))
|
||||
subscene.set_editor_property('sub_sequence', seq_j)
|
||||
subscene.set_range(
|
||||
min_frame_j,
|
||||
max_frame_j + 1)
|
||||
|
||||
def load(self, context, name, namespace, data):
|
||||
"""
|
||||
Load and containerise representation into Content Browser.
|
||||
|
|
@ -43,8 +80,14 @@ class CameraLoader(plugin.Loader):
|
|||
list(str): list of container content
|
||||
"""
|
||||
|
||||
# Create directory for asset and OpenPype container
|
||||
root = "/Game/OpenPype/Assets"
|
||||
# Create directory for asset and avalon container
|
||||
hierarchy = context.get('asset').get('data').get('parents')
|
||||
root = "/Game/OpenPype"
|
||||
hierarchy_dir = root
|
||||
hierarchy_list = []
|
||||
for h in hierarchy:
|
||||
hierarchy_dir = f"{hierarchy_dir}/{h}"
|
||||
hierarchy_list.append(hierarchy_dir)
|
||||
asset = context.get('asset').get('name')
|
||||
suffix = "_CON"
|
||||
if asset:
|
||||
|
|
@ -54,10 +97,10 @@ class CameraLoader(plugin.Loader):
|
|||
|
||||
tools = unreal.AssetToolsHelpers().get_asset_tools()
|
||||
|
||||
# Create a unique name for the camera directory
|
||||
unique_number = 1
|
||||
|
||||
if unreal.EditorAssetLibrary.does_directory_exist(f"{root}/{asset}"):
|
||||
asset_content = unreal.EditorAssetLibrary.list_assets(
|
||||
if EditorAssetLibrary.does_directory_exist(f"{hierarchy_dir}/{asset}"):
|
||||
asset_content = EditorAssetLibrary.list_assets(
|
||||
f"{root}/{asset}", recursive=False, include_folder=True
|
||||
)
|
||||
|
||||
|
|
@ -76,42 +119,122 @@ class CameraLoader(plugin.Loader):
|
|||
unique_number = f_numbers[-1] + 1
|
||||
|
||||
asset_dir, container_name = tools.create_unique_asset_name(
|
||||
f"{root}/{asset}/{name}_{unique_number:02d}", suffix="")
|
||||
f"{hierarchy_dir}/{asset}/{name}_{unique_number:02d}", suffix="")
|
||||
|
||||
container_name += suffix
|
||||
|
||||
unreal.EditorAssetLibrary.make_directory(asset_dir)
|
||||
current_level = EditorLevelLibrary.get_editor_world().get_full_name()
|
||||
EditorLevelLibrary.save_all_dirty_levels()
|
||||
|
||||
sequence = tools.create_asset(
|
||||
asset_name=asset_name,
|
||||
ar = unreal.AssetRegistryHelpers.get_asset_registry()
|
||||
filter = unreal.ARFilter(
|
||||
class_names=["World"],
|
||||
package_paths=[f"{hierarchy_dir}/{asset}/"],
|
||||
recursive_paths=True)
|
||||
maps = ar.get_assets(filter)
|
||||
|
||||
# There should be only one map in the list
|
||||
EditorLevelLibrary.load_level(maps[0].get_full_name())
|
||||
|
||||
# Get all the sequences in the hierarchy. It will create them, if
|
||||
# they don't exist.
|
||||
sequences = []
|
||||
frame_ranges = []
|
||||
i = 0
|
||||
for h in hierarchy_list:
|
||||
root_content = EditorAssetLibrary.list_assets(
|
||||
h, recursive=False, include_folder=False)
|
||||
|
||||
existing_sequences = [
|
||||
EditorAssetLibrary.find_asset_data(asset)
|
||||
for asset in root_content
|
||||
if EditorAssetLibrary.find_asset_data(
|
||||
asset).get_class().get_name() == 'LevelSequence'
|
||||
]
|
||||
|
||||
if not existing_sequences:
|
||||
scene = tools.create_asset(
|
||||
asset_name=hierarchy[i],
|
||||
package_path=h,
|
||||
asset_class=unreal.LevelSequence,
|
||||
factory=unreal.LevelSequenceFactoryNew()
|
||||
)
|
||||
|
||||
asset_data = legacy_io.find_one({
|
||||
"type": "asset",
|
||||
"name": h.split('/')[-1]
|
||||
})
|
||||
|
||||
id = asset_data.get('_id')
|
||||
|
||||
start_frames = []
|
||||
end_frames = []
|
||||
|
||||
elements = list(
|
||||
legacy_io.find({"type": "asset", "data.visualParent": id}))
|
||||
for e in elements:
|
||||
start_frames.append(e.get('data').get('clipIn'))
|
||||
end_frames.append(e.get('data').get('clipOut'))
|
||||
|
||||
elements.extend(legacy_io.find({
|
||||
"type": "asset",
|
||||
"data.visualParent": e.get('_id')
|
||||
}))
|
||||
|
||||
min_frame = min(start_frames)
|
||||
max_frame = max(end_frames)
|
||||
|
||||
scene.set_display_rate(
|
||||
unreal.FrameRate(asset_data.get('data').get("fps"), 1.0))
|
||||
scene.set_playback_start(min_frame)
|
||||
scene.set_playback_end(max_frame)
|
||||
|
||||
sequences.append(scene)
|
||||
frame_ranges.append((min_frame, max_frame))
|
||||
else:
|
||||
for e in existing_sequences:
|
||||
sequences.append(e.get_asset())
|
||||
frame_ranges.append((
|
||||
e.get_asset().get_playback_start(),
|
||||
e.get_asset().get_playback_end()))
|
||||
|
||||
i += 1
|
||||
|
||||
EditorAssetLibrary.make_directory(asset_dir)
|
||||
|
||||
cam_seq = tools.create_asset(
|
||||
asset_name=f"{asset}_camera",
|
||||
package_path=asset_dir,
|
||||
asset_class=unreal.LevelSequence,
|
||||
factory=unreal.LevelSequenceFactoryNew()
|
||||
)
|
||||
|
||||
io_asset = legacy_io.Session["AVALON_ASSET"]
|
||||
asset_doc = legacy_io.find_one({
|
||||
"type": "asset",
|
||||
"name": io_asset
|
||||
})
|
||||
# Add sequences data to hierarchy
|
||||
for i in range(0, len(sequences) - 1):
|
||||
self._set_sequence_hierarchy(
|
||||
sequences[i], sequences[i + 1],
|
||||
frame_ranges[i + 1][0], frame_ranges[i + 1][1])
|
||||
|
||||
data = asset_doc.get("data")
|
||||
|
||||
if data:
|
||||
sequence.set_display_rate(unreal.FrameRate(data.get("fps"), 1.0))
|
||||
sequence.set_playback_start(data.get("frameStart"))
|
||||
sequence.set_playback_end(data.get("frameEnd"))
|
||||
data = self._get_data(asset)
|
||||
cam_seq.set_display_rate(
|
||||
unreal.FrameRate(data.get("fps"), 1.0))
|
||||
cam_seq.set_playback_start(0)
|
||||
cam_seq.set_playback_end(data.get('clipOut') - data.get('clipIn') + 1)
|
||||
self._set_sequence_hierarchy(
|
||||
sequences[-1], cam_seq,
|
||||
data.get('clipIn'), data.get('clipOut'))
|
||||
|
||||
settings = unreal.MovieSceneUserImportFBXSettings()
|
||||
settings.set_editor_property('reduce_keys', False)
|
||||
|
||||
unreal.SequencerTools.import_fbx(
|
||||
unreal.EditorLevelLibrary.get_editor_world(),
|
||||
sequence,
|
||||
sequence.get_bindings(),
|
||||
settings,
|
||||
self.fname
|
||||
)
|
||||
if cam_seq:
|
||||
unreal.SequencerTools.import_fbx(
|
||||
EditorLevelLibrary.get_editor_world(),
|
||||
cam_seq,
|
||||
cam_seq.get_bindings(),
|
||||
settings,
|
||||
self.fname
|
||||
)
|
||||
|
||||
# Create Asset Container
|
||||
unreal_pipeline.create_container(
|
||||
|
|
@ -132,12 +255,15 @@ class CameraLoader(plugin.Loader):
|
|||
unreal_pipeline.imprint(
|
||||
"{}/{}".format(asset_dir, container_name), data)
|
||||
|
||||
asset_content = unreal.EditorAssetLibrary.list_assets(
|
||||
EditorLevelLibrary.save_all_dirty_levels()
|
||||
EditorLevelLibrary.load_level(current_level)
|
||||
|
||||
asset_content = EditorAssetLibrary.list_assets(
|
||||
asset_dir, recursive=True, include_folder=True
|
||||
)
|
||||
|
||||
for a in asset_content:
|
||||
unreal.EditorAssetLibrary.save_asset(a)
|
||||
EditorAssetLibrary.save_asset(a)
|
||||
|
||||
return asset_content
|
||||
|
||||
|
|
@ -147,25 +273,25 @@ class CameraLoader(plugin.Loader):
|
|||
ar = unreal.AssetRegistryHelpers.get_asset_registry()
|
||||
tools = unreal.AssetToolsHelpers().get_asset_tools()
|
||||
|
||||
asset_content = unreal.EditorAssetLibrary.list_assets(
|
||||
asset_content = EditorAssetLibrary.list_assets(
|
||||
path, recursive=False, include_folder=False
|
||||
)
|
||||
asset_name = ""
|
||||
for a in asset_content:
|
||||
asset = ar.get_asset_by_object_path(a)
|
||||
if a.endswith("_CON"):
|
||||
loaded_asset = unreal.EditorAssetLibrary.load_asset(a)
|
||||
unreal.EditorAssetLibrary.set_metadata_tag(
|
||||
loaded_asset = EditorAssetLibrary.load_asset(a)
|
||||
EditorAssetLibrary.set_metadata_tag(
|
||||
loaded_asset, "representation", str(representation["_id"])
|
||||
)
|
||||
unreal.EditorAssetLibrary.set_metadata_tag(
|
||||
EditorAssetLibrary.set_metadata_tag(
|
||||
loaded_asset, "parent", str(representation["parent"])
|
||||
)
|
||||
asset_name = unreal.EditorAssetLibrary.get_metadata_tag(
|
||||
asset_name = EditorAssetLibrary.get_metadata_tag(
|
||||
loaded_asset, "asset_name"
|
||||
)
|
||||
elif asset.asset_class == "LevelSequence":
|
||||
unreal.EditorAssetLibrary.delete_asset(a)
|
||||
EditorAssetLibrary.delete_asset(a)
|
||||
|
||||
sequence = tools.create_asset(
|
||||
asset_name=asset_name,
|
||||
|
|
@ -191,7 +317,7 @@ class CameraLoader(plugin.Loader):
|
|||
settings.set_editor_property('reduce_keys', False)
|
||||
|
||||
unreal.SequencerTools.import_fbx(
|
||||
unreal.EditorLevelLibrary.get_editor_world(),
|
||||
EditorLevelLibrary.get_editor_world(),
|
||||
sequence,
|
||||
sequence.get_bindings(),
|
||||
settings,
|
||||
|
|
@ -202,11 +328,11 @@ class CameraLoader(plugin.Loader):
|
|||
path = container["namespace"]
|
||||
parent_path = os.path.dirname(path)
|
||||
|
||||
unreal.EditorAssetLibrary.delete_directory(path)
|
||||
EditorAssetLibrary.delete_directory(path)
|
||||
|
||||
asset_content = unreal.EditorAssetLibrary.list_assets(
|
||||
asset_content = EditorAssetLibrary.list_assets(
|
||||
parent_path, recursive=False, include_folder=True
|
||||
)
|
||||
|
||||
if len(asset_content) == 0:
|
||||
unreal.EditorAssetLibrary.delete_directory(parent_path)
|
||||
EditorAssetLibrary.delete_directory(parent_path)
|
||||
|
|
|
|||
|
|
@ -7,6 +7,7 @@ from pathlib import Path
|
|||
import unreal
|
||||
from unreal import EditorAssetLibrary
|
||||
from unreal import EditorLevelLibrary
|
||||
from unreal import EditorLevelUtils
|
||||
from unreal import AssetToolsHelpers
|
||||
from unreal import FBXImportType
|
||||
from unreal import MathLibrary as umath
|
||||
|
|
@ -17,6 +18,7 @@ from openpype.pipeline import (
|
|||
load_container,
|
||||
get_representation_path,
|
||||
AVALON_CONTAINER_ID,
|
||||
legacy_io,
|
||||
)
|
||||
from openpype.hosts.unreal.api import plugin
|
||||
from openpype.hosts.unreal.api import pipeline as unreal_pipeline
|
||||
|
|
@ -31,7 +33,7 @@ class LayoutLoader(plugin.Loader):
|
|||
label = "Load Layout"
|
||||
icon = "code-fork"
|
||||
color = "orange"
|
||||
ASSET_ROOT = "/Game/OpenPype/Assets"
|
||||
ASSET_ROOT = "/Game/OpenPype"
|
||||
|
||||
def _get_asset_containers(self, path):
|
||||
ar = unreal.AssetRegistryHelpers.get_asset_registry()
|
||||
|
|
@ -85,11 +87,91 @@ class LayoutLoader(plugin.Loader):
|
|||
|
||||
return None
|
||||
|
||||
@staticmethod
|
||||
def _process_family(assets, class_name, transform, inst_name=None):
|
||||
def _get_data(self, asset_name):
|
||||
asset_doc = legacy_io.find_one({
|
||||
"type": "asset",
|
||||
"name": asset_name
|
||||
})
|
||||
|
||||
return asset_doc.get("data")
|
||||
|
||||
def _set_sequence_hierarchy(
|
||||
self, seq_i, seq_j, max_frame_i, min_frame_j, max_frame_j, map_paths
|
||||
):
|
||||
# Get existing sequencer tracks or create them if they don't exist
|
||||
tracks = seq_i.get_master_tracks()
|
||||
subscene_track = None
|
||||
visibility_track = None
|
||||
for t in tracks:
|
||||
if t.get_class() == unreal.MovieSceneSubTrack.static_class():
|
||||
subscene_track = t
|
||||
if (t.get_class() ==
|
||||
unreal.MovieSceneLevelVisibilityTrack.static_class()):
|
||||
visibility_track = t
|
||||
if not subscene_track:
|
||||
subscene_track = seq_i.add_master_track(unreal.MovieSceneSubTrack)
|
||||
if not visibility_track:
|
||||
visibility_track = seq_i.add_master_track(
|
||||
unreal.MovieSceneLevelVisibilityTrack)
|
||||
|
||||
# Create the sub-scene section
|
||||
subscenes = subscene_track.get_sections()
|
||||
subscene = None
|
||||
for s in subscenes:
|
||||
if s.get_editor_property('sub_sequence') == seq_j:
|
||||
subscene = s
|
||||
break
|
||||
if not subscene:
|
||||
subscene = subscene_track.add_section()
|
||||
subscene.set_row_index(len(subscene_track.get_sections()))
|
||||
subscene.set_editor_property('sub_sequence', seq_j)
|
||||
subscene.set_range(
|
||||
min_frame_j,
|
||||
max_frame_j + 1)
|
||||
|
||||
# Create the visibility section
|
||||
ar = unreal.AssetRegistryHelpers.get_asset_registry()
|
||||
maps = []
|
||||
for m in map_paths:
|
||||
# Unreal requires to load the level to get the map name
|
||||
EditorLevelLibrary.save_all_dirty_levels()
|
||||
EditorLevelLibrary.load_level(m)
|
||||
maps.append(str(ar.get_asset_by_object_path(m).asset_name))
|
||||
|
||||
vis_section = visibility_track.add_section()
|
||||
index = len(visibility_track.get_sections())
|
||||
|
||||
vis_section.set_range(
|
||||
min_frame_j,
|
||||
max_frame_j + 1)
|
||||
vis_section.set_visibility(unreal.LevelVisibility.VISIBLE)
|
||||
vis_section.set_row_index(index)
|
||||
vis_section.set_level_names(maps)
|
||||
|
||||
if min_frame_j > 1:
|
||||
hid_section = visibility_track.add_section()
|
||||
hid_section.set_range(
|
||||
1,
|
||||
min_frame_j)
|
||||
hid_section.set_visibility(unreal.LevelVisibility.HIDDEN)
|
||||
hid_section.set_row_index(index)
|
||||
hid_section.set_level_names(maps)
|
||||
if max_frame_j < max_frame_i:
|
||||
hid_section = visibility_track.add_section()
|
||||
hid_section.set_range(
|
||||
max_frame_j + 1,
|
||||
max_frame_i + 1)
|
||||
hid_section.set_visibility(unreal.LevelVisibility.HIDDEN)
|
||||
hid_section.set_row_index(index)
|
||||
hid_section.set_level_names(maps)
|
||||
|
||||
def _process_family(
|
||||
self, assets, class_name, transform, sequence, inst_name=None
|
||||
):
|
||||
ar = unreal.AssetRegistryHelpers.get_asset_registry()
|
||||
|
||||
actors = []
|
||||
bindings = []
|
||||
|
||||
for asset in assets:
|
||||
obj = ar.get_asset_by_object_path(asset).get_asset()
|
||||
|
|
@ -119,14 +201,23 @@ class LayoutLoader(plugin.Loader):
|
|||
), False)
|
||||
actor.set_actor_scale3d(transform.get('scale'))
|
||||
|
||||
if class_name == 'SkeletalMesh':
|
||||
skm_comp = actor.get_editor_property(
|
||||
'skeletal_mesh_component')
|
||||
skm_comp.set_bounds_scale(10.0)
|
||||
|
||||
actors.append(actor)
|
||||
|
||||
return actors
|
||||
binding = sequence.add_possessable(actor)
|
||||
|
||||
bindings.append(binding)
|
||||
|
||||
return actors, bindings
|
||||
|
||||
@staticmethod
|
||||
def _import_animation(
|
||||
asset_dir, path, instance_name, skeleton, actors_dict,
|
||||
animation_file):
|
||||
self, asset_dir, path, instance_name, skeleton, actors_dict,
|
||||
animation_file, bindings_dict, sequence
|
||||
):
|
||||
anim_file = Path(animation_file)
|
||||
anim_file_name = anim_file.with_suffix('')
|
||||
|
||||
|
|
@ -205,7 +296,20 @@ class LayoutLoader(plugin.Loader):
|
|||
actor.skeletal_mesh_component.animation_data.set_editor_property(
|
||||
'anim_to_play', animation)
|
||||
|
||||
def _process(self, lib_path, asset_dir, loaded=None):
|
||||
# Add animation to the sequencer
|
||||
bindings = bindings_dict.get(instance_name)
|
||||
|
||||
for binding in bindings:
|
||||
binding.add_track(unreal.MovieSceneSkeletalAnimationTrack)
|
||||
for track in binding.get_tracks():
|
||||
section = track.add_section()
|
||||
section.set_range(
|
||||
sequence.get_playback_start(),
|
||||
sequence.get_playback_end())
|
||||
sec_params = section.get_editor_property('params')
|
||||
sec_params.set_editor_property('animation', animation)
|
||||
|
||||
def _process(self, lib_path, asset_dir, sequence, loaded=None):
|
||||
ar = unreal.AssetRegistryHelpers.get_asset_registry()
|
||||
|
||||
with open(lib_path, "r") as fp:
|
||||
|
|
@ -220,6 +324,7 @@ class LayoutLoader(plugin.Loader):
|
|||
|
||||
skeleton_dict = {}
|
||||
actors_dict = {}
|
||||
bindings_dict = {}
|
||||
|
||||
for element in data:
|
||||
reference = None
|
||||
|
|
@ -277,12 +382,13 @@ class LayoutLoader(plugin.Loader):
|
|||
actors = []
|
||||
|
||||
if family == 'model':
|
||||
actors = self._process_family(
|
||||
assets, 'StaticMesh', transform, inst)
|
||||
actors, _ = self._process_family(
|
||||
assets, 'StaticMesh', transform, sequence, inst)
|
||||
elif family == 'rig':
|
||||
actors = self._process_family(
|
||||
assets, 'SkeletalMesh', transform, inst)
|
||||
actors, bindings = self._process_family(
|
||||
assets, 'SkeletalMesh', transform, sequence, inst)
|
||||
actors_dict[inst] = actors
|
||||
bindings_dict[inst] = bindings
|
||||
|
||||
if family == 'rig':
|
||||
# Finds skeleton among the imported assets
|
||||
|
|
@ -302,8 +408,8 @@ class LayoutLoader(plugin.Loader):
|
|||
|
||||
if animation_file and skeleton:
|
||||
self._import_animation(
|
||||
asset_dir, path, instance_name, skeleton,
|
||||
actors_dict, animation_file)
|
||||
asset_dir, path, instance_name, skeleton, actors_dict,
|
||||
animation_file, bindings_dict, sequence)
|
||||
|
||||
@staticmethod
|
||||
def _remove_family(assets, components, class_name, prop_name):
|
||||
|
|
@ -369,7 +475,13 @@ class LayoutLoader(plugin.Loader):
|
|||
list(str): list of container content
|
||||
"""
|
||||
# Create directory for asset and avalon container
|
||||
hierarchy = context.get('asset').get('data').get('parents')
|
||||
root = self.ASSET_ROOT
|
||||
hierarchy_dir = root
|
||||
hierarchy_list = []
|
||||
for h in hierarchy:
|
||||
hierarchy_dir = f"{hierarchy_dir}/{h}"
|
||||
hierarchy_list.append(hierarchy_dir)
|
||||
asset = context.get('asset').get('name')
|
||||
suffix = "_CON"
|
||||
if asset:
|
||||
|
|
@ -379,13 +491,156 @@ class LayoutLoader(plugin.Loader):
|
|||
|
||||
tools = unreal.AssetToolsHelpers().get_asset_tools()
|
||||
asset_dir, container_name = tools.create_unique_asset_name(
|
||||
"{}/{}/{}".format(root, asset, name), suffix="")
|
||||
"{}/{}/{}".format(hierarchy_dir, asset, name), suffix="")
|
||||
|
||||
container_name += suffix
|
||||
|
||||
EditorAssetLibrary.make_directory(asset_dir)
|
||||
|
||||
self._process(self.fname, asset_dir)
|
||||
# Create map for the shot, and create hierarchy of map. If the maps
|
||||
# already exist, we will use them.
|
||||
maps = []
|
||||
for h in hierarchy_list:
|
||||
a = h.split('/')[-1]
|
||||
map = f"{h}/{a}_map.{a}_map"
|
||||
new = False
|
||||
|
||||
if not EditorAssetLibrary.does_asset_exist(map):
|
||||
EditorLevelLibrary.new_level(f"{h}/{a}_map")
|
||||
new = True
|
||||
|
||||
maps.append({"map": map, "new": new})
|
||||
|
||||
EditorLevelLibrary.new_level(f"{asset_dir}/{asset}_map")
|
||||
maps.append(
|
||||
{"map": f"{asset_dir}/{asset}_map.{asset}_map", "new": True})
|
||||
|
||||
for i in range(0, len(maps) - 1):
|
||||
for j in range(i + 1, len(maps)):
|
||||
if maps[j].get('new'):
|
||||
EditorLevelLibrary.load_level(maps[i].get('map'))
|
||||
EditorLevelUtils.add_level_to_world(
|
||||
EditorLevelLibrary.get_editor_world(),
|
||||
maps[j].get('map'),
|
||||
unreal.LevelStreamingDynamic
|
||||
)
|
||||
EditorLevelLibrary.save_all_dirty_levels()
|
||||
|
||||
EditorLevelLibrary.load_level(maps[-1].get('map'))
|
||||
|
||||
# Get all the sequences in the hierarchy. It will create them, if
|
||||
# they don't exist.
|
||||
sequences = []
|
||||
frame_ranges = []
|
||||
i = 0
|
||||
for h in hierarchy_list:
|
||||
root_content = EditorAssetLibrary.list_assets(
|
||||
h, recursive=False, include_folder=False)
|
||||
|
||||
existing_sequences = [
|
||||
EditorAssetLibrary.find_asset_data(asset)
|
||||
for asset in root_content
|
||||
if EditorAssetLibrary.find_asset_data(
|
||||
asset).get_class().get_name() == 'LevelSequence'
|
||||
]
|
||||
|
||||
if not existing_sequences:
|
||||
sequence = tools.create_asset(
|
||||
asset_name=hierarchy[i],
|
||||
package_path=h,
|
||||
asset_class=unreal.LevelSequence,
|
||||
factory=unreal.LevelSequenceFactoryNew()
|
||||
)
|
||||
|
||||
asset_data = legacy_io.find_one({
|
||||
"type": "asset",
|
||||
"name": h.split('/')[-1]
|
||||
})
|
||||
|
||||
id = asset_data.get('_id')
|
||||
|
||||
start_frames = []
|
||||
end_frames = []
|
||||
|
||||
elements = list(
|
||||
legacy_io.find({"type": "asset", "data.visualParent": id}))
|
||||
for e in elements:
|
||||
start_frames.append(e.get('data').get('clipIn'))
|
||||
end_frames.append(e.get('data').get('clipOut'))
|
||||
|
||||
elements.extend(legacy_io.find({
|
||||
"type": "asset",
|
||||
"data.visualParent": e.get('_id')
|
||||
}))
|
||||
|
||||
min_frame = min(start_frames)
|
||||
max_frame = max(end_frames)
|
||||
|
||||
sequence.set_display_rate(
|
||||
unreal.FrameRate(asset_data.get('data').get("fps"), 1.0))
|
||||
sequence.set_playback_start(min_frame)
|
||||
sequence.set_playback_end(max_frame)
|
||||
|
||||
sequences.append(sequence)
|
||||
frame_ranges.append((min_frame, max_frame))
|
||||
|
||||
tracks = sequence.get_master_tracks()
|
||||
track = None
|
||||
for t in tracks:
|
||||
if (t.get_class() ==
|
||||
unreal.MovieSceneCameraCutTrack.static_class()):
|
||||
track = t
|
||||
break
|
||||
if not track:
|
||||
track = sequence.add_master_track(
|
||||
unreal.MovieSceneCameraCutTrack)
|
||||
else:
|
||||
for e in existing_sequences:
|
||||
sequences.append(e.get_asset())
|
||||
frame_ranges.append((
|
||||
e.get_asset().get_playback_start(),
|
||||
e.get_asset().get_playback_end()))
|
||||
|
||||
i += 1
|
||||
|
||||
shot = tools.create_asset(
|
||||
asset_name=asset,
|
||||
package_path=asset_dir,
|
||||
asset_class=unreal.LevelSequence,
|
||||
factory=unreal.LevelSequenceFactoryNew()
|
||||
)
|
||||
|
||||
# sequences and frame_ranges have the same length
|
||||
for i in range(0, len(sequences) - 1):
|
||||
maps_to_add = []
|
||||
for j in range(i + 1, len(maps)):
|
||||
maps_to_add.append(maps[j].get('map'))
|
||||
|
||||
self._set_sequence_hierarchy(
|
||||
sequences[i], sequences[i + 1],
|
||||
frame_ranges[i][1],
|
||||
frame_ranges[i + 1][0], frame_ranges[i + 1][1],
|
||||
maps_to_add)
|
||||
|
||||
data = self._get_data(asset)
|
||||
shot.set_display_rate(
|
||||
unreal.FrameRate(data.get("fps"), 1.0))
|
||||
shot.set_playback_start(0)
|
||||
shot.set_playback_end(data.get('clipOut') - data.get('clipIn') + 1)
|
||||
self._set_sequence_hierarchy(
|
||||
sequences[-1], shot,
|
||||
frame_ranges[-1][1],
|
||||
data.get('clipIn'), data.get('clipOut'),
|
||||
[maps[-1].get('map')])
|
||||
|
||||
EditorLevelLibrary.load_level(maps[-1].get('map'))
|
||||
|
||||
self._process(self.fname, asset_dir, shot)
|
||||
|
||||
for s in sequences:
|
||||
EditorAssetLibrary.save_asset(s.get_full_name())
|
||||
|
||||
EditorLevelLibrary.save_current_level()
|
||||
|
||||
# Create Asset Container
|
||||
unreal_pipeline.create_container(
|
||||
|
|
@ -412,6 +667,8 @@ class LayoutLoader(plugin.Loader):
|
|||
for a in asset_content:
|
||||
EditorAssetLibrary.save_asset(a)
|
||||
|
||||
EditorLevelLibrary.load_level(maps[0].get('map'))
|
||||
|
||||
return asset_content
|
||||
|
||||
def update(self, container, representation):
|
||||
|
|
|
|||
|
|
@@ -17,7 +17,7 @@ class CollectInstances(pyblish.api.ContextPlugin):
    """

    label = "Collect Instances"
    order = pyblish.api.CollectorOrder
    order = pyblish.api.CollectorOrder - 0.1
    hosts = ["unreal"]

    def process(self, context):
@@ -0,0 +1,24 @@
import pyblish.api


class CollectRemoveMarked(pyblish.api.ContextPlugin):
    """Remove marked data

    Remove instances that have 'remove' in their instance.data

    """

    order = pyblish.api.CollectorOrder + 0.499
    label = 'Remove Marked Instances'

    def process(self, context):

        self.log.debug(context)
        # make ftrack publishable
        instances_to_remove = []
        for instance in context:
            if instance.data.get('remove'):
                instances_to_remove.append(instance)

        for instance in instances_to_remove:
            context.remove(instance)
@ -0,0 +1,103 @@
|
|||
from pathlib import Path
|
||||
import unreal
|
||||
|
||||
import pyblish.api
|
||||
from openpype.hosts.unreal.api import pipeline
|
||||
|
||||
|
||||
class CollectRenderInstances(pyblish.api.InstancePlugin):
|
||||
""" This collector will try to find all the rendered frames.
|
||||
|
||||
"""
|
||||
order = pyblish.api.CollectorOrder
|
||||
hosts = ["unreal"]
|
||||
families = ["render"]
|
||||
label = "Collect Render Instances"
|
||||
|
||||
def process(self, instance):
|
||||
self.log.debug("Preparing Rendering Instances")
|
||||
|
||||
context = instance.context
|
||||
|
||||
data = instance.data
|
||||
data['remove'] = True
|
||||
|
||||
ar = unreal.AssetRegistryHelpers.get_asset_registry()
|
||||
|
||||
sequence = ar.get_asset_by_object_path(
|
||||
data.get('sequence')).get_asset()
|
||||
|
||||
sequences = [{
|
||||
"sequence": sequence,
|
||||
"output": data.get('output'),
|
||||
"frame_range": (
|
||||
data.get('frameStart'), data.get('frameEnd'))
|
||||
}]
|
||||
|
||||
for s in sequences:
|
||||
self.log.debug(f"Processing: {s.get('sequence').get_name()}")
|
||||
subscenes = pipeline.get_subsequences(s.get('sequence'))
|
||||
|
||||
if subscenes:
|
||||
for ss in subscenes:
|
||||
sequences.append({
|
||||
"sequence": ss.get_sequence(),
|
||||
"output": (f"{s.get('output')}/"
|
||||
f"{ss.get_sequence().get_name()}"),
|
||||
"frame_range": (
|
||||
ss.get_start_frame(), ss.get_end_frame() - 1)
|
||||
})
|
||||
else:
|
||||
# Avoid creating instances for camera sequences
|
||||
if "_camera" not in s.get('sequence').get_name():
|
||||
seq = s.get('sequence')
|
||||
seq_name = seq.get_name()
|
||||
|
||||
new_instance = context.create_instance(
|
||||
f"{data.get('subset')}_"
|
||||
f"{seq_name}")
|
||||
new_instance[:] = seq_name
|
||||
|
||||
new_data = new_instance.data
|
||||
|
||||
new_data["asset"] = seq_name
|
||||
new_data["setMembers"] = seq_name
|
||||
new_data["family"] = "render"
|
||||
new_data["families"] = ["render", "review"]
|
||||
new_data["parent"] = data.get("parent")
|
||||
new_data["subset"] = f"{data.get('subset')}_{seq_name}"
|
||||
new_data["level"] = data.get("level")
|
||||
new_data["output"] = s.get('output')
|
||||
new_data["fps"] = seq.get_display_rate().numerator
|
||||
new_data["frameStart"] = s.get('frame_range')[0]
|
||||
new_data["frameEnd"] = s.get('frame_range')[1]
|
||||
new_data["sequence"] = seq.get_path_name()
|
||||
new_data["master_sequence"] = data["master_sequence"]
|
||||
new_data["master_level"] = data["master_level"]
|
||||
|
||||
self.log.debug(f"new instance data: {new_data}")
|
||||
|
||||
project_dir = unreal.Paths.project_dir()
|
||||
render_dir = (f"{project_dir}/Saved/MovieRenders/"
|
||||
f"{s.get('output')}")
|
||||
render_path = Path(render_dir)
|
||||
|
||||
frames = []
|
||||
|
||||
for x in render_path.iterdir():
|
||||
if x.is_file() and x.suffix == '.png':
|
||||
frames.append(str(x.name))
|
||||
|
||||
if "representations" not in new_instance.data:
|
||||
new_instance.data["representations"] = []
|
||||
|
||||
repr = {
|
||||
'frameStart': s.get('frame_range')[0],
|
||||
'frameEnd': s.get('frame_range')[1],
|
||||
'name': 'png',
|
||||
'ext': 'png',
|
||||
'files': frames,
|
||||
'stagingDir': render_dir,
|
||||
'tags': ['review']
|
||||
}
|
||||
new_instance.data["representations"].append(repr)
|
||||
openpype/hosts/unreal/plugins/publish/extract_render.py (new file, 48 lines)
@@ -0,0 +1,48 @@
from pathlib import Path

import unreal

import openpype.api


class ExtractRender(openpype.api.Extractor):
    """Extract render."""

    label = "Extract Render"
    hosts = ["unreal"]
    families = ["render"]
    optional = True

    def process(self, instance):
        # Define extract output file path
        stagingdir = self.staging_dir(instance)

        # Perform extraction
        self.log.info("Performing extraction..")

        # Get the render output directory
        project_dir = unreal.Paths.project_dir()
        render_dir = (f"{project_dir}/Saved/MovieRenders/"
                      f"{instance.data['subset']}")

        assert unreal.Paths.directory_exists(render_dir), \
            "Render directory does not exist"

        render_path = Path(render_dir)

        frames = []

        for x in render_path.iterdir():
            if x.is_file() and x.suffix == '.png':
                frames.append(str(x))

        if "representations" not in instance.data:
            instance.data["representations"] = []

        render_representation = {
            'name': 'png',
            'ext': 'png',
            'files': frames,
            "stagingDir": stagingdir,
        }
        instance.data["representations"].append(render_representation)
@@ -0,0 +1,41 @@
import clique

import pyblish.api


class ValidateSequenceFrames(pyblish.api.InstancePlugin):
    """Ensure the sequence of frames is complete

    The files found in the folder are checked against the frameStart and
    frameEnd of the instance. If the first or last file is not
    corresponding with the first or last frame it is flagged as invalid.
    """

    order = pyblish.api.ValidatorOrder
    label = "Validate Sequence Frames"
    families = ["render"]
    hosts = ["unreal"]
    optional = True

    def process(self, instance):
        representations = instance.data.get("representations")
        for repr in representations:
            patterns = [clique.PATTERNS["frames"]]
            collections, remainder = clique.assemble(
                repr["files"], minimum_items=1, patterns=patterns)

            assert not remainder, "Must not have remainder"
            assert len(collections) == 1, "Must detect single collection"
            collection = collections[0]
            frames = list(collection.indexes)

            current_range = (frames[0], frames[-1])
            required_range = (instance.data["frameStart"],
                              instance.data["frameEnd"])

            if current_range != required_range:
                raise ValueError(f"Invalid frame range: {current_range} - "
                                 f"expected: {required_range}")

            missing = collection.holes().indexes
            assert not missing, "Missing frames: %s" % (missing,)
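For reference, clique groups the file names of a representation into frame collections, which is how the validator derives the detected range and the missing frames. A small illustrative run, with made-up file names:

import clique

files = ["shot010.0001.png", "shot010.0002.png", "shot010.0004.png"]
collections, remainder = clique.assemble(
    files, minimum_items=1, patterns=[clique.PATTERNS["frames"]])

collection = collections[0]
print(list(collection.indexes))          # [1, 2, 4] -> current_range (1, 4)
print(list(collection.holes().indexes))  # [3] -> reported as missing frames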
@@ -1532,13 +1532,13 @@ class BuildWorkfile:

        subsets = list(legacy_io.find({
            "type": "subset",
            "parent": {"$in": asset_entity_by_ids.keys()}
            "parent": {"$in": list(asset_entity_by_ids.keys())}
        }))
        subset_entity_by_ids = {subset["_id"]: subset for subset in subsets}

        sorted_versions = list(legacy_io.find({
            "type": "version",
            "parent": {"$in": subset_entity_by_ids.keys()}
            "parent": {"$in": list(subset_entity_by_ids.keys())}
        }).sort("name", -1))

        subset_id_with_latest_version = []

@@ -1552,7 +1552,7 @@ class BuildWorkfile:

        repres = legacy_io.find({
            "type": "representation",
            "parent": {"$in": last_versions_by_id.keys()}
            "parent": {"$in": list(last_versions_by_id.keys())}
        })

        output = {}
@@ -365,6 +365,7 @@ class TemplateResult(str):
            when value of key in data is dictionary but template expect string
            of number.
    """

    used_values = None
    solved = None
    template = None

@@ -383,6 +384,12 @@ class TemplateResult(str):
        new_obj.invalid_types = invalid_types
        return new_obj

    def __copy__(self, *args, **kwargs):
        return self.copy()

    def __deepcopy__(self, *args, **kwargs):
        return self.copy()

    def validate(self):
        if not self.solved:
            raise TemplateUnsolved(

@@ -391,6 +398,17 @@ class TemplateResult(str):
            self.invalid_types
        )

    def copy(self):
        cls = self.__class__
        return cls(
            str(self),
            self.template,
            self.solved,
            self.used_values,
            self.missing_keys,
            self.invalid_types
        )


class TemplatesResultDict(dict):
    """Holds and wrap TemplateResults for easy bug report."""
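The new __copy__/__deepcopy__ hooks route both copy flavours through copy(); since TemplateResult subclasses str with a custom constructor, a plain copy.deepcopy() would otherwise not rebuild the extra attributes. A sketch of the intended effect; the constructor argument values and the import path are assumptions, not taken from this diff:

import copy

from openpype.lib import TemplateResult  # assumed import location

result = TemplateResult(
    "/projects/ep01/work", "{root}/{project}/work", True,
    {"root": "/projects", "project": "ep01"}, [], [])
duplicate = copy.deepcopy(result)

# The duplicate keeps both the string value and the metadata attributes.
assert str(duplicate) == str(result)
assert duplicate.template == result.template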
@@ -727,9 +727,9 @@ def get_ffmpeg_format_args(ffprobe_data, source_ffmpeg_cmd=None):
def _ffmpeg_mxf_format_args(ffprobe_data, source_ffmpeg_cmd):
    input_format = ffprobe_data["format"]
    format_tags = input_format.get("tags") or {}
    product_name = format_tags.get("product_name") or ""
    operational_pattern_ul = format_tags.get("operational_pattern_ul") or ""
    output = []
    if "opatom" in product_name.lower():
    if operational_pattern_ul == "060e2b34.04010102.0d010201.10030000":
        output.extend(["-f", "mxf_opatom"])
    return output
|
|
|
|||
|
|
@ -24,48 +24,6 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin):
label = "Integrate Ftrack Api"
families = ["ftrack"]

def query(self, entitytype, data):
""" Generate a query expression from data supplied.

If a value is not a string, we'll add the id of the entity to the
query.

Args:
entitytype (str): The type of entity to query.
data (dict): The data to identify the entity.
exclusions (list): All keys to exclude from the query.

Returns:
str: String query to use with "session.query"
"""
queries = []
if sys.version_info[0] < 3:
for key, value in data.iteritems():
if not isinstance(value, (basestring, int)):
self.log.info("value: {}".format(value))
if "id" in value.keys():
queries.append(
"{0}.id is \"{1}\"".format(key, value["id"])
)
else:
queries.append("{0} is \"{1}\"".format(key, value))
else:
for key, value in data.items():
if not isinstance(value, (str, int)):
self.log.info("value: {}".format(value))
if "id" in value.keys():
queries.append(
"{0}.id is \"{1}\"".format(key, value["id"])
)
else:
queries.append("{0} is \"{1}\"".format(key, value))

query = (
"select id from " + entitytype + " where " + " and ".join(queries)
)
self.log.debug(query)
return query

def process(self, instance):
session = instance.context.data["ftrackSession"]
context = instance.context

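For context on the removal above (illustrative, derived only from the deleted code): query() concatenated a criteria dict into an ftrack query string, taking the "id" of any non-string value and comparing plain values directly.

# Hypothetical input to the removed helper:
data = {"asset": {"id": "1234abcd"}, "version": 5}
# self.query("AssetVersion", data) would have produced roughly:
#   select id from AssetVersion where asset.id is "1234abcd" and version is "5"
# which was then passed to session.query().
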
@ -108,7 +66,19 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin):
default_asset_name = parent_entity["name"]

# Change status on task
self._set_task_status(instance, task_entity, session)
asset_version_status_ids_by_name = {}
project_entity = instance.context.data.get("ftrackProject")
if project_entity:
project_schema = project_entity["project_schema"]
asset_version_statuses = (
project_schema.get_statuses("AssetVersion")
)
asset_version_status_ids_by_name = {
status["name"].lower(): status["id"]
for status in asset_version_statuses
}

self._set_task_status(instance, project_entity, task_entity, session)

# Prepare AssetTypes
asset_types_by_short = self._ensure_asset_types_exists(

@ -139,7 +109,11 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin):
# Asset Version
asset_version_data = data.get("assetversion_data") or {}
asset_version_entity = self._ensure_asset_version_exists(
session, asset_version_data, asset_entity["id"], task_entity
session,
asset_version_data,
asset_entity["id"],
task_entity,
asset_version_status_ids_by_name
)

# Component

@ -174,8 +148,7 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin):
if asset_version not in instance.data[asset_versions_key]:
instance.data[asset_versions_key].append(asset_version)

def _set_task_status(self, instance, task_entity, session):
project_entity = instance.context.data.get("ftrackProject")
def _set_task_status(self, instance, project_entity, task_entity, session):
if not project_entity:
self.log.info("Task status won't be set, project is not known.")
return

@ -319,12 +292,19 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin):
).first()

def _ensure_asset_version_exists(
self, session, asset_version_data, asset_id, task_entity
self,
session,
asset_version_data,
asset_id,
task_entity,
status_ids_by_name
):
task_id = None
if task_entity:
task_id = task_entity["id"]

status_name = asset_version_data.pop("status_name", None)

# Try query asset version by criteria (asset id and version)
version = asset_version_data.get("version") or 0
asset_version_entity = self._query_asset_version(

@ -366,6 +346,18 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin):
session, version, asset_id
)

if status_name:
status_id = status_ids_by_name.get(status_name.lower())
if not status_id:
self.log.info((
"Ftrack status with name \"{}\""
" for AssetVersion was not found."
).format(status_name))

elif asset_version_entity["status_id"] != status_id:
asset_version_entity["status_id"] = status_id
session.commit()

# Set custom attributes if there were any set
custom_attrs = asset_version_data.get("custom_attributes") or {}
for attr_key, attr_value in custom_attrs.items():

@ -3,6 +3,8 @@ import json
import copy
import pyblish.api

from openpype.lib.profiles_filtering import filter_profiles


class IntegrateFtrackInstance(pyblish.api.InstancePlugin):
"""Collect ftrack component data (not integrate yet).

@ -36,6 +38,7 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin):
"reference": "reference"
}
keep_first_subset_name_for_review = True
asset_versions_status_profiles = {}

def process(self, instance):
self.log.debug("instance {}".format(instance))

@ -80,6 +83,8 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin):
if instance_fps is None:
instance_fps = instance.context.data["fps"]

status_name = self._get_asset_version_status_name(instance)

# Base of component item data
# - create a copy of this object when want to use it
base_component_item = {

@ -91,7 +96,8 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin):
},
"assetversion_data": {
"version": version_number,
"comment": instance.context.data.get("comment") or ""
"comment": instance.context.data.get("comment") or "",
"status_name": status_name
},
"component_overwrite": False,
# This can be change optionally

@ -317,3 +323,24 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin):
)
))
instance.data["ftrackComponentsList"] = component_list

def _get_asset_version_status_name(self, instance):
if not self.asset_versions_status_profiles:
return None

# Prepare filtering data for new asset version status
anatomy_data = instance.data["anatomyData"]
task_type = anatomy_data.get("task", {}).get("type")
filtering_criteria = {
"families": instance.data["family"],
"hosts": instance.context.data["hostName"],
"task_types": task_type
}
matching_profile = filter_profiles(
self.asset_versions_status_profiles,
filtering_criteria
)
if not matching_profile:
return None

return matching_profile["status"] or None

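The new _get_asset_version_status_name resolves the status by matching the instance's family, host name and task type against the asset_versions_status_profiles setting through filter_profiles. A minimal sketch of a profile that would match (keys follow the filtering criteria above; all values are illustrative):

# Sketch only -- one profile entry as the plugin could receive it.
asset_versions_status_profiles = [
    {
        "families": ["render"],
        "hosts": ["maya"],
        "task_types": ["Compositing"],
        "status": "In Review",
    }
]
# For an instance with family "render" published from Maya on a Compositing
# task, filter_profiles() returns this entry and the AssetVersion gets the
# "In Review" status, looked up case-insensitively in the project schema
# statuses collected by IntegrateFtrackApi.
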
@ -41,21 +41,33 @@ class CollectSceneLoadedVersions(pyblish.api.ContextPlugin):
loaded_versions = []
_containers = list(host.ls())
_repr_ids = [ObjectId(c["representation"]) for c in _containers]
repre_docs = legacy_io.find(
{"_id": {"$in": _repr_ids}},
projection={"_id": 1, "parent": 1}
)
version_by_repr = {
str(doc["_id"]): doc["parent"] for doc in
legacy_io.find(
{"_id": {"$in": _repr_ids}},
projection={"parent": 1}
)
str(doc["_id"]): doc["parent"]
for doc in repre_docs
}

# QUESTION should we add same representation id when loaded multiple
# times?
for con in _containers:
repre_id = con["representation"]
version_id = version_by_repr.get(repre_id)
if version_id is None:
self.log.warning((
"Skipping container,"
" did not find representation document. {}"
).format(str(con)))
continue

# NOTE:
# may have more then one representation that are same version
version = {
"subsetName": con["name"],
"representation": ObjectId(con["representation"]),
"version": version_by_repr[con["representation"]], # _id
"representation": ObjectId(repre_id),
"version": version_id,
}
loaded_versions.append(version)

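After the refactor above, representations are queried once and mapped to their parent version ids; each container that still resolves becomes one entry in loaded_versions. Illustrative shape of such an entry (ids shortened, values assumed):

# Sketch only -- one element of loaded_versions after this change.
# {
#     "subsetName": "modelMain",
#     "representation": ObjectId("6230f0a1..."),
#     "version": ObjectId("6230ef99..."),  # "parent" of the representation doc
# }
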
@ -51,7 +51,8 @@ class ExtractReview(pyblish.api.InstancePlugin):
"resolve",
"webpublisher",
"aftereffects",
"flame"
"flame",
"unreal"
]

# Supported extensions

@ -418,7 +418,8 @@
"redshiftproxy": "cache",
"usd": "usd"
},
"keep_first_subset_name_for_review": true
"keep_first_subset_name_for_review": true,
"asset_versions_status_profiles": []
}
}
}

@ -858,6 +858,43 @@
"key": "keep_first_subset_name_for_review",
"label": "Make subset name as first asset name",
"default": true
},
{
"type": "list",
"collapsible": true,
"key": "asset_versions_status_profiles",
"label": "AssetVersion status on publish",
"use_label_wrap": true,
"object_type": {
"type": "dict",
"children": [
{
"key": "hosts",
"label": "Host names",
"type": "hosts-enum",
"multiselection": true
},
{
"key": "task_types",
"label": "Task types",
"type": "task-types-enum"
},
{
"key": "family",
"label": "Family",
"type": "list",
"object_type": "text"
},
{
"type": "separator"
},
{
"key": "status",
"label": "Status name",
"type": "text"
}
]
}
}
]
}

69
poetry.lock
generated
@ -820,7 +820,7 @@ six = "*"
[[package]]
name = "pillow"
version = "9.0.0"
version = "9.0.1"
description = "Python Imaging Library (Fork)"
category = "main"
optional = false

@ -2310,38 +2310,41 @@ pathlib2 = [
{file = "pathlib2-2.3.6.tar.gz", hash = "sha256:7d8bcb5555003cdf4a8d2872c538faa3a0f5d20630cb360e518ca3b981795e5f"},
]
pillow = [
{file = "Pillow-9.0.0-cp310-cp310-macosx_10_10_universal2.whl", hash = "sha256:113723312215b25c22df1fdf0e2da7a3b9c357a7d24a93ebbe80bfda4f37a8d4"},
{file = "Pillow-9.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:bb47a548cea95b86494a26c89d153fd31122ed65255db5dcbc421a2d28eb3379"},
{file = "Pillow-9.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:31b265496e603985fad54d52d11970383e317d11e18e856971bdbb86af7242a4"},
{file = "Pillow-9.0.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d154ed971a4cc04b93a6d5b47f37948d1f621f25de3e8fa0c26b2d44f24e3e8f"},
{file = "Pillow-9.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80fe92813d208ce8aa7d76da878bdc84b90809f79ccbad2a288e9bcbeac1d9bd"},
{file = "Pillow-9.0.0-cp310-cp310-win32.whl", hash = "sha256:d5dcea1387331c905405b09cdbfb34611050cc52c865d71f2362f354faee1e9f"},
{file = "Pillow-9.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:52abae4c96b5da630a8b4247de5428f593465291e5b239f3f843a911a3cf0105"},
{file = "Pillow-9.0.0-cp37-cp37m-macosx_10_10_x86_64.whl", hash = "sha256:72c3110228944019e5f27232296c5923398496b28be42535e3b2dc7297b6e8b6"},
{file = "Pillow-9.0.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:97b6d21771da41497b81652d44191489296555b761684f82b7b544c49989110f"},
{file = "Pillow-9.0.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:72f649d93d4cc4d8cf79c91ebc25137c358718ad75f99e99e043325ea7d56100"},
{file = "Pillow-9.0.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7aaf07085c756f6cb1c692ee0d5a86c531703b6e8c9cae581b31b562c16b98ce"},
{file = "Pillow-9.0.0-cp37-cp37m-win32.whl", hash = "sha256:03b27b197deb4ee400ed57d8d4e572d2d8d80f825b6634daf6e2c18c3c6ccfa6"},
{file = "Pillow-9.0.0-cp37-cp37m-win_amd64.whl", hash = "sha256:a09a9d4ec2b7887f7a088bbaacfd5c07160e746e3d47ec5e8050ae3b2a229e9f"},
{file = "Pillow-9.0.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:490e52e99224858f154975db61c060686df8a6b3f0212a678e5d2e2ce24675c9"},
{file = "Pillow-9.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:500d397ddf4bbf2ca42e198399ac13e7841956c72645513e8ddf243b31ad2128"},
{file = "Pillow-9.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ebd8b9137630a7bbbff8c4b31e774ff05bbb90f7911d93ea2c9371e41039b52"},
{file = "Pillow-9.0.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fd0e5062f11cb3e730450a7d9f323f4051b532781026395c4323b8ad055523c4"},
{file = "Pillow-9.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f3b4522148586d35e78313db4db0df4b759ddd7649ef70002b6c3767d0fdeb7"},
{file = "Pillow-9.0.0-cp38-cp38-win32.whl", hash = "sha256:0b281fcadbb688607ea6ece7649c5d59d4bbd574e90db6cd030e9e85bde9fecc"},
{file = "Pillow-9.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:b5050d681bcf5c9f2570b93bee5d3ec8ae4cf23158812f91ed57f7126df91762"},
{file = "Pillow-9.0.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:c2067b3bb0781f14059b112c9da5a91c80a600a97915b4f48b37f197895dd925"},
{file = "Pillow-9.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2d16b6196fb7a54aff6b5e3ecd00f7c0bab1b56eee39214b2b223a9d938c50af"},
{file = "Pillow-9.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:98cb63ca63cb61f594511c06218ab4394bf80388b3d66cd61d0b1f63ee0ea69f"},
{file = "Pillow-9.0.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bc462d24500ba707e9cbdef436c16e5c8cbf29908278af053008d9f689f56dee"},
{file = "Pillow-9.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3586e12d874ce2f1bc875a3ffba98732ebb12e18fb6d97be482bd62b56803281"},
{file = "Pillow-9.0.0-cp39-cp39-win32.whl", hash = "sha256:68e06f8b2248f6dc8b899c3e7ecf02c9f413aab622f4d6190df53a78b93d97a5"},
{file = "Pillow-9.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:6579f9ba84a3d4f1807c4aab4be06f373017fc65fff43498885ac50a9b47a553"},
{file = "Pillow-9.0.0-pp37-pypy37_pp73-macosx_10_10_x86_64.whl", hash = "sha256:47f5cf60bcb9fbc46011f75c9b45a8b5ad077ca352a78185bd3e7f1d294b98bb"},
{file = "Pillow-9.0.0-pp37-pypy37_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2fd8053e1f8ff1844419842fd474fc359676b2e2a2b66b11cc59f4fa0a301315"},
{file = "Pillow-9.0.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c5439bfb35a89cac50e81c751317faea647b9a3ec11c039900cd6915831064d"},
{file = "Pillow-9.0.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:95545137fc56ce8c10de646074d242001a112a92de169986abd8c88c27566a05"},
{file = "Pillow-9.0.0.tar.gz", hash = "sha256:ee6e2963e92762923956fe5d3479b1fdc3b76c83f290aad131a2f98c3df0593e"},
{file = "Pillow-9.0.1-1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a5d24e1d674dd9d72c66ad3ea9131322819ff86250b30dc5821cbafcfa0b96b4"},
{file = "Pillow-9.0.1-1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2632d0f846b7c7600edf53c48f8f9f1e13e62f66a6dbc15191029d950bfed976"},
{file = "Pillow-9.0.1-1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b9618823bd237c0d2575283f2939655f54d51b4527ec3972907a927acbcc5bfc"},
{file = "Pillow-9.0.1-cp310-cp310-macosx_10_10_universal2.whl", hash = "sha256:9bfdb82cdfeccec50aad441afc332faf8606dfa5e8efd18a6692b5d6e79f00fd"},
{file = "Pillow-9.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5100b45a4638e3c00e4d2320d3193bdabb2d75e79793af7c3eb139e4f569f16f"},
{file = "Pillow-9.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:528a2a692c65dd5cafc130de286030af251d2ee0483a5bf50c9348aefe834e8a"},
{file = "Pillow-9.0.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0f29d831e2151e0b7b39981756d201f7108d3d215896212ffe2e992d06bfe049"},
{file = "Pillow-9.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:855c583f268edde09474b081e3ddcd5cf3b20c12f26e0d434e1386cc5d318e7a"},
{file = "Pillow-9.0.1-cp310-cp310-win32.whl", hash = "sha256:d9d7942b624b04b895cb95af03a23407f17646815495ce4547f0e60e0b06f58e"},
{file = "Pillow-9.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:81c4b81611e3a3cb30e59b0cf05b888c675f97e3adb2c8672c3154047980726b"},
{file = "Pillow-9.0.1-cp37-cp37m-macosx_10_10_x86_64.whl", hash = "sha256:413ce0bbf9fc6278b2d63309dfeefe452835e1c78398efb431bab0672fe9274e"},
{file = "Pillow-9.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:80fe64a6deb6fcfdf7b8386f2cf216d329be6f2781f7d90304351811fb591360"},
{file = "Pillow-9.0.1-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cef9c85ccbe9bee00909758936ea841ef12035296c748aaceee535969e27d31b"},
{file = "Pillow-9.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1d19397351f73a88904ad1aee421e800fe4bbcd1aeee6435fb62d0a05ccd1030"},
{file = "Pillow-9.0.1-cp37-cp37m-win32.whl", hash = "sha256:d21237d0cd37acded35154e29aec853e945950321dd2ffd1a7d86fe686814669"},
{file = "Pillow-9.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:ede5af4a2702444a832a800b8eb7f0a7a1c0eed55b644642e049c98d589e5092"},
{file = "Pillow-9.0.1-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:b5b3f092fe345c03bca1e0b687dfbb39364b21ebb8ba90e3fa707374b7915204"},
{file = "Pillow-9.0.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:335ace1a22325395c4ea88e00ba3dc89ca029bd66bd5a3c382d53e44f0ccd77e"},
{file = "Pillow-9.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:db6d9fac65bd08cea7f3540b899977c6dee9edad959fa4eaf305940d9cbd861c"},
{file = "Pillow-9.0.1-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f154d173286a5d1863637a7dcd8c3437bb557520b01bddb0be0258dcb72696b5"},
{file = "Pillow-9.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14d4b1341ac07ae07eb2cc682f459bec932a380c3b122f5540432d8977e64eae"},
{file = "Pillow-9.0.1-cp38-cp38-win32.whl", hash = "sha256:effb7749713d5317478bb3acb3f81d9d7c7f86726d41c1facca068a04cf5bb4c"},
{file = "Pillow-9.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:7f7609a718b177bf171ac93cea9fd2ddc0e03e84d8fa4e887bdfc39671d46b00"},
{file = "Pillow-9.0.1-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:80ca33961ced9c63358056bd08403ff866512038883e74f3a4bf88ad3eb66838"},
{file = "Pillow-9.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1c3c33ac69cf059bbb9d1a71eeaba76781b450bc307e2291f8a4764d779a6b28"},
{file = "Pillow-9.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:12875d118f21cf35604176872447cdb57b07126750a33748bac15e77f90f1f9c"},
{file = "Pillow-9.0.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:514ceac913076feefbeaf89771fd6febde78b0c4c1b23aaeab082c41c694e81b"},
{file = "Pillow-9.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3c5c79ab7dfce6d88f1ba639b77e77a17ea33a01b07b99840d6ed08031cb2a7"},
{file = "Pillow-9.0.1-cp39-cp39-win32.whl", hash = "sha256:718856856ba31f14f13ba885ff13874be7fefc53984d2832458f12c38205f7f7"},
{file = "Pillow-9.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:f25ed6e28ddf50de7e7ea99d7a976d6a9c415f03adcaac9c41ff6ff41b6d86ac"},
{file = "Pillow-9.0.1-pp37-pypy37_pp73-macosx_10_10_x86_64.whl", hash = "sha256:011233e0c42a4a7836498e98c1acf5e744c96a67dd5032a6f666cc1fb97eab97"},
{file = "Pillow-9.0.1-pp37-pypy37_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:253e8a302a96df6927310a9d44e6103055e8fb96a6822f8b7f514bb7ef77de56"},
{file = "Pillow-9.0.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6295f6763749b89c994fcb6d8a7f7ce03c3992e695f89f00b741b4580b199b7e"},
{file = "Pillow-9.0.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:a9f44cd7e162ac6191491d7249cceb02b8116b0f7e847ee33f739d7cb1ea1f70"},
{file = "Pillow-9.0.1.tar.gz", hash = "sha256:6c8bc8238a7dfdaf7a75f5ec5a663f4173f8c367e5a39f87e720495e1eed75fa"},
]
platformdirs = [
{file = "platformdirs-2.4.1-py3-none-any.whl", hash = "sha256:1d7385c7db91728b83efd0ca99a5afb296cab9d0ed8313a45ed8ba17967ecfca"},