Mirror of https://github.com/ynput/ayon-core.git (synced 2025-12-25 05:14:40 +01:00)

Commit 1f49d0ee72: [Automated] Merged develop into main
32 changed files with 866 additions and 363 deletions

@@ -6,7 +6,7 @@ from openpype.pipeline import load

 class FusionSetFrameRangeLoader(load.LoaderPlugin):
-    """Specific loader of Alembic for the avalon.animation family"""
+    """Set frame range excluding pre- and post-handles"""

     families = ["animation",
                 "camera",
@@ -40,7 +40,7 @@ class FusionSetFrameRangeLoader(load.LoaderPlugin):

 class FusionSetFrameRangeWithHandlesLoader(load.LoaderPlugin):
-    """Specific loader of Alembic for the avalon.animation family"""
+    """Set frame range including pre- and post-handles"""

     families = ["animation",
                 "camera",
@@ -144,6 +144,7 @@ class CollectFarmRender(openpype.lib.abstract_collect_render.
                 label=node.split("/")[1],
                 subset=subset_name,
                 asset=legacy_io.Session["AVALON_ASSET"],
+                task=task_name,
                 attachTo=False,
                 setMembers=[node],
                 publish=info[4],
@@ -6,7 +6,7 @@ from openpype.pipeline import load

 class SetFrameRangeLoader(load.LoaderPlugin):
-    """Set Houdini frame range"""
+    """Set frame range excluding pre- and post-handles"""

     families = [
         "animation",
@@ -44,7 +44,7 @@ class SetFrameRangeLoader(load.LoaderPlugin):

 class SetFrameRangeWithHandlesLoader(load.LoaderPlugin):
-    """Set Maya frame range including pre- and post-handles"""
+    """Set frame range including pre- and post-handles"""

     families = [
         "animation",
@@ -7,7 +7,7 @@ from openpype.hosts.houdini.api import pipeline

 class AbcLoader(load.LoaderPlugin):
-    """Specific loader of Alembic for the avalon.animation family"""
+    """Load Alembic"""

     families = ["model", "animation", "pointcache", "gpuCache"]
     label = "Load Alembic"
@@ -78,7 +78,7 @@ def transfer_non_default_values(src, dest, ignore=None):

 class CameraLoader(load.LoaderPlugin):
-    """Specific loader of Alembic for the avalon.animation family"""
+    """Load camera from an Alembic file"""

     families = ["camera"]
     label = "Load Camera (abc)"
@@ -42,9 +42,9 @@ def get_image_avalon_container():

 class ImageLoader(load.LoaderPlugin):
-    """Specific loader of Alembic for the avalon.animation family"""
+    """Load images into COP2"""

-    families = ["colorbleed.imagesequence"]
+    families = ["imagesequence"]
     label = "Load Image (COP2)"
     representations = ["*"]
     order = -10
@@ -9,7 +9,7 @@ from openpype.hosts.houdini.api import pipeline

 class VdbLoader(load.LoaderPlugin):
-    """Specific loader of Alembic for the avalon.animation family"""
+    """Load VDB"""

     families = ["vdbcache"]
     label = "Load VDB"
@@ -77,6 +77,7 @@ IMAGE_PREFIXES = {
     "arnold": "defaultRenderGlobals.imageFilePrefix",
     "renderman": "rmanGlobals.imageFileFormat",
     "redshift": "defaultRenderGlobals.imageFilePrefix",
+    "mayahardware2": "defaultRenderGlobals.imageFilePrefix"
 }

 RENDERMAN_IMAGE_DIR = "maya/<scene>/<layer>"
@@ -155,7 +156,8 @@ def get(layer, render_instance=None):
         "arnold": RenderProductsArnold,
         "vray": RenderProductsVray,
         "redshift": RenderProductsRedshift,
-        "renderman": RenderProductsRenderman
+        "renderman": RenderProductsRenderman,
+        "mayahardware2": RenderProductsMayaHardware
     }.get(renderer_name.lower(), None)
     if renderer is None:
         raise UnsupportedRendererException(
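The dispatch above is a plain dict lookup keyed by the lowercased renderer name, with an explicit failure for anything not in the table. A small standalone sketch of that pattern (the table values and the exception here are stand-ins, not the real OpenPype classes):

    class UnsupportedRendererException(Exception):
        pass

    RENDERER_PRODUCTS = {
        "arnold": "RenderProductsArnold",          # stand-in values
        "redshift": "RenderProductsRedshift",
        "mayahardware2": "RenderProductsMayaHardware",
    }

    def get_products(renderer_name):
        # Normalize case so "MayaHardware2" and "mayahardware2" resolve the same key.
        renderer = RENDERER_PRODUCTS.get(renderer_name.lower(), None)
        if renderer is None:
            raise UnsupportedRendererException(renderer_name)
        return renderer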
@@ -1124,6 +1126,67 @@ class RenderProductsRenderman(ARenderProducts):
         return new_files


+class RenderProductsMayaHardware(ARenderProducts):
+    """Expected files for MayaHardware renderer."""
+
+    renderer = "mayahardware2"
+
+    extensions = [
+        {"label": "JPEG", "index": 8, "extension": "jpg"},
+        {"label": "PNG", "index": 32, "extension": "png"},
+        {"label": "EXR(exr)", "index": 40, "extension": "exr"}
+    ]
+
+    def _get_extension(self, value):
+        result = None
+        if isinstance(value, int):
+            extensions = {
+                extension["index"]: extension["extension"]
+                for extension in self.extensions
+            }
+            try:
+                result = extensions[value]
+            except KeyError:
+                raise NotImplementedError(
+                    "Could not find extension for {}".format(value)
+                )
+
+        if isinstance(value, six.string_types):
+            extensions = {
+                extension["label"]: extension["extension"]
+                for extension in self.extensions
+            }
+            try:
+                result = extensions[value]
+            except KeyError:
+                raise NotImplementedError(
+                    "Could not find extension for {}".format(value)
+                )
+
+        if not result:
+            raise NotImplementedError(
+                "Could not find extension for {}".format(value)
+            )
+
+        return result
+
+    def get_render_products(self):
+        """Get all AOVs.
+
+        See Also:
+            :func:`ARenderProducts.get_render_products()`
+        """
+        ext = self._get_extension(
+            self._get_attr("defaultRenderGlobals.imageFormat")
+        )
+
+        products = []
+        for cam in self.get_renderable_cameras():
+            product = RenderProduct(productName="beauty", ext=ext, camera=cam)
+            products.append(product)
+
+        return products
+
+
 class AOVError(Exception):
     """Custom exception for determining AOVs."""
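The new _get_extension helper accepts either Maya's integer imageFormat code or a label string and maps it to a file extension, raising for unknown values. The same mapping logic in isolation, outside the class (a simplified sketch, not the plugin code itself):

    EXTENSIONS = [
        {"label": "JPEG", "index": 8, "extension": "jpg"},
        {"label": "PNG", "index": 32, "extension": "png"},
        {"label": "EXR(exr)", "index": 40, "extension": "exr"},
    ]

    def get_extension(value):
        # Pick the lookup table by input type: int -> imageFormat index, str -> label.
        by_index = {e["index"]: e["extension"] for e in EXTENSIONS}
        by_label = {e["label"]: e["extension"] for e in EXTENSIONS}
        table = by_index if isinstance(value, int) else by_label
        try:
            return table[value]
        except KeyError:
            raise NotImplementedError(
                "Could not find extension for {}".format(value))

    assert get_extension(32) == "png"
    assert get_extension("EXR(exr)") == "exr"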
@@ -77,7 +77,8 @@ class CreateRender(plugin.Creator):
         'vray': 'vraySettings.fileNamePrefix',
         'arnold': 'defaultRenderGlobals.imageFilePrefix',
         'renderman': 'rmanGlobals.imageFileFormat',
-        'redshift': 'defaultRenderGlobals.imageFilePrefix'
+        'redshift': 'defaultRenderGlobals.imageFilePrefix',
+        'mayahardware2': 'defaultRenderGlobals.imageFilePrefix',
     }

     _image_prefixes = {
@@ -87,7 +88,8 @@ class CreateRender(plugin.Creator):
         # this needs `imageOutputDir`
         # (<ws>/renders/maya/<scene>) set separately
         'renderman': '<layer>_<aov>.<f4>.<ext>',
-        'redshift': 'maya/<Scene>/<RenderLayer>/<RenderLayer>' # noqa
+        'redshift': 'maya/<Scene>/<RenderLayer>/<RenderLayer>', # noqa
+        'mayahardware2': 'maya/<Scene>/<RenderLayer>/<RenderLayer>', # noqa
     }

     _aov_chars = {
@@ -10,7 +10,7 @@ from openpype.hosts.maya.api.lib import (

 class SetFrameRangeLoader(load.LoaderPlugin):
-    """Specific loader of Alembic for the avalon.animation family"""
+    """Set frame range excluding pre- and post-handles"""

     families = ["animation",
                 "camera",
@@ -44,7 +44,7 @@ class SetFrameRangeLoader(load.LoaderPlugin):

 class SetFrameRangeWithHandlesLoader(load.LoaderPlugin):
-    """Specific loader of Alembic for the avalon.animation family"""
+    """Set frame range including pre- and post-handles"""

     families = ["animation",
                 "camera",
@@ -16,7 +16,7 @@ from openpype.hosts.maya.api.pipeline import containerise

 class AssProxyLoader(openpype.hosts.maya.api.plugin.ReferenceLoader):
-    """Load the Proxy"""
+    """Load Arnold Proxy as reference"""

     families = ["ass"]
     representations = ["ass"]
@@ -8,7 +8,7 @@ from openpype.api import get_project_settings

 class GpuCacheLoader(load.LoaderPlugin):
-    """Load model Alembic as gpuCache"""
+    """Load Alembic as gpuCache"""

     families = ["model"]
     representations = ["abc"]
@@ -12,7 +12,7 @@ from openpype.hosts.maya.api.lib import maintained_selection

 class ReferenceLoader(openpype.hosts.maya.api.plugin.ReferenceLoader):
-    """Load the model"""
+    """Reference file"""

     families = ["model",
                 "pointcache",
@@ -74,6 +74,7 @@ def _fix_duplicate_vvg_callbacks():

 class LoadVDBtoVRay(load.LoaderPlugin):
     """Load OpenVDB in a V-Ray Volume Grid"""

     families = ["vdbcache"]
     representations = ["vdb"]
@@ -326,8 +326,8 @@ class CollectMayaRender(pyblish.api.ContextPlugin):
             "byFrameStep": int(
                 self.get_render_attribute("byFrameStep",
                                           layer=layer_name)),
-            "renderer": self.get_render_attribute("currentRenderer",
-                                                  layer=layer_name),
+            "renderer": self.get_render_attribute(
+                "currentRenderer", layer=layer_name).lower(),
             # instance subset
             "family": "renderlayer",
             "families": ["renderlayer"],
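Lowercasing "currentRenderer" here matters because the prefix and render-product tables elsewhere in the collection pipeline are keyed by lowercase names ("redshift", "mayahardware2", and so on). A tiny illustration of the mismatch the change avoids (the table entry is only an example):

    prefixes = {"redshift": "defaultRenderGlobals.imageFilePrefix"}

    renderer_attr = "Redshift"                   # as Maya may report it
    print(prefixes.get(renderer_attr))           # None, the lookup misses
    print(prefixes.get(renderer_attr.lower()))   # matches the table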
@@ -12,7 +12,8 @@ ImagePrefixes = {
     'vray': 'vraySettings.fileNamePrefix',
     'arnold': 'defaultRenderGlobals.imageFilePrefix',
     'renderman': 'defaultRenderGlobals.imageFilePrefix',
-    'redshift': 'defaultRenderGlobals.imageFilePrefix'
+    'redshift': 'defaultRenderGlobals.imageFilePrefix',
+    'mayahardware2': 'defaultRenderGlobals.imageFilePrefix',
 }

@@ -50,15 +50,17 @@ class ValidateRenderSettings(pyblish.api.InstancePlugin):
         'vray': 'vraySettings.fileNamePrefix',
         'arnold': 'defaultRenderGlobals.imageFilePrefix',
         'renderman': 'rmanGlobals.imageFileFormat',
-        'redshift': 'defaultRenderGlobals.imageFilePrefix'
+        'redshift': 'defaultRenderGlobals.imageFilePrefix',
+        'mayahardware2': 'defaultRenderGlobals.imageFilePrefix',
     }

     ImagePrefixTokens = {
         'arnold': 'maya/<Scene>/<RenderLayer>/<RenderLayer>{aov_separator}<RenderPass>', # noqa
         'mentalray': 'maya/<Scene>/<RenderLayer>/<RenderLayer>{aov_separator}<RenderPass>', # noqa: E501
         'arnold': 'maya/<Scene>/<RenderLayer>/<RenderLayer>{aov_separator}<RenderPass>', # noqa: E501
         'redshift': 'maya/<Scene>/<RenderLayer>/<RenderLayer>',
         'vray': 'maya/<Scene>/<Layer>/<Layer>',
-        'renderman': '<layer>{aov_separator}<aov>.<f4>.<ext>' # noqa
+        'renderman': '<layer>{aov_separator}<aov>.<f4>.<ext>',
+        'mayahardware2': 'maya/<Scene>/<RenderLayer>/<RenderLayer>',
     }

     _aov_chars = {
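The token templates keep "{aov_separator}" as a literal placeholder so it can be substituted later with whatever separator the project configures, while the angle-bracket tokens are left for Maya to resolve. A small sketch of that expansion (the separator value is illustrative):

    template = "maya/<Scene>/<RenderLayer>/<RenderLayer>{aov_separator}<RenderPass>"
    aov_separator = "_"   # e.g. taken from project settings

    prefix = template.format(aov_separator=aov_separator)
    print(prefix)   # maya/<Scene>/<RenderLayer>/<RenderLayer>_<RenderPass>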
@@ -234,7 +236,7 @@ class ValidateRenderSettings(pyblish.api.InstancePlugin):
         # load validation definitions from settings
         validation_settings = (
             instance.context.data["project_settings"]["maya"]["publish"]["ValidateRenderSettings"].get( # noqa: E501
-                "{}_render_attributes".format(renderer))
+                "{}_render_attributes".format(renderer)) or []
         )

         # go through definitions and test if such node.attribute exists.
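Appending "or []" guards against the settings key resolving to None, so the later iteration over the validation definitions cannot fail when nothing is configured. A minimal illustration of the difference (the settings dict is a stand-in for the project settings structure):

    settings = {"redshift_render_attributes": None}

    broken = settings.get("redshift_render_attributes")          # None
    fixed = settings.get("redshift_render_attributes") or []     # []

    for attr in fixed:   # safe even when the setting is empty or missing
        print(attr)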
@@ -9,7 +9,7 @@ log = Logger().get_logger(__name__)

 class SetFrameRangeLoader(load.LoaderPlugin):
-    """Specific loader of Alembic for the avalon.animation family"""
+    """Set frame range excluding pre- and post-handles"""

     families = ["animation",
                 "camera",
@@ -43,7 +43,7 @@ class SetFrameRangeLoader(load.LoaderPlugin):

 class SetFrameRangeWithHandlesLoader(load.LoaderPlugin):
-    """Specific loader of Alembic for the avalon.animation family"""
+    """Set frame range including pre- and post-handles"""

     families = ["animation",
                 "camera",
@@ -29,6 +29,16 @@ class PSItem(object):
     color_code = attr.ib(default=None)  # color code of layer
     instance_id = attr.ib(default=None)

+    @property
+    def clean_name(self):
+        """Returns layer name without publish icon highlight
+
+        Returns:
+            (str)
+        """
+        return (self.name.replace(PhotoshopServerStub.PUBLISH_ICON, '')
+                .replace(PhotoshopServerStub.LOADED_ICON, ''))
+

 class PhotoshopServerStub:
     """
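clean_name strips the publish and loaded icon markers that the Photoshop integration prepends to layer names, so comparisons and renames can work on the plain name. A standalone sketch of the same idea with placeholder marker characters (the real values live on PhotoshopServerStub):

    PUBLISH_ICON = "\u2713"   # placeholder, not the actual constant
    LOADED_ICON = "\u25cf"    # placeholder, not the actual constant

    def clean_name(name):
        # Remove both markers wherever they appear in the layer name.
        return name.replace(PUBLISH_ICON, "").replace(LOADED_ICON, "")

    print(clean_name("\u2713beauty_layer"))   # -> "beauty_layer"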
@@ -5,6 +5,7 @@ import pyblish.api

 from openpype.lib import prepare_template_data
 from openpype.hosts.photoshop import api as photoshop
+from openpype.settings import get_project_settings


 class CollectColorCodedInstances(pyblish.api.ContextPlugin):
@@ -49,6 +50,12 @@ class CollectColorCodedInstances(pyblish.api.ContextPlugin):
         asset_name = context.data["asset"]
         task_name = context.data["task"]
         variant = context.data["variant"]
+        project_name = context.data["projectEntity"]["name"]
+
+        naming_conventions = get_project_settings(project_name).get(
+            "photoshop", {}).get(
+                "publish", {}).get(
+                    "ValidateNaming", {})

         stub = photoshop.stub()
         layers = stub.get_layers()
@@ -83,6 +90,9 @@ class CollectColorCodedInstances(pyblish.api.ContextPlugin):
             subset = resolved_subset_template.format(
                 **prepare_template_data(fill_pairs))

+            subset = self._clean_subset_name(stub, naming_conventions,
+                                             subset, layer)
+
             if subset in existing_subset_names:
                 self.log.info(
                     "Subset {} already created, skipping.".format(subset))
@@ -141,6 +151,7 @@ class CollectColorCodedInstances(pyblish.api.ContextPlugin):
         instance.data["task"] = task_name
         instance.data["subset"] = subset
+        instance.data["layer"] = layer
         instance.data["families"] = []

         return instance
@@ -186,3 +197,21 @@ class CollectColorCodedInstances(pyblish.api.ContextPlugin):
         self.log.debug("resolved_subset_template {}".format(
             resolved_subset_template))
         return family, resolved_subset_template
+
+    def _clean_subset_name(self, stub, naming_conventions, subset, layer):
+        """Cleans invalid characters from subset name and layer name."""
+        if re.search(naming_conventions["invalid_chars"], subset):
+            subset = re.sub(
+                naming_conventions["invalid_chars"],
+                naming_conventions["replace_char"],
+                subset
+            )
+            layer_name = re.sub(
+                naming_conventions["invalid_chars"],
+                naming_conventions["replace_char"],
+                layer.clean_name
+            )
+            layer.name = layer_name
+            stub.rename_layer(layer.id, layer_name)
+
+        return subset
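The helper substitutes characters matching the project's "invalid_chars" pattern in both the subset name and the layer's clean name, then renames the layer so the two stay in sync. The substitution step in isolation (the convention values below are examples, not the project defaults):

    import re

    naming_conventions = {"invalid_chars": r"[ \-]", "replace_char": "_"}

    def clean(value):
        # Only rewrite when the value actually contains an invalid character.
        if re.search(naming_conventions["invalid_chars"], value):
            value = re.sub(
                naming_conventions["invalid_chars"],
                naming_conventions["replace_char"],
                value,
            )
        return value

    print(clean("char hero-01"))   # -> "char_hero_01"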
@@ -42,7 +42,8 @@ class ValidateNamingRepair(pyblish.api.Action):

             layer_name = re.sub(invalid_chars,
                                 replace_char,
-                                current_layer_state.name)
+                                current_layer_state.clean_name)
+            layer_name = stub.PUBLISH_ICON + layer_name

             stub.rename_layer(current_layer_state.id, layer_name)
@@ -73,13 +74,17 @@ class ValidateNaming(pyblish.api.InstancePlugin):

     def process(self, instance):
         help_msg = ' Use Repair action (A) in Pyblish to fix it.'
         msg = "Name \"{}\" is not allowed.{}".format(instance.data["name"],
                                                      help_msg)

         formatting_data = {"msg": msg}
         if re.search(self.invalid_chars, instance.data["name"]):
             raise PublishXmlValidationError(self, msg,
                                             formatting_data=formatting_data)

         layer = instance.data.get("layer")
         if layer:
             msg = "Name \"{}\" is not allowed.{}".format(layer.clean_name,
                                                          help_msg)

             formatting_data = {"msg": msg}
             if re.search(self.invalid_chars, layer.clean_name):
                 raise PublishXmlValidationError(self, msg,
                                                 formatting_data=formatting_data
                                                 )

         msg = "Subset \"{}\" is not allowed.{}".format(instance.data["subset"],
                                                        help_msg)
@@ -1,5 +1,8 @@
+import os
+
 import unreal

+from openpype.api import Anatomy
 from openpype.hosts.unreal.api import pipeline

@@ -46,6 +49,15 @@ def start_rendering():
         if data["family"] == "render":
             inst_data.append(data)

+    try:
+        project = os.environ.get("AVALON_PROJECT")
+        anatomy = Anatomy(project)
+        root = anatomy.roots['renders']
+    except Exception:
+        raise Exception("Could not find render root in anatomy settings.")
+
+    render_dir = f"{root}/{project}"
+
     # subsystem = unreal.get_editor_subsystem(
     #     unreal.MoviePipelineQueueSubsystem)
     # queue = subsystem.get_queue()
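Render output is now rooted at the project's "renders" root from the OpenPype Anatomy settings instead of the Unreal project's Saved folder, and the collector below resolves the same root so submission and collection agree on the path. A condensed sketch of that shared resolution, assuming a "renders" root is configured (the output subfolder is a stand-in):

    import os
    from openpype.api import Anatomy

    project = os.environ.get("AVALON_PROJECT")
    root = Anatomy(project).roots["renders"]

    # Submission side writes here ...
    submit_dir = f"{root}/{project}/seq010_sh020"      # r.get('output') stand-in
    # ... and the collector later reads frames from the same location.
    collect_dir = f"{root}/{project}/seq010_sh020"     # s.get('output') stand-in
    assert submit_dir == collect_dir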
@@ -105,7 +117,7 @@ def start_rendering():
         settings.custom_end_frame = r.get("frame_range")[1]
         settings.use_custom_playback_range = True
         settings.file_name_format = "{sequence_name}.{frame_number}"
-        settings.output_directory.path += r.get('output')
+        settings.output_directory.path = f"{render_dir}/{r.get('output')}"

         renderPass = job.get_configuration().find_or_add_setting_by_class(
             unreal.MoviePipelineDeferredPassBase)
@ -134,7 +134,6 @@ class AnimationFBXLoader(plugin.Loader):
|
|||
Returns:
|
||||
list(str): list of container content
|
||||
"""
|
||||
|
||||
# Create directory for asset and avalon container
|
||||
hierarchy = context.get('asset').get('data').get('parents')
|
||||
root = "/Game/OpenPype"
|
||||
|
|
@ -149,11 +148,30 @@ class AnimationFBXLoader(plugin.Loader):
|
|||
asset_dir, container_name = tools.create_unique_asset_name(
|
||||
f"{root}/Animations/{asset}/{name}", suffix="")
|
||||
|
||||
ar = unreal.AssetRegistryHelpers.get_asset_registry()
|
||||
|
||||
filter = unreal.ARFilter(
|
||||
class_names=["World"],
|
||||
package_paths=[f"{root}/{hierarchy[0]}"],
|
||||
recursive_paths=False)
|
||||
levels = ar.get_assets(filter)
|
||||
master_level = levels[0].get_editor_property('object_path')
|
||||
|
||||
hierarchy_dir = root
|
||||
for h in hierarchy:
|
||||
hierarchy_dir = f"{hierarchy_dir}/{h}"
|
||||
hierarchy_dir = f"{hierarchy_dir}/{asset}"
|
||||
|
||||
filter = unreal.ARFilter(
|
||||
class_names=["World"],
|
||||
package_paths=[f"{hierarchy_dir}/"],
|
||||
recursive_paths=True)
|
||||
levels = ar.get_assets(filter)
|
||||
level = levels[0].get_editor_property('object_path')
|
||||
|
||||
unreal.EditorLevelLibrary.save_all_dirty_levels()
|
||||
unreal.EditorLevelLibrary.load_level(level)
|
||||
|
||||
container_name += suffix
|
||||
|
||||
EditorAssetLibrary.make_directory(asset_dir)
|
||||
|
|
@ -165,7 +183,7 @@ class AnimationFBXLoader(plugin.Loader):
|
|||
|
||||
instance_name = data.get("instance_name")
|
||||
|
||||
animation = self._process(asset_dir, container_name, instance_name)
|
||||
animation = self._process(asset_dir, asset_name, instance_name)
|
||||
|
||||
asset_content = EditorAssetLibrary.list_assets(
|
||||
hierarchy_dir, recursive=True, include_folder=False)
|
||||
|
|
@ -224,6 +242,9 @@ class AnimationFBXLoader(plugin.Loader):
|
|||
for a in imported_content:
|
||||
EditorAssetLibrary.save_asset(a)
|
||||
|
||||
unreal.EditorLevelLibrary.save_current_level()
|
||||
unreal.EditorLevelLibrary.load_level(master_level)
|
||||
|
||||
def update(self, container, representation):
|
||||
name = container["asset_name"]
|
||||
source_path = get_representation_path(representation)
|
||||
|
|
|
|||
|
|
@@ -1,6 +1,6 @@
 # -*- coding: utf-8 -*-
 """Load camera from FBX."""
-import os
+from pathlib import Path

 import unreal
 from unreal import EditorAssetLibrary
@ -268,68 +268,242 @@ class CameraLoader(plugin.Loader):
|
|||
return asset_content
|
||||
|
||||
def update(self, container, representation):
|
||||
path = container["namespace"]
|
||||
|
||||
ar = unreal.AssetRegistryHelpers.get_asset_registry()
|
||||
tools = unreal.AssetToolsHelpers().get_asset_tools()
|
||||
|
||||
asset_content = EditorAssetLibrary.list_assets(
|
||||
path, recursive=False, include_folder=False
|
||||
)
|
||||
asset_name = ""
|
||||
for a in asset_content:
|
||||
asset = ar.get_asset_by_object_path(a)
|
||||
if a.endswith("_CON"):
|
||||
loaded_asset = EditorAssetLibrary.load_asset(a)
|
||||
EditorAssetLibrary.set_metadata_tag(
|
||||
loaded_asset, "representation", str(representation["_id"])
|
||||
)
|
||||
EditorAssetLibrary.set_metadata_tag(
|
||||
loaded_asset, "parent", str(representation["parent"])
|
||||
)
|
||||
asset_name = EditorAssetLibrary.get_metadata_tag(
|
||||
loaded_asset, "asset_name"
|
||||
)
|
||||
elif asset.asset_class == "LevelSequence":
|
||||
EditorAssetLibrary.delete_asset(a)
|
||||
root = "/Game/OpenPype"
|
||||
|
||||
sequence = tools.create_asset(
|
||||
asset_name=asset_name,
|
||||
package_path=path,
|
||||
asset_class=unreal.LevelSequence,
|
||||
factory=unreal.LevelSequenceFactoryNew()
|
||||
asset_dir = container.get('namespace')
|
||||
|
||||
context = representation.get("context")
|
||||
|
||||
hierarchy = context.get('hierarchy').split("/")
|
||||
h_dir = f"{root}/{hierarchy[0]}"
|
||||
h_asset = hierarchy[0]
|
||||
master_level = f"{h_dir}/{h_asset}_map.{h_asset}_map"
|
||||
|
||||
EditorLevelLibrary.save_current_level()
|
||||
|
||||
filter = unreal.ARFilter(
|
||||
class_names=["LevelSequence"],
|
||||
package_paths=[asset_dir],
|
||||
recursive_paths=False)
|
||||
sequences = ar.get_assets(filter)
|
||||
filter = unreal.ARFilter(
|
||||
class_names=["World"],
|
||||
package_paths=[str(Path(asset_dir).parent.as_posix())],
|
||||
recursive_paths=True)
|
||||
maps = ar.get_assets(filter)
|
||||
|
||||
# There should be only one map in the list
|
||||
EditorLevelLibrary.load_level(maps[0].get_full_name())
|
||||
|
||||
level_sequence = sequences[0].get_asset()
|
||||
|
||||
display_rate = level_sequence.get_display_rate()
|
||||
playback_start = level_sequence.get_playback_start()
|
||||
playback_end = level_sequence.get_playback_end()
|
||||
|
||||
sequence_name = f"{container.get('asset')}_camera"
|
||||
|
||||
# Get the actors in the level sequence.
|
||||
objs = unreal.SequencerTools.get_bound_objects(
|
||||
unreal.EditorLevelLibrary.get_editor_world(),
|
||||
level_sequence,
|
||||
level_sequence.get_bindings(),
|
||||
unreal.SequencerScriptingRange(
|
||||
has_start_value=True,
|
||||
has_end_value=True,
|
||||
inclusive_start=level_sequence.get_playback_start(),
|
||||
exclusive_end=level_sequence.get_playback_end()
|
||||
)
|
||||
)
|
||||
|
||||
io_asset = legacy_io.Session["AVALON_ASSET"]
|
||||
asset_doc = legacy_io.find_one({
|
||||
"type": "asset",
|
||||
"name": io_asset
|
||||
})
|
||||
# Delete actors from the map
|
||||
for o in objs:
|
||||
if o.bound_objects[0].get_class().get_name() == "CineCameraActor":
|
||||
actor_path = o.bound_objects[0].get_path_name().split(":")[-1]
|
||||
actor = EditorLevelLibrary.get_actor_reference(actor_path)
|
||||
EditorLevelLibrary.destroy_actor(actor)
|
||||
|
||||
data = asset_doc.get("data")
|
||||
# Remove the Level Sequence from the parent.
|
||||
# We need to traverse the hierarchy from the master sequence to find
|
||||
# the level sequence.
|
||||
root = "/Game/OpenPype"
|
||||
namespace = container.get('namespace').replace(f"{root}/", "")
|
||||
ms_asset = namespace.split('/')[0]
|
||||
filter = unreal.ARFilter(
|
||||
class_names=["LevelSequence"],
|
||||
package_paths=[f"{root}/{ms_asset}"],
|
||||
recursive_paths=False)
|
||||
sequences = ar.get_assets(filter)
|
||||
master_sequence = sequences[0].get_asset()
|
||||
|
||||
if data:
|
||||
sequence.set_display_rate(unreal.FrameRate(data.get("fps"), 1.0))
|
||||
sequence.set_playback_start(data.get("frameStart"))
|
||||
sequence.set_playback_end(data.get("frameEnd"))
|
||||
sequences = [master_sequence]
|
||||
|
||||
parent = None
|
||||
sub_scene = None
|
||||
for s in sequences:
|
||||
tracks = s.get_master_tracks()
|
||||
subscene_track = None
|
||||
for t in tracks:
|
||||
if t.get_class() == unreal.MovieSceneSubTrack.static_class():
|
||||
subscene_track = t
|
||||
break
|
||||
if subscene_track:
|
||||
sections = subscene_track.get_sections()
|
||||
for ss in sections:
|
||||
if ss.get_sequence().get_name() == sequence_name:
|
||||
parent = s
|
||||
sub_scene = ss
|
||||
# subscene_track.remove_section(ss)
|
||||
break
|
||||
sequences.append(ss.get_sequence())
|
||||
# Update subscenes indexes.
|
||||
i = 0
|
||||
for ss in sections:
|
||||
ss.set_row_index(i)
|
||||
i += 1
|
||||
|
||||
if parent:
|
||||
break
|
||||
|
||||
assert parent, "Could not find the parent sequence"
|
||||
|
||||
EditorAssetLibrary.delete_asset(level_sequence.get_path_name())
|
||||
|
||||
settings = unreal.MovieSceneUserImportFBXSettings()
|
||||
settings.set_editor_property('reduce_keys', False)
|
||||
|
||||
tools = unreal.AssetToolsHelpers().get_asset_tools()
|
||||
new_sequence = tools.create_asset(
|
||||
asset_name=sequence_name,
|
||||
package_path=asset_dir,
|
||||
asset_class=unreal.LevelSequence,
|
||||
factory=unreal.LevelSequenceFactoryNew()
|
||||
)
|
||||
|
||||
new_sequence.set_display_rate(display_rate)
|
||||
new_sequence.set_playback_start(playback_start)
|
||||
new_sequence.set_playback_end(playback_end)
|
||||
|
||||
sub_scene.set_sequence(new_sequence)
|
||||
|
||||
unreal.SequencerTools.import_fbx(
|
||||
EditorLevelLibrary.get_editor_world(),
|
||||
sequence,
|
||||
sequence.get_bindings(),
|
||||
new_sequence,
|
||||
new_sequence.get_bindings(),
|
||||
settings,
|
||||
str(representation["data"]["path"])
|
||||
)
|
||||
|
||||
data = {
|
||||
"representation": str(representation["_id"]),
|
||||
"parent": str(representation["parent"])
|
||||
}
|
||||
unreal_pipeline.imprint(
|
||||
"{}/{}".format(asset_dir, container.get('container_name')), data)
|
||||
|
||||
EditorLevelLibrary.save_current_level()
|
||||
|
||||
asset_content = EditorAssetLibrary.list_assets(
|
||||
asset_dir, recursive=True, include_folder=False)
|
||||
|
||||
for a in asset_content:
|
||||
EditorAssetLibrary.save_asset(a)
|
||||
|
||||
EditorLevelLibrary.load_level(master_level)
|
||||
|
||||
def remove(self, container):
|
||||
path = container["namespace"]
|
||||
parent_path = os.path.dirname(path)
|
||||
path = Path(container.get("namespace"))
|
||||
parent_path = str(path.parent.as_posix())
|
||||
|
||||
EditorAssetLibrary.delete_directory(path)
|
||||
ar = unreal.AssetRegistryHelpers.get_asset_registry()
|
||||
filter = unreal.ARFilter(
|
||||
class_names=["LevelSequence"],
|
||||
package_paths=[f"{str(path.as_posix())}"],
|
||||
recursive_paths=False)
|
||||
sequences = ar.get_assets(filter)
|
||||
|
||||
if not sequences:
|
||||
raise Exception("Could not find sequence.")
|
||||
|
||||
world = ar.get_asset_by_object_path(
|
||||
EditorLevelLibrary.get_editor_world().get_path_name())
|
||||
|
||||
filter = unreal.ARFilter(
|
||||
class_names=["World"],
|
||||
package_paths=[f"{parent_path}"],
|
||||
recursive_paths=True)
|
||||
maps = ar.get_assets(filter)
|
||||
|
||||
# There should be only one map in the list
|
||||
if not maps:
|
||||
raise Exception("Could not find map.")
|
||||
|
||||
map = maps[0]
|
||||
|
||||
EditorLevelLibrary.save_all_dirty_levels()
|
||||
EditorLevelLibrary.load_level(map.get_full_name())
|
||||
|
||||
# Remove the camera from the level.
|
||||
actors = EditorLevelLibrary.get_all_level_actors()
|
||||
|
||||
for a in actors:
|
||||
if a.__class__ == unreal.CineCameraActor:
|
||||
EditorLevelLibrary.destroy_actor(a)
|
||||
|
||||
EditorLevelLibrary.save_all_dirty_levels()
|
||||
EditorLevelLibrary.load_level(world.get_full_name())
|
||||
|
||||
# There should be only one sequence in the path.
|
||||
sequence_name = sequences[0].asset_name
|
||||
|
||||
# Remove the Level Sequence from the parent.
|
||||
# We need to traverse the hierarchy from the master sequence to find
|
||||
# the level sequence.
|
||||
root = "/Game/OpenPype"
|
||||
namespace = container.get('namespace').replace(f"{root}/", "")
|
||||
ms_asset = namespace.split('/')[0]
|
||||
filter = unreal.ARFilter(
|
||||
class_names=["LevelSequence"],
|
||||
package_paths=[f"{root}/{ms_asset}"],
|
||||
recursive_paths=False)
|
||||
sequences = ar.get_assets(filter)
|
||||
master_sequence = sequences[0].get_asset()
|
||||
|
||||
sequences = [master_sequence]
|
||||
|
||||
parent = None
|
||||
for s in sequences:
|
||||
tracks = s.get_master_tracks()
|
||||
subscene_track = None
|
||||
for t in tracks:
|
||||
if t.get_class() == unreal.MovieSceneSubTrack.static_class():
|
||||
subscene_track = t
|
||||
break
|
||||
if subscene_track:
|
||||
sections = subscene_track.get_sections()
|
||||
for ss in sections:
|
||||
if ss.get_sequence().get_name() == sequence_name:
|
||||
parent = s
|
||||
subscene_track.remove_section(ss)
|
||||
break
|
||||
sequences.append(ss.get_sequence())
|
||||
# Update subscenes indexes.
|
||||
i = 0
|
||||
for ss in sections:
|
||||
ss.set_row_index(i)
|
||||
i += 1
|
||||
|
||||
if parent:
|
||||
break
|
||||
|
||||
assert parent, "Could not find the parent sequence"
|
||||
|
||||
EditorAssetLibrary.delete_directory(str(path.as_posix()))
|
||||
|
||||
# Check if there isn't any more assets in the parent folder, and
|
||||
# delete it if not.
|
||||
asset_content = EditorAssetLibrary.list_assets(
|
||||
parent_path, recursive=False, include_folder=True
|
||||
)
|
||||
|
|
|
|||
|
|
@@ -208,7 +208,14 @@ class LayoutLoader(plugin.Loader):

                 actors.append(actor)

-                binding = sequence.add_possessable(actor)
+                binding = None
+                for p in sequence.get_possessables():
+                    if p.get_name() == actor.get_name():
+                        binding = p
+                        break
+
+                if not binding:
+                    binding = sequence.add_possessable(actor)

                 bindings.append(binding)
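Reloading a layout into an existing sequence would previously add a duplicate possessable for every actor; the loop now reuses a binding whose name matches the actor and only creates one when none exists. The same find-or-add pattern in plain Python, with stand-in objects instead of the Unreal sequencer API:

    def find_or_add_binding(bindings, actor_name, add_binding):
        # Reuse an existing binding with the actor's name, if any.
        for binding in bindings:
            if binding["name"] == actor_name:
                return binding
        # Otherwise create a new one through the provided factory.
        new_binding = add_binding(actor_name)
        bindings.append(new_binding)
        return new_binding

    bindings = [{"name": "hero_rig"}]
    print(find_or_add_binding(bindings, "hero_rig", lambda n: {"name": n}))   # reused
    print(find_or_add_binding(bindings, "prop_box", lambda n: {"name": n}))   # added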
@ -299,15 +306,101 @@ class LayoutLoader(plugin.Loader):
|
|||
# Add animation to the sequencer
|
||||
bindings = bindings_dict.get(instance_name)
|
||||
|
||||
ar = unreal.AssetRegistryHelpers.get_asset_registry()
|
||||
|
||||
for binding in bindings:
|
||||
binding.add_track(unreal.MovieSceneSkeletalAnimationTrack)
|
||||
for track in binding.get_tracks():
|
||||
tracks = binding.get_tracks()
|
||||
track = None
|
||||
if not tracks:
|
||||
track = binding.add_track(
|
||||
unreal.MovieSceneSkeletalAnimationTrack)
|
||||
else:
|
||||
track = tracks[0]
|
||||
|
||||
sections = track.get_sections()
|
||||
section = None
|
||||
if not sections:
|
||||
section = track.add_section()
|
||||
section.set_range(
|
||||
sequence.get_playback_start(),
|
||||
sequence.get_playback_end())
|
||||
else:
|
||||
section = sections[0]
|
||||
|
||||
sec_params = section.get_editor_property('params')
|
||||
sec_params.set_editor_property('animation', animation)
|
||||
curr_anim = sec_params.get_editor_property('animation')
|
||||
|
||||
if curr_anim:
|
||||
# Checks if the animation path has a container.
|
||||
# If it does, it means that the animation is already
|
||||
# in the sequencer.
|
||||
anim_path = str(Path(
|
||||
curr_anim.get_path_name()).parent
|
||||
).replace('\\', '/')
|
||||
|
||||
filter = unreal.ARFilter(
|
||||
class_names=["AssetContainer"],
|
||||
package_paths=[anim_path],
|
||||
recursive_paths=False)
|
||||
containers = ar.get_assets(filter)
|
||||
|
||||
if len(containers) > 0:
|
||||
return
|
||||
|
||||
section.set_range(
|
||||
sequence.get_playback_start(),
|
||||
sequence.get_playback_end())
|
||||
sec_params = section.get_editor_property('params')
|
||||
sec_params.set_editor_property('animation', animation)
|
||||
|
||||
def _generate_sequence(self, h, h_dir):
|
||||
tools = unreal.AssetToolsHelpers().get_asset_tools()
|
||||
|
||||
sequence = tools.create_asset(
|
||||
asset_name=h,
|
||||
package_path=h_dir,
|
||||
asset_class=unreal.LevelSequence,
|
||||
factory=unreal.LevelSequenceFactoryNew()
|
||||
)
|
||||
|
||||
asset_data = legacy_io.find_one({
|
||||
"type": "asset",
|
||||
"name": h_dir.split('/')[-1]
|
||||
})
|
||||
|
||||
id = asset_data.get('_id')
|
||||
|
||||
start_frames = []
|
||||
end_frames = []
|
||||
|
||||
elements = list(
|
||||
legacy_io.find({"type": "asset", "data.visualParent": id}))
|
||||
for e in elements:
|
||||
start_frames.append(e.get('data').get('clipIn'))
|
||||
end_frames.append(e.get('data').get('clipOut'))
|
||||
|
||||
elements.extend(legacy_io.find({
|
||||
"type": "asset",
|
||||
"data.visualParent": e.get('_id')
|
||||
}))
|
||||
|
||||
min_frame = min(start_frames)
|
||||
max_frame = max(end_frames)
|
||||
|
||||
sequence.set_display_rate(
|
||||
unreal.FrameRate(asset_data.get('data').get("fps"), 1.0))
|
||||
sequence.set_playback_start(min_frame)
|
||||
sequence.set_playback_end(max_frame)
|
||||
|
||||
tracks = sequence.get_master_tracks()
|
||||
track = None
|
||||
for t in tracks:
|
||||
if (t.get_class() ==
|
||||
unreal.MovieSceneCameraCutTrack.static_class()):
|
||||
track = t
|
||||
break
|
||||
if not track:
|
||||
track = sequence.add_master_track(
|
||||
unreal.MovieSceneCameraCutTrack)
|
||||
|
||||
return sequence, (min_frame, max_frame)
|
||||
|
||||
def _process(self, lib_path, asset_dir, sequence, loaded=None):
|
||||
ar = unreal.AssetRegistryHelpers.get_asset_registry()
|
||||
|
|
@ -326,6 +419,8 @@ class LayoutLoader(plugin.Loader):
|
|||
actors_dict = {}
|
||||
bindings_dict = {}
|
||||
|
||||
loaded_assets = []
|
||||
|
||||
for element in data:
|
||||
reference = None
|
||||
if element.get('reference_fbx'):
|
||||
|
|
@ -360,7 +455,7 @@ class LayoutLoader(plugin.Loader):
|
|||
continue
|
||||
|
||||
options = {
|
||||
"asset_dir": asset_dir
|
||||
# "asset_dir": asset_dir
|
||||
}
|
||||
|
||||
assets = load_container(
|
||||
|
|
@ -370,6 +465,17 @@ class LayoutLoader(plugin.Loader):
|
|||
options=options
|
||||
)
|
||||
|
||||
container = None
|
||||
|
||||
for asset in assets:
|
||||
obj = ar.get_asset_by_object_path(asset).get_asset()
|
||||
if obj.get_class().get_name() == 'AssetContainer':
|
||||
container = obj
|
||||
if obj.get_class().get_name() == 'Skeleton':
|
||||
skeleton = obj
|
||||
|
||||
loaded_assets.append(container.get_path_name())
|
||||
|
||||
instances = [
|
||||
item for item in data
|
||||
if (item.get('reference_fbx') == reference or
|
||||
|
|
@ -390,15 +496,6 @@ class LayoutLoader(plugin.Loader):
|
|||
actors_dict[inst] = actors
|
||||
bindings_dict[inst] = bindings
|
||||
|
||||
if family == 'rig':
|
||||
# Finds skeleton among the imported assets
|
||||
for asset in assets:
|
||||
obj = ar.get_asset_by_object_path(asset).get_asset()
|
||||
if obj.get_class().get_name() == 'Skeleton':
|
||||
skeleton = obj
|
||||
if skeleton:
|
||||
break
|
||||
|
||||
if skeleton:
|
||||
skeleton_dict[reference] = skeleton
|
||||
else:
|
||||
|
|
@ -411,6 +508,8 @@ class LayoutLoader(plugin.Loader):
|
|||
asset_dir, path, instance_name, skeleton, actors_dict,
|
||||
animation_file, bindings_dict, sequence)
|
||||
|
||||
return loaded_assets
|
||||
|
||||
@staticmethod
|
||||
def _remove_family(assets, components, class_name, prop_name):
|
||||
ar = unreal.AssetRegistryHelpers.get_asset_registry()
|
||||
|
|
@ -478,10 +577,10 @@ class LayoutLoader(plugin.Loader):
|
|||
hierarchy = context.get('asset').get('data').get('parents')
|
||||
root = self.ASSET_ROOT
|
||||
hierarchy_dir = root
|
||||
hierarchy_list = []
|
||||
hierarchy_dir_list = []
|
||||
for h in hierarchy:
|
||||
hierarchy_dir = f"{hierarchy_dir}/{h}"
|
||||
hierarchy_list.append(hierarchy_dir)
|
||||
hierarchy_dir_list.append(hierarchy_dir)
|
||||
asset = context.get('asset').get('name')
|
||||
suffix = "_CON"
|
||||
if asset:
|
||||
|
|
@ -499,43 +598,31 @@ class LayoutLoader(plugin.Loader):
|
|||
|
||||
# Create map for the shot, and create hierarchy of map. If the maps
|
||||
# already exist, we will use them.
|
||||
maps = []
|
||||
for h in hierarchy_list:
|
||||
a = h.split('/')[-1]
|
||||
map = f"{h}/{a}_map.{a}_map"
|
||||
new = False
|
||||
|
||||
if not EditorAssetLibrary.does_asset_exist(map):
|
||||
EditorLevelLibrary.new_level(f"{h}/{a}_map")
|
||||
new = True
|
||||
|
||||
maps.append({"map": map, "new": new})
|
||||
h_dir = hierarchy_dir_list[0]
|
||||
h_asset = hierarchy[0]
|
||||
master_level = f"{h_dir}/{h_asset}_map.{h_asset}_map"
|
||||
if not EditorAssetLibrary.does_asset_exist(master_level):
|
||||
EditorLevelLibrary.new_level(f"{h_dir}/{h_asset}_map")
|
||||
|
||||
level = f"{asset_dir}/{asset}_map.{asset}_map"
|
||||
EditorLevelLibrary.new_level(f"{asset_dir}/{asset}_map")
|
||||
maps.append(
|
||||
{"map": f"{asset_dir}/{asset}_map.{asset}_map", "new": True})
|
||||
|
||||
for i in range(0, len(maps) - 1):
|
||||
for j in range(i + 1, len(maps)):
|
||||
if maps[j].get('new'):
|
||||
EditorLevelLibrary.load_level(maps[i].get('map'))
|
||||
EditorLevelUtils.add_level_to_world(
|
||||
EditorLevelLibrary.get_editor_world(),
|
||||
maps[j].get('map'),
|
||||
unreal.LevelStreamingDynamic
|
||||
)
|
||||
EditorLevelLibrary.save_all_dirty_levels()
|
||||
|
||||
EditorLevelLibrary.load_level(maps[-1].get('map'))
|
||||
EditorLevelLibrary.load_level(master_level)
|
||||
EditorLevelUtils.add_level_to_world(
|
||||
EditorLevelLibrary.get_editor_world(),
|
||||
level,
|
||||
unreal.LevelStreamingDynamic
|
||||
)
|
||||
EditorLevelLibrary.save_all_dirty_levels()
|
||||
EditorLevelLibrary.load_level(level)
|
||||
|
||||
# Get all the sequences in the hierarchy. It will create them, if
|
||||
# they don't exist.
|
||||
sequences = []
|
||||
frame_ranges = []
|
||||
i = 0
|
||||
for h in hierarchy_list:
|
||||
for (h_dir, h) in zip(hierarchy_dir_list, hierarchy):
|
||||
root_content = EditorAssetLibrary.list_assets(
|
||||
h, recursive=False, include_folder=False)
|
||||
h_dir, recursive=False, include_folder=False)
|
||||
|
||||
existing_sequences = [
|
||||
EditorAssetLibrary.find_asset_data(asset)
|
||||
|
|
@ -545,55 +632,10 @@ class LayoutLoader(plugin.Loader):
|
|||
]
|
||||
|
||||
if not existing_sequences:
|
||||
sequence = tools.create_asset(
|
||||
asset_name=hierarchy[i],
|
||||
package_path=h,
|
||||
asset_class=unreal.LevelSequence,
|
||||
factory=unreal.LevelSequenceFactoryNew()
|
||||
)
|
||||
|
||||
asset_data = legacy_io.find_one({
|
||||
"type": "asset",
|
||||
"name": h.split('/')[-1]
|
||||
})
|
||||
|
||||
id = asset_data.get('_id')
|
||||
|
||||
start_frames = []
|
||||
end_frames = []
|
||||
|
||||
elements = list(
|
||||
legacy_io.find({"type": "asset", "data.visualParent": id}))
|
||||
for e in elements:
|
||||
start_frames.append(e.get('data').get('clipIn'))
|
||||
end_frames.append(e.get('data').get('clipOut'))
|
||||
|
||||
elements.extend(legacy_io.find({
|
||||
"type": "asset",
|
||||
"data.visualParent": e.get('_id')
|
||||
}))
|
||||
|
||||
min_frame = min(start_frames)
|
||||
max_frame = max(end_frames)
|
||||
|
||||
sequence.set_display_rate(
|
||||
unreal.FrameRate(asset_data.get('data').get("fps"), 1.0))
|
||||
sequence.set_playback_start(min_frame)
|
||||
sequence.set_playback_end(max_frame)
|
||||
sequence, frame_range = self._generate_sequence(h, h_dir)
|
||||
|
||||
sequences.append(sequence)
|
||||
frame_ranges.append((min_frame, max_frame))
|
||||
|
||||
tracks = sequence.get_master_tracks()
|
||||
track = None
|
||||
for t in tracks:
|
||||
if (t.get_class() ==
|
||||
unreal.MovieSceneCameraCutTrack.static_class()):
|
||||
track = t
|
||||
break
|
||||
if not track:
|
||||
track = sequence.add_master_track(
|
||||
unreal.MovieSceneCameraCutTrack)
|
||||
frame_ranges.append(frame_range)
|
||||
else:
|
||||
for e in existing_sequences:
|
||||
sequences.append(e.get_asset())
|
||||
|
|
@ -601,8 +643,6 @@ class LayoutLoader(plugin.Loader):
|
|||
e.get_asset().get_playback_start(),
|
||||
e.get_asset().get_playback_end()))
|
||||
|
||||
i += 1
|
||||
|
||||
shot = tools.create_asset(
|
||||
asset_name=asset,
|
||||
package_path=asset_dir,
|
||||
|
|
@ -612,15 +652,11 @@ class LayoutLoader(plugin.Loader):
|
|||
|
||||
# sequences and frame_ranges have the same length
|
||||
for i in range(0, len(sequences) - 1):
|
||||
maps_to_add = []
|
||||
for j in range(i + 1, len(maps)):
|
||||
maps_to_add.append(maps[j].get('map'))
|
||||
|
||||
self._set_sequence_hierarchy(
|
||||
sequences[i], sequences[i + 1],
|
||||
frame_ranges[i][1],
|
||||
frame_ranges[i + 1][0], frame_ranges[i + 1][1],
|
||||
maps_to_add)
|
||||
[level])
|
||||
|
||||
data = self._get_data(asset)
|
||||
shot.set_display_rate(
|
||||
|
|
@ -631,11 +667,11 @@ class LayoutLoader(plugin.Loader):
|
|||
sequences[-1], shot,
|
||||
frame_ranges[-1][1],
|
||||
data.get('clipIn'), data.get('clipOut'),
|
||||
[maps[-1].get('map')])
|
||||
[level])
|
||||
|
||||
EditorLevelLibrary.load_level(maps[-1].get('map'))
|
||||
EditorLevelLibrary.load_level(level)
|
||||
|
||||
self._process(self.fname, asset_dir, shot)
|
||||
loaded_assets = self._process(self.fname, asset_dir, shot)
|
||||
|
||||
for s in sequences:
|
||||
EditorAssetLibrary.save_asset(s.get_full_name())
|
||||
|
|
@ -656,7 +692,8 @@ class LayoutLoader(plugin.Loader):
|
|||
"loader": str(self.__class__.__name__),
|
||||
"representation": context["representation"]["_id"],
|
||||
"parent": context["representation"]["parent"],
|
||||
"family": context["representation"]["context"]["family"]
|
||||
"family": context["representation"]["context"]["family"],
|
||||
"loaded_assets": loaded_assets
|
||||
}
|
||||
unreal_pipeline.imprint(
|
||||
"{}/{}".format(asset_dir, container_name), data)
|
||||
|
|
@ -667,148 +704,192 @@ class LayoutLoader(plugin.Loader):
|
|||
for a in asset_content:
|
||||
EditorAssetLibrary.save_asset(a)
|
||||
|
||||
EditorLevelLibrary.load_level(maps[0].get('map'))
|
||||
EditorLevelLibrary.load_level(master_level)
|
||||
|
||||
return asset_content
|
||||
|
||||
def update(self, container, representation):
|
||||
ar = unreal.AssetRegistryHelpers.get_asset_registry()
|
||||
|
||||
root = "/Game/OpenPype"
|
||||
|
||||
asset_dir = container.get('namespace')
|
||||
|
||||
context = representation.get("context")
|
||||
|
||||
hierarchy = context.get('hierarchy').split("/")
|
||||
h_dir = f"{root}/{hierarchy[0]}"
|
||||
h_asset = hierarchy[0]
|
||||
master_level = f"{h_dir}/{h_asset}_map.{h_asset}_map"
|
||||
|
||||
# # Create a temporary level to delete the layout level.
|
||||
# EditorLevelLibrary.save_all_dirty_levels()
|
||||
# EditorAssetLibrary.make_directory(f"{root}/tmp")
|
||||
# tmp_level = f"{root}/tmp/temp_map"
|
||||
# if not EditorAssetLibrary.does_asset_exist(f"{tmp_level}.temp_map"):
|
||||
# EditorLevelLibrary.new_level(tmp_level)
|
||||
# else:
|
||||
# EditorLevelLibrary.load_level(tmp_level)
|
||||
|
||||
# Get layout level
|
||||
filter = unreal.ARFilter(
|
||||
class_names=["World"],
|
||||
package_paths=[asset_dir],
|
||||
recursive_paths=False)
|
||||
levels = ar.get_assets(filter)
|
||||
filter = unreal.ARFilter(
|
||||
class_names=["LevelSequence"],
|
||||
package_paths=[asset_dir],
|
||||
recursive_paths=False)
|
||||
sequences = ar.get_assets(filter)
|
||||
|
||||
layout_level = levels[0].get_editor_property('object_path')
|
||||
|
||||
EditorLevelLibrary.save_all_dirty_levels()
|
||||
EditorLevelLibrary.load_level(layout_level)
|
||||
|
||||
# Delete all the actors in the level
|
||||
actors = unreal.EditorLevelLibrary.get_all_level_actors()
|
||||
for actor in actors:
|
||||
unreal.EditorLevelLibrary.destroy_actor(actor)
|
||||
|
||||
EditorLevelLibrary.save_current_level()
|
||||
|
||||
EditorAssetLibrary.delete_directory(f"{asset_dir}/animations/")
|
||||
|
||||
source_path = get_representation_path(representation)
|
||||
destination_path = container["namespace"]
|
||||
lib_path = Path(get_representation_path(representation))
|
||||
|
||||
self._remove_actors(destination_path)
|
||||
loaded_assets = self._process(
|
||||
source_path, asset_dir, sequences[0].get_asset())
|
||||
|
||||
# Delete old animations
|
||||
anim_path = f"{destination_path}/animations/"
|
||||
EditorAssetLibrary.delete_directory(anim_path)
|
||||
|
||||
with open(source_path, "r") as fp:
|
||||
data = json.load(fp)
|
||||
|
||||
references = [e.get('reference_fbx') for e in data]
|
||||
asset_containers = self._get_asset_containers(destination_path)
|
||||
loaded = []
|
||||
|
||||
# Delete all the assets imported with the previous version of the
|
||||
# layout, if they're not in the new layout.
|
||||
for asset_container in asset_containers:
|
||||
if asset_container.get_editor_property(
|
||||
'asset_name') == container["objectName"]:
|
||||
continue
|
||||
ref = EditorAssetLibrary.get_metadata_tag(
|
||||
asset_container.get_asset(), 'representation')
|
||||
ppath = asset_container.get_editor_property('package_path')
|
||||
|
||||
if ref not in references:
|
||||
# If the asset is not in the new layout, delete it.
|
||||
# Also check if the parent directory is empty, and delete that
|
||||
# as well, if it is.
|
||||
EditorAssetLibrary.delete_directory(ppath)
|
||||
|
||||
parent = os.path.dirname(str(ppath))
|
||||
parent_content = EditorAssetLibrary.list_assets(
|
||||
parent, recursive=False, include_folder=True
|
||||
)
|
||||
|
||||
if len(parent_content) == 0:
|
||||
EditorAssetLibrary.delete_directory(parent)
|
||||
else:
|
||||
# If the asset is in the new layout, search the instances in
|
||||
# the JSON file, and create actors for them.
|
||||
|
||||
actors_dict = {}
|
||||
skeleton_dict = {}
|
||||
|
||||
for element in data:
|
||||
reference = element.get('reference_fbx')
|
||||
instance_name = element.get('instance_name')
|
||||
|
||||
skeleton = None
|
||||
|
||||
if reference == ref and ref not in loaded:
|
||||
loaded.append(ref)
|
||||
|
||||
family = element.get('family')
|
||||
|
||||
assets = EditorAssetLibrary.list_assets(
|
||||
ppath, recursive=True, include_folder=False)
|
||||
|
||||
instances = [
|
||||
item for item in data
|
||||
if item.get('reference_fbx') == reference]
|
||||
|
||||
for instance in instances:
|
||||
transform = instance.get('transform')
|
||||
inst = instance.get('instance_name')
|
||||
|
||||
actors = []
|
||||
|
||||
if family == 'model':
|
||||
actors = self._process_family(
|
||||
assets, 'StaticMesh', transform, inst)
|
||||
elif family == 'rig':
|
||||
actors = self._process_family(
|
||||
assets, 'SkeletalMesh', transform, inst)
|
||||
actors_dict[inst] = actors
|
||||
|
||||
if family == 'rig':
|
||||
# Finds skeleton among the imported assets
|
||||
for asset in assets:
|
||||
obj = ar.get_asset_by_object_path(
|
||||
asset).get_asset()
|
||||
if obj.get_class().get_name() == 'Skeleton':
|
||||
skeleton = obj
|
||||
if skeleton:
|
||||
break
|
||||
|
||||
if skeleton:
|
||||
skeleton_dict[reference] = skeleton
|
||||
else:
|
||||
skeleton = skeleton_dict.get(reference)
|
||||
|
||||
animation_file = element.get('animation')
|
||||
|
||||
if animation_file and skeleton:
|
||||
self._import_animation(
|
||||
destination_path, lib_path,
|
||||
instance_name, skeleton,
|
||||
actors_dict, animation_file)
|
||||
|
||||
self._process(source_path, destination_path, loaded)
|
||||
|
||||
container_path = "{}/{}".format(container["namespace"],
|
||||
container["objectName"])
|
||||
# update metadata
|
||||
data = {
|
||||
"representation": str(representation["_id"]),
|
||||
"parent": str(representation["parent"]),
|
||||
"loaded_assets": loaded_assets
|
||||
}
|
||||
unreal_pipeline.imprint(
|
||||
container_path,
|
||||
{
|
||||
"representation": str(representation["_id"]),
|
||||
"parent": str(representation["parent"])
|
||||
})
|
||||
"{}/{}".format(asset_dir, container.get('container_name')), data)
|
||||
|
||||
EditorLevelLibrary.save_current_level()
|
||||
|
||||
asset_content = EditorAssetLibrary.list_assets(
|
||||
destination_path, recursive=True, include_folder=False)
|
||||
asset_dir, recursive=True, include_folder=False)
|
||||
|
||||
for a in asset_content:
|
||||
EditorAssetLibrary.save_asset(a)
|
||||
|
||||
EditorLevelLibrary.load_level(master_level)
|
||||
|
||||
def remove(self, container):
|
||||
"""
|
||||
First, destroy all actors of the assets to be removed. Then, deletes
|
||||
the asset's directory.
|
||||
Delete the layout. First, check if the assets loaded with the layout
|
||||
are used by other layouts. If not, delete the assets.
|
||||
"""
|
||||
path = container["namespace"]
|
||||
parent_path = os.path.dirname(path)
|
||||
path = Path(container.get("namespace"))
|
||||
|
||||
self._remove_actors(path)
|
||||
containers = unreal_pipeline.ls()
|
||||
layout_containers = [
|
||||
c for c in containers
|
||||
if (c.get('asset_name') != container.get('asset_name') and
|
||||
c.get('family') == "layout")]
|
||||
|
||||
EditorAssetLibrary.delete_directory(path)
|
||||
# Check if the assets have been loaded by other layouts, and deletes
|
||||
# them if they haven't.
|
||||
for asset in container.get('loaded_assets'):
|
||||
layouts = [
|
||||
lc for lc in layout_containers
|
||||
if asset in lc.get('loaded_assets')]
|
||||
|
||||
if len(layouts) == 0:
|
||||
EditorAssetLibrary.delete_directory(str(Path(asset).parent))
|
||||
|
||||
# Remove the Level Sequence from the parent.
|
||||
# We need to traverse the hierarchy from the master sequence to find
|
||||
# the level sequence.
|
||||
root = "/Game/OpenPype"
|
||||
namespace = container.get('namespace').replace(f"{root}/", "")
|
||||
ms_asset = namespace.split('/')[0]
|
||||
ar = unreal.AssetRegistryHelpers.get_asset_registry()
|
||||
filter = unreal.ARFilter(
|
||||
class_names=["LevelSequence"],
|
||||
package_paths=[f"{root}/{ms_asset}"],
|
||||
recursive_paths=False)
|
||||
sequences = ar.get_assets(filter)
|
||||
master_sequence = sequences[0].get_asset()
|
||||
filter = unreal.ARFilter(
|
||||
class_names=["World"],
|
||||
package_paths=[f"{root}/{ms_asset}"],
|
||||
recursive_paths=False)
|
||||
levels = ar.get_assets(filter)
|
||||
master_level = levels[0].get_editor_property('object_path')
|
||||
|
||||
sequences = [master_sequence]
|
||||
|
||||
parent = None
|
||||
for s in sequences:
|
||||
tracks = s.get_master_tracks()
|
||||
subscene_track = None
|
||||
visibility_track = None
|
||||
for t in tracks:
|
||||
if t.get_class() == unreal.MovieSceneSubTrack.static_class():
|
||||
subscene_track = t
|
||||
if (t.get_class() ==
|
||||
unreal.MovieSceneLevelVisibilityTrack.static_class()):
|
||||
visibility_track = t
|
||||
if subscene_track:
|
||||
sections = subscene_track.get_sections()
|
||||
for ss in sections:
|
||||
if ss.get_sequence().get_name() == container.get('asset'):
|
||||
parent = s
|
||||
subscene_track.remove_section(ss)
|
||||
break
|
||||
sequences.append(ss.get_sequence())
|
||||
# Update subscenes indexes.
|
||||
i = 0
|
||||
for ss in sections:
|
||||
ss.set_row_index(i)
|
||||
i += 1
|
||||
|
||||
if visibility_track:
|
||||
sections = visibility_track.get_sections()
|
||||
for ss in sections:
|
||||
if (unreal.Name(f"{container.get('asset')}_map")
|
||||
in ss.get_level_names()):
|
||||
visibility_track.remove_section(ss)
|
||||
# Update visibility sections indexes.
|
||||
i = -1
|
||||
prev_name = []
|
||||
for ss in sections:
|
||||
if prev_name != ss.get_level_names():
|
||||
i += 1
|
||||
ss.set_row_index(i)
|
||||
prev_name = ss.get_level_names()
|
||||
if parent:
|
||||
break
|
||||
|
||||
assert parent, "Could not find the parent sequence"
|
||||
|
||||
# Create a temporary level to delete the layout level.
|
||||
EditorLevelLibrary.save_all_dirty_levels()
|
||||
EditorAssetLibrary.make_directory(f"{root}/tmp")
|
||||
tmp_level = f"{root}/tmp/temp_map"
|
||||
if not EditorAssetLibrary.does_asset_exist(f"{tmp_level}.temp_map"):
|
||||
EditorLevelLibrary.new_level(tmp_level)
|
||||
else:
|
||||
EditorLevelLibrary.load_level(tmp_level)
|
||||
|
||||
# Delete the layout directory.
|
||||
EditorAssetLibrary.delete_directory(str(path))
|
||||
|
||||
EditorLevelLibrary.load_level(master_level)
|
||||
EditorAssetLibrary.delete_directory(f"{root}/tmp")
|
||||
|
||||
EditorLevelLibrary.save_current_level()
|
||||
|
||||
# Delete the parent folder if there aren't any more layouts in it.
|
||||
asset_content = EditorAssetLibrary.list_assets(
|
||||
parent_path, recursive=False, include_folder=True
|
||||
str(path.parent), recursive=False, include_folder=True
|
||||
)
|
||||
|
||||
if len(asset_content) == 0:
|
||||
EditorAssetLibrary.delete_directory(parent_path)
|
||||
EditorAssetLibrary.delete_directory(str(path.parent))
|
||||
|
|
|
|||
|
|
@ -52,54 +52,55 @@ class SkeletalMeshFBXLoader(plugin.Loader):
|
|||
asset_name = "{}_{}".format(asset, name)
|
||||
else:
|
||||
asset_name = "{}".format(name)
|
||||
version = context.get('version').get('name')
|
||||
|
||||
tools = unreal.AssetToolsHelpers().get_asset_tools()
|
||||
asset_dir, container_name = tools.create_unique_asset_name(
|
||||
"{}/{}/{}".format(root, asset, name), suffix="")
|
||||
f"{root}/{asset}/{name}_v{version:03d}", suffix="")
|
||||
|
||||
container_name += suffix
|
||||
|
||||
unreal.EditorAssetLibrary.make_directory(asset_dir)
|
||||
if not unreal.EditorAssetLibrary.does_directory_exist(asset_dir):
|
||||
unreal.EditorAssetLibrary.make_directory(asset_dir)
|
||||
|
||||
task = unreal.AssetImportTask()
|
||||
task = unreal.AssetImportTask()
|
||||
|
||||
task.set_editor_property('filename', self.fname)
|
||||
task.set_editor_property('destination_path', asset_dir)
|
||||
task.set_editor_property('destination_name', asset_name)
|
||||
task.set_editor_property('replace_existing', False)
|
||||
task.set_editor_property('automated', True)
|
||||
task.set_editor_property('save', False)
|
||||
task.set_editor_property('filename', self.fname)
|
||||
task.set_editor_property('destination_path', asset_dir)
|
||||
task.set_editor_property('destination_name', asset_name)
|
||||
task.set_editor_property('replace_existing', False)
|
||||
task.set_editor_property('automated', True)
|
||||
task.set_editor_property('save', False)
|
||||
|
||||
# set import options here
|
||||
options = unreal.FbxImportUI()
|
||||
options.set_editor_property('import_as_skeletal', True)
|
||||
options.set_editor_property('import_animations', False)
|
||||
options.set_editor_property('import_mesh', True)
|
||||
options.set_editor_property('import_materials', True)
|
||||
options.set_editor_property('import_textures', True)
|
||||
options.set_editor_property('skeleton', None)
|
||||
options.set_editor_property('create_physics_asset', False)
|
||||
# set import options here
|
||||
options = unreal.FbxImportUI()
|
||||
options.set_editor_property('import_as_skeletal', True)
|
||||
options.set_editor_property('import_animations', False)
|
||||
options.set_editor_property('import_mesh', True)
|
||||
options.set_editor_property('import_materials', False)
|
||||
options.set_editor_property('import_textures', False)
|
||||
options.set_editor_property('skeleton', None)
|
||||
options.set_editor_property('create_physics_asset', False)
|
||||
|
||||
options.set_editor_property('mesh_type_to_import',
|
||||
unreal.FBXImportType.FBXIT_SKELETAL_MESH)
|
||||
options.set_editor_property(
|
||||
'mesh_type_to_import',
|
||||
unreal.FBXImportType.FBXIT_SKELETAL_MESH)
|
||||
|
||||
options.skeletal_mesh_import_data.set_editor_property(
|
||||
'import_content_type',
|
||||
unreal.FBXImportContentType.FBXICT_ALL
|
||||
)
|
||||
# set to import normals, otherwise Unreal will compute them
|
||||
# and it will take a long time, depending on the size of the mesh
|
||||
options.skeletal_mesh_import_data.set_editor_property(
|
||||
'normal_import_method',
|
||||
unreal.FBXNormalImportMethod.FBXNIM_IMPORT_NORMALS
|
||||
)
|
||||
options.skeletal_mesh_import_data.set_editor_property(
|
||||
'import_content_type',
|
||||
unreal.FBXImportContentType.FBXICT_ALL)
|
||||
# set to import normals, otherwise Unreal will compute them
|
||||
# and it will take a long time, depending on the size of the mesh
|
||||
options.skeletal_mesh_import_data.set_editor_property(
|
||||
'normal_import_method',
|
||||
unreal.FBXNormalImportMethod.FBXNIM_IMPORT_NORMALS)
|
||||
|
||||
task.options = options
|
||||
unreal.AssetToolsHelpers.get_asset_tools().import_asset_tasks([task]) # noqa: E501
|
||||
task.options = options
|
||||
unreal.AssetToolsHelpers.get_asset_tools().import_asset_tasks([task]) # noqa: E501
|
||||
|
||||
# Create Asset Container
|
||||
unreal_pipeline.create_container(
|
||||
container=container_name, path=asset_dir)
|
||||
# Create Asset Container
|
||||
unreal_pipeline.create_container(
|
||||
container=container_name, path=asset_dir)
|
||||
|
||||
data = {
|
||||
"schema": "openpype:container-2.0",
|
||||
|
|
|
|||
|
|
@@ -1,8 +1,11 @@
 import os
 from pathlib import Path

 import unreal

-import pyblish.api
+from openpype.api import Anatomy
 from openpype.hosts.unreal.api import pipeline
+import pyblish.api


 class CollectRenderInstances(pyblish.api.InstancePlugin):
@ -77,9 +80,15 @@ class CollectRenderInstances(pyblish.api.InstancePlugin):
|
|||
|
||||
self.log.debug(f"new instance data: {new_data}")
|
||||
|
||||
project_dir = unreal.Paths.project_dir()
|
||||
render_dir = (f"{project_dir}/Saved/MovieRenders/"
|
||||
f"{s.get('output')}")
|
||||
try:
|
||||
project = os.environ.get("AVALON_PROJECT")
|
||||
anatomy = Anatomy(project)
|
||||
root = anatomy.roots['renders']
|
||||
except Exception:
|
||||
raise Exception(
|
||||
"Could not find render root in anatomy settings.")
|
||||
|
||||
render_dir = f"{root}/{project}/{s.get('output')}"
|
||||
render_path = Path(render_dir)
|
||||
|
||||
frames = []
|
||||
|
|
|
|||
|
|
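Note: this change stops collecting renders from the Unreal project's Saved/MovieRenders folder and instead builds the render directory from the project anatomy's 'renders' root. A minimal sketch of that lookup, assuming the AVALON_PROJECT environment variable is set and the anatomy defines a 'renders' root; the output name below is a hypothetical value collected from the instance:

import os
from pathlib import Path

from openpype.api import Anatomy

project = os.environ.get("AVALON_PROJECT")
anatomy = Anatomy(project)

# 'renders' must be defined among the anatomy roots, otherwise this raises.
root = anatomy.roots['renders']

output = "renderMain"  # hypothetical output name
render_path = Path(f"{root}/{project}/{output}")
print(render_path)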
@@ -493,8 +493,9 @@ def convert_for_ffmpeg(
erase_reason = "has too long value ({} chars).".format(
    len(attr_value)
)
erase_attribute = True

if erase_attribute:
if not erase_attribute:
    for char in NOT_ALLOWED_FFMPEG_CHARS:
        if char in attr_value:
            erase_attribute = True

@@ -623,8 +624,9 @@ def convert_input_paths_for_ffmpeg(
erase_reason = "has too long value ({} chars).".format(
    len(attr_value)
)
erase_attribute = True

if erase_attribute:
if not erase_attribute:
    for char in NOT_ALLOWED_FFMPEG_CHARS:
        if char in attr_value:
            erase_attribute = True
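Note: both hunks apply the same fix: the forbidden-character scan now runs only when the attribute was not already marked for erasure by the length check, so the reason recorded by the first failing check is kept. A minimal sketch of the intended flow, with a hypothetical length limit and character set standing in for the real module constants:

NOT_ALLOWED_FFMPEG_CHARS = ("=", ";")  # assumed subset for illustration
MAX_VALUE_LENGTH = 1024                # hypothetical limit

def should_erase_attribute(attr_value):
    erase_attribute = False
    erase_reason = None

    if len(attr_value) > MAX_VALUE_LENGTH:
        erase_reason = "has too long value ({} chars).".format(len(attr_value))
        erase_attribute = True

    if not erase_attribute:
        # Only scan for forbidden characters when the length check passed.
        for char in NOT_ALLOWED_FFMPEG_CHARS:
            if char in attr_value:
                erase_reason = "contains unsupported character {!r}.".format(char)
                erase_attribute = True
                break

    return erase_attribute, erase_reason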
@@ -12,6 +12,7 @@ Provides:

import os
import sys
import collections
import six
import pyblish.api
import clique

@@ -84,6 +85,7 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin):
asset_types_by_short = self._ensure_asset_types_exists(
    session, component_list
)
self._fill_component_locations(session, component_list)

asset_versions_data_by_id = {}
used_asset_versions = []

@@ -193,6 +195,70 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin):
session._configure_locations()
six.reraise(tp, value, tb)

def _fill_component_locations(self, session, component_list):
    components_by_location_name = collections.defaultdict(list)
    components_by_location_id = collections.defaultdict(list)
    for component_item in component_list:
        # Location entity can be prefilled
        # - this is not recommended as connection to ftrack server may
        #   be lost and in that case the entity is not valid when gets
        #   to this plugin
        location = component_item.get("component_location")
        if location is not None:
            continue

        # Collect location id
        location_id = component_item.get("component_location_id")
        if location_id:
            components_by_location_id[location_id].append(
                component_item
            )
            continue

        location_name = component_item.get("component_location_name")
        if location_name:
            components_by_location_name[location_name].append(
                component_item
            )
            continue

    # Skip if there is nothing to do
    if not components_by_location_name and not components_by_location_id:
        return

    # Query locations
    query_filters = []
    if components_by_location_id:
        joined_location_ids = ",".join([
            '"{}"'.format(location_id)
            for location_id in components_by_location_id
        ])
        query_filters.append("id in ({})".format(joined_location_ids))

    if components_by_location_name:
        joined_location_names = ",".join([
            '"{}"'.format(location_name)
            for location_name in components_by_location_name
        ])
        query_filters.append("name in ({})".format(joined_location_names))

    locations = session.query(
        "select id, name from Location where {}".format(
            " or ".join(query_filters)
        )
    ).all()
    # Fill locations in components
    for location in locations:
        location_id = location["id"]
        location_name = location["name"]
        if location_id in components_by_location_id:
            for component in components_by_location_id[location_id]:
                component["component_location"] = location

        if location_name in components_by_location_name:
            for component in components_by_location_name[location_name]:
                component["component_location"] = location

def _ensure_asset_types_exists(self, session, component_list):
    """Make sure that all AssetType entities exists for integration.
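Note: the new _fill_component_locations helper defers Location resolution to integration time: components may arrive carrying only a location id or name, and the plugin groups them and resolves the entities with one query against the live session. A condensed sketch of the name-based branch, assuming an ftrack_api.Session instance is available:

import collections

def resolve_locations_by_name(session, component_list):
    # Group components that only carry a location name and no entity yet.
    by_name = collections.defaultdict(list)
    for item in component_list:
        name = item.get("component_location_name")
        if name and item.get("component_location") is None:
            by_name[name].append(item)

    if not by_name:
        return

    # One query resolves all required Location entities at once.
    joined = ",".join('"{}"'.format(name) for name in by_name)
    for location in session.query(
        "select id, name from Location where name in ({})".format(joined)
    ).all():
        for item in by_name[location["name"]]:
            item["component_location"] = location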
@@ -106,11 +106,10 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin):
# These must be changed for each component
"component_data": None,
"component_path": None,
"component_location": None
"component_location": None,
"component_location_name": None
}

ft_session = instance.context.data["ftrackSession"]

# Filter types of representations
review_representations = []
thumbnail_representations = []

@@ -128,12 +127,8 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin):
other_representations.append(repre)

# Prepare ftrack locations
unmanaged_location = ft_session.query(
    "Location where name is \"ftrack.unmanaged\""
).one()
ftrack_server_location = ft_session.query(
    "Location where name is \"ftrack.server\""
).one()
unmanaged_location_name = "ftrack.unmanaged"
ftrack_server_location_name = "ftrack.server"

# Components data
component_list = []

@@ -174,7 +169,10 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin):
first_thumbnail_component_repre = repre
first_thumbnail_component = thumbnail_item
# Set location
thumbnail_item["component_location"] = ftrack_server_location
thumbnail_item["component_location_name"] = (
    ftrack_server_location_name
)

# Add item to component list
component_list.append(thumbnail_item)

@@ -293,7 +291,9 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin):
src_components_to_add.append(copy.deepcopy(review_item))

# Set location
review_item["component_location"] = ftrack_server_location
review_item["component_location_name"] = (
    ftrack_server_location_name
)
# Add item to component list
component_list.append(review_item)

@@ -305,8 +305,8 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin):
    first_thumbnail_component
)
new_thumbnail_component["asset_data"]["name"] = asset_name
new_thumbnail_component["component_location"] = (
    ftrack_server_location
new_thumbnail_component["component_location_name"] = (
    ftrack_server_location_name
)
component_list.append(new_thumbnail_component)

@@ -315,7 +315,7 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin):
# Make sure thumbnail is disabled
copy_src_item["thumbnail"] = False
# Set location
copy_src_item["component_location"] = unmanaged_location
copy_src_item["component_location_name"] = unmanaged_location_name
# Modify name of component to have suffix "_src"
component_data = copy_src_item["component_data"]
component_name = component_data["name"]

@@ -340,7 +340,7 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin):
other_item["component_data"] = {
    "name": repre["name"]
}
other_item["component_location"] = unmanaged_location
other_item["component_location_name"] = unmanaged_location_name
other_item["component_path"] = published_path
component_list.append(other_item)
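Note: the hunks above replace prefilled Location entities on each component item with plain location names; the integrator's _fill_component_locations (earlier in this commit) resolves the entities later with a live session. A small sketch of how items might be assembled under that convention; the dict keys mirror the change, while the component names and paths are hypothetical:

import copy

base_component_item = {
    # These must be changed for each component
    "component_data": None,
    "component_path": None,
    "component_location": None,
    "component_location_name": None,
}

# A published file that should stay on disk goes to the unmanaged location.
other_item = copy.deepcopy(base_component_item)
other_item["component_data"] = {"name": "exr"}            # hypothetical name
other_item["component_path"] = "/path/to/published.exr"   # hypothetical path
other_item["component_location_name"] = "ftrack.unmanaged"

# A reviewable goes to the ftrack server location.
review_item = copy.deepcopy(base_component_item)
review_item["component_data"] = {"name": "ftrackreview-mp4"}  # hypothetical
review_item["component_path"] = "/path/to/review.mp4"         # hypothetical
review_item["component_location_name"] = "ftrack.server"

component_list = [other_item, review_item]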
@@ -49,6 +49,7 @@ class ExtractJpegEXR(pyblish.api.InstancePlugin):
return

filtered_repres = self._get_filtered_repres(instance)

for repre in filtered_repres:
    repre_files = repre["files"]
    if not isinstance(repre_files, (list, tuple)):

@@ -151,6 +152,11 @@ class ExtractJpegEXR(pyblish.api.InstancePlugin):
if convert_dir is not None and os.path.exists(convert_dir):
    shutil.rmtree(convert_dir)

# Create only one representation with name 'thumbnail'
# TODO maybe handle way how to decide from which representation
# will be thumbnail created
break

def _get_filtered_repres(self, instance):
    filtered_repres = []
    src_repres = instance.data.get("representations") or []
@@ -12,6 +12,26 @@
"LC_ALL": "C"
},
"variants": {
    "2023": {
        "use_python_2": false,
        "executables": {
            "windows": [
                "C:\\Program Files\\Autodesk\\Maya2023\\bin\\maya.exe"
            ],
            "darwin": [],
            "linux": [
                "/usr/autodesk/maya2023/bin/maya"
            ]
        },
        "arguments": {
            "windows": [],
            "darwin": [],
            "linux": []
        },
        "environment": {
            "MAYA_VERSION": "2023"
        }
    },
    "2022": {
        "use_python_2": false,
        "executables": {

@@ -91,9 +111,6 @@
"environment": {
    "MAYA_VERSION": "2018"
}
},
"__dynamic_keys_labels__": {
    "2022": "2022"
}
}
},