[Automated] Merged develop into main

pypebot 2022-08-17 06:02:53 +02:00 committed by GitHub
commit be13023163
50 changed files with 1051 additions and 738 deletions

View file

@@ -180,7 +180,7 @@ class ExtractLayout(openpype.api.Extractor):
"rotation": {
"x": asset.rotation_euler.x,
"y": asset.rotation_euler.y,
"z": asset.rotation_euler.z,
"z": asset.rotation_euler.z
},
"scale": {
"x": asset.scale.x,
@@ -189,6 +189,18 @@ class ExtractLayout(openpype.api.Extractor):
}
}
json_element["transform_matrix"] = []
for row in list(asset.matrix_world.transposed()):
json_element["transform_matrix"].append(list(row))
json_element["basis"] = [
[1, 0, 0, 0],
[0, -1, 0, 0],
[0, 0, 1, 0],
[0, 0, 0, 1]
]
# Extract the animation as well
if family == "rig":
f, n = self._export_animation(

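The hunk above serializes the asset's world matrix row by row and pairs it with a fixed basis that flips the Y axis. A minimal standalone sketch of the same serialization, assuming the 4x4 matrix arrives as a flat column-major list (so no Blender mathutils is needed here):

    def to_row_major(flat, row_length=4):
        # Transpose a flat column-major 4x4 matrix into row lists,
        # mirroring matrix_world.transposed() in the hunk above.
        return [list(flat[i::row_length]) for i in range(row_length)]

    # The fixed basis written alongside it: identity with Y negated.
    BASIS_Y_FLIP = [
        [1, 0, 0, 0],
        [0, -1, 0, 0],
        [0, 0, 1, 0],
        [0, 0, 0, 1],
    ]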
View file

@@ -60,8 +60,7 @@ class RenderSettings(object):
try:
aov_separator = self._aov_chars[(
self._project_settings["maya"]
["create"]
["CreateRender"]
["RenderSettings"]
["aov_separator"]
)]
except KeyError:

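This commit relocates the Maya render settings from project_settings["maya"]["create"]["CreateRender"] to project_settings["maya"]["RenderSettings"]; the same key move recurs in CollectMayaRender, ValidateRenderImageRule and MayaSubmitDeadline below. A hedged helper sketch that reads the new location and falls back to the legacy one (the fallback chain is an editorial assumption, not part of the commit):

    def get_maya_render_settings(project_settings):
        # Prefer the new location; fall back to the pre-move key path.
        maya = project_settings.get("maya", {})
        return (maya.get("RenderSettings")
                or maya.get("create", {}).get("CreateRender", {})
                or {})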
View file

@@ -154,12 +154,6 @@ class CollectMayaRender(pyblish.api.ContextPlugin):
layer_name = "rs_{}".format(expected_layer_name)
# collect all frames we are expecting to be rendered
renderer = self.get_render_attribute("currentRenderer",
layer=layer_name)
# handle various renderman names
if renderer.startswith("renderman"):
renderer = "renderman"
# return all expected files for all cameras and aovs in given
# frame range
layer_render_products = get_layer_render_products(layer_name)
@@ -202,8 +196,7 @@ class CollectMayaRender(pyblish.api.ContextPlugin):
aov_dict = {}
default_render_file = context.data.get('project_settings')\
.get('maya')\
.get('create')\
.get('CreateRender')\
.get('RenderSettings')\
.get('default_render_image_folder') or ""
# replace relative paths with absolute. Render products are
# returned as list of dictionaries.

View file

@@ -0,0 +1,146 @@
import math
import os
import json
from maya import cmds
from maya.api import OpenMaya as om
from bson.objectid import ObjectId
from openpype.pipeline import legacy_io
import openpype.api
class ExtractLayout(openpype.api.Extractor):
"""Extract a layout."""
label = "Extract Layout"
hosts = ["maya"]
families = ["layout"]
optional = True
def process(self, instance):
# Define extract output file path
stagingdir = self.staging_dir(instance)
# Perform extraction
self.log.info("Performing extraction..")
if "representations" not in instance.data:
instance.data["representations"] = []
json_data = []
for asset in cmds.sets(str(instance), query=True):
# Find the container
grp_name = asset.split(':')[0]
containers = cmds.ls(f"{grp_name}*_CON")
assert len(containers) == 1, \
f"More than one container found for {asset}"
container = containers[0]
representation_id = cmds.getAttr(f"{container}.representation")
representation = legacy_io.find_one(
{
"type": "representation",
"_id": ObjectId(representation_id)
}, projection={"parent": True, "context.family": True})
self.log.info(representation)
version_id = representation.get("parent")
family = representation.get("context").get("family")
json_element = {
"family": family,
"instance_name": cmds.getAttr(f"{container}.name"),
"representation": str(representation_id),
"version": str(version_id)
}
loc = cmds.xform(asset, query=True, translation=True)
rot = cmds.xform(asset, query=True, rotation=True, euler=True)
scl = cmds.xform(asset, query=True, relative=True, scale=True)
json_element["transform"] = {
"translation": {
"x": loc[0],
"y": loc[1],
"z": loc[2]
},
"rotation": {
"x": math.radians(rot[0]),
"y": math.radians(rot[1]),
"z": math.radians(rot[2])
},
"scale": {
"x": scl[0],
"y": scl[1],
"z": scl[2]
}
}
row_length = 4
t_matrix_list = cmds.xform(asset, query=True, matrix=True)
transform_mm = om.MMatrix(t_matrix_list)
transform = om.MTransformationMatrix(transform_mm)
t = transform.translation(om.MSpace.kWorld)
t = om.MVector(t.x, t.z, -t.y)
transform.setTranslation(t, om.MSpace.kWorld)
transform.rotateBy(
om.MEulerRotation(math.radians(-90), 0, 0), om.MSpace.kWorld)
transform.scaleBy([1.0, 1.0, -1.0], om.MSpace.kObject)
t_matrix_list = list(transform.asMatrix())
t_matrix = []
for i in range(0, len(t_matrix_list), row_length):
t_matrix.append(t_matrix_list[i:i + row_length])
json_element["transform_matrix"] = []
for row in t_matrix:
json_element["transform_matrix"].append(list(row))
basis_list = [
1, 0, 0, 0,
0, 1, 0, 0,
0, 0, -1, 0,
0, 0, 0, 1
]
basis_mm = om.MMatrix(basis_list)
basis = om.MTransformationMatrix(basis_mm)
b_matrix_list = list(basis.asMatrix())
b_matrix = []
for i in range(0, len(b_matrix_list), row_length):
b_matrix.append(b_matrix_list[i:i + row_length])
json_element["basis"] = []
for row in b_matrix:
json_element["basis"].append(list(row))
json_data.append(json_element)
json_filename = "{}.json".format(instance.name)
json_path = os.path.join(stagingdir, json_filename)
with open(json_path, "w+") as file:
json.dump(json_data, fp=file, indent=2)
json_representation = {
'name': 'json',
'ext': 'json',
'files': json_filename,
"stagingDir": stagingdir,
}
instance.data["representations"].append(json_representation)
self.log.info("Extracted instance '%s' to: %s",
instance.name, json_representation)

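The new extractor queries the flat 16-float world matrix, remaps it with MTransformationMatrix (swapped translation axes, a -90 degree X rotation, a negated Z scale), and then regroups the flat list into four rows; the basis matrix gets the same regrouping. The regrouping step as a standalone sketch:

    def chunk_matrix(flat, row_length=4):
        # Split a flat 16-value matrix into four rows of four, as done
        # for both t_matrix_list and b_matrix_list above.
        return [flat[i:i + row_length]
                for i in range(0, len(flat), row_length)]

    assert chunk_matrix(list(range(16)))[1] == [4, 5, 6, 7]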
View file

@@ -429,7 +429,19 @@ class ExtractLook(openpype.api.Extractor):
# node doesn't have color space attribute
color_space = "Raw"
else:
if files_metadata[source]["color_space"] == "Raw":
# get the resolved files
metadata = files_metadata.get(source)
# if the files are unresolved from `source`
# assume color space from the first file of
# the resource
if not metadata:
first_file = next(iter(resource.get(
"files", [])), None)
if not first_file:
continue
first_filepath = os.path.normpath(first_file)
metadata = files_metadata[first_filepath]
if metadata["color_space"] == "Raw":
# set color space to raw if we linearized it
color_space = "Raw"
# Remap file node filename to destination

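The added branch covers sources whose path is not a key in files_metadata (for example an unresolved sequence or UDIM pattern) by borrowing the color space of the resource's first resolved file. The same logic as a compact sketch, with names taken from the hunk:

    import os

    def resolve_color_metadata(source, files_metadata, resource):
        metadata = files_metadata.get(source)
        if metadata is None:
            files = resource.get("files", [])
            if not files:
                return None  # nothing to infer from; the caller skips
            metadata = files_metadata[os.path.normpath(files[0])]
        return metadata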
View file

@@ -1,17 +1,15 @@
import maya.mel as mel
import pymel.core as pm
from maya import cmds
import pyblish.api
import openpype.api
def get_file_rule(rule):
"""Workaround for a bug in python with cmds.workspace"""
return mel.eval('workspace -query -fileRuleEntry "{}"'.format(rule))
class ValidateRenderImageRule(pyblish.api.InstancePlugin):
"""Validates "images" file rule is set to "renders/"
"""Validates Maya Workpace "images" file rule matches project settings.
This validates against the configured default render image folder:
Studio Settings > Project > Maya >
Render Settings > Default render image folder.
"""
@@ -23,24 +21,29 @@ class ValidateRenderImageRule(pyblish.api.InstancePlugin):
def process(self, instance):
default_render_file = self.get_default_render_image_folder(instance)
required_images_rule = self.get_default_render_image_folder(instance)
current_images_rule = cmds.workspace(fileRuleEntry="images")
assert get_file_rule("images") == default_render_file, (
"Workspace's `images` file rule must be set to: {}".format(
default_render_file
assert current_images_rule == required_images_rule, (
"Invalid workspace `images` file rule value: '{}'. "
"Must be set to: '{}'".format(
current_images_rule, required_images_rule
)
)
@classmethod
def repair(cls, instance):
default = cls.get_default_render_image_folder(instance)
pm.workspace.fileRules["images"] = default
pm.system.Workspace.save()
required_images_rule = cls.get_default_render_image_folder(instance)
current_images_rule = cmds.workspace(fileRuleEntry="images")
if current_images_rule != required_images_rule:
cmds.workspace(fileRule=("images", required_images_rule))
cmds.workspace(saveWorkspace=True)
@staticmethod
def get_default_render_image_folder(instance):
return instance.context.data.get('project_settings')\
.get('maya') \
.get('create') \
.get('CreateRender') \
.get('RenderSettings') \
.get('default_render_image_folder')

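The validator's repair step drops pymel in favor of cmds.workspace. The validate-then-repair idiom condensed into one function (runs only inside Maya; calls taken from the hunks above):

    from maya import cmds

    def ensure_images_file_rule(required_images_rule):
        # Align the workspace "images" file rule with project settings.
        current = cmds.workspace(fileRuleEntry="images")
        if current != required_images_rule:
            cmds.workspace(fileRule=("images", required_images_rule))
            cmds.workspace(saveWorkspace=True)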
View file

@@ -915,7 +915,7 @@ def get_render_path(node):
avalon_knob_data = read_avalon_data(node)
nuke_imageio_writes = get_imageio_node_setting(
node_class=avalon_knob_data["family"],
node_class=avalon_knob_data["families"],
plugin_name=avalon_knob_data["creator"],
subset=avalon_knob_data["subset"]
)
@@ -1923,7 +1923,7 @@ class WorkfileSettings(object):
families.append(avalon_knob_data.get("families"))
nuke_imageio_writes = get_imageio_node_setting(
node_class=avalon_knob_data["family"],
node_class=avalon_knob_data["families"],
plugin_name=avalon_knob_data["creator"],
subset=avalon_knob_data["subset"]
)
@@ -2222,7 +2222,7 @@ def get_write_node_template_attr(node):
avalon_knob_data = read_avalon_data(node)
# get template data
nuke_imageio_writes = get_imageio_node_setting(
node_class=avalon_knob_data["family"],
node_class=avalon_knob_data["families"],
plugin_name=avalon_knob_data["creator"],
subset=avalon_knob_data["subset"]
)

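All three call sites now read the "families" key from the knob data instead of "family". If older workfiles still carry only the legacy key, a tolerant lookup would be (an editorial sketch; the commit itself switches unconditionally):

    def node_class_from_knob_data(avalon_knob_data):
        # Prefer the new "families" knob value; fall back to "family".
        return (avalon_knob_data.get("families")
                or avalon_knob_data.get("family"))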
View file

@@ -20,6 +20,34 @@ class SkeletalMeshAlembicLoader(plugin.Loader):
icon = "cube"
color = "orange"
def get_task(self, filename, asset_dir, asset_name, replace):
task = unreal.AssetImportTask()
options = unreal.AbcImportSettings()
sm_settings = unreal.AbcStaticMeshSettings()
conversion_settings = unreal.AbcConversionSettings(
preset=unreal.AbcConversionPreset.CUSTOM,
flip_u=False, flip_v=False,
rotation=[0.0, 0.0, 0.0],
scale=[1.0, 1.0, 1.0])
task.set_editor_property('filename', filename)
task.set_editor_property('destination_path', asset_dir)
task.set_editor_property('destination_name', asset_name)
task.set_editor_property('replace_existing', replace)
task.set_editor_property('automated', True)
task.set_editor_property('save', True)
# set import options here
# Unreal 4.24 ignores the settings. It works with Unreal 4.26
options.set_editor_property(
'import_type', unreal.AlembicImportType.SKELETAL)
options.static_mesh_settings = sm_settings
options.conversion_settings = conversion_settings
task.options = options
return task
def load(self, context, name, namespace, data):
"""Load and containerise representation into Content Browser.
@@ -50,36 +78,24 @@ class SkeletalMeshAlembicLoader(plugin.Loader):
asset_name = "{}_{}".format(asset, name)
else:
asset_name = "{}".format(name)
version = context.get('version').get('name')
tools = unreal.AssetToolsHelpers().get_asset_tools()
asset_dir, container_name = tools.create_unique_asset_name(
"{}/{}/{}".format(root, asset, name), suffix="")
f"{root}/{asset}/{name}_v{version:03d}", suffix="")
container_name += suffix
unreal.EditorAssetLibrary.make_directory(asset_dir)
if not unreal.EditorAssetLibrary.does_directory_exist(asset_dir):
unreal.EditorAssetLibrary.make_directory(asset_dir)
task = unreal.AssetImportTask()
task = self.get_task(self.fname, asset_dir, asset_name, False)
task.set_editor_property('filename', self.fname)
task.set_editor_property('destination_path', asset_dir)
task.set_editor_property('destination_name', asset_name)
task.set_editor_property('replace_existing', False)
task.set_editor_property('automated', True)
task.set_editor_property('save', True)
unreal.AssetToolsHelpers.get_asset_tools().import_asset_tasks([task]) # noqa: E501
# set import options here
# Unreal 4.24 ignores the settings. It works with Unreal 4.26
options = unreal.AbcImportSettings()
options.set_editor_property(
'import_type', unreal.AlembicImportType.SKELETAL)
task.options = options
unreal.AssetToolsHelpers.get_asset_tools().import_asset_tasks([task]) # noqa: E501
# Create Asset Container
unreal_pipeline.create_container(
container=container_name, path=asset_dir)
# Create Asset Container
unreal_pipeline.create_container(
container=container_name, path=asset_dir)
data = {
"schema": "openpype:container-2.0",
@@ -110,23 +126,8 @@ class SkeletalMeshAlembicLoader(plugin.Loader):
source_path = get_representation_path(representation)
destination_path = container["namespace"]
task = unreal.AssetImportTask()
task = self.get_task(source_path, destination_path, name, True)
task.set_editor_property('filename', source_path)
task.set_editor_property('destination_path', destination_path)
# strip suffix
task.set_editor_property('destination_name', name)
task.set_editor_property('replace_existing', True)
task.set_editor_property('automated', True)
task.set_editor_property('save', True)
# set import options here
# Unreal 4.24 ignores the settings. It works with Unreal 4.26
options = unreal.AbcImportSettings()
options.set_editor_property(
'import_type', unreal.AlembicImportType.SKELETAL)
task.options = options
# do import fbx and replace existing data
unreal.AssetToolsHelpers.get_asset_tools().import_asset_tasks([task])
container_path = "{}/{}".format(container["namespace"],

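Both load() and update() now funnel through the shared get_task() builder, and the destination folder embeds the loaded version. How the new f-string path renders, with hypothetical values:

    root, asset, name, version = "/Game/OpenPype", "hero", "rigMain", 7
    asset_dir_base = f"{root}/{asset}/{name}_v{version:03d}"
    assert asset_dir_base == "/Game/OpenPype/hero/rigMain_v007"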
View file

@@ -24,7 +24,11 @@ class StaticMeshAlembicLoader(plugin.Loader):
task = unreal.AssetImportTask()
options = unreal.AbcImportSettings()
sm_settings = unreal.AbcStaticMeshSettings()
conversion_settings = unreal.AbcConversionSettings()
conversion_settings = unreal.AbcConversionSettings(
preset=unreal.AbcConversionPreset.CUSTOM,
flip_u=False, flip_v=False,
rotation=[0.0, 0.0, 0.0],
scale=[1.0, 1.0, 1.0])
task.set_editor_property('filename', filename)
task.set_editor_property('destination_path', asset_dir)
@@ -40,13 +44,6 @@ class StaticMeshAlembicLoader(plugin.Loader):
sm_settings.set_editor_property('merge_meshes', True)
conversion_settings.set_editor_property('flip_u', False)
conversion_settings.set_editor_property('flip_v', True)
conversion_settings.set_editor_property(
'scale', unreal.Vector(x=100.0, y=100.0, z=100.0))
conversion_settings.set_editor_property(
'rotation', unreal.Vector(x=-90.0, y=0.0, z=180.0))
options.static_mesh_settings = sm_settings
options.conversion_settings = conversion_settings
task.options = options
@@ -83,22 +80,24 @@ class StaticMeshAlembicLoader(plugin.Loader):
asset_name = "{}_{}".format(asset, name)
else:
asset_name = "{}".format(name)
version = context.get('version').get('name')
tools = unreal.AssetToolsHelpers().get_asset_tools()
asset_dir, container_name = tools.create_unique_asset_name(
"{}/{}/{}".format(root, asset, name), suffix="")
f"{root}/{asset}/{name}_v{version:03d}", suffix="")
container_name += suffix
unreal.EditorAssetLibrary.make_directory(asset_dir)
if not unreal.EditorAssetLibrary.does_directory_exist(asset_dir):
unreal.EditorAssetLibrary.make_directory(asset_dir)
task = self.get_task(self.fname, asset_dir, asset_name, False)
task = self.get_task(self.fname, asset_dir, asset_name, False)
unreal.AssetToolsHelpers.get_asset_tools().import_asset_tasks([task]) # noqa: E501
unreal.AssetToolsHelpers.get_asset_tools().import_asset_tasks([task]) # noqa: E501
# Create Asset Container
unreal_pipeline.create_container(
container=container_name, path=asset_dir)
# Create Asset Container
unreal_pipeline.create_container(
container=container_name, path=asset_dir)
data = {
"schema": "openpype:container-2.0",

View file

@@ -9,7 +9,10 @@ from unreal import EditorLevelLibrary
from unreal import EditorLevelUtils
from unreal import AssetToolsHelpers
from unreal import FBXImportType
from unreal import MathLibrary as umath
from unreal import MovieSceneLevelVisibilityTrack
from unreal import MovieSceneSubTrack
from bson.objectid import ObjectId
from openpype.client import get_asset_by_name, get_assets
from openpype.pipeline import (
@@ -21,6 +24,7 @@ from openpype.pipeline import (
legacy_io,
)
from openpype.pipeline.context_tools import get_current_project_asset
from openpype.api import get_current_project_settings
from openpype.hosts.unreal.api import plugin
from openpype.hosts.unreal.api import pipeline as unreal_pipeline
@@ -159,9 +163,29 @@ class LayoutLoader(plugin.Loader):
hid_section.set_row_index(index)
hid_section.set_level_names(maps)
@staticmethod
def _transform_from_basis(self, transform, basis):
"""Transform a transform from a basis to a new basis."""
# Get the basis matrix
basis_matrix = unreal.Matrix(
basis[0],
basis[1],
basis[2],
basis[3]
)
transform_matrix = unreal.Matrix(
transform[0],
transform[1],
transform[2],
transform[3]
)
new_transform = (
basis_matrix.get_inverse() * transform_matrix * basis_matrix)
return new_transform.transform()
def _process_family(
assets, class_name, transform, sequence, inst_name=None
self, assets, class_name, transform, basis, sequence, inst_name=None
):
ar = unreal.AssetRegistryHelpers.get_asset_registry()
@@ -171,30 +195,12 @@ class LayoutLoader(plugin.Loader):
for asset in assets:
obj = ar.get_asset_by_object_path(asset).get_asset()
if obj.get_class().get_name() == class_name:
t = self._transform_from_basis(transform, basis)
actor = EditorLevelLibrary.spawn_actor_from_object(
obj,
transform.get('translation')
obj, t.translation
)
if inst_name:
try:
# Rename method leads to crash
# actor.rename(name=inst_name)
# The label works, although it make it slightly more
# complicated to check for the names, as we need to
# loop through all the actors in the level
actor.set_actor_label(inst_name)
except Exception as e:
print(e)
actor.set_actor_rotation(unreal.Rotator(
umath.radians_to_degrees(
transform.get('rotation').get('x')),
-umath.radians_to_degrees(
transform.get('rotation').get('y')),
umath.radians_to_degrees(
transform.get('rotation').get('z')),
), False)
actor.set_actor_scale3d(transform.get('scale'))
actor.set_actor_rotation(t.rotation.rotator(), False)
actor.set_actor_scale3d(t.scale3d)
if class_name == 'SkeletalMesh':
skm_comp = actor.get_editor_property(
@@ -203,16 +209,17 @@ class LayoutLoader(plugin.Loader):
actors.append(actor)
binding = None
for p in sequence.get_possessables():
if p.get_name() == actor.get_name():
binding = p
break
if sequence:
binding = None
for p in sequence.get_possessables():
if p.get_name() == actor.get_name():
binding = p
break
if not binding:
binding = sequence.add_possessable(actor)
if not binding:
binding = sequence.add_possessable(actor)
bindings.append(binding)
bindings.append(binding)
return actors, bindings
@@ -301,52 +308,53 @@ class LayoutLoader(plugin.Loader):
actor.skeletal_mesh_component.animation_data.set_editor_property(
'anim_to_play', animation)
# Add animation to the sequencer
bindings = bindings_dict.get(instance_name)
if sequence:
# Add animation to the sequencer
bindings = bindings_dict.get(instance_name)
ar = unreal.AssetRegistryHelpers.get_asset_registry()
ar = unreal.AssetRegistryHelpers.get_asset_registry()
for binding in bindings:
tracks = binding.get_tracks()
track = None
track = tracks[0] if tracks else binding.add_track(
unreal.MovieSceneSkeletalAnimationTrack)
for binding in bindings:
tracks = binding.get_tracks()
track = None
track = tracks[0] if tracks else binding.add_track(
unreal.MovieSceneSkeletalAnimationTrack)
sections = track.get_sections()
section = None
if not sections:
section = track.add_section()
else:
section = sections[0]
sections = track.get_sections()
section = None
if not sections:
section = track.add_section()
else:
section = sections[0]
sec_params = section.get_editor_property('params')
curr_anim = sec_params.get_editor_property('animation')
if curr_anim:
# Checks if the animation path has a container.
# If it does, it means that the animation is
# already in the sequencer.
anim_path = str(Path(
curr_anim.get_path_name()).parent
).replace('\\', '/')
_filter = unreal.ARFilter(
class_names=["AssetContainer"],
package_paths=[anim_path],
recursive_paths=False)
containers = ar.get_assets(_filter)
if len(containers) > 0:
return
section.set_range(
sequence.get_playback_start(),
sequence.get_playback_end())
sec_params = section.get_editor_property('params')
curr_anim = sec_params.get_editor_property('animation')
if curr_anim:
# Checks if the animation path has a container.
# If it does, it means that the animation is already
# in the sequencer.
anim_path = str(Path(
curr_anim.get_path_name()).parent
).replace('\\', '/')
_filter = unreal.ARFilter(
class_names=["AssetContainer"],
package_paths=[anim_path],
recursive_paths=False)
containers = ar.get_assets(_filter)
if len(containers) > 0:
return
section.set_range(
sequence.get_playback_start(),
sequence.get_playback_end())
sec_params = section.get_editor_property('params')
sec_params.set_editor_property('animation', animation)
sec_params.set_editor_property('animation', animation)
@staticmethod
def _generate_sequence(self, h, h_dir):
def _generate_sequence(h, h_dir):
tools = unreal.AssetToolsHelpers().get_asset_tools()
sequence = tools.create_asset(
@@ -402,7 +410,7 @@ class LayoutLoader(plugin.Loader):
return sequence, (min_frame, max_frame)
def _process(self, lib_path, asset_dir, sequence, loaded=None):
def _process(self, lib_path, asset_dir, sequence, repr_loaded=None):
ar = unreal.AssetRegistryHelpers.get_asset_registry()
with open(lib_path, "r") as fp:
@@ -410,8 +418,8 @@ class LayoutLoader(plugin.Loader):
all_loaders = discover_loader_plugins()
if not loaded:
loaded = []
if not repr_loaded:
repr_loaded = []
path = Path(lib_path)
@@ -422,36 +430,65 @@ class LayoutLoader(plugin.Loader):
loaded_assets = []
for element in data:
reference = None
if element.get('reference_fbx'):
reference = element.get('reference_fbx')
representation = None
repr_format = None
if element.get('representation'):
# representation = element.get('representation')
self.log.info(element.get("version"))
valid_formats = ['fbx', 'abc']
repr_data = legacy_io.find_one({
"type": "representation",
"parent": ObjectId(element.get("version")),
"name": {"$in": valid_formats}
})
repr_format = repr_data.get('name')
if not repr_data:
self.log.error(
f"No valid representation found for version "
f"{element.get('version')}")
continue
representation = str(repr_data.get('_id'))
print(representation)
# This is to keep compatibility with old versions of the
# json format.
elif element.get('reference_fbx'):
representation = element.get('reference_fbx')
repr_format = 'fbx'
elif element.get('reference_abc'):
reference = element.get('reference_abc')
representation = element.get('reference_abc')
repr_format = 'abc'
# If reference is None, this element is skipped, as it cannot be
# imported in Unreal
if not reference:
if not representation:
continue
instance_name = element.get('instance_name')
skeleton = None
if reference not in loaded:
loaded.append(reference)
if representation not in repr_loaded:
repr_loaded.append(representation)
family = element.get('family')
loaders = loaders_from_representation(
all_loaders, reference)
all_loaders, representation)
loader = None
if reference == element.get('reference_fbx'):
if repr_format == 'fbx':
loader = self._get_fbx_loader(loaders, family)
elif reference == element.get('reference_abc'):
elif repr_format == 'abc':
loader = self._get_abc_loader(loaders, family)
if not loader:
self.log.error(
f"No valid loader found for {representation}")
continue
options = {
@@ -460,7 +497,7 @@ class LayoutLoader(plugin.Loader):
assets = load_container(
loader,
reference,
representation,
namespace=instance_name,
options=options
)
@@ -478,28 +515,36 @@ class LayoutLoader(plugin.Loader):
instances = [
item for item in data
if (item.get('reference_fbx') == reference or
item.get('reference_abc') == reference)]
if ((item.get('version') and
item.get('version') == element.get('version')) or
item.get('reference_fbx') == representation or
item.get('reference_abc') == representation)]
for instance in instances:
transform = instance.get('transform')
# transform = instance.get('transform')
transform = instance.get('transform_matrix')
basis = instance.get('basis')
inst = instance.get('instance_name')
actors = []
if family == 'model':
actors, _ = self._process_family(
assets, 'StaticMesh', transform, sequence, inst)
assets, 'StaticMesh', transform, basis,
sequence, inst
)
elif family == 'rig':
actors, bindings = self._process_family(
assets, 'SkeletalMesh', transform, sequence, inst)
assets, 'SkeletalMesh', transform, basis,
sequence, inst
)
actors_dict[inst] = actors
bindings_dict[inst] = bindings
if skeleton:
skeleton_dict[reference] = skeleton
skeleton_dict[representation] = skeleton
else:
skeleton = skeleton_dict.get(reference)
skeleton = skeleton_dict.get(representation)
animation_file = element.get('animation')
@@ -573,6 +618,9 @@ class LayoutLoader(plugin.Loader):
Returns:
list(str): list of container content
"""
data = get_current_project_settings()
create_sequences = data["unreal"]["level_sequences_for_layouts"]
# Create directory for asset and avalon container
hierarchy = context.get('asset').get('data').get('parents')
root = self.ASSET_ROOT
@@ -593,81 +641,88 @@ class LayoutLoader(plugin.Loader):
EditorAssetLibrary.make_directory(asset_dir)
# Create map for the shot, and create hierarchy of map. If the maps
# already exist, we will use them.
h_dir = hierarchy_dir_list[0]
h_asset = hierarchy[0]
master_level = f"{h_dir}/{h_asset}_map.{h_asset}_map"
if not EditorAssetLibrary.does_asset_exist(master_level):
EditorLevelLibrary.new_level(f"{h_dir}/{h_asset}_map")
master_level = None
shot = None
sequences = []
level = f"{asset_dir}/{asset}_map.{asset}_map"
EditorLevelLibrary.new_level(f"{asset_dir}/{asset}_map")
EditorLevelLibrary.load_level(master_level)
EditorLevelUtils.add_level_to_world(
EditorLevelLibrary.get_editor_world(),
level,
unreal.LevelStreamingDynamic
)
EditorLevelLibrary.save_all_dirty_levels()
EditorLevelLibrary.load_level(level)
if create_sequences:
# Create map for the shot, and create hierarchy of map. If the
# maps already exist, we will use them.
if hierarchy:
h_dir = hierarchy_dir_list[0]
h_asset = hierarchy[0]
master_level = f"{h_dir}/{h_asset}_map.{h_asset}_map"
if not EditorAssetLibrary.does_asset_exist(master_level):
EditorLevelLibrary.new_level(f"{h_dir}/{h_asset}_map")
# Get all the sequences in the hierarchy. It will create them, if
# they don't exist.
sequences = []
frame_ranges = []
for (h_dir, h) in zip(hierarchy_dir_list, hierarchy):
root_content = EditorAssetLibrary.list_assets(
h_dir, recursive=False, include_folder=False)
if master_level:
EditorLevelLibrary.load_level(master_level)
EditorLevelUtils.add_level_to_world(
EditorLevelLibrary.get_editor_world(),
level,
unreal.LevelStreamingDynamic
)
EditorLevelLibrary.save_all_dirty_levels()
EditorLevelLibrary.load_level(level)
existing_sequences = [
EditorAssetLibrary.find_asset_data(asset)
for asset in root_content
if EditorAssetLibrary.find_asset_data(
asset).get_class().get_name() == 'LevelSequence'
]
# Get all the sequences in the hierarchy. It will create them, if
# they don't exist.
frame_ranges = []
for (h_dir, h) in zip(hierarchy_dir_list, hierarchy):
root_content = EditorAssetLibrary.list_assets(
h_dir, recursive=False, include_folder=False)
if not existing_sequences:
sequence, frame_range = self._generate_sequence(h, h_dir)
existing_sequences = [
EditorAssetLibrary.find_asset_data(asset)
for asset in root_content
if EditorAssetLibrary.find_asset_data(
asset).get_class().get_name() == 'LevelSequence'
]
sequences.append(sequence)
frame_ranges.append(frame_range)
else:
for e in existing_sequences:
sequences.append(e.get_asset())
frame_ranges.append((
e.get_asset().get_playback_start(),
e.get_asset().get_playback_end()))
if not existing_sequences:
sequence, frame_range = self._generate_sequence(h, h_dir)
shot = tools.create_asset(
asset_name=asset,
package_path=asset_dir,
asset_class=unreal.LevelSequence,
factory=unreal.LevelSequenceFactoryNew()
)
sequences.append(sequence)
frame_ranges.append(frame_range)
else:
for e in existing_sequences:
sequences.append(e.get_asset())
frame_ranges.append((
e.get_asset().get_playback_start(),
e.get_asset().get_playback_end()))
# sequences and frame_ranges have the same length
for i in range(0, len(sequences) - 1):
self._set_sequence_hierarchy(
sequences[i], sequences[i + 1],
frame_ranges[i][1],
frame_ranges[i + 1][0], frame_ranges[i + 1][1],
[level])
shot = tools.create_asset(
asset_name=asset,
package_path=asset_dir,
asset_class=unreal.LevelSequence,
factory=unreal.LevelSequenceFactoryNew()
)
project_name = legacy_io.active_project()
data = get_asset_by_name(project_name, asset)["data"]
shot.set_display_rate(
unreal.FrameRate(data.get("fps"), 1.0))
shot.set_playback_start(0)
shot.set_playback_end(data.get('clipOut') - data.get('clipIn') + 1)
self._set_sequence_hierarchy(
sequences[-1], shot,
frame_ranges[-1][1],
data.get('clipIn'), data.get('clipOut'),
[level])
# sequences and frame_ranges have the same length
for i in range(0, len(sequences) - 1):
self._set_sequence_hierarchy(
sequences[i], sequences[i + 1],
frame_ranges[i][1],
frame_ranges[i + 1][0], frame_ranges[i + 1][1],
[level])
EditorLevelLibrary.load_level(level)
project_name = legacy_io.active_project()
data = get_asset_by_name(project_name, asset)["data"]
shot.set_display_rate(
unreal.FrameRate(data.get("fps"), 1.0))
shot.set_playback_start(0)
shot.set_playback_end(data.get('clipOut') - data.get('clipIn') + 1)
if sequences:
self._set_sequence_hierarchy(
sequences[-1], shot,
frame_ranges[-1][1],
data.get('clipIn'), data.get('clipOut'),
[level])
EditorLevelLibrary.load_level(level)
loaded_assets = self._process(self.fname, asset_dir, shot)
@@ -702,32 +757,47 @@ class LayoutLoader(plugin.Loader):
for a in asset_content:
EditorAssetLibrary.save_asset(a)
EditorLevelLibrary.load_level(master_level)
if master_level:
EditorLevelLibrary.load_level(master_level)
return asset_content
def update(self, container, representation):
data = get_current_project_settings()
create_sequences = data["unreal"]["level_sequences_for_layouts"]
ar = unreal.AssetRegistryHelpers.get_asset_registry()
root = "/Game/OpenPype"
asset_dir = container.get('namespace')
context = representation.get("context")
hierarchy = context.get('hierarchy').split("/")
h_dir = f"{root}/{hierarchy[0]}"
h_asset = hierarchy[0]
master_level = f"{h_dir}/{h_asset}_map.{h_asset}_map"
sequence = None
master_level = None
# # Create a temporary level to delete the layout level.
# EditorLevelLibrary.save_all_dirty_levels()
# EditorAssetLibrary.make_directory(f"{root}/tmp")
# tmp_level = f"{root}/tmp/temp_map"
# if not EditorAssetLibrary.does_asset_exist(f"{tmp_level}.temp_map"):
# EditorLevelLibrary.new_level(tmp_level)
# else:
# EditorLevelLibrary.load_level(tmp_level)
if create_sequences:
hierarchy = context.get('hierarchy').split("/")
h_dir = f"{root}/{hierarchy[0]}"
h_asset = hierarchy[0]
master_level = f"{h_dir}/{h_asset}_map.{h_asset}_map"
filter = unreal.ARFilter(
class_names=["LevelSequence"],
package_paths=[asset_dir],
recursive_paths=False)
sequences = ar.get_assets(filter)
sequence = sequences[0].get_asset()
prev_level = None
if not master_level:
curr_level = unreal.LevelEditorSubsystem().get_current_level()
curr_level_path = curr_level.get_outer().get_path_name()
# If the level path does not start with "/Game/", the current
# level is a temporary, unsaved level.
if curr_level_path.startswith("/Game/"):
prev_level = curr_level_path
# Get layout level
filter = unreal.ARFilter(
@@ -735,11 +805,6 @@ class LayoutLoader(plugin.Loader):
package_paths=[asset_dir],
recursive_paths=False)
levels = ar.get_assets(filter)
filter = unreal.ARFilter(
class_names=["LevelSequence"],
package_paths=[asset_dir],
recursive_paths=False)
sequences = ar.get_assets(filter)
layout_level = levels[0].get_editor_property('object_path')
@@ -751,14 +816,14 @@ class LayoutLoader(plugin.Loader):
for actor in actors:
unreal.EditorLevelLibrary.destroy_actor(actor)
EditorLevelLibrary.save_current_level()
if create_sequences:
EditorLevelLibrary.save_current_level()
EditorAssetLibrary.delete_directory(f"{asset_dir}/animations/")
source_path = get_representation_path(representation)
loaded_assets = self._process(
source_path, asset_dir, sequences[0].get_asset())
loaded_assets = self._process(source_path, asset_dir, sequence)
data = {
"representation": str(representation["_id"]),
@@ -776,13 +841,20 @@ class LayoutLoader(plugin.Loader):
for a in asset_content:
EditorAssetLibrary.save_asset(a)
EditorLevelLibrary.load_level(master_level)
if master_level:
EditorLevelLibrary.load_level(master_level)
elif prev_level:
EditorLevelLibrary.load_level(prev_level)
def remove(self, container):
"""
Delete the layout. First, check if the assets loaded with the layout
are used by other layouts. If not, delete the assets.
"""
data = get_current_project_settings()
create_sequences = data["unreal"]["level_sequences_for_layouts"]
root = "/Game/OpenPype"
path = Path(container.get("namespace"))
containers = unreal_pipeline.ls()
@@ -793,7 +865,7 @@ class LayoutLoader(plugin.Loader):
# Check if the assets have been loaded by other layouts, and deletes
# them if they haven't.
for asset in container.get('loaded_assets'):
for asset in eval(container.get('loaded_assets')):
layouts = [
lc for lc in layout_containers
if asset in lc.get('loaded_assets')]
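The hunk above reads the stored asset list back with eval. For a plain list literal, ast.literal_eval is a safer drop-in (an editorial suggestion, not what the commit does):

    import ast

    def parse_loaded_assets(container):
        # Parses the stringified list without executing arbitrary code.
        return ast.literal_eval(container.get('loaded_assets'))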
@@ -801,71 +873,87 @@ class LayoutLoader(plugin.Loader):
if not layouts:
EditorAssetLibrary.delete_directory(str(Path(asset).parent))
# Remove the Level Sequence from the parent.
# We need to traverse the hierarchy from the master sequence to find
# the level sequence.
root = "/Game/OpenPype"
namespace = container.get('namespace').replace(f"{root}/", "")
ms_asset = namespace.split('/')[0]
ar = unreal.AssetRegistryHelpers.get_asset_registry()
_filter = unreal.ARFilter(
class_names=["LevelSequence"],
package_paths=[f"{root}/{ms_asset}"],
recursive_paths=False)
sequences = ar.get_assets(_filter)
master_sequence = sequences[0].get_asset()
_filter = unreal.ARFilter(
class_names=["World"],
package_paths=[f"{root}/{ms_asset}"],
recursive_paths=False)
levels = ar.get_assets(_filter)
master_level = levels[0].get_editor_property('object_path')
# Delete the parent folder if there aren't any more
# layouts in it.
asset_content = EditorAssetLibrary.list_assets(
str(Path(asset).parent.parent), recursive=False,
include_folder=True
)
sequences = [master_sequence]
if len(asset_content) == 0:
EditorAssetLibrary.delete_directory(
str(Path(asset).parent.parent))
parent = None
for s in sequences:
tracks = s.get_master_tracks()
subscene_track = None
visibility_track = None
for t in tracks:
if t.get_class() == unreal.MovieSceneSubTrack.static_class():
subscene_track = t
if (t.get_class() ==
unreal.MovieSceneLevelVisibilityTrack.static_class()):
visibility_track = t
if subscene_track:
sections = subscene_track.get_sections()
for ss in sections:
if ss.get_sequence().get_name() == container.get('asset'):
parent = s
subscene_track.remove_section(ss)
break
sequences.append(ss.get_sequence())
# Update subscenes indexes.
i = 0
for ss in sections:
ss.set_row_index(i)
i += 1
master_sequence = None
master_level = None
sequences = []
if visibility_track:
sections = visibility_track.get_sections()
for ss in sections:
if (unreal.Name(f"{container.get('asset')}_map")
in ss.get_level_names()):
visibility_track.remove_section(ss)
# Update visibility sections indexes.
i = -1
prev_name = []
for ss in sections:
if prev_name != ss.get_level_names():
if create_sequences:
# Remove the Level Sequence from the parent.
# We need to traverse the hierarchy from the master sequence to
# find the level sequence.
namespace = container.get('namespace').replace(f"{root}/", "")
ms_asset = namespace.split('/')[0]
ar = unreal.AssetRegistryHelpers.get_asset_registry()
_filter = unreal.ARFilter(
class_names=["LevelSequence"],
package_paths=[f"{root}/{ms_asset}"],
recursive_paths=False)
sequences = ar.get_assets(_filter)
master_sequence = sequences[0].get_asset()
_filter = unreal.ARFilter(
class_names=["World"],
package_paths=[f"{root}/{ms_asset}"],
recursive_paths=False)
levels = ar.get_assets(_filter)
master_level = levels[0].get_editor_property('object_path')
sequences = [master_sequence]
parent = None
for s in sequences:
tracks = s.get_master_tracks()
subscene_track = None
visibility_track = None
for t in tracks:
if t.get_class() == MovieSceneSubTrack.static_class():
subscene_track = t
if (t.get_class() ==
MovieSceneLevelVisibilityTrack.static_class()):
visibility_track = t
if subscene_track:
sections = subscene_track.get_sections()
for ss in sections:
if (ss.get_sequence().get_name() ==
container.get('asset')):
parent = s
subscene_track.remove_section(ss)
break
sequences.append(ss.get_sequence())
# Update subscenes indexes.
i = 0
for ss in sections:
ss.set_row_index(i)
i += 1
ss.set_row_index(i)
prev_name = ss.get_level_names()
if parent:
break
assert parent, "Could not find the parent sequence"
if visibility_track:
sections = visibility_track.get_sections()
for ss in sections:
if (unreal.Name(f"{container.get('asset')}_map")
in ss.get_level_names()):
visibility_track.remove_section(ss)
# Update visibility sections indexes.
i = -1
prev_name = []
for ss in sections:
if prev_name != ss.get_level_names():
i += 1
ss.set_row_index(i)
prev_name = ss.get_level_names()
if parent:
break
assert parent, "Could not find the parent sequence"
# Create a temporary level to delete the layout level.
EditorLevelLibrary.save_all_dirty_levels()
@@ -879,10 +967,9 @@ class LayoutLoader(plugin.Loader):
# Delete the layout directory.
EditorAssetLibrary.delete_directory(str(path))
EditorLevelLibrary.load_level(master_level)
EditorAssetLibrary.delete_directory(f"{root}/tmp")
EditorLevelLibrary.save_current_level()
if create_sequences:
EditorLevelLibrary.load_level(master_level)
EditorAssetLibrary.delete_directory(f"{root}/tmp")
# Delete the parent folder if there aren't any more layouts in it.
asset_content = EditorAssetLibrary.list_assets(

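The new _transform_from_basis conjugates the stored matrix by the basis, T' = inverse(B) * T * B, which carries the DCC-side transform into Unreal's coordinate system before translation, rotator and scale are read off. The same math outside Unreal, sketched with NumPy on the row lists stored in the layout JSON:

    import numpy as np

    def transform_from_basis(transform_rows, basis_rows):
        basis = np.array(basis_rows, dtype=float)
        transform = np.array(transform_rows, dtype=float)
        # Conjugation by the basis, as in the unreal.Matrix version above.
        return np.linalg.inv(basis) @ transform @ basis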
View file

@@ -413,8 +413,7 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin):
# Gather needed data ------------------------------------------------
default_render_file = instance.context.data.get('project_settings')\
.get('maya')\
.get('create')\
.get('CreateRender')\
.get('RenderSettings')\
.get('default_render_image_folder')
filename = os.path.basename(filepath)
comment = context.data.get("comment", "")

View file

@@ -34,7 +34,7 @@ def get_openpype_version_from_path(path, build=True):
# if only builds are requested
if build and not os.path.isfile(exe): # noqa: E501
print(f" ! path is not a build: {path}")
print(" ! path is not a build: {}".format(path))
return None
version = {}
@@ -70,11 +70,12 @@ def inject_openpype_environment(deadlinePlugin):
# lets go over all available and find compatible build.
requested_version = job.GetJobEnvironmentKeyValue("OPENPYPE_VERSION")
if requested_version:
print((">>> Scanning for compatible requested "
f"version {requested_version}"))
print((
">>> Scanning for compatible requested version {}"
).format(requested_version))
install_dir = DirectoryUtils.SearchDirectoryList(dir_list)
if install_dir:
print(f"--- Looking for OpenPype at: {install_dir}")
print("--- Looking for OpenPype at: {}".format(install_dir))
sub_dirs = [
f.path for f in os.scandir(install_dir)
if f.is_dir()
@@ -83,18 +84,20 @@ def inject_openpype_environment(deadlinePlugin):
version = get_openpype_version_from_path(subdir)
if not version:
continue
print(f" - found: {version} - {subdir}")
print(" - found: {} - {}".format(version, subdir))
openpype_versions.append((version, subdir))
exe = FileUtils.SearchFileList(exe_list)
if openpype_versions:
# if looking for requested compatible version,
# add the implicitly specified to the list too.
print(f"Looking for OpenPype at: {os.path.dirname(exe)}")
print("Looking for OpenPype at: {}".format(os.path.dirname(exe)))
version = get_openpype_version_from_path(
os.path.dirname(exe))
if version:
print(f" - found: {version} - {os.path.dirname(exe)}")
print(" - found: {} - {}".format(
version, os.path.dirname(exe)
))
openpype_versions.append((version, os.path.dirname(exe)))
if requested_version:
@@ -106,8 +109,9 @@ def inject_openpype_environment(deadlinePlugin):
int(t) if t.isdigit() else t.lower()
for t in re.split(r"(\d+)", ver[0])
])
print(("*** Latest available version found is "
f"{openpype_versions[-1][0]}"))
print((
"*** Latest available version found is {}"
).format(openpype_versions[-1][0]))
requested_major, requested_minor, _ = requested_version.split(".")[:3] # noqa: E501
compatible_versions = []
for version in openpype_versions:
@@ -127,8 +131,9 @@ def inject_openpype_environment(deadlinePlugin):
int(t) if t.isdigit() else t.lower()
for t in re.split(r"(\d+)", ver[0])
])
print(("*** Latest compatible version found is "
f"{compatible_versions[-1][0]}"))
print((
"*** Latest compatible version found is {}"
).format(compatible_versions[-1][0]))
# create list of executables for different platform and let
# Deadline decide.
exe_list = [
@@ -234,78 +239,6 @@ def inject_render_job_id(deadlinePlugin):
print(">>> Injection end.")
def pype_command_line(executable, arguments, workingDirectory):
"""Remap paths in comand line argument string.
Using Deadline rempper it will remap all path found in command-line.
Args:
executable (str): path to executable
arguments (str): arguments passed to executable
workingDirectory (str): working directory path
Returns:
Tuple(executable, arguments, workingDirectory)
"""
print("-" * 40)
print("executable: {}".format(executable))
print("arguments: {}".format(arguments))
print("workingDirectory: {}".format(workingDirectory))
print("-" * 40)
print("Remapping arguments ...")
arguments = RepositoryUtils.CheckPathMapping(arguments)
print("* {}".format(arguments))
print("-" * 40)
return executable, arguments, workingDirectory
def pype(deadlinePlugin):
"""Remaps `PYPE_METADATA_FILE` and `PYPE_PYTHON_EXE` environment vars.
`PYPE_METADATA_FILE` is used on farm to point to rendered data. This path
originates on platform from which this job was published. To be able to
publish on different platform, this path needs to be remapped.
`PYPE_PYTHON_EXE` can be used to specify custom location of python
interpreter to use for Pype. This is remappeda also if present even
though it probably doesn't make much sense.
Arguments:
deadlinePlugin: Deadline job plugin passed by Deadline
"""
print(">>> Getting job ...")
job = deadlinePlugin.GetJob()
# PYPE should be here, not OPENPYPE - backward compatibility!!
pype_metadata = job.GetJobEnvironmentKeyValue("PYPE_METADATA_FILE")
pype_python = job.GetJobEnvironmentKeyValue("PYPE_PYTHON_EXE")
print(">>> Having backward compatible env vars {}/{}".format(pype_metadata,
pype_python))
# test if it is pype publish job.
if pype_metadata:
pype_metadata = RepositoryUtils.CheckPathMapping(pype_metadata)
if platform.system().lower() == "linux":
pype_metadata = pype_metadata.replace("\\", "/")
print("- remapping PYPE_METADATA_FILE: {}".format(pype_metadata))
job.SetJobEnvironmentKeyValue("PYPE_METADATA_FILE", pype_metadata)
deadlinePlugin.SetProcessEnvironmentVariable(
"PYPE_METADATA_FILE", pype_metadata)
if pype_python:
pype_python = RepositoryUtils.CheckPathMapping(pype_python)
if platform.system().lower() == "linux":
pype_python = pype_python.replace("\\", "/")
print("- remapping PYPE_PYTHON_EXE: {}".format(pype_python))
job.SetJobEnvironmentKeyValue("PYPE_PYTHON_EXE", pype_python)
deadlinePlugin.SetProcessEnvironmentVariable(
"PYPE_PYTHON_EXE", pype_python)
deadlinePlugin.ModifyCommandLineCallback += pype_command_line
def __main__(deadlinePlugin):
print("*** GlobalJobPreload start ...")
print(">>> Getting job ...")
@@ -329,5 +262,3 @@ def __main__(deadlinePlugin):
inject_render_job_id(deadlinePlugin)
elif openpype_render_job == '1' or openpype_remote_job == '1':
inject_openpype_environment(deadlinePlugin)
else:
pype(deadlinePlugin) # backward compatibility with Pype2

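This plugin trades f-strings for str.format, presumably so it still parses on interpreters older than Python 3.6 in Deadline's plugin environment (an assumption; the commit does not say). The two spellings are otherwise equivalent:

    version, subdir = "3.9.4", "C:/OpenPype/3.9.4"  # hypothetical values
    print(f" - found: {version} - {subdir}")            # Python 3.6+ only
    print(" - found: {} - {}".format(version, subdir))  # parses on 2.7+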
View file

@@ -62,7 +62,7 @@ class AbstractProvider:
@abc.abstractmethod
def upload_file(self, source_path, path,
server, collection, file, representation, site,
server, project_name, file, representation, site,
overwrite=False):
"""
Copy file from 'source_path' to 'target_path' on provider.
@@ -75,7 +75,7 @@ class AbstractProvider:
arguments for saving progress:
server (SyncServer): server instance to call update_db on
collection (str): name of collection
project_name (str): name of project
file (dict): info about uploaded file (matches structure from db)
representation (dict): complete repre containing 'file'
site (str): site name
@@ -87,7 +87,7 @@ class AbstractProvider:
@abc.abstractmethod
def download_file(self, source_path, local_path,
server, collection, file, representation, site,
server, project_name, file, representation, site,
overwrite=False):
"""
Download file from provider into local system
@@ -99,7 +99,7 @@ class AbstractProvider:
arguments for saving progress:
server (SyncServer): server instance to call update_db on
collection (str): name of collection
project_name (str): name of project
file (dict): info about uploaded file (matches structure from db)
representation (dict): complete repre containing 'file'
site (str): site name

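Across the abstract provider and every concrete handler below, the collection parameter is renamed to project_name; only the keyword changes, not the behavior. A hypothetical call site after the rename (handler and document values are illustrative):

    def sync_one_file(handler, sync_server, file_doc, repre_doc,
                      source_path, target_path):
        # Same call as before the commit, with collection= renamed.
        return handler.upload_file(
            source_path, target_path,
            server=sync_server,
            project_name="demo_project",  # formerly collection="demo_project"
            file=file_doc,
            representation=repre_doc,
            site="gdrive",
            overwrite=False)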
View file

@@ -224,7 +224,7 @@ class DropboxHandler(AbstractProvider):
return False
def upload_file(self, source_path, path,
server, collection, file, representation, site,
server, project_name, file, representation, site,
overwrite=False):
"""
Copy file from 'source_path' to 'target_path' on provider.
@@ -237,7 +237,7 @@ class DropboxHandler(AbstractProvider):
arguments for saving progress:
server (SyncServer): server instance to call update_db on
collection (str): name of collection
project_name (str): name of project
file (dict): info about uploaded file (matches structure from db)
representation (dict): complete repre containing 'file'
site (str): site name
@@ -290,7 +290,7 @@ class DropboxHandler(AbstractProvider):
cursor.offset = f.tell()
server.update_db(
collection=collection,
project_name=project_name,
new_file_id=None,
file=file,
representation=representation,
@@ -301,7 +301,7 @@ class DropboxHandler(AbstractProvider):
return path
def download_file(self, source_path, local_path,
server, collection, file, representation, site,
server, project_name, file, representation, site,
overwrite=False):
"""
Download file from provider into local system
@@ -313,7 +313,7 @@ class DropboxHandler(AbstractProvider):
arguments for saving progress:
server (SyncServer): server instance to call update_db on
collection (str): name of collection
project_name (str): name of project
file (dict): info about uploaded file (matches structure from db)
representation (dict): complete repre containing 'file'
site (str): site name
@@ -337,7 +337,7 @@ class DropboxHandler(AbstractProvider):
self.dbx.files_download_to_file(local_path, source_path)
server.update_db(
collection=collection,
project_name=project_name,
new_file_id=None,
file=file,
representation=representation,

View file

@@ -251,7 +251,7 @@ class GDriveHandler(AbstractProvider):
return folder_id
def upload_file(self, source_path, path,
server, collection, file, representation, site,
server, project_name, file, representation, site,
overwrite=False):
"""
Uploads single file from 'source_path' to destination 'path'.
@@ -264,7 +264,7 @@ class GDriveHandler(AbstractProvider):
arguments for saving progress:
server (SyncServer): server instance to call update_db on
collection (str): name of collection
project_name (str): name of project
file (dict): info about uploaded file (matches structure from db)
representation (dict): complete repre containing 'file'
site (str): site name
@@ -324,7 +324,7 @@ class GDriveHandler(AbstractProvider):
while response is None:
if server.is_representation_paused(representation['_id'],
check_parents=True,
project_name=collection):
project_name=project_name):
raise ValueError("Paused during process, please redo.")
if status:
status_val = float(status.progress())
@@ -333,7 +333,7 @@ class GDriveHandler(AbstractProvider):
last_tick = time.time()
log.debug("Uploaded %d%%." %
int(status_val * 100))
server.update_db(collection=collection,
server.update_db(project_name=project_name,
new_file_id=None,
file=file,
representation=representation,
@@ -358,7 +358,7 @@ class GDriveHandler(AbstractProvider):
return response['id']
def download_file(self, source_path, local_path,
server, collection, file, representation, site,
server, project_name, file, representation, site,
overwrite=False):
"""
Downloads single file from 'source_path' (remote) to 'local_path'.
@@ -372,7 +372,7 @@ class GDriveHandler(AbstractProvider):
arguments for saving progress:
server (SyncServer): server instance to call update_db on
collection (str): name of collection
project_name (str): name of project
file (dict): info about uploaded file (matches structure from db)
representation (dict): complete repre containing 'file'
site (str): site name
@@ -410,7 +410,7 @@ class GDriveHandler(AbstractProvider):
while response is None:
if server.is_representation_paused(representation['_id'],
check_parents=True,
project_name=collection):
project_name=project_name):
raise ValueError("Paused during process, please redo.")
if status:
status_val = float(status.progress())
@@ -419,7 +419,7 @@ class GDriveHandler(AbstractProvider):
last_tick = time.time()
log.debug("Downloaded %d%%." %
int(status_val * 100))
server.update_db(collection=collection,
server.update_db(project_name=project_name,
new_file_id=None,
file=file,
representation=representation,

View file

@@ -82,7 +82,7 @@ class LocalDriveHandler(AbstractProvider):
return editable
def upload_file(self, source_path, target_path,
server, collection, file, representation, site,
server, project_name, file, representation, site,
overwrite=False, direction="Upload"):
"""
Copies file from 'source_path' to 'target_path'
@@ -95,7 +95,7 @@ class LocalDriveHandler(AbstractProvider):
thread = threading.Thread(target=self._copy,
args=(source_path, target_path))
thread.start()
self._mark_progress(collection, file, representation, server,
self._mark_progress(project_name, file, representation, server,
site, source_path, target_path, direction)
else:
if os.path.exists(target_path):
@@ -105,13 +105,14 @@ class LocalDriveHandler(AbstractProvider):
return os.path.basename(target_path)
def download_file(self, source_path, local_path,
server, collection, file, representation, site,
server, project_name, file, representation, site,
overwrite=False):
"""
Download a file from 'source_path' to 'local_path'
"""
return self.upload_file(source_path, local_path,
server, collection, file, representation, site,
server, project_name, file,
representation, site,
overwrite, direction="Download")
def delete_file(self, path):
@@ -188,7 +189,7 @@ class LocalDriveHandler(AbstractProvider):
except shutil.SameFileError:
print("same files, skipping")
def _mark_progress(self, collection, file, representation, server, site,
def _mark_progress(self, project_name, file, representation, server, site,
source_path, target_path, direction):
"""
Updates progress field in DB by values 0-1.
@@ -204,7 +205,7 @@ class LocalDriveHandler(AbstractProvider):
status_val = target_file_size / source_file_size
last_tick = time.time()
log.debug(direction + "ed %d%%." % int(status_val * 100))
server.update_db(collection=collection,
server.update_db(project_name=project_name,
new_file_id=None,
file=file,
representation=representation,

View file

@@ -222,7 +222,7 @@ class SFTPHandler(AbstractProvider):
return os.path.basename(path)
def upload_file(self, source_path, target_path,
server, collection, file, representation, site,
server, project_name, file, representation, site,
overwrite=False):
"""
Uploads single file from 'source_path' to destination 'path'.
@@ -235,7 +235,7 @@ class SFTPHandler(AbstractProvider):
arguments for saving progress:
server (SyncServer): server instance to call update_db on
collection (str): name of collection
project_name (str): name of project
file (dict): info about uploaded file (matches structure from db)
representation (dict): complete repre containing 'file'
site (str): site name
@@ -256,7 +256,7 @@ class SFTPHandler(AbstractProvider):
thread = threading.Thread(target=self._upload,
args=(source_path, target_path))
thread.start()
self._mark_progress(collection, file, representation, server,
self._mark_progress(project_name, file, representation, server,
site, source_path, target_path, "upload")
return os.path.basename(target_path)
@@ -267,7 +267,7 @@ class SFTPHandler(AbstractProvider):
conn.put(source_path, target_path)
def download_file(self, source_path, target_path,
server, collection, file, representation, site,
server, project_name, file, representation, site,
overwrite=False):
"""
Downloads single file from 'source_path' (remote) to 'target_path'.
@@ -281,7 +281,7 @@ class SFTPHandler(AbstractProvider):
arguments for saving progress:
server (SyncServer): server instance to call update_db on
collection (str): name of collection
project_name (str): name of project
file (dict): info about uploaded file (matches structure from db)
representation (dict): complete repre containing 'file'
site (str): site name
@@ -302,7 +302,7 @@ class SFTPHandler(AbstractProvider):
thread = threading.Thread(target=self._download,
args=(source_path, target_path))
thread.start()
self._mark_progress(collection, file, representation, server,
self._mark_progress(project_name, file, representation, server,
site, source_path, target_path, "download")
return os.path.basename(target_path)
@@ -425,7 +425,7 @@ class SFTPHandler(AbstractProvider):
pysftp.exceptions.ConnectionException):
log.warning("Couldn't connect", exc_info=True)
def _mark_progress(self, collection, file, representation, server, site,
def _mark_progress(self, project_name, file, representation, server, site,
source_path, target_path, direction):
"""
Updates progress field in DB by values 0-1.
@@ -446,7 +446,7 @@ class SFTPHandler(AbstractProvider):
status_val = target_file_size / source_file_size
last_tick = time.time()
log.debug(direction + "ed %d%%." % int(status_val * 100))
server.update_db(collection=collection,
server.update_db(project_name=project_name,
new_file_id=None,
file=file,
representation=representation,

View file

@@ -14,7 +14,7 @@ from .utils import SyncStatus, ResumableError
log = PypeLogger().get_logger("SyncServer")
async def upload(module, collection, file, representation, provider_name,
async def upload(module, project_name, file, representation, provider_name,
remote_site_name, tree=None, preset=None):
"""
Upload single 'file' of a 'representation' to 'provider'.
@@ -31,7 +31,7 @@ async def upload(module, collection, file, representation, provider_name,
Args:
module(SyncServerModule): object to run SyncServerModule API
collection (str): source collection
project_name (str): source db
file (dictionary): of file from representation in Mongo
representation (dictionary): of representation
provider_name (string): gdrive, gdc etc.
@@ -47,15 +47,16 @@ async def upload(module, collection, file, representation, provider_name,
# thread can do that at a time, upload/download to prepared
# structure should be run in parallel
remote_handler = lib.factory.get_provider(provider_name,
collection,
project_name,
remote_site_name,
tree=tree,
presets=preset)
file_path = file.get("path", "")
try:
local_file_path, remote_file_path = resolve_paths(module,
file_path, collection, remote_site_name, remote_handler
local_file_path, remote_file_path = resolve_paths(
module, file_path, project_name,
remote_site_name, remote_handler
)
except Exception as exp:
print(exp)
@@ -74,27 +75,28 @@ async def upload(module, collection, file, representation, provider_name,
local_file_path,
remote_file_path,
module,
collection,
project_name,
file,
representation,
remote_site_name,
True
)
module.handle_alternate_site(collection, representation, remote_site_name,
module.handle_alternate_site(project_name, representation,
remote_site_name,
file["_id"], file_id)
return file_id
async def download(module, collection, file, representation, provider_name,
async def download(module, project_name, file, representation, provider_name,
remote_site_name, tree=None, preset=None):
"""
Downloads file to local folder denoted in representation.Context.
Args:
module(SyncServerModule): object to run SyncServerModule API
collection (str): source collection
project_name (str): source project
file (dictionary) : info about processed file
representation (dictionary): repr that 'file' belongs to
provider_name (string): 'gdrive' etc
@@ -108,20 +110,20 @@ async def download(module, collection, file, representation, provider_name,
"""
with module.lock:
remote_handler = lib.factory.get_provider(provider_name,
collection,
project_name,
remote_site_name,
tree=tree,
presets=preset)
file_path = file.get("path", "")
local_file_path, remote_file_path = resolve_paths(
module, file_path, collection, remote_site_name, remote_handler
module, file_path, project_name, remote_site_name, remote_handler
)
local_folder = os.path.dirname(local_file_path)
os.makedirs(local_folder, exist_ok=True)
local_site = module.get_active_site(collection)
local_site = module.get_active_site(project_name)
loop = asyncio.get_running_loop()
file_id = await loop.run_in_executor(None,
@@ -129,20 +131,20 @@ async def download(module, collection, file, representation, provider_name,
remote_file_path,
local_file_path,
module,
collection,
project_name,
file,
representation,
local_site,
True
)
module.handle_alternate_site(collection, representation, local_site,
module.handle_alternate_site(project_name, representation, local_site,
file["_id"], file_id)
return file_id
def resolve_paths(module, file_path, collection,
def resolve_paths(module, file_path, project_name,
remote_site_name=None, remote_handler=None):
"""
Returns tuple of local and remote file paths with {root}
@ -153,7 +155,7 @@ def resolve_paths(module, file_path, collection,
Args:
module(SyncServerModule): object to run SyncServerModule API
file_path(string): path with {root}
collection(string): project name
project_name(string): project name
remote_site_name(string): remote site
remote_handler(AbstractProvider): implementation
Returns:
@ -164,7 +166,7 @@ def resolve_paths(module, file_path, collection,
remote_file_path = remote_handler.resolve_path(file_path)
local_handler = lib.factory.get_provider(
'local_drive', collection, module.get_active_site(collection))
'local_drive', project_name, module.get_active_site(project_name))
local_file_path = local_handler.resolve_path(file_path)
return local_file_path, remote_file_path
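A hedged illustration of the resulting pair, assuming a `{root}`-anchored path and a 'gdrive' remote handler (all paths below are made up):

    file_path = "{root}/my_project/assets/hero/model_v001.ma"
    local_path, remote_path = resolve_paths(
        module, file_path, "my_project",
        remote_site_name="gdrive", remote_handler=remote_handler)
    # local_path  -> e.g. "/mnt/studio/my_project/assets/hero/model_v001.ma"
    # remote_path -> e.g. "/openpype/my_project/assets/hero/model_v001.ma"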
@ -269,8 +271,8 @@ class SyncServerThread(threading.Thread):
- gets list of collections in DB
- gets list of active remote providers (has configuration,
credentials)
- for each collection it looks for representations that should
be synced
- for each project_name it looks for representations that
should be synced
- synchronize found collections
- update representations - fills error messages for exceptions
- waits X seconds and repeat
@ -282,17 +284,17 @@ class SyncServerThread(threading.Thread):
import time
start_time = time.time()
self.module.set_sync_project_settings() # clean cache
collection = None
project_name = None
enabled_projects = self.module.get_enabled_projects()
for collection in enabled_projects:
preset = self.module.sync_project_settings[collection]
for project_name in enabled_projects:
preset = self.module.sync_project_settings[project_name]
local_site, remote_site = self._working_sites(collection)
local_site, remote_site = self._working_sites(project_name)
if not all([local_site, remote_site]):
continue
sync_repres = self.module.get_sync_representations(
collection,
project_name,
local_site,
remote_site
)
@ -310,7 +312,7 @@ class SyncServerThread(threading.Thread):
remote_provider = \
self.module.get_provider_for_site(site=remote_site)
handler = lib.factory.get_provider(remote_provider,
collection,
project_name,
remote_site,
presets=site_preset)
limit = lib.factory.get_provider_batch_limit(
@ -341,7 +343,7 @@ class SyncServerThread(threading.Thread):
limit -= 1
task = asyncio.create_task(
upload(self.module,
collection,
project_name,
file,
sync,
remote_provider,
@ -353,7 +355,7 @@ class SyncServerThread(threading.Thread):
files_processed_info.append((file,
sync,
remote_site,
collection
project_name
))
processed_file_path.add(file_path)
if status == SyncStatus.DO_DOWNLOAD:
@ -361,7 +363,7 @@ class SyncServerThread(threading.Thread):
limit -= 1
task = asyncio.create_task(
download(self.module,
collection,
project_name,
file,
sync,
remote_provider,
@ -373,7 +375,7 @@ class SyncServerThread(threading.Thread):
files_processed_info.append((file,
sync,
local_site,
collection
project_name
))
processed_file_path.add(file_path)
@ -384,12 +386,12 @@ class SyncServerThread(threading.Thread):
return_exceptions=True)
for file_id, info in zip(files_created,
files_processed_info):
file, representation, site, collection = info
file, representation, site, project_name = info
error = None
if isinstance(file_id, BaseException):
error = str(file_id)
file_id = None
self.module.update_db(collection,
self.module.update_db(project_name,
file_id,
file,
representation,
@ -399,7 +401,7 @@ class SyncServerThread(threading.Thread):
duration = time.time() - start_time
log.debug("One loop took {:.2f}s".format(duration))
delay = self.module.get_loop_delay(collection)
delay = self.module.get_loop_delay(project_name)
log.debug("Waiting for {} seconds to new loop".format(delay))
self.timer = asyncio.create_task(self.run_timer(delay))
await asyncio.gather(self.timer)
@ -458,19 +460,19 @@ class SyncServerThread(threading.Thread):
self.timer.cancel()
self.timer = None
def _working_sites(self, collection):
if self.module.is_project_paused(collection):
def _working_sites(self, project_name):
if self.module.is_project_paused(project_name):
log.debug("Both sites same, skipping")
return None, None
local_site = self.module.get_active_site(collection)
remote_site = self.module.get_remote_site(collection)
local_site = self.module.get_active_site(project_name)
remote_site = self.module.get_remote_site(project_name)
if local_site == remote_site:
log.debug("{}-{} sites same, skipping".format(local_site,
remote_site))
return None, None
configured_sites = _get_configured_sites(self.module, collection)
configured_sites = _get_configured_sites(self.module, project_name)
if not all([local_site in configured_sites,
remote_site in configured_sites]):
log.debug("Some of the sites {} - {} is not ".format(local_site,

View file

@ -25,6 +25,8 @@ from .providers import lib
from .utils import time_function, SyncStatus, SiteAlreadyPresentError
from openpype.client import get_representations, get_representation_by_id
log = PypeLogger.get_logger("SyncServer")
@ -128,12 +130,12 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
self.projects_processed = set()
""" Start of Public API """
def add_site(self, collection, representation_id, site_name=None,
def add_site(self, project_name, representation_id, site_name=None,
force=False):
"""
Adds new site to representation to be synced.
'collection' must have synchronization enabled (globally or
'project_name' must have synchronization enabled (globally or
project only)
Used as an API endpoint from outside applications (Loader etc.).
@ -141,7 +143,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
Use 'force' to reset existing site.
Args:
collection (string): project name (must match DB)
project_name (string): project name (must match DB)
representation_id (string): MongoDB _id value
site_name (string): name of configured and active site
force (bool): reset site if exists
@ -151,25 +153,25 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
not 'force'
ValueError - other errors (repre not found, misconfiguration)
"""
if not self.get_sync_project_setting(collection):
if not self.get_sync_project_setting(project_name):
raise ValueError("Project not configured")
if not site_name:
site_name = self.DEFAULT_SITE
self.reset_site_on_representation(collection,
self.reset_site_on_representation(project_name,
representation_id,
site_name=site_name,
force=force)
def remove_site(self, collection, representation_id, site_name,
def remove_site(self, project_name, representation_id, site_name,
remove_local_files=False):
"""
Removes 'site_name' for particular 'representation_id' on
'collection'
'project_name'
Args:
collection (string): project name (must match DB)
project_name (string): project name (must match DB)
representation_id (string): MongoDB _id value
site_name (string): name of configured and active site
remove_local_files (bool): remove only files for 'local_id'
@ -178,15 +180,15 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
Returns:
throws ValueError if any issue
"""
if not self.get_sync_project_setting(collection):
if not self.get_sync_project_setting(project_name):
raise ValueError("Project not configured")
self.reset_site_on_representation(collection,
self.reset_site_on_representation(project_name,
representation_id,
site_name=site_name,
remove=True)
if remove_local_files:
self._remove_local_file(collection, representation_id, site_name)
self._remove_local_file(project_name, representation_id, site_name)
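A usage sketch of this pair of API calls (the project name, representation id and site below are placeholders):

    sync_server = ...  # SyncServerModule instance from the modules manager
    repre_id = "607f1f77bcf86cd799439011"  # hypothetical MongoDB _id

    # mark representation for syncing to the default site, resetting if present
    sync_server.add_site("my_project", repre_id, force=True)

    # later: drop a local site and remove its files from disk
    sync_server.remove_site("my_project", repre_id, "local_0",
                            remove_local_files=True)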
def compute_resource_sync_sites(self, project_name):
"""Get available resource sync sites state for publish process.
@ -333,9 +335,9 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
return alt_site_pairs
def clear_project(self, collection, site_name):
def clear_project(self, project_name, site_name):
"""
Clear 'collection' of 'site_name' and its local files
Clear 'project_name' of 'site_name' and its local files
Works only on real local sites, not on 'studio'
"""
@ -344,16 +346,17 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
"files.sites.name": site_name
}
# TODO currently not possible to replace with get_representations
representations = list(
self.connection.database[collection].find(query))
self.connection.database[project_name].find(query))
if not representations:
self.log.debug("No repre found")
return
for repre in representations:
self.remove_site(collection, repre.get("_id"), site_name, True)
self.remove_site(project_name, repre.get("_id"), site_name, True)
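For example (placeholder values), wiping a vendor's local site from a whole project could look like:

    sync_server.clear_project("my_project", "local_0")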
def create_validate_project_task(self, collection, site_name):
def create_validate_project_task(self, project_name, site_name):
"""Adds metadata about project files validation on a queue.
This process will loop through all representations and check if
@ -370,33 +373,28 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
"""
task = {
"type": "validate",
"project_name": collection,
"func": lambda: self.validate_project(collection, site_name,
"project_name": project_name,
"func": lambda: self.validate_project(project_name, site_name,
reset_missing=True)
}
self.projects_processed.add(collection)
self.projects_processed.add(project_name)
self.long_running_tasks.append(task)
def validate_project(self, collection, site_name, reset_missing=False):
"""Validate 'collection' of 'site_name' and its local files
def validate_project(self, project_name, site_name, reset_missing=False):
"""Validate 'project_name' of 'site_name' and its local files
If file present and not marked with a 'site_name' in DB, DB is
updated with site name and file modified date.
Args:
collection (string): project name
project_name (string): project name
site_name (string): active site name
reset_missing (bool): if True reset site in DB if missing
physically
"""
self.log.debug("Validation of {} for {} started".format(collection,
self.log.debug("Validation of {} for {} started".format(project_name,
site_name))
query = {
"type": "representation"
}
representations = list(
self.connection.database[collection].find(query))
representations = list(get_representations(project_name))
if not representations:
self.log.debug("No repre found")
return
@ -416,7 +414,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
continue
file_path = repre_file.get("path", "")
local_file_path = self.get_local_file_path(collection,
local_file_path = self.get_local_file_path(project_name,
site_name,
file_path)
@ -428,14 +426,11 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
"Adding site {} for {}".format(site_name,
repre_id))
query = {
"_id": repre_id
}
created_dt = datetime.fromtimestamp(
os.path.getmtime(local_file_path))
elem = {"name": site_name,
"created_dt": created_dt}
self._add_site(collection, query, repre, elem,
self._add_site(project_name, repre, elem,
site_name=site_name,
file_id=repre_file["_id"],
force=True)
@ -445,41 +440,42 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
self.log.debug("Resetting site {} for {}".
format(site_name, repre_id))
self.reset_site_on_representation(
collection, repre_id, site_name=site_name,
project_name, repre_id, site_name=site_name,
file_id=repre_file["_id"])
sites_reset += 1
if sites_added % 100 == 0:
self.log.debug("Sites added {}".format(sites_added))
self.log.debug("Validation of {} for {} ended".format(collection,
self.log.debug("Validation of {} for {} ended".format(project_name,
site_name))
self.log.info("Sites added {}, sites reset {}".format(sites_added,
sites_reset))
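A hedged example of triggering the validation directly, e.g. from a maintenance script (values are placeholders):

    # re-add sites for files found on disk, reset sites for missing files
    sync_server.validate_project("my_project", "studio", reset_missing=True)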
def pause_representation(self, collection, representation_id, site_name):
def pause_representation(self, project_name, representation_id, site_name):
"""
Sets 'representation_id' as paused, eg. no syncing should be
happening on it.
Args:
collection (string): project name
project_name (string): project name
representation_id (string): MongoDB objectId value
site_name (string): 'gdrive', 'studio' etc.
"""
log.info("Pausing SyncServer for {}".format(representation_id))
self._paused_representations.add(representation_id)
self.reset_site_on_representation(collection, representation_id,
self.reset_site_on_representation(project_name, representation_id,
site_name=site_name, pause=True)
def unpause_representation(self, collection, representation_id, site_name):
def unpause_representation(self, project_name,
representation_id, site_name):
"""
Sets 'representation_id' as unpaused.
Does not fail or warn if repre wasn't paused.
Args:
collection (string): project name
project_name (string): project name
representation_id (string): MongoDB objectId value
site_name (string): 'gdrive', 'studio' etc.
"""
@ -489,7 +485,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
except KeyError:
pass
# self.paused_representations is not persistent
self.reset_site_on_representation(collection, representation_id,
self.reset_site_on_representation(project_name, representation_id,
site_name=site_name, pause=False)
def is_representation_paused(self, representation_id,
@ -520,7 +516,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
happening on all representation inside.
Args:
project_name (string): collection name
project_name (string): project name
"""
log.info("Pausing SyncServer for {}".format(project_name))
self._paused_projects.add(project_name)
@ -532,7 +528,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
Does not fail or warn if project wasn't paused.
Args:
project_name (string): collection name
project_name (string): project name
"""
log.info("Unpausing SyncServer for {}".format(project_name))
try:
@ -545,7 +541,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
Returns if 'project_name' is paused or not.
Args:
project_name (string): collection name
project_name (string): project name
check_parents (bool): check if server itself
is not paused
Returns:
@ -944,8 +940,8 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
return True
return False
def handle_alternate_site(self, collection, representation, processed_site,
file_id, synced_file_id):
def handle_alternate_site(self, project_name, representation,
processed_site, file_id, synced_file_id):
"""
For special use cases where one site vendors another.
@ -958,7 +954,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
same location >> file is accessible on 'sftp' site right away.
Args:
collection (str): name of project
project_name (str): name of project
representation (dict)
processed_site (str): real site_name of published/uploaded file
file_id (ObjectId): DB id of file handled
@ -982,26 +978,112 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
alternate_sites = set(alternate_sites)
for alt_site in alternate_sites:
query = {
"_id": representation["_id"]
}
elem = {"name": alt_site,
"created_dt": datetime.now(),
"id": synced_file_id}
self.log.debug("Adding alternate {} to {}".format(
alt_site, representation["_id"]))
self._add_site(collection, query,
self._add_site(project_name,
representation, elem,
alt_site, file_id=file_id, force=True)
def get_repre_info_for_versions(self, project_name, version_ids,
active_site, remote_site):
"""Returns representation documents for versions and sites combi
Args:
project_name (str)
version_ids (list): of version[_id]
active_site (string): 'local', 'studio' etc
remote_site (string): ditto
Returns:
    (CommandCursor): documents with 'repre_count' and availability
        ratios ('avail_repre_local', 'avail_repre_remote') per version id
"""
self.connection.Session["AVALON_PROJECT"] = project_name
query = [
{"$match": {"parent": {"$in": version_ids},
"type": "representation",
"files.sites.name": {"$exists": 1}}},
{"$unwind": "$files"},
{'$addFields': {
'order_local': {
'$filter': {
'input': '$files.sites', 'as': 'p',
'cond': {'$eq': ['$$p.name', active_site]}
}
}
}},
{'$addFields': {
'order_remote': {
'$filter': {
'input': '$files.sites', 'as': 'p',
'cond': {'$eq': ['$$p.name', remote_site]}
}
}
}},
{'$addFields': {
'progress_local': {"$arrayElemAt": [{
'$cond': [
{'$size': "$order_local.progress"},
"$order_local.progress",
# if 'created_dt' exists, count it as available
{'$cond': [
{'$size': "$order_local.created_dt"},
[1],
[0]
]}
]},
0
]}
}},
{'$addFields': {
'progress_remote': {"$arrayElemAt": [{
'$cond': [
{'$size': "$order_remote.progress"},
"$order_remote.progress",
# if 'created_dt' exists, count it as available
{'$cond': [
{'$size': "$order_remote.created_dt"},
[1],
[0]
]}
]},
0
]}
}},
{'$group': { # first group by repre
'_id': '$_id',
'parent': {'$first': '$parent'},
'avail_ratio_local': {
'$first': {
'$divide': [{'$sum': "$progress_local"}, {'$sum': 1}]
}
},
'avail_ratio_remote': {
'$first': {
'$divide': [{'$sum': "$progress_remote"}, {'$sum': 1}]
}
}
}},
{'$group': { # second group by parent, eg version_id
'_id': '$parent',
'repre_count': {'$sum': 1}, # total representations
# fully available representation for site
'avail_repre_local': {'$sum': "$avail_ratio_local"},
'avail_repre_remote': {'$sum': "$avail_ratio_remote"},
}},
]
return self.connection.aggregate(query)
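A sketch of consuming the aggregation result (field names follow the pipeline above; the project, site names and inputs are placeholders):

    version_ids = [repre["parent"] for repre in my_repres]  # hypothetical
    cursor = sync_server.get_repre_info_for_versions(
        "my_project", version_ids, "studio", "gdrive")
    for doc in cursor:
        # doc["_id"] is a version id after the second $group stage
        local_ratio = doc["avail_repre_local"] / doc["repre_count"]
        remote_ratio = doc["avail_repre_remote"] / doc["repre_count"]
        print(doc["_id"], local_ratio, remote_ratio)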
""" End of Public API """
def get_local_file_path(self, collection, site_name, file_path):
def get_local_file_path(self, project_name, site_name, file_path):
"""
Externalized for app
"""
handler = LocalDriveHandler(collection, site_name)
handler = LocalDriveHandler(project_name, site_name)
local_file_path = handler.resolve_path(file_path)
return local_file_path
@ -1288,7 +1370,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
return sites.get(site, 'N/A')
@time_function
def get_sync_representations(self, collection, active_site, remote_site):
def get_sync_representations(self, project_name, active_site, remote_site):
"""
Get representations that should be synced, these could be
recognised by presence of document in 'files.sites', where key is
@ -1299,8 +1381,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
better performance. Goal is to get as few representations as
possible.
Args:
collection (string): name of collection (in most cases matches
project name
project_name (string): project name
active_site (string): identifier of current active site (could be
'local_0' when working from home, 'studio' when working in the
studio (default)
@ -1309,10 +1390,10 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
Returns:
(list) of dictionaries
"""
log.debug("Check representations for : {}".format(collection))
self.connection.Session["AVALON_PROJECT"] = collection
log.debug("Check representations for : {}".format(project_name))
self.connection.Session["AVALON_PROJECT"] = project_name
# retry_cnt - number of attempts to sync specific file before giving up
retries_arr = self._get_retries_arr(collection)
retries_arr = self._get_retries_arr(project_name)
match = {
"type": "representation",
"$or": [
@ -1449,14 +1530,14 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
return SyncStatus.DO_NOTHING
def update_db(self, collection, new_file_id, file, representation,
def update_db(self, project_name, new_file_id, file, representation,
site, error=None, progress=None, priority=None):
"""
Update 'provider' portion of records in DB with success (file_id)
or error (exception)
Args:
collection (string): name of project - force to db connection as
project_name (string): name of project - force to db connection as
each file might come from different collection
new_file_id (string):
file (dictionary): info about processed file (pulled from DB)
@ -1499,7 +1580,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
if file_id:
arr_filter.append({'f._id': ObjectId(file_id)})
self.connection.database[collection].update_one(
self.connection.database[project_name].update_one(
query,
update,
upsert=True,
@ -1562,7 +1643,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
return -1, None
def reset_site_on_representation(self, collection, representation_id,
def reset_site_on_representation(self, project_name, representation_id,
side=None, file_id=None, site_name=None,
remove=False, pause=None, force=False):
"""
@ -1579,7 +1660,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
Should be used when repre should be synced to new site.
Args:
collection (string): name of project (eg. collection) in DB
project_name (string): name of project (eg. collection) in DB
representation_id(string): _id of representation
file_id (string): file _id in representation
side (string): local or remote side
@ -1593,20 +1674,18 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
not 'force'
ValueError - other errors (repre not found, misconfiguration)
"""
query = {
"_id": ObjectId(representation_id)
}
representation = self.connection.database[collection].find_one(query)
representation = get_representation_by_id(project_name,
representation_id)
if not representation:
raise ValueError("Representation {} not found in {}".
format(representation_id, collection))
format(representation_id, project_name))
if side and site_name:
raise ValueError("Misconfiguration, only one of side and " +
"site_name arguments should be passed.")
local_site = self.get_active_site(collection)
remote_site = self.get_remote_site(collection)
local_site = self.get_active_site(project_name)
remote_site = self.get_remote_site(project_name)
if side:
if side == 'local':
@ -1617,37 +1696,43 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
elem = {"name": site_name}
if file_id: # reset site for particular file
self._reset_site_for_file(collection, query,
self._reset_site_for_file(project_name, representation_id,
elem, file_id, site_name)
elif side: # reset site for whole representation
self._reset_site(collection, query, elem, site_name)
self._reset_site(project_name, representation_id, elem, site_name)
elif remove: # remove site for whole representation
self._remove_site(collection, query, representation, site_name)
self._remove_site(project_name,
representation, site_name)
elif pause is not None:
self._pause_unpause_site(collection, query,
self._pause_unpause_site(project_name,
representation, site_name, pause)
else: # add new site to all files for representation
self._add_site(collection, query, representation, elem, site_name,
self._add_site(project_name, representation, elem, site_name,
force=force)
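The branches above map argument combinations to operations; illustrative calls (ids and site names are placeholders):

    # add 'gdrive' to all files of a representation, resetting existing entries
    sync_server.reset_site_on_representation(
        "my_project", repre_id, site_name="gdrive", force=True)

    # pause syncing of a single representation on a site
    sync_server.reset_site_on_representation(
        "my_project", repre_id, site_name="gdrive", pause=True)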
def _update_site(self, collection, query, update, arr_filter):
def _update_site(self, project_name, representation_id,
update, arr_filter):
"""
Auxiliary method to call update_one function on DB
Used for refactoring ugly reset_provider_for_file
"""
self.connection.database[collection].update_one(
query = {
"_id": ObjectId(representation_id)
}
self.connection.database[project_name].update_one(
query,
update,
upsert=True,
array_filters=arr_filter
)
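The `arr_filter` values rely on MongoDB's filtered positional operator (`$[identifier]`); a standalone pymongo illustration under assumed names (database, collection, ids and site are hypothetical):

    from pymongo import MongoClient

    coll = MongoClient()["avalon"]["my_project"]
    # touch only the 'sites' element named 'gdrive' inside one 'files' element
    coll.update_one(
        {"_id": repre_id},
        {"$set": {"files.$[f].sites.$[s].priority": 99}},
        array_filters=[{"f._id": file_id}, {"s.name": "gdrive"}])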
def _reset_site_for_file(self, collection, query,
def _reset_site_for_file(self, project_name, representation_id,
elem, file_id, site_name):
"""
Resets 'site_name' for 'file_id' on representation in
'collection'
'project_name'
"""
update = {
"$set": {"files.$[f].sites.$[s]": elem}
@ -1660,9 +1745,9 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
{'f._id': file_id}
]
self._update_site(collection, query, update, arr_filter)
self._update_site(project_name, representation_id, update, arr_filter)
def _reset_site(self, collection, query, elem, site_name):
def _reset_site(self, project_name, representation_id, elem, site_name):
"""
Resets 'site_name' for all files of 'representation_id'
"""
@ -1674,9 +1759,9 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
{'s.name': site_name}
]
self._update_site(collection, query, update, arr_filter)
self._update_site(project_name, representation_id, update, arr_filter)
def _remove_site(self, collection, query, representation, site_name):
def _remove_site(self, project_name, representation, site_name):
"""
Removes 'site_name' for 'representation'
@ -1698,10 +1783,11 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
}
arr_filter = []
self._update_site(collection, query, update, arr_filter)
self._update_site(project_name, representation["_id"],
update, arr_filter)
def _pause_unpause_site(self, collection, query,
representation, site_name, pause):
def _pause_unpause_site(self, project_name, representation,
site_name, pause):
"""
Pauses/unpauses all files for 'representation' based on 'pause'
@ -1733,12 +1819,13 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
{'s.name': site_name}
]
self._update_site(collection, query, update, arr_filter)
self._update_site(project_name, representation["_id"],
update, arr_filter)
def _add_site(self, collection, query, representation, elem, site_name,
def _add_site(self, project_name, representation, elem, site_name,
force=False, file_id=None):
"""
Adds 'site_name' to 'representation' on 'collection'
Adds 'site_name' to 'representation' on 'project_name'
Args:
representation (dict)
@ -1746,10 +1833,11 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
Use 'force' to remove existing or raises ValueError
"""
representation_id = representation["_id"]
reset_existing = False
files = representation.get("files", [])
if not files:
log.debug("No files for {}".format(representation["_id"]))
log.debug("No files for {}".format(representation_id))
return
for repre_file in files:
@ -1759,7 +1847,8 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
for site in repre_file.get("sites"):
if site["name"] == site_name:
if force or site.get("error"):
self._reset_site_for_file(collection, query,
self._reset_site_for_file(project_name,
representation_id,
elem, repre_file["_id"],
site_name)
reset_existing = True
@ -1785,14 +1874,15 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
{'f._id': file_id}
]
self._update_site(collection, query, update, arr_filter)
self._update_site(project_name, representation_id,
update, arr_filter)
def _remove_local_file(self, collection, representation_id, site_name):
def _remove_local_file(self, project_name, representation_id, site_name):
"""
Removes all local files for 'site_name' of 'representation_id'
Args:
collection (string): project name (must match DB)
project_name (string): project name (must match DB)
representation_id (string): MongoDB _id value
site_name (string): name of configured and active site
@ -1808,21 +1898,17 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
provider_name = self.get_provider_for_site(site=site_name)
if provider_name == 'local_drive':
query = {
"_id": ObjectId(representation_id)
}
representation = list(
self.connection.database[collection].find(query))
representation = get_representation_by_id(project_name,
representation_id,
fields=["files"])
if not representation:
self.log.debug("No repre {} found".format(
representation_id))
return
representation = representation.pop()
local_file_path = ''
for file in representation.get("files"):
local_file_path = self.get_local_file_path(collection,
local_file_path = self.get_local_file_path(project_name,
site_name,
file.get("path", "")
)

View file

@ -11,6 +11,7 @@ from openpype.tools.utils.delegates import pretty_timestamp
from openpype.lib import PypeLogger
from openpype.api import get_local_site_id
from openpype.client import get_representation_by_id
from . import lib
@ -440,7 +441,7 @@ class SyncRepresentationSummaryModel(_SyncRepresentationModel):
full text filtering.
Allows pagination; most of the heavy lifting is done on the DB side.
Single model matches to single collection. When project is changed,
Single model matches a single project. When project is changed,
model is reset and refreshed.
Args:
@ -919,11 +920,10 @@ class SyncRepresentationSummaryModel(_SyncRepresentationModel):
repre_id = self.data(index, Qt.UserRole)
representation = list(self.dbcon.find({"type": "representation",
"_id": repre_id}))
representation = get_representation_by_id(self.project, repre_id)
if representation:
self.sync_server.update_db(self.project, None, None,
representation.pop(),
representation,
get_local_site_id(),
priority=value)
self.is_editing = False
@ -1357,11 +1357,10 @@ class SyncRepresentationDetailModel(_SyncRepresentationModel):
file_id = self.data(index, Qt.UserRole)
updated_file = None
representations = list(self.dbcon.find({"type": "representation",
"_id": self._id}))
representation = get_representation_by_id(self.project, self._id)
if not representation:
return
representation = representations.pop()
for repre_file in representation["files"]:
if repre_file["_id"] == file_id:
updated_file = repre_file

View file

@ -1210,7 +1210,6 @@ class ExtractReview(pyblish.api.InstancePlugin):
# Get instance data
pixel_aspect = temp_data["pixel_aspect"]
if reformat_in_baking:
self.log.debug((
"Using resolution from input. It is already "
@ -1230,6 +1229,10 @@ class ExtractReview(pyblish.api.InstancePlugin):
# - settings value can't have None but has value of 0
output_width = output_def.get("width") or output_width or None
output_height = output_def.get("height") or output_height or None
# Force use of input resolution when output resolution is not defined
# in settings. When 'use_input_res' is set to 'True', resolution from
# the instance is not used.
use_input_res = False
# Overscan color
overscan_color_value = "black"
@ -1241,6 +1244,17 @@ class ExtractReview(pyblish.api.InstancePlugin):
)
self.log.debug("Overscan color: `{}`".format(overscan_color_value))
# Scale input to have proper pixel aspect ratio
# - scale width by the pixel aspect ratio
scale_pixel_aspect = output_def.get("scale_pixel_aspect", True)
if scale_pixel_aspect and pixel_aspect != 1:
# Change input width after pixel aspect
input_width = int(input_width * pixel_aspect)
use_input_res = True
filters.append((
"scale={}x{}:flags=lanczos".format(input_width, input_height)
))
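A worked example of the filter this appends, assuming a 1920x1080 input with a 2.0 pixel aspect:

    input_width = int(1920 * 2.0)  # -> 3840
    # appended ffmpeg filter: "scale=3840x1080:flags=lanczos"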
# Convert overscan value video filters
overscan_crop = output_def.get("overscan_crop")
overscan = OverscanCrop(
@ -1251,13 +1265,10 @@ class ExtractReview(pyblish.api.InstancePlugin):
# resolution by it's values
if overscan_crop_filters:
filters.extend(overscan_crop_filters)
# Change input resolution after overscan crop
input_width = overscan.width()
input_height = overscan.height()
# Use output resolution as inputs after cropping to skip usage of
# instance data resolution
if output_width is None or output_height is None:
output_width = input_width
output_height = input_height
use_input_res = True
# Make sure input width and height is not an odd number
input_width_is_odd = bool(input_width % 2 != 0)
@ -1283,8 +1294,10 @@ class ExtractReview(pyblish.api.InstancePlugin):
self.log.debug("input_width: `{}`".format(input_width))
self.log.debug("input_height: `{}`".format(input_height))
# Use instance resolution if output definition has not set it.
if output_width is None or output_height is None:
# Use instance resolution if output definition has not set it
# - use instance resolution only if there were no scale changes
# that may massively affect output ('use_input_res')
if not use_input_res and (output_width is None or output_height is None):
output_width = temp_data["resolution_width"]
output_height = temp_data["resolution_height"]
@ -1326,7 +1339,6 @@ class ExtractReview(pyblish.api.InstancePlugin):
output_width == input_width
and output_height == input_height
and not letter_box_enabled
and pixel_aspect == 1
):
self.log.debug(
"Output resolution is same as input's"
@ -1336,39 +1348,8 @@ class ExtractReview(pyblish.api.InstancePlugin):
new_repre["resolutionHeight"] = input_height
return filters
# defining image ratios
input_res_ratio = (
(float(input_width) * pixel_aspect) / input_height
)
output_res_ratio = float(output_width) / float(output_height)
self.log.debug("input_res_ratio: `{}`".format(input_res_ratio))
self.log.debug("output_res_ratio: `{}`".format(output_res_ratio))
# Round ratios to 2 decimal places for comparing
input_res_ratio = round(input_res_ratio, 2)
output_res_ratio = round(output_res_ratio, 2)
# get scale factor
scale_factor_by_width = (
float(output_width) / (input_width * pixel_aspect)
)
scale_factor_by_height = (
float(output_height) / input_height
)
self.log.debug(
"scale_factor_by_with: `{}`".format(scale_factor_by_width)
)
self.log.debug(
"scale_factor_by_height: `{}`".format(scale_factor_by_height)
)
# scaling none square pixels and 1920 width
if (
input_height != output_height
or input_width != output_width
or pixel_aspect != 1
):
if input_height != output_height or input_width != output_width:
filters.extend([
(
"scale={}x{}"

View file

@ -85,6 +85,7 @@
],
"width": 0,
"height": 0,
"scale_pixel_aspect": true,
"bg_color": [
0,
0,

View file

@ -33,7 +33,7 @@
},
"RenderSettings": {
"apply_render_settings": true,
"default_render_image_folder": "",
"default_render_image_folder": "renders",
"aov_separator": "underscore",
"reset_current_frame": false,
"arnold_renderer": {

View file

@ -1,4 +1,5 @@
{
"level_sequences_for_layouts": false,
"project_setup": {
"dev_mode": true
}

View file

@ -5,6 +5,11 @@
"label": "Unreal Engine",
"is_file": true,
"children": [
{
"type": "boolean",
"key": "level_sequences_for_layouts",
"label": "Generate level sequences when loading layouts"
},
{
"type": "dict",
"collapsible": true,

View file

@ -319,6 +319,15 @@
"minimum": 0,
"maximum": 100000
},
{
"type": "label",
"label": "Rescale input when it's pixel aspect ratio is not 1. Usefull for anamorph reviews."
},
{
"key": "scale_pixel_aspect",
"label": "Scale pixel aspect",
"type": "boolean"
},
{
"type": "label",
"label": "Background color is used only when input have transparency and Alpha is higher than 0."

View file

@ -272,15 +272,15 @@ class SubsetsModel(TreeModel, BaseRepresentationModel):
# update availability on active site when version changes
if self.sync_server.enabled and version_doc:
query = self._repre_per_version_pipeline(
repre_info = self.sync_server.get_repre_info_for_versions(
project_name,
[version_doc["_id"]],
self.active_site,
self.remote_site
)
docs = list(self.dbcon.aggregate(query))
if docs:
repre = docs.pop()
version_doc["data"].update(self._get_repre_dict(repre))
repre_info = list(repre_info)
if repre_info:
    version_doc["data"].update(
        self._get_repre_dict(repre_info[0]))
self.set_version(index, version_doc)
@ -478,16 +478,16 @@ class SubsetsModel(TreeModel, BaseRepresentationModel):
for _subset_id, doc in last_versions_by_subset_id.items():
version_ids.add(doc["_id"])
query = self._repre_per_version_pipeline(
repres = self.sync_server.get_repre_info_for_versions(
project_name,
list(version_ids), self.active_site, self.remote_site
)
for doc in self.dbcon.aggregate(query):
for repre in repres:
if self._doc_fetching_stop:
return
doc["active_provider"] = self.active_provider
doc["remote_provider"] = self.remote_provider
repre_info[doc["_id"]] = doc
repre_info[repre["_id"]] = repre
self._doc_payload = {
"asset_docs_by_id": asset_docs_by_id,
@ -827,83 +827,6 @@ class SubsetsModel(TreeModel, BaseRepresentationModel):
return data
def _repre_per_version_pipeline(self, version_ids,
active_site, remote_site):
query = [
{"$match": {"parent": {"$in": version_ids},
"type": "representation",
"files.sites.name": {"$exists": 1}}},
{"$unwind": "$files"},
{'$addFields': {
'order_local': {
'$filter': {
'input': '$files.sites', 'as': 'p',
'cond': {'$eq': ['$$p.name', active_site]}
}
}
}},
{'$addFields': {
'order_remote': {
'$filter': {
'input': '$files.sites', 'as': 'p',
'cond': {'$eq': ['$$p.name', remote_site]}
}
}
}},
{'$addFields': {
'progress_local': {"$arrayElemAt": [{
'$cond': [
{'$size': "$order_local.progress"},
"$order_local.progress",
# if exists created_dt count is as available
{'$cond': [
{'$size': "$order_local.created_dt"},
[1],
[0]
]}
]},
0
]}
}},
{'$addFields': {
'progress_remote': {"$arrayElemAt": [{
'$cond': [
{'$size': "$order_remote.progress"},
"$order_remote.progress",
# if exists created_dt count is as available
{'$cond': [
{'$size': "$order_remote.created_dt"},
[1],
[0]
]}
]},
0
]}
}},
{'$group': { # first group by repre
'_id': '$_id',
'parent': {'$first': '$parent'},
'avail_ratio_local': {
'$first': {
'$divide': [{'$sum': "$progress_local"}, {'$sum': 1}]
}
},
'avail_ratio_remote': {
'$first': {
'$divide': [{'$sum': "$progress_remote"}, {'$sum': 1}]
}
}
}},
{'$group': { # second group by parent, eg version_id
'_id': '$parent',
'repre_count': {'$sum': 1}, # total representations
# fully available representation for site
'avail_repre_local': {'$sum': "$avail_ratio_local"},
'avail_repre_remote': {'$sum': "$avail_ratio_remote"},
}},
]
return query
class GroupMemberFilterProxyModel(QtCore.QSortFilterProxyModel):
"""Provide the feature of filtering group by the acceptance of members

View file

@ -567,12 +567,12 @@ class SubsetWidget(QtWidgets.QWidget):
# Trigger
project_name = self.dbcon.active_project()
subset_names_by_version_id = collections.defaultdict(set)
subset_name_by_version_id = dict()
for item in items:
version_id = item["version_document"]["_id"]
subset_names_by_version_id[version_id].add(item["subset"])
subset_name_by_version_id[version_id] = item["subset"]
version_ids = set(subset_names_by_version_id.keys())
version_ids = set(subset_name_by_version_id.keys())
repre_docs = get_representations(
project_name,
representation_names=[representation_name],
@ -584,14 +584,15 @@ class SubsetWidget(QtWidgets.QWidget):
for repre_doc in repre_docs:
repre_ids.append(repre_doc["_id"])
# keep only version ids without representation with that name
version_id = repre_doc["parent"]
if version_id not in version_ids:
version_ids.remove(version_id)
version_ids.discard(version_id)
for version_id in version_ids:
if version_ids:
# report versions that didn't have valid representation
joined_subset_names = ", ".join([
'"{}"'.format(subset)
for subset in subset_names_by_version_id[version_id]
'"{}"'.format(subset_name_by_version_id[version_id])
for version_id in version_ids
])
self.echo("Subsets {} don't have representation '{}'".format(
joined_subset_names, representation_name

View file

@ -8,6 +8,20 @@ sidebar_label: Unreal
OpenPype supports Unreal in similar ways as other DCCs, yet there are a few specifics you need to be aware of.
### Creating the Unreal project
Selecting a task and opening it with Unreal will generate the Unreal project, if it hasn't been created before.
By default, OpenPype includes the plugin that will be built together with the project.
Alternatively, the environment variable `"OPENPYPE_UNREAL_PLUGIN"` can be set to the path of a compiled version of the plugin.
The version of the compiled plugin must match the version of Unreal with which the project is being created.
:::note
Unreal version 5.0 onwards requires the following environment variable:
`"UE_PYTHONPATH": "{PYTHONPATH}"`
:::
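For illustration only, these variables could be prepared in a launcher hook before Unreal starts (the paths below are hypothetical and must match your Unreal version):

    import os

    # compiled plugin matching the Unreal version used for the project
    os.environ["OPENPYPE_UNREAL_PLUGIN"] = "C:/openpype/UE_5.0/OpenPype"
    # Unreal 5.0+ needs the Python path forwarded
    os.environ["UE_PYTHONPATH"] = os.environ.get("PYTHONPATH", "")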
### Project naming
Unreal doesn't support project names starting with a non-alphabetic character, so names like `123_myProject` are
@ -15,9 +29,9 @@ invalid. If OpenPype detects such name it automatically prepends letter **P** to
## OpenPype global tools
OpenPype global tools can be found in *Window* main menu:
OpenPype global tools can be found in Unreal's toolbar and in the *Tools* main menu:
![Unreal OpenPype Menu](assets/unreal-avalon_tools.jpg)
![Unreal OpenPype Menu](assets/unreal_openpype_tools.png)
- [Create](artist_tools.md#creator)
- [Load](artist_tools.md#loader)
@ -31,10 +45,118 @@ OpenPype global tools can be found in *Window* main menu:
To import a Static Mesh model, just choose **OpenPype → Load ...** and select your mesh. Static meshes are transferred as FBX files as specified in [Unreal Engine 4 Static Mesh Pipeline](https://docs.unrealengine.com/en-US/Engine/Content/Importing/FBX/StaticMeshes/index.html). This action will create a new folder with the subset name (`unrealStaticMeshMain_CON` for example) and put all data into it. Inside, you can find:
![Unreal Container Content](assets/unreal-container.jpg)
![Unreal Container Content](assets/unreal_container.jpg)
In this case there is **lambert1**, material pulled from Maya when this static mesh was published, **unrealStaticMeshCube** is the geometry itself, **unrealStaticMeshCube_CON** is a *AssetContainer* type and is there to mark this directory as Avalon Container (to track changes) and to hold OpenPype metadata.
In this case there is **lambert1**, a material pulled from Maya when this static mesh was published; **antennaA_modelMain** is the geometry itself, and **modelMain_v002_CON** is an *AssetContainer* type asset that marks this directory as an Avalon Container (to track changes) and holds OpenPype metadata.
### Publishing
Publishing of Static Mesh works in similar ways. Select your mesh in *Content Browser* and **OpenPype → Create ...**. This will create folder named by subset you've chosen - for example **unrealStaticMeshDefault_INS**. It this folder is that mesh and *Avalon Publish Instance* asset marking this folder as publishable instance and holding important metadata on it. If you want to publish this instance, go **OpenPype → Publish ...**
Publishing of a Static Mesh works in similar ways. Select your mesh in the *Content Browser* and **OpenPype → Create ...**. This will create a folder named by the subset you've chosen - for example **unrealStaticMeshDefault_INS**. In this folder are that mesh and an *Avalon Publish Instance* asset marking this folder as a publishable instance and holding important metadata on it. If you want to publish this instance, go to **OpenPype → Publish ...**
## Layout
There are two different layout options in Unreal, depending on the type of project you are working on.
One only imports the layout, and saves it in a level.
The other uses [Master Sequences](https://docs.unrealengine.com/4.27/en-US/AnimatingObjects/Sequencer/Overview/TracksShot/) to track the whole level sequence hierarchy.
You can choose in the Project Settings if you want to generate the level sequences.
![Unreal OP Settings Level Sequence](assets/unreal_setting_level_sequence.png)
### Loading
To load a layout, click on the OpenPype icon in Unreal's main taskbar, and select **Load**.
![Unreal OP Tools Load](assets/unreal_openpype_tools_load.png)
Select the task on the left, then right click on the layout asset and select **Load Layout**.
![Unreal Layout Load](assets/unreal_load_layout.png)
If you need to load multiple layouts, you can select more than one task on the left, and you can load them together.
![Unreal Layout Load Batch](assets/unreal_load_layout_batch.png)
### Navigating the project
The layout will be imported into the directory `/Content/OpenPype` and split into two subfolders:
- *Assets*, which will contain all the rigs and models contained in the layout;
- *Asset name* (in the following example, *episode 2*), a folder named after the **asset** of the current **task**.
![Unreal Layout Loading Result](assets/unreal_layout_loading_result.png)
If you chose to generate the level sequences, in the second folder you will find the master level for the task (usually an episode), the level sequence and the folders for all the scenes in the episode.
Otherwise you will find the level generated for the loaded layout.
#### Layout without level sequences
In the layout folder, you will find the level with the imported layout and an object of *AssetContainer* type. The latter is there to mark this directory as Avalon Container (to track changes) and to hold OpenPype metadata.
![Unreal Layout Loading No Sequence](assets/unreal_layout_loading_no_sequence.png)
The layout level should contain only the data included in the layout. To add lighting or other elements, like an environment, you have to create a master level and add the layout level as a [streaming level](https://docs.unrealengine.com/5.0/en-US/level-streaming-in-unreal-engine/).
Create the master level and open it. Then, open the *Levels* window (from the menu **Windows → Levels**). Click on **Levels → Add Existing** and select the layout level and the other levels you wish to include in the scene. The following example shows a master level to which a light level and the layout level have been added.
![Unreal Add Level](assets/unreal_add_level.png)
![Unreal Level List](assets/unreal_level_list_no_sequences.png)
#### Layout with level sequences
In the episode folder, you will find the master level for the episode, the master level sequence and the folders for all the scenes in the episode.
After opening the master level, open the *Levels* window (from the menu **Windows → Levels**), and you will see the list of the levels of each shot of the episode for which a layout has been loaded.
![Unreal Level List](assets/unreal_level_list.png)
If it has not been added already, you will need to add the environment to the level. Click on **Levels → Add Existing** and select the level with the environment (check with the studio where it is located).
![Unreal Add Level](assets/unreal_add_level.png)
After adding the environment level to the master level, you will need to set it as always loaded by right-clicking it, selecting **Change Streaming Method**, and choosing **Always Loaded**.
![Unreal Level Streaming Method](assets/unreal_level_streaming_method.png)
### Update layouts
To manage loaded layouts, click on the OpenPype icon in Unreal's main taskbar, and select **Manage**.
![Unreal OP Tools Manage](assets/unreal_openpype_tools_manage.png)
You will get a list of all the assets that have been loaded in the project.
The version number will be in red if it isn't the latest version. Right-click on the element and select *Update* if you need to update the layout.
:::note
**DO NOT** update rigs or models imported with a layout. Update only the layout.
:::
## Rendering
:::note
The rendering requires a layout loaded with the option to create the level sequences **on**.
:::
To render and publish an episode, a scene or a shot, you will need to create a publish instance. The publish instance for the rendering is based on one level sequence. That means that if you want to render the whole episode, you will need to create it for the level sequence of the episode, but if you want to render just one shot, you will need to create it for that shot.
Navigate to the folder that contains the level sequence that you need to render. Select the level sequence, and then click on the OpenPype icon in Unreal's main taskbar, and select **Create**.
![Unreal OP Tools Create](assets/unreal_openpype_tools_create.png)
In the Instance Creator, select **Unreal - Render**, give it a name, and click **Create**.
![Unreal OP Instance Creator](assets/unreal_create_render.png)
The render instance will be created in `/Content/OpenPype/PublishInstances`.
Select the instance you need to render, and then click on the OpenPype icon in Unreal's main taskbar, and select **Render**. You can render more than one instance at a time, if needed. Just select all the instances that you need to render before selecting the **Render** button from the OpenPype menu.
![Unreal OP Tools Render](assets/unreal_openpype_tools_render.png)
Once the render is finished, click on the OpenPype icon in Unreal's main taskbar, and select **Publish**.
![Unreal OP Tools Publish](assets/unreal_openpype_tools_publish.png)
On the left, you will see the render instances. They will be automatically reorganised to have an instance for each shot. So, for example, if you have created the render instance for the whole episode, here you will have an instance for each shot in the episode.
![Unreal Publish Render](assets/unreal_publish_render.png)
Click on the play button in the bottom right, and it will start the publishing process.

[Binary image assets for the Unreal docs - not shown: 2 images removed (25 KiB, 10 KiB) and 19 images added (3.8 KiB to 242 KiB).]