[Automated] Merged develop into main

Commit 4690f1257c by ynbot, 2023-11-16 13:44:46 +01:00 (committed by GitHub)
GPG key ID: 4AEE18F83AFDEB23
111 changed files with 1749 additions and 1898 deletions

View file

@ -35,6 +35,7 @@ body:
label: Version
description: What version are you running? Look to OpenPype Tray
options:
- 3.17.6-nightly.3
- 3.17.6-nightly.2
- 3.17.6-nightly.1
- 3.17.5
@ -134,7 +135,6 @@ body:
- 3.15.2-nightly.2
- 3.15.2-nightly.1
- 3.15.1
- 3.15.1-nightly.6
validations:
required: true
- type: dropdown

View file

@ -10,6 +10,7 @@ from .pipeline import (
ls,
publish,
containerise,
BlenderHost,
)
from .plugin import (
@ -47,6 +48,7 @@ __all__ = [
"ls",
"publish",
"containerise",
"BlenderHost",
"Creator",
"Loader",

View file

@ -188,7 +188,7 @@ def imprint(node: bpy.types.bpy_struct_meta_idprop, data: Dict):
# Support values evaluated at imprint
value = value()
if not isinstance(value, (int, float, bool, str, list)):
if not isinstance(value, (int, float, bool, str, list, dict)):
raise TypeError(f"Unsupported type: {type(value)}")
imprint_data[key] = value
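The widened type check means `imprint` can now store nested dictionaries as custom properties too. A minimal sketch of a call this enables (the collection name and the `render_data` payload are illustrative assumptions, not part of this commit):

import bpy
from openpype.hosts.blender.api.lib import imprint

collection = bpy.data.collections.new("renderMain")
imprint(collection, {
    "id": "pyblish.avalon.instance",   # plain string, already supported
    "render_data": {"review": True},   # dict value, accepted after this change
})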
@ -278,9 +278,11 @@ def get_selected_collections():
list: A list of `bpy.types.Collection` objects that are currently
selected in the outliner.
"""
window = bpy.context.window or bpy.context.window_manager.windows[0]
try:
area = next(
area for area in bpy.context.window.screen.areas
area for area in window.screen.areas
if area.type == 'OUTLINER')
region = next(
region for region in area.regions
@ -290,10 +292,10 @@ def get_selected_collections():
"must be in the main Blender window.") from e
with bpy.context.temp_override(
window=bpy.context.window,
window=window,
area=area,
region=region,
screen=bpy.context.window.screen
screen=window.screen
):
ids = bpy.context.selected_ids

View file

@ -31,6 +31,14 @@ PREVIEW_COLLECTIONS: Dict = dict()
TIMER_INTERVAL: float = 0.01 if platform.system() == "Windows" else 0.1
def execute_function_in_main_thread(f):
"""Decorator to move a function call into main thread items"""
def wrapper(*args, **kwargs):
mti = MainThreadItem(f, *args, **kwargs)
execute_in_main_thread(mti)
return wrapper
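A hypothetical usage sketch (the `rename_object` function below is an illustration, not part of this commit): any function decorated with `execute_function_in_main_thread` is wrapped in a `MainThreadItem` and picked up later by the timer on Blender's main thread instead of running immediately.

@execute_function_in_main_thread
def rename_object(old_name, new_name):
    import bpy
    bpy.data.objects[old_name].name = new_name

# Safe to call from a Qt callback; the rename runs on Blender's main thread.
rename_object("Cube", "Cube_main")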
class BlenderApplication(QtWidgets.QApplication):
_instance = None
blender_windows = {}
@ -238,8 +246,24 @@ class LaunchQtApp(bpy.types.Operator):
self.before_window_show()
def pull_to_front(window):
"""Pull window forward to screen.
If Window is minimized this will un-minimize, then it can be raised
and activated to the front.
"""
window.setWindowState(
(window.windowState() & ~QtCore.Qt.WindowMinimized) |
QtCore.Qt.WindowActive
)
window.raise_()
window.activateWindow()
if isinstance(self._window, ModuleType):
self._window.show()
pull_to_front(self._window)
# Pull window to the front
window = None
if hasattr(self._window, "window"):
window = self._window.window
@ -254,6 +278,7 @@ class LaunchQtApp(bpy.types.Operator):
on_top_flags = origin_flags | QtCore.Qt.WindowStaysOnTopHint
self._window.setWindowFlags(on_top_flags)
self._window.show()
pull_to_front(self._window)
# if on_top_flags != origin_flags:
# self._window.setWindowFlags(origin_flags)
@ -275,6 +300,10 @@ class LaunchCreator(LaunchQtApp):
def before_window_show(self):
self._window.refresh()
def execute(self, context):
host_tools.show_publisher(tab="create")
return {"FINISHED"}
class LaunchLoader(LaunchQtApp):
"""Launch Avalon Loader."""
@ -299,7 +328,7 @@ class LaunchPublisher(LaunchQtApp):
bl_label = "Publish..."
def execute(self, context):
host_tools.show_publish()
host_tools.show_publisher(tab="publish")
return {"FINISHED"}
@ -416,7 +445,6 @@ class TOPBAR_MT_avalon(bpy.types.Menu):
layout.operator(SetResolution.bl_idname, text="Set Resolution")
layout.separator()
layout.operator(LaunchWorkFiles.bl_idname, text="Work Files...")
# TODO (jasper): maybe add 'Reload Pipeline'
def draw_avalon_menu(self, context):

View file

@ -10,6 +10,12 @@ from . import ops
import pyblish.api
from openpype.host import (
HostBase,
IWorkfileHost,
IPublishHost,
ILoadHost
)
from openpype.client import get_asset_by_name
from openpype.pipeline import (
schema,
@ -29,6 +35,14 @@ from openpype.lib import (
)
import openpype.hosts.blender
from openpype.settings import get_project_settings
from .workio import (
open_file,
save_file,
current_file,
has_unsaved_changes,
file_extensions,
work_root,
)
HOST_DIR = os.path.dirname(os.path.abspath(openpype.hosts.blender.__file__))
@ -47,6 +61,101 @@ IS_HEADLESS = bpy.app.background
log = Logger.get_logger(__name__)
class BlenderHost(HostBase, IWorkfileHost, IPublishHost, ILoadHost):
name = "blender"
def install(self):
"""Override install method from HostBase.
Install Blender host functionality."""
install()
def get_containers(self) -> Iterator:
"""List containers from active Blender scene."""
return ls()
def get_workfile_extensions(self) -> List[str]:
"""Override get_workfile_extensions method from IWorkfileHost.
Get possible workfile extensions.
Returns:
List[str]: Workfile extensions.
"""
return file_extensions()
def save_workfile(self, dst_path: str = None):
"""Override save_workfile method from IWorkfileHost.
Save currently opened workfile.
Args:
dst_path (str): Where the current scene should be saved. Or use
current path if `None` is passed.
"""
save_file(dst_path if dst_path else bpy.data.filepath)
def open_workfile(self, filepath: str):
"""Override open_workfile method from IWorkfileHost.
Open workfile at specified filepath in the host.
Args:
filepath (str): Path to workfile.
"""
open_file(filepath)
def get_current_workfile(self) -> str:
"""Override get_current_workfile method from IWorkfileHost.
Retrieve currently opened workfile path.
Returns:
str: Path to currently opened workfile.
"""
return current_file()
def workfile_has_unsaved_changes(self) -> bool:
"""Override wokfile_has_unsaved_changes method from IWorkfileHost.
Returns True if opened workfile has no unsaved changes.
Returns:
bool: True if scene is saved and False if it has unsaved
modifications.
"""
return has_unsaved_changes()
def work_root(self, session) -> str:
"""Override work_root method from IWorkfileHost.
Modify workdir per host.
Args:
session (dict): Session context data.
Returns:
str: Path to new workdir.
"""
return work_root(session)
def get_context_data(self) -> dict:
"""Override abstract method from IPublishHost.
Get global data related to creation-publishing from workfile.
Returns:
dict: Context data stored using 'update_context_data'.
"""
property = bpy.context.scene.get(AVALON_PROPERTY)
if property:
return property.to_dict()
return {}
def update_context_data(self, data: dict, changes: dict):
"""Override abstract method from IPublishHost.
Store global context data to workfile.
Args:
data (dict): New data to store as-is.
changes (dict): Only the data that changed. Each value is a
'(<old>, <new>)' tuple.
"""
bpy.context.scene[AVALON_PROPERTY] = data
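Since the data lives in a scene custom property, it survives saving the workfile. A minimal sketch of the round trip through the two methods above (the payload is an arbitrary example):

host = BlenderHost()
host.update_context_data({"variant": "Main", "task": "modeling"}, changes={})
print(host.get_context_data())   # -> {'variant': 'Main', 'task': 'modeling'}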
def pype_excepthook_handler(*args):
traceback.print_exception(*args)

View file

@ -1,31 +1,34 @@
"""Shared functionality for pipeline plugins for Blender."""
import itertools
from pathlib import Path
from typing import Dict, List, Optional
import bpy
from openpype.pipeline import (
LegacyCreator,
Creator,
CreatedInstance,
LoaderPlugin,
get_current_task_name,
)
from openpype.lib import BoolDef
from .pipeline import (
AVALON_CONTAINERS,
AVALON_INSTANCES,
AVALON_PROPERTY,
)
from .ops import (
MainThreadItem,
execute_in_main_thread
)
from .lib import (
imprint,
get_selection
)
from .lib import imprint
VALID_EXTENSIONS = [".blend", ".json", ".abc", ".fbx"]
def asset_name(
def prepare_scene_name(
asset: str, subset: str, namespace: Optional[str] = None
) -> str:
"""Return a consistent name for an asset."""
@ -144,20 +147,211 @@ def deselect_all():
bpy.context.view_layer.objects.active = active
class Creator(LegacyCreator):
"""Base class for Creator plug-ins."""
class BaseCreator(Creator):
"""Base class for Blender Creator plug-ins."""
defaults = ['Main']
def process(self):
collection = bpy.data.collections.new(name=self.data["subset"])
bpy.context.scene.collection.children.link(collection)
imprint(collection, self.data)
create_as_asset_group = False
if (self.options or {}).get("useSelection"):
for obj in get_selection():
collection.objects.link(obj)
@staticmethod
def cache_subsets(shared_data):
"""Cache instances for Creators shared data.
return collection
Create the `blender_cached_subsets` key in shared data when needed and
fill it with all collected instances from the scene, grouped under their
respective creator identifiers.
If legacy instances are detected in the scene, create the
`blender_cached_legacy_subsets` key and fill it with all legacy subsets,
keyed by family.
Args:
shared_data(Dict[str, Any]): Shared data.
Return:
Dict[str, Any]: Shared data with cached subsets.
"""
if not shared_data.get('blender_cached_subsets'):
cache = {}
cache_legacy = {}
avalon_instances = bpy.data.collections.get(AVALON_INSTANCES)
avalon_instance_objs = (
avalon_instances.objects if avalon_instances else []
)
for obj_or_col in itertools.chain(
avalon_instance_objs,
bpy.data.collections
):
avalon_prop = obj_or_col.get(AVALON_PROPERTY, {})
if not avalon_prop:
continue
if avalon_prop.get('id') != 'pyblish.avalon.instance':
continue
creator_id = avalon_prop.get('creator_identifier')
if creator_id:
# Creator instance
cache.setdefault(creator_id, []).append(obj_or_col)
else:
family = avalon_prop.get('family')
if family:
# Legacy creator instance
cache_legacy.setdefault(family, []).append(obj_or_col)
shared_data["blender_cached_subsets"] = cache
shared_data["blender_cached_legacy_subsets"] = cache_legacy
return shared_data
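The resulting cache is simply two dictionaries inside the shared data. A minimal inspection sketch (assumes this runs inside Blender with the classes above importable):

shared_data = {}
BaseCreator.cache_subsets(shared_data)
# Instances created by new-style creators, keyed by creator identifier:
print(shared_data["blender_cached_subsets"])
# Instances without a creator identifier (legacy), keyed by family:
print(shared_data["blender_cached_legacy_subsets"])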
def create(
self, subset_name: str, instance_data: dict, pre_create_data: dict
):
"""Override abstract method from Creator.
Create new instance and store it.
Args:
subset_name(str): Subset name of created instance.
instance_data(dict): Instance base data.
pre_create_data(dict): Data based on pre creation attributes.
Those may affect how creator works.
"""
# Get Instance Container or create it if it does not exist
instances = bpy.data.collections.get(AVALON_INSTANCES)
if not instances:
instances = bpy.data.collections.new(name=AVALON_INSTANCES)
bpy.context.scene.collection.children.link(instances)
# Create asset group
name = prepare_scene_name(instance_data["asset"], subset_name)
if self.create_as_asset_group:
# Create instance as empty
instance_node = bpy.data.objects.new(name=name, object_data=None)
instance_node.empty_display_type = 'SINGLE_ARROW'
instances.objects.link(instance_node)
else:
# Create instance collection
instance_node = bpy.data.collections.new(name=name)
instances.children.link(instance_node)
self.set_instance_data(subset_name, instance_data)
instance = CreatedInstance(
self.family, subset_name, instance_data, self
)
instance.transient_data["instance_node"] = instance_node
self._add_instance_to_context(instance)
imprint(instance_node, instance_data)
return instance_node
def collect_instances(self):
"""Override abstract method from BaseCreator.
Collect existing instances related to this creator plugin."""
# Cache subsets in shared data
self.cache_subsets(self.collection_shared_data)
# Get cached subsets
cached_subsets = self.collection_shared_data.get(
"blender_cached_subsets"
)
if not cached_subsets:
return
# Process only instances that were created by this creator
for instance_node in cached_subsets.get(self.identifier, []):
property = instance_node.get(AVALON_PROPERTY)
# Create instance object from existing data
instance = CreatedInstance.from_existing(
instance_data=property.to_dict(),
creator=self
)
instance.transient_data["instance_node"] = instance_node
# Add instance to create context
self._add_instance_to_context(instance)
def update_instances(self, update_list):
"""Override abstract method from BaseCreator.
Store changes of existing instances so they can be recollected.
Args:
update_list(List[UpdateData]): Changed instances
and their changes, as a list of tuples."""
for created_instance, changes in update_list:
data = created_instance.data_to_store()
node = created_instance.transient_data["instance_node"]
if not node:
# We can't update if we don't know the node
self.log.error(
f"Unable to update instance {created_instance} "
f"without instance node."
)
return
# Rename the instance node in the scene if subset or asset changed
if (
"subset" in changes.changed_keys
or "asset" in changes.changed_keys
):
name = prepare_scene_name(
asset=data["asset"], subset=data["subset"]
)
node.name = name
imprint(node, data)
def remove_instances(self, instances: List[CreatedInstance]):
for instance in instances:
node = instance.transient_data["instance_node"]
if isinstance(node, bpy.types.Collection):
for children in node.children_recursive:
if isinstance(children, bpy.types.Collection):
bpy.data.collections.remove(children)
else:
bpy.data.objects.remove(children)
bpy.data.collections.remove(node)
elif isinstance(node, bpy.types.Object):
bpy.data.objects.remove(node)
self._remove_instance_from_context(instance)
def set_instance_data(
self,
subset_name: str,
instance_data: dict
):
"""Fill instance data with required items.
Args:
subset_name(str): Subset name of created instance.
instance_data(dict): Instance base data.
"""
if not instance_data:
instance_data = {}
instance_data.update(
{
"id": "pyblish.avalon.instance",
"creator_identifier": self.identifier,
"subset": subset_name,
}
)
def get_pre_create_attr_defs(self):
return [
BoolDef("use_selection",
label="Use selection",
default=True)
]
class Loader(LoaderPlugin):
@ -251,7 +445,7 @@ class AssetLoader(LoaderPlugin):
namespace: Use pre-defined namespace
options: Additional settings dictionary
"""
# TODO (jasper): make it possible to add the asset several times by
# TODO: make it possible to add the asset several times by
# just re-using the collection
filepath = self.filepath_from_context(context)
assert Path(filepath).exists(), f"{filepath} doesn't exist."
@ -262,7 +456,7 @@ class AssetLoader(LoaderPlugin):
asset, subset
)
namespace = namespace or f"{asset}_{unique_number}"
name = name or asset_name(
name = name or prepare_scene_name(
asset, subset, unique_number
)
@ -291,7 +485,9 @@ class AssetLoader(LoaderPlugin):
# asset = context["asset"]["name"]
# subset = context["subset"]["name"]
# instance_name = asset_name(asset, subset, unique_number) + '_CON'
# instance_name = prepare_scene_name(
# asset, subset, unique_number
# ) + '_CON'
# return self._get_instance_collection(instance_name, nodes)

View file

@ -1,9 +1,9 @@
from openpype.pipeline import install_host
from openpype.hosts.blender import api
from openpype.hosts.blender.api import BlenderHost
def register():
install_host(api)
install_host(BlenderHost())
def unregister():

View file

@ -0,0 +1,78 @@
# -*- coding: utf-8 -*-
"""Converter for legacy Houdini subsets."""
from openpype.pipeline.create.creator_plugins import SubsetConvertorPlugin
from openpype.hosts.blender.api.lib import imprint
class BlenderLegacyConvertor(SubsetConvertorPlugin):
"""Find and convert any legacy subsets in the scene.
This convertor will find all legacy subsets in the scene and will
transform them to the current system. Since the old subsets don't
retain any information about their original creators, the only mapping
we can do is based on their families.
Its limitation is that multiple creators may produce subsets of the
same family, and there is no way to tell them apart. This code should
nevertheless cover all creators that came with OpenPype.
"""
identifier = "io.openpype.creators.blender.legacy"
family_to_id = {
"action": "io.openpype.creators.blender.action",
"camera": "io.openpype.creators.blender.camera",
"animation": "io.openpype.creators.blender.animation",
"blendScene": "io.openpype.creators.blender.blendscene",
"layout": "io.openpype.creators.blender.layout",
"model": "io.openpype.creators.blender.model",
"pointcache": "io.openpype.creators.blender.pointcache",
"render": "io.openpype.creators.blender.render",
"review": "io.openpype.creators.blender.review",
"rig": "io.openpype.creators.blender.rig",
}
def __init__(self, *args, **kwargs):
super(BlenderLegacyConvertor, self).__init__(*args, **kwargs)
self.legacy_subsets = {}
def find_instances(self):
"""Find legacy subsets in the scene.
Legacy subsets are the ones that don't have the `creator_identifier`
property on them.
This is using cached entries done in
:py:meth:`~BaseCreator.cache_subsets()`
"""
self.legacy_subsets = self.collection_shared_data.get(
"blender_cached_legacy_subsets")
if not self.legacy_subsets:
return
self.add_convertor_item(
"Found {} incompatible subset{}".format(
len(self.legacy_subsets),
"s" if len(self.legacy_subsets) > 1 else ""
)
)
def convert(self):
"""Convert all legacy subsets to current.
It is enough to add `creator_identifier` and `instance_node`.
"""
if not self.legacy_subsets:
return
for family, instance_nodes in self.legacy_subsets.items():
if family in self.family_to_id:
for instance_node in instance_nodes:
creator_identifier = self.family_to_id[family]
self.log.info(
"Converting {} to {}".format(instance_node.name,
creator_identifier)
)
imprint(instance_node, data={
"creator_identifier": creator_identifier
})
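In data terms the conversion is small: a legacy instance only carries the avalon `id` and a `family`, and the convertor imprints the matching `creator_identifier` next to them. A sketch with hypothetical property values:

legacy_props = {
    "id": "pyblish.avalon.instance",
    "family": "model",
    "subset": "modelMain",
}
# After convert() the same node additionally carries the mapped identifier:
converted_props = dict(
    legacy_props, creator_identifier="io.openpype.creators.blender.model"
)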

View file

@ -2,30 +2,29 @@
import bpy
from openpype.pipeline import get_current_task_name
import openpype.hosts.blender.api.plugin
from openpype.hosts.blender.api import lib
from openpype.hosts.blender.api import lib, plugin
class CreateAction(openpype.hosts.blender.api.plugin.Creator):
"""Action output for character rigs"""
class CreateAction(plugin.BaseCreator):
"""Action output for character rigs."""
name = "actionMain"
identifier = "io.openpype.creators.blender.action"
label = "Action"
family = "action"
icon = "male"
def process(self):
def create(
self, subset_name: str, instance_data: dict, pre_create_data: dict
):
# Run parent create method
collection = super().create(
subset_name, instance_data, pre_create_data
)
asset = self.data["asset"]
subset = self.data["subset"]
name = openpype.hosts.blender.api.plugin.asset_name(asset, subset)
collection = bpy.data.collections.new(name=name)
bpy.context.scene.collection.children.link(collection)
self.data['task'] = get_current_task_name()
lib.imprint(collection, self.data)
# Get instance name
name = plugin.prepare_scene_name(instance_data["asset"], subset_name)
if (self.options or {}).get("useSelection"):
if pre_create_data.get("use_selection"):
for obj in lib.get_selection():
if (obj.animation_data is not None
and obj.animation_data.action is not None):

View file

@ -1,51 +1,32 @@
"""Create an animation asset."""
import bpy
from openpype.pipeline import get_current_task_name
from openpype.hosts.blender.api import plugin, lib, ops
from openpype.hosts.blender.api.pipeline import AVALON_INSTANCES
from openpype.hosts.blender.api import plugin, lib
class CreateAnimation(plugin.Creator):
"""Animation output for character rigs"""
class CreateAnimation(plugin.BaseCreator):
"""Animation output for character rigs."""
name = "animationMain"
identifier = "io.openpype.creators.blender.animation"
label = "Animation"
family = "animation"
icon = "male"
def process(self):
""" Run the creator on Blender main thread"""
mti = ops.MainThreadItem(self._process)
ops.execute_in_main_thread(mti)
def create(
self, subset_name: str, instance_data: dict, pre_create_data: dict
):
# Run parent create method
collection = super().create(
subset_name, instance_data, pre_create_data
)
def _process(self):
# Get Instance Container or create it if it does not exist
instances = bpy.data.collections.get(AVALON_INSTANCES)
if not instances:
instances = bpy.data.collections.new(name=AVALON_INSTANCES)
bpy.context.scene.collection.children.link(instances)
# Create instance object
# name = self.name
# if not name:
asset = self.data["asset"]
subset = self.data["subset"]
name = plugin.asset_name(asset, subset)
# asset_group = bpy.data.objects.new(name=name, object_data=None)
# asset_group.empty_display_type = 'SINGLE_ARROW'
asset_group = bpy.data.collections.new(name=name)
instances.children.link(asset_group)
self.data['task'] = get_current_task_name()
lib.imprint(asset_group, self.data)
if (self.options or {}).get("useSelection"):
if pre_create_data.get("use_selection"):
selected = lib.get_selection()
for obj in selected:
asset_group.objects.link(obj)
elif (self.options or {}).get("asset_group"):
obj = (self.options or {}).get("asset_group")
asset_group.objects.link(obj)
collection.objects.link(obj)
elif pre_create_data.get("asset_group"):
# Use for Load Blend automated creation of animation instances
# upon loading rig files
obj = pre_create_data.get("asset_group")
collection.objects.link(obj)
return asset_group
return collection

View file

@ -2,51 +2,33 @@
import bpy
from openpype.pipeline import get_current_task_name
from openpype.hosts.blender.api import plugin, lib, ops
from openpype.hosts.blender.api.pipeline import AVALON_INSTANCES
from openpype.hosts.blender.api import plugin, lib
class CreateBlendScene(plugin.Creator):
"""Generic group of assets"""
class CreateBlendScene(plugin.BaseCreator):
"""Generic group of assets."""
name = "blendScene"
identifier = "io.openpype.creators.blender.blendscene"
label = "Blender Scene"
family = "blendScene"
icon = "cubes"
maintain_selection = False
def process(self):
""" Run the creator on Blender main thread"""
mti = ops.MainThreadItem(self._process)
ops.execute_in_main_thread(mti)
def create(
self, subset_name: str, instance_data: dict, pre_create_data: dict
):
def _process(self):
# Get Instance Container or create it if it does not exist
instances = bpy.data.collections.get(AVALON_INSTANCES)
if not instances:
instances = bpy.data.collections.new(name=AVALON_INSTANCES)
bpy.context.scene.collection.children.link(instances)
instance_node = super().create(subset_name,
instance_data,
pre_create_data)
# Create instance object
asset = self.data["asset"]
subset = self.data["subset"]
name = plugin.asset_name(asset, subset)
# Create the new asset group as collection
asset_group = bpy.data.collections.new(name=name)
instances.children.link(asset_group)
self.data['task'] = get_current_task_name()
lib.imprint(asset_group, self.data)
if (self.options or {}).get("useSelection"):
if pre_create_data.get("use_selection"):
selection = lib.get_selection(include_collections=True)
for data in selection:
if isinstance(data, bpy.types.Collection):
asset_group.children.link(data)
instance_node.children.link(data)
elif isinstance(data, bpy.types.Object):
asset_group.objects.link(data)
instance_node.objects.link(data)
return asset_group
return instance_node

View file

@ -2,62 +2,41 @@
import bpy
from openpype.pipeline import get_current_task_name
from openpype.hosts.blender.api import plugin, lib, ops
from openpype.hosts.blender.api import plugin, lib
from openpype.hosts.blender.api.pipeline import AVALON_INSTANCES
class CreateCamera(plugin.Creator):
"""Polygonal static geometry"""
class CreateCamera(plugin.BaseCreator):
"""Polygonal static geometry."""
name = "cameraMain"
identifier = "io.openpype.creators.blender.camera"
label = "Camera"
family = "camera"
icon = "video-camera"
def process(self):
""" Run the creator on Blender main thread"""
mti = ops.MainThreadItem(self._process)
ops.execute_in_main_thread(mti)
create_as_asset_group = True
def _process(self):
# Get Instance Container or create it if it does not exist
instances = bpy.data.collections.get(AVALON_INSTANCES)
if not instances:
instances = bpy.data.collections.new(name=AVALON_INSTANCES)
bpy.context.scene.collection.children.link(instances)
def create(
self, subset_name: str, instance_data: dict, pre_create_data: dict
):
# Create instance object
asset = self.data["asset"]
subset = self.data["subset"]
name = plugin.asset_name(asset, subset)
asset_group = super().create(subset_name,
instance_data,
pre_create_data)
asset_group = bpy.data.objects.new(name=name, object_data=None)
asset_group.empty_display_type = 'SINGLE_ARROW'
instances.objects.link(asset_group)
self.data['task'] = get_current_task_name()
print(f"self.data: {self.data}")
lib.imprint(asset_group, self.data)
if (self.options or {}).get("useSelection"):
bpy.context.view_layer.objects.active = asset_group
selected = lib.get_selection()
for obj in selected:
if obj.parent in selected:
obj.select_set(False)
continue
selected.append(asset_group)
bpy.ops.object.parent_set(keep_transform=True)
bpy.context.view_layer.objects.active = asset_group
if pre_create_data.get("use_selection"):
for obj in lib.get_selection():
obj.parent = asset_group
else:
plugin.deselect_all()
camera = bpy.data.cameras.new(subset)
camera_obj = bpy.data.objects.new(subset, camera)
camera = bpy.data.cameras.new(subset_name)
camera_obj = bpy.data.objects.new(subset_name, camera)
instances = bpy.data.collections.get(AVALON_INSTANCES)
instances.objects.link(camera_obj)
camera_obj.select_set(True)
asset_group.select_set(True)
bpy.context.view_layer.objects.active = asset_group
bpy.ops.object.parent_set(keep_transform=True)
camera_obj.parent = asset_group
return asset_group

View file

@ -2,50 +2,31 @@
import bpy
from openpype.pipeline import get_current_task_name
from openpype.hosts.blender.api import plugin, lib, ops
from openpype.hosts.blender.api.pipeline import AVALON_INSTANCES
from openpype.hosts.blender.api import plugin, lib
class CreateLayout(plugin.Creator):
"""Layout output for character rigs"""
class CreateLayout(plugin.BaseCreator):
"""Layout output for character rigs."""
name = "layoutMain"
identifier = "io.openpype.creators.blender.layout"
label = "Layout"
family = "layout"
icon = "cubes"
def process(self):
""" Run the creator on Blender main thread"""
mti = ops.MainThreadItem(self._process)
ops.execute_in_main_thread(mti)
create_as_asset_group = True
def _process(self):
# Get Instance Container or create it if it does not exist
instances = bpy.data.collections.get(AVALON_INSTANCES)
if not instances:
instances = bpy.data.collections.new(name=AVALON_INSTANCES)
bpy.context.scene.collection.children.link(instances)
def create(
self, subset_name: str, instance_data: dict, pre_create_data: dict
):
# Create instance object
asset = self.data["asset"]
subset = self.data["subset"]
name = plugin.asset_name(asset, subset)
asset_group = bpy.data.objects.new(name=name, object_data=None)
asset_group.empty_display_type = 'SINGLE_ARROW'
instances.objects.link(asset_group)
self.data['task'] = get_current_task_name()
lib.imprint(asset_group, self.data)
asset_group = super().create(subset_name,
instance_data,
pre_create_data)
# Add selected objects to instance
if (self.options or {}).get("useSelection"):
if pre_create_data.get("use_selection"):
bpy.context.view_layer.objects.active = asset_group
selected = lib.get_selection()
for obj in selected:
if obj.parent in selected:
obj.select_set(False)
continue
selected.append(asset_group)
bpy.ops.object.parent_set(keep_transform=True)
for obj in lib.get_selection():
obj.parent = asset_group
return asset_group

View file

@ -2,50 +2,30 @@
import bpy
from openpype.pipeline import get_current_task_name
from openpype.hosts.blender.api import plugin, lib, ops
from openpype.hosts.blender.api.pipeline import AVALON_INSTANCES
from openpype.hosts.blender.api import plugin, lib
class CreateModel(plugin.Creator):
"""Polygonal static geometry"""
class CreateModel(plugin.BaseCreator):
"""Polygonal static geometry."""
name = "modelMain"
identifier = "io.openpype.creators.blender.model"
label = "Model"
family = "model"
icon = "cube"
def process(self):
""" Run the creator on Blender main thread"""
mti = ops.MainThreadItem(self._process)
ops.execute_in_main_thread(mti)
create_as_asset_group = True
def _process(self):
# Get Instance Container or create it if it does not exist
instances = bpy.data.collections.get(AVALON_INSTANCES)
if not instances:
instances = bpy.data.collections.new(name=AVALON_INSTANCES)
bpy.context.scene.collection.children.link(instances)
# Create instance object
asset = self.data["asset"]
subset = self.data["subset"]
name = plugin.asset_name(asset, subset)
asset_group = bpy.data.objects.new(name=name, object_data=None)
asset_group.empty_display_type = 'SINGLE_ARROW'
instances.objects.link(asset_group)
self.data['task'] = get_current_task_name()
lib.imprint(asset_group, self.data)
def create(
self, subset_name: str, instance_data: dict, pre_create_data: dict
):
asset_group = super().create(subset_name,
instance_data,
pre_create_data)
# Add selected objects to instance
if (self.options or {}).get("useSelection"):
if pre_create_data.get("use_selection"):
bpy.context.view_layer.objects.active = asset_group
selected = lib.get_selection()
for obj in selected:
if obj.parent in selected:
obj.select_set(False)
continue
selected.append(asset_group)
bpy.ops.object.parent_set(keep_transform=True)
for obj in lib.get_selection():
obj.parent = asset_group
return asset_group

View file

@ -1,51 +1,29 @@
"""Create a pointcache asset."""
import bpy
from openpype.pipeline import get_current_task_name
from openpype.hosts.blender.api import plugin, lib, ops
from openpype.hosts.blender.api.pipeline import AVALON_INSTANCES
from openpype.hosts.blender.api import plugin, lib
class CreatePointcache(plugin.Creator):
"""Polygonal static geometry"""
class CreatePointcache(plugin.BaseCreator):
"""Polygonal static geometry."""
name = "pointcacheMain"
identifier = "io.openpype.creators.blender.pointcache"
label = "Point Cache"
family = "pointcache"
icon = "gears"
def process(self):
""" Run the creator on Blender main thread"""
mti = ops.MainThreadItem(self._process)
ops.execute_in_main_thread(mti)
def create(
self, subset_name: str, instance_data: dict, pre_create_data: dict
):
# Run parent create method
collection = super().create(
subset_name, instance_data, pre_create_data
)
def _process(self):
# Get Instance Container or create it if it does not exist
instances = bpy.data.collections.get(AVALON_INSTANCES)
if not instances:
instances = bpy.data.collections.new(name=AVALON_INSTANCES)
bpy.context.scene.collection.children.link(instances)
if pre_create_data.get("use_selection"):
objects = lib.get_selection()
for obj in objects:
collection.objects.link(obj)
if obj.type == 'EMPTY':
objects.extend(obj.children)
# Create instance object
asset = self.data["asset"]
subset = self.data["subset"]
name = plugin.asset_name(asset, subset)
asset_group = bpy.data.objects.new(name=name, object_data=None)
asset_group.empty_display_type = 'SINGLE_ARROW'
instances.objects.link(asset_group)
self.data['task'] = get_current_task_name()
lib.imprint(asset_group, self.data)
# Add selected objects to instance
if (self.options or {}).get("useSelection"):
bpy.context.view_layer.objects.active = asset_group
selected = lib.get_selection()
for obj in selected:
if obj.parent in selected:
obj.select_set(False)
continue
selected.append(asset_group)
bpy.ops.object.parent_set(keep_transform=True)
return asset_group
return collection

View file

@ -1,42 +1,31 @@
"""Create render."""
import bpy
from openpype.pipeline import get_current_task_name
from openpype.hosts.blender.api import plugin, lib
from openpype.hosts.blender.api import plugin
from openpype.hosts.blender.api.render_lib import prepare_rendering
from openpype.hosts.blender.api.pipeline import AVALON_INSTANCES
class CreateRenderlayer(plugin.Creator):
"""Single baked camera"""
class CreateRenderlayer(plugin.BaseCreator):
"""Single baked camera."""
name = "renderingMain"
identifier = "io.openpype.creators.blender.render"
label = "Render"
family = "render"
icon = "eye"
def process(self):
# Get Instance Container or create it if it does not exist
instances = bpy.data.collections.get(AVALON_INSTANCES)
if not instances:
instances = bpy.data.collections.new(name=AVALON_INSTANCES)
bpy.context.scene.collection.children.link(instances)
# Create instance object
asset = self.data["asset"]
subset = self.data["subset"]
name = plugin.asset_name(asset, subset)
asset_group = bpy.data.collections.new(name=name)
def create(
self, subset_name: str, instance_data: dict, pre_create_data: dict
):
try:
instances.children.link(asset_group)
self.data['task'] = get_current_task_name()
lib.imprint(asset_group, self.data)
# Run parent create method
collection = super().create(
subset_name, instance_data, pre_create_data
)
prepare_rendering(asset_group)
prepare_rendering(collection)
except Exception:
# Remove the instance if there was an error
bpy.data.collections.remove(asset_group)
bpy.data.collections.remove(collection)
raise
# TODO: this is undesiderable, but it's the only way to be sure that
@ -50,4 +39,4 @@ class CreateRenderlayer(plugin.Creator):
# now it is to force the file to be saved.
bpy.ops.wm.save_as_mainfile(filepath=bpy.data.filepath)
return asset_group
return collection

View file

@ -1,47 +1,27 @@
"""Create review."""
import bpy
from openpype.pipeline import get_current_task_name
from openpype.hosts.blender.api import plugin, lib, ops
from openpype.hosts.blender.api.pipeline import AVALON_INSTANCES
from openpype.hosts.blender.api import plugin, lib
class CreateReview(plugin.Creator):
"""Single baked camera"""
class CreateReview(plugin.BaseCreator):
"""Single baked camera."""
name = "reviewDefault"
identifier = "io.openpype.creators.blender.review"
label = "Review"
family = "review"
icon = "video-camera"
def process(self):
""" Run the creator on Blender main thread"""
mti = ops.MainThreadItem(self._process)
ops.execute_in_main_thread(mti)
def create(
self, subset_name: str, instance_data: dict, pre_create_data: dict
):
# Run parent create method
collection = super().create(
subset_name, instance_data, pre_create_data
)
def _process(self):
# Get Instance Container or create it if it does not exist
instances = bpy.data.collections.get(AVALON_INSTANCES)
if not instances:
instances = bpy.data.collections.new(name=AVALON_INSTANCES)
bpy.context.scene.collection.children.link(instances)
# Create instance object
asset = self.data["asset"]
subset = self.data["subset"]
name = plugin.asset_name(asset, subset)
asset_group = bpy.data.collections.new(name=name)
instances.children.link(asset_group)
self.data['task'] = get_current_task_name()
lib.imprint(asset_group, self.data)
if (self.options or {}).get("useSelection"):
if pre_create_data.get("use_selection"):
selected = lib.get_selection()
for obj in selected:
asset_group.objects.link(obj)
elif (self.options or {}).get("asset_group"):
obj = (self.options or {}).get("asset_group")
asset_group.objects.link(obj)
collection.objects.link(obj)
return asset_group
return collection

View file

@ -2,50 +2,30 @@
import bpy
from openpype.pipeline import get_current_task_name
from openpype.hosts.blender.api import plugin, lib, ops
from openpype.hosts.blender.api.pipeline import AVALON_INSTANCES
from openpype.hosts.blender.api import plugin, lib
class CreateRig(plugin.Creator):
"""Artist-friendly rig with controls to direct motion"""
class CreateRig(plugin.BaseCreator):
"""Artist-friendly rig with controls to direct motion."""
name = "rigMain"
identifier = "io.openpype.creators.blender.rig"
label = "Rig"
family = "rig"
icon = "wheelchair"
def process(self):
""" Run the creator on Blender main thread"""
mti = ops.MainThreadItem(self._process)
ops.execute_in_main_thread(mti)
create_as_asset_group = True
def _process(self):
# Get Instance Container or create it if it does not exist
instances = bpy.data.collections.get(AVALON_INSTANCES)
if not instances:
instances = bpy.data.collections.new(name=AVALON_INSTANCES)
bpy.context.scene.collection.children.link(instances)
# Create instance object
asset = self.data["asset"]
subset = self.data["subset"]
name = plugin.asset_name(asset, subset)
asset_group = bpy.data.objects.new(name=name, object_data=None)
asset_group.empty_display_type = 'SINGLE_ARROW'
instances.objects.link(asset_group)
self.data['task'] = get_current_task_name()
lib.imprint(asset_group, self.data)
def create(
self, subset_name: str, instance_data: dict, pre_create_data: dict
):
asset_group = super().create(subset_name,
instance_data,
pre_create_data)
# Add selected objects to instance
if (self.options or {}).get("useSelection"):
if pre_create_data.get("use_selection"):
bpy.context.view_layer.objects.active = asset_group
selected = lib.get_selection()
for obj in selected:
if obj.parent in selected:
obj.select_set(False)
continue
selected.append(asset_group)
bpy.ops.object.parent_set(keep_transform=True)
for obj in lib.get_selection():
obj.parent = asset_group
return asset_group

View file

@ -0,0 +1,105 @@
import bpy
from openpype.pipeline import CreatedInstance, AutoCreator
from openpype.client import get_asset_by_name
from openpype.hosts.blender.api.plugin import BaseCreator
from openpype.hosts.blender.api.pipeline import (
AVALON_PROPERTY,
AVALON_CONTAINERS
)
class CreateWorkfile(BaseCreator, AutoCreator):
"""Workfile auto-creator.
The workfile instance stores its data on the `AVALON_CONTAINERS` collection
as custom attributes, because unlike other instances it doesn't have an
instance node of its own.
"""
identifier = "io.openpype.creators.blender.workfile"
label = "Workfile"
family = "workfile"
icon = "fa5.file"
def create(self):
"""Create workfile instances."""
current_instance = next(
(
instance for instance in self.create_context.instances
if instance.creator_identifier == self.identifier
),
None,
)
project_name = self.project_name
asset_name = self.create_context.get_current_asset_name()
task_name = self.create_context.get_current_task_name()
host_name = self.create_context.host_name
if not current_instance:
asset_doc = get_asset_by_name(project_name, asset_name)
subset_name = self.get_subset_name(
task_name, task_name, asset_doc, project_name, host_name
)
data = {
"asset": asset_name,
"task": task_name,
"variant": task_name,
}
data.update(
self.get_dynamic_data(
task_name,
task_name,
asset_doc,
project_name,
host_name,
current_instance,
)
)
self.log.info("Auto-creating workfile instance...")
current_instance = CreatedInstance(
self.family, subset_name, data, self
)
instance_node = bpy.data.collections.get(AVALON_CONTAINERS, {})
current_instance.transient_data["instance_node"] = instance_node
self._add_instance_to_context(current_instance)
elif (
current_instance["asset"] != asset_name
or current_instance["task"] != task_name
):
# Update instance context if it's different
asset_doc = get_asset_by_name(project_name, asset_name)
subset_name = self.get_subset_name(
task_name, task_name, asset_doc, project_name, host_name
)
current_instance["asset"] = asset_name
current_instance["task"] = task_name
current_instance["subset"] = subset_name
def collect_instances(self):
instance_node = bpy.data.collections.get(AVALON_CONTAINERS)
if not instance_node:
return
property = instance_node.get(AVALON_PROPERTY)
if not property:
return
# Create instance object from existing data
instance = CreatedInstance.from_existing(
instance_data=property.to_dict(),
creator=self
)
instance.transient_data["instance_node"] = instance_node
# Add instance to create context
self._add_instance_to_context(instance)
def remove_instances(self, instances):
for instance in instances:
node = instance.transient_data["instance_node"]
del node[AVALON_PROPERTY]
self._remove_instance_from_context(instance)
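Because the workfile instance piggybacks on the `AVALON_CONTAINERS` collection, its stored data can be inspected directly on that collection. A minimal sketch, assuming the collection already exists in the current scene:

import bpy
from openpype.hosts.blender.api.pipeline import AVALON_CONTAINERS, AVALON_PROPERTY

containers = bpy.data.collections.get(AVALON_CONTAINERS)
props = containers.get(AVALON_PROPERTY) if containers else None
if props:
    print(props.to_dict())   # e.g. asset, task, variant, subset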

View file

@ -7,7 +7,7 @@ def append_workfile(context, fname, do_import):
asset = context['asset']['name']
subset = context['subset']['name']
group_name = plugin.asset_name(asset, subset)
group_name = plugin.prepare_scene_name(asset, subset)
# We need to preserve the original names of the scenes, otherwise,
# if there are duplicate names in the current workfile, the imported

View file

@ -137,9 +137,9 @@ class CacheModelLoader(plugin.AssetLoader):
asset = context["asset"]["name"]
subset = context["subset"]["name"]
asset_name = plugin.asset_name(asset, subset)
asset_name = plugin.prepare_scene_name(asset, subset)
unique_number = plugin.get_unique_number(asset, subset)
group_name = plugin.asset_name(asset, subset, unique_number)
group_name = plugin.prepare_scene_name(asset, subset, unique_number)
namespace = namespace or f"{asset}_{unique_number}"
containers = bpy.data.collections.get(AVALON_CONTAINERS)

View file

@ -7,7 +7,7 @@ from typing import Dict, List, Optional
import bpy
from openpype.pipeline import get_representation_path
import openpype.hosts.blender.api.plugin
from openpype.hosts.blender.api import plugin
from openpype.hosts.blender.api.pipeline import (
containerise_existing,
AVALON_PROPERTY,
@ -16,7 +16,7 @@ from openpype.hosts.blender.api.pipeline import (
logger = logging.getLogger("openpype").getChild("blender").getChild("load_action")
class BlendActionLoader(openpype.hosts.blender.api.plugin.AssetLoader):
class BlendActionLoader(plugin.AssetLoader):
"""Load action from a .blend file.
Warning:
@ -46,8 +46,8 @@ class BlendActionLoader(openpype.hosts.blender.api.plugin.AssetLoader):
libpath = self.filepath_from_context(context)
asset = context["asset"]["name"]
subset = context["subset"]["name"]
lib_container = openpype.hosts.blender.api.plugin.asset_name(asset, subset)
container_name = openpype.hosts.blender.api.plugin.asset_name(
lib_container = plugin.prepare_scene_name(asset, subset)
container_name = plugin.prepare_scene_name(
asset, subset, namespace
)
@ -152,7 +152,7 @@ class BlendActionLoader(openpype.hosts.blender.api.plugin.AssetLoader):
assert libpath.is_file(), (
f"The file doesn't exist: {libpath}"
)
assert extension in openpype.hosts.blender.api.plugin.VALID_EXTENSIONS, (
assert extension in plugin.VALID_EXTENSIONS, (
f"Unsupported file: {libpath}"
)

View file

@ -42,9 +42,9 @@ class AudioLoader(plugin.AssetLoader):
asset = context["asset"]["name"]
subset = context["subset"]["name"]
asset_name = plugin.asset_name(asset, subset)
asset_name = plugin.prepare_scene_name(asset, subset)
unique_number = plugin.get_unique_number(asset, subset)
group_name = plugin.asset_name(asset, subset, unique_number)
group_name = plugin.prepare_scene_name(asset, subset, unique_number)
namespace = namespace or f"{asset}_{unique_number}"
avalon_container = bpy.data.collections.get(AVALON_CONTAINERS)

View file

@ -4,11 +4,11 @@ from pathlib import Path
import bpy
from openpype.pipeline import (
legacy_create,
get_representation_path,
AVALON_CONTAINER_ID,
registered_host
)
from openpype.pipeline.create import get_legacy_creator_by_name
from openpype.pipeline.create import CreateContext
from openpype.hosts.blender.api import plugin
from openpype.hosts.blender.api.lib import imprint
from openpype.hosts.blender.api.pipeline import (
@ -57,19 +57,21 @@ class BlendLoader(plugin.AssetLoader):
obj.get(AVALON_PROPERTY).get('family') == 'rig'
)
]
if not rigs:
return
# Create animation instances for each rig
creator_identifier = "io.openpype.creators.blender.animation"
host = registered_host()
create_context = CreateContext(host)
for rig in rigs:
creator_plugin = get_legacy_creator_by_name("CreateAnimation")
legacy_create(
creator_plugin,
name=rig.name.split(':')[-1] + "_animation",
asset=asset,
options={
"useSelection": False,
create_context.create(
creator_identifier=creator_identifier,
variant=rig.name.split(':')[-1],
pre_create_data={
"use_selection": False,
"asset_group": rig
},
data={
"dependencies": representation
}
)
@ -90,7 +92,6 @@ class BlendLoader(plugin.AssetLoader):
members.append(data)
container = self._get_asset_container(data_to.objects)
print(container)
assert container, "No asset group found"
container.name = group_name
@ -104,8 +105,6 @@ class BlendLoader(plugin.AssetLoader):
print(obj)
bpy.context.scene.collection.objects.link(obj)
print("")
# Remove the library from the blend file
library = bpy.data.libraries.get(bpy.path.basename(libpath))
bpy.data.libraries.remove(library)
@ -134,9 +133,9 @@ class BlendLoader(plugin.AssetLoader):
representation = str(context["representation"]["_id"])
asset_name = plugin.asset_name(asset, subset)
asset_name = plugin.prepare_scene_name(asset, subset)
unique_number = plugin.get_unique_number(asset, subset)
group_name = plugin.asset_name(asset, subset, unique_number)
group_name = plugin.prepare_scene_name(asset, subset, unique_number)
namespace = namespace or f"{asset}_{unique_number}"
avalon_container = bpy.data.collections.get(AVALON_CONTAINERS)

View file

@ -85,9 +85,9 @@ class BlendSceneLoader(plugin.AssetLoader):
except ValueError:
family = "model"
asset_name = plugin.asset_name(asset, subset)
asset_name = plugin.prepare_scene_name(asset, subset)
unique_number = plugin.get_unique_number(asset, subset)
group_name = plugin.asset_name(asset, subset, unique_number)
group_name = plugin.prepare_scene_name(asset, subset, unique_number)
namespace = namespace or f"{asset}_{unique_number}"
avalon_container = bpy.data.collections.get(AVALON_CONTAINERS)

View file

@ -87,9 +87,9 @@ class AbcCameraLoader(plugin.AssetLoader):
asset = context["asset"]["name"]
subset = context["subset"]["name"]
asset_name = plugin.asset_name(asset, subset)
asset_name = plugin.prepare_scene_name(asset, subset)
unique_number = plugin.get_unique_number(asset, subset)
group_name = plugin.asset_name(asset, subset, unique_number)
group_name = plugin.prepare_scene_name(asset, subset, unique_number)
namespace = namespace or f"{asset}_{unique_number}"
avalon_container = bpy.data.collections.get(AVALON_CONTAINERS)

View file

@ -90,9 +90,9 @@ class FbxCameraLoader(plugin.AssetLoader):
asset = context["asset"]["name"]
subset = context["subset"]["name"]
asset_name = plugin.asset_name(asset, subset)
asset_name = plugin.prepare_scene_name(asset, subset)
unique_number = plugin.get_unique_number(asset, subset)
group_name = plugin.asset_name(asset, subset, unique_number)
group_name = plugin.prepare_scene_name(asset, subset, unique_number)
namespace = namespace or f"{asset}_{unique_number}"
avalon_container = bpy.data.collections.get(AVALON_CONTAINERS)

View file

@ -134,9 +134,9 @@ class FbxModelLoader(plugin.AssetLoader):
asset = context["asset"]["name"]
subset = context["subset"]["name"]
asset_name = plugin.asset_name(asset, subset)
asset_name = plugin.prepare_scene_name(asset, subset)
unique_number = plugin.get_unique_number(asset, subset)
group_name = plugin.asset_name(asset, subset, unique_number)
group_name = plugin.prepare_scene_name(asset, subset, unique_number)
namespace = namespace or f"{asset}_{unique_number}"
avalon_container = bpy.data.collections.get(AVALON_CONTAINERS)

View file

@ -123,6 +123,7 @@ class JsonLayoutLoader(plugin.AssetLoader):
# raise ValueError("Creator plugin \"CreateCamera\" was "
# "not found.")
# TODO: Refactor legacy create usage to new style creators
# legacy_create(
# creator_plugin,
# name="camera",
@ -148,9 +149,9 @@ class JsonLayoutLoader(plugin.AssetLoader):
asset = context["asset"]["name"]
subset = context["subset"]["name"]
asset_name = plugin.asset_name(asset, subset)
asset_name = plugin.prepare_scene_name(asset, subset)
unique_number = plugin.get_unique_number(asset, subset)
group_name = plugin.asset_name(asset, subset, unique_number)
group_name = plugin.prepare_scene_name(asset, subset, unique_number)
namespace = namespace or f"{asset}_{unique_number}"
avalon_container = bpy.data.collections.get(AVALON_CONTAINERS)

View file

@ -96,14 +96,14 @@ class BlendLookLoader(plugin.AssetLoader):
asset = context["asset"]["name"]
subset = context["subset"]["name"]
lib_container = plugin.asset_name(
lib_container = plugin.prepare_scene_name(
asset, subset
)
unique_number = plugin.get_unique_number(
asset, subset
)
namespace = namespace or f"{asset}_{unique_number}"
container_name = plugin.asset_name(
container_name = plugin.prepare_scene_name(
asset, subset, unique_number
)

View file

@ -1,72 +1,15 @@
import os
import bpy
import pyblish.api
from openpype.pipeline import get_current_task_name, get_current_asset_name
from openpype.hosts.blender.api import workio
class SaveWorkfiledAction(pyblish.api.Action):
"""Save Workfile."""
label = "Save Workfile"
on = "failed"
icon = "save"
def process(self, context, plugin):
bpy.ops.wm.avalon_workfiles()
class CollectBlenderCurrentFile(pyblish.api.ContextPlugin):
"""Inject the current working file into context"""
order = pyblish.api.CollectorOrder - 0.5
label = "Blender Current File"
hosts = ["blender"]
actions = [SaveWorkfiledAction]
def process(self, context):
"""Inject the current working file"""
current_file = workio.current_file()
context.data["currentFile"] = current_file
assert current_file, (
"Current file is empty. Save the file before continuing."
)
folder, file = os.path.split(current_file)
filename, ext = os.path.splitext(file)
task = get_current_task_name()
data = {}
# create instance
instance = context.create_instance(name=filename)
subset = "workfile" + task.capitalize()
data.update({
"subset": subset,
"asset": get_current_asset_name(),
"label": subset,
"publish": True,
"family": "workfile",
"families": ["workfile"],
"setMembers": [current_file],
"frameStart": bpy.context.scene.frame_start,
"frameEnd": bpy.context.scene.frame_end,
})
data["representations"] = [{
"name": ext.lstrip("."),
"ext": ext.lstrip("."),
"files": file,
"stagingDir": folder,
}]
instance.data.update(data)
self.log.info("Collected instance: {}".format(file))
self.log.info("Scene path: {}".format(current_file))
self.log.info("staging Dir: {}".format(folder))
self.log.info("subset: {}".format(subset))

View file

@ -0,0 +1,43 @@
import bpy
import pyblish.api
from openpype.pipeline.publish import KnownPublishError
from openpype.hosts.blender.api.pipeline import AVALON_PROPERTY
class CollectBlenderInstanceData(pyblish.api.InstancePlugin):
"""Validator to verify that the instance is not empty"""
order = pyblish.api.CollectorOrder
hosts = ["blender"]
families = ["model", "pointcache", "animation", "rig", "camera", "layout",
"blendScene"]
label = "Collect Instance"
def process(self, instance):
instance_node = instance.data["transientData"]["instance_node"]
# Collect members of the instance
members = [instance_node]
if isinstance(instance_node, bpy.types.Collection):
members.extend(instance_node.objects)
members.extend(instance_node.children)
# Special case for animation instances, include armatures
if instance.data["family"] == "animation":
for obj in instance_node.objects:
if obj.type == 'EMPTY' and obj.get(AVALON_PROPERTY):
members.extend(
child for child in obj.children
if child.type == 'ARMATURE'
)
elif isinstance(instance_node, bpy.types.Object):
members.extend(instance_node.children_recursive)
else:
raise KnownPublishError(
f"Unsupported instance node type '{type(instance_node)}' "
f"for instance '{instance}'"
)
instance[:] = members

View file

@ -1,70 +0,0 @@
from typing import Generator
import bpy
import pyblish.api
from openpype.hosts.blender.api.pipeline import (
AVALON_INSTANCES,
AVALON_PROPERTY,
)
class CollectInstances(pyblish.api.ContextPlugin):
"""Collect the data of a model."""
hosts = ["blender"]
label = "Collect Instances"
order = pyblish.api.CollectorOrder
@staticmethod
def get_asset_groups() -> Generator:
"""Return all instances that are empty objects asset groups.
"""
instances = bpy.data.collections.get(AVALON_INSTANCES)
for obj in list(instances.objects) + list(instances.children):
avalon_prop = obj.get(AVALON_PROPERTY) or {}
if avalon_prop.get('id') == 'pyblish.avalon.instance':
yield obj
@staticmethod
def create_instance(context, group):
avalon_prop = group[AVALON_PROPERTY]
asset = avalon_prop['asset']
family = avalon_prop['family']
subset = avalon_prop['subset']
task = avalon_prop['task']
name = f"{asset}_{subset}"
return context.create_instance(
name=name,
family=family,
families=[family],
subset=subset,
asset=asset,
task=task,
)
def process(self, context):
"""Collect the models from the current Blender scene."""
asset_groups = self.get_asset_groups()
for group in asset_groups:
instance = self.create_instance(context, group)
instance.data["instance_group"] = group
members = []
if isinstance(group, bpy.types.Collection):
members = list(group.objects)
family = instance.data["family"]
if family == "animation":
for obj in group.objects:
if obj.type == 'EMPTY' and obj.get(AVALON_PROPERTY):
members.extend(
child for child in obj.children
if child.type == 'ARMATURE')
else:
members = group.children_recursive
members.append(group)
instance[:] = members
self.log.debug(instance.data)
for obj in instance:
self.log.debug(obj)

View file

@ -73,11 +73,12 @@ class CollectBlenderRender(pyblish.api.InstancePlugin):
def process(self, instance):
context = instance.context
render_data = bpy.data.collections[str(instance)].get("render_data")
instance_node = instance.data["transientData"]["instance_node"]
render_data = instance_node.get("render_data")
assert render_data, "No render data found."
self.log.info(f"render_data: {dict(render_data)}")
self.log.debug(f"render_data: {dict(render_data)}")
render_product = render_data.get("render_product")
aov_file_product = render_data.get("aov_file_product")
@ -120,4 +121,4 @@ class CollectBlenderRender(pyblish.api.InstancePlugin):
"renderProducts": colorspace.ARenderProduct(),
})
self.log.info(f"data: {instance.data}")
self.log.debug(f"data: {instance.data}")

View file

@ -16,10 +16,12 @@ class CollectReview(pyblish.api.InstancePlugin):
self.log.debug(f"instance: {instance}")
datablock = instance.data["transientData"]["instance_node"]
# get cameras
cameras = [
obj
for obj in instance
for obj in datablock.all_objects
if isinstance(obj, bpy.types.Object) and obj.type == "CAMERA"
]

View file

@ -0,0 +1,37 @@
from pathlib import Path
from pyblish.api import InstancePlugin, CollectorOrder
class CollectWorkfile(InstancePlugin):
"""Inject workfile data into its instance."""
order = CollectorOrder
label = "Collect Workfile"
hosts = ["blender"]
families = ["workfile"]
def process(self, instance):
"""Process collector."""
context = instance.context
filepath = Path(context.data["currentFile"])
ext = filepath.suffix
instance.data.update(
{
"setMembers": [filepath.as_posix()],
"frameStart": context.data.get("frameStart", 1),
"frameEnd": context.data.get("frameEnd", 1),
"handleStart": context.data.get("handleStart", 1),
"handledEnd": context.data.get("handleEnd", 1),
"representations": [
{
"name": ext.lstrip("."),
"ext": ext.lstrip("."),
"files": filepath.name,
"stagingDir": filepath.parent,
}
],
}
)

View file

@ -4,10 +4,9 @@ import bpy
from openpype.pipeline import publish
from openpype.hosts.blender.api import plugin
from openpype.hosts.blender.api.pipeline import AVALON_PROPERTY
class ExtractABC(publish.Extractor):
class ExtractABC(publish.Extractor, publish.OptionalPyblishPluginMixin):
"""Extract as ABC."""
label = "Extract ABC"
@ -15,6 +14,9 @@ class ExtractABC(publish.Extractor):
families = ["pointcache"]
def process(self, instance):
if not self.is_active(instance.data):
return
# Define extract output file path
stagingdir = self.staging_dir(instance)
filename = f"{instance.name}.abc"
@ -25,18 +27,16 @@ class ExtractABC(publish.Extractor):
plugin.deselect_all()
selected = []
active = None
asset_group = instance.data["transientData"]["instance_node"]
selected = []
for obj in instance:
obj.select_set(True)
selected.append(obj)
# Set as active the asset group
if obj.get(AVALON_PROPERTY):
active = obj
if isinstance(obj, bpy.types.Object):
obj.select_set(True)
selected.append(obj)
context = plugin.create_blender_context(
active=active, selected=selected)
active=asset_group, selected=selected)
with bpy.context.temp_override(**context):
# We export the abc
@ -59,8 +59,8 @@ class ExtractABC(publish.Extractor):
}
instance.data["representations"].append(representation)
self.log.info("Extracted instance '%s' to: %s",
instance.name, representation)
self.log.debug("Extracted instance '%s' to: %s",
instance.name, representation)
class ExtractModelABC(ExtractABC):

View file

@ -6,7 +6,10 @@ from openpype.pipeline import publish
from openpype.hosts.blender.api import plugin
class ExtractAnimationABC(publish.Extractor):
class ExtractAnimationABC(
publish.Extractor,
publish.OptionalPyblishPluginMixin,
):
"""Extract as ABC."""
label = "Extract Animation ABC"
@ -15,6 +18,9 @@ class ExtractAnimationABC(publish.Extractor):
optional = True
def process(self, instance):
if not self.is_active(instance.data):
return
# Define extract output file path
stagingdir = self.staging_dir(instance)
filename = f"{instance.name}.abc"
@ -26,7 +32,7 @@ class ExtractAnimationABC(publish.Extractor):
plugin.deselect_all()
selected = []
asset_group = None
asset_group = instance.data["transientData"]["instance_node"]
objects = []
for obj in instance:
@ -66,5 +72,5 @@ class ExtractAnimationABC(publish.Extractor):
}
instance.data["representations"].append(representation)
self.log.info("Extracted instance '%s' to: %s",
instance.name, representation)
self.log.debug("Extracted instance '%s' to: %s",
instance.name, representation)

View file

@ -5,7 +5,7 @@ import bpy
from openpype.pipeline import publish
class ExtractBlend(publish.Extractor):
class ExtractBlend(publish.Extractor, publish.OptionalPyblishPluginMixin):
"""Extract a blend file."""
label = "Extract Blend"
@ -14,6 +14,9 @@ class ExtractBlend(publish.Extractor):
optional = True
def process(self, instance):
if not self.is_active(instance.data):
return
# Define extract output file path
stagingdir = self.staging_dir(instance)
@ -60,5 +63,5 @@ class ExtractBlend(publish.Extractor):
}
instance.data["representations"].append(representation)
self.log.info("Extracted instance '%s' to: %s",
instance.name, representation)
self.log.debug("Extracted instance '%s' to: %s",
instance.name, representation)

View file

@ -5,7 +5,10 @@ import bpy
from openpype.pipeline import publish
class ExtractBlendAnimation(publish.Extractor):
class ExtractBlendAnimation(
publish.Extractor,
publish.OptionalPyblishPluginMixin,
):
"""Extract a blend file."""
label = "Extract Blend"
@ -14,6 +17,9 @@ class ExtractBlendAnimation(publish.Extractor):
optional = True
def process(self, instance):
if not self.is_active(instance.data):
return
# Define extract output file path
stagingdir = self.staging_dir(instance)
@ -50,5 +56,5 @@ class ExtractBlendAnimation(publish.Extractor):
}
instance.data["representations"].append(representation)
self.log.info("Extracted instance '%s' to: %s",
instance.name, representation)
self.log.debug("Extracted instance '%s' to: %s",
instance.name, representation)

View file

@ -7,7 +7,7 @@ from openpype.hosts.blender.api import plugin
from openpype.hosts.blender.api.pipeline import AVALON_PROPERTY
class ExtractCameraABC(publish.Extractor):
class ExtractCameraABC(publish.Extractor, publish.OptionalPyblishPluginMixin):
"""Extract camera as ABC."""
label = "Extract Camera (ABC)"
@ -16,6 +16,9 @@ class ExtractCameraABC(publish.Extractor):
optional = True
def process(self, instance):
if not self.is_active(instance.data):
return
# Define extract output file path
stagingdir = self.staging_dir(instance)
filename = f"{instance.name}.abc"
@ -26,12 +29,7 @@ class ExtractCameraABC(publish.Extractor):
plugin.deselect_all()
asset_group = None
for obj in instance:
if obj.get(AVALON_PROPERTY):
asset_group = obj
break
assert asset_group, "No asset group found"
asset_group = instance.data["transientData"]["instance_node"]
# Need to cast to list because children is a tuple
selected = list(asset_group.children)
@ -64,5 +62,5 @@ class ExtractCameraABC(publish.Extractor):
}
instance.data["representations"].append(representation)
self.log.info("Extracted instance '%s' to: %s",
instance.name, representation)
self.log.debug("Extracted instance '%s' to: %s",
instance.name, representation)

View file

@ -6,7 +6,7 @@ from openpype.pipeline import publish
from openpype.hosts.blender.api import plugin
class ExtractCamera(publish.Extractor):
class ExtractCamera(publish.Extractor, publish.OptionalPyblishPluginMixin):
"""Extract as the camera as FBX."""
label = "Extract Camera (FBX)"
@ -15,6 +15,9 @@ class ExtractCamera(publish.Extractor):
optional = True
def process(self, instance):
if not self.is_active(instance.data):
return
# Define extract output file path
stagingdir = self.staging_dir(instance)
filename = f"{instance.name}.fbx"
@ -73,5 +76,5 @@ class ExtractCamera(publish.Extractor):
}
instance.data["representations"].append(representation)
self.log.info("Extracted instance '%s' to: %s",
instance.name, representation)
self.log.debug("Extracted instance '%s' to: %s",
instance.name, representation)

View file

@ -7,7 +7,7 @@ from openpype.hosts.blender.api import plugin
from openpype.hosts.blender.api.pipeline import AVALON_PROPERTY
class ExtractFBX(publish.Extractor):
class ExtractFBX(publish.Extractor, publish.OptionalPyblishPluginMixin):
"""Extract as FBX."""
label = "Extract FBX"
@ -16,6 +16,9 @@ class ExtractFBX(publish.Extractor):
optional = True
def process(self, instance):
if not self.is_active(instance.data):
return
# Define extract output file path
stagingdir = self.staging_dir(instance)
filename = f"{instance.name}.fbx"
@ -26,14 +29,12 @@ class ExtractFBX(publish.Extractor):
plugin.deselect_all()
selected = []
asset_group = None
asset_group = instance.data["transientData"]["instance_node"]
selected = []
for obj in instance:
obj.select_set(True)
selected.append(obj)
if obj.get(AVALON_PROPERTY):
asset_group = obj
context = plugin.create_blender_context(
active=asset_group, selected=selected)
@ -84,5 +85,5 @@ class ExtractFBX(publish.Extractor):
}
instance.data["representations"].append(representation)
self.log.info("Extracted instance '%s' to: %s",
instance.name, representation)
self.log.debug("Extracted instance '%s' to: %s",
instance.name, representation)

View file

@ -10,7 +10,41 @@ from openpype.hosts.blender.api import plugin
from openpype.hosts.blender.api.pipeline import AVALON_PROPERTY
class ExtractAnimationFBX(publish.Extractor):
def get_all_parents(obj):
"""Get all recursive parents of object"""
result = []
while True:
obj = obj.parent
if not obj:
break
result.append(obj)
return result
def get_highest_root(objects):
# Get the highest object that is also in the collection
included_objects = {obj.name_full for obj in objects}
num_parents_to_obj = {}
for obj in objects:
if isinstance(obj, bpy.types.Object):
parents = get_all_parents(obj)
# included parents
parents = [parent for parent in parents if
parent.name_full in included_objects]
if not parents:
# A node without parents must be a highest root
return obj
num_parents_to_obj.setdefault(len(parents), obj)
minimum_parent = min(num_parents_to_obj)
return num_parents_to_obj[minimum_parent]
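To make the parent-counting heuristic above concrete, here is a minimal, Blender-free sketch of the same logic; the _Node class and the object names are illustrative stand-ins, not part of this change:

class _Node:
    """Stand-in for bpy.types.Object with only the attributes used here."""
    def __init__(self, name, parent=None):
        self.name_full = name
        self.parent = parent

def _ancestors(node):
    while node.parent:
        node = node.parent
        yield node

root = _Node("rig_root")
armature = _Node("armature", parent=root)
mesh = _Node("mesh", parent=armature)
members = {root, armature, mesh}
included = {node.name_full for node in members}

# Count only the ancestors that are also members of the instance;
# the node with the fewest such ancestors is treated as the export root.
num_parents_to_node = {}
for node in members:
    parents = [p for p in _ancestors(node) if p.name_full in included]
    num_parents_to_node.setdefault(len(parents), node)

assert num_parents_to_node[min(num_parents_to_node)] is root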
class ExtractAnimationFBX(
publish.Extractor,
publish.OptionalPyblishPluginMixin,
):
"""Extract as animation."""
label = "Extract FBX"
@ -19,23 +53,43 @@ class ExtractAnimationFBX(publish.Extractor):
optional = True
def process(self, instance):
if not self.is_active(instance.data):
return
# Define extract output file path
stagingdir = self.staging_dir(instance)
# Perform extraction
self.log.debug("Performing extraction..")
# The first collection object in the instance is taken, as there
# should be only one that contains the asset group.
collection = [
obj for obj in instance if type(obj) is bpy.types.Collection][0]
asset_group = instance.data["transientData"]["instance_node"]
# Again, the first object in the collection is taken , as there
# should be only the asset group in the collection.
asset_group = collection.objects[0]
# Get objects in this collection (but not in children collections)
# and for those objects include the children hierarchy
# TODO: Would it make more sense for the Collect Instance collector
# to also always retrieve all the children?
objects = set(asset_group.objects)
armature = [
obj for obj in asset_group.children if obj.type == 'ARMATURE'][0]
# From the direct children of the collection find the 'root' node
# that we want to export - it is the 'highest' node in a hierarchy
root = get_highest_root(objects)
for obj in list(objects):
objects.update(obj.children_recursive)
# Find all armatures among the objects, assume to find only one
armatures = [obj for obj in objects if obj.type == "ARMATURE"]
if not armatures:
raise RuntimeError(
f"Unable to find ARMATURE in collection: "
f"{asset_group.name}"
)
elif len(armatures) > 1:
self.log.warning(
"Found more than one ARMATURE, using "
f"only first of: {armatures}"
)
armature = armatures[0]
object_action_pairs = []
original_actions = []
@ -44,9 +98,6 @@ class ExtractAnimationFBX(publish.Extractor):
ending_frames = []
# For each armature, we make a copy of the current action
curr_action = None
copy_action = None
if armature.animation_data and armature.animation_data.action:
curr_action = armature.animation_data.action
copy_action = curr_action.copy()
@ -56,12 +107,20 @@ class ExtractAnimationFBX(publish.Extractor):
starting_frames.append(curr_frame_range[0])
ending_frames.append(curr_frame_range[1])
else:
self.log.info("Object have no animation.")
self.log.info(
f"Armature '{armature.name}' has no animation, "
f"skipping FBX animation extraction for {instance}."
)
return
asset_group_name = asset_group.name
asset_group.name = asset_group.get(AVALON_PROPERTY).get("asset_name")
asset_name = asset_group.get(AVALON_PROPERTY).get("asset_name")
if asset_name:
# Rename for the export; this data is only present when loaded
# from a JSON Layout (layout family)
asset_group.name = asset_name
# Remove : from the armature name for the export
armature_name = armature.name
original_name = armature_name.split(':')[1]
armature.name = original_name
@ -84,13 +143,13 @@ class ExtractAnimationFBX(publish.Extractor):
for obj in bpy.data.objects:
obj.select_set(False)
asset_group.select_set(True)
root.select_set(True)
armature.select_set(True)
fbx_filename = f"{instance.name}_{armature.name}.fbx"
filepath = os.path.join(stagingdir, fbx_filename)
override = plugin.create_blender_context(
active=asset_group, selected=[asset_group, armature])
active=root, selected=[root, armature])
bpy.ops.export_scene.fbx(
override,
filepath=filepath,
@ -104,7 +163,7 @@ class ExtractAnimationFBX(publish.Extractor):
)
armature.name = armature_name
asset_group.name = asset_group_name
asset_group.select_set(False)
root.select_set(False)
armature.select_set(False)
# We delete the baked action and set the original one back
@ -158,5 +217,5 @@ class ExtractAnimationFBX(publish.Extractor):
instance.data["representations"].append(fbx_representation)
instance.data["representations"].append(json_representation)
self.log.info("Extracted instance '{}' to: {}".format(
instance.name, fbx_representation))
self.log.debug("Extracted instance '{}' to: {}".format(
instance.name, fbx_representation))

View file

@ -11,7 +11,7 @@ from openpype.hosts.blender.api import plugin
from openpype.hosts.blender.api.pipeline import AVALON_PROPERTY
class ExtractLayout(publish.Extractor):
class ExtractLayout(publish.Extractor, publish.OptionalPyblishPluginMixin):
"""Extract a layout."""
label = "Extract Layout"
@ -45,7 +45,7 @@ class ExtractLayout(publish.Extractor):
starting_frames.append(curr_frame_range[0])
ending_frames.append(curr_frame_range[1])
else:
self.log.info("Object have no animation.")
self.log.info("Object has no animation.")
continue
asset_group_name = asset.name
@ -113,6 +113,9 @@ class ExtractLayout(publish.Extractor):
return None, n
def process(self, instance):
if not self.is_active(instance.data):
return
# Define extract output file path
stagingdir = self.staging_dir(instance)
@ -125,13 +128,22 @@ class ExtractLayout(publish.Extractor):
json_data = []
fbx_files = []
asset_group = bpy.data.objects[str(instance)]
asset_group = instance.data["transientData"]["instance_node"]
fbx_count = 0
project_name = instance.context.data["projectEntity"]["name"]
for asset in asset_group.children:
metadata = asset.get(AVALON_PROPERTY)
if not metadata:
# Avoid raising error directly if there's just invalid data
# inside the instance; better to log it to the artist
# TODO: This should actually be validated in a validator
self.log.warning(
f"Found content in layout that is not a loaded "
f"asset, skipping: {asset.name_full}"
)
continue
version_id = metadata["parent"]
family = metadata["family"]
@ -245,5 +257,5 @@ class ExtractLayout(publish.Extractor):
}
instance.data["representations"].append(fbx_representation)
self.log.info("Extracted instance '%s' to: %s",
instance.name, json_representation)
self.log.debug("Extracted instance '%s' to: %s",
instance.name, json_representation)

View file

@ -9,7 +9,7 @@ from openpype.hosts.blender.api import capture
from openpype.hosts.blender.api.lib import maintained_time
class ExtractPlayblast(publish.Extractor):
class ExtractPlayblast(publish.Extractor, publish.OptionalPyblishPluginMixin):
"""
Extract viewport playblast.
@ -24,7 +24,8 @@ class ExtractPlayblast(publish.Extractor):
order = pyblish.api.ExtractorOrder + 0.01
def process(self, instance):
self.log.debug("Extracting capture..")
if not self.is_active(instance.data):
return
# get scene fps
fps = instance.data.get("fps")

View file

@ -1,8 +1,12 @@
import pyblish.api
from openpype.pipeline.publish import OptionalPyblishPluginMixin
from openpype.hosts.blender.api.workio import save_file
class IncrementWorkfileVersion(pyblish.api.ContextPlugin):
class IncrementWorkfileVersion(
pyblish.api.ContextPlugin,
OptionalPyblishPluginMixin
):
"""Increment current workfile version."""
order = pyblish.api.IntegratorOrder + 0.9
@ -13,6 +17,8 @@ class IncrementWorkfileVersion(pyblish.api.ContextPlugin):
"pointcache", "render"]
def process(self, context):
if not self.is_active(context.data):
return
assert all(result["success"] for result in context.data["results"]), (
"Publishing not successful so version is not increased.")
@ -23,4 +29,4 @@ class IncrementWorkfileVersion(pyblish.api.ContextPlugin):
save_file(filepath, copy=False)
self.log.info('Incrementing script version')
self.log.debug('Incrementing blender workfile version')

View file

@ -1,9 +1,13 @@
import json
import pyblish.api
from openpype.pipeline.publish import OptionalPyblishPluginMixin
class IntegrateAnimation(pyblish.api.InstancePlugin):
class IntegrateAnimation(
pyblish.api.InstancePlugin,
OptionalPyblishPluginMixin,
):
"""Generate a JSON file for animation."""
label = "Integrate Animation"
@ -13,7 +17,7 @@ class IntegrateAnimation(pyblish.api.InstancePlugin):
families = ["setdress"]
def process(self, instance):
self.log.info("Integrate Animation")
self.log.debug("Integrate Animation")
representation = instance.data.get('representations')[0]
json_path = representation.get('publishedFiles')[0]

View file

@ -5,10 +5,15 @@ import bpy
import pyblish.api
import openpype.hosts.blender.api.action
from openpype.pipeline.publish import ValidateContentsOrder
from openpype.pipeline.publish import (
ValidateContentsOrder,
PublishValidationError,
OptionalPyblishPluginMixin
)
class ValidateCameraZeroKeyframe(pyblish.api.InstancePlugin):
class ValidateCameraZeroKeyframe(pyblish.api.InstancePlugin,
OptionalPyblishPluginMixin):
"""Camera must have a keyframe at frame 0.
Unreal shifts the first keyframe to frame 0. Forcing the camera to have
@ -40,8 +45,12 @@ class ValidateCameraZeroKeyframe(pyblish.api.InstancePlugin):
return invalid
def process(self, instance):
if not self.is_active(instance.data):
return
invalid = self.get_invalid(instance)
if invalid:
raise RuntimeError(
f"Camera must have a keyframe at frame 0: {invalid}"
names = ", ".join(obj.name for obj in invalid)
raise PublishValidationError(
f"Camera must have a keyframe at frame 0: {names}"
)

View file

@ -36,12 +36,12 @@ class ValidateDeadlinePublish(pyblish.api.InstancePlugin,
"Render output folder "
"doesn't match the blender scene name! "
"Use Repair action to "
"fix the folder file path.."
"fix the folder file path."
)
@classmethod
def repair(cls, instance):
container = bpy.data.collections[str(instance)]
container = instance.data["transientData"]["instance_node"]
prepare_rendering(container)
bpy.ops.wm.save_as_mainfile(filepath=bpy.data.filepath)
cls.log.debug("Reset the render output folder...")

View file

@ -2,8 +2,24 @@ import bpy
import pyblish.api
from openpype.pipeline.publish import (
OptionalPyblishPluginMixin,
PublishValidationError
)
class ValidateFileSaved(pyblish.api.InstancePlugin):
class SaveWorkfileAction(pyblish.api.Action):
"""Save Workfile."""
label = "Save Workfile"
on = "failed"
icon = "save"
def process(self, context, plugin):
bpy.ops.wm.avalon_workfiles()
class ValidateFileSaved(pyblish.api.ContextPlugin,
OptionalPyblishPluginMixin):
"""Validate that the workfile has been saved."""
order = pyblish.api.ValidatorOrder - 0.01
@ -11,10 +27,35 @@ class ValidateFileSaved(pyblish.api.InstancePlugin):
label = "Validate File Saved"
optional = False
exclude_families = []
actions = [SaveWorkfileAction]
def process(self, instance):
if [ef for ef in self.exclude_families
if instance.data["family"] in ef]:
def process(self, context):
if not self.is_active(context.data):
return
if not context.data["currentFile"]:
# File has not been saved at all and has no filename
raise PublishValidationError(
"Current file is empty. Save the file before continuing."
)
# Do not validate unsaved workfile changes if the only instances
# present belong to families that should be excluded
families = {
instance.data["family"] for instance in context
# Consider only enabled instances
if instance.data.get("publish", True)
and instance.data.get("active", True)
}
def is_excluded(family):
return any(family in exclude_family
for exclude_family in self.exclude_families)
if all(is_excluded(family) for family in families):
self.log.debug("Only excluded families found, skipping workfile "
"unsaved changes validation..")
return
if bpy.data.is_dirty:
raise RuntimeError("Workfile is not saved.")
raise PublishValidationError("Workfile has unsaved changes.")

View file

@ -1,6 +1,5 @@
import bpy
import pyblish.api
from openpype.pipeline.publish import PublishValidationError
class ValidateInstanceEmpty(pyblish.api.InstancePlugin):
@ -13,11 +12,8 @@ class ValidateInstanceEmpty(pyblish.api.InstancePlugin):
optional = False
def process(self, instance):
asset_group = instance.data["instance_group"]
if isinstance(asset_group, bpy.types.Collection):
if not (asset_group.objects or asset_group.children):
raise RuntimeError(f"Instance {instance.name} is empty.")
elif isinstance(asset_group, bpy.types.Object):
if not asset_group.children:
raise RuntimeError(f"Instance {instance.name} is empty.")
# Members are collected by `collect_instance` so we only need to check
# whether any member is included. The instance node will be included
# as a member as well, hence we will check for at least 2 members
if len(instance) < 2:
raise PublishValidationError(f"Instance {instance.name} is empty.")

View file

@ -4,17 +4,24 @@ import bpy
import pyblish.api
from openpype.pipeline.publish import ValidateContentsOrder
from openpype.pipeline.publish import (
ValidateContentsOrder,
OptionalPyblishPluginMixin,
PublishValidationError
)
import openpype.hosts.blender.api.action
class ValidateMeshHasUvs(pyblish.api.InstancePlugin):
class ValidateMeshHasUvs(
pyblish.api.InstancePlugin,
OptionalPyblishPluginMixin,
):
"""Validate that the current mesh has UV's."""
order = ValidateContentsOrder
hosts = ["blender"]
families = ["model"]
label = "Mesh Has UV's"
label = "Mesh Has UVs"
actions = [openpype.hosts.blender.api.action.SelectInvalidAction]
optional = True
@ -49,8 +56,11 @@ class ValidateMeshHasUvs(pyblish.api.InstancePlugin):
return invalid
def process(self, instance):
if not self.is_active(instance.data):
return
invalid = self.get_invalid(instance)
if invalid:
raise RuntimeError(
raise PublishValidationError(
f"Meshes found in instance without valid UV's: {invalid}"
)

View file

@ -4,11 +4,16 @@ import bpy
import pyblish.api
from openpype.pipeline.publish import ValidateContentsOrder
from openpype.pipeline.publish import (
ValidateContentsOrder,
OptionalPyblishPluginMixin,
PublishValidationError
)
import openpype.hosts.blender.api.action
class ValidateMeshNoNegativeScale(pyblish.api.Validator):
class ValidateMeshNoNegativeScale(pyblish.api.Validator,
OptionalPyblishPluginMixin):
"""Ensure that meshes don't have a negative scale."""
order = ValidateContentsOrder
@ -27,8 +32,12 @@ class ValidateMeshNoNegativeScale(pyblish.api.Validator):
return invalid
def process(self, instance):
if not self.is_active(instance.data):
return
invalid = self.get_invalid(instance)
if invalid:
raise RuntimeError(
f"Meshes found in instance with negative scale: {invalid}"
names = ", ".join(obj.name for obj in invalid)
raise PublishValidationError(
f"Meshes found in instance with negative scale: {names}"
)

View file

@ -5,10 +5,15 @@ import bpy
import pyblish.api
import openpype.hosts.blender.api.action
from openpype.pipeline.publish import ValidateContentsOrder
from openpype.pipeline.publish import (
ValidateContentsOrder,
OptionalPyblishPluginMixin,
PublishValidationError
)
class ValidateNoColonsInName(pyblish.api.InstancePlugin):
class ValidateNoColonsInName(pyblish.api.InstancePlugin,
OptionalPyblishPluginMixin):
"""There cannot be colons in names
Object or bone names cannot include colons. Other software do not
@ -36,8 +41,12 @@ class ValidateNoColonsInName(pyblish.api.InstancePlugin):
return invalid
def process(self, instance):
if not self.is_active(instance.data):
return
invalid = self.get_invalid(instance)
if invalid:
raise RuntimeError(
f"Objects found with colon in name: {invalid}"
names = ", ".join(obj.name for obj in invalid)
raise PublishValidationError(
f"Objects found with colon in name: {names}"
)

View file

@ -3,10 +3,17 @@ from typing import List
import bpy
import pyblish.api
from openpype.pipeline.publish import (
OptionalPyblishPluginMixin,
PublishValidationError
)
import openpype.hosts.blender.api.action
class ValidateObjectIsInObjectMode(pyblish.api.InstancePlugin):
class ValidateObjectIsInObjectMode(
pyblish.api.InstancePlugin,
OptionalPyblishPluginMixin,
):
"""Validate that the objects in the instance are in Object Mode."""
order = pyblish.api.ValidatorOrder - 0.01
@ -25,8 +32,12 @@ class ValidateObjectIsInObjectMode(pyblish.api.InstancePlugin):
return invalid
def process(self, instance):
if not self.is_active(instance.data):
return
invalid = self.get_invalid(instance)
if invalid:
raise RuntimeError(
f"Object found in instance is not in Object Mode: {invalid}"
names = ", ".join(obj.name for obj in invalid)
raise PublishValidationError(
f"Object found in instance is not in Object Mode: {names}"
)

View file

@ -2,8 +2,14 @@ import bpy
import pyblish.api
from openpype.pipeline.publish import (
OptionalPyblishPluginMixin,
PublishValidationError
)
class ValidateRenderCameraIsSet(pyblish.api.InstancePlugin):
class ValidateRenderCameraIsSet(pyblish.api.InstancePlugin,
OptionalPyblishPluginMixin):
"""Validate that there is a camera set as active for rendering."""
order = pyblish.api.ValidatorOrder
@ -13,5 +19,8 @@ class ValidateRenderCameraIsSet(pyblish.api.InstancePlugin):
optional = False
def process(self, instance):
if not self.is_active(instance.data):
return
if not bpy.context.scene.camera:
raise RuntimeError("No camera is active for rendering.")
raise PublishValidationError("No camera is active for rendering.")

View file

@ -6,10 +6,15 @@ import bpy
import pyblish.api
import openpype.hosts.blender.api.action
from openpype.pipeline.publish import ValidateContentsOrder
from openpype.pipeline.publish import (
ValidateContentsOrder,
OptionalPyblishPluginMixin,
PublishValidationError
)
class ValidateTransformZero(pyblish.api.InstancePlugin):
class ValidateTransformZero(pyblish.api.InstancePlugin,
OptionalPyblishPluginMixin):
"""Transforms can't have any values
To solve this issue, try freezing the transforms. So long
@ -38,9 +43,13 @@ class ValidateTransformZero(pyblish.api.InstancePlugin):
return invalid
def process(self, instance):
if not self.is_active(instance.data):
return
invalid = self.get_invalid(instance)
if invalid:
raise RuntimeError(
"Object found in instance has not"
f" transform to zero: {invalid}"
names = ", ".join(obj.name for obj in invalid)
raise PublishValidationError(
"Objects found in instance which do not"
f" have transform set to zero: {names}"
)

View file

@ -149,9 +149,7 @@ class CreateSaver(NewCreator):
# get frame padding from anatomy templates
anatomy = Anatomy()
frame_padding = int(
anatomy.templates["render"].get("frame_padding", 4)
)
frame_padding = anatomy.templates["frame_padding"]
# Subset change detected
workdir = os.path.normpath(legacy_io.Session["AVALON_WORKDIR"])

View file

@ -62,19 +62,6 @@ SHAPE_ATTRS = {"castsShadows",
"doubleSided",
"opposite"}
RENDER_ATTRS = {"vray": {
"node": "vraySettings",
"prefix": "fileNamePrefix",
"padding": "fileNamePadding",
"ext": "imageFormatStr"
},
"default": {
"node": "defaultRenderGlobals",
"prefix": "imageFilePrefix",
"padding": "extensionPadding"
}
}
DEFAULT_MATRIX = [1.0, 0.0, 0.0, 0.0,
0.0, 1.0, 0.0, 0.0,

View file

@ -33,6 +33,14 @@ class RenderSettings(object):
def get_image_prefix_attr(cls, renderer):
return cls._image_prefix_nodes[renderer]
@staticmethod
def get_padding_attr(renderer):
"""Return attribute for renderer that defines frame padding amount"""
if renderer == "vray":
return "vraySettings.fileNamePadding"
else:
return "defaultRenderGlobals.extensionPadding"
def __init__(self, project_settings=None):
if not project_settings:
project_settings = get_project_settings(

View file

@ -12,6 +12,7 @@ from openpype.pipeline.publish import (
PublishValidationError,
)
from openpype.hosts.maya.api import lib
from openpype.hosts.maya.api.lib_rendersettings import RenderSettings
def convert_to_int_or_float(string_value):
@ -129,13 +130,13 @@ class ValidateRenderSettings(pyblish.api.InstancePlugin):
layer = instance.data['renderlayer']
cameras = instance.data.get("cameras", [])
# Get the node attributes for current renderer
attrs = lib.RENDER_ATTRS.get(renderer, lib.RENDER_ATTRS['default'])
# Prefix attribute can return None when a value was never set
prefix = lib.get_attr_in_layer(cls.ImagePrefixes[renderer],
layer=layer) or ""
padding = lib.get_attr_in_layer("{node}.{padding}".format(**attrs),
layer=layer)
padding = lib.get_attr_in_layer(
attr=RenderSettings.get_padding_attr(renderer),
layer=layer
)
anim_override = lib.get_attr_in_layer("defaultRenderGlobals.animation",
layer=layer)
@ -372,8 +373,6 @@ class ValidateRenderSettings(pyblish.api.InstancePlugin):
lib.set_attribute(data["attribute"], data["values"][0], node)
with lib.renderlayer(layer_node):
default = lib.RENDER_ATTRS['default']
render_attrs = lib.RENDER_ATTRS.get(renderer, default)
# Repair animation must be enabled
cmds.setAttr("defaultRenderGlobals.animation", True)
@ -391,15 +390,13 @@ class ValidateRenderSettings(pyblish.api.InstancePlugin):
default_prefix = default_prefix.replace(variant, "")
if renderer != "renderman":
node = render_attrs["node"]
prefix_attr = render_attrs["prefix"]
prefix_attr = RenderSettings.get_image_prefix_attr(renderer)
fname_prefix = default_prefix
cmds.setAttr("{}.{}".format(node, prefix_attr),
fname_prefix, type="string")
# Repair padding
padding_attr = render_attrs["padding"]
padding_attr = RenderSettings.get_padding_attr(renderer)
cmds.setAttr("{}.{}".format(node, padding_attr),
cls.DEFAULT_PADDING)
else:

View file

@ -701,6 +701,8 @@ or updating already created. Publishing will create OTIO file.
# parent time properties
"trackStartFrame": track_start_frame,
"timelineOffset": timeline_offset,
"isEditorial": True,
# creator_attributes
"creator_attributes": creator_attributes
}

View file

@ -27,6 +27,12 @@ class CollectSequenceFrameData(
if not self.is_active(instance.data):
return
# Editorial instances might not be in the database yet, so this would fail for them
is_editorial = instance.data.get("isEditorial")
if is_editorial:
self.log.debug("Instance is Editorial. Skipping.")
return
frame_data = self.get_frame_data_from_repre_sequence(instance)
if not frame_data:

View file

@ -30,12 +30,17 @@ class ValidateFrameRange(OptionalPyblishPluginMixin,
if not self.is_active(instance.data):
return
# Editorial instances might not be in the database yet, so this would fail for them
is_editorial = instance.data.get("isEditorial")
if is_editorial:
self.log.debug("Instance is Editorial. Skipping.")
return
if (self.skip_timelines_check and
any(re.search(pattern, instance.data["task"])
for pattern in self.skip_timelines_check)):
self.log.info("Skipping for {} task".format(instance.data["task"]))
asset_doc = instance.data["assetEntity"]
asset_data = asset_doc["data"]
frame_start = asset_data["frameStart"]
frame_end = asset_data["frameEnd"]

View file

@ -190,7 +190,7 @@ class LoadImage(plugin.Loader):
if pop_idx is None:
self.log.warning(
"Didn't found container in workfile containers. {}".format(
"Didn't find container in workfile containers. {}".format(
container
)
)

View file

@ -85,7 +85,7 @@ class AyonDeadlinePlugin(DeadlinePlugin):
}
for env, val in environment.items():
self.SetProcessEnvironmentVariable(env, val)
self.SetEnvironmentVariable(env, val)
exe_list = self.GetConfigEntry("AyonExecutable")
# clean '\ ' for MacOS pasting
@ -101,11 +101,11 @@ class AyonDeadlinePlugin(DeadlinePlugin):
if exe == "":
self.FailRender(
"Ayon executable was not found " +
"in the semicolon separated list " +
"\"" + ";".join(exe_list) + "\". " +
"The path to the render executable can be configured " +
"from the Plugin Configuration in the Deadline Monitor.")
"Ayon executable was not found in the semicolon separated "
"list: \"{}\". The path to the render executable can be "
"configured from the Plugin Configuration in the Deadline "
"Monitor.".format(exe_list)
)
return exe
def RenderArgument(self):

View file

@ -495,7 +495,10 @@ def inject_ayon_environment(deadlinePlugin):
"AYON_BUNDLE_NAME": ayon_bundle_name,
}
for env, val in environment.items():
# Add the env var for the Render Plugin that is about to render
deadlinePlugin.SetEnvironmentVariable(env, val)
# Add the env var for current calls to `DeadlinePlugin.RunProcess`
deadlinePlugin.SetProcessEnvironmentVariable(env, val)
args_str = subprocess.list2cmdline(args)
print(">>> Executing: {} {}".format(exe, args_str))

View file

@ -66,7 +66,7 @@ class TransferHierarchicalValues(ServerAction):
"items": [{
"type": "label",
"value": (
"Didn't found custom attributes"
"Didn't find custom attributes"
" that can be transferred."
)
}]

View file

@ -257,7 +257,7 @@ class NextTaskUpdate(BaseEvent):
new_task_name = mapping.get(old_status_name)
if not new_task_name:
self.log.debug(
"Didn't found mapping for status \"{}\".".format(
"Didn't find mapping for status \"{}\".".format(
task_status["name"]
)
)

View file

@ -387,7 +387,7 @@ class SyncToAvalonEvent(BaseEvent):
if not data:
# TODO logging
self.log.warning(
"Didn't found entity by key/value \"{}\" / \"{}\"".format(
"Didn't find entity by key/value \"{}\" / \"{}\"".format(
key, value
)
)

View file

@ -51,7 +51,7 @@ class ComponentOpen(BaseAction):
else:
return {
'success': False,
'message': "Didn't found file: " + fpath
'message': "Didn't find file: " + fpath
}
return {

View file

@ -169,7 +169,7 @@ class DeleteAssetSubset(BaseAction):
return {
"success": True,
"message": (
"Didn't found entities in avalon."
"Didn't find entities in avalon."
" You can use Ftrack's Delete button for the selection."
)
}

View file

@ -61,7 +61,7 @@ class Delivery(BaseAction):
return {
"success": False,
"message": (
"Didn't found project \"{}\" in avalon."
"Didn't find project \"{}\" in avalon."
).format(project_name)
}

View file

@ -29,7 +29,7 @@ class JobKiller(BaseAction):
if not jobs:
return {
"success": True,
"message": "Didn't found any running jobs"
"message": "Didn't find any running jobs"
}
# Collect user ids from jobs

View file

@ -10,6 +10,7 @@ from maya import cmds
import pyblish.api
from openpype.lib import requests_post
from openpype.hosts.maya.api import lib
from openpype.hosts.maya.api.lib_rendersettings import RenderSettings
from openpype.pipeline import legacy_io
from openpype.settings import get_system_settings
@ -68,10 +69,8 @@ def get_renderer_variables(renderlayer=None):
"""
renderer = lib.get_renderer(renderlayer or lib.get_current_renderlayer())
render_attrs = lib.RENDER_ATTRS.get(renderer, lib.RENDER_ATTRS["default"])
padding = cmds.getAttr("{}.{}".format(render_attrs["node"],
render_attrs["padding"]))
padding = cmds.getAttr(RenderSettings.get_padding_attr(renderer))
filename_0 = cmds.renderSettings(fullPath=True, firstImageName=True)[0]

View file

@ -58,21 +58,21 @@ class ExplicitCleanUp(pyblish.api.ContextPlugin):
# Store failed paths with exception
failed = []
# Store removed filepaths for logging
succeded_files = set()
succeeded_files = set()
# Remove file by file
for filepath in filepaths:
try:
os.remove(filepath)
succeded_files.add(filepath)
succeeded_files.add(filepath)
except Exception as exc:
failed.append((filepath, exc))
if succeded_files:
if succeeded_files:
self.log.info(
"Removed files:\n{}".format("\n".join(succeded_files))
"Removed files:\n{}".format("\n".join(sorted(succeeded_files)))
)
# Delete folders with it's content
# Delete folders with its content
succeeded = set()
for dirpath in dirpaths:
# Check if directory still exists
@ -87,17 +87,21 @@ class ExplicitCleanUp(pyblish.api.ContextPlugin):
if succeeded:
self.log.info(
"Removed directories:\n{}".format("\n".join(succeeded))
"Removed directories:\n{}".format(
"\n".join(sorted(succeeded))
)
)
# Prepare lines for report of failed removements
# Prepare lines for report of failed removals
lines = []
for filepath, exc in failed:
lines.append("{}: {}".format(filepath, str(exc)))
if lines:
self.log.warning(
"Failed to remove filepaths:\n{}".format("\n".join(lines))
"Failed to remove filepaths:\n{}".format(
"\n".join(sorted(lines))
)
)
def _remove_empty_dirs(self, empty_dirpaths):
@ -134,8 +138,8 @@ class ExplicitCleanUp(pyblish.api.ContextPlugin):
if to_skip_dirpaths:
self.log.debug(
"Skipped directories because contain files:\n{}".format(
"\n".join(to_skip_dirpaths)
"Skipped directories because they contain files:\n{}".format(
"\n".join(sorted(to_skip_dirpaths))
)
)
@ -147,6 +151,6 @@ class ExplicitCleanUp(pyblish.api.ContextPlugin):
if to_delete_dirpaths:
self.log.debug(
"Deleted empty directories:\n{}".format(
"\n".join(to_delete_dirpaths)
"\n".join(sorted(to_delete_dirpaths))
)
)

View file

@ -103,10 +103,10 @@ class CollectComment(
instance.data["comment"] = instance_comment
if instance_comment:
msg_end = " has comment set to: \"{}\"".format(
msg_end = "has comment set to: \"{}\"".format(
instance_comment)
else:
msg_end = " does not have set comment"
msg_end = "does not have set comment"
self.log.debug("Instance {} {}".format(instance_label, msg_end))
def cleanup_comment(self, comment):

View file

@ -54,6 +54,8 @@ class CollectRenderedFiles(pyblish.api.ContextPlugin):
staging_dir = data_object.get("stagingDir")
if staging_dir:
data_object["stagingDir"] = anatomy.fill_root(staging_dir)
self.log.debug("Filling stagingDir with root to: %s",
data_object["stagingDir"])
def _process_path(self, data, anatomy):
"""Process data of a single JSON publish metadata file.
@ -108,7 +110,6 @@ class CollectRenderedFiles(pyblish.api.ContextPlugin):
instance = self._context.create_instance(
instance_data.get("subset")
)
self.log.debug("Filling stagingDir...")
self._fill_staging_dir(instance_data, anatomy)
instance.data.update(instance_data)
@ -161,7 +162,7 @@ class CollectRenderedFiles(pyblish.api.ContextPlugin):
anatomy.project_name
))
self.log.debug("anatomy: {}".format(anatomy.roots))
self.log.debug("Anatomy roots: {}".format(anatomy.roots))
try:
session_is_set = False
for path in paths:

View file

@ -68,6 +68,12 @@ class CollectResourcesPath(pyblish.api.InstancePlugin):
]
def process(self, instance):
# Editorial instances might not be in the database yet, so this would fail for them
is_editorial = instance.data.get("isEditorial")
if is_editorial:
self.log.debug("Instance is Editorial. Skipping.")
return
anatomy = instance.context.data["anatomy"]
template_data = copy.deepcopy(instance.data["anatomyData"])

View file

@ -171,8 +171,6 @@ class ExtractBurnin(publish.Extractor):
).format(host_name, family, task_name, task_type, subset))
return
self.log.debug("profile: {}".format(profile))
# Pre-filter burnin definitions by instance families
burnin_defs = self.filter_burnins_defs(profile, instance)
if not burnin_defs:
@ -450,7 +448,7 @@ class ExtractBurnin(publish.Extractor):
filling burnin strings. `temp_data` are for repre pre-process
preparation.
"""
self.log.debug("Prepring basic data for burnins")
self.log.debug("Preparing basic data for burnins")
context = instance.context
version = instance.data.get("version")

View file

@ -326,7 +326,6 @@ class ExtractOIIOTranscode(publish.Extractor):
" | Task type \"{}\" | Subset \"{}\" "
).format(host_name, family, task_name, task_type, subset))
self.log.debug("profile: {}".format(profile))
return profile
def _repre_is_valid(self, repre):

View file

@ -143,7 +143,7 @@ class ExtractReview(pyblish.api.InstancePlugin):
custom_tags = repre.get("custom_tags")
if "review" not in tags:
self.log.debug((
"Repre: {} - Didn't found \"review\" in tags. Skipping"
"Repre: {} - Didn't find \"review\" in tags. Skipping"
).format(repre_name))
continue

View file

@ -200,7 +200,7 @@ class IntegrateThumbnails(pyblish.api.ContextPlugin):
if thumb_repre_doc is None:
self.log.debug(
"There is not representation with name \"thumbnail\""
"There is no representation with name \"thumbnail\""
)
return None

View file

@ -137,7 +137,7 @@ class IntegrateThumbnailsAYON(pyblish.api.ContextPlugin):
if thumb_repre_doc is None:
self.log.debug(
"There is not representation with name \"thumbnail\""
"There is no representation with name \"thumbnail\""
)
return None

View file

@ -19,16 +19,16 @@
"rules": {}
},
"viewer": {
"viewerProcess": "sRGB"
"viewerProcess": "sRGB (default)"
},
"baking": {
"viewerProcess": "rec709"
"viewerProcess": "rec709 (default)"
},
"workfile": {
"colorManagement": "Nuke",
"colorManagement": "OCIO",
"OCIO_config": "nuke-default",
"workingSpaceLUT": "linear",
"monitorLut": "sRGB"
"workingSpaceLUT": "scene_linear",
"monitorLut": "sRGB (default)"
},
"nodes": {
"requiredNodes": [
@ -76,7 +76,7 @@
{
"type": "text",
"name": "colorspace",
"value": "linear"
"value": "scene_linear"
},
{
"type": "bool",
@ -129,7 +129,7 @@
{
"type": "text",
"name": "colorspace",
"value": "linear"
"value": "scene_linear"
},
{
"type": "bool",
@ -177,7 +177,7 @@
{
"type": "text",
"name": "colorspace",
"value": "sRGB"
"value": "texture_paint"
},
{
"type": "bool",
@ -193,7 +193,7 @@
"inputs": [
{
"regex": "(beauty).*(?=.exr)",
"colorspace": "linear"
"colorspace": "scene_linear"
}
]
}

View file

@ -352,7 +352,7 @@ class DictConditionalEntity(ItemEntity):
break
if result_key is None:
raise ValueError("Didn't found child {}".format(child_obj))
raise ValueError("Didn't find child {}".format(child_obj))
return "/".join([self.path, result_key])

View file

@ -232,7 +232,7 @@ class DictImmutableKeysEntity(ItemEntity):
break
if result_key is None:
raise ValueError("Didn't found child {}".format(child_obj))
raise ValueError("Didn't find child {}".format(child_obj))
return "/".join([self.path, result_key])

View file

@ -284,7 +284,7 @@ class DictMutableKeysEntity(EndpointEntity):
break
if result_key is None:
raise ValueError("Didn't found child {}".format(child_obj))
raise ValueError("Didn't find child {}".format(child_obj))
return "/".join([self.path, result_key])

View file

@ -295,7 +295,7 @@ class ListStrictEntity(ItemEntity):
break
if result_idx is None:
raise ValueError("Didn't found child {}".format(child_obj))
raise ValueError("Didn't find child {}".format(child_obj))
return "/".join([self.path, str(result_idx)])

View file

@ -258,7 +258,7 @@ class ListEntity(EndpointEntity):
break
if result_idx is None:
raise ValueError("Didn't found child {}".format(child_obj))
raise ValueError("Didn't find child {}".format(child_obj))
return "/".join([self.path, str(result_idx)])

View file

@ -270,7 +270,7 @@ class RootEntity(BaseItemEntity):
for key, _child_entity in self.non_gui_children.items():
if _child_entity is child_entity:
return key
raise ValueError("Didn't found child {}".format(child_entity))
raise ValueError("Didn't find child {}".format(child_entity))
@property
def value(self):

View file

@ -13,6 +13,7 @@ from .hierarchy import (
HIERARCHY_MODEL_SENDER,
)
from .thumbnails import ThumbnailsModel
from .selection import HierarchyExpectedSelection
__all__ = (
@ -29,4 +30,6 @@ __all__ = (
"HIERARCHY_MODEL_SENDER",
"ThumbnailsModel",
"HierarchyExpectedSelection",
)

View file

@ -81,11 +81,11 @@ class NestedCacheItem:
"""Helper for cached items stored in nested structure.
Example:
>>> cache = NestedCacheItem(levels=2)
>>> cache = NestedCacheItem(levels=2, default_factory=lambda: 0)
>>> cache["a"]["b"].is_valid
False
>>> cache["a"]["b"].get_data()
None
0
>>> cache["a"]["b"] = 1
>>> cache["a"]["b"].is_valid
True
@ -167,8 +167,51 @@ class NestedCacheItem:
return self[key]
def cached_count(self):
"""Amount of cached items.
Returns:
int: Amount of cached items.
"""
return len(self._data_by_key)
def clear_key(self, key):
"""Clear cached item by key.
Args:
key (str): Key of the cache item.
"""
self._data_by_key.pop(key, None)
def clear_invalid(self):
"""Clear all invalid cache items.
Note:
To clear all cache items use 'reset'.
"""
changed = {}
children_are_nested = self._levels > 1
for key, cache in tuple(self._data_by_key.items()):
if children_are_nested:
output = cache.clear_invalid()
if output:
changed[key] = output
if not cache.cached_count():
self._data_by_key.pop(key)
elif not cache.is_valid:
changed[key] = cache.get_data()
self._data_by_key.pop(key)
return changed
def reset(self):
"""Reset cache."""
"""Reset cache.
Note:
To clear only invalid cache items use 'clear_invalid'.
"""
self._data_by_key = {}
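A short usage sketch of the new helpers; keys and values are illustrative only:

cache = NestedCacheItem(levels=2, default_factory=lambda: 0)
cache["a"]["b"] = 1
cache["a"]["c"] = 2

cache.cached_count()   # -> 1 (one top-level key, "a", is cached)

# Remove only items whose lifetime ran out; their data is returned and
# parents that became empty are dropped as well.
expired = cache.clear_invalid()

# Remove a single branch by key, or everything regardless of validity.
cache.clear_key("a")
cache.reset()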

View file

@ -0,0 +1,179 @@
class _ExampleController:
def emit_event(self, topic, data, **kwargs):
pass
class HierarchyExpectedSelection:
"""Base skeleton of expected selection model.
Expected selection model holds information about which entities should be
selected. The order of selection is very important, as a change of project
affects which folders are available in the folders UI, and so on. Because
of that, the expected selection model has to know which entity is the
current one to select.
If any of 'handle_project', 'handle_folder' or 'handle_task' is set to
'False', the expected selection data won't contain information about that
entity type at all. Also, if project is not handled, it is not necessary
to call 'expected_project_selected'. Same goes for folder and
task.
The model triggers an event with the 'expected_selection_changed' topic;
its data structure matches the output of 'get_expected_selection_data'.
Questions:
Require '_ExampleController' as abstraction?
Args:
controller (Any): Controller object. ('_ExampleController')
handle_project (bool): Whether the project can have an expected
selection.
handle_folder (bool): Whether the folder can have an expected
selection.
handle_task (bool): Whether the task can have an expected
selection.
"""
def __init__(
self,
controller,
handle_project=True,
handle_folder=True,
handle_task=True
):
self._project_name = None
self._folder_id = None
self._task_name = None
self._project_selected = True
self._folder_selected = True
self._task_selected = True
self._controller = controller
self._handle_project = handle_project
self._handle_folder = handle_folder
self._handle_task = handle_task
def set_expected_selection(
self,
project_name=None,
folder_id=None,
task_name=None
):
"""Sets expected selection.
Args:
project_name (Optional[str]): Project name.
folder_id (Optional[str]): Folder id.
task_name (Optional[str]): Task name.
"""
self._project_name = project_name
self._folder_id = folder_id
self._task_name = task_name
self._project_selected = not self._handle_project
self._folder_selected = not self._handle_folder
self._task_selected = not self._handle_task
self._emit_change()
def get_expected_selection_data(self):
project_current = False
folder_current = False
task_current = False
if not self._project_selected:
project_current = True
elif not self._folder_selected:
folder_current = True
elif not self._task_selected:
task_current = True
data = {}
if self._handle_project:
data["project"] = {
"name": self._project_name,
"current": project_current,
"selected": self._project_selected,
}
if self._handle_folder:
data["folder"] = {
"id": self._folder_id,
"current": folder_current,
"selected": self._folder_selected,
}
if self._handle_task:
data["task"] = {
"name": self._task_name,
"current": task_current,
"selected": self._task_selected,
}
return data
def is_expected_project_selected(self, project_name):
if not self._handle_project:
return True
return project_name == self._project_name and self._project_selected
def is_expected_folder_selected(self, folder_id):
if not self._handle_folder:
return True
return folder_id == self._folder_id and self._folder_selected
def expected_project_selected(self, project_name):
"""UI selected requested project.
Other entity types can be requested for selection.
Args:
project_name (str): Name of project.
"""
if project_name != self._project_name:
return False
self._project_selected = True
self._emit_change()
return True
def expected_folder_selected(self, folder_id):
"""UI selected requested folder.
Other entity types can be requested for selection.
Args:
folder_id (str): Folder id.
"""
if folder_id != self._folder_id:
return False
self._folder_selected = True
self._emit_change()
return True
def expected_task_selected(self, folder_id, task_name):
"""UI selected requested task.
Other entity types can be requested for selection.
Because task names are not unique across a project, a folder id is also
required to confirm the right task has been selected.
Args:
folder_id (str): Folder id.
task_name (str): Task name.
"""
if self._folder_id != folder_id:
return False
if task_name != self._task_name:
return False
self._task_selected = True
self._emit_change()
return True
def _emit_change(self):
self._controller.emit_event(
"expected_selection_changed",
self.get_expected_selection_data(),
)
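To illustrate the intended call flow, a minimal sketch with a hypothetical controller and ids; each UI widget confirms its part of the selection in hierarchy order and every confirmation re-emits 'expected_selection_changed':

model = HierarchyExpectedSelection(controller)

# Something requests a context change.
model.set_expected_selection(
    "projA", folder_id="f-123", task_name="modeling")

# The UI confirms entity by entity, top to bottom.
model.expected_project_selected("projA")
model.expected_folder_selected("f-123")
model.expected_task_selected("f-123", "modeling")

model.get_expected_selection_data()
# -> {"project": {...}, "folder": {"id": "f-123", ...}, "task": {...}}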

View file

@ -503,17 +503,6 @@ class ProjectsCombobox(QtWidgets.QWidget):
self._projects_model.set_current_context_project(project_name)
self._projects_proxy_model.invalidateFilter()
def _update_select_item_visiblity(self, **kwargs):
if not self._select_item_visible:
return
if "project_name" not in kwargs:
project_name = self.get_selected_project_name()
else:
project_name = kwargs.get("project_name")
# Hide the item if a project is selected
self._projects_model.set_selected_project(project_name)
def set_select_item_visible(self, visible):
self._select_item_visible = visible
self._projects_model.set_select_item_visible(visible)
@ -534,6 +523,17 @@ class ProjectsCombobox(QtWidgets.QWidget):
def set_library_filter_enabled(self, enabled):
return self._projects_proxy_model.set_library_filter_enabled(enabled)
def _update_select_item_visiblity(self, **kwargs):
if not self._select_item_visible:
return
if "project_name" not in kwargs:
project_name = self.get_selected_project_name()
else:
project_name = kwargs.get("project_name")
# Hide the item if a project is selected
self._projects_model.set_selected_project(project_name)
def _on_current_index_changed(self, idx):
if not self._listen_selection_change:
return

View file

@ -443,8 +443,11 @@ class AbstractWorkfilesBackend(AbstractWorkfilesCommon):
pass
@abstractmethod
def get_project_entity(self):
"""Get current project entity.
def get_project_entity(self, project_name):
"""Get project entity by name.
Args:
project_name (str): Project name.
Returns:
dict[str, Any]: Project entity data.
@ -453,10 +456,11 @@ class AbstractWorkfilesBackend(AbstractWorkfilesCommon):
pass
@abstractmethod
def get_folder_entity(self, folder_id):
def get_folder_entity(self, project_name, folder_id):
"""Get folder entity by id.
Args:
project_name (str): Project name.
folder_id (str): Folder id.
Returns:
@ -466,10 +470,11 @@ class AbstractWorkfilesBackend(AbstractWorkfilesCommon):
pass
@abstractmethod
def get_task_entity(self, task_id):
def get_task_entity(self, project_name, task_id):
"""Get task entity by id.
Args:
project_name (str): Project name.
task_id (str): Task id.
Returns:
@ -574,12 +579,10 @@ class AbstractWorkfilesFrontend(AbstractWorkfilesCommon):
pass
@abstractmethod
def set_selected_task(self, folder_id, task_id, task_name):
def set_selected_task(self, task_id, task_name):
"""Change selected task.
Args:
folder_id (Union[str, None]): Folder id or None if no folder
is selected.
task_id (Union[str, None]): Task id or None if no task
is selected.
task_name (Union[str, None]): Task name or None if no task
@ -711,21 +714,27 @@ class AbstractWorkfilesFrontend(AbstractWorkfilesCommon):
pass
@abstractmethod
def expected_representation_selected(self, representation_id):
def expected_representation_selected(
self, folder_id, task_name, representation_id
):
"""Expected representation was selected in UI.
Args:
folder_id (str): Folder id under which representation is.
task_name (str): Task name under which representation is.
representation_id (str): Representation id which was selected.
"""
pass
@abstractmethod
def expected_workfile_selected(self, workfile_path):
def expected_workfile_selected(self, folder_id, task_name, workfile_name):
"""Expected workfile was selected in UI.
Args:
workfile_path (str): Workfile path which was selected.
folder_id (str): Folder id under which workfile is.
task_name (str): Task name under which workfile is.
workfile_name (str): Workfile filename which was selected.
"""
pass
@ -738,7 +747,7 @@ class AbstractWorkfilesFrontend(AbstractWorkfilesCommon):
# Model functions
@abstractmethod
def get_folder_items(self, sender):
def get_folder_items(self, project_name, sender):
"""Folder items to visualize project hierarchy.
This function may trigger events 'folders.refresh.started' and
@ -746,6 +755,7 @@ class AbstractWorkfilesFrontend(AbstractWorkfilesCommon):
That may help to avoid re-refresh of folder items in UI elements.
Args:
project_name (str): Project name for which are folders requested.
sender (str): Who requested folder items.
Returns:
@ -756,7 +766,7 @@ class AbstractWorkfilesFrontend(AbstractWorkfilesCommon):
pass
@abstractmethod
def get_task_items(self, folder_id, sender):
def get_task_items(self, project_name, folder_id, sender):
"""Task items.
This function may trigger events 'tasks.refresh.started' and
@ -764,6 +774,7 @@ class AbstractWorkfilesFrontend(AbstractWorkfilesCommon):
That may help to avoid re-refresh of task items in UI elements.
Args:
project_name (str): Project name for which are tasks requested.
folder_id (str): Folder ID for which are tasks requested.
sender (str): Who requested folder items.
@ -892,22 +903,25 @@ class AbstractWorkfilesFrontend(AbstractWorkfilesCommon):
At this moment the only information which can be saved about
workfile is 'note'.
When 'note' is 'None', it is only validated that the workfile info
exists; if it does not, one is created with an empty note.
Args:
folder_id (str): Folder id.
task_id (str): Task id.
filepath (str): Workfile path.
note (str): Note.
note (Union[str, None]): Note.
"""
pass
# General commands
@abstractmethod
def refresh(self):
"""Refresh everything, models, ui etc.
def reset(self):
"""Reset everything, models, ui etc.
Triggers 'controller.refresh.started' event at the beginning and
'controller.refresh.finished' at the end.
Triggers 'controller.reset.started' event at the beginning and
'controller.reset.finished' at the end.
"""
pass
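A hedged sketch of a caller adapting to the project-aware signatures above; 'controller', the ids and names are placeholders, and BaseWorkfileController (below) is assumed to implement both the backend and frontend interfaces:

project_name = controller.get_current_project_name()

folder_items = controller.get_folder_items(project_name, sender="workfiles.window")
task_items = controller.get_task_items(project_name, folder_id, sender="workfiles.window")

folder_entity = controller.get_folder_entity(project_name, folder_id)
task_entity = controller.get_task_entity(project_name, task_id)

# Task selection no longer needs folder_id, while expected workfile and
# representation selections now carry their folder/task context.
controller.set_selected_task(task_id, task_name)
controller.expected_workfile_selected(folder_id, task_name, workfile_name)
controller.reset()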

View file

@ -16,93 +16,120 @@ from openpype.pipeline.context_tools import (
)
from openpype.pipeline.workfile import create_workdir_extra_folders
from openpype.tools.ayon_utils.models import (
HierarchyModel,
HierarchyExpectedSelection,
ProjectsModel,
)
from .abstract import (
AbstractWorkfilesFrontend,
AbstractWorkfilesBackend,
)
from .models import SelectionModel, EntitiesModel, WorkfilesModel
from .models import SelectionModel, WorkfilesModel
class ExpectedSelection:
def __init__(self):
self._folder_id = None
self._task_name = None
class WorkfilesToolExpectedSelection(HierarchyExpectedSelection):
def __init__(self, controller):
super(WorkfilesToolExpectedSelection, self).__init__(
controller,
handle_project=False,
handle_folder=True,
handle_task=True,
)
self._workfile_name = None
self._representation_id = None
self._folder_selected = True
self._task_selected = True
self._workfile_name_selected = True
self._representation_id_selected = True
self._workfile_selected = True
self._representation_selected = True
def set_expected_selection(
self,
folder_id,
task_name,
project_name=None,
folder_id=None,
task_name=None,
workfile_name=None,
representation_id=None
representation_id=None,
):
self._folder_id = folder_id
self._task_name = task_name
self._workfile_name = workfile_name
self._representation_id = representation_id
self._folder_selected = False
self._task_selected = False
self._workfile_name_selected = workfile_name is None
self._representation_id_selected = representation_id is None
self._workfile_selected = False
self._representation_selected = False
super(WorkfilesToolExpectedSelection, self).set_expected_selection(
project_name,
folder_id,
task_name,
)
def get_expected_selection_data(self):
return {
"folder_id": self._folder_id,
"task_name": self._task_name,
"workfile_name": self._workfile_name,
"representation_id": self._representation_id,
"folder_selected": self._folder_selected,
"task_selected": self._task_selected,
"workfile_name_selected": self._workfile_name_selected,
"representation_id_selected": self._representation_id_selected,
data = super(
WorkfilesToolExpectedSelection, self
).get_expected_selection_data()
_is_current = (
self._project_selected
and self._folder_selected
and self._task_selected
)
workfile_is_current = False
repre_is_current = False
if _is_current:
workfile_is_current = not self._workfile_selected
repre_is_current = not self._representation_selected
data["workfile"] = {
"name": self._workfile_name,
"current": workfile_is_current,
"selected": self._workfile_selected,
}
data["representation"] = {
"id": self._representation_id,
"current": repre_is_current,
"selected": self._workfile_selected,
}
return data
def is_expected_folder_selected(self, folder_id):
return folder_id == self._folder_id and self._folder_selected
def is_expected_workfile_selected(self, workfile_name):
return (
workfile_name == self._workfile_name
and self._workfile_selected
)
def is_expected_task_selected(self, folder_id, task_name):
if not self.is_expected_folder_selected(folder_id):
return False
return task_name == self._task_name and self._task_selected
def is_expected_representation_selected(self, representation_id):
return (
representation_id == self._representation_id
and self._representation_selected
)
def expected_folder_selected(self, folder_id):
def expected_workfile_selected(self, folder_id, task_name, workfile_name):
if folder_id != self._folder_id:
return False
self._folder_selected = True
return True
def expected_task_selected(self, folder_id, task_name):
if not self.is_expected_folder_selected(folder_id):
return False
if task_name != self._task_name:
return False
self._task_selected = True
return True
def expected_workfile_selected(self, folder_id, task_name, workfile_name):
if not self.is_expected_task_selected(folder_id, task_name):
return False
if workfile_name != self._workfile_name:
return False
self._workfile_name_selected = True
self._workfile_selected = True
self._emit_change()
return True
def expected_representation_selected(
self, folder_id, task_name, representation_id
):
if not self.is_expected_task_selected(folder_id, task_name):
if folder_id != self._folder_id:
return False
if task_name != self._task_name:
return False
if representation_id != self._representation_id:
return False
self._representation_id_selected = True
self._representation_selected = True
self._emit_change()
return True
@ -136,9 +163,9 @@ class BaseWorkfileController(
# Expected selected folder and task
self._expected_selection = self._create_expected_selection_obj()
self._selection_model = self._create_selection_model()
self._entities_model = self._create_entities_model()
self._projects_model = self._create_projects_model()
self._hierarchy_model = self._create_hierarchy_model()
self._workfiles_model = self._create_workfiles_model()
@property
@ -151,13 +178,16 @@ class BaseWorkfileController(
return self._host_is_valid
def _create_expected_selection_obj(self):
return ExpectedSelection()
return WorkfilesToolExpectedSelection(self)
def _create_projects_model(self):
return ProjectsModel(self)
def _create_selection_model(self):
return SelectionModel(self)
def _create_entities_model(self):
return EntitiesModel(self)
def _create_hierarchy_model(self):
return HierarchyModel(self)
def _create_workfiles_model(self):
return WorkfilesModel(self)
@ -193,14 +223,17 @@ class BaseWorkfileController(
self._project_anatomy = Anatomy(self.get_current_project_name())
return self._project_anatomy
def get_project_entity(self):
return self._entities_model.get_project_entity()
def get_project_entity(self, project_name):
return self._projects_model.get_project_entity(
project_name)
def get_folder_entity(self, folder_id):
return self._entities_model.get_folder_entity(folder_id)
def get_folder_entity(self, project_name, folder_id):
return self._hierarchy_model.get_folder_entity(
project_name, folder_id)
def get_task_entity(self, task_id):
return self._entities_model.get_task_entity(task_id)
def get_task_entity(self, project_name, task_id):
return self._hierarchy_model.get_task_entity(
project_name, task_id)
# ---------------------------------
# Implementation of abstract methods
@ -293,9 +326,8 @@ class BaseWorkfileController(
def get_selected_task_name(self):
return self._selection_model.get_selected_task_name()
def set_selected_task(self, folder_id, task_id, task_name):
return self._selection_model.set_selected_task(
folder_id, task_id, task_name)
def set_selected_task(self, task_id, task_name):
return self._selection_model.set_selected_task(task_id, task_name)
def get_selected_workfile_path(self):
return self._selection_model.get_selected_workfile_path()
@@ -318,7 +350,11 @@ class BaseWorkfileController(
representation_id=None
):
self._expected_selection.set_expected_selection(
folder_id, task_name, workfile_name, representation_id
self.get_current_project_name(),
folder_id,
task_name,
workfile_name,
representation_id
)
self._trigger_expected_selection_changed()
@@ -355,11 +391,13 @@ class BaseWorkfileController(
)
# Model functions
def get_folder_items(self, sender):
return self._entities_model.get_folder_items(sender)
def get_folder_items(self, project_name, sender=None):
return self._hierarchy_model.get_folder_items(project_name, sender)
def get_task_items(self, folder_id, sender):
return self._entities_model.get_tasks_items(folder_id, sender)
def get_task_items(self, project_name, folder_id, sender=None):
return self._hierarchy_model.get_task_items(
project_name, folder_id, sender
)
def get_workarea_dir_by_context(self, folder_id, task_id):
return self._workfiles_model.get_workarea_dir_by_context(
@@ -394,7 +432,9 @@ class BaseWorkfileController(
def get_published_file_items(self, folder_id, task_id):
task_name = None
if task_id:
task = self.get_task_entity(task_id)
task = self.get_task_entity(
self.get_current_project_name(), task_id
)
task_name = task.get("name")
return self._workfiles_model.get_published_file_items(
@@ -410,21 +450,27 @@ class BaseWorkfileController(
folder_id, task_id, filepath, note
)
def refresh(self):
def reset(self):
if not self._host_is_valid:
self._emit_event("controller.refresh.started")
self._emit_event("controller.refresh.finished")
self._emit_event("controller.reset.started")
self._emit_event("controller.reset.finished")
return
expected_folder_id = self.get_selected_folder_id()
expected_task_name = self.get_selected_task_name()
expected_work_path = self.get_selected_workfile_path()
expected_repre_id = self.get_selected_representation_id()
expected_work_name = None
if expected_work_path:
expected_work_name = os.path.basename(expected_work_path)
self._emit_event("controller.refresh.started")
self._emit_event("controller.reset.started")
context = self._get_host_current_context()
project_name = context["project_name"]
folder_name = context["asset_name"]
task_name = context["task_name"]
current_file = self.get_current_workfile()
folder_id = None
if folder_name:
folder = ayon_api.get_folder_by_name(project_name, folder_name)
@@ -439,18 +485,25 @@ class BaseWorkfileController(
self._current_folder_id = folder_id
self._current_task_name = task_name
self._projects_model.reset()
self._hierarchy_model.reset()
if not expected_folder_id:
expected_folder_id = folder_id
expected_task_name = task_name
if current_file:
expected_work_name = os.path.basename(current_file)
self._emit_event("controller.reset.finished")
self._expected_selection.set_expected_selection(
expected_folder_id, expected_task_name
project_name,
expected_folder_id,
expected_task_name,
expected_work_name,
expected_repre_id,
)
self._entities_model.refresh()
self._emit_event("controller.refresh.finished")
# Controller actions
def open_workfile(self, folder_id, task_id, filepath):
self._emit_event("open_workfile.started")
@@ -579,9 +632,9 @@ class BaseWorkfileController(
self, project_name, folder_id, task_id, folder=None, task=None
):
if folder is None:
folder = self.get_folder_entity(folder_id)
folder = self.get_folder_entity(project_name, folder_id)
if task is None:
task = self.get_task_entity(task_id)
task = self.get_task_entity(project_name, task_id)
# NOTE keys should be OpenPype compatible
return {
"project_name": project_name,
@@ -633,8 +686,8 @@ class BaseWorkfileController(
):
# Trigger before save event
project_name = self.get_current_project_name()
folder = self.get_folder_entity(folder_id)
task = self.get_task_entity(task_id)
folder = self.get_folder_entity(project_name, folder_id)
task = self.get_task_entity(project_name, task_id)
task_name = task["name"]
# QUESTION should the data be different for 'before' and 'after'?
@@ -674,6 +727,9 @@ class BaseWorkfileController(
else:
self._host_save_workfile(dst_filepath)
# Make sure workfile info exists
self.save_workfile_info(folder_id, task_id, dst_filepath, None)
# Create extra folders
create_workdir_extra_folders(
workdir,
@@ -685,4 +741,4 @@ class BaseWorkfileController(
# Trigger after save events
emit_event("workfile.save.after", event_data, source="workfiles.tool")
self.refresh()
self.reset()

View file

@@ -1,10 +1,8 @@
from .hierarchy import EntitiesModel
from .selection import SelectionModel
from .workfiles import WorkfilesModel
__all__ = (
"SelectionModel",
"EntitiesModel",
"WorkfilesModel",
)

View file

@@ -1,236 +0,0 @@
"""Hierarchy model that handles folders and tasks.
The model can be extracted for common usage. In that case it will be required
to add more handling of project name changes.
"""
import time
import collections
import contextlib
import ayon_api
from openpype.tools.ayon_workfiles.abstract import (
FolderItem,
TaskItem,
)
def _get_task_items_from_tasks(tasks):
"""
Returns:
TaskItem: Task item.
"""
output = []
for task in tasks:
folder_id = task["folderId"]
output.append(TaskItem(
task["id"],
task["name"],
task["type"],
folder_id,
None,
None
))
return output
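# Editor's sketch (not part of this commit): a hypothetical illustration of
# the input this helper expects, based on the fields requested in
# '_query_tasks' below ("id", "name", "label", "folderId", "type"). The ids
# and names are made up.
#
#     example_tasks = [
#         {
#             "id": "task-id-1",
#             "name": "modeling",
#             "type": "Modeling",
#             "folderId": "folder-id-1",
#         },
#     ]
#     task_items = _get_task_items_from_tasks(example_tasks)
#     # -> [TaskItem("task-id-1", "modeling", "Modeling", "folder-id-1",
#     #              None, None)]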
def _get_folder_item_from_hierarchy_item(item):
return FolderItem(
item["id"],
item["parentId"],
item["name"],
item["label"],
None,
None,
)
class CacheItem:
def __init__(self, lifetime=120):
self._lifetime = lifetime
self._last_update = None
self._data = None
@property
def is_valid(self):
if self._last_update is None:
return False
return (time.time() - self._last_update) < self._lifetime
def set_invalid(self, data=None):
self._last_update = None
self._data = data
def get_data(self):
return self._data
def update_data(self, data):
self._data = data
self._last_update = time.time()
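# Editor's note (illustrative, not part of this commit): 'CacheItem' is a
# simple time-based cache. 'is_valid' stays False until 'update_data' is
# called, then remains True until 'lifetime' seconds pass or 'set_invalid'
# resets it. A minimal usage sketch:
#
#     cache = CacheItem(lifetime=120)
#     cache.is_valid                 # False, nothing cached yet
#     cache.update_data({"folder-id": "item"})
#     cache.is_valid                 # True for the next 120 seconds
#     cache.set_invalid({})
#     cache.get_data()               # {} until it is refreshed again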
class EntitiesModel(object):
event_source = "entities.model"
def __init__(self, controller):
project_cache = CacheItem()
project_cache.set_invalid({})
folders_cache = CacheItem()
folders_cache.set_invalid({})
self._project_cache = project_cache
self._folders_cache = folders_cache
self._tasks_cache = {}
self._folders_by_id = {}
self._tasks_by_id = {}
self._folders_refreshing = False
self._tasks_refreshing = set()
self._controller = controller
def reset(self):
self._project_cache.set_invalid({})
self._folders_cache.set_invalid({})
self._tasks_cache = {}
self._folders_by_id = {}
self._tasks_by_id = {}
def refresh(self):
self._refresh_folders_cache()
def get_project_entity(self):
if not self._project_cache.is_valid:
project_name = self._controller.get_current_project_name()
project_entity = ayon_api.get_project(project_name)
self._project_cache.update_data(project_entity)
return self._project_cache.get_data()
def get_folder_items(self, sender):
if not self._folders_cache.is_valid:
self._refresh_folders_cache(sender)
return self._folders_cache.get_data()
def get_tasks_items(self, folder_id, sender):
if not folder_id:
return []
task_cache = self._tasks_cache.get(folder_id)
if task_cache is None or not task_cache.is_valid:
self._refresh_tasks_cache(folder_id, sender)
task_cache = self._tasks_cache.get(folder_id)
return task_cache.get_data()
def get_folder_entity(self, folder_id):
if folder_id not in self._folders_by_id:
entity = None
if folder_id:
project_name = self._controller.get_current_project_name()
entity = ayon_api.get_folder_by_id(project_name, folder_id)
self._folders_by_id[folder_id] = entity
return self._folders_by_id[folder_id]
def get_task_entity(self, task_id):
if task_id not in self._tasks_by_id:
entity = None
if task_id:
project_name = self._controller.get_current_project_name()
entity = ayon_api.get_task_by_id(project_name, task_id)
self._tasks_by_id[task_id] = entity
return self._tasks_by_id[task_id]
@contextlib.contextmanager
def _folder_refresh_event_manager(self, project_name, sender):
self._folders_refreshing = True
self._controller.emit_event(
"folders.refresh.started",
{"project_name": project_name, "sender": sender},
self.event_source
)
try:
yield
finally:
self._controller.emit_event(
"folders.refresh.finished",
{"project_name": project_name, "sender": sender},
self.event_source
)
self._folders_refreshing = False
@contextlib.contextmanager
def _task_refresh_event_manager(
self, project_name, folder_id, sender
):
self._tasks_refreshing.add(folder_id)
self._controller.emit_event(
"tasks.refresh.started",
{
"project_name": project_name,
"folder_id": folder_id,
"sender": sender,
},
self.event_source
)
try:
yield
finally:
self._controller.emit_event(
"tasks.refresh.finished",
{
"project_name": project_name,
"folder_id": folder_id,
"sender": sender,
},
self.event_source
)
self._tasks_refreshing.discard(folder_id)
def _refresh_folders_cache(self, sender=None):
if self._folders_refreshing:
return
project_name = self._controller.get_current_project_name()
with self._folder_refresh_event_manager(project_name, sender):
folder_items = self._query_folders(project_name)
self._folders_cache.update_data(folder_items)
def _query_folders(self, project_name):
hierarchy = ayon_api.get_folders_hierarchy(project_name)
folder_items = {}
        hierarchy_queue = collections.deque(hierarchy["hierarchy"])
        while hierarchy_queue:
            item = hierarchy_queue.popleft()
            folder_item = _get_folder_item_from_hierarchy_item(item)
            folder_items[folder_item.entity_id] = folder_item
            hierarchy_queue.extend(item["children"] or [])
return folder_items
def _refresh_tasks_cache(self, folder_id, sender=None):
if folder_id in self._tasks_refreshing:
return
project_name = self._controller.get_current_project_name()
with self._task_refresh_event_manager(
project_name, folder_id, sender
):
cache_item = self._tasks_cache.get(folder_id)
if cache_item is None:
cache_item = CacheItem()
self._tasks_cache[folder_id] = cache_item
task_items = self._query_tasks(project_name, folder_id)
cache_item.update_data(task_items)
def _query_tasks(self, project_name, folder_id):
tasks = list(ayon_api.get_tasks(
project_name,
folder_ids=[folder_id],
fields={"id", "name", "label", "folderId", "type"}
))
return _get_task_items_from_tasks(tasks)
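
The module docstring above notes that this model could be extracted for common usage if project name changes were handled. A minimal sketch of one way to do that, assuming per-project caches instead of a single current project; the class name and structure below are illustrative only and do not appear in this commit:

class MultiProjectFoldersCache:
    """Illustrative helper: one folders cache per project name."""

    def __init__(self, lifetime=120):
        self._lifetime = lifetime
        self._caches_by_project = {}

    def get_cache(self, project_name):
        # Create the project's cache lazily on first access.
        cache = self._caches_by_project.get(project_name)
        if cache is None:
            cache = CacheItem(self._lifetime)
            cache.set_invalid({})
            self._caches_by_project[project_name] = cache
        return cache

A model built on top of such a helper could accept 'project_name' in its getters, matching the new 'get_folder_items(project_name, sender=None)' signature introduced in the controller changes above.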

Some files were not shown because too many files have changed in this diff.