Merge remote-tracking branch 'origin/develop' into enhancement/OP-3075_houdini-new-publisher

commit 44a7e844b2
168 changed files with 5367 additions and 1314 deletions
@@ -85,6 +85,7 @@ from .context_tools import (
     register_host,
     registered_host,
     deregister_host,
+    get_process_id,
 )
 install = install_host
 uninstall = uninstall_host
@@ -5,6 +5,7 @@ import json
 import types
 import logging
 import platform
+import uuid

 import pyblish.api
 from pyblish.lib import MessageHandler
@@ -37,6 +38,7 @@ from . import (


 _is_installed = False
+_process_id = None
 _registered_root = {"_": ""}
 _registered_host = {"_": None}
 # Keep modules manager (and it's modules) in memory
@@ -546,3 +548,18 @@ def change_current_context(asset_doc, task_name, template_key=None):
     emit_event("taskChanged", data)

     return changes
+
+
+def get_process_id():
+    """Fake process id created on demand using uuid.
+
+    Can be used to create process specific folders in temp directory.
+
+    Returns:
+        str: Process id.
+    """
+
+    global _process_id
+    if _process_id is None:
+        _process_id = str(uuid.uuid4())
+    return _process_id
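The new get_process_id is exposed from openpype.pipeline (see the import hunk above and the workfile-lock hunks below). A minimal sketch of the use case named in its docstring, process-specific folders in the temp directory; the helper name and folder naming scheme are made up for illustration:

    import os
    import tempfile

    from openpype.pipeline import get_process_id


    def get_process_temp_dir():
        """Hypothetical helper: temp folder unique to this process.

        'get_process_id' returns the same uuid for the whole lifetime of
        the process, so repeated calls land in the same folder.
        """
        dirpath = os.path.join(
            tempfile.gettempdir(), "openpype_{}".format(get_process_id())
        )
        os.makedirs(dirpath, exist_ok=True)
        return dirpath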
@@ -1,6 +1,7 @@
 from .constants import (
     SUBSET_NAME_ALLOWED_SYMBOLS,
     DEFAULT_SUBSET_TEMPLATE,
+    PRE_CREATE_THUMBNAIL_KEY,
 )

 from .subset_name import (
@@ -24,6 +25,8 @@ from .creator_plugins import (
     deregister_creator_plugin,
     register_creator_plugin_path,
     deregister_creator_plugin_path,
+
+    cache_and_get_instances,
 )

 from .context import (
@@ -40,6 +43,7 @@ from .legacy_create import (
 __all__ = (
     "SUBSET_NAME_ALLOWED_SYMBOLS",
     "DEFAULT_SUBSET_TEMPLATE",
+    "PRE_CREATE_THUMBNAIL_KEY",

     "TaskNotSetError",
     "get_subset_name",
@@ -1,8 +1,10 @@
 SUBSET_NAME_ALLOWED_SYMBOLS = "a-zA-Z0-9_."
 DEFAULT_SUBSET_TEMPLATE = "{family}{Variant}"
+PRE_CREATE_THUMBNAIL_KEY = "thumbnail_source"


 __all__ = (
     "SUBSET_NAME_ALLOWED_SYMBOLS",
     "DEFAULT_SUBSET_TEMPLATE",
+    "PRE_CREATE_THUMBNAIL_KEY",
 )
@@ -1078,6 +1078,8 @@ class CreateContext:
         # Shared data across creators during collection phase
         self._collection_shared_data = None

+        self.thumbnail_paths_by_instance_id = {}
+
         # Trigger reset if was enabled
         if reset:
             self.reset(discover_publish_plugins)
@@ -1147,6 +1149,29 @@ class CreateContext:

         self.reset_finalization()

+    def refresh_thumbnails(self):
+        """Cleanup thumbnail paths.
+
+        Remove all thumbnail filepaths that are empty or lead to files which
+        does not exists or of instances that are not available anymore.
+        """
+
+        invalid = set()
+        for instance_id, path in self.thumbnail_paths_by_instance_id.items():
+            instance_available = True
+            if instance_id is not None:
+                instance_available = instance_id in self._instances_by_id
+
+            if (
+                not instance_available
+                or not path
+                or not os.path.exists(path)
+            ):
+                invalid.add(instance_id)
+
+        for instance_id in invalid:
+            self.thumbnail_paths_by_instance_id.pop(instance_id)
+
     def reset_preparation(self):
         """Prepare attributes that must be prepared/cleaned before reset."""
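To illustrate what refresh_thumbnails drops, a small sketch assuming create_context is an existing CreateContext instance (for example the one driving the publisher UI); the ids and paths are made up:

    create_context.thumbnail_paths_by_instance_id.update({
        "instance-id-with-valid-file": "/tmp/thumb_ok.jpg",
        "instance-id-removed-from-context": "/tmp/thumb_other.jpg",
        "instance-id-with-empty-path": "",
        None: "/tmp/context_thumbnail.jpg",
    })

    create_context.refresh_thumbnails()
    # Only entries whose path points to an existing file and whose instance
    # is still in the context survive. The None key is treated as always
    # available, so it can hold a thumbnail for the context itself as long
    # as its file exists.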
@@ -1158,6 +1183,7 @@ class CreateContext:

         # Stop access to collection shared data
         self._collection_shared_data = None
+        self.refresh_thumbnails()

     def reset_avalon_context(self):
         """Give ability to reset avalon context.
@@ -1,5 +1,6 @@
 import os
 import copy
+import collections

 from abc import (
     ABCMeta,
@@ -392,8 +393,9 @@ class BaseCreator:
             asset_doc(dict): Asset document for which subset is created.
             project_name(str): Project name.
             host_name(str): Which host creates subset.
-            instance(str|None): Object of 'CreatedInstance' for which is
-                subset name updated. Passed only on subset name update.
+            instance(CreatedInstance|None): Object of 'CreatedInstance' for
+                which is subset name updated. Passed only on subset name
+                update.
         """

         dynamic_data = self.get_dynamic_data(
@@ -442,6 +444,13 @@ class BaseCreator:

         return self.create_context.collection_shared_data

+    def set_instance_thumbnail_path(self, instance_id, thumbnail_path=None):
+        """Set path to thumbnail for instance."""
+
+        self.create_context.thumbnail_paths_by_instance_id[instance_id] = (
+            thumbnail_path
+        )
+

 class Creator(BaseCreator):
     """Creator that has more information for artist to show in UI.
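The helper is a thin wrapper around the mapping added to CreateContext in the earlier hunk. A sketch of the equivalence, assuming creator is a BaseCreator and instance a CreatedInstance from the same context (the path is made up):

    # Store a thumbnail for one instance...
    creator.set_instance_thumbnail_path(instance.id, "/tmp/thumb.jpg")

    # ...which is shorthand for writing into the shared mapping directly:
    creator.create_context.thumbnail_paths_by_instance_id[instance.id] = (
        "/tmp/thumb.jpg"
    )

    # Passing no path (the default None) effectively unsets the thumbnail;
    # 'refresh_thumbnails' drops the empty entry on the next reset.
    creator.set_instance_thumbnail_path(instance.id)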
@@ -468,6 +477,13 @@ class Creator(BaseCreator):
     # - in some cases it may confuse artists because it would not be used
     #   e.g. for buld creators
     create_allow_context_change = True
+    # A thumbnail can be passed in precreate attributes
+    # - if is set to True is should expect that a thumbnail path under key
+    #   PRE_CREATE_THUMBNAIL_KEY can be sent in data with precreate data
+    # - is disabled by default because the feature was added in later stages
+    #   and creators who would not expect PRE_CREATE_THUMBNAIL_KEY could
+    #   cause issues with instance data
+    create_allow_thumbnail = False

     # Precreate attribute definitions showed before creation
     # - similar to instance attribute definitions
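A sketch of a creator opting into pre-create thumbnails. The class, identifier and family are hypothetical; the create_allow_thumbnail flag, the PRE_CREATE_THUMBNAIL_KEY constant and the set_instance_thumbnail_path call come from the hunks above, while the CreatedInstance construction follows the usual new-publisher creator pattern (other methods required by BaseCreator are omitted):

    from openpype.pipeline.create import (
        Creator,
        CreatedInstance,
        PRE_CREATE_THUMBNAIL_KEY,
    )


    class ExampleThumbnailCreator(Creator):
        """Hypothetical creator that accepts a thumbnail on creation."""

        identifier = "example.thumbnail"
        family = "example"
        label = "Example"

        # Tell the publisher that pre-create data may contain a thumbnail
        # path under PRE_CREATE_THUMBNAIL_KEY ("thumbnail_source").
        create_allow_thumbnail = True

        def create(self, subset_name, instance_data, pre_create_data):
            # Pop the key so it does not end up in persisted instance data.
            thumbnail_path = pre_create_data.pop(PRE_CREATE_THUMBNAIL_KEY, None)

            instance = CreatedInstance(
                self.family, subset_name, instance_data, self
            )
            self._add_instance_to_context(instance)

            if thumbnail_path:
                self.set_instance_thumbnail_path(instance.id, thumbnail_path)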
@@ -660,3 +676,34 @@ def deregister_creator_plugin_path(path):
     deregister_plugin_path(BaseCreator, path)
     deregister_plugin_path(LegacyCreator, path)
     deregister_plugin_path(SubsetConvertorPlugin, path)
+
+
+def cache_and_get_instances(creator, shared_key, list_instances_func):
+    """Common approach to cache instances in shared data.
+
+    This is helper function which does not handle cases when a 'shared_key' is
+    used for different list instances functions. The same approach of caching
+    instances into 'collection_shared_data' is not required but is so common
+    we've decided to unify it to some degree.
+
+    Function 'list_instances_func' is called only if 'shared_key' is not
+    available in 'collection_shared_data' on creator.
+
+    Args:
+        creator (Creator): Plugin which would like to get instance data.
+        shared_key (str): Key under which output of function will be stored.
+        list_instances_func (Function): Function that will return instance data
+            if data were not yet stored under 'shared_key'.
+
+    Returns:
+        Dict[str, Dict[str, Any]]: Cached instances by creator identifier from
+            result of passed function.
+    """
+
+    if shared_key not in creator.collection_shared_data:
+        value = collections.defaultdict(list)
+        for instance in list_instances_func():
+            identifier = instance.get("creator_identifier")
+            value[identifier].append(instance)
+        creator.collection_shared_data[shared_key] = value
+    return creator.collection_shared_data[shared_key]
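A sketch of the intended call site for cache_and_get_instances, typically a host creator's instance collection. The shared key and the scene-listing function are hypothetical names; only the helper itself and the 'creator_identifier' grouping come from the hunk above:

    from openpype.pipeline.create import cache_and_get_instances

    # One key shared by all creators of the same host, so the scene is only
    # parsed once per publisher reset (the name is made up).
    SHARED_DATA_KEY = "example.host.instances"


    def _list_scene_instances():
        """Hypothetical host-specific scan of the open scene.

        Returns a list of dicts, each carrying a 'creator_identifier' key
        which 'cache_and_get_instances' uses to group the results.
        """
        return []


    def collect_instances_of(creator):
        """What a creator's 'collect_instances' would do with the cache."""
        instances_by_identifier = cache_and_get_instances(
            creator, SHARED_DATA_KEY, _list_scene_instances
        )
        # Only the data created by this particular plugin is relevant here.
        return instances_by_identifier[creator.identifier]

Any later creator asking for the same SHARED_DATA_KEY during one collection phase gets the cached mapping back without the scene being listed again.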
@@ -1,9 +1,9 @@
 import os
 import json
-from uuid import uuid4
 from openpype.lib import Logger, filter_profiles
 from openpype.lib.pype_info import get_workstation_info
 from openpype.settings import get_project_settings
+from openpype.pipeline import get_process_id


 def _read_lock_file(lock_filepath):
@@ -37,7 +37,7 @@ def is_workfile_locked_for_current_process(filepath):

     lock_filepath = _get_lock_file(filepath)
     data = _read_lock_file(lock_filepath)
-    return data["process_id"] == _get_process_id()
+    return data["process_id"] == get_process_id()


 def delete_workfile_lock(filepath):
@@ -49,7 +49,7 @@ def delete_workfile_lock(filepath):
 def create_workfile_lock(filepath):
     lock_filepath = _get_lock_file(filepath)
     info = get_workstation_info()
-    info["process_id"] = _get_process_id()
+    info["process_id"] = get_process_id()
     with open(lock_filepath, "w") as stream:
         json.dump(info, stream)

@@ -59,14 +59,6 @@ def remove_workfile_lock(filepath):
     delete_workfile_lock(filepath)


-def _get_process_id():
-    process_id = os.environ.get("OPENPYPE_PROCESS_ID")
-    if not process_id:
-        process_id = str(uuid4())
-        os.environ["OPENPYPE_PROCESS_ID"] = process_id
-    return process_id
-
-
 def is_workfile_lock_enabled(host_name, project_name, project_setting=None):
     if project_setting is None:
         project_setting = get_project_settings(project_name)
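The workfile lock module now reuses the shared process id instead of its own environment-variable based _get_process_id. A short sketch of the resulting behaviour, assuming the functions from the hunks above are in scope (the workfile path is made up):

    from openpype.pipeline import get_process_id

    create_workfile_lock("/path/to/workfile.ma")
    # The lock file stores the workstation info plus this process' uuid,
    # i.e. the value returned by get_process_id().

    # True here: the stored id matches get_process_id() of this process.
    # Any other OpenPype process gets its own uuid, so the same call
    # returns False there.
    is_workfile_locked_for_current_process("/path/to/workfile.ma")

One observable difference from the removed helper: the old id lived in the OPENPYPE_PROCESS_ID environment variable, which child processes inherit, while the new id is held in module memory and is unique per Python process.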