Mirror of https://github.com/ynput/ayon-core.git, synced 2025-12-25 21:32:15 +01:00
Merge branch 'develop' into feature/OP-2735_Flame-babyPublisher-shot-name-form-sequence-shot-name-attribute
This commit is contained in commit 783f7782b7.
7 changed files with 74 additions and 40 deletions
@@ -2,7 +2,7 @@ from openpype.api import Anatomy
 from openpype.lib import (
     PreLaunchHook,
     EnvironmentPrepData,
-    prepare_host_environments,
+    prepare_app_environments,
     prepare_context_environments
 )

@@ -14,14 +14,6 @@ class GlobalHostDataHook(PreLaunchHook):

     def execute(self):
         """Prepare global objects to `data` that will be used for sure."""
-        if not self.application.is_host:
-            self.log.info(
-                "Skipped hook {}. Application is not marked as host.".format(
-                    self.__class__.__name__
-                )
-            )
-            return
-
         self.prepare_global_data()

         if not self.data.get("asset_doc"):
@@ -49,7 +41,7 @@ class GlobalHostDataHook(PreLaunchHook):
             "log": self.log
         })

-        prepare_host_environments(temp_data, self.launch_context.env_group)
+        prepare_app_environments(temp_data, self.launch_context.env_group)
         prepare_context_environments(temp_data)

         temp_data.pop("log")
@@ -70,9 +70,9 @@ def get_resolve_module():
         sys.exit()
     # assign global var and return
     bmdvr = bmd.scriptapp("Resolve")
-    # bmdvf = bmd.scriptapp("Fusion")
+    bmdvf = bmd.scriptapp("Fusion")
     resolve.api.bmdvr = bmdvr
-    resolve.api.bmdvf = bmdvr.Fusion()
+    resolve.api.bmdvf = bmdvf
     log.info(("Assigning resolve module to "
               f"`pype.hosts.resolve.api.bmdvr`: {resolve.api.bmdvr}"))
     log.info(("Assigning resolve module to "
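For context, the corrected assignments can be exercised directly from DaVinci Resolve's scripting console. This sketch only runs inside Resolve, where the `DaVinciResolveScript` module that provides `scriptapp` is available:

```python
# Runs only inside DaVinci Resolve's Python environment.
import DaVinciResolveScript as bmd

bmdvr = bmd.scriptapp("Resolve")  # Resolve script app
bmdvf = bmd.scriptapp("Fusion")   # Fusion script app, now fetched directly
print(bmdvr, bmdvf)
```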
@@ -130,7 +130,7 @@ from .applications import (
     PostLaunchHook,

     EnvironmentPrepData,
-    prepare_host_environments,
+    prepare_app_environments,
     prepare_context_environments,
     get_app_environments_for_context,
     apply_project_environments_value
@@ -261,7 +261,7 @@ __all__ = [
     "PreLaunchHook",
     "PostLaunchHook",
     "EnvironmentPrepData",
-    "prepare_host_environments",
+    "prepare_app_environments",
     "prepare_context_environments",
     "get_app_environments_for_context",
     "apply_project_environments_value",
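Because `prepare_host_environments` is renamed rather than aliased, external code importing the old name breaks on update. A hedged migration shim (illustration only, not part of this commit) could bridge both versions:

```python
# Illustration only; assumes OpenPype is importable on sys.path.
try:
    from openpype.lib import prepare_app_environments
except ImportError:
    # Older OpenPype releases only expose the pre-rename name.
    from openpype.lib import (
        prepare_host_environments as prepare_app_environments
    )
```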
@ -1295,7 +1295,7 @@ def get_app_environments_for_context(
|
|||
"env": env
|
||||
})
|
||||
|
||||
prepare_host_environments(data, env_group)
|
||||
prepare_app_environments(data, env_group)
|
||||
prepare_context_environments(data, env_group)
|
||||
|
||||
# Discard avalon connection
|
||||
|
|
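Callers usually reach this chain through `get_app_environments_for_context`. A sketch of such a call follows, with made-up context values; the exact signature may differ between OpenPype versions, so treat the argument list as an assumption:

```python
# Hedged sketch; argument order is assumed, values are hypothetical.
from openpype.lib import get_app_environments_for_context

env = get_app_environments_for_context(
    "demo_project",  # project name
    "sh010",         # asset name
    "compositing",   # task name
    "nuke/13-0"      # full application name
)
print(env.get("AVALON_PROJECT"))
```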
@@ -1316,7 +1316,7 @@ def _merge_env(env, current_env):
     return result


-def prepare_host_environments(data, env_group=None, implementation_envs=True):
+def prepare_app_environments(data, env_group=None, implementation_envs=True):
     """Modify launch environments based on launched app and context.

     Args:
@@ -1474,6 +1474,22 @@ def prepare_context_environments(data, env_group=None):
         )

     app = data["app"]
+    context_env = {
+        "AVALON_PROJECT": project_doc["name"],
+        "AVALON_ASSET": asset_doc["name"],
+        "AVALON_TASK": task_name,
+        "AVALON_APP_NAME": app.full_name
+    }
+
+    log.debug(
+        "Context environments set:\n{}".format(
+            json.dumps(context_env, indent=4)
+        )
+    )
+    data["env"].update(context_env)
+    if not app.is_host:
+        return
+
     workdir_data = get_workdir_data(
         project_doc, asset_doc, task_name, app.host_name
     )
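To illustrate the new `log.debug` output, here is a standalone reproduction with hypothetical context values (none of these names come from the commit):

```python
# Standalone reproduction of the debug message; values are made up.
import json

context_env = {
    "AVALON_PROJECT": "demo_project",
    "AVALON_ASSET": "sh010",
    "AVALON_TASK": "compositing",
    "AVALON_APP_NAME": "nuke/13-0"
}
print("Context environments set:\n{}".format(
    json.dumps(context_env, indent=4)
))
```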
@@ -1504,20 +1520,8 @@ def prepare_context_environments(data, env_group=None):
             "Couldn't create workdir because: {}".format(str(exc))
         )

-    context_env = {
-        "AVALON_PROJECT": project_doc["name"],
-        "AVALON_ASSET": asset_doc["name"],
-        "AVALON_TASK": task_name,
-        "AVALON_APP": app.host_name,
-        "AVALON_APP_NAME": app.full_name,
-        "AVALON_WORKDIR": workdir
-    }
-    log.debug(
-        "Context environments set:\n{}".format(
-            json.dumps(context_env, indent=4)
-        )
-    )
-    data["env"].update(context_env)
+    data["env"]["AVALON_APP"] = app.host_name
+    data["env"]["AVALON_WORKDIR"] = workdir

     _prepare_last_workfile(data, workdir)

@@ -3,8 +3,9 @@ import uuid
 from datetime import datetime

 from bson.objectid import ObjectId
-from openpype_modules.ftrack.lib import BaseAction, statics_icon
 from avalon.api import AvalonMongoDB
+from openpype_modules.ftrack.lib import BaseAction, statics_icon
+from openpype_modules.ftrack.lib.avalon_sync import create_chunks


 class DeleteAssetSubset(BaseAction):
@@ -554,8 +555,8 @@ class DeleteAssetSubset(BaseAction):
             ftrack_proc_txt, ", ".join(ftrack_ids_to_delete)
         ))

-        entities_by_link_len = (
-            self._filter_entities_to_delete(ftrack_ids_to_delete, session)
+        entities_by_link_len = self._prepare_entities_before_delete(
+            ftrack_ids_to_delete, session
         )
         for link_len in sorted(entities_by_link_len.keys(), reverse=True):
             for entity in entities_by_link_len[link_len]:
@@ -609,7 +610,7 @@ class DeleteAssetSubset(BaseAction):

         return self.report_handle(report_messages, project_name, event)

-    def _filter_entities_to_delete(self, ftrack_ids_to_delete, session):
+    def _prepare_entities_before_delete(self, ftrack_ids_to_delete, session):
         """Filter children entities to avoid CircularDependencyError."""
         joined_ids_to_delete = ", ".join(
             ["\"{}\"".format(id) for id in ftrack_ids_to_delete]
@@ -638,6 +639,21 @@ class DeleteAssetSubset(BaseAction):
                 parent_ids_to_delete.append(entity["id"])
             to_delete_entities.append(entity)

+        # Unset 'task_id' from AssetVersion entities
+        # - when task is deleted the asset version is not marked for deletion
+        task_ids = set(
+            entity["id"]
+            for entity in to_delete_entities
+            if entity.entity_type.lower() == "task"
+        )
+        for chunk in create_chunks(task_ids):
+            asset_versions = session.query((
+                "select id, task_id from AssetVersion where task_id in ({})"
+            ).format(self.join_query_keys(chunk))).all()
+            for asset_version in asset_versions:
+                asset_version["task_id"] = None
+        session.commit()
+
         entities_by_link_len = collections.defaultdict(list)
         for entity in to_delete_entities:
             entities_by_link_len[len(entity["link"])].append(entity)
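The added block queries `AssetVersion` entities in id chunks, nulls out `task_id`, and commits once. Stripped of the action class, the same pattern looks roughly like this; it assumes a reachable ftrack server, and the task ids are hypothetical:

```python
# Minimal sketch of the chunked unlink pattern; not the action itself.
import ftrack_api

session = ftrack_api.Session()  # credentials come from FTRACK_* env vars
task_ids = ["11111111-aaaa", "22222222-bbbb"]  # hypothetical ids
joined = ", ".join('"{}"'.format(task_id) for task_id in task_ids)
asset_versions = session.query(
    "select id, task_id from AssetVersion where task_id in ({})".format(joined)
).all()
for asset_version in asset_versions:
    # Detach the version so deleting its task cannot cascade into it.
    asset_version["task_id"] = None
session.commit()
```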
@@ -33,6 +33,30 @@ CURRENT_DOC_SCHEMAS = {
 }


+def create_chunks(iterable, chunk_size=None):
+    """Separate iterable into multiple chunks by size.
+
+    Args:
+        iterable(list|tuple|set): Object that will be separated into chunks.
+        chunk_size(int): Size of one chunk. Default value is 200.
+
+    Returns:
+        list<list>: Chunked items.
+    """
+    chunks = []
+    if not iterable:
+        return chunks
+
+    tupled_iterable = tuple(iterable)
+    iterable_size = len(tupled_iterable)
+    if chunk_size is None:
+        chunk_size = 200
+
+    for idx in range(0, iterable_size, chunk_size):
+        chunks.append(tupled_iterable[idx:idx + chunk_size])
+    return chunks
+
+
 def check_regex(name, entity_type, in_schema=None, schema_patterns=None):
     schema_name = "asset-3.0"
     if in_schema:
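A quick usage sketch of the new helper; the sample list is made up, and the import assumes an OpenPype runtime where `openpype_modules` is importable:

```python
# Behaviour of create_chunks as defined above; sample data is made up.
from openpype_modules.ftrack.lib.avalon_sync import create_chunks

ids = ["id-{}".format(idx) for idx in range(450)]
chunks = create_chunks(ids)              # default chunk_size is 200
print([len(chunk) for chunk in chunks])  # [200, 200, 50]
print(create_chunks([], 10))             # [] - empty input short-circuits
```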
@@ -1147,10 +1171,8 @@ class SyncEntitiesFactory:
         ids_len = len(tupled_ids)
         chunk_size = int(5000 / ids_len)
         all_links = []
-        for idx in range(0, ids_len, chunk_size):
-            entity_ids_joined = join_query_keys(
-                tupled_ids[idx:idx + chunk_size]
-            )
+        for chunk in create_chunks(ftrack_ids, chunk_size):
+            entity_ids_joined = join_query_keys(chunk)

             all_links.extend(self.session.query((
                 "select from_id, to_id from"
@@ -12,11 +12,11 @@ import TabItem from '@theme/TabItem';

 For [AWS Thinkbox Deadline](https://www.awsthinkbox.com/deadline) support you need to set a few things up in both OpenPype and Deadline itself

-1. Deploy OpenPype executable to all nodes of Deadline farm. See [Install & Run](admin_use).
+1. Deploy OpenPype executable to all nodes of Deadline farm. See [Install & Run](admin_use.md)

 2. Enable Deadline Module in the [OpenPype Admin Settings](admin_settings_system.md#deadline).

-3. Set up *Deadline Web API service*. For more details on how to do it, see [here](https://docs.thinkboxsoftware.com/products/deadline/10.0/1_User%20Manual/manual/web-service.html).
+3. Set up *Deadline Web API service*. For more details on how to do it, see [here](https://docs.thinkboxsoftware.com/products/deadline/10.1/1_User%20Manual/manual/web-service.html).

 4. Point OpenPype to your deadline webservice URL in the [OpenPype Admin Settings](admin_settings_system.md#deadline).
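Before pointing OpenPype at the webservice, a quick reachability check can save debugging time. This sketch assumes the default port 8082, a local host, and the `/api/jobs` REST endpoint; adjust all three for your deployment:

```python
# Hypothetical smoke test; host, port and endpoint are assumptions.
import urllib.request

url = "http://localhost:8082/api/jobs"
with urllib.request.urlopen(url, timeout=5) as response:
    print(url, "->", response.status)
```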