Mirror of https://github.com/ynput/ayon-core.git (synced 2025-12-24 21:04:40 +01:00)
[Automated] Merged develop into main
This commit is contained in: a2b6bb6d96
28 changed files with 527 additions and 296 deletions
2  .github/workflows/nightly_merge.yml (vendored)
@@ -25,5 +25,5 @@ jobs:
- name: Invoke pre-release workflow
uses: benc-uk/workflow-dispatch@v1
with:
workflow: Nightly Prerelease
workflow: prerelease.yml
token: ${{ secrets.YNPUT_BOT_TOKEN }}
@@ -52,7 +52,7 @@ RUN yum -y install https://dl.fedoraproject.org/pub/epel/epel-release-latest-7.n

# we need to build our own patchelf
WORKDIR /temp-patchelf
RUN git clone https://github.com/NixOS/patchelf.git . \
RUN git clone -b 0.17.0 --single-branch https://github.com/NixOS/patchelf.git . \
&& source scl_source enable devtoolset-7 \
&& ./bootstrap.sh \
&& ./configure \
@@ -69,6 +69,19 @@ def convert_ids(in_ids):


def get_projects(active=True, inactive=False, fields=None):
    """Yield all project entity documents.

    Args:
        active (Optional[bool]): Include active projects. Defaults to True.
        inactive (Optional[bool]): Include inactive projects.
            Defaults to False.
        fields (Optional[Iterable[str]]): Fields that should be returned. All
            fields are returned if 'None' is passed.

    Yields:
        dict: Project entity data which can be reduced to specified 'fields'.
        None is returned if project with specified filters was not found.
    """
    mongodb = get_project_database()
    for project_name in mongodb.collection_names():
        if project_name in ("system.indexes",):
@ -81,6 +94,20 @@ def get_projects(active=True, inactive=False, fields=None):
|
|||
|
||||
|
||||
def get_project(project_name, active=True, inactive=True, fields=None):
|
||||
"""Return project entity document by project name.
|
||||
|
||||
Args:
|
||||
project_name (str): Name of project.
|
||||
active (Optional[bool]): Allow active project. Defaults to True.
|
||||
inactive (Optional[bool]): Allow inactive project. Defaults to True.
|
||||
fields (Optional[Iterable[str]]): Fields that should be returned. All
|
||||
fields are returned if 'None' is passed.
|
||||
|
||||
Returns:
|
||||
Union[Dict, None]: Project entity data which can be reduced to
|
||||
specified 'fields'. None is returned if project with specified
|
||||
filters was not found.
|
||||
"""
|
||||
# Skip if both are disabled
|
||||
if not active and not inactive:
|
||||
return None
|
||||
|
|
@ -124,17 +151,18 @@ def get_whole_project(project_name):
|
|||
|
||||
|
||||
def get_asset_by_id(project_name, asset_id, fields=None):
|
||||
"""Receive asset data by it's id.
|
||||
"""Receive asset data by its id.
|
||||
|
||||
Args:
|
||||
project_name (str): Name of project where to look for queried entities.
|
||||
asset_id (Union[str, ObjectId]): Asset's id.
|
||||
fields (Iterable[str]): Fields that should be returned. All fields are
|
||||
returned if 'None' is passed.
|
||||
fields (Optional[Iterable[str]]): Fields that should be returned. All
|
||||
fields are returned if 'None' is passed.
|
||||
|
||||
Returns:
|
||||
dict: Asset entity data.
|
||||
None: Asset was not found by id.
|
||||
Union[Dict, None]: Asset entity data which can be reduced to
|
||||
specified 'fields'. None is returned if asset with specified
|
||||
filters was not found.
|
||||
"""
|
||||
|
||||
asset_id = convert_id(asset_id)
|
||||
|
|
@ -147,17 +175,18 @@ def get_asset_by_id(project_name, asset_id, fields=None):
|
|||
|
||||
|
||||
def get_asset_by_name(project_name, asset_name, fields=None):
|
||||
"""Receive asset data by it's name.
|
||||
"""Receive asset data by its name.
|
||||
|
||||
Args:
|
||||
project_name (str): Name of project where to look for queried entities.
|
||||
asset_name (str): Asset's name.
|
||||
fields (Iterable[str]): Fields that should be returned. All fields are
|
||||
returned if 'None' is passed.
|
||||
fields (Optional[Iterable[str]]): Fields that should be returned. All
|
||||
fields are returned if 'None' is passed.
|
||||
|
||||
Returns:
|
||||
dict: Asset entity data.
|
||||
None: Asset was not found by name.
|
||||
Union[Dict, None]: Asset entity data which can be reduced to
|
||||
specified 'fields'. None is returned if asset with specified
|
||||
filters was not found.
|
||||
"""
|
||||
|
||||
if not asset_name:
|
||||
|
|
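The hunks above standardize these query helpers' docstrings: `fields` is documented as `Optional[Iterable[str]]` and the return value as `Union[Dict, None]`. As a rough illustration of how a caller might rely on that contract, here is a minimal sketch; the project name, asset name and queried fields are placeholder values, and it assumes the helpers are exposed from `openpype.client`, as other imports in this commit suggest.

```python
# Hedged usage sketch of the query helpers documented above.
# "demo_project", "sh010" and the field names are placeholder values.
from openpype.client import get_project, get_asset_by_name

project = get_project("demo_project", fields=["name", "data"])
if project is None:
    # Either the project does not exist or it is filtered out.
    raise RuntimeError("Project 'demo_project' was not found")

asset = get_asset_by_name("demo_project", "sh010", fields=["_id", "data"])
if asset is not None:
    print(asset["_id"], asset["data"].get("frameStart"))
```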
@ -195,8 +224,8 @@ def _get_assets(
|
|||
parent_ids (Iterable[Union[str, ObjectId]]): Parent asset ids.
|
||||
standard (bool): Query standard assets (type 'asset').
|
||||
archived (bool): Query archived assets (type 'archived_asset').
|
||||
fields (Iterable[str]): Fields that should be returned. All fields are
|
||||
returned if 'None' is passed.
|
||||
fields (Optional[Iterable[str]]): Fields that should be returned. All
|
||||
fields are returned if 'None' is passed.
|
||||
|
||||
Returns:
|
||||
Cursor: Query cursor as iterable which returns asset documents matching
|
||||
|
|
@ -261,8 +290,8 @@ def get_assets(
|
|||
asset_names (Iterable[str]): Name assets that should be found.
|
||||
parent_ids (Iterable[Union[str, ObjectId]]): Parent asset ids.
|
||||
archived (bool): Add also archived assets.
|
||||
fields (Iterable[str]): Fields that should be returned. All fields are
|
||||
returned if 'None' is passed.
|
||||
fields (Optional[Iterable[str]]): Fields that should be returned. All
|
||||
fields are returned if 'None' is passed.
|
||||
|
||||
Returns:
|
||||
Cursor: Query cursor as iterable which returns asset documents matching
|
||||
|
|
@ -300,8 +329,8 @@ def get_archived_assets(
|
|||
be found.
|
||||
asset_names (Iterable[str]): Name assets that should be found.
|
||||
parent_ids (Iterable[Union[str, ObjectId]]): Parent asset ids.
|
||||
fields (Iterable[str]): Fields that should be returned. All fields are
|
||||
returned if 'None' is passed.
|
||||
fields (Optional[Iterable[str]]): Fields that should be returned. All
|
||||
fields are returned if 'None' is passed.
|
||||
|
||||
Returns:
|
||||
Cursor: Query cursor as iterable which returns asset documents matching
|
||||
|
|
@ -356,17 +385,18 @@ def get_asset_ids_with_subsets(project_name, asset_ids=None):
|
|||
|
||||
|
||||
def get_subset_by_id(project_name, subset_id, fields=None):
|
||||
"""Single subset entity data by it's id.
|
||||
"""Single subset entity data by its id.
|
||||
|
||||
Args:
|
||||
project_name (str): Name of project where to look for queried entities.
|
||||
subset_id (Union[str, ObjectId]): Id of subset which should be found.
|
||||
fields (Iterable[str]): Fields that should be returned. All fields are
|
||||
returned if 'None' is passed.
|
||||
fields (Optional[Iterable[str]]): Fields that should be returned. All
|
||||
fields are returned if 'None' is passed.
|
||||
|
||||
Returns:
|
||||
None: If subset with specified filters was not found.
|
||||
Dict: Subset document which can be reduced to specified 'fields'.
|
||||
Union[Dict, None]: Subset entity data which can be reduced to
|
||||
specified 'fields'. None is returned if subset with specified
|
||||
filters was not found.
|
||||
"""
|
||||
|
||||
subset_id = convert_id(subset_id)
|
||||
|
|
@ -379,20 +409,19 @@ def get_subset_by_id(project_name, subset_id, fields=None):
|
|||
|
||||
|
||||
def get_subset_by_name(project_name, subset_name, asset_id, fields=None):
|
||||
"""Single subset entity data by it's name and it's version id.
|
||||
"""Single subset entity data by its name and its version id.
|
||||
|
||||
Args:
|
||||
project_name (str): Name of project where to look for queried entities.
|
||||
subset_name (str): Name of subset.
|
||||
asset_id (Union[str, ObjectId]): Id of parent asset.
|
||||
fields (Iterable[str]): Fields that should be returned. All fields are
|
||||
returned if 'None' is passed.
|
||||
fields (Optional[Iterable[str]]): Fields that should be returned. All
|
||||
fields are returned if 'None' is passed.
|
||||
|
||||
Returns:
|
||||
Union[None, Dict[str, Any]]: None if subset with specified filters was
|
||||
not found or dict subset document which can be reduced to
|
||||
specified 'fields'.
|
||||
|
||||
Union[Dict, None]: Subset entity data which can be reduced to
|
||||
specified 'fields'. None is returned if subset with specified
|
||||
filters was not found.
|
||||
"""
|
||||
if not subset_name:
|
||||
return None
|
||||
|
|
@ -434,8 +463,8 @@ def get_subsets(
|
|||
names_by_asset_ids (dict[ObjectId, List[str]]): Complex filtering
|
||||
using asset ids and list of subset names under the asset.
|
||||
archived (bool): Look for archived subsets too.
|
||||
fields (Iterable[str]): Fields that should be returned. All fields are
|
||||
returned if 'None' is passed.
|
||||
fields (Optional[Iterable[str]]): Fields that should be returned. All
|
||||
fields are returned if 'None' is passed.
|
||||
|
||||
Returns:
|
||||
Cursor: Iterable cursor yielding all matching subsets.
|
||||
|
|
@ -520,17 +549,18 @@ def get_subset_families(project_name, subset_ids=None):
|
|||
|
||||
|
||||
def get_version_by_id(project_name, version_id, fields=None):
|
||||
"""Single version entity data by it's id.
|
||||
"""Single version entity data by its id.
|
||||
|
||||
Args:
|
||||
project_name (str): Name of project where to look for queried entities.
|
||||
version_id (Union[str, ObjectId]): Id of version which should be found.
|
||||
fields (Iterable[str]): Fields that should be returned. All fields are
|
||||
returned if 'None' is passed.
|
||||
fields (Optional[Iterable[str]]): Fields that should be returned. All
|
||||
fields are returned if 'None' is passed.
|
||||
|
||||
Returns:
|
||||
None: If version with specified filters was not found.
|
||||
Dict: Version document which can be reduced to specified 'fields'.
|
||||
Union[Dict, None]: Version entity data which can be reduced to
|
||||
specified 'fields'. None is returned if version with specified
|
||||
filters was not found.
|
||||
"""
|
||||
|
||||
version_id = convert_id(version_id)
|
||||
|
|
@ -546,18 +576,19 @@ def get_version_by_id(project_name, version_id, fields=None):
|
|||
|
||||
|
||||
def get_version_by_name(project_name, version, subset_id, fields=None):
|
||||
"""Single version entity data by it's name and subset id.
|
||||
"""Single version entity data by its name and subset id.
|
||||
|
||||
Args:
|
||||
project_name (str): Name of project where to look for queried entities.
|
||||
version (int): name of version entity (it's version).
|
||||
version (int): name of version entity (its version).
|
||||
subset_id (Union[str, ObjectId]): Id of version which should be found.
|
||||
fields (Iterable[str]): Fields that should be returned. All fields are
|
||||
returned if 'None' is passed.
|
||||
fields (Optional[Iterable[str]]): Fields that should be returned. All
|
||||
fields are returned if 'None' is passed.
|
||||
|
||||
Returns:
|
||||
None: If version with specified filters was not found.
|
||||
Dict: Version document which can be reduced to specified 'fields'.
|
||||
Union[Dict, None]: Version entity data which can be reduced to
|
||||
specified 'fields'. None is returned if version with specified
|
||||
filters was not found.
|
||||
"""
|
||||
|
||||
subset_id = convert_id(subset_id)
|
||||
|
|
@ -574,7 +605,7 @@ def get_version_by_name(project_name, version, subset_id, fields=None):
|
|||
|
||||
|
||||
def version_is_latest(project_name, version_id):
|
||||
"""Is version the latest from it's subset.
|
||||
"""Is version the latest from its subset.
|
||||
|
||||
Note:
|
||||
Hero versions are considered as latest.
|
||||
|
|
@ -680,8 +711,8 @@ def get_versions(
|
|||
versions (Iterable[int]): Version names (as integers).
|
||||
Filter ignored if 'None' is passed.
|
||||
hero (bool): Look also for hero versions.
|
||||
fields (Iterable[str]): Fields that should be returned. All fields are
|
||||
returned if 'None' is passed.
|
||||
fields (Optional[Iterable[str]]): Fields that should be returned. All
|
||||
fields are returned if 'None' is passed.
|
||||
|
||||
Returns:
|
||||
Cursor: Iterable cursor yielding all matching versions.
|
||||
|
|
@ -705,12 +736,13 @@ def get_hero_version_by_subset_id(project_name, subset_id, fields=None):
|
|||
project_name (str): Name of project where to look for queried entities.
|
||||
subset_id (Union[str, ObjectId]): Subset id under which
|
||||
is hero version.
|
||||
fields (Iterable[str]): Fields that should be returned. All fields are
|
||||
returned if 'None' is passed.
|
||||
fields (Optional[Iterable[str]]): Fields that should be returned. All
|
||||
fields are returned if 'None' is passed.
|
||||
|
||||
Returns:
|
||||
None: If hero version for passed subset id does not exists.
|
||||
Dict: Hero version entity data.
|
||||
Union[Dict, None]: Hero version entity data which can be reduced to
|
||||
specified 'fields'. None is returned if hero version with specified
|
||||
filters was not found.
|
||||
"""
|
||||
|
||||
subset_id = convert_id(subset_id)
|
||||
|
|
@ -730,17 +762,18 @@ def get_hero_version_by_subset_id(project_name, subset_id, fields=None):
|
|||
|
||||
|
||||
def get_hero_version_by_id(project_name, version_id, fields=None):
|
||||
"""Hero version by it's id.
|
||||
"""Hero version by its id.
|
||||
|
||||
Args:
|
||||
project_name (str): Name of project where to look for queried entities.
|
||||
version_id (Union[str, ObjectId]): Hero version id.
|
||||
fields (Iterable[str]): Fields that should be returned. All fields are
|
||||
returned if 'None' is passed.
|
||||
fields (Optional[Iterable[str]]): Fields that should be returned. All
|
||||
fields are returned if 'None' is passed.
|
||||
|
||||
Returns:
|
||||
None: If hero version with passed id was not found.
|
||||
Dict: Hero version entity data.
|
||||
Union[Dict, None]: Hero version entity data which can be reduced to
|
||||
specified 'fields'. None is returned if hero version with specified
|
||||
filters was not found.
|
||||
"""
|
||||
|
||||
version_id = convert_id(version_id)
|
||||
|
|
@ -773,8 +806,8 @@ def get_hero_versions(
|
|||
should look for hero versions. Filter ignored if 'None' is passed.
|
||||
version_ids (Iterable[Union[str, ObjectId]]): Hero version ids. Filter
|
||||
ignored if 'None' is passed.
|
||||
fields (Iterable[str]): Fields that should be returned. All fields are
|
||||
returned if 'None' is passed.
|
||||
fields (Optional[Iterable[str]]): Fields that should be returned. All
|
||||
fields are returned if 'None' is passed.
|
||||
|
||||
Returns:
|
||||
Cursor|list: Iterable yielding hero versions matching passed filters.
|
||||
|
|
@ -801,8 +834,8 @@ def get_output_link_versions(project_name, version_id, fields=None):
|
|||
project_name (str): Name of project where to look for queried entities.
|
||||
version_id (Union[str, ObjectId]): Version id which can be used
|
||||
as input link for other versions.
|
||||
fields (Iterable[str]): Fields that should be returned. All fields are
|
||||
returned if 'None' is passed.
|
||||
fields (Optional[Iterable[str]]): Fields that should be returned. All
|
||||
fields are returned if 'None' is passed.
|
||||
|
||||
Returns:
|
||||
Iterable: Iterable cursor yielding versions that are used as input
|
||||
|
|
@ -828,8 +861,8 @@ def get_last_versions(project_name, subset_ids, fields=None):
|
|||
Args:
|
||||
project_name (str): Name of project where to look for queried entities.
|
||||
subset_ids (Iterable[Union[str, ObjectId]]): List of subset ids.
|
||||
fields (Iterable[str]): Fields that should be returned. All fields are
|
||||
returned if 'None' is passed.
|
||||
fields (Optional[Iterable[str]]): Fields that should be returned. All
|
||||
fields are returned if 'None' is passed.
|
||||
|
||||
Returns:
|
||||
dict[ObjectId, int]: Key is subset id and value is last version name.
|
||||
|
|
@ -913,12 +946,13 @@ def get_last_version_by_subset_id(project_name, subset_id, fields=None):
|
|||
Args:
|
||||
project_name (str): Name of project where to look for queried entities.
|
||||
subset_id (Union[str, ObjectId]): Id of version which should be found.
|
||||
fields (Iterable[str]): Fields that should be returned. All fields are
|
||||
returned if 'None' is passed.
|
||||
fields (Optional[Iterable[str]]): Fields that should be returned. All
|
||||
fields are returned if 'None' is passed.
|
||||
|
||||
Returns:
|
||||
None: If version with specified filters was not found.
|
||||
Dict: Version document which can be reduced to specified 'fields'.
|
||||
Union[Dict, None]: Version entity data which can be reduced to
|
||||
specified 'fields'. None is returned if version with specified
|
||||
filters was not found.
|
||||
"""
|
||||
|
||||
subset_id = convert_id(subset_id)
|
||||
|
|
@ -945,12 +979,13 @@ def get_last_version_by_subset_name(
|
|||
asset_id (Union[str, ObjectId]): Asset id which is parent of passed
|
||||
subset name.
|
||||
asset_name (str): Asset name which is parent of passed subset name.
|
||||
fields (Iterable[str]): Fields that should be returned. All fields are
|
||||
returned if 'None' is passed.
|
||||
fields (Optional[Iterable[str]]): Fields that should be returned. All
|
||||
fields are returned if 'None' is passed.
|
||||
|
||||
Returns:
|
||||
None: If version with specified filters was not found.
|
||||
Dict: Version document which can be reduced to specified 'fields'.
|
||||
Union[Dict, None]: Version entity data which can be reduced to
|
||||
specified 'fields'. None is returned if version with specified
|
||||
filters was not found.
|
||||
"""
|
||||
|
||||
if not asset_id and not asset_name:
|
||||
|
|
@ -972,18 +1007,18 @@ def get_last_version_by_subset_name(
|
|||
|
||||
|
||||
def get_representation_by_id(project_name, representation_id, fields=None):
|
||||
"""Representation entity data by it's id.
|
||||
"""Representation entity data by its id.
|
||||
|
||||
Args:
|
||||
project_name (str): Name of project where to look for queried entities.
|
||||
representation_id (Union[str, ObjectId]): Representation id.
|
||||
fields (Iterable[str]): Fields that should be returned. All fields are
|
||||
returned if 'None' is passed.
|
||||
fields (Optional[Iterable[str]]): Fields that should be returned. All
|
||||
fields are returned if 'None' is passed.
|
||||
|
||||
Returns:
|
||||
None: If representation with specified filters was not found.
|
||||
Dict: Representation entity data which can be reduced
|
||||
to specified 'fields'.
|
||||
Union[Dict, None]: Representation entity data which can be reduced to
|
||||
specified 'fields'. None is returned if representation with
|
||||
specified filters was not found.
|
||||
"""
|
||||
|
||||
if not representation_id:
|
||||
|
|
@ -1004,19 +1039,19 @@ def get_representation_by_id(project_name, representation_id, fields=None):
|
|||
def get_representation_by_name(
|
||||
project_name, representation_name, version_id, fields=None
|
||||
):
|
||||
"""Representation entity data by it's name and it's version id.
|
||||
"""Representation entity data by its name and its version id.
|
||||
|
||||
Args:
|
||||
project_name (str): Name of project where to look for queried entities.
|
||||
representation_name (str): Representation name.
|
||||
version_id (Union[str, ObjectId]): Id of parent version entity.
|
||||
fields (Iterable[str]): Fields that should be returned. All fields are
|
||||
returned if 'None' is passed.
|
||||
fields (Optional[Iterable[str]]): Fields that should be returned. All
|
||||
fields are returned if 'None' is passed.
|
||||
|
||||
Returns:
|
||||
None: If representation with specified filters was not found.
|
||||
Dict: Representation entity data which can be reduced
|
||||
to specified 'fields'.
|
||||
Union[dict[str, Any], None]: Representation entity data which can be
|
||||
reduced to specified 'fields'. None is returned if representation
|
||||
with specified filters was not found.
|
||||
"""
|
||||
|
||||
version_id = convert_id(version_id)
|
||||
|
|
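Taken together, the subset, version and representation helpers documented above form a small query chain. A hedged sketch of that chain under the documented signatures; the subset and representation names are placeholders, error handling is omitted, and the `openpype.client` import path follows the import seen later in this commit.

```python
# Hedged sketch: walking asset -> subset -> last version -> representation.
# Each call may return None; checks are omitted for brevity.
from openpype.client import (
    get_asset_by_name,
    get_subset_by_name,
    get_last_version_by_subset_id,
    get_representation_by_name,
)

project_name = "demo_project"  # placeholder
asset = get_asset_by_name(project_name, "sh010", fields=["_id"])
subset = get_subset_by_name(project_name, "modelMain", asset["_id"])
version = get_last_version_by_subset_id(project_name, subset["_id"])
repre = get_representation_by_name(project_name, "abc", version["_id"])
if repre is None:
    print("No 'abc' representation found for the last version")
```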
@ -1202,8 +1237,8 @@ def get_representations(
|
|||
names_by_version_ids (dict[ObjectId, list[str]]): Complex filtering
|
||||
using version ids and list of names under the version.
|
||||
archived (bool): Output will also contain archived representations.
|
||||
fields (Iterable[str]): Fields that should be returned. All fields are
|
||||
returned if 'None' is passed.
|
||||
fields (Optional[Iterable[str]]): Fields that should be returned. All
|
||||
fields are returned if 'None' is passed.
|
||||
|
||||
Returns:
|
||||
Cursor: Iterable cursor yielding all matching representations.
|
||||
|
|
@ -1247,8 +1282,8 @@ def get_archived_representations(
|
|||
representation context fields.
|
||||
names_by_version_ids (dict[ObjectId, List[str]]): Complex filtering
|
||||
using version ids and list of names under the version.
|
||||
fields (Iterable[str]): Fields that should be returned. All fields are
|
||||
returned if 'None' is passed.
|
||||
fields (Optional[Iterable[str]]): Fields that should be returned. All
|
||||
fields are returned if 'None' is passed.
|
||||
|
||||
Returns:
|
||||
Cursor: Iterable cursor yielding all matching representations.
|
||||
|
|
@ -1377,8 +1412,8 @@ def get_thumbnail_id_from_source(project_name, src_type, src_id):
|
|||
src_id (Union[str, ObjectId]): Id of source entity.
|
||||
|
||||
Returns:
|
||||
ObjectId: Thumbnail id assigned to entity.
|
||||
None: If Source entity does not have any thumbnail id assigned.
|
||||
Union[ObjectId, None]: Thumbnail id assigned to entity. If Source
|
||||
entity does not have any thumbnail id assigned.
|
||||
"""
|
||||
|
||||
if not src_type or not src_id:
|
||||
|
|
@ -1397,14 +1432,14 @@ def get_thumbnails(project_name, thumbnail_ids, fields=None):
|
|||
"""Receive thumbnails entity data.
|
||||
|
||||
Thumbnail entity can be used to receive binary content of thumbnail based
|
||||
on it's content and ThumbnailResolvers.
|
||||
on its content and ThumbnailResolvers.
|
||||
|
||||
Args:
|
||||
project_name (str): Name of project where to look for queried entities.
|
||||
thumbnail_ids (Iterable[Union[str, ObjectId]]): Ids of thumbnail
|
||||
entities.
|
||||
fields (Iterable[str]): Fields that should be returned. All fields are
|
||||
returned if 'None' is passed.
|
||||
fields (Optional[Iterable[str]]): Fields that should be returned. All
|
||||
fields are returned if 'None' is passed.
|
||||
|
||||
Returns:
|
||||
cursor: Cursor of queried documents.
|
||||
|
|
@ -1429,12 +1464,13 @@ def get_thumbnail(project_name, thumbnail_id, fields=None):
|
|||
Args:
|
||||
project_name (str): Name of project where to look for queried entities.
|
||||
thumbnail_id (Union[str, ObjectId]): Id of thumbnail entity.
|
||||
fields (Iterable[str]): Fields that should be returned. All fields are
|
||||
returned if 'None' is passed.
|
||||
fields (Optional[Iterable[str]]): Fields that should be returned. All
|
||||
fields are returned if 'None' is passed.
|
||||
|
||||
Returns:
|
||||
None: If thumbnail with specified id was not found.
|
||||
Dict: Thumbnail entity data which can be reduced to specified 'fields'.
|
||||
Union[Dict, None]: Thumbnail entity data which can be reduced to
|
||||
specified 'fields'.None is returned if thumbnail with specified
|
||||
filters was not found.
|
||||
"""
|
||||
|
||||
if not thumbnail_id:
|
||||
|
|
@ -1458,8 +1494,13 @@ def get_workfile_info(
|
|||
project_name (str): Name of project where to look for queried entities.
|
||||
asset_id (Union[str, ObjectId]): Id of asset entity.
|
||||
task_name (str): Task name on asset.
|
||||
fields (Iterable[str]): Fields that should be returned. All fields are
|
||||
returned if 'None' is passed.
|
||||
fields (Optional[Iterable[str]]): Fields that should be returned. All
|
||||
fields are returned if 'None' is passed.
|
||||
|
||||
Returns:
|
||||
Union[Dict, None]: Workfile entity data which can be reduced to
|
||||
specified 'fields'.None is returned if workfile with specified
|
||||
filters was not found.
|
||||
"""
|
||||
|
||||
if not asset_id or not task_name or not filename:
@ -104,3 +104,6 @@ class AbcLoader(load.LoaderPlugin):
|
|||
|
||||
node = container["node"]
|
||||
node.destroy()
|
||||
|
||||
def switch(self, container, representation):
|
||||
self.update(container, representation)
@ -73,3 +73,6 @@ class AbcArchiveLoader(load.LoaderPlugin):
|
|||
|
||||
node = container["node"]
|
||||
node.destroy()
|
||||
|
||||
def switch(self, container, representation):
|
||||
self.update(container, representation)
@ -106,3 +106,6 @@ class BgeoLoader(load.LoaderPlugin):
|
|||
|
||||
node = container["node"]
|
||||
node.destroy()
|
||||
|
||||
def switch(self, container, representation):
|
||||
self.update(container, representation)
@ -192,3 +192,6 @@ class CameraLoader(load.LoaderPlugin):
|
|||
|
||||
new_node.moveToGoodPosition()
|
||||
return new_node
|
||||
|
||||
def switch(self, container, representation):
|
||||
self.update(container, representation)
@ -125,3 +125,6 @@ class ImageLoader(load.LoaderPlugin):
|
|||
prefix, padding, suffix = first_fname.rsplit(".", 2)
|
||||
fname = ".".join([prefix, "$F{}".format(len(padding)), suffix])
|
||||
return os.path.join(root, fname).replace("\\", "/")
|
||||
|
||||
def switch(self, container, representation):
|
||||
self.update(container, representation)
@ -79,3 +79,6 @@ class USDSublayerLoader(load.LoaderPlugin):
|
|||
|
||||
node = container["node"]
|
||||
node.destroy()
|
||||
|
||||
def switch(self, container, representation):
|
||||
self.update(container, representation)
@ -79,3 +79,6 @@ class USDReferenceLoader(load.LoaderPlugin):
|
|||
|
||||
node = container["node"]
|
||||
node.destroy()
|
||||
|
||||
def switch(self, container, representation):
|
||||
self.update(container, representation)
@ -102,3 +102,6 @@ class VdbLoader(load.LoaderPlugin):
|
|||
|
||||
node = container["node"]
|
||||
node.destroy()
|
||||
|
||||
def switch(self, container, representation):
|
||||
self.update(container, representation)
@ -17,6 +17,7 @@ class CollectHoudiniReviewData(pyblish.api.InstancePlugin):
|
|||
# which isn't the actual frame range that this instance renders.
|
||||
instance.data["handleStart"] = 0
|
||||
instance.data["handleEnd"] = 0
|
||||
instance.data["fps"] = instance.context.data["fps"]
|
||||
|
||||
# Get the camera from the rop node to collect the focal length
|
||||
ropnode_path = instance.data["instance_node"]
24  openpype/hosts/max/hooks/force_startup_script.py (new file)
@@ -0,0 +1,24 @@
# -*- coding: utf-8 -*-
"""Pre-launch to force 3ds max startup script."""
from openpype.lib import PreLaunchHook
import os


class ForceStartupScript(PreLaunchHook):
    """Inject OpenPype environment to 3ds max.

    Note that this works in combination whit 3dsmax startup script that
    is translating it back to PYTHONPATH for cases when 3dsmax drops PYTHONPATH
    environment.

    Hook `GlobalHostDataHook` must be executed before this hook.
    """
    app_groups = ["3dsmax"]
    order = 11

    def execute(self):
        startup_args = [
            "-U",
            "MAXScript",
            f"{os.getenv('OPENPYPE_ROOT')}\\openpype\\hosts\\max\\startup\\startup.ms"]  # noqa
        self.launch_context.launch_args.append(startup_args)
@ -32,6 +32,10 @@ from openpype.pipeline import (
|
|||
load_container,
|
||||
registered_host,
|
||||
)
|
||||
from openpype.pipeline.create import (
|
||||
legacy_create,
|
||||
get_legacy_creator_by_name,
|
||||
)
|
||||
from openpype.pipeline.context_tools import (
|
||||
get_current_asset_name,
|
||||
get_current_project_asset,
|
||||
|
|
@ -2153,17 +2157,23 @@ def set_scene_resolution(width, height, pixelAspect):
|
|||
cmds.setAttr("%s.pixelAspect" % control_node, pixelAspect)
|
||||
|
||||
|
||||
def get_frame_range():
|
||||
"""Get the current assets frame range and handles."""
|
||||
def get_frame_range(include_animation_range=False):
|
||||
"""Get the current assets frame range and handles.
|
||||
|
||||
Args:
|
||||
include_animation_range (bool, optional): Whether to include
|
||||
`animationStart` and `animationEnd` keys to define the outer
|
||||
range of the timeline. It is excluded by default.
|
||||
|
||||
Returns:
|
||||
dict: Asset's expected frame range values.
|
||||
|
||||
"""
|
||||
|
||||
# Set frame start/end
|
||||
project_name = get_current_project_name()
|
||||
task_name = get_current_task_name()
|
||||
asset_name = get_current_asset_name()
|
||||
asset = get_asset_by_name(project_name, asset_name)
|
||||
settings = get_project_settings(project_name)
|
||||
include_handles_settings = settings["maya"]["include_handles"]
|
||||
current_task = asset.get("data").get("tasks").get(task_name)
|
||||
|
||||
frame_start = asset["data"].get("frameStart")
|
||||
frame_end = asset["data"].get("frameEnd")
|
||||
|
|
@ -2175,32 +2185,39 @@ def get_frame_range():
|
|||
handle_start = asset["data"].get("handleStart") or 0
|
||||
handle_end = asset["data"].get("handleEnd") or 0
|
||||
|
||||
animation_start = frame_start
|
||||
animation_end = frame_end
|
||||
|
||||
include_handles = include_handles_settings["include_handles_default"]
|
||||
for item in include_handles_settings["per_task_type"]:
|
||||
if current_task["type"] in item["task_type"]:
|
||||
include_handles = item["include_handles"]
|
||||
break
|
||||
if include_handles:
|
||||
animation_start -= int(handle_start)
|
||||
animation_end += int(handle_end)
|
||||
|
||||
cmds.playbackOptions(
|
||||
minTime=frame_start,
|
||||
maxTime=frame_end,
|
||||
animationStartTime=animation_start,
|
||||
animationEndTime=animation_end
|
||||
)
|
||||
cmds.currentTime(frame_start)
|
||||
|
||||
return {
|
||||
frame_range = {
|
||||
"frameStart": frame_start,
|
||||
"frameEnd": frame_end,
|
||||
"handleStart": handle_start,
|
||||
"handleEnd": handle_end
|
||||
}
|
||||
if include_animation_range:
|
||||
# The animation range values are only included to define whether
|
||||
# the Maya time slider should include the handles or not.
|
||||
# Some usages of this function use the full dictionary to define
|
||||
# instance attributes for which we want to exclude the animation
|
||||
# keys. That is why these are excluded by default.
|
||||
task_name = get_current_task_name()
|
||||
settings = get_project_settings(project_name)
|
||||
include_handles_settings = settings["maya"]["include_handles"]
|
||||
current_task = asset.get("data").get("tasks").get(task_name)
|
||||
|
||||
animation_start = frame_start
|
||||
animation_end = frame_end
|
||||
|
||||
include_handles = include_handles_settings["include_handles_default"]
|
||||
for item in include_handles_settings["per_task_type"]:
|
||||
if current_task["type"] in item["task_type"]:
|
||||
include_handles = item["include_handles"]
|
||||
break
|
||||
if include_handles:
|
||||
animation_start -= int(handle_start)
|
||||
animation_end += int(handle_end)
|
||||
|
||||
frame_range["animationStart"] = animation_start
|
||||
frame_range["animationEnd"] = animation_end
|
||||
|
||||
return frame_range
|
||||
|
||||
|
||||
def reset_frame_range(playback=True, render=True, fps=True):
|
||||
|
|
@ -2219,18 +2236,23 @@ def reset_frame_range(playback=True, render=True, fps=True):
|
|||
)
|
||||
set_scene_fps(fps)
|
||||
|
||||
frame_range = get_frame_range()
|
||||
frame_range = get_frame_range(include_animation_range=True)
|
||||
if not frame_range:
|
||||
# No frame range data found for asset
|
||||
return
|
||||
|
||||
frame_start = frame_range["frameStart"] - int(frame_range["handleStart"])
|
||||
frame_end = frame_range["frameEnd"] + int(frame_range["handleEnd"])
|
||||
frame_start = frame_range["frameStart"]
|
||||
frame_end = frame_range["frameEnd"]
|
||||
animation_start = frame_range["animationStart"]
|
||||
animation_end = frame_range["animationEnd"]
|
||||
|
||||
if playback:
|
||||
cmds.playbackOptions(minTime=frame_start)
|
||||
cmds.playbackOptions(maxTime=frame_end)
|
||||
cmds.playbackOptions(animationStartTime=frame_start)
|
||||
cmds.playbackOptions(animationEndTime=frame_end)
|
||||
cmds.playbackOptions(minTime=frame_start)
|
||||
cmds.playbackOptions(maxTime=frame_end)
|
||||
cmds.playbackOptions(
|
||||
minTime=frame_start,
|
||||
maxTime=frame_end,
|
||||
animationStartTime=animation_start,
|
||||
animationEndTime=animation_end
|
||||
)
|
||||
cmds.currentTime(frame_start)
|
||||
|
||||
if render:
|
||||
|
|
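The two hunks above split the old behaviour: `get_frame_range()` is now a plain query that by default returns only the asset's frame range and handles, while callers such as `reset_frame_range()` opt into the extra timeline keys through the new flag. A minimal sketch of the two call styles, assuming a Maya session with the current project, asset and task set; the import path follows the other imports in this commit.

```python
# Hedged sketch of the new get_frame_range() signature.
from openpype.hosts.maya.api.lib import get_frame_range

# Default call: frame range and handles only, safe to copy onto
# instance data without leaking the animation keys.
frame_range = get_frame_range()
print(frame_range["frameStart"], frame_range["frameEnd"])

# Opt-in call: also returns "animationStart"/"animationEnd" so the
# time slider can include or exclude handles per task settings.
full_range = get_frame_range(include_animation_range=True)
print(full_range["animationStart"], full_range["animationEnd"])
```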
@ -3913,3 +3935,53 @@ def get_capture_preset(task_name, task_type, subset, project_settings, log):
|
|||
capture_preset = plugin_settings["capture_preset"]
|
||||
|
||||
return capture_preset or {}
|
||||
|
||||
|
||||
def create_rig_animation_instance(nodes, context, namespace, log=None):
|
||||
"""Create an animation publish instance for loaded rigs.
|
||||
|
||||
See the RecreateRigAnimationInstance inventory action on how to use this
|
||||
for loaded rig containers.
|
||||
|
||||
Arguments:
|
||||
nodes (list): Member nodes of the rig instance.
|
||||
context (dict): Representation context of the rig container
|
||||
namespace (str): Namespace of the rig container
|
||||
log (logging.Logger, optional): Logger to log to if provided
|
||||
|
||||
Returns:
|
||||
None
|
||||
|
||||
"""
|
||||
output = next((node for node in nodes if
|
||||
node.endswith("out_SET")), None)
|
||||
controls = next((node for node in nodes if
|
||||
node.endswith("controls_SET")), None)
|
||||
|
||||
assert output, "No out_SET in rig, this is a bug."
|
||||
assert controls, "No controls_SET in rig, this is a bug."
|
||||
|
||||
# Find the roots amongst the loaded nodes
|
||||
roots = (
|
||||
cmds.ls(nodes, assemblies=True, long=True) or
|
||||
get_highest_in_hierarchy(nodes)
|
||||
)
|
||||
assert roots, "No root nodes in rig, this is a bug."
|
||||
|
||||
asset = legacy_io.Session["AVALON_ASSET"]
|
||||
dependency = str(context["representation"]["_id"])
|
||||
|
||||
if log:
|
||||
log.info("Creating subset: {}".format(namespace))
|
||||
|
||||
# Create the animation instance
|
||||
creator_plugin = get_legacy_creator_by_name("CreateAnimation")
|
||||
with maintained_selection():
|
||||
cmds.select([output, controls] + roots, noExpand=True)
|
||||
legacy_create(
|
||||
creator_plugin,
|
||||
name=namespace,
|
||||
asset=asset,
|
||||
options={"useSelection": True},
|
||||
data={"dependencies": dependency}
|
||||
)
|
||||
|
|
|
|||
|
|
@ -7,6 +7,12 @@ from openpype.hosts.maya.api import (
|
|||
class CreateAnimation(plugin.Creator):
|
||||
"""Animation output for character rigs"""
|
||||
|
||||
# We hide the animation creator from the UI since the creation of it
|
||||
# is automated upon loading a rig. There's an inventory action to recreate
|
||||
# it for loaded rigs if by chance someone deleted the animation instance.
|
||||
# Note: This setting is actually applied from project settings
|
||||
enabled = False
|
||||
|
||||
name = "animationDefault"
|
||||
label = "Animation"
|
||||
family = "animation"
@@ -0,0 +1,35 @@
from openpype.pipeline import (
    InventoryAction,
    get_representation_context
)
from openpype.hosts.maya.api.lib import (
    create_rig_animation_instance,
    get_container_members,
)


class RecreateRigAnimationInstance(InventoryAction):
    """Recreate animation publish instance for loaded rigs"""

    label = "Recreate rig animation instance"
    icon = "wrench"
    color = "#888888"

    @staticmethod
    def is_compatible(container):
        return (
            container.get("loader") == "ReferenceLoader"
            and container.get("name", "").startswith("rig")
        )

    def process(self, containers):

        for container in containers:
            # todo: delete an existing entry if it exist or skip creation

            namespace = container["namespace"]
            representation_id = container["representation"]
            context = get_representation_context(representation_id)
            nodes = get_container_members(container)

            create_rig_animation_instance(nodes, context, namespace)
@ -4,16 +4,12 @@ import contextlib
|
|||
from maya import cmds
|
||||
|
||||
from openpype.settings import get_project_settings
|
||||
from openpype.pipeline import legacy_io
|
||||
from openpype.pipeline.create import (
|
||||
legacy_create,
|
||||
get_legacy_creator_by_name,
|
||||
)
|
||||
import openpype.hosts.maya.api.plugin
|
||||
from openpype.hosts.maya.api.lib import (
|
||||
maintained_selection,
|
||||
get_container_members,
|
||||
parent_nodes
|
||||
parent_nodes,
|
||||
create_rig_animation_instance
|
||||
)
|
||||
|
||||
|
||||
|
|
@ -114,9 +110,6 @@ class ReferenceLoader(openpype.hosts.maya.api.plugin.ReferenceLoader):
|
|||
icon = "code-fork"
|
||||
color = "orange"
|
||||
|
||||
# Name of creator class that will be used to create animation instance
|
||||
animation_creator_name = "CreateAnimation"
|
||||
|
||||
def process_reference(self, context, name, namespace, options):
|
||||
import maya.cmds as cmds
|
||||
|
||||
|
|
@ -220,37 +213,10 @@ class ReferenceLoader(openpype.hosts.maya.api.plugin.ReferenceLoader):
|
|||
self._lock_camera_transforms(members)
|
||||
|
||||
def _post_process_rig(self, name, namespace, context, options):
|
||||
|
||||
output = next((node for node in self if
|
||||
node.endswith("out_SET")), None)
|
||||
controls = next((node for node in self if
|
||||
node.endswith("controls_SET")), None)
|
||||
|
||||
assert output, "No out_SET in rig, this is a bug."
|
||||
assert controls, "No controls_SET in rig, this is a bug."
|
||||
|
||||
# Find the roots amongst the loaded nodes
|
||||
roots = cmds.ls(self[:], assemblies=True, long=True)
|
||||
assert roots, "No root nodes in rig, this is a bug."
|
||||
|
||||
asset = legacy_io.Session["AVALON_ASSET"]
|
||||
dependency = str(context["representation"]["_id"])
|
||||
|
||||
self.log.info("Creating subset: {}".format(namespace))
|
||||
|
||||
# Create the animation instance
|
||||
creator_plugin = get_legacy_creator_by_name(
|
||||
self.animation_creator_name
|
||||
nodes = self[:]
|
||||
create_rig_animation_instance(
|
||||
nodes, context, namespace, log=self.log
|
||||
)
|
||||
with maintained_selection():
|
||||
cmds.select([output, controls] + roots, noExpand=True)
|
||||
legacy_create(
|
||||
creator_plugin,
|
||||
name=namespace,
|
||||
asset=asset,
|
||||
options={"useSelection": True},
|
||||
data={"dependencies": dependency}
|
||||
)
|
||||
|
||||
def _lock_camera_transforms(self, nodes):
|
||||
cameras = cmds.ls(nodes, type="camera")
|
||||
|
|
|
|||
97  openpype/hosts/maya/tools/mayalookassigner/alembic.py (new file)
|
|
@ -0,0 +1,97 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""Tools for loading looks to vray proxies."""
|
||||
import os
|
||||
from collections import defaultdict
|
||||
import logging
|
||||
|
||||
import six
|
||||
|
||||
import alembic.Abc
|
||||
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def get_alembic_paths_by_property(filename, attr, verbose=False):
|
||||
# type: (str, str, bool) -> dict
|
||||
"""Return attribute value per objects in the Alembic file.
|
||||
|
||||
Reads an Alembic archive hierarchy and retrieves the
|
||||
value from the `attr` properties on the objects.
|
||||
|
||||
Args:
|
||||
filename (str): Full path to Alembic archive to read.
|
||||
attr (str): Id attribute.
|
||||
verbose (bool): Whether to verbosely log missing attributes.
|
||||
|
||||
Returns:
|
||||
dict: Mapping of node full path with its id
|
||||
|
||||
"""
|
||||
# Normalize alembic path
|
||||
filename = os.path.normpath(filename)
|
||||
filename = filename.replace("\\", "/")
|
||||
filename = str(filename) # path must be string
|
||||
|
||||
try:
|
||||
archive = alembic.Abc.IArchive(filename)
|
||||
except RuntimeError:
|
||||
# invalid alembic file - probably vrmesh
|
||||
log.warning("{} is not an alembic file".format(filename))
|
||||
return {}
|
||||
root = archive.getTop()
|
||||
|
||||
iterator = list(root.children)
|
||||
obj_ids = {}
|
||||
|
||||
for obj in iterator:
|
||||
name = obj.getFullName()
|
||||
|
||||
# include children for coming iterations
|
||||
iterator.extend(obj.children)
|
||||
|
||||
props = obj.getProperties()
|
||||
if props.getNumProperties() == 0:
|
||||
# Skip those without properties, e.g. '/materials' in a gpuCache
|
||||
continue
|
||||
|
||||
# THe custom attribute is under the properties' first container under
|
||||
# the ".arbGeomParams"
|
||||
prop = props.getProperty(0) # get base property
|
||||
|
||||
_property = None
|
||||
try:
|
||||
geo_params = prop.getProperty('.arbGeomParams')
|
||||
_property = geo_params.getProperty(attr)
|
||||
except KeyError:
|
||||
if verbose:
|
||||
log.debug("Missing attr on: {0}".format(name))
|
||||
continue
|
||||
|
||||
if not _property.isConstant():
|
||||
log.warning("Id not constant on: {0}".format(name))
|
||||
|
||||
# Get first value sample
|
||||
value = _property.getValue()[0]
|
||||
|
||||
obj_ids[name] = value
|
||||
|
||||
return obj_ids
|
||||
|
||||
|
||||
def get_alembic_ids_cache(path):
|
||||
# type: (str) -> dict
|
||||
"""Build a id to node mapping in Alembic file.
|
||||
|
||||
Nodes without IDs are ignored.
|
||||
|
||||
Returns:
|
||||
dict: Mapping of id to nodes in the Alembic.
|
||||
|
||||
"""
|
||||
node_ids = get_alembic_paths_by_property(path, attr="cbId")
|
||||
id_nodes = defaultdict(list)
|
||||
for node, _id in six.iteritems(node_ids):
|
||||
id_nodes[_id].append(node)
|
||||
|
||||
return dict(six.iteritems(id_nodes))
|
||||
|
|
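The new `alembic.py` module above extracts the Alembic id-lookup helpers so they can be shared; the look assigner's `lib.py` hunk below imports `get_alembic_ids_cache` from it. A hedged sketch of standalone usage, where the cache path is a placeholder:

```python
# Hedged sketch: mapping cbId values to node paths in a published Alembic.
from openpype.hosts.maya.tools.mayalookassigner.alembic import (
    get_alembic_ids_cache,
)

path = "/projects/demo/publish/caches/modelMain.abc"  # placeholder path
id_to_nodes = get_alembic_ids_cache(path)
for cb_id, node_paths in id_to_nodes.items():
    print(cb_id, "->", node_paths)
```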
@ -9,6 +9,7 @@ from openpype.pipeline import legacy_io
|
|||
from openpype.client import get_last_version_by_subset_name
|
||||
from openpype.hosts.maya import api
|
||||
from . import lib
|
||||
from .alembic import get_alembic_ids_cache
|
||||
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
|
@ -68,6 +69,11 @@ def get_nodes_by_id(standin):
|
|||
(dict): Dictionary with node full name/path and id.
|
||||
"""
|
||||
path = cmds.getAttr(standin + ".dso")
|
||||
|
||||
if path.endswith(".abc"):
|
||||
# Support alembic files directly
|
||||
return get_alembic_ids_cache(path)
|
||||
|
||||
json_path = None
|
||||
for f in os.listdir(os.path.dirname(path)):
|
||||
if f.endswith(".json"):
|
||||
|
|
|
|||
|
|
@ -1,108 +1,20 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""Tools for loading looks to vray proxies."""
|
||||
import os
|
||||
from collections import defaultdict
|
||||
import logging
|
||||
|
||||
import six
|
||||
|
||||
import alembic.Abc
|
||||
from maya import cmds
|
||||
|
||||
from openpype.client import get_last_version_by_subset_name
|
||||
from openpype.pipeline import legacy_io
|
||||
import openpype.hosts.maya.lib as maya_lib
|
||||
from . import lib
|
||||
from .alembic import get_alembic_ids_cache
|
||||
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def get_alembic_paths_by_property(filename, attr, verbose=False):
|
||||
# type: (str, str, bool) -> dict
|
||||
"""Return attribute value per objects in the Alembic file.
|
||||
|
||||
Reads an Alembic archive hierarchy and retrieves the
|
||||
value from the `attr` properties on the objects.
|
||||
|
||||
Args:
|
||||
filename (str): Full path to Alembic archive to read.
|
||||
attr (str): Id attribute.
|
||||
verbose (bool): Whether to verbosely log missing attributes.
|
||||
|
||||
Returns:
|
||||
dict: Mapping of node full path with its id
|
||||
|
||||
"""
|
||||
# Normalize alembic path
|
||||
filename = os.path.normpath(filename)
|
||||
filename = filename.replace("\\", "/")
|
||||
filename = str(filename) # path must be string
|
||||
|
||||
try:
|
||||
archive = alembic.Abc.IArchive(filename)
|
||||
except RuntimeError:
|
||||
# invalid alembic file - probably vrmesh
|
||||
log.warning("{} is not an alembic file".format(filename))
|
||||
return {}
|
||||
root = archive.getTop()
|
||||
|
||||
iterator = list(root.children)
|
||||
obj_ids = {}
|
||||
|
||||
for obj in iterator:
|
||||
name = obj.getFullName()
|
||||
|
||||
# include children for coming iterations
|
||||
iterator.extend(obj.children)
|
||||
|
||||
props = obj.getProperties()
|
||||
if props.getNumProperties() == 0:
|
||||
# Skip those without properties, e.g. '/materials' in a gpuCache
|
||||
continue
|
||||
|
||||
# THe custom attribute is under the properties' first container under
|
||||
# the ".arbGeomParams"
|
||||
prop = props.getProperty(0) # get base property
|
||||
|
||||
_property = None
|
||||
try:
|
||||
geo_params = prop.getProperty('.arbGeomParams')
|
||||
_property = geo_params.getProperty(attr)
|
||||
except KeyError:
|
||||
if verbose:
|
||||
log.debug("Missing attr on: {0}".format(name))
|
||||
continue
|
||||
|
||||
if not _property.isConstant():
|
||||
log.warning("Id not constant on: {0}".format(name))
|
||||
|
||||
# Get first value sample
|
||||
value = _property.getValue()[0]
|
||||
|
||||
obj_ids[name] = value
|
||||
|
||||
return obj_ids
|
||||
|
||||
|
||||
def get_alembic_ids_cache(path):
|
||||
# type: (str) -> dict
|
||||
"""Build a id to node mapping in Alembic file.
|
||||
|
||||
Nodes without IDs are ignored.
|
||||
|
||||
Returns:
|
||||
dict: Mapping of id to nodes in the Alembic.
|
||||
|
||||
"""
|
||||
node_ids = get_alembic_paths_by_property(path, attr="cbId")
|
||||
id_nodes = defaultdict(list)
|
||||
for node, _id in six.iteritems(node_ids):
|
||||
id_nodes[_id].append(node)
|
||||
|
||||
return dict(six.iteritems(id_nodes))
|
||||
|
||||
|
||||
def assign_vrayproxy_shaders(vrayproxy, assignments):
|
||||
# type: (str, dict) -> None
|
||||
"""Assign shaders to content of Vray Proxy.
|
||||
|
|
|
|||
|
|
@ -495,17 +495,17 @@ def get_avalon_knob_data(node, prefix="avalon:", create=True):
|
|||
data (dict)
|
||||
"""
|
||||
|
||||
data = {}
|
||||
if AVALON_TAB not in node.knobs():
|
||||
return data
|
||||
|
||||
# check if lists
|
||||
if not isinstance(prefix, list):
|
||||
prefix = list([prefix])
|
||||
|
||||
data = dict()
|
||||
prefix = [prefix]
|
||||
|
||||
# loop prefix
|
||||
for p in prefix:
|
||||
# check if the node is avalon tracked
|
||||
if AVALON_TAB not in node.knobs():
|
||||
continue
|
||||
try:
|
||||
# check if data available on the node
|
||||
test = node[AVALON_DATA_GROUP].value()
|
||||
|
|
@ -516,8 +516,7 @@ def get_avalon_knob_data(node, prefix="avalon:", create=True):
|
|||
if create:
|
||||
node = set_avalon_knob_data(node)
|
||||
return get_avalon_knob_data(node)
|
||||
else:
|
||||
return {}
|
||||
return {}
|
||||
|
||||
# get data from filtered knobs
|
||||
data.update({k.replace(p, ''): node[k].value()
|
||||
|
|
|
|||
|
|
@ -2,7 +2,8 @@ from openpype.pipeline.create.creator_plugins import SubsetConvertorPlugin
|
|||
from openpype.hosts.nuke.api.lib import (
|
||||
INSTANCE_DATA_KNOB,
|
||||
get_node_data,
|
||||
get_avalon_knob_data
|
||||
get_avalon_knob_data,
|
||||
AVALON_TAB,
|
||||
)
|
||||
from openpype.hosts.nuke.api.plugin import convert_to_valid_instaces
|
||||
|
||||
|
|
@ -17,13 +18,15 @@ class LegacyConverted(SubsetConvertorPlugin):
|
|||
legacy_found = False
|
||||
# search for first available legacy item
|
||||
for node in nuke.allNodes(recurseGroups=True):
|
||||
|
||||
if node.Class() in ["Viewer", "Dot"]:
|
||||
continue
|
||||
|
||||
if get_node_data(node, INSTANCE_DATA_KNOB):
|
||||
continue
|
||||
|
||||
if AVALON_TAB not in node.knobs():
|
||||
continue
|
||||
|
||||
# get data from avalon knob
|
||||
avalon_knob_data = get_avalon_knob_data(
|
||||
node, ["avalon:", "ak:"], create=False)
|
||||
|
|
|
|||
|
|
@ -554,7 +554,7 @@
|
|||
"publish_mip_map": true
|
||||
},
|
||||
"CreateAnimation": {
|
||||
"enabled": true,
|
||||
"enabled": false,
|
||||
"write_color_sets": false,
|
||||
"write_face_sets": false,
|
||||
"include_parent_hierarchy": false,
|
||||
|
|
@ -1459,7 +1459,7 @@
|
|||
]
|
||||
},
|
||||
"reference_loader": {
|
||||
"namespace": "{asset_name}_{subset}_##",
|
||||
"namespace": "{asset_name}_{subset}_##_",
|
||||
"group_name": "_GRP"
|
||||
}
|
||||
},
|
||||
|
|
|
|||
|
|
@ -119,9 +119,7 @@
|
|||
"label": "3ds max",
|
||||
"icon": "{}/app_icons/3dsmax.png",
|
||||
"host_name": "max",
|
||||
"environment": {
|
||||
"ADSK_3DSMAX_STARTUPSCRIPTS_ADDON_DIR": "{OPENPYPE_ROOT}\\openpype\\hosts\\max\\startup"
|
||||
},
|
||||
"environment": {},
|
||||
"variants": {
|
||||
"2023": {
|
||||
"use_python_2": false,
|
||||
|
|
@ -133,9 +131,7 @@
|
|||
"linux": []
|
||||
},
|
||||
"arguments": {
|
||||
"windows": [
|
||||
"-U MAXScript {OPENPYPE_ROOT}\\openpype\\hosts\\max\\startup\\startup.ms"
|
||||
],
|
||||
"windows": [],
|
||||
"darwin": [],
|
||||
"linux": []
|
||||
},
|
||||
|
|
|
|||
|
|
@ -211,6 +211,10 @@ class AssetsDialog(QtWidgets.QDialog):
|
|||
layout.addWidget(asset_view, 1)
|
||||
layout.addLayout(btns_layout, 0)
|
||||
|
||||
controller.event_system.add_callback(
|
||||
"controller.reset.finished", self._on_controller_reset
|
||||
)
|
||||
|
||||
asset_view.double_clicked.connect(self._on_ok_clicked)
|
||||
filter_input.textChanged.connect(self._on_filter_change)
|
||||
ok_btn.clicked.connect(self._on_ok_clicked)
|
||||
|
|
@ -245,6 +249,10 @@ class AssetsDialog(QtWidgets.QDialog):
|
|||
new_pos.setY(new_pos.y() - int(self.height() / 2))
|
||||
self.move(new_pos)
|
||||
|
||||
def _on_controller_reset(self):
|
||||
# Change reset enabled so model is reset on show event
|
||||
self._soft_reset_enabled = True
|
||||
|
||||
def showEvent(self, event):
|
||||
"""Refresh asset model on show."""
|
||||
super(AssetsDialog, self).showEvent(event)
|
||||
|
|
|
|||
|
|
@ -284,6 +284,9 @@ class PublisherWindow(QtWidgets.QDialog):
|
|||
controller.event_system.add_callback(
|
||||
"publish.has_validated.changed", self._on_publish_validated_change
|
||||
)
|
||||
controller.event_system.add_callback(
|
||||
"publish.finished.changed", self._on_publish_finished_change
|
||||
)
|
||||
controller.event_system.add_callback(
|
||||
"publish.process.stopped", self._on_publish_stop
|
||||
)
|
||||
|
|
@ -400,6 +403,7 @@ class PublisherWindow(QtWidgets.QDialog):
|
|||
# TODO capture changes and ask user if wants to save changes on close
|
||||
if not self._controller.host_context_has_changed:
|
||||
self._save_changes(False)
|
||||
self._comment_input.setText("") # clear comment
|
||||
self._reset_on_show = True
|
||||
self._controller.clear_thumbnail_temp_dir_path()
|
||||
super(PublisherWindow, self).closeEvent(event)
|
||||
|
|
@ -777,6 +781,11 @@ class PublisherWindow(QtWidgets.QDialog):
|
|||
if event["value"]:
|
||||
self._validate_btn.setEnabled(False)
|
||||
|
||||
def _on_publish_finished_change(self, event):
|
||||
if event["value"]:
|
||||
# Successful publish, remove comment from UI
|
||||
self._comment_input.setText("")
|
||||
|
||||
def _on_publish_stop(self):
|
||||
self._set_publish_overlay_visibility(False)
|
||||
self._reset_btn.setEnabled(True)
|
||||
|
|
|
|||
|
|
@ -238,12 +238,12 @@ For resolution and frame range, use **OpenPype → Set Frame Range** and
|
|||
|
||||
Creating and publishing rigs with OpenPype follows similar workflow as with
|
||||
other data types. Create your rig and mark parts of your hierarchy in sets to
|
||||
help OpenPype validators and extractors to check it and publish it.
|
||||
help OpenPype validators and extractors to check and publish it.
|
||||
|
||||
### Preparing rig for publish
|
||||
|
||||
When creating rigs, it is recommended (and it is in fact enforced by validators)
|
||||
to separate bones or driving objects, their controllers and geometry so they are
|
||||
to separate bones or driven objects, their controllers and geometry so they are
|
||||
easily managed. Currently OpenPype doesn't allow to publish model at the same time as
|
||||
its rig so for demonstration purposes, I'll first create simple model for robotic
|
||||
arm, just made out of simple boxes and I'll publish it.
|
||||
|
|
@ -252,41 +252,48 @@ arm, just made out of simple boxes and I'll publish it.
|
|||
|
||||
For more information about publishing models, see [Publishing models](artist_hosts_maya.md#publishing-models).
|
||||
|
||||
Now lets start with empty scene. Load your model - **OpenPype → Load...**, right
|
||||
Now let's start with empty scene. Load your model - **OpenPype → Load...**, right
|
||||
click on it and select **Reference (abc)**.
|
||||
|
||||
I've created few bones and their controllers in two separate
|
||||
groups - `rig_GRP` and `controls_GRP`. Naming is not important - just adhere to
|
||||
your naming conventions.
|
||||
I've created a few bones in `rig_GRP`, their controllers in `controls_GRP` and
|
||||
placed the rig's output geometry in `geometry_GRP`. Naming of the groups is not important - just adhere to
|
||||
your naming conventions. Then I parented everything into a single top group named `arm_rig`.
|
||||
|
||||
Then I've put everything into `arm_rig` group.
|
||||
|
||||
When you've prepared your hierarchy, it's time to create *Rig instance* in OpenPype.
|
||||
Select your whole rig hierarchy and go **OpenPype → Create...**. Select **Rig**.
|
||||
Set is created in your scene to mark rig parts for export. Notice that it has
|
||||
two subsets - `controls_SET` and `out_SET`. Put your controls into `controls_SET`
|
||||
With the prepared hierarchy it is time to create a *Rig instance* in OpenPype.
|
||||
Select the top group of your rig and go to **OpenPype → Create...**. Select **Rig**.
|
||||
A publish set for your rig is created in your scene to mark rig parts for export.
|
||||
Notice that it has two subsets - `controls_SET` and `out_SET`. Put your controls into `controls_SET`
|
||||
and geometry to `out_SET`. You should end up with something like this:
|
||||
|
||||

|
||||
|
||||
:::note controls_SET and out_SET contents
|
||||
It is totally allowed to put the `geometry_GRP` in the `out_SET` as opposed to
|
||||
the individual meshes - it's even **recommended**. However, the `controls_SET`
|
||||
requires the individual controls in it that the artist is supposed to animate
|
||||
and manipulate so the publish validators can accurately check the rig's
|
||||
controls.
|
||||
:::
|
||||
|
||||
### Publishing rigs
|
||||
|
||||
Publishing rig is done in same way as publishing everything else. Save your scene
|
||||
and go **OpenPype → Publish**. When you run validation you'll mostly run at first into
|
||||
few issues. Although number of them will seem to be intimidating at first, you'll
|
||||
find out they are mostly minor things easily fixed.
|
||||
Publishing rigs is done in a same way as publishing everything else. Save your scene
|
||||
and go **OpenPype → Publish**. When you run validation you'll most likely run into
|
||||
a few issues at first. Although a number of them will seem to be intimidating you
|
||||
will find out they are mostly minor things, easily fixed and are there to optimize
|
||||
your rig for consistency and safe usage by the artist.
|
||||
|
||||
* **Non Duplicate Instance Members (ID)** - This will most likely fail because when
|
||||
- **Non Duplicate Instance Members (ID)** - This will most likely fail because when
|
||||
creating rigs, we usually duplicate few parts of it to reuse them. But duplication
|
||||
will duplicate also ID of original object and OpenPype needs every object to have
|
||||
unique ID. This is easily fixed by **Repair** action next to validator name. click
|
||||
on little up arrow on right side of validator name and select **Repair** form menu.
|
||||
|
||||
* **Joints Hidden** - This is enforcing joints (bones) to be hidden for user as
|
||||
- **Joints Hidden** - This is enforcing joints (bones) to be hidden for user as
|
||||
animator usually doesn't need to see them and they clutter his viewports. So
|
||||
well behaving rig should have them hidden. **Repair** action will help here also.
|
||||
|
||||
* **Rig Controllers** will check if there are no transforms on unlocked attributes
|
||||
- **Rig Controllers** will check if there are no transforms on unlocked attributes
|
||||
of controllers. This is needed because animator should have ease way to reset rig
|
||||
to it's default position. It also check that those attributes doesn't have any
|
||||
incoming connections from other parts of scene to ensure that published rig doesn't
|
||||
|
|
@ -297,6 +304,19 @@ have any missing dependencies.
|
|||
You can load rig with [Loader](artist_tools_loader). Go **OpenPype → Load...**,
|
||||
select your rig, right click on it and **Reference** it.
|
||||
|
||||
### Animation instances
|
||||
|
||||
Whenever you load a rig an animation publish instance is automatically created
|
||||
for it. This means that if you load a rig you don't need to create a pointcache
|
||||
instance yourself to publish the geometry. This is all cleanly prepared for you
|
||||
when loading a published rig.
|
||||
|
||||
:::tip Missing animation instance for your loaded rig?
|
||||
Did you accidentally delete the animation instance for a loaded rig? You can
|
||||
recreate it using the [**Recreate rig animation instance**](artist_hosts_maya.md#recreate-rig-animation-instance)
|
||||
inventory action.
|
||||
:::
|
||||
|
||||
## Point caches
|
||||
OpenPype is using Alembic format for point caches. Workflow is very similar as
|
||||
other data types.
|
||||
|
|
@ -646,3 +666,15 @@ Select 1 container of type `animation` or `pointcache`, then 1+ container of any
|
|||
The action searches the selected containers for 1 animation container of type `animation` or `pointcache`. This animation container will be connected to the rest of the selected containers. Matching geometries between containers is done by comparing the attribute `cbId`.
|
||||
|
||||
The connection between geometries is done with a live blendshape.
|
||||
|
||||
### Recreate rig animation instance
|
||||
|
||||
This action can regenerate an animation instance for a loaded rig, for example
|
||||
for when it was accidentally deleted by the user.
|
||||
|
||||

|
||||
|
||||
#### Usage
|
||||
|
||||
Select 1 or more container of type `rig` for which you want to recreate the
|
||||
animation instance.
Binary file not shown (new image, 46 KiB).