Mirror of https://github.com/ynput/ayon-core.git, synced 2025-12-24 21:04:40 +01:00

Commit 6357de7078: "[Automated] Merged develop into main"
45 changed files with 757 additions and 476 deletions
@@ -29,6 +29,7 @@ from .entities import (
     get_representations,
     get_representation_parents,
     get_representations_parents,
+    get_archived_representations,

     get_thumbnail,
     get_thumbnails,

@@ -66,6 +67,7 @@ __all__ = (
     "get_representations",
     "get_representation_parents",
     "get_representations_parents",
+    "get_archived_representations",

     "get_thumbnail",
     "get_thumbnails",
@@ -384,6 +384,7 @@ def get_subsets(
     subset_ids=None,
     subset_names=None,
     asset_ids=None,
+    names_by_asset_ids=None,
     archived=False,
     fields=None
 ):

@@ -399,6 +400,9 @@ def get_subsets(
             Filter ignored if 'None' is passed.
         asset_ids (list[str|ObjectId]): Asset ids under which should look for
             the subsets. Filter ignored if 'None' is passed.
+        names_by_asset_ids (dict[ObjectId, list[str]]): Complex filtering
+            using asset ids and list of subset names under the asset.
         archived (bool): Look for archived subsets too.
         fields (list[str]): Fields that should be returned. All fields are
             returned if 'None' is passed.

@@ -432,6 +436,18 @@ def get_subsets(
             return []
         query_filter["name"] = {"$in": list(subset_names)}

+    if names_by_asset_ids is not None:
+        or_query = []
+        for asset_id, names in names_by_asset_ids.items():
+            if asset_id and names:
+                or_query.append({
+                    "parent": _convert_id(asset_id),
+                    "name": {"$in": list(names)}
+                })
+        if not or_query:
+            return []
+        query_filter["$or"] = or_query
+
     conn = _get_project_connection(project_name)
     return conn.find(query_filter, _prepare_fields(fields))
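The new "names_by_asset_ids" argument compiles into a single MongoDB "$or"
query with one branch per asset. A minimal sketch of the resulting filter
(illustrative only, not part of the commit; the ObjectId values are made up):

    from bson.objectid import ObjectId

    names_by_asset_ids = {
        ObjectId("5f0c1e9b8d3a4c2b1a000001"): ["modelMain", "rigMain"],
        ObjectId("5f0c1e9b8d3a4c2b1a000002"): ["lookdevMain"],
    }
    or_query = [
        {"parent": asset_id, "name": {"$in": list(names)}}
        for asset_id, names in names_by_asset_ids.items()
        if asset_id and names
    ]
    query_filter = {"type": "subset", "$or": or_query}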
@@ -742,7 +758,10 @@ def get_last_versions(project_name, subset_ids, fields=None):
     """Latest versions for entered subset_ids.

     Args:
         project_name (str): Name of project where to look for queried entities.
         subset_ids (list): List of subset ids.
+        fields (list[str]): Fields that should be returned. All fields are
+            returned if 'None' is passed.

     Returns:
         dict[ObjectId, int]: Key is subset id and value is last version name.

@@ -752,7 +771,34 @@ def get_last_versions(project_name, subset_ids, fields=None):
     if not subset_ids:
         return {}

-    _pipeline = [
+    if fields is not None:
+        fields = list(fields)
+        if not fields:
+            return {}
+
+    # Avoid double query if only name and _id are requested
+    name_needed = False
+    limit_query = False
+    if fields:
+        fields_s = set(fields)
+        if "name" in fields_s:
+            name_needed = True
+            fields_s.remove("name")
+
+        for field in ("_id", "parent"):
+            if field in fields_s:
+                fields_s.remove(field)
+        limit_query = len(fields_s) == 0
+
+    group_item = {
+        "_id": "$parent",
+        "_version_id": {"$last": "$_id"}
+    }
+    # Add name if name is needed (only for limit query)
+    if name_needed:
+        group_item["name"] = {"$last": "$name"}
+
+    aggregation_pipeline = [
         # Find all versions of those subsets
         {"$match": {
             "type": "version",

@@ -761,16 +807,24 @@ def get_last_versions(project_name, subset_ids, fields=None):
         # Sorting versions all together
         {"$sort": {"name": 1}},
         # Group them by "parent", but only take the last
-        {"$group": {
-            "_id": "$parent",
-            "_version_id": {"$last": "$_id"}
-        }}
+        {"$group": group_item}
     ]

     conn = _get_project_connection(project_name)
+    aggregate_result = conn.aggregate(aggregation_pipeline)
+    if limit_query:
+        output = {}
+        for item in aggregate_result:
+            subset_id = item["_id"]
+            item_data = {"_id": item["_version_id"], "parent": subset_id}
+            if name_needed:
+                item_data["name"] = item["name"]
+            output[subset_id] = item_data
+        return output
+
     version_ids = [
         doc["_version_id"]
-        for doc in conn.aggregate(_pipeline)
+        for doc in aggregate_result
     ]

     fields = _prepare_fields(fields, ["parent"])
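With the rework above the "$group" stage is built once and, when the caller
only asks for "_id", "parent" and "name", the aggregation result is returned
directly instead of issuing a second find() for the version documents. A
sketch of the pipeline the fast path produces (illustrative only, assuming
fields=["name"] and subset_ids being a list of ObjectId values):

    aggregation_pipeline = [
        {"$match": {"type": "version", "parent": {"$in": subset_ids}}},
        {"$sort": {"name": 1}},
        # "name" is only added to the group stage because it was requested
        {"$group": {
            "_id": "$parent",
            "_version_id": {"$last": "$_id"},
            "name": {"$last": "$name"}
        }}
    ]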
@@ -867,7 +921,7 @@ def get_representation_by_id(project_name, representation_id, fields=None):
     if not representation_id:
         return None

-    repre_types = ["representation", "archived_representations"]
+    repre_types = ["representation", "archived_representation"]
     query_filter = {
         "type": {"$in": repre_types}
     }

@@ -911,43 +965,26 @@ def get_representation_by_name(
     return conn.find_one(query_filter, _prepare_fields(fields))


-def get_representations(
+def _get_representations(
     project_name,
-    representation_ids=None,
-    representation_names=None,
-    version_ids=None,
-    extensions=None,
-    names_by_version_ids=None,
-    archived=False,
-    fields=None
+    representation_ids,
+    representation_names,
+    version_ids,
+    extensions,
+    names_by_version_ids,
+    standard,
+    archived,
+    fields
 ):
-    """Representation entities data from one project filtered by filters.
-
-    Filters are additive (all conditions must pass to return representation).
-
-    Args:
-        project_name (str): Name of project where to look for queried entities.
-        representation_ids (list[str|ObjectId]): Representation ids used as
-            filter. Filter ignored if 'None' is passed.
-        representation_names (list[str]): Representation names used as filter.
-            Filter ignored if 'None' is passed.
-        version_ids (list[str]): Version ids used as parent filter. Filter
-            ignored if 'None' is passed.
-        extensions (list[str]): Filter by extension of main representation
-            file (without dot).
-        names_by_version_ids (dict[ObjectId, list[str]]): Complex filtering
-            using version ids and list of names under the version.
-        archived (bool): Output will also contain archived representations.
-        fields (list[str]): Fields that should be returned. All fields are
-            returned if 'None' is passed.
-
-    Returns:
-        Cursor: Iterable cursor yielding all matching representations.
-    """
-
-    repre_types = ["representation"]
+    repre_types = []
+    if standard:
+        repre_types.append("representation")
     if archived:
-        repre_types.append("archived_representations")
+        repre_types.append("archived_representation")
+
+    if not repre_types:
+        return []

     if len(repre_types) == 1:
         query_filter = {"type": repre_types[0]}
     else:
@@ -992,6 +1029,99 @@ def get_representations(
     return conn.find(query_filter, _prepare_fields(fields))


+def get_representations(
+    project_name,
+    representation_ids=None,
+    representation_names=None,
+    version_ids=None,
+    extensions=None,
+    names_by_version_ids=None,
+    archived=False,
+    standard=True,
+    fields=None
+):
+    """Representation entities data from one project filtered by filters.
+
+    Filters are additive (all conditions must pass to return representation).
+
+    Args:
+        project_name (str): Name of project where to look for queried entities.
+        representation_ids (list[str|ObjectId]): Representation ids used as
+            filter. Filter ignored if 'None' is passed.
+        representation_names (list[str]): Representation names used as filter.
+            Filter ignored if 'None' is passed.
+        version_ids (list[str]): Version ids used as parent filter. Filter
+            ignored if 'None' is passed.
+        extensions (list[str]): Filter by extension of main representation
+            file (without dot).
+        names_by_version_ids (dict[ObjectId, list[str]]): Complex filtering
+            using version ids and list of names under the version.
+        archived (bool): Output will also contain archived representations.
+        fields (list[str]): Fields that should be returned. All fields are
+            returned if 'None' is passed.
+
+    Returns:
+        Cursor: Iterable cursor yielding all matching representations.
+    """
+
+    return _get_representations(
+        project_name=project_name,
+        representation_ids=representation_ids,
+        representation_names=representation_names,
+        version_ids=version_ids,
+        extensions=extensions,
+        names_by_version_ids=names_by_version_ids,
+        standard=True,
+        archived=archived,
+        fields=fields
+    )
+
+
+def get_archived_representations(
+    project_name,
+    representation_ids=None,
+    representation_names=None,
+    version_ids=None,
+    extensions=None,
+    names_by_version_ids=None,
+    fields=None
+):
+    """Archived representation entities data from project with applied filters.
+
+    Filters are additive (all conditions must pass to return representation).
+
+    Args:
+        project_name (str): Name of project where to look for queried entities.
+        representation_ids (list[str|ObjectId]): Representation ids used as
+            filter. Filter ignored if 'None' is passed.
+        representation_names (list[str]): Representation names used as filter.
+            Filter ignored if 'None' is passed.
+        version_ids (list[str]): Version ids used as parent filter. Filter
+            ignored if 'None' is passed.
+        extensions (list[str]): Filter by extension of main representation
+            file (without dot).
+        names_by_version_ids (dict[ObjectId, list[str]]): Complex filtering
+            using version ids and list of names under the version.
+        fields (list[str]): Fields that should be returned. All fields are
+            returned if 'None' is passed.
+
+    Returns:
+        Cursor: Iterable cursor yielding all matching representations.
+    """
+
+    return _get_representations(
+        project_name=project_name,
+        representation_ids=representation_ids,
+        representation_names=representation_names,
+        version_ids=version_ids,
+        extensions=extensions,
+        names_by_version_ids=names_by_version_ids,
+        standard=False,
+        archived=True,
+        fields=fields
+    )
+
+
 def get_representations_parents(project_name, representations):
     """Prepare parents of representation entities.
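Both public functions now delegate to _get_representations(); only the
standard/archived flags differ, and those select which document types the
query matches. A short usage sketch (illustrative only; project name and
version id are made up):

    project_name = "demo_project"
    version_ids = ["626e7b9b9d2d4c34a1b2c3d4"]

    active = get_representations(project_name, version_ids=version_ids)
    archived = get_archived_representations(
        project_name, version_ids=version_ids
    )
    # The flags the wrappers pass map to document types:
    #   standard=True -> matches type "representation"
    #   archived=True -> matches type "archived_representation"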
@@ -17,11 +17,8 @@ class RenderCreator(Creator):

     create_allow_context_change = True

-    def __init__(
-        self, create_context, system_settings, project_settings, headless=False
-    ):
-        super(RenderCreator, self).__init__(create_context, system_settings,
-                                            project_settings, headless)
+    def __init__(self, project_settings, *args, **kwargs):
+        super(RenderCreator, self).__init__(project_settings, *args, **kwargs)
         self._default_variants = (project_settings["aftereffects"]
                                   ["create"]
                                   ["RenderCreator"]
@@ -2,7 +2,7 @@ import os
 import flame
 from pprint import pformat
 import openpype.hosts.flame.api as opfapi

+from openpype.lib import StringTemplate

 class LoadClip(opfapi.ClipLoader):
     """Load a subset to timeline as clip

@@ -22,7 +22,7 @@ class LoadClip(opfapi.ClipLoader):
     # settings
     reel_group_name = "OpenPype_Reels"
     reel_name = "Loaded"
-    clip_name_template = "{asset}_{subset}_{output}"
+    clip_name_template = "{asset}_{subset}<_{output}>"

     def load(self, context, name, namespace, options):

@@ -36,8 +36,8 @@ class LoadClip(opfapi.ClipLoader):
         version_data = version.get("data", {})
         version_name = version.get("name", None)
         colorspace = version_data.get("colorspace", None)
-        clip_name = self.clip_name_template.format(
-            **context["representation"]["context"])
+        clip_name = StringTemplate(self.clip_name_template).format(
+            context["representation"]["context"])

         # TODO: settings in imageio
         # convert colorspace with ocio to flame mapping
@@ -2,6 +2,7 @@ import os
 import flame
 from pprint import pformat
 import openpype.hosts.flame.api as opfapi
+from openpype.lib import StringTemplate


 class LoadClipBatch(opfapi.ClipLoader):

@@ -21,7 +22,7 @@ class LoadClipBatch(opfapi.ClipLoader):

     # settings
     reel_name = "OP_LoadedReel"
-    clip_name_template = "{asset}_{subset}_{output}"
+    clip_name_template = "{asset}_{subset}<_{output}>"

     def load(self, context, name, namespace, options):

@@ -39,8 +40,8 @@ class LoadClipBatch(opfapi.ClipLoader):
         if not context["representation"]["context"].get("output"):
             self.clip_name_template.replace("output", "representation")

-        clip_name = self.clip_name_template.format(
-            **context["representation"]["context"])
+        clip_name = StringTemplate(self.clip_name_template).format(
+            context["representation"]["context"])

         # TODO: settings in imageio
         # convert colorspace with ocio to flame mapping
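Unlike str.format(), the StringTemplate used above tolerates missing keys,
and the angle-bracket group "<_{output}>" marks an optional section that is
dropped when "output" is absent from the formatting data. A small sketch of
the intended behavior (illustrative only, assuming openpype.lib.StringTemplate
semantics; the data dicts are made up):

    from openpype.lib import StringTemplate

    template = StringTemplate("{asset}_{subset}<_{output}>")
    data = {"asset": "sh010", "subset": "plateMain", "output": "h264"}
    print(template.format(data))  # sh010_plateMain_h264

    data = {"asset": "sh010", "subset": "plateMain"}
    print(template.format(data))  # sh010_plateMain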
openpype/hosts/hiero/api/launchforhiero.py (new file, 85 lines)

@@ -0,0 +1,85 @@
+import logging
+
+from scriptsmenu import scriptsmenu
+from Qt import QtWidgets
+
+
+log = logging.getLogger(__name__)
+
+
+def _hiero_main_window():
+    """Return Hiero's main window"""
+    for obj in QtWidgets.QApplication.topLevelWidgets():
+        if (obj.inherits('QMainWindow') and
+                obj.metaObject().className() == 'Foundry::UI::DockMainWindow'):
+            return obj
+    raise RuntimeError('Could not find HieroWindow instance')
+
+
+def _hiero_main_menubar():
+    """Retrieve the main menubar of the Hiero window"""
+    hiero_window = _hiero_main_window()
+    menubar = [i for i in hiero_window.children() if isinstance(
+        i,
+        QtWidgets.QMenuBar
+    )]
+
+    assert len(menubar) == 1, "Error, could not find menu bar!"
+    return menubar[0]
+
+
+def find_scripts_menu(title, parent):
+    """
+    Check if the menu exists with the given title in the parent
+
+    Args:
+        title (str): the title name of the scripts menu
+
+        parent (QtWidgets.QMenuBar): the menubar to check
+
+    Returns:
+        QtWidgets.QMenu or None
+
+    """
+
+    menu = None
+    search = [i for i in parent.children() if
+              isinstance(i, scriptsmenu.ScriptsMenu)
+              and i.title() == title]
+    if search:
+        assert len(search) < 2, ("Multiple instances of menu '{}' "
+                                 "in menu bar".format(title))
+        menu = search[0]
+
+    return menu
+
+
+def main(title="Scripts", parent=None, objectName=None):
+    """Build the main scripts menu in Hiero
+
+    Args:
+        title (str): name of the menu in the application
+
+        parent (QtWidgets.QMenuBar): the parent object for the menu
+
+        objectName (str): custom objectName for scripts menu
+
+    Returns:
+        scriptsmenu.ScriptsMenu instance
+
+    """
+    hieromainbar = parent or _hiero_main_menubar()
+    try:
+        # check if the menu already exists
+        menu = find_scripts_menu(title, hieromainbar)
+        if not menu:
+            log.info("Attempting to build menu ...")
+            object_name = objectName or title.lower()
+            menu = scriptsmenu.ScriptsMenu(title=title,
+                                           parent=hieromainbar,
+                                           objectName=object_name)
+    except Exception as e:
+        log.error(e)
+        return
+
+    return menu
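Usage of the new module is idempotent: the first call builds the menu, later
calls find and return the existing instance via find_scripts_menu(). A
minimal sketch, to be run inside Hiero (the menu title is made up):

    from openpype.hosts.hiero.api import launchforhiero

    menu = launchforhiero.main(title="Studio Scripts")   # builds the menu
    again = launchforhiero.main(title="Studio Scripts")  # returns same menu
    assert menu is again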
@@ -9,6 +9,7 @@ from openpype.pipeline import legacy_io
 from openpype.tools.utils import host_tools

 from . import tags
+from openpype.settings import get_project_settings

 log = Logger.get_logger(__name__)

@@ -41,6 +42,7 @@ def menu_install():
     Installing menu into Hiero

     """
     from Qt import QtGui
     from . import (
         publish, launch_workfiles_app, reload_config,

@@ -138,3 +140,30 @@ def menu_install():
     exeprimental_action.triggered.connect(
         lambda: host_tools.show_experimental_tools_dialog(parent=main_window)
     )
+
+
+def add_scripts_menu():
+    try:
+        from . import launchforhiero
+    except ImportError:
+        log.warning(
+            "Skipping studio.menu install, because "
+            "'scriptsmenu' module seems unavailable."
+        )
+        return
+
+    # load configuration of custom menu
+    project_settings = get_project_settings(os.getenv("AVALON_PROJECT"))
+    config = project_settings["hiero"]["scriptsmenu"]["definition"]
+    _menu = project_settings["hiero"]["scriptsmenu"]["name"]
+
+    if not config:
+        log.warning("Skipping studio menu, no definition found.")
+        return
+
+    # run the launcher for Hiero menu
+    studio_menu = launchforhiero.main(title=_menu.title())
+
+    # apply configuration
+    studio_menu.build_from_configuration(studio_menu, config)
@@ -48,6 +48,7 @@ def install():

     # install menu
     menu.menu_install()
+    menu.add_scripts_menu()

     # register hiero events
     events.register_hiero_events()
@@ -1,111 +0,0 @@
-import os
-
-from maya import cmds
-
-import openpype.api
-from openpype.hosts.maya.api.lib import (
-    extract_alembic,
-    suspended_refresh,
-    maintained_selection,
-    iter_visible_nodes_in_range
-)
-
-
-class ExtractAnimation(openpype.api.Extractor):
-    """Produce an alembic of just point positions and normals.
-
-    Positions and normals, uvs, creases are preserved, but nothing more,
-    for plain and predictable point caches.
-
-    Plugin can run locally or remotely (on a farm - if instance is marked with
-    "farm" it will be skipped in local processing, but processed on farm)
-    """
-
-    label = "Extract Animation"
-    hosts = ["maya"]
-    families = ["animation"]
-    targets = ["local", "remote"]
-
-    def process(self, instance):
-        if instance.data.get("farm"):
-            self.log.debug("Should be processed on farm, skipping.")
-            return
-
-        # Collect the out set nodes
-        out_sets = [node for node in instance if node.endswith("out_SET")]
-        if len(out_sets) != 1:
-            raise RuntimeError("Couldn't find exactly one out_SET: "
-                               "{0}".format(out_sets))
-        out_set = out_sets[0]
-        roots = cmds.sets(out_set, query=True)
-
-        # Include all descendants
-        nodes = roots + cmds.listRelatives(roots,
-                                           allDescendents=True,
-                                           fullPath=True) or []
-
-        # Collect the start and end including handles
-        start = instance.data["frameStartHandle"]
-        end = instance.data["frameEndHandle"]
-
-        self.log.info("Extracting animation..")
-        dirname = self.staging_dir(instance)
-
-        parent_dir = self.staging_dir(instance)
-        filename = "{name}.abc".format(**instance.data)
-        path = os.path.join(parent_dir, filename)
-
-        options = {
-            "step": instance.data.get("step", 1.0) or 1.0,
-            "attr": ["cbId"],
-            "writeVisibility": True,
-            "writeCreases": True,
-            "uvWrite": True,
-            "selection": True,
-            "worldSpace": instance.data.get("worldSpace", True),
-            "writeColorSets": instance.data.get("writeColorSets", False),
-            "writeFaceSets": instance.data.get("writeFaceSets", False)
-        }
-
-        if not instance.data.get("includeParentHierarchy", True):
-            # Set the root nodes if we don't want to include parents
-            # The roots are to be considered the ones that are the actual
-            # direct members of the set
-            options["root"] = roots
-
-        if int(cmds.about(version=True)) >= 2017:
-            # Since Maya 2017 alembic supports multiple uv sets - write them.
-            options["writeUVSets"] = True
-
-        if instance.data.get("visibleOnly", False):
-            # If we only want to include nodes that are visible in the frame
-            # range then we need to do our own check. Alembic's `visibleOnly`
-            # flag does not filter out those that are only hidden on some
-            # frames as it counts "animated" or "connected" visibilities as
-            # if it's always visible.
-            nodes = list(iter_visible_nodes_in_range(nodes,
-                                                     start=start,
-                                                     end=end))
-
-        with suspended_refresh():
-            with maintained_selection():
-                cmds.select(nodes, noExpand=True)
-                extract_alembic(file=path,
-                                startFrame=float(start),
-                                endFrame=float(end),
-                                **options)
-
-        if "representations" not in instance.data:
-            instance.data["representations"] = []
-
-        representation = {
-            'name': 'abc',
-            'ext': 'abc',
-            'files': filename,
-            "stagingDir": dirname,
-        }
-        instance.data["representations"].append(representation)
-
-        instance.context.data["cleanupFullPaths"].append(path)
-
-        self.log.info("Extracted {} to {}".format(instance, dirname))
@@ -33,7 +33,7 @@ class ExtractAlembic(openpype.api.Extractor):
             self.log.debug("Should be processed on farm, skipping.")
             return

-        nodes = instance[:]
+        nodes, roots = self.get_members_and_roots(instance)

         # Collect the start and end including handles
         start = float(instance.data.get("frameStartHandle", 1))

@@ -46,10 +46,6 @@ class ExtractAlembic(openpype.api.Extractor):
         attr_prefixes = instance.data.get("attrPrefix", "").split(";")
         attr_prefixes = [value for value in attr_prefixes if value.strip()]

-        # Get extra export arguments
-        writeColorSets = instance.data.get("writeColorSets", False)
-        writeFaceSets = instance.data.get("writeFaceSets", False)
-
         self.log.info("Extracting pointcache..")
         dirname = self.staging_dir(instance)

@@ -63,8 +59,8 @@ class ExtractAlembic(openpype.api.Extractor):
             "attrPrefix": attr_prefixes,
             "writeVisibility": True,
             "writeCreases": True,
-            "writeColorSets": writeColorSets,
-            "writeFaceSets": writeFaceSets,
+            "writeColorSets": instance.data.get("writeColorSets", False),
+            "writeFaceSets": instance.data.get("writeFaceSets", False),
             "uvWrite": True,
             "selection": True,
             "worldSpace": instance.data.get("worldSpace", True)

@@ -74,7 +70,7 @@ class ExtractAlembic(openpype.api.Extractor):
             # Set the root nodes if we don't want to include parents
             # The roots are to be considered the ones that are the actual
             # direct members of the set
-            options["root"] = instance.data.get("setMembers")
+            options["root"] = roots

         if int(cmds.about(version=True)) >= 2017:
             # Since Maya 2017 alembic supports multiple uv sets - write them.

@@ -112,3 +108,28 @@ class ExtractAlembic(openpype.api.Extractor):
         instance.context.data["cleanupFullPaths"].append(path)

         self.log.info("Extracted {} to {}".format(instance, dirname))
+
+    def get_members_and_roots(self, instance):
+        return instance[:], instance.data.get("setMembers")
+
+
+class ExtractAnimation(ExtractAlembic):
+    label = "Extract Animation"
+    families = ["animation"]
+
+    def get_members_and_roots(self, instance):
+
+        # Collect the out set nodes
+        out_sets = [node for node in instance if node.endswith("out_SET")]
+        if len(out_sets) != 1:
+            raise RuntimeError("Couldn't find exactly one out_SET: "
+                               "{0}".format(out_sets))
+        out_set = out_sets[0]
+        roots = cmds.sets(out_set, query=True)
+
+        # Include all descendants
+        nodes = roots + cmds.listRelatives(roots,
+                                           allDescendents=True,
+                                           fullPath=True) or []
+
+        return nodes, roots
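The deleted ExtractAnimation plugin now lives as a subclass of ExtractAlembic,
with get_members_and_roots() as the single point of variation (a template
method): the base class exports whatever nodes and roots the hook returns,
and the animation family only swaps in its "out_SET" collection logic. A
stripped-down sketch of the shape (simplified; not the plugin code itself):

    class AlembicExtractorSketch(object):
        """Template-method sketch, heavily simplified."""

        def process(self, instance):
            nodes, roots = self.get_members_and_roots(instance)
            # ... build options and run extract_alembic on `nodes`,
            # using `roots` when the parent hierarchy is excluded

        def get_members_and_roots(self, instance):
            # Default: every member, roots from the collected set members.
            return instance[:], instance.data.get("setMembers")

    class AnimationExtractorSketch(AlembicExtractorSketch):
        def get_members_and_roots(self, instance):
            # Animation variant: restrict to the "out_SET" contents.
            out_sets = [n for n in instance if n.endswith("out_SET")]
            return out_sets, out_sets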
@@ -152,6 +152,7 @@ class ExtractSlateFrame(openpype.api.Extractor):
         self.log.debug("__ first_frame: {}".format(first_frame))
         self.log.debug("__ slate_first_frame: {}".format(slate_first_frame))

+        above_slate_node = slate_node.dependencies().pop()
         # fallback if files do not exist
         if self._check_frames_exists(instance):
             # Read node

@@ -164,8 +165,16 @@ class ExtractSlateFrame(openpype.api.Extractor):
             r_node["colorspace"].setValue(instance.data["colorspace"])
             previous_node = r_node
             temporary_nodes = [previous_node]
+
+            # adding copy metadata node for correct frame metadata
+            cm_node = nuke.createNode("CopyMetaData")
+            cm_node.setInput(0, previous_node)
+            cm_node.setInput(1, above_slate_node)
+            previous_node = cm_node
+            temporary_nodes.append(cm_node)
+
         else:
-            previous_node = slate_node.dependencies().pop()
+            previous_node = above_slate_node
             temporary_nodes = []

         # only create colorspace baking if toggled on
@@ -319,14 +319,13 @@ def get_current_timeline_items(
     selected_track_count = timeline.GetTrackCount(track_type)

     # loop all tracks and get items
-    _clips = dict()
+    _clips = {}
     for track_index in range(1, (int(selected_track_count) + 1)):
         _track_name = timeline.GetTrackName(track_type, track_index)

         # filter out all unmatched track names
-        if track_name:
-            if _track_name not in track_name:
-                continue
+        if track_name and _track_name not in track_name:
+            continue

         timeline_items = timeline.GetItemListInTrack(
             track_type, track_index)

@@ -348,12 +347,8 @@ def get_current_timeline_items(
             "index": clip_index
         }
         ti_color = ti.GetClipColor()
-        if filter is True:
-            if selecting_color in ti_color:
-                selected_clips.append(data)
-        else:
+        if filter and selecting_color in ti_color or not filter:
             selected_clips.append(data)

     return selected_clips
@@ -506,7 +506,7 @@ class Creator(LegacyCreator):
         super(Creator, self).__init__(*args, **kwargs)
         from openpype.api import get_current_project_settings
         resolve_p_settings = get_current_project_settings().get("resolve")
-        self.presets = dict()
+        self.presets = {}
         if resolve_p_settings:
             self.presets = resolve_p_settings["create"].get(
                 self.__class__.__name__, {})
@@ -116,12 +116,13 @@ class CreateShotClip(resolve.Creator):
                 "order": 0},
             "vSyncTrack": {
                 "value": gui_tracks,  # noqa
-                "type": "QComboBox",
-                "label": "Hero track",
-                "target": "ui",
-                "toolTip": "Select driving track name which should be mastering all others",  # noqa
-                "order": 1}
+                "type": "QComboBox",
+                "label": "Hero track",
+                "target": "ui",
+                "toolTip": "Select driving track name which should be mastering all others",  # noqa
+                "order": 1
+            }
         }
     },
     "publishSettings": {
         "type": "section",

@@ -172,28 +173,31 @@ class CreateShotClip(resolve.Creator):
         "target": "ui",
         "order": 4,
         "value": {
-            "workfileFrameStart": {
-                "value": 1001,
-                "type": "QSpinBox",
-                "label": "Workfiles Start Frame",
-                "target": "tag",
-                "toolTip": "Set workfile starting frame number",  # noqa
-                "order": 0},
-            "handleStart": {
-                "value": 0,
-                "type": "QSpinBox",
-                "label": "Handle start (head)",
-                "target": "tag",
-                "toolTip": "Handle at start of clip",  # noqa
-                "order": 1},
-            "handleEnd": {
-                "value": 0,
-                "type": "QSpinBox",
-                "label": "Handle end (tail)",
-                "target": "tag",
-                "toolTip": "Handle at end of clip",  # noqa
-                "order": 2},
-        }
+            "workfileFrameStart": {
+                "value": 1001,
+                "type": "QSpinBox",
+                "label": "Workfiles Start Frame",
+                "target": "tag",
+                "toolTip": "Set workfile starting frame number",  # noqa
+                "order": 0
+            },
+            "handleStart": {
+                "value": 0,
+                "type": "QSpinBox",
+                "label": "Handle start (head)",
+                "target": "tag",
+                "toolTip": "Handle at start of clip",  # noqa
+                "order": 1
+            },
+            "handleEnd": {
+                "value": 0,
+                "type": "QSpinBox",
+                "label": "Handle end (tail)",
+                "target": "tag",
+                "toolTip": "Handle at end of clip",  # noqa
+                "order": 2
+            }
+        }
     }
 }

@@ -229,8 +233,10 @@ class CreateShotClip(resolve.Creator):
         v_sync_track = widget.result["vSyncTrack"]["value"]

         # sort selected trackItems by
-        sorted_selected_track_items = list()
-        unsorted_selected_track_items = list()
+        sorted_selected_track_items = []
+        unsorted_selected_track_items = []
+        print("_____ selected ______")
+        print(self.selected)
         for track_item_data in self.selected:
             if track_item_data["track"]["name"] in v_sync_track:
                 sorted_selected_track_items.append(track_item_data)

@@ -253,10 +259,10 @@ class CreateShotClip(resolve.Creator):
             "sq_frame_start": sq_frame_start,
             "sq_markers": sq_markers
         }
-
+        print(kwargs)
         for i, track_item_data in enumerate(sorted_selected_track_items):
             self.rename_index = i
-
+            self.log.info(track_item_data)
             # convert track item to timeline media pool item
             track_item = resolve.PublishClip(
                 self, track_item_data, **kwargs).convert()
@@ -23,7 +23,7 @@ class LoadClip(resolve.TimelineItemLoader):
     """

     families = ["render2d", "source", "plate", "render", "review"]
-    representations = ["exr", "dpx", "jpg", "jpeg", "png", "h264", ".mov"]
+    representations = ["exr", "dpx", "jpg", "jpeg", "png", "h264", "mov"]

     label = "Load as clip"
     order = -10
@@ -30,7 +30,8 @@ class PrecollectWorkfile(pyblish.api.ContextPlugin):
             "asset": asset,
             "subset": "{}{}".format(asset, subset.capitalize()),
             "item": project,
-            "family": "workfile"
+            "family": "workfile",
+            "families": []
         }

         # create instance with workfile
@@ -1,4 +1,5 @@
 import json
+import copy

 from openpype.client import get_project
 from openpype.api import ProjectSettings

@@ -373,6 +374,10 @@ class PrepareProjectServer(ServerAction):
             project_name, project_code
         ))
         create_project(project_name, project_code)
+        self.trigger_event(
+            "openpype.project.created",
+            {"project_name": project_name}
+        )

         project_settings = ProjectSettings(project_name)
         project_anatomy_settings = project_settings["project_anatomy"]

@@ -400,6 +405,10 @@ class PrepareProjectServer(ServerAction):
             self.log.debug("- Key \"{}\" set to \"{}\"".format(key, value))
         session.commit()

+        event_data = copy.deepcopy(in_data)
+        event_data["project_name"] = project_name
+        self.trigger_event("openpype.project.prepared", event_data)
+
         return True
@@ -1,7 +1,8 @@
 import time
 import sys
 import json
 import traceback

+import ftrack_api

 from openpype_modules.ftrack.lib import ServerAction
 from openpype_modules.ftrack.lib.avalon_sync import SyncEntitiesFactory

@@ -180,6 +181,13 @@ class SyncToAvalonServer(ServerAction):
             "* Total time: {}".format(time_7 - time_start)
         )

+        if self.entities_factory.project_created:
+            event = ftrack_api.event.base.Event(
+                topic="openpype.project.created",
+                data={"project_name": project_name}
+            )
+            self.session.event_hub.publish(event)
+
         report = self.entities_factory.report()
         if report and report.get("items"):
             default_title = "Synchronization report ({}):".format(
@@ -1,4 +1,5 @@
 import json
+import copy

 from openpype.client import get_project
 from openpype.api import ProjectSettings

@@ -399,6 +400,10 @@ class PrepareProjectLocal(BaseAction):
             project_name, project_code
         ))
         create_project(project_name, project_code)
+        self.trigger_event(
+            "openpype.project.created",
+            {"project_name": project_name}
+        )

         project_settings = ProjectSettings(project_name)
         project_anatomy_settings = project_settings["project_anatomy"]

@@ -433,6 +438,10 @@ class PrepareProjectLocal(BaseAction):
             self.process_identifier()
         )
         self.trigger_action(trigger_identifier, event)

+        event_data = copy.deepcopy(in_data)
+        event_data["project_name"] = project_name
+        self.trigger_event("openpype.project.prepared", event_data)
         return True
@@ -1,7 +1,8 @@
 import time
 import sys
 import json
 import traceback

+import ftrack_api

 from openpype_modules.ftrack.lib import BaseAction, statics_icon
 from openpype_modules.ftrack.lib.avalon_sync import SyncEntitiesFactory

@@ -184,6 +185,13 @@ class SyncToAvalonLocal(BaseAction):
             "* Total time: {}".format(time_7 - time_start)
         )

+        if self.entities_factory.project_created:
+            event = ftrack_api.event.base.Event(
+                topic="openpype.project.created",
+                data={"project_name": project_name}
+            )
+            self.session.event_hub.publish(event)
+
         report = self.entities_factory.report()
         if report and report.get("items"):
             default_title = "Synchronization report ({}):".format(
@@ -443,6 +443,7 @@ class SyncEntitiesFactory:
         }

         self.create_list = []
+        self.project_created = False
         self.unarchive_list = []
         self.updates = collections.defaultdict(dict)

@@ -2214,6 +2215,7 @@ class SyncEntitiesFactory:
         self._avalon_ents_by_name[project_item["name"]] = str(new_id)

         self.create_list.append(project_item)
+        self.project_created = True

         # store mongo id to ftrack entity
         entity = self.entities_dict[self.ft_project_id]["entity"]
@@ -535,7 +535,7 @@ class BaseHandler(object):
         )

     def trigger_event(
-        self, topic, event_data={}, session=None, source=None,
+        self, topic, event_data=None, session=None, source=None,
         event=None, on_error="ignore"
     ):
         if session is None:

@@ -543,6 +543,9 @@ class BaseHandler(object):

         if not source and event:
             source = event.get("source")

+        if event_data is None:
+            event_data = {}
         # Create and trigger event
         event = ftrack_api.event.base.Event(
             topic=topic,
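The signature change above retires Python's shared-mutable-default pitfall:
a dict used as a default value is created once at function definition and
then reused (and mutated) across every call. A minimal self-contained
demonstration, unrelated to the ftrack code itself:

    def buggy(event_data={}):
        event_data["touched"] = True
        return event_data

    print(buggy() is buggy())  # True - both calls share one dict

    def fixed(event_data=None):
        if event_data is None:
            event_data = {}
        event_data["touched"] = True
        return event_data

    print(fixed() is fixed())  # False - each call gets a fresh dict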
@@ -32,11 +32,17 @@ class CollectKitsuEntities(pyblish.api.ContextPlugin):
         context.data["kitsu_project"] = kitsu_project
         self.log.debug("Collect kitsu project: {}".format(kitsu_project))

-        kitsu_asset = gazu.asset.get_asset(zou_asset_data["id"])
-        if not kitsu_asset:
-            raise AssertionError("Asset not found in kitsu!")
-        context.data["kitsu_asset"] = kitsu_asset
-        self.log.debug("Collect kitsu asset: {}".format(kitsu_asset))
+        entity_type = zou_asset_data["type"]
+        if entity_type == "Shot":
+            kitsu_entity = gazu.shot.get_shot(zou_asset_data["id"])
+        else:
+            kitsu_entity = gazu.asset.get_asset(zou_asset_data["id"])
+
+        if not kitsu_entity:
+            raise AssertionError(f"{entity_type} not found in kitsu!")
+
+        context.data["kitsu_entity"] = kitsu_entity
+        self.log.debug(f"Collect kitsu {entity_type}: {kitsu_entity}")

         if zou_task_data:
             kitsu_task = gazu.task.get_task(zou_task_data["id"])

@@ -57,7 +63,7 @@ class CollectKitsuEntities(pyblish.api.ContextPlugin):
             )

             kitsu_task = gazu.task.get_task_by_name(
-                kitsu_asset, kitsu_task_type
+                kitsu_entity, kitsu_task_type
             )
             if not kitsu_task:
                 raise AssertionError("Task not found in kitsu!")
@@ -165,10 +165,12 @@ class Listener:
             zou_ids_and_asset_docs[asset["project_id"]] = project_doc

             # Update
-            asset_doc_id, asset_update = update_op_assets(
+            update_op_result = update_op_assets(
                 self.dbcon, project_doc, [asset], zou_ids_and_asset_docs
-            )[0]
-            self.dbcon.update_one({"_id": asset_doc_id}, asset_update)
+            )
+            if update_op_result:
+                asset_doc_id, asset_update = update_op_result[0]
+                self.dbcon.update_one({"_id": asset_doc_id}, asset_update)

     def _delete_asset(self, data):
         """Delete asset of OP DB."""

@@ -212,10 +214,12 @@ class Listener:
             zou_ids_and_asset_docs[episode["project_id"]] = project_doc

             # Update
-            asset_doc_id, asset_update = update_op_assets(
+            update_op_result = update_op_assets(
                 self.dbcon, project_doc, [episode], zou_ids_and_asset_docs
-            )[0]
-            self.dbcon.update_one({"_id": asset_doc_id}, asset_update)
+            )
+            if update_op_result:
+                asset_doc_id, asset_update = update_op_result[0]
+                self.dbcon.update_one({"_id": asset_doc_id}, asset_update)

     def _delete_episode(self, data):
         """Delete episode of OP DB."""

@@ -260,10 +264,12 @@ class Listener:
             zou_ids_and_asset_docs[sequence["project_id"]] = project_doc

             # Update
-            asset_doc_id, asset_update = update_op_assets(
+            update_op_result = update_op_assets(
                 self.dbcon, project_doc, [sequence], zou_ids_and_asset_docs
-            )[0]
-            self.dbcon.update_one({"_id": asset_doc_id}, asset_update)
+            )
+            if update_op_result:
+                asset_doc_id, asset_update = update_op_result[0]
+                self.dbcon.update_one({"_id": asset_doc_id}, asset_update)

     def _delete_sequence(self, data):
         """Delete sequence of OP DB."""

@@ -308,10 +314,12 @@ class Listener:
             zou_ids_and_asset_docs[shot["project_id"]] = project_doc

             # Update
-            asset_doc_id, asset_update = update_op_assets(
+            update_op_result = update_op_assets(
                 self.dbcon, project_doc, [shot], zou_ids_and_asset_docs
-            )[0]
-            self.dbcon.update_one({"_id": asset_doc_id}, asset_update)
+            )
+            if update_op_result:
+                asset_doc_id, asset_update = update_op_result[0]
+                self.dbcon.update_one({"_id": asset_doc_id}, asset_update)

     def _delete_shot(self, data):
         """Delete shot of OP DB."""
@@ -82,22 +82,37 @@ def update_op_assets(
         item_data["zou"] = item

         # == Asset settings ==
-        # Frame in, fallback on 0
-        frame_in = int(item_data.get("frame_in") or 0)
+        # Frame in, fallback to project's value or default value (1001)
+        # TODO: get default from settings/project_anatomy/attributes.json
+        try:
+            frame_in = int(
+                item_data.pop(
+                    "frame_in", project_doc["data"].get("frameStart")
+                )
+            )
+        except (TypeError, ValueError):
+            frame_in = 1001
         item_data["frameStart"] = frame_in
-        item_data.pop("frame_in", None)
-        # Frame out, fallback on frame_in + duration
-        frames_duration = int(item.get("nb_frames") or 1)
-        frame_out = (
-            item_data["frame_out"]
-            if item_data.get("frame_out")
-            else frame_in + frames_duration
-        )
-        item_data["frameEnd"] = int(frame_out)
-        item_data.pop("frame_out", None)
-        # Fps, fallback to project's value when entity fps is deleted
-        if not item_data.get("fps") and item_doc["data"].get("fps"):
-            item_data["fps"] = project_doc["data"]["fps"]
+        # Frames duration, fallback on 0
+        try:
+            frames_duration = int(item_data.pop("nb_frames", 0))
+        except (TypeError, ValueError):
+            frames_duration = 0
+        # Frame out, fallback on frame_in + duration or project's value or 1001
+        frame_out = item_data.pop("frame_out", None)
+        if not frame_out:
+            frame_out = frame_in + frames_duration
+        try:
+            frame_out = int(frame_out)
+        except (TypeError, ValueError):
+            frame_out = 1001
+        item_data["frameEnd"] = frame_out
+        # Fps, fallback to project's value or default value (25.0)
+        try:
+            fps = float(item_data.get("fps", project_doc["data"].get("fps")))
+        except (TypeError, ValueError):
+            fps = 25.0
+        item_data["fps"] = fps

         # Tasks
         tasks_list = []
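The try/except blocks above make every numeric attribute degrade gracefully:
missing or malformed Zou values fall back to the project document, then to
hardcoded defaults (frame 1001, 25.0 fps). A behavior sketch (illustrative
only; the dicts are made up):

    project_doc = {"data": {"frameStart": 1001, "fps": 24.0}}
    item_data = {"frame_in": None, "fps": "not-a-number"}

    try:
        # key present but None -> int(None) raises TypeError
        frame_in = int(
            item_data.pop("frame_in", project_doc["data"].get("frameStart"))
        )
    except (TypeError, ValueError):
        frame_in = 1001

    try:
        # "not-a-number" -> float() raises ValueError
        fps = float(item_data.get("fps", project_doc["data"].get("fps")))
    except (TypeError, ValueError):
        fps = 25.0

    print(frame_in, fps)  # 1001 25.0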
@@ -106,9 +121,8 @@ def update_op_assets(
             tasks_list = all_tasks_for_asset(item)
         elif item_type == "Shot":
             tasks_list = all_tasks_for_shot(item)
-            # TODO frame in and out
         item_data["tasks"] = {
-            t["task_type_name"]: {"type": t["task_type_name"]}
+            t["task_type_name"]: {"type": t["task_type_name"], "zou": t}
             for t in tasks_list
         }

@@ -229,9 +243,9 @@ def write_project_to_op(project: dict, dbcon: AvalonMongoDB) -> UpdateOne:
     project_data.update(
         {
             "code": project_code,
-            "fps": project["fps"],
-            "resolutionWidth": project["resolution"].split("x")[0],
-            "resolutionHeight": project["resolution"].split("x")[1],
+            "fps": float(project["fps"]),
+            "resolutionWidth": int(project["resolution"].split("x")[0]),
+            "resolutionHeight": int(project["resolution"].split("x")[1]),
             "zou_id": project["id"],
         }
     )
@@ -748,6 +748,10 @@ class CreateContext:
     def host_name(self):
         return os.environ["AVALON_APP"]

+    @property
+    def project_name(self):
+        return self.dbcon.active_project()
+
     @property
     def log(self):
         """Dynamic access to logger."""

@@ -839,9 +843,8 @@ class CreateContext:
         self.plugins_with_defs = plugins_with_defs

         # Prepare settings
-        project_name = self.dbcon.Session["AVALON_PROJECT"]
         system_settings = get_system_settings()
-        project_settings = get_project_settings(project_name)
+        project_settings = get_project_settings(self.project_name)

         # Discover and prepare creators
         creators = {}

@@ -873,9 +876,9 @@ class CreateContext:
                 continue

             creator = creator_class(
-                self,
-                system_settings,
                 project_settings,
+                system_settings,
+                self,
                 self.headless
             )
             creators[creator_identifier] = creator
@@ -70,7 +70,7 @@ class BaseCreator:
     host_name = None

     def __init__(
-        self, create_context, system_settings, project_settings, headless=False
+        self, project_settings, system_settings, create_context, headless=False
     ):
         # Reference to CreateContext
         self.create_context = create_context

@@ -92,6 +92,12 @@ class BaseCreator:
         """Family that plugin represents."""
         pass

+    @property
+    def project_name(self):
+        """Project name in which the creator currently works."""
+
+        return self.create_context.project_name
+
     @property
     def log(self):
         if self._log is None:
@@ -208,10 +208,12 @@ def get_representation_context(representation):

     assert representation is not None, "This is a bug"

-    if not isinstance(representation, dict):
-        representation = get_representation_by_id(representation)
-
     project_name = legacy_io.active_project()
+    if not isinstance(representation, dict):
+        representation = get_representation_by_id(
+            project_name, representation
+        )

     version, subset, asset, project = get_representation_parents(
         project_name, representation
     )

@@ -394,7 +396,7 @@ def update_container(container, version=-1):
     assert current_representation is not None, "This is a bug"

     current_version = get_version_by_id(
-        project_name, current_representation["_id"], fields=["parent"]
+        project_name, current_representation["parent"], fields=["parent"]
     )
     if version == -1:
         new_version = get_last_version_by_subset_id(
@@ -4,13 +4,14 @@ import uuid

 import clique
 from pymongo import UpdateOne
-import ftrack_api
 import qargparse
 from Qt import QtWidgets, QtCore

+from openpype.client import get_versions, get_representations
 from openpype import style
 from openpype.pipeline import load, AvalonMongoDB, Anatomy
 from openpype.lib import StringTemplate
+from openpype.modules import ModulesManager


 class DeleteOldVersions(load.SubsetLoaderPlugin):

@@ -197,18 +198,10 @@ class DeleteOldVersions(load.SubsetLoaderPlugin):
     def get_data(self, context, versions_count):
         subset = context["subset"]
         asset = context["asset"]
-        anatomy = Anatomy(context["project"]["name"])
+        project_name = context["project"]["name"]
+        anatomy = Anatomy(project_name)

-        self.dbcon = AvalonMongoDB()
-        self.dbcon.Session["AVALON_PROJECT"] = context["project"]["name"]
-        self.dbcon.install()
-
-        versions = list(
-            self.dbcon.find({
-                "type": "version",
-                "parent": {"$in": [subset["_id"]]}
-            })
-        )
+        versions = list(get_versions(project_name, subset_ids=[subset["_id"]]))

         versions_by_parent = collections.defaultdict(list)
         for ent in versions:

@@ -267,10 +260,9 @@ class DeleteOldVersions(load.SubsetLoaderPlugin):
             print(msg)
             return

-        repres = list(self.dbcon.find({
-            "type": "representation",
-            "parent": {"$in": version_ids}
-        }))
+        repres = list(get_representations(
+            project_name, version_ids=version_ids
+        ))

         self.log.debug(
             "Collected representations to remove ({})".format(len(repres))

@@ -329,7 +321,7 @@ class DeleteOldVersions(load.SubsetLoaderPlugin):

         return data

-    def main(self, data, remove_publish_folder):
+    def main(self, project_name, data, remove_publish_folder):
         # Size of files.
         size = 0
         if not data:

@@ -366,30 +358,70 @@ class DeleteOldVersions(load.SubsetLoaderPlugin):
         ))

         if mongo_changes_bulk:
-            self.dbcon.bulk_write(mongo_changes_bulk)
+            dbcon = AvalonMongoDB()
+            dbcon.Session["AVALON_PROJECT"] = project_name
+            dbcon.install()
+            dbcon.bulk_write(mongo_changes_bulk)
+            dbcon.uninstall()

-        self.dbcon.uninstall()
+        self._ftrack_delete_versions(data)
+
+        return size
+
+    def _ftrack_delete_versions(self, data):
+        """Delete version on ftrack.
+
+        Handling of ftrack logic in this plugin is not ideal. But in OP3 it is
+        almost impossible to solve the issue any other way.
+
+        Note:
+            Asset versions on ftrack are not deleted but marked as
+            "not published", which makes them invisible.
+
+        Args:
+            data (dict): Data sent to subset loader with full context.
+        """
+
+        # First check for ftrack id on asset document
+        # - skip if there is none
+        asset_ftrack_id = data["asset"]["data"].get("ftrackId")
+        if not asset_ftrack_id:
+            self.log.info((
+                "Asset does not have filled ftrack id. Skipped delete"
+                " of ftrack version."
+            ))
+            return
+
+        # Check if ftrack module is enabled
+        modules_manager = ModulesManager()
+        ftrack_module = modules_manager.modules_by_name.get("ftrack")
+        if not ftrack_module or not ftrack_module.enabled:
+            return
+
+        import ftrack_api
+
+        session = ftrack_api.Session()
+        subset_name = data["subset"]["name"]
+        versions = {
+            '"{}"'.format(version_doc["name"])
+            for version_doc in data["versions"]
+        }
+        asset_versions = session.query(
+            (
+                "select id, is_published from AssetVersion where"
+                " asset.parent.id is \"{}\""
+                " and asset.name is \"{}\""
+                " and version in ({})"
+            ).format(
+                asset_ftrack_id,
+                subset_name,
+                ",".join(versions)
+            )
+        ).all()

         # Set attribute `is_published` to `False` on ftrack AssetVersions
-        session = ftrack_api.Session()
-        query = (
-            "AssetVersion where asset.parent.id is \"{}\""
-            " and asset.name is \"{}\""
-            " and version is \"{}\""
-        )
-        for v in data["versions"]:
-            try:
-                ftrack_version = session.query(
-                    query.format(
-                        data["asset"]["data"]["ftrackId"],
-                        data["subset"]["name"],
-                        v["name"]
-                    )
-                ).one()
-            except ftrack_api.exception.NoResultFoundError:
-                continue
-
-            ftrack_version["is_published"] = False
+        for asset_version in asset_versions:
+            asset_version["is_published"] = False

         try:
             session.commit()

@@ -402,8 +434,6 @@ class DeleteOldVersions(load.SubsetLoaderPlugin):
             self.log.error(msg)
             self.message(msg)

-        return size
-
     def load(self, contexts, name=None, namespace=None, options=None):
         try:
             size = 0

@@ -422,7 +452,8 @@ class DeleteOldVersions(load.SubsetLoaderPlugin):
                 if not data:
                     continue

-                size += self.main(data, remove_publish_folder)
+                project_name = context["project"]["name"]
+                size += self.main(project_name, data, remove_publish_folder)
                 print("Progressing {}/{}".format(count + 1, len(contexts)))

             msg = "Total size of files: " + self.sizeof_fmt(size)

@@ -448,7 +479,7 @@ class CalculateOldVersions(DeleteOldVersions):
             )
         ]

-    def main(self, data, remove_publish_folder):
+    def main(self, project_name, data, remove_publish_folder):
         size = 0

         if not data:
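The ftrack handling above also replaces an N+1 query pattern: instead of one
session.query(...).one() per version (with NoResultFoundError swallowed), a
single "version in (...)" query fetches every matching AssetVersion at once.
A sketch of the query string it builds (illustrative only; the ftrack id and
names are made up, and set ordering may vary):

    versions = {'"1"', '"2"'}
    query = (
        "select id, is_published from AssetVersion where"
        " asset.parent.id is \"{}\""
        " and asset.name is \"{}\""
        " and version in ({})"
    ).format("1234-abcd", "renderMain", ",".join(versions))
    print(query)
    # select id, is_published from AssetVersion where asset.parent.id is
    # "1234-abcd" and asset.name is "renderMain" and version in ("1","2")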
@@ -3,8 +3,9 @@ from collections import defaultdict

 from Qt import QtWidgets, QtCore, QtGui

+from openpype.client import get_representations
 from openpype.lib import config
-from openpype.pipeline import load, AvalonMongoDB, Anatomy
+from openpype.pipeline import load, Anatomy
 from openpype import resources, style

 from openpype.lib.delivery import (

@@ -68,17 +69,13 @@ class DeliveryOptionsDialog(QtWidgets.QDialog):

         self.setStyleSheet(style.load_stylesheet())

-        project = contexts[0]["project"]["name"]
-        self.anatomy = Anatomy(project)
+        project_name = contexts[0]["project"]["name"]
+        self.anatomy = Anatomy(project_name)
         self._representations = None
         self.log = log
         self.currently_uploaded = 0

-        self.dbcon = AvalonMongoDB()
-        self.dbcon.Session["AVALON_PROJECT"] = project
-        self.dbcon.install()
-
-        self._set_representations(contexts)
+        self._set_representations(project_name, contexts)

         dropdown = QtWidgets.QComboBox()
         self.templates = self._get_templates(self.anatomy)

@@ -238,13 +235,12 @@ class DeliveryOptionsDialog(QtWidgets.QDialog):

         return templates

-    def _set_representations(self, contexts):
+    def _set_representations(self, project_name, contexts):
         version_ids = [context["version"]["_id"] for context in contexts]

-        repres = list(self.dbcon.find({
-            "type": "representation",
-            "parent": {"$in": version_ids}
-        }))
+        repres = list(get_representations(
+            project_name, version_ids=version_ids
+        ))

         self._representations = repres
@ -27,6 +27,11 @@ import collections
|
|||
|
||||
import pyblish.api
|
||||
|
||||
from openpype.client import (
|
||||
get_assets,
|
||||
get_subsets,
|
||||
get_last_versions
|
||||
)
|
||||
from openpype.pipeline import legacy_io
|
||||
|
||||
|
||||
|
|
@ -44,13 +49,14 @@ class CollectAnatomyInstanceData(pyblish.api.ContextPlugin):
|
|||
def process(self, context):
|
||||
self.log.info("Collecting anatomy data for all instances.")
|
||||
|
||||
-        self.fill_missing_asset_docs(context)
-        self.fill_latest_versions(context)
+        project_name = legacy_io.active_project()
+        self.fill_missing_asset_docs(context, project_name)
+        self.fill_latest_versions(context, project_name)
         self.fill_anatomy_data(context)

         self.log.info("Anatomy Data collection finished.")

-    def fill_missing_asset_docs(self, context):
+    def fill_missing_asset_docs(self, context, project_name):
         self.log.debug("Querying asset documents for instances.")

         context_asset_doc = context.data.get("assetEntity")

@@ -84,10 +90,8 @@ class CollectAnatomyInstanceData(pyblish.api.ContextPlugin):
         self.log.debug("Querying asset documents with names: {}".format(
             ", ".join(["\"{}\"".format(name) for name in asset_names])
         ))
-        asset_docs = legacy_io.find({
-            "type": "asset",
-            "name": {"$in": asset_names}
-        })
+        asset_docs = get_assets(project_name, asset_names=asset_names)
         asset_docs_by_name = {
             asset_doc["name"]: asset_doc
             for asset_doc in asset_docs

@@ -111,7 +115,7 @@ class CollectAnatomyInstanceData(pyblish.api.ContextPlugin):
             "Not found asset documents with names \"{}\"."
         ).format(joined_asset_names))

-    def fill_latest_versions(self, context):
+    def fill_latest_versions(self, context, project_name):
         """Try to find latest version for each instance's subset.

         Key "latestVersion" is always set to latest version or `None`.

@@ -126,7 +130,7 @@ class CollectAnatomyInstanceData(pyblish.api.ContextPlugin):
         self.log.debug("Querying latest versions for instances.")

         hierarchy = {}
-        subset_filters = []
+        names_by_asset_ids = collections.defaultdict(set)
         for instance in context:
             # Make sure `"latestVersion"` key is set
             latest_version = instance.data.get("latestVersion")

@@ -147,67 +151,33 @@ class CollectAnatomyInstanceData(pyblish.api.ContextPlugin):
             if subset_name not in hierarchy[asset_id]:
                 hierarchy[asset_id][subset_name] = []
             hierarchy[asset_id][subset_name].append(instance)
-            subset_filters.append({
-                "parent": asset_id,
-                "name": subset_name
-            })
+            names_by_asset_ids[asset_id].add(subset_name)

         subset_docs = []
-        if subset_filters:
-            subset_docs = list(legacy_io.find({
-                "type": "subset",
-                "$or": subset_filters
-            }))
+        if names_by_asset_ids:
+            subset_docs = list(get_subsets(
+                project_name, names_by_asset_ids=names_by_asset_ids
+            ))

         subset_ids = [
             subset_doc["_id"]
             for subset_doc in subset_docs
         ]

-        last_version_by_subset_id = self._query_last_versions(subset_ids)
+        last_version_docs_by_subset_id = get_last_versions(
+            project_name, subset_ids, fields=["name"]
+        )
         for subset_doc in subset_docs:
             subset_id = subset_doc["_id"]
-            last_version = last_version_by_subset_id.get(subset_id)
-            if last_version is None:
+            last_version_doc = last_version_docs_by_subset_id.get(subset_id)
+            if last_version_doc is None:
                 continue

             asset_id = subset_doc["parent"]
             subset_name = subset_doc["name"]
             _instances = hierarchy[asset_id][subset_name]
             for _instance in _instances:
-                _instance.data["latestVersion"] = last_version
-
-    def _query_last_versions(self, subset_ids):
-        """Retrieve all latest versions for entered subset_ids.
-
-        Args:
-            subset_ids (list): List of subset ids with type `ObjectId`.
-
-        Returns:
-            dict: Key is subset id and value is last version name.
-        """
-        _pipeline = [
-            # Find all versions of those subsets
-            {"$match": {
-                "type": "version",
-                "parent": {"$in": subset_ids}
-            }},
-            # Sorting versions all together
-            {"$sort": {"name": 1}},
-            # Group them by "parent", but only take the last
-            {"$group": {
-                "_id": "$parent",
-                "_version_id": {"$last": "$_id"},
-                "name": {"$last": "$name"}
-            }}
-        ]
-
-        last_version_by_subset_id = {}
-        for doc in legacy_io.aggregate(_pipeline):
-            subset_id = doc["_id"]
-            last_version_by_subset_id[subset_id] = doc["name"]
-
-        return last_version_by_subset_id
+                _instance.data["latestVersion"] = last_version_doc["name"]

     def fill_anatomy_data(self, context):
         self.log.debug("Storing anatomy data to instance data.")
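Taken together, the collector now resolves latest versions with two client calls instead of a hand-built `$or` filter and a private aggregation helper. A minimal sketch of the new query flow, assuming the `get_subsets` and `get_last_versions` signatures used above (project name and asset id are hypothetical placeholders):

    from openpype.client import get_subsets, get_last_versions

    project_name = "demo_project"  # placeholder project
    # Subset names grouped by the asset id they live under (placeholder id).
    names_by_asset_ids = {
        "62c41dce46c0f7d1e807d7f8": {"modelMain", "renderMain"},
    }
    subset_docs = list(get_subsets(
        project_name, names_by_asset_ids=names_by_asset_ids
    ))
    subset_ids = [subset_doc["_id"] for subset_doc in subset_docs]
    # Limit the queried fields; only the version name is needed here.
    last_version_docs = get_last_versions(
        project_name, subset_ids, fields=["name"]
    )
    for subset_doc in subset_docs:
        version_doc = last_version_docs.get(subset_doc["_id"])
        if version_doc is not None:
            print(subset_doc["name"], "->", version_doc["name"])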
@@ -10,6 +10,7 @@ Provides:

 import pyblish.api

+from openpype.client import get_project, get_asset_by_name
 from openpype.pipeline import legacy_io

@@ -25,10 +26,7 @@ class CollectAvalonEntities(pyblish.api.ContextPlugin):
         asset_name = legacy_io.Session["AVALON_ASSET"]
         task_name = legacy_io.Session["AVALON_TASK"]

-        project_entity = legacy_io.find_one({
-            "type": "project",
-            "name": project_name
-        })
+        project_entity = get_project(project_name)
         assert project_entity, (
             "Project '{0}' was not found."
         ).format(project_name)

@@ -39,11 +37,8 @@ class CollectAvalonEntities(pyblish.api.ContextPlugin):
         if not asset_name:
             self.log.info("Context is not set. Can't collect global data.")
             return
-        asset_entity = legacy_io.find_one({
-            "type": "asset",
-            "name": asset_name,
-            "parent": project_entity["_id"]
-        })
+
+        asset_entity = get_asset_by_name(project_name, asset_name)
         assert asset_entity, (
             "No asset found by the name '{0}' in project '{1}'"
         ).format(asset_name, project_name)
@@ -1,7 +1,6 @@
-from bson.objectid import ObjectId
-
 import pyblish.api

+from openpype.client import get_representations
 from openpype.pipeline import (
     registered_host,
     legacy_io,

@@ -39,23 +38,29 @@ class CollectSceneLoadedVersions(pyblish.api.ContextPlugin):
             return

         loaded_versions = []
-        _containers = list(host.ls())
-        _repr_ids = [ObjectId(c["representation"]) for c in _containers]
-        repre_docs = legacy_io.find(
-            {"_id": {"$in": _repr_ids}},
-            projection={"_id": 1, "parent": 1}
+        containers = list(host.ls())
+        repre_ids = {
+            container["representation"]
+            for container in containers
+        }
+
+        project_name = legacy_io.active_project()
+        repre_docs = get_representations(
+            project_name,
+            representation_ids=repre_ids,
+            fields=["_id", "parent"]
         )
-        version_by_repr = {
-            str(doc["_id"]): doc["parent"]
+        repre_doc_by_str_id = {
+            str(doc["_id"]): doc
             for doc in repre_docs
         }

         # QUESTION should we add same representation id when loaded multiple
         # times?
-        for con in _containers:
+        for con in containers:
             repre_id = con["representation"]
-            version_id = version_by_repr.get(repre_id)
-            if version_id is None:
+            repre_doc = repre_doc_by_str_id.get(repre_id)
+            if repre_doc is None:
                 self.log.warning((
                     "Skipping container,"
                     " did not find representation document. {}"

@@ -66,8 +71,8 @@ class CollectSceneLoadedVersions(pyblish.api.ContextPlugin):
             # may have more than one representation of the same version
             version = {
                 "subsetName": con["name"],
-                "representation": ObjectId(repre_id),
-                "version": version_id,
+                "representation": repre_doc["_id"],
+                "version": repre_doc["parent"],
             }
             loaded_versions.append(version)
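The collector above now keeps representation ids as the strings stored on containers and leaves id conversion to `get_representations`, so mapping documents by `str(doc["_id"])` works regardless of host. A small sketch under those assumptions (container values are hypothetical):

    from openpype.client import get_representations

    project_name = "demo_project"  # placeholder project
    containers = [  # hypothetical container data as returned by host.ls()
        {"name": "modelMain", "representation": "62c41dce46c0f7d1e807d7f9"},
    ]
    repre_ids = {con["representation"] for con in containers}
    repre_docs = get_representations(
        project_name, representation_ids=repre_ids, fields=["_id", "parent"]
    )
    repre_doc_by_str_id = {str(doc["_id"]): doc for doc in repre_docs}
    for con in containers:
        repre_doc = repre_doc_by_str_id.get(con["representation"])
        if repre_doc is None:
            continue  # container points at a removed representation
        version_id = repre_doc["parent"]  # the loaded version id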
@@ -1,5 +1,11 @@
 from copy import deepcopy
 import pyblish.api
+from openpype.client import (
+    get_project,
+    get_asset_by_id,
+    get_asset_by_name,
+    get_archived_assets
+)
 from openpype.pipeline import legacy_io

@@ -19,14 +25,14 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin):
         if not legacy_io.Session:
             legacy_io.install()

+        project_name = legacy_io.active_project()
         hierarchy_context = self._get_active_assets(context)
         self.log.debug("__ hierarchy_context: {}".format(hierarchy_context))

         self.project = None
-        self.import_to_avalon(hierarchy_context)
+        self.import_to_avalon(project_name, hierarchy_context)

-    def import_to_avalon(self, input_data, parent=None):
+    def import_to_avalon(self, project_name, input_data, parent=None):
         for name in input_data:
             self.log.info("input_data[name]: {}".format(input_data[name]))
             entity_data = input_data[name]

@@ -62,7 +68,7 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin):
             update_data = True
             # Process project
             if entity_type.lower() == "project":
-                entity = legacy_io.find_one({"type": "project"})
+                entity = get_project(project_name)
                 # TODO: should be in validator?
                 assert (entity is not None), "Did not find project in DB"

@@ -79,7 +85,7 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin):
                 )
             # Else process asset
             else:
-                entity = legacy_io.find_one({"type": "asset", "name": name})
+                entity = get_asset_by_name(project_name, name)
                 if entity:
                     # Do not override data, only update
                     cur_entity_data = entity.get("data") or {}

@@ -103,10 +109,10 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin):
                     # Skip updating data
                     update_data = False

-                archived_entities = legacy_io.find({
-                    "type": "archived_asset",
-                    "name": name
-                })
+                archived_entities = get_archived_assets(
+                    project_name,
+                    asset_names=[name]
+                )
                 unarchive_entity = None
                 for archived_entity in archived_entities:
                     archived_parents = (

@@ -120,7 +126,9 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin):

                 if unarchive_entity is None:
                     # Create entity if doesn't exist
-                    entity = self.create_avalon_asset(name, data)
+                    entity = self.create_avalon_asset(
+                        project_name, name, data
+                    )
                 else:
                     # Unarchive if entity was archived
                     entity = self.unarchive_entity(unarchive_entity, data)

@@ -133,7 +141,9 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin):
                 )

             if "childs" in entity_data:
-                self.import_to_avalon(entity_data["childs"], entity)
+                self.import_to_avalon(
+                    project_name, entity_data["childs"], entity
+                )

     def unarchive_entity(self, entity, data):
         # Unarchived asset should not use same data

@@ -151,7 +161,7 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin):
         )
         return new_entity

-    def create_avalon_asset(self, name, data):
+    def create_avalon_asset(self, project_name, name, data):
         item = {
             "schema": "openpype:asset-3.0",
             "name": name,

@@ -162,7 +172,7 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin):
         self.log.debug("Creating asset: {}".format(item))
         entity_id = legacy_io.insert_one(item).inserted_id

-        return legacy_io.find_one({"_id": entity_id})
+        return get_asset_by_id(project_name, entity_id)

     def _get_active_assets(self, context):
         """ Returns only asset dictionary.
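Threading `project_name` through `import_to_avalon`, `create_avalon_asset` and the recursive `childs` calls keeps the extractor independent of the global session project while it walks the hierarchy. The unarchive lookup it now relies on reduces to a sketch like this (name and project are hypothetical placeholders):

    from openpype.client import get_archived_assets

    project_name = "demo_project"  # placeholder project
    name = "sh010"  # placeholder asset name
    unarchive_entity = None
    for archived_entity in get_archived_assets(project_name, asset_names=[name]):
        # The plugin additionally compares the archived document's parents
        # with the new hierarchy before picking one; simplified here.
        unarchive_entity = archived_entity
        break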
@@ -71,18 +71,12 @@ class ExtractThumbnail(pyblish.api.InstancePlugin):
         if not is_oiio_supported():
             thumbnail_created = self.create_thumbnail_ffmpeg(full_input_path, full_output_path)  # noqa
         else:
-            # Check if the file can be read by OIIO
-            oiio_tool_path = get_oiio_tools_path()
-            args = [
-                oiio_tool_path, "--info", "-i", full_output_path
-            ]
-            returncode = execute(args, silent=True)
-            # If the input can read by OIIO then use OIIO method for
-            # conversion otherwise use ffmpeg
-            if returncode == 0:
-                self.log.info("Input can be read by OIIO, converting with oiiotool now.")  # noqa
-                thumbnail_created = self.create_thumbnail_oiio(full_input_path, full_output_path)  # noqa
-            else:
+            self.log.info("Trying to convert with OIIO")  # noqa
+            thumbnail_created = self.create_thumbnail_oiio(full_input_path, full_output_path)  # noqa

         if not thumbnail_created:
             self.log.info("Converting with FFMPEG because input can't be read by OIIO.")  # noqa
             thumbnail_created = self.create_thumbnail_ffmpeg(full_input_path, full_output_path)  # noqa
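The rewritten extractor drops the `oiiotool --info` pre-check, which notably probed `full_output_path` rather than the input file, and simply attempts the OIIO conversion first with ffmpeg as the fallback. The control flow reduces to an attempt-then-fallback shape; a self-contained sketch with stubbed converters (names are illustrative, not the plugin API):

    def convert_thumbnail(oiio_supported, try_oiio, try_ffmpeg):
        # Each converter returns True when the thumbnail was written.
        created = False
        if oiio_supported:
            created = try_oiio()
        if not created:
            # ffmpeg acts as the safety net for inputs OIIO cannot read.
            created = try_ffmpeg()
        return created

    # Hypothetical usage: OIIO is available but fails, ffmpeg succeeds.
    print(convert_thumbnail(True, lambda: False, lambda: True))  # True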
@@ -8,6 +8,12 @@ from bson.objectid import ObjectId
 from pymongo import InsertOne, ReplaceOne
 import pyblish.api

+from openpype.client import (
+    get_version_by_id,
+    get_hero_version_by_subset_id,
+    get_archived_representations,
+    get_representations,
+)
 from openpype.lib import (
     create_hard_link,
     filter_profiles

@@ -85,9 +91,13 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin):
             hero_template
         ))

-        self.integrate_instance(instance, template_key, hero_template)
+        self.integrate_instance(
+            instance, project_name, template_key, hero_template
+        )

-    def integrate_instance(self, instance, template_key, hero_template):
+    def integrate_instance(
+        self, instance, project_name, template_key, hero_template
+    ):
         anatomy = instance.context.data["anatomy"]
         published_repres = instance.data["published_representations"]
         hero_publish_dir = self.get_publish_dir(instance, template_key)

@@ -118,8 +128,8 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin):
                 "Published version entity was not sent in representation data."
                 " Querying entity from database."
             ))
-            src_version_entity = (
-                self.version_from_representations(published_repres)
+            src_version_entity = self.version_from_representations(
+                project_name, published_repres
             )

             if not src_version_entity:

@@ -170,8 +180,8 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin):
             other_file_paths_mapping.append((file_path, dst_filepath))

         # Current version
-        old_version, old_repres = (
-            self.current_hero_ents(src_version_entity)
+        old_version, old_repres = self.current_hero_ents(
+            project_name, src_version_entity
         )

         old_repres_by_name = {

@@ -223,11 +233,11 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin):
         if old_repres_by_name:
             old_repres_to_delete = old_repres_by_name

-        archived_repres = list(legacy_io.find({
-            # Check what is type of archived representation
-            "type": "archived_repsentation",
-            "parent": new_version_id
-        }))
+        archived_repres = list(get_archived_representations(
+            project_name,
+            version_ids=[new_version_id]
+        ))
         archived_repres_by_name = {}
         for repre in archived_repres:
             repre_name_low = repre["name"].lower()

@@ -586,25 +596,23 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin):

         shutil.copy(src_path, dst_path)

-    def version_from_representations(self, repres):
+    def version_from_representations(self, project_name, repres):
         for repre in repres:
-            version = legacy_io.find_one({"_id": repre["parent"]})
+            version = get_version_by_id(project_name, repre["parent"])
             if version:
                 return version

-    def current_hero_ents(self, version):
-        hero_version = legacy_io.find_one({
-            "parent": version["parent"],
-            "type": "hero_version"
-        })
+    def current_hero_ents(self, project_name, version):
+        hero_version = get_hero_version_by_subset_id(
+            project_name, version["parent"]
+        )

         if not hero_version:
             return (None, [])

-        hero_repres = list(legacy_io.find({
-            "parent": hero_version["_id"],
-            "type": "representation"
-        }))
+        hero_repres = list(get_representations(
+            project_name, version_ids=[hero_version["_id"]]
+        ))
         return (hero_version, hero_repres)

     def _update_path(self, anatomy, path, src_file, dst_file):
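Since `version["parent"]` holds the subset id, the hero lookup pairs naturally with `get_hero_version_by_subset_id`, and its representations come from a single `get_representations` call. A sketch with placeholder values:

    from openpype.client import (
        get_hero_version_by_subset_id,
        get_representations,
    )

    project_name = "demo_project"  # placeholder project
    src_version = {"parent": "62c41dce46c0f7d1e807d7fa"}  # placeholder subset id
    hero_version = get_hero_version_by_subset_id(
        project_name, src_version["parent"]
    )
    if hero_version is None:
        old_version, old_repres = None, []
    else:
        old_version = hero_version
        old_repres = list(get_representations(
            project_name, version_ids=[hero_version["_id"]]
        ))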
@@ -16,6 +16,15 @@ from pymongo import DeleteOne, InsertOne
 import pyblish.api

 import openpype.api
+from openpype.client import (
+    get_asset_by_name,
+    get_subset_by_id,
+    get_subset_by_name,
+    get_version_by_id,
+    get_version_by_name,
+    get_representations,
+    get_archived_representations,
+)
 from openpype.lib.profiles_filtering import filter_profiles
 from openpype.lib import (
     prepare_template_data,

@@ -201,6 +210,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
         context = instance.context

         project_entity = instance.data["projectEntity"]
+        project_name = project_entity["name"]

         context_asset_name = None
         context_asset_doc = context.data.get("assetEntity")

@@ -210,11 +220,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
         asset_name = instance.data["asset"]
         asset_entity = instance.data.get("assetEntity")
         if not asset_entity or asset_entity["name"] != context_asset_name:
-            asset_entity = legacy_io.find_one({
-                "type": "asset",
-                "name": asset_name,
-                "parent": project_entity["_id"]
-            })
+            asset_entity = get_asset_by_name(project_name, asset_name)
             assert asset_entity, (
                 "No asset found by the name \"{0}\" in project \"{1}\""
             ).format(asset_name, project_entity["name"])

@@ -270,7 +276,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
             "Establishing staging directory @ {0}".format(stagingdir)
         )

-        subset = self.get_subset(asset_entity, instance)
+        subset = self.get_subset(project_name, asset_entity, instance)
         instance.data["subsetEntity"] = subset

         version_number = instance.data["version"]

@@ -297,11 +303,9 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
             for _repre in repres
         ]

-        existing_version = legacy_io.find_one({
-            'type': 'version',
-            'parent': subset["_id"],
-            'name': version_number
-        })
+        existing_version = get_version_by_name(
+            project_name, version_number, subset["_id"]
+        )

         if existing_version is None:
             version_id = legacy_io.insert_one(version).inserted_id

@@ -322,10 +326,9 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
             version_id = existing_version['_id']

             # Find representations of existing version and archive them
-            current_repres = list(legacy_io.find({
-                "type": "representation",
-                "parent": version_id
-            }))
+            current_repres = list(get_representations(
+                project_name, version_ids=[version_id]
+            ))
             bulk_writes = []
             for repre in current_repres:
                 if append_repres:

@@ -345,18 +348,17 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):

             # bulk updates
             if bulk_writes:
-                project_name = legacy_io.Session["AVALON_PROJECT"]
                 legacy_io.database[project_name].bulk_write(
                     bulk_writes
                 )

-        version = legacy_io.find_one({"_id": version_id})
+        version = get_version_by_id(project_name, version_id)
         instance.data["versionEntity"] = version

-        existing_repres = list(legacy_io.find({
-            "parent": version_id,
-            "type": "archived_representation"
-        }))
+        existing_repres = list(get_archived_representations(
+            project_name,
+            version_ids=[version_id]
+        ))

         instance.data['version'] = version['name']

@@ -792,13 +794,9 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):

         create_hard_link(src, dst)

-    def get_subset(self, asset, instance):
+    def get_subset(self, project_name, asset, instance):
         subset_name = instance.data["subset"]
-        subset = legacy_io.find_one({
-            "type": "subset",
-            "parent": asset["_id"],
-            "name": subset_name
-        })
+        subset = get_subset_by_name(project_name, subset_name, asset["_id"])

         if subset is None:
             self.log.info("Subset '%s' not found, creating ..." % subset_name)

@@ -825,7 +823,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
             "parent": asset["_id"]
         }).inserted_id

-        subset = legacy_io.find_one({"_id": _id})
+        subset = get_subset_by_id(project_name, _id)

         # QUESTION Why is changing of group and updating its
         # families in 'get_subset'?
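One subtlety of the swap above: `get_version_by_name` takes the version number before the subset id, mirroring the old `{"parent": ..., "name": ...}` filter with reordered arguments, and `get_subset_by_name` follows the same name-then-parent order. A sketch with placeholder values:

    from openpype.client import get_subset_by_name, get_version_by_name

    project_name = "demo_project"  # placeholder values throughout
    asset_id = "62c41dce46c0f7d1e807d7fb"
    subset = get_subset_by_name(project_name, "modelMain", asset_id)
    if subset is not None:
        existing_version = get_version_by_name(
            project_name, 3, subset["_id"]
        )
        if existing_version is None:
            print("Version 3 of modelMain is not published yet")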
@@ -8,6 +8,7 @@ import six
 import pyblish.api
 from bson.objectid import ObjectId

+from openpype.client import get_version_by_id
 from openpype.pipeline import legacy_io

@@ -70,7 +71,7 @@ class IntegrateThumbnails(pyblish.api.InstancePlugin):

         thumbnail_template = anatomy.templates["publish"]["thumbnail"]

-        version = legacy_io.find_one({"_id": thumb_repre["parent"]})
+        version = get_version_by_id(project_name, thumb_repre["parent"])
         if not version:
             raise AssertionError(
                 "There does not exist version with id {}".format(
@@ -3,6 +3,7 @@ from pprint import pformat
 import pyblish.api

 from openpype.pipeline import legacy_io
+from openpype.client import get_assets


 class ValidateEditorialAssetName(pyblish.api.ContextPlugin):

@@ -29,8 +30,10 @@ class ValidateEditorialAssetName(pyblish.api.ContextPlugin):
         if not legacy_io.Session:
             legacy_io.install()

-        db_assets = list(legacy_io.find(
-            {"type": "asset"}, {"name": 1, "data.parents": 1}))
+        project_name = legacy_io.active_project()
+        db_assets = list(get_assets(
+            project_name, fields=["name", "data.parents"]
+        ))
         self.log.debug("__ db_assets: {}".format(db_assets))

         asset_db_docs = {
@@ -98,7 +98,7 @@
             ],
             "reel_group_name": "OpenPype_Reels",
             "reel_name": "Loaded",
-            "clip_name_template": "{asset}_{subset}_{output}"
+            "clip_name_template": "{asset}_{subset}<_{output}>"
         },
         "LoadClipBatch": {
             "enabled": true,

@@ -121,7 +121,7 @@
                 "exr16fpdwaa"
             ],
             "reel_name": "OP_LoadedReel",
-            "clip_name_template": "{asset}_{subset}_{output}"
+            "clip_name_template": "{asset}_{subset}<_{output}>"
         }
     }
 }
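The angle brackets mark the `_{output}` segment as optional: in OpenPype template syntax a `<...>` group is dropped entirely when a key inside it resolves to nothing, so clips without an explicit output variant no longer end with a dangling underscore. With illustrative values:

- `{asset}_{subset}<_{output}>` with `output = "h264"` resolves to `sh010_referenceMain_h264`
- the same template with no `output` value resolves to `sh010_referenceMain`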
@@ -51,5 +51,17 @@
             ]
         }
     },
-    "filters": {}
+    "filters": {},
+    "scriptsmenu": {
+        "name": "OpenPype Tools",
+        "definition": [
+            {
+                "type": "action",
+                "sourcetype": "python",
+                "title": "OpenPype Docs",
+                "command": "import webbrowser;webbrowser.open(url='https://openpype.io/docs/artist_hosts_hiero')",
+                "tooltip": "Open the OpenPype Hiero user doc page"
+            }
+        ]
+    }
 }
@@ -206,6 +206,10 @@
         {
             "type": "schema",
             "name": "schema_publish_gui_filter"
         },
+        {
+            "type": "schema",
+            "name": "schema_scriptsmenu"
+        }
     ]
 }
website/docs/admin_hosts_hiero.md (new file, 9 lines)
@@ -0,0 +1,9 @@
+---
+id: admin_hosts_hiero
+title: Hiero
+sidebar_label: Hiero
+---
+
+## Custom Menu
+You can add a custom tools menu to Hiero by extending the definitions in **Hiero -> Scripts Menu Definition**.
+

website/docs/assets/hiero-admin_scriptsmenu.png (new binary file)
Binary file not shown (size: 23 KiB).
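The `scriptsmenu` settings block added earlier is the definition the new docs page refers to; each item in `definition` describes one menu entry. A hypothetical second action following the same schema (title, URL and tooltip are illustrative):

    {
        "type": "action",
        "sourcetype": "python",
        "title": "Studio Wiki",
        "command": "import webbrowser;webbrowser.open(url='https://example.com/wiki')",
        "tooltip": "Open the studio wiki page"
    }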
@@ -100,6 +100,7 @@ module.exports = {
         label: "Integrations",
         items: [
             "admin_hosts_blender",
+            "admin_hosts_hiero",
             "admin_hosts_maya",
             "admin_hosts_nuke",
             "admin_hosts_resolve",