[Automated] Merged develop into main

This commit is contained in:
pypebot 2021-11-03 04:35:04 +01:00 committed by GitHub
commit 82fd6ef48e
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
34 changed files with 595 additions and 208 deletions

View file

@ -1,6 +1,7 @@
# -*- coding: utf-8 -*-
"""Houdini specific Avalon/Pyblish plugin definitions."""
import sys
from avalon.api import CreatorError
from avalon import houdini
import six
@ -8,7 +9,7 @@ import hou
from openpype.api import PypeCreatorMixin
class OpenPypeCreatorError(Exception):
class OpenPypeCreatorError(CreatorError):
pass

View file

@ -4,8 +4,8 @@ import contextlib
import logging
from Qt import QtCore, QtGui
from avalon.tools.widgets import AssetWidget
from avalon import style
from openpype.tools.utils.widgets import AssetWidget
from avalon import style, io
from pxr import Sdf
@ -31,7 +31,7 @@ def pick_asset(node):
# Construct the AssetWidget as a frameless popup so it automatically
# closes when clicked outside of it.
global tool
tool = AssetWidget(silo_creatable=False)
tool = AssetWidget(io)
tool.setContentsMargins(5, 5, 5, 5)
tool.setWindowTitle("Pick Asset")
tool.setStyleSheet(style.load_stylesheet())
@ -41,8 +41,6 @@ def pick_asset(node):
# Select the current asset if there is any
name = parm.eval()
if name:
from avalon import io
db_asset = io.find_one({"name": name, "type": "asset"})
if db_asset:
silo = db_asset.get("silo")

View file

@ -0,0 +1,96 @@
# -*- coding: utf-8 -*-
from openpype.hosts.houdini.api import plugin
from avalon.houdini import lib
from avalon import io
import hou
class CreateHDA(plugin.Creator):
    """Publish Houdini Digital Asset file."""

    name = "hda"
    label = "Houdini Digital Asset (Hda)"
    family = "hda"
    icon = "gears"
    maintain_selection = False

    def __init__(self, *args, **kwargs):
        super(CreateHDA, self).__init__(*args, **kwargs)
        # HDA instances have no bypass toggle, so drop the "active" key
        self.data.pop("active", None)

    def _check_existing(self, subset_name):
        # type: (str) -> bool
        """Check if existing subset name versions already exists.

        Comparison is case insensitive, so `hdaFoo` and `hdafoo` clash.

        Args:
            subset_name (str): Subset name to look for.

        Returns:
            bool: True if subset with the same (lowercased) name already
                exists on the current asset.

        """
        # Get all subsets of the current asset
        asset_id = io.find_one(
            {"name": self.data["asset"], "type": "asset"},
            projection={"_id": True}
        )['_id']
        subset_docs = io.find(
            {
                "type": "subset",
                "parent": asset_id
            }, {"name": 1}
        )
        existing_subset_names = set(subset_docs.distinct("name"))
        existing_subset_names_low = {
            _name.lower() for _name in existing_subset_names
        }
        return subset_name.lower() in existing_subset_names_low

    def _process(self, instance):
        """Create HDA from selection (or a fresh subnet) and imprint data.

        Args:
            instance: Created instance (subset name is read from
                ``self.data`` rather than from this argument).

        Returns:
            hou.Node: Resulting digital asset node.

        Raises:
            plugin.OpenPypeCreatorError: When an HDA cannot be created from
                the selected node, when the subset is already published with
                a different HDA definition, or when metadata cannot be
                imprinted on the node.

        """
        subset_name = self.data["subset"]

        # get selected nodes
        out = hou.node("/obj")
        self.nodes = hou.selectedNodes()

        if (self.options or {}).get("useSelection") and self.nodes:
            # if we have `use selection` enabled and we have some
            # selected nodes ...
            to_hda = self.nodes[0]
            if len(self.nodes) > 1:
                # if there is more than one node, create subnet first
                subnet = out.createNode(
                    "subnet", node_name="{}_subnet".format(self.name))
                to_hda = subnet
        else:
            # in case of no selection, just create subnet node
            subnet = out.createNode(
                "subnet", node_name="{}_subnet".format(self.name))
            subnet.moveToGoodPosition()
            to_hda = subnet

        if not to_hda.type().definition():
            # if node type has no definition, it is not a user created
            # hda. We test if hda can be created from the node.
            if not to_hda.canCreateDigitalAsset():
                # raise the same error type as the other failure paths so
                # callers handle all creator failures uniformly
                # (was a bare `Exception`)
                raise plugin.OpenPypeCreatorError(
                    "cannot create hda from node {}".format(to_hda))

            hda_node = to_hda.createDigitalAsset(
                name=subset_name,
                hda_file_name="$HIP/{}.hda".format(subset_name)
            )
            hou.moveNodesTo(self.nodes, hda_node)
            hda_node.layoutChildren()
        else:
            if self._check_existing(subset_name):
                # note: space added between "HDA" and "definition" so the
                # concatenated message reads correctly
                raise plugin.OpenPypeCreatorError(
                    ("subset {} is already published with different HDA "
                     "definition.").format(subset_name))
            hda_node = to_hda

        hda_node.setName(subset_name)

        # delete node created by Avalon in /out
        # this needs to be addressed in future Houdini workflow refactor.
        out_node = hou.node("/out/{}".format(subset_name))
        if out_node is not None:
            # `hou.node` returns None for a nonexistent path; guard so a
            # missing ROP does not raise AttributeError on `.destroy()`
            out_node.destroy()

        try:
            lib.imprint(hda_node, self.data)
        except hou.OperationFailed:
            raise plugin.OpenPypeCreatorError(
                ("Cannot set metadata on asset. Might be that it already is "
                 "OpenPype asset.")
            )

        return hda_node

View file

@ -0,0 +1,62 @@
# -*- coding: utf-8 -*-
from avalon import api
from avalon.houdini import pipeline
class HdaLoader(api.Loader):
    """Load Houdini Digital Asset file."""

    families = ["hda"]
    label = "Load Hda"
    representations = ["hda"]
    order = -10
    icon = "code-fork"
    color = "orange"

    def load(self, context, name=None, namespace=None, data=None):
        import os
        import hou

        # Format file name, Houdini only wants forward slashes
        hda_path = os.path.normpath(self.fname).replace("\\", "/")

        # Get the root node
        root = hou.node("/obj")

        # Create a unique name
        counter = 1
        namespace = namespace or context["asset"]["name"]
        if namespace:
            base_name = "{}_{}".format(namespace, name)
        else:
            base_name = name
        node_name = "{0}_{1:03d}".format(base_name, counter)

        # Register the asset library and instantiate the node
        hou.hda.installFile(hda_path)
        hda_node = root.createNode(name, node_name)

        self[:] = [hda_node]

        return pipeline.containerise(
            node_name,
            namespace,
            [hda_node],
            context,
            self.__class__.__name__,
            suffix="",
        )

    def update(self, container, representation):
        import hou

        hda_node = container["node"]

        # Install the new representation's library file and mark its
        # definition as the preferred one for this node type.
        file_path = api.get_representation_path(representation)
        file_path = file_path.replace("\\", "/")
        hou.hda.installFile(file_path)

        definitions = hda_node.type().allInstalledDefinitions()
        library_paths = [d.libraryFilePath() for d in definitions]
        preferred_index = library_paths.index(file_path)
        definitions[preferred_index].setIsPreferred(True)

    def remove(self, container):
        container["node"].destroy()

View file

@ -23,8 +23,10 @@ class CollectInstanceActiveState(pyblish.api.InstancePlugin):
return
# Check bypass state and reverse
active = True
node = instance[0]
active = not node.isBypassed()
if hasattr(node, "isBypassed"):
active = not node.isBypassed()
# Set instance active state
instance.data.update(

View file

@ -31,6 +31,7 @@ class CollectInstances(pyblish.api.ContextPlugin):
def process(self, context):
nodes = hou.node("/out").children()
nodes += hou.node("/obj").children()
# Include instances in USD stage only when it exists so it
# remains backwards compatible with version before houdini 18
@ -49,9 +50,12 @@ class CollectInstances(pyblish.api.ContextPlugin):
has_family = node.evalParm("family")
assert has_family, "'%s' is missing 'family'" % node.name()
self.log.info("processing {}".format(node))
data = lib.read(node)
# Check bypass state and reverse
data.update({"active": not node.isBypassed()})
if hasattr(node, "isBypassed"):
data.update({"active": not node.isBypassed()})
# temporarily translation of `active` to `publish` till issue has
# been resolved, https://github.com/pyblish/pyblish-base/issues/307

View file

@ -0,0 +1,43 @@
# -*- coding: utf-8 -*-
import os
from pprint import pformat
import pyblish.api
import openpype.api
class ExtractHDA(openpype.api.Extractor):
    """Extract Houdini Digital Asset as an 'hda' representation."""

    order = pyblish.api.ExtractorOrder
    label = "Extract HDA"
    hosts = ["houdini"]
    families = ["hda"]

    def process(self, instance):
        self.log.info(pformat(instance.data))

        hda_node = instance[0]
        hda_def = hda_node.type().definition()

        # Make sure initial parameters and contents are stored in the file
        hda_options = hda_def.options()
        hda_options.setSaveInitialParmsAndContents(True)

        # Stamp the definition with the version being published
        next_version = instance.data["anatomyData"]["version"]
        self.log.info("setting version: {}".format(next_version))
        hda_def.setVersion(str(next_version))
        hda_def.setOptions(hda_options)

        library_path = hda_def.libraryFilePath()
        hda_def.save(library_path, hda_node, hda_options)

        self.log.info("Using HDA from {}".format(library_path))
        staging_dir, file_name = os.path.split(library_path)

        representation = {
            "name": "hda",
            "ext": "hda",
            "files": file_name,
            "stagingDir": staging_dir,
        }
        instance.data.setdefault("representations", []).append(representation)

View file

@ -35,5 +35,5 @@ class ValidateBypassed(pyblish.api.InstancePlugin):
def get_invalid(cls, instance):
rop = instance[0]
if rop.isBypassed():
if hasattr(rop, "isBypassed") and rop.isBypassed():
return [rop]

View file

@ -275,8 +275,7 @@ def on_open(_):
# Show outdated pop-up
def _on_show_inventory():
import avalon.tools.sceneinventory as tool
tool.show(parent=parent)
host_tools.show_scene_inventory(parent=parent)
dialog = popup.Popup(parent=parent)
dialog.setWindowTitle("Maya scene has outdated content")

View file

@ -3,7 +3,7 @@ import json
import pyblish.api
from avalon import io
from openpype.lib import get_subset_name
from openpype.lib import get_subset_name_with_asset_doc
class CollectBulkMovInstances(pyblish.api.InstancePlugin):
@ -26,16 +26,10 @@ class CollectBulkMovInstances(pyblish.api.InstancePlugin):
context = instance.context
asset_name = instance.data["asset"]
asset_doc = io.find_one(
{
"type": "asset",
"name": asset_name
},
{
"_id": 1,
"data.tasks": 1
}
)
asset_doc = io.find_one({
"type": "asset",
"name": asset_name
})
if not asset_doc:
raise AssertionError((
"Couldn't find Asset document with name \"{}\""
@ -53,11 +47,11 @@ class CollectBulkMovInstances(pyblish.api.InstancePlugin):
task_name = available_task_names[_task_name_low]
break
subset_name = get_subset_name(
subset_name = get_subset_name_with_asset_doc(
self.new_instance_family,
self.subset_name_variant,
task_name,
asset_doc["_id"],
asset_doc,
io.Session["AVALON_PROJECT"]
)
instance_name = f"{asset_name}_{subset_name}"

View file

@ -4,7 +4,7 @@ import copy
import pyblish.api
from avalon import io
from openpype.lib import get_subset_name
from openpype.lib import get_subset_name_with_asset_doc
class CollectInstances(pyblish.api.ContextPlugin):
@ -70,16 +70,10 @@ class CollectInstances(pyblish.api.ContextPlugin):
# - not sure if it's good idea to require asset id in
# get_subset_name?
asset_name = context.data["workfile_context"]["asset"]
asset_doc = io.find_one(
{
"type": "asset",
"name": asset_name
},
{"_id": 1}
)
asset_id = None
if asset_doc:
asset_id = asset_doc["_id"]
asset_doc = io.find_one({
"type": "asset",
"name": asset_name
})
# Project name from workfile context
project_name = context.data["workfile_context"]["project"]
@ -88,11 +82,11 @@ class CollectInstances(pyblish.api.ContextPlugin):
# Use empty variant value
variant = ""
task_name = io.Session["AVALON_TASK"]
new_subset_name = get_subset_name(
new_subset_name = get_subset_name_with_asset_doc(
family,
variant,
task_name,
asset_id,
asset_doc,
project_name,
host_name
)

View file

@ -3,7 +3,7 @@ import json
import pyblish.api
from avalon import io
from openpype.lib import get_subset_name
from openpype.lib import get_subset_name_with_asset_doc
class CollectWorkfile(pyblish.api.ContextPlugin):
@ -28,16 +28,10 @@ class CollectWorkfile(pyblish.api.ContextPlugin):
# get_subset_name?
family = "workfile"
asset_name = context.data["workfile_context"]["asset"]
asset_doc = io.find_one(
{
"type": "asset",
"name": asset_name
},
{"_id": 1}
)
asset_id = None
if asset_doc:
asset_id = asset_doc["_id"]
asset_doc = io.find_one({
"type": "asset",
"name": asset_name
})
# Project name from workfile context
project_name = context.data["workfile_context"]["project"]
@ -46,11 +40,11 @@ class CollectWorkfile(pyblish.api.ContextPlugin):
# Use empty variant value
variant = ""
task_name = io.Session["AVALON_TASK"]
subset_name = get_subset_name(
subset_name = get_subset_name_with_asset_doc(
family,
variant,
task_name,
asset_id,
asset_doc,
project_name,
host_name
)

View file

@ -130,6 +130,7 @@ from .applications import (
from .plugin_tools import (
TaskNotSetError,
get_subset_name,
get_subset_name_with_asset_doc,
prepare_template_data,
filter_pyblish_plugins,
set_plugin_attributes_from_settings,
@ -249,6 +250,7 @@ __all__ = [
"TaskNotSetError",
"get_subset_name",
"get_subset_name_with_asset_doc",
"filter_pyblish_plugins",
"set_plugin_attributes_from_settings",
"source_hash",

View file

@ -245,6 +245,27 @@ def process_sequence(
report_items["Source file was not found"].append(msg)
return report_items, 0
delivery_templates = anatomy.templates.get("delivery") or {}
delivery_template = delivery_templates.get(template_name)
if delivery_template is None:
msg = (
"Delivery template \"{}\" in anatomy of project \"{}\""
" was not found"
).format(template_name, anatomy.project_name)
report_items[""].append(msg)
return report_items, 0
# Check if 'frame' key is available in template which is required
# for sequence delivery
if "{frame" not in delivery_template:
msg = (
"Delivery template \"{}\" in anatomy of project \"{}\""
"does not contain '{{frame}}' key to fill. Delivery of sequence"
" can't be processed."
).format(template_name, anatomy.project_name)
report_items[""].append(msg)
return report_items, 0
dir_path, file_name = os.path.split(str(src_path))
context = repre["context"]

View file

@ -28,17 +28,44 @@ class TaskNotSetError(KeyError):
super(TaskNotSetError, self).__init__(msg)
def get_subset_name(
def get_subset_name_with_asset_doc(
family,
variant,
task_name,
asset_id,
asset_doc,
project_name=None,
host_name=None,
default_template=None,
dynamic_data=None,
dbcon=None
dynamic_data=None
):
"""Calculate subset name based on passed context and OpenPype settings.
Subst name templates are defined in `project_settings/global/tools/creator
/subset_name_profiles` where are profiles with host name, family, task name
and task type filters. If context does not match any profile then
`DEFAULT_SUBSET_TEMPLATE` is used as default template.
That's main reason why so many arguments are required to calculate subset
name.
Args:
family (str): Instance family.
variant (str): In most of cases it is user input during creation.
task_name (str): Task name on which context is instance created.
asset_doc (dict): Queried asset document with it's tasks in data.
Used to get task type.
project_name (str): Name of project on which is instance created.
Important for project settings that are loaded.
host_name (str): One of filtering criteria for template profile
filters.
default_template (str): Default template if any profile does not match
passed context. Constant 'DEFAULT_SUBSET_TEMPLATE' is used if
is not passed.
dynamic_data (dict): Dynamic data specific for a creator which creates
instance.
dbcon (AvalonMongoDB): Mongo connection to be able query asset document
if 'asset_doc' is not passed.
"""
if not family:
return ""
@ -53,25 +80,6 @@ def get_subset_name(
project_name = avalon.api.Session["AVALON_PROJECT"]
# Function should expect asset document instead of asset id
# - that way `dbcon` is not needed
if dbcon is None:
from avalon.api import AvalonMongoDB
dbcon = AvalonMongoDB()
dbcon.Session["AVALON_PROJECT"] = project_name
dbcon.install()
asset_doc = dbcon.find_one(
{
"type": "asset",
"_id": asset_id
},
{
"data.tasks": True
}
)
asset_tasks = asset_doc.get("data", {}).get("tasks") or {}
task_info = asset_tasks.get(task_name) or {}
task_type = task_info.get("type")
@ -113,6 +121,49 @@ def get_subset_name(
return template.format(**prepare_template_data(fill_pairs))
def get_subset_name(
    family,
    variant,
    task_name,
    asset_id,
    project_name=None,
    host_name=None,
    default_template=None,
    dynamic_data=None,
    dbcon=None
):
    """Calculate subset name using OpenPype settings.

    Legacy variant that receives an asset id and queries the asset document
    itself. Prefer `get_subset_name_with_asset_doc`, which expects the
    already queried asset document.
    """
    # Create a database connection when the caller did not provide one
    if dbcon is None:
        from avalon.api import AvalonMongoDB

        dbcon = AvalonMongoDB()
        dbcon.Session["AVALON_PROJECT"] = project_name

    dbcon.install()

    # Only task information from the asset document is needed downstream
    query_filter = {"_id": asset_id}
    projection = {"data.tasks": True}
    asset_doc = dbcon.find_one(query_filter, projection) or {}

    return get_subset_name_with_asset_doc(
        family,
        variant,
        task_name,
        asset_doc,
        project_name,
        host_name,
        default_template,
        dynamic_data
    )
def prepare_template_data(fill_pairs):
"""
Prepares formatted data for filling template.

View file

@ -99,7 +99,8 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
"camerarig",
"redshiftproxy",
"effect",
"xgen"
"xgen",
"hda"
]
exclude_families = ["clip"]
db_representation_context_keys = [

View file

@ -9,9 +9,9 @@ class ShowInventory(pyblish.api.Action):
on = "failed"
def process(self, context, plugin):
from avalon.tools import sceneinventory
from openpype.tools.utils import host_tools
sceneinventory.show()
host_tools.show_scene_inventory()
class ValidateContainers(pyblish.api.ContextPlugin):

View file

@ -110,7 +110,10 @@ from .enum_entity import (
)
from .list_entity import ListEntity
from .dict_immutable_keys_entity import DictImmutableKeysEntity
from .dict_immutable_keys_entity import (
DictImmutableKeysEntity,
RootsDictEntity
)
from .dict_mutable_keys_entity import DictMutableKeysEntity
from .dict_conditional import (
DictConditionalEntity,
@ -169,6 +172,7 @@ __all__ = (
"ListEntity",
"DictImmutableKeysEntity",
"RootsDictEntity",
"DictMutableKeysEntity",

View file

@ -510,7 +510,7 @@ class BaseItemEntity(BaseEntity):
pass
@abstractmethod
def _item_initalization(self):
def _item_initialization(self):
"""Entity specific initialization process."""
pass
@ -920,7 +920,7 @@ class ItemEntity(BaseItemEntity):
_default_label_wrap["collapsed"]
)
self._item_initalization()
self._item_initialization()
def save(self):
"""Call save on root item."""

View file

@ -9,7 +9,7 @@ from .exceptions import (
class ColorEntity(InputEntity):
schema_types = ["color"]
def _item_initalization(self):
def _item_initialization(self):
self.valid_value_types = (list, )
self.value_on_not_set = [0, 0, 0, 255]
self.use_alpha = self.schema_data.get("use_alpha", True)

View file

@ -107,7 +107,7 @@ class DictConditionalEntity(ItemEntity):
for _key, _value in new_value.items():
self.non_gui_children[self.current_enum][_key].set(_value)
def _item_initalization(self):
def _item_initialization(self):
self._default_metadata = NOT_SET
self._studio_override_metadata = NOT_SET
self._project_override_metadata = NOT_SET

View file

@ -4,7 +4,8 @@ import collections
from .lib import (
WRAPPER_TYPES,
OverrideState,
NOT_SET
NOT_SET,
STRING_TYPE
)
from openpype.settings.constants import (
METADATA_KEYS,
@ -18,6 +19,7 @@ from . import (
GUIEntity
)
from .exceptions import (
DefaultsNotDefined,
SchemaDuplicatedKeys,
EntitySchemaError,
InvalidKeySymbols
@ -172,7 +174,7 @@ class DictImmutableKeysEntity(ItemEntity):
for child_obj in added_children:
self.gui_layout.append(child_obj)
def _item_initalization(self):
def _item_initialization(self):
self._default_metadata = NOT_SET
self._studio_override_metadata = NOT_SET
self._project_override_metadata = NOT_SET
@ -547,3 +549,178 @@ class DictImmutableKeysEntity(ItemEntity):
super(DictImmutableKeysEntity, self).reset_callbacks()
for child_entity in self.children:
child_entity.reset_callbacks()
class RootsDictEntity(DictImmutableKeysEntity):
    """Entity that adds ability to fill value for roots of current project.

    Value schema is defined by `object_type`.

    It is not possible to change override state (Studio values will always
    contain studio overrides and same for project). That is because roots can
    be totally different for each project.
    """
    # Original (pre-copy) schema data, kept for reference after
    # `_item_initialization` replaces `schema_data` with a childless copy.
    _origin_schema_data = None
    schema_types = ["dict-roots"]

    def _item_initialization(self):
        """Prepare schema copies and value placeholders.

        Children are not created here; they are generated per project root
        in `set_override_state`, so the working schema gets empty
        "children" while the original schema is kept aside.
        """
        origin_schema_data = self.schema_data

        # Whether a separator item is inserted between root children
        self.separate_items = origin_schema_data.get("separate_items", True)

        # `object_type` defines the schema of each per-root child; a plain
        # string is shorthand for a schema with only a "type" key.
        object_type = origin_schema_data.get("object_type")
        if isinstance(object_type, STRING_TYPE):
            object_type = {"type": object_type}
        self.object_type = object_type

        # Roots entity always acts as a group (values stored as a whole)
        if not self.is_group:
            self.is_group = True

        # Work on a deep copy with no children so the parent class does not
        # create children from schema prematurely.
        schema_data = copy.deepcopy(self.schema_data)
        schema_data["children"] = []
        self.schema_data = schema_data
        self._origin_schema_data = origin_schema_data

        # Raw values per override layer; filled by `update_*_value` calls
        self._default_value = NOT_SET
        self._studio_value = NOT_SET
        self._project_value = NOT_SET

        super(RootsDictEntity, self)._item_initialization()

    def schema_validations(self):
        """Validate that `object_type` is defined and is a dictionary."""
        if self.object_type is None:
            reason = (
                "Missing children definitions for root values"
                " ('object_type' not filled)."
            )
            raise EntitySchemaError(self, reason)

        if not isinstance(self.object_type, dict):
            reason = (
                "Children definitions for root values must be dictionary"
                " ('object_type' is \"{}\")."
            ).format(str(type(self.object_type)))
            raise EntitySchemaError(self, reason)

        super(RootsDictEntity, self).schema_validations()

    def set_override_state(self, state, ignore_missing_defaults):
        """Rebuild children from current project roots and apply values.

        Children are regenerated on every call because project roots may
        have changed since the last state change. Note the parent call is
        made with `ignore_missing_defaults` forced to True — missing
        defaults are handled here in `_set_children_values`.
        """
        # Drop previously generated children before regenerating them
        self.children = []
        self.non_gui_children = {}
        self.gui_layout = []

        roots_entity = self.get_entity_from_path(
            "project_anatomy/roots"
        )
        children = []
        first = True
        for key in roots_entity.keys():
            # Optionally separate root items visually (skip before first)
            if first:
                first = False
            elif self.separate_items:
                children.append({"type": "separator"})
            # One child per root key, using `object_type` as its schema
            child = copy.deepcopy(self.object_type)
            child["key"] = key
            child["label"] = key
            children.append(child)

        schema_data = copy.deepcopy(self.schema_data)
        schema_data["children"] = children

        self._add_children(schema_data)

        # Push stored raw values into the freshly created children before
        # the parent class processes the override state
        self._set_children_values(state)

        super(RootsDictEntity, self).set_override_state(
            state, True
        )

        # Roots are always treated as overridden on studio/project level
        # (see class docstring)
        if state == OverrideState.STUDIO:
            self.add_to_studio_default()
        elif state == OverrideState.PROJECT:
            self.add_to_project_override()

    def on_child_change(self, child_obj):
        """Keep the whole group marked as overridden when a child changes."""
        if self._override_state is OverrideState.STUDIO:
            if not child_obj.has_studio_override:
                self.add_to_studio_default()
        elif self._override_state is OverrideState.PROJECT:
            if not child_obj.has_project_override:
                self.add_to_project_override()
        return super(RootsDictEntity, self).on_child_change(child_obj)

    def _set_children_values(self, state):
        """Distribute stored per-layer values to child entities.

        For each layer at or below `state`, every child receives its value
        by root key (or NOT_SET when the key is missing).
        """
        if state >= OverrideState.DEFAULTS:
            default_value = self._default_value
            if default_value is NOT_SET:
                # Defaults are mandatory for higher states; only the
                # DEFAULTS state itself may work without them
                if state > OverrideState.DEFAULTS:
                    raise DefaultsNotDefined(self)
                else:
                    default_value = {}

            for key, child_obj in self.non_gui_children.items():
                child_value = default_value.get(key, NOT_SET)
                child_obj.update_default_value(child_value)

        if state >= OverrideState.STUDIO:
            value = self._studio_value
            if value is NOT_SET:
                value = {}

            for key, child_obj in self.non_gui_children.items():
                child_value = value.get(key, NOT_SET)
                child_obj.update_studio_value(child_value)

        if state >= OverrideState.PROJECT:
            value = self._project_value
            if value is NOT_SET:
                value = {}

            for key, child_obj in self.non_gui_children.items():
                child_value = value.get(key, NOT_SET)
                child_obj.update_project_value(child_value)

    def _update_current_metadata(self):
        """Override this method as this entity should not have metadata."""
        self._metadata_are_modified = False
        self._current_metadata = {}

    def update_default_value(self, value):
        """Update default values.

        Not an api method, should be called by parent.
        """
        value = self._check_update_value(value, "default")
        value, _ = self._prepare_value(value)
        self._default_value = value
        self._default_metadata = {}
        self.has_default_value = value is not NOT_SET

    def update_studio_value(self, value):
        """Update studio override values.

        Not an api method, should be called by parent.
        """
        value = self._check_update_value(value, "studio override")
        value, _ = self._prepare_value(value)
        self._studio_value = value
        self._studio_override_metadata = {}
        self.had_studio_override = value is not NOT_SET

    def update_project_value(self, value):
        """Update project override values.

        Not an api method, should be called by parent.
        """
        value = self._check_update_value(value, "project override")
        value, _metadata = self._prepare_value(value)
        self._project_value = value
        self._project_override_metadata = {}
        self.had_project_override = value is not NOT_SET

View file

@ -191,7 +191,7 @@ class DictMutableKeysEntity(EndpointEntity):
child_entity = self.children_by_key[key]
self.set_child_label(child_entity, label)
def _item_initalization(self):
def _item_initialization(self):
self._default_metadata = {}
self._studio_override_metadata = {}
self._project_override_metadata = {}

View file

@ -8,7 +8,7 @@ from .lib import (
class BaseEnumEntity(InputEntity):
def _item_initalization(self):
def _item_initialization(self):
self.multiselection = True
self.value_on_not_set = None
self.enum_items = None
@ -70,7 +70,7 @@ class BaseEnumEntity(InputEntity):
class EnumEntity(BaseEnumEntity):
schema_types = ["enum"]
def _item_initalization(self):
def _item_initialization(self):
self.multiselection = self.schema_data.get("multiselection", False)
self.enum_items = self.schema_data.get("enum_items")
# Default is optional and non breaking attribute
@ -157,7 +157,7 @@ class HostsEnumEntity(BaseEnumEntity):
"standalonepublisher"
]
def _item_initalization(self):
def _item_initialization(self):
self.multiselection = self.schema_data.get("multiselection", True)
use_empty_value = False
if not self.multiselection:
@ -250,7 +250,7 @@ class HostsEnumEntity(BaseEnumEntity):
class AppsEnumEntity(BaseEnumEntity):
schema_types = ["apps-enum"]
def _item_initalization(self):
def _item_initialization(self):
self.multiselection = True
self.value_on_not_set = []
self.enum_items = []
@ -317,7 +317,7 @@ class AppsEnumEntity(BaseEnumEntity):
class ToolsEnumEntity(BaseEnumEntity):
schema_types = ["tools-enum"]
def _item_initalization(self):
def _item_initialization(self):
self.multiselection = True
self.value_on_not_set = []
self.enum_items = []
@ -376,7 +376,7 @@ class ToolsEnumEntity(BaseEnumEntity):
class TaskTypeEnumEntity(BaseEnumEntity):
schema_types = ["task-types-enum"]
def _item_initalization(self):
def _item_initialization(self):
self.multiselection = self.schema_data.get("multiselection", True)
if self.multiselection:
self.valid_value_types = (list, )
@ -452,7 +452,7 @@ class TaskTypeEnumEntity(BaseEnumEntity):
class DeadlineUrlEnumEntity(BaseEnumEntity):
schema_types = ["deadline_url-enum"]
def _item_initalization(self):
def _item_initialization(self):
self.multiselection = self.schema_data.get("multiselection", True)
self.enum_items = []
@ -503,7 +503,7 @@ class DeadlineUrlEnumEntity(BaseEnumEntity):
class AnatomyTemplatesEnumEntity(BaseEnumEntity):
schema_types = ["anatomy-templates-enum"]
def _item_initalization(self):
def _item_initialization(self):
self.multiselection = False
self.enum_items = []

View file

@ -362,7 +362,7 @@ class NumberEntity(InputEntity):
float_number_regex = re.compile(r"^\d+\.\d+$")
int_number_regex = re.compile(r"^\d+$")
def _item_initalization(self):
def _item_initialization(self):
self.minimum = self.schema_data.get("minimum", -99999)
self.maximum = self.schema_data.get("maximum", 99999)
self.decimal = self.schema_data.get("decimal", 0)
@ -420,7 +420,7 @@ class NumberEntity(InputEntity):
class BoolEntity(InputEntity):
schema_types = ["boolean"]
def _item_initalization(self):
def _item_initialization(self):
self.valid_value_types = (bool, )
value_on_not_set = self.convert_to_valid_type(
self.schema_data.get("default", True)
@ -431,7 +431,7 @@ class BoolEntity(InputEntity):
class TextEntity(InputEntity):
schema_types = ["text"]
def _item_initalization(self):
def _item_initialization(self):
self.valid_value_types = (STRING_TYPE, )
self.value_on_not_set = ""
@ -449,7 +449,7 @@ class TextEntity(InputEntity):
class PathInput(InputEntity):
schema_types = ["path-input"]
def _item_initalization(self):
def _item_initialization(self):
self.valid_value_types = (STRING_TYPE, )
self.value_on_not_set = ""
@ -460,7 +460,7 @@ class PathInput(InputEntity):
class RawJsonEntity(InputEntity):
schema_types = ["raw-json"]
def _item_initalization(self):
def _item_initialization(self):
# Schema must define if valid value is dict or list
store_as_string = self.schema_data.get("store_as_string", False)
is_list = self.schema_data.get("is_list", False)

View file

@ -48,7 +48,7 @@ class PathEntity(ItemEntity):
raise AttributeError(self.attribute_error_msg.format("items"))
return self.child_obj.items()
def _item_initalization(self):
def _item_initialization(self):
if self.group_item is None and not self.is_group:
self.is_group = True
@ -216,7 +216,7 @@ class ListStrictEntity(ItemEntity):
return self.children[idx]
return default
def _item_initalization(self):
def _item_initialization(self):
self.valid_value_types = (list, )
self.require_key = True

View file

@ -149,7 +149,7 @@ class ListEntity(EndpointEntity):
return list(value)
return NOT_SET
def _item_initalization(self):
def _item_initialization(self):
self.valid_value_types = (list, )
self.children = []
self.value_on_not_set = []

View file

@ -65,7 +65,7 @@ class RootEntity(BaseItemEntity):
super(RootEntity, self).__init__(schema_data)
self._require_restart_callbacks = []
self._item_ids_require_restart = set()
self._item_initalization()
self._item_initialization()
if reset:
self.reset()
@ -176,7 +176,7 @@ class RootEntity(BaseItemEntity):
for child_obj in added_children:
self.gui_layout.append(child_obj)
def _item_initalization(self):
def _item_initialization(self):
# Store `self` to `root_item` for children entities
self.root_item = self

View file

@ -208,6 +208,25 @@
}
```
## dict-roots
- entity can be used only in Project settings
- keys of dictionary are based on current project roots
- they are not updated "live"; it is required to save root changes first and then
modify values on this entity
# TODO do live updates
```
{
"type": "dict-roots",
"key": "roots",
"label": "Roots",
"object_type": {
"type": "path",
"multiplatform": true,
"multipath": false
}
}
```
## dict-conditional
- is similar to `dict` but has always available one enum entity
- the enum entity has single selection and it's value define other children entities

View file

@ -8,8 +8,7 @@ from avalon.api import AvalonMongoDB
from openpype import style
from openpype.api import resources
from avalon.tools import lib as tools_lib
from avalon.tools.widgets import AssetWidget
from openpype.tools.utils.widgets import AssetWidget
from avalon.vendor import qtawesome
from .models import ProjectModel
from .lib import get_action_label, ProjectHandler

View file

@ -1456,7 +1456,11 @@ class HierarchyModel(QtCore.QAbstractItemModel):
return
raw_data = mime_data.data("application/copy_task")
encoded_data = QtCore.QByteArray.fromRawData(raw_data)
if isinstance(raw_data, QtCore.QByteArray):
# Raw data is already a QByteArray and we don't have to load it
encoded_data = raw_data
else:
encoded_data = QtCore.QByteArray.fromRawData(raw_data)
stream = QtCore.QDataStream(encoded_data, QtCore.QIODevice.ReadOnly)
text = stream.readQString()
try:

View file

@ -7,7 +7,6 @@ an active window manager; such as via Travis-CI.
"""
import os
import sys
import traceback
import inspect
import logging

View file

@ -35,26 +35,6 @@ def application():
yield app
def defer(delay, func):
"""Append artificial delay to `func`
This aids in keeping the GUI responsive, but complicates logic
when producing tests. To combat this, the environment variable ensures
that every operation is synchonous.
Arguments:
delay (float): Delay multiplier; default 1, 0 means no delay
func (callable): Any callable
"""
delay *= float(os.getenv("PYBLISH_DELAY", 1))
if delay > 0:
return QtCore.QTimer.singleShot(delay, func)
else:
return func()
class SharedObjects:
jobs = {}
@ -82,18 +62,6 @@ def schedule(func, time, channel="default"):
SharedObjects.jobs[channel] = timer
@contextlib.contextmanager
def dummy():
"""Dummy context manager
Usage:
>> with some_context() if False else dummy():
.. pass
"""
yield
def iter_model_rows(model, column, include_root=False):
"""Iterate over all row indices in a model"""
indices = [QtCore.QModelIndex()] # start iteration at root
@ -111,76 +79,6 @@ def iter_model_rows(model, column, include_root=False):
yield index
@contextlib.contextmanager
def preserve_states(tree_view,
column=0,
role=None,
preserve_expanded=True,
preserve_selection=True,
expanded_role=QtCore.Qt.DisplayRole,
selection_role=QtCore.Qt.DisplayRole):
"""Preserves row selection in QTreeView by column's data role.
This function is created to maintain the selection status of
the model items. When refresh is triggered the items which are expanded
will stay expanded and vise versa.
tree_view (QWidgets.QTreeView): the tree view nested in the application
column (int): the column to retrieve the data from
role (int): the role which dictates what will be returned
Returns:
None
"""
# When `role` is set then override both expanded and selection roles
if role:
expanded_role = role
selection_role = role
model = tree_view.model()
selection_model = tree_view.selectionModel()
flags = selection_model.Select | selection_model.Rows
expanded = set()
if preserve_expanded:
for index in iter_model_rows(
model, column=column, include_root=False
):
if tree_view.isExpanded(index):
value = index.data(expanded_role)
expanded.add(value)
selected = None
if preserve_selection:
selected_rows = selection_model.selectedRows()
if selected_rows:
selected = set(row.data(selection_role) for row in selected_rows)
try:
yield
finally:
if expanded:
for index in iter_model_rows(
model, column=0, include_root=False
):
value = index.data(expanded_role)
is_expanded = value in expanded
# skip if new index was created meanwhile
if is_expanded is None:
continue
tree_view.setExpanded(index, is_expanded)
if selected:
# Go through all indices, select the ones with similar data
for index in iter_model_rows(
model, column=column, include_root=False
):
value = index.data(selection_role)
state = value in selected
if state:
tree_view.scrollTo(index) # Ensure item is visible
selection_model.select(index, flags)
@contextlib.contextmanager
def preserve_expanded_rows(tree_view, column=0, role=None):
"""Preserves expanded row in QTreeView by column's data role.

View file

@ -76,3 +76,28 @@ I've selected `vdb1` and went **OpenPype -> Create** and selected **VDB Cache**.
geometry ROP in `/out` and sets its paths to output vdb files. During the publishing process
whole dops are cooked.
## Publishing Houdini Digital Assets (HDA)
You can publish most of the nodes in Houdini as hda for easy interchange of data between Houdini instances or even
other DCCs with Houdini Engine.
## Creating HDA
Simply select nodes you want to include in hda and go **OpenPype -> Create** and select **Houdini digital asset (hda)**.
You can even use already existing hda as a selected node, and it will be published (see below for limitation).
:::caution HDA Workflow limitations
As long as the hda is of the same type - even when it is created from different nodes but uses the same (subset) name - everything
is ok. But once you've published a version of the hda subset, you cannot change its type. For example, you create hda **Foo**
from *Cube* and *Sphere* - it will create hda subset named `hdaFoo` with the same type. You publish it as version 1.
Then you create version 2 with added *Torus*. Then you create version 3 from scratch from completely different nodes,
but still using resulting subset name `hdaFoo`. Everything still works as expected. But then you use already
existing hda as a base, for example from different artist. Its type cannot be changed from what it was and so even if
it is named `hdaFoo` it has different type. It could be published, but you would never load it and retain ability to
switch versions between different hda types.
:::
## Loading HDA
When you load hda, it will install its type in your hip file and add published version as its definition file. When
you switch version via Scene Manager, it will add its definition and set it as preferred.