Merge remote-tracking branch 'upstream/develop' into develop

This commit is contained in:
Alexey Bogomolov 2023-02-02 00:04:20 +03:00
commit d74daa49de
60 changed files with 793 additions and 254 deletions

View file

@ -1,3 +1,4 @@
import os
import logging
import contextlib
from abc import ABCMeta, abstractproperty
@ -100,6 +101,30 @@ class HostBase(object):
pass
def get_current_project_name(self):
    """Project name of the current session.

    Returns:
        Union[str, None]: Current project name.
    """
    return os.getenv("AVALON_PROJECT")
def get_current_asset_name(self):
    """Asset name of the current session.

    Returns:
        Union[str, None]: Current asset name.
    """
    return os.getenv("AVALON_ASSET")
def get_current_task_name(self):
    """Task name of the current session.

    Returns:
        Union[str, None]: Current task name.
    """
    # Bug fix: previously returned os.environ.get("AVALON_ASSET"), which is
    # the asset name, not the task name.
    return os.environ.get("AVALON_TASK")
def get_current_context(self):
"""Get current context information.
@ -111,19 +136,14 @@ class HostBase(object):
Default implementation returns values from 'legacy_io.Session'.
Returns:
dict: Context with 3 keys 'project_name', 'asset_name' and
'task_name'. All of them can be 'None'.
Dict[str, Union[str, None]]: Context with 3 keys 'project_name',
'asset_name' and 'task_name'. All of them can be 'None'.
"""
from openpype.pipeline import legacy_io
if legacy_io.is_installed():
legacy_io.install()
return {
"project_name": legacy_io.Session["AVALON_PROJECT"],
"asset_name": legacy_io.Session["AVALON_ASSET"],
"task_name": legacy_io.Session["AVALON_TASK"]
"project_name": self.get_current_project_name(),
"asset_name": self.get_current_asset_name(),
"task_name": self.get_current_task_name()
}
def get_context_title(self):

View file

@ -19,7 +19,6 @@ class ValidateCameraZeroKeyframe(pyblish.api.InstancePlugin):
order = ValidateContentsOrder
hosts = ["blender"]
families = ["camera"]
version = (0, 1, 0)
label = "Zero Keyframe"
actions = [openpype.hosts.blender.api.action.SelectInvalidAction]

View file

@ -14,7 +14,6 @@ class ValidateMeshHasUvs(pyblish.api.InstancePlugin):
order = ValidateContentsOrder
hosts = ["blender"]
families = ["model"]
category = "geometry"
label = "Mesh Has UV's"
actions = [openpype.hosts.blender.api.action.SelectInvalidAction]
optional = True

View file

@ -14,7 +14,6 @@ class ValidateMeshNoNegativeScale(pyblish.api.Validator):
order = ValidateContentsOrder
hosts = ["blender"]
families = ["model"]
category = "geometry"
label = "Mesh No Negative Scale"
actions = [openpype.hosts.blender.api.action.SelectInvalidAction]

View file

@ -19,7 +19,6 @@ class ValidateNoColonsInName(pyblish.api.InstancePlugin):
order = ValidateContentsOrder
hosts = ["blender"]
families = ["model", "rig"]
version = (0, 1, 0)
label = "No Colons in names"
actions = [openpype.hosts.blender.api.action.SelectInvalidAction]

View file

@ -21,7 +21,6 @@ class ValidateTransformZero(pyblish.api.InstancePlugin):
order = ValidateContentsOrder
hosts = ["blender"]
families = ["model"]
version = (0, 1, 0)
label = "Transform Zero"
actions = [openpype.hosts.blender.api.action.SelectInvalidAction]

View file

@ -113,7 +113,7 @@ class HoudiniCreatorBase(object):
Dict[str, Any]: Shared data dictionary.
"""
if shared_data.get("houdini_cached_subsets") is not None:
if shared_data.get("houdini_cached_subsets") is None:
cache = dict()
cache_legacy = dict()

View file

@ -0,0 +1,26 @@
# -*- coding: utf-8 -*-
"""Creator plugin for creating camera."""
from openpype.hosts.max.api import plugin
from openpype.pipeline import CreatedInstance
# Creator plugin: builds a "camera" publish instance in 3ds Max and parents
# the currently selected nodes under the instance's container node.
class CreateCamera(plugin.MaxCreator):
# Unique identifier of this creator within the create system.
identifier = "io.openpype.creators.max.camera"
label = "Camera"
family = "camera"
icon = "gear"
def create(self, subset_name, instance_data, pre_create_data):
# pymxs is only importable inside a running 3ds Max session, hence the
# deferred import.
from pymxs import runtime as rt
# Snapshot the current selection before the base create() runs.
sel_obj = list(rt.selection)
instance = super(CreateCamera, self).create(
subset_name,
instance_data,
pre_create_data) # type: CreatedInstance
# Resolve the container node created for this instance by name.
container = rt.getNodeByName(instance.data.get("instance_node"))
# TODO: Disable "Add to Containers?" Panel
# parent the selected cameras into the container
for obj in sel_obj:
obj.parent = container
# for additional work on the node:
# instance_node = rt.getNodeByName(instance.get("instance_node"))

View file

@ -0,0 +1,49 @@
import os
from openpype.pipeline import (
load
)
# Loader plugin: imports a published FBX camera file into the current
# 3ds Max scene and wraps the imported node in a "<name>_CON" container.
class FbxLoader(load.LoaderPlugin):
"""Fbx Loader"""
# Families and representations this loader accepts.
families = ["camera"]
representations = ["fbx"]
order = -9
icon = "code-fork"
color = "white"
def load(self, context, name=None, namespace=None, data=None):
# pymxs is only importable inside a running 3ds Max session.
from pymxs import runtime as rt
filepath = os.path.normpath(self.fname)
# MAXScript snippet executed inside Max to perform the FBX import.
# NOTE(review): the two "FbxExporterSetParam" lines below configure the
# exporter inside an import script -- confirm they should not be
# "FBXImporterSetParam".
fbx_import_cmd = (
f"""
FBXImporterSetParam "Animation" true
FBXImporterSetParam "Cameras" true
FBXImporterSetParam "AxisConversionMethod" true
FbxExporterSetParam "UpAxis" "Y"
FbxExporterSetParam "Preserveinstances" true
importFile @"{filepath}" #noPrompt using:FBXIMP
""")
self.log.debug(f"Executing command: {fbx_import_cmd}")
rt.execute(fbx_import_cmd)
container_name = f"{name}_CON"
# Look up the imported node by the subset name.
asset = rt.getNodeByName(f"{name}")
# rename the container with "_CON"
container = rt.container(name=container_name)
asset.Parent = container
# Return the container node so the pipeline can track the loaded content.
return container
def remove(self, container):
"""Delete the loaded container node from the scene."""
from pymxs import runtime as rt
node = container["node"]
rt.delete(node)

View file

@ -0,0 +1,50 @@
import os
from openpype.pipeline import (
load
)
# Loader plugin: merges a published .max scene file into the current
# 3ds Max scene and wraps the merged container in a "<name>_CON" container.
class MaxSceneLoader(load.LoaderPlugin):
"""Max Scene Loader"""
families = ["camera"]
representations = ["max"]
order = -8
icon = "code-fork"
color = "green"
def load(self, context, name=None, namespace=None, data=None):
# pymxs is only importable inside a running 3ds Max session.
from pymxs import runtime as rt
path = os.path.normpath(self.fname)
# import the max scene by using "merge file"
path = path.replace('\\', '/')
# Snapshot the root-level Container nodes before the merge so the one
# added by the merge can be identified afterwards by set difference.
merge_before = {
c for c in rt.rootNode.Children
if rt.classOf(c) == rt.Container
}
rt.mergeMaxFile(path)
merge_after = {
c for c in rt.rootNode.Children
if rt.classOf(c) == rt.Container
}
max_containers = merge_after.difference(merge_before)
if len(max_containers) != 1:
self.log.error("Something failed when loading.")
# NOTE(review): pop() raises KeyError when the merge added no container;
# the error above is only logged, not raised -- confirm this is intended.
max_container = max_containers.pop()
container_name = f"{name}_CON"
# rename the container with "_CON"
# get the original container
container = rt.container(name=container_name)
max_container.Parent = container
# Return the wrapper container so the pipeline can track loaded content.
return container
def remove(self, container):
"""Delete the loaded container node from the scene."""
from pymxs import runtime as rt
node = container["node"]
rt.delete(node)

View file

@ -15,7 +15,10 @@ from openpype.hosts.max.api import lib
class AbcLoader(load.LoaderPlugin):
"""Alembic loader."""
families = ["model", "animation", "pointcache"]
families = ["model",
"camera",
"animation",
"pointcache"]
label = "Load Alembic"
representations = ["abc"]
order = -10

View file

@ -0,0 +1,75 @@
import os
import pyblish.api
from openpype.pipeline import (
publish,
OptionalPyblishPluginMixin
)
from pymxs import runtime as rt
from openpype.hosts.max.api import (
maintained_selection,
get_all_children
)
# Extractor: exports the camera instance to an Alembic (.abc) file through
# 3ds Max's AlembicExport MAXScript interface.
class ExtractCameraAlembic(publish.Extractor,
OptionalPyblishPluginMixin):
"""
Extract Camera with AlembicExport
"""
# Runs slightly before the default extractor order.
order = pyblish.api.ExtractorOrder - 0.1
label = "Extract Alembic Camera"
hosts = ["max"]
families = ["camera"]
optional = True
def process(self, instance):
# Optional plugin: skip when disabled for this instance.
if not self.is_active(instance.data):
return
# Frame range including handles; falls back to a single frame.
start = float(instance.data.get("frameStartHandle", 1))
end = float(instance.data.get("frameEndHandle", 1))
container = instance.data["instance_node"]
self.log.info("Extracting Camera ...")
stagingdir = self.staging_dir(instance)
filename = "{name}.abc".format(**instance.data)
path = os.path.join(stagingdir, filename)
# We run the render
self.log.info("Writing alembic '%s' to '%s'" % (filename,
stagingdir))
# MAXScript snippet executed inside Max to perform the Alembic export.
export_cmd = (
f"""
AlembicExport.ArchiveType = #ogawa
AlembicExport.CoordinateSystem = #maya
AlembicExport.StartFrame = {start}
AlembicExport.EndFrame = {end}
AlembicExport.CustomAttributes = true
exportFile @"{path}" #noPrompt selectedOnly:on using:AlembicExport
""")
self.log.debug(f"Executing command: {export_cmd}")
with maintained_selection():
# select and export
rt.select(get_all_children(rt.getNodeByName(container)))
rt.execute(export_cmd)
self.log.info("Performing Extraction ...")
# Register the produced file as a representation on the instance.
if "representations" not in instance.data:
instance.data["representations"] = []
representation = {
'name': 'abc',
'ext': 'abc',
'files': filename,
"stagingDir": stagingdir,
}
instance.data["representations"].append(representation)
self.log.info("Extracted instance '%s' to: %s" % (instance.name,
path))

View file

@ -0,0 +1,75 @@
import os
import pyblish.api
from openpype.pipeline import (
publish,
OptionalPyblishPluginMixin
)
from pymxs import runtime as rt
from openpype.hosts.max.api import (
maintained_selection,
get_all_children
)
# Extractor: exports the camera instance to an FBX file through 3ds Max's
# FBX exporter MAXScript interface.
class ExtractCameraFbx(publish.Extractor,
OptionalPyblishPluginMixin):
"""
Extract Camera with FbxExporter
"""
# Runs before the default extractor order.
order = pyblish.api.ExtractorOrder - 0.2
label = "Extract Fbx Camera"
hosts = ["max"]
families = ["camera"]
optional = True
def process(self, instance):
# Optional plugin: skip when disabled for this instance.
if not self.is_active(instance.data):
return
container = instance.data["instance_node"]
self.log.info("Extracting Camera ...")
stagingdir = self.staging_dir(instance)
filename = "{name}.fbx".format(**instance.data)
filepath = os.path.join(stagingdir, filename)
self.log.info("Writing fbx file '%s' to '%s'" % (filename,
filepath))
# Need to export:
# Animation = True
# Cameras = True
# AxisConversionMethod
# MAXScript snippet executed inside Max to perform the FBX export.
fbx_export_cmd = (
f"""
FBXExporterSetParam "Animation" true
FBXExporterSetParam "Cameras" true
FBXExporterSetParam "AxisConversionMethod" "Animation"
FbxExporterSetParam "UpAxis" "Y"
FbxExporterSetParam "Preserveinstances" true
exportFile @"{filepath}" #noPrompt selectedOnly:true using:FBXEXP
""")
self.log.debug(f"Executing command: {fbx_export_cmd}")
with maintained_selection():
# select and export
rt.select(get_all_children(rt.getNodeByName(container)))
rt.execute(fbx_export_cmd)
self.log.info("Performing Extraction ...")
# Register the produced file as a representation on the instance.
if "representations" not in instance.data:
instance.data["representations"] = []
representation = {
'name': 'fbx',
'ext': 'fbx',
'files': filename,
"stagingDir": stagingdir,
}
instance.data["representations"].append(representation)
self.log.info("Extracted instance '%s' to: %s" % (instance.name,
filepath))

View file

@ -0,0 +1,60 @@
import os
import pyblish.api
from openpype.pipeline import (
publish,
OptionalPyblishPluginMixin
)
from pymxs import runtime as rt
from openpype.hosts.max.api import (
maintained_selection,
get_all_children
)
# Extractor: saves the camera instance's nodes as a raw .max scene file
# using MAXScript's saveNodes on the current selection.
class ExtractMaxSceneRaw(publish.Extractor,
OptionalPyblishPluginMixin):
"""
Extract Raw Max Scene with SaveSelected
"""
# Runs before the default extractor order.
order = pyblish.api.ExtractorOrder - 0.2
label = "Extract Max Scene (Raw)"
hosts = ["max"]
families = ["camera"]
optional = True
def process(self, instance):
# Optional plugin: skip when disabled for this instance.
if not self.is_active(instance.data):
return
container = instance.data["instance_node"]
# publish the raw scene for camera
self.log.info("Extracting Raw Max Scene ...")
stagingdir = self.staging_dir(instance)
filename = "{name}.max".format(**instance.data)
max_path = os.path.join(stagingdir, filename)
self.log.info("Writing max file '%s' to '%s'" % (filename,
max_path))
if "representations" not in instance.data:
instance.data["representations"] = []
# saving max scene
with maintained_selection():
# need to figure out how to select the camera
rt.select(get_all_children(rt.getNodeByName(container)))
rt.execute(f'saveNodes selection "{max_path}" quiet:true')
self.log.info("Performing Extraction ...")
# Register the produced file as a representation on the instance.
representation = {
'name': 'max',
'ext': 'max',
'files': filename,
"stagingDir": stagingdir,
}
instance.data["representations"].append(representation)
self.log.info("Extracted instance '%s' to: %s" % (instance.name,
max_path))

View file

@ -51,7 +51,7 @@ class ExtractAlembic(publish.Extractor):
order = pyblish.api.ExtractorOrder
label = "Extract Pointcache"
hosts = ["max"]
families = ["pointcache", "camera"]
families = ["pointcache"]
def process(self, instance):
start = float(instance.data.get("frameStartHandle", 1))

View file

@ -0,0 +1,48 @@
# -*- coding: utf-8 -*-
import pyblish.api
from openpype.pipeline import PublishValidationError
from pymxs import runtime as rt
class ValidateCameraContent(pyblish.api.InstancePlugin):
    """Validates Camera instance contents.

    A Camera instance may only hold a SINGLE camera's transform
    """

    order = pyblish.api.ValidatorOrder
    families = ["camera"]
    hosts = ["max"]
    label = "Camera Contents"
    # Node-name prefixes accepted as camera content. "$Target" covers the
    # aim helper node of a target camera.
    camera_type = ["$Free_Camera", "$Target_Camera",
                   "$Physical_Camera", "$Target"]

    def process(self, instance):
        """Raise PublishValidationError when non-camera nodes are found."""
        invalid = self.get_invalid(instance)
        if invalid:
            # Bug fix: original adjacent string literals concatenated to
            # "...includecamera..." -- a separating space was added.
            raise PublishValidationError("Camera instance must only include "
                                         "camera (and camera target)")

    def get_invalid(self, instance):
        """Return children of the instance container that are not cameras.

        Args:
            instance (pyblish.api.Instance): Instance whose data holds the
                'instance_node' container name.

        Returns:
            list: Child nodes whose names do not start with any accepted
                camera prefix.
        """
        invalid = list()
        container = instance.data["instance_node"]
        # Bug fix: log previously said "look content" (copy-paste from a
        # look validator); this plugin validates camera content.
        self.log.info("Validating camera content for "
                      "{}".format(container))
        con = rt.getNodeByName(container)
        for node in list(con.Children):
            # Compare on str() to avoid AttributeError raised by the pymxs
            # wrapper when accessing attributes on some node types.
            if not str(node).startswith(tuple(self.camera_type)):
                self.log.error("Camera not found")
                invalid.append(node)
        return invalid

View file

@ -254,11 +254,6 @@ def read(node):
return data
def _get_mel_global(name):
"""Return the value of a mel global variable"""
return mel.eval("$%s = $%s;" % (name, name))
def matrix_equals(a, b, tolerance=1e-10):
"""
Compares two matrices with an imperfection tolerance
@ -624,15 +619,15 @@ class delete_after(object):
cmds.delete(self._nodes)
def get_current_renderlayer():
return cmds.editRenderLayerGlobals(query=True, currentRenderLayer=True)
def get_renderer(layer):
with renderlayer(layer):
return cmds.getAttr("defaultRenderGlobals.currentRenderer")
def get_current_renderlayer():
return cmds.editRenderLayerGlobals(query=True, currentRenderLayer=True)
@contextlib.contextmanager
def no_undo(flush=False):
"""Disable the undo queue during the context
@ -1373,27 +1368,6 @@ def set_id(node, unique_id, overwrite=False):
cmds.setAttr(attr, unique_id, type="string")
# endregion ID
def get_reference_node(path):
"""
Get the reference node when the path is found being used in a reference
Args:
path (str): the file path to check
Returns:
node (str): name of the reference node in question
"""
try:
node = cmds.file(path, query=True, referenceNode=True)
except RuntimeError:
log.debug('File is not referenced : "{}"'.format(path))
return
reference_path = cmds.referenceQuery(path, filename=True)
if os.path.normpath(path) == os.path.normpath(reference_path):
return node
def set_attribute(attribute, value, node):
"""Adjust attributes based on the value from the attribute data

View file

@ -50,7 +50,6 @@ def install():
parent="MayaWindow"
)
renderer = cmds.getAttr('defaultRenderGlobals.currentRenderer').lower()
# Create context menu
context_label = "{}, {}".format(
legacy_io.Session["AVALON_ASSET"],

View file

@ -12,7 +12,6 @@ class CollectMayaWorkspace(pyblish.api.ContextPlugin):
label = "Maya Workspace"
hosts = ['maya']
version = (0, 1, 0)
def process(self, context):
workspace = cmds.workspace(rootDirectory=True, query=True)

View file

@ -58,23 +58,23 @@ class ValidateAttributes(pyblish.api.ContextPlugin):
# Filter families.
families = [instance.data["family"]]
families += instance.data.get("families", [])
families = list(set(families) & set(self.attributes.keys()))
families = list(set(families) & set(cls.attributes.keys()))
if not families:
continue
# Get all attributes to validate.
attributes = {}
for family in families:
for preset in self.attributes[family]:
for preset in cls.attributes[family]:
[node_name, attribute_name] = preset.split(".")
try:
attributes[node_name].update(
{attribute_name: self.attributes[family][preset]}
{attribute_name: cls.attributes[family][preset]}
)
except KeyError:
attributes.update({
node_name: {
attribute_name: self.attributes[family][preset]
attribute_name: cls.attributes[family][preset]
}
})

View file

@ -19,7 +19,6 @@ class ValidateColorSets(pyblish.api.Validator):
order = ValidateMeshOrder
hosts = ['maya']
families = ['model']
category = 'geometry'
label = 'Mesh ColorSets'
actions = [openpype.hosts.maya.api.action.SelectInvalidAction,
RepairAction]

View file

@ -11,10 +11,6 @@ from openpype.pipeline.publish import (
)
def float_round(num, places=0, direction=ceil):
    """Round ``num`` at ``places`` decimals using ``direction`` (e.g. ceil)."""
    factor = float(10 ** places)
    return direction(num * factor) / factor
class ValidateMayaUnits(pyblish.api.ContextPlugin):
"""Check if the Maya units are set correct"""
@ -36,6 +32,7 @@ class ValidateMayaUnits(pyblish.api.ContextPlugin):
# Collected units
linearunits = context.data.get('linearUnits')
angularunits = context.data.get('angularUnits')
# TODO(antirotor): This is hack as for framerates having multiple
# decimal places. FTrack is ceiling decimal values on
# fps to two decimal places but Maya 2019+ is reporting those fps
@ -43,7 +40,7 @@ class ValidateMayaUnits(pyblish.api.ContextPlugin):
# rounding, we have to round those numbers coming from Maya.
# NOTE: this must be revisited yet again as it seems that Ftrack is
# now flooring the value?
fps = float_round(context.data.get('fps'), 2, ceil)
fps = mayalib.float_round(context.data.get('fps'), 2, ceil)
# TODO repace query with using 'context.data["assetEntity"]'
asset_doc = get_current_project_asset()

View file

@ -19,7 +19,6 @@ class ValidateMeshArnoldAttributes(pyblish.api.InstancePlugin):
order = ValidateMeshOrder
hosts = ["maya"]
families = ["model"]
category = "geometry"
label = "Mesh Arnold Attributes"
actions = [
openpype.hosts.maya.api.action.SelectInvalidAction,

View file

@ -48,7 +48,6 @@ class ValidateMeshHasUVs(pyblish.api.InstancePlugin):
order = ValidateMeshOrder
hosts = ['maya']
families = ['model']
category = 'geometry'
label = 'Mesh Has UVs'
actions = [openpype.hosts.maya.api.action.SelectInvalidAction]
optional = True

View file

@ -15,8 +15,6 @@ class ValidateMeshLaminaFaces(pyblish.api.InstancePlugin):
order = ValidateMeshOrder
hosts = ['maya']
families = ['model']
category = 'geometry'
version = (0, 1, 0)
label = 'Mesh Lamina Faces'
actions = [openpype.hosts.maya.api.action.SelectInvalidAction]

View file

@ -19,8 +19,6 @@ class ValidateMeshNonZeroEdgeLength(pyblish.api.InstancePlugin):
order = ValidateMeshOrder
families = ['model']
hosts = ['maya']
category = 'geometry'
version = (0, 1, 0)
label = 'Mesh Edge Length Non Zero'
actions = [openpype.hosts.maya.api.action.SelectInvalidAction]
optional = True

View file

@ -20,8 +20,6 @@ class ValidateMeshNormalsUnlocked(pyblish.api.Validator):
order = ValidateMeshOrder
hosts = ['maya']
families = ['model']
category = 'geometry'
version = (0, 1, 0)
label = 'Mesh Normals Unlocked'
actions = [openpype.hosts.maya.api.action.SelectInvalidAction,
RepairAction]

View file

@ -235,7 +235,6 @@ class ValidateMeshHasOverlappingUVs(pyblish.api.InstancePlugin):
order = ValidateMeshOrder
hosts = ['maya']
families = ['model']
category = 'geometry'
label = 'Mesh Has Overlapping UVs'
actions = [openpype.hosts.maya.api.action.SelectInvalidAction]
optional = True

View file

@ -21,9 +21,7 @@ class ValidateMeshSingleUVSet(pyblish.api.InstancePlugin):
order = ValidateMeshOrder
hosts = ['maya']
families = ['model', 'pointcache']
category = 'uv'
optional = True
version = (0, 1, 0)
label = "Mesh Single UV Set"
actions = [openpype.hosts.maya.api.action.SelectInvalidAction,
RepairAction]

View file

@ -63,7 +63,6 @@ class ValidateMeshVerticesHaveEdges(pyblish.api.InstancePlugin):
order = ValidateMeshOrder
hosts = ['maya']
families = ['model']
category = 'geometry'
label = 'Mesh Vertices Have Edges'
actions = [openpype.hosts.maya.api.action.SelectInvalidAction,
RepairAction]

View file

@ -16,7 +16,6 @@ class ValidateNoDefaultCameras(pyblish.api.InstancePlugin):
order = ValidateContentsOrder
hosts = ['maya']
families = ['camera']
version = (0, 1, 0)
label = "No Default Cameras"
actions = [openpype.hosts.maya.api.action.SelectInvalidAction]

View file

@ -23,8 +23,6 @@ class ValidateNoNamespace(pyblish.api.InstancePlugin):
order = ValidateContentsOrder
hosts = ['maya']
families = ['model']
category = 'cleanup'
version = (0, 1, 0)
label = 'No Namespaces'
actions = [openpype.hosts.maya.api.action.SelectInvalidAction,
RepairAction]

View file

@ -43,8 +43,6 @@ class ValidateNoNullTransforms(pyblish.api.InstancePlugin):
order = ValidateContentsOrder
hosts = ['maya']
families = ['model']
category = 'cleanup'
version = (0, 1, 0)
label = 'No Empty/Null Transforms'
actions = [RepairAction,
openpype.hosts.maya.api.action.SelectInvalidAction]

View file

@ -24,7 +24,6 @@ class ValidateRigJointsHidden(pyblish.api.InstancePlugin):
order = ValidateContentsOrder
hosts = ['maya']
families = ['rig']
version = (0, 1, 0)
label = "Joints Hidden"
actions = [openpype.hosts.maya.api.action.SelectInvalidAction,
RepairAction]

View file

@ -31,8 +31,6 @@ class ValidateSceneSetWorkspace(pyblish.api.ContextPlugin):
order = ValidatePipelineOrder
hosts = ['maya']
category = 'scene'
version = (0, 1, 0)
label = 'Maya Workspace Set'
def process(self, context):

View file

@ -38,9 +38,7 @@ class ValidateShapeDefaultNames(pyblish.api.InstancePlugin):
order = ValidateContentsOrder
hosts = ['maya']
families = ['model']
category = 'cleanup'
optional = True
version = (0, 1, 0)
label = "Shape Default Naming"
actions = [openpype.hosts.maya.api.action.SelectInvalidAction,
RepairAction]

View file

@ -32,9 +32,7 @@ class ValidateTransformNamingSuffix(pyblish.api.InstancePlugin):
order = ValidateContentsOrder
hosts = ['maya']
families = ['model']
category = 'cleanup'
optional = True
version = (0, 1, 0)
label = 'Suffix Naming Conventions'
actions = [openpype.hosts.maya.api.action.SelectInvalidAction]
SUFFIX_NAMING_TABLE = {"mesh": ["_GEO", "_GES", "_GEP", "_OSD"],

View file

@ -18,8 +18,6 @@ class ValidateTransformZero(pyblish.api.Validator):
order = ValidateContentsOrder
hosts = ["maya"]
families = ["model"]
category = "geometry"
version = (0, 1, 0)
label = "Transform Zero (Freeze)"
actions = [openpype.hosts.maya.api.action.SelectInvalidAction]

View file

@ -13,7 +13,6 @@ class ValidateUnrealMeshTriangulated(pyblish.api.InstancePlugin):
order = ValidateMeshOrder
hosts = ["maya"]
families = ["staticMesh"]
category = "geometry"
label = "Mesh is Triangulated"
actions = [openpype.hosts.maya.api.action.SelectInvalidAction]
active = False

View file

@ -30,7 +30,7 @@ from .vendor_bin_utils import (
)
from .attribute_definitions import (
AbtractAttrDef,
AbstractAttrDef,
UIDef,
UISeparatorDef,
@ -246,7 +246,7 @@ __all__ = [
"get_ffmpeg_tool_path",
"is_oiio_supported",
"AbtractAttrDef",
"AbstractAttrDef",
"UIDef",
"UISeparatorDef",

View file

@ -20,7 +20,7 @@ def register_attr_def_class(cls):
Currently are registered definitions used to deserialize data to objects.
Attrs:
cls (AbtractAttrDef): Non-abstract class to be registered with unique
cls (AbstractAttrDef): Non-abstract class to be registered with unique
'type' attribute.
Raises:
@ -36,7 +36,7 @@ def get_attributes_keys(attribute_definitions):
"""Collect keys from list of attribute definitions.
Args:
attribute_definitions (List[AbtractAttrDef]): Objects of attribute
attribute_definitions (List[AbstractAttrDef]): Objects of attribute
definitions.
Returns:
@ -57,8 +57,8 @@ def get_default_values(attribute_definitions):
"""Receive default values for attribute definitions.
Args:
attribute_definitions (List[AbtractAttrDef]): Attribute definitions for
which default values should be collected.
attribute_definitions (List[AbstractAttrDef]): Attribute definitions
for which default values should be collected.
Returns:
Dict[str, Any]: Default values for passed attribute definitions.
@ -76,15 +76,15 @@ def get_default_values(attribute_definitions):
class AbstractAttrDefMeta(ABCMeta):
"""Meta class to validate existence of 'key' attribute.
"""Metaclass to validate existence of 'key' attribute.
Each object of `AbtractAttrDef` mus have defined 'key' attribute.
Each object of `AbstractAttrDef` must have a defined 'key' attribute.
"""
def __call__(self, *args, **kwargs):
obj = super(AbstractAttrDefMeta, self).__call__(*args, **kwargs)
init_class = getattr(obj, "__init__class__", None)
if init_class is not AbtractAttrDef:
if init_class is not AbstractAttrDef:
raise TypeError("{} super was not called in __init__.".format(
type(obj)
))
@ -92,7 +92,7 @@ class AbstractAttrDefMeta(ABCMeta):
@six.add_metaclass(AbstractAttrDefMeta)
class AbtractAttrDef(object):
class AbstractAttrDef(object):
"""Abstraction of attribute definition.
Each attribute definition must have implemented validation and
@ -145,7 +145,7 @@ class AbtractAttrDef(object):
self.disabled = disabled
self._id = uuid.uuid4().hex
self.__init__class__ = AbtractAttrDef
self.__init__class__ = AbstractAttrDef
@property
def id(self):
@ -154,7 +154,15 @@ class AbtractAttrDef(object):
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
return self.key == other.key
return (
self.key == other.key
and self.hidden == other.hidden
and self.default == other.default
and self.disabled == other.disabled
)
def __ne__(self, other):
return not self.__eq__(other)
@abstractproperty
def type(self):
@ -212,7 +220,7 @@ class AbtractAttrDef(object):
# UI attribute definitions won't hold value
# -----------------------------------------
class UIDef(AbtractAttrDef):
class UIDef(AbstractAttrDef):
is_value_def = False
def __init__(self, key=None, default=None, *args, **kwargs):
@ -237,7 +245,7 @@ class UILabelDef(UIDef):
# Attribute definitions should hold value
# ---------------------------------------
class UnknownDef(AbtractAttrDef):
class UnknownDef(AbstractAttrDef):
"""Definition is not known because definition is not available.
This attribute can be used to keep existing data unchanged but does not
@ -254,7 +262,7 @@ class UnknownDef(AbtractAttrDef):
return value
class HiddenDef(AbtractAttrDef):
class HiddenDef(AbstractAttrDef):
"""Hidden value of Any type.
This attribute can be used for UI purposes to pass values related
@ -274,7 +282,7 @@ class HiddenDef(AbtractAttrDef):
return value
class NumberDef(AbtractAttrDef):
class NumberDef(AbstractAttrDef):
"""Number definition.
Number can have defined minimum/maximum value and decimal points. Value
@ -350,7 +358,7 @@ class NumberDef(AbtractAttrDef):
return round(float(value), self.decimals)
class TextDef(AbtractAttrDef):
class TextDef(AbstractAttrDef):
"""Text definition.
Text can have multiline option so endline characters are allowed regex
@ -415,7 +423,7 @@ class TextDef(AbtractAttrDef):
return data
class EnumDef(AbtractAttrDef):
class EnumDef(AbstractAttrDef):
"""Enumeration of single item from items.
Args:
@ -457,7 +465,7 @@ class EnumDef(AbtractAttrDef):
return self.default
def serialize(self):
data = super(TextDef, self).serialize()
data = super(EnumDef, self).serialize()
data["items"] = copy.deepcopy(self.items)
return data
@ -523,7 +531,8 @@ class EnumDef(AbtractAttrDef):
return output
class BoolDef(AbtractAttrDef):
class BoolDef(AbstractAttrDef):
"""Boolean representation.
Args:
@ -768,7 +777,7 @@ class FileDefItem(object):
return output
class FileDef(AbtractAttrDef):
class FileDef(AbstractAttrDef):
"""File definition.
It is possible to define filters of allowed file extensions and if supports
folders.
@ -886,7 +895,7 @@ def serialize_attr_def(attr_def):
"""Serialize attribute definition to data.
Args:
attr_def (AbtractAttrDef): Attribute definition to serialize.
attr_def (AbstractAttrDef): Attribute definition to serialize.
Returns:
Dict[str, Any]: Serialized data.
@ -899,7 +908,7 @@ def serialize_attr_defs(attr_defs):
"""Serialize attribute definitions to data.
Args:
attr_defs (List[AbtractAttrDef]): Attribute definitions to serialize.
attr_defs (List[AbstractAttrDef]): Attribute definitions to serialize.
Returns:
List[Dict[str, Any]]: Serialized data.

View file

@ -86,6 +86,12 @@ from .context_tools import (
registered_host,
deregister_host,
get_process_id,
get_current_context,
get_current_host_name,
get_current_project_name,
get_current_asset_name,
get_current_task_name,
)
install = install_host
uninstall = uninstall_host
@ -176,6 +182,13 @@ __all__ = (
"register_host",
"registered_host",
"deregister_host",
"get_process_id",
"get_current_context",
"get_current_host_name",
"get_current_project_name",
"get_current_asset_name",
"get_current_task_name",
# Backwards compatible function names
"install",

View file

@ -438,13 +438,14 @@ def get_imageio_file_rules(project_name, host_name, project_settings=None):
# get file rules from global and host_name
frules_global = imageio_global["file_rules"]
frules_host = imageio_host["file_rules"]
# host is optional, some might not have any settings
frules_host = imageio_host.get("file_rules", {})
# compile file rules dictionary
file_rules = {}
if frules_global["enabled"]:
file_rules.update(frules_global["rules"])
if frules_host["enabled"]:
if frules_host and frules_host["enabled"]:
file_rules.update(frules_host["rules"])
return file_rules
@ -455,7 +456,7 @@ def _get_imageio_settings(project_settings, host_name):
Args:
project_settings (dict): project settings.
Defaults to None.
Defaults to None.
host_name (str): host name
Returns:
@ -463,6 +464,7 @@ def _get_imageio_settings(project_settings, host_name):
"""
# get image io from global and host_name
imageio_global = project_settings["global"]["imageio"]
imageio_host = project_settings[host_name]["imageio"]
# host is optional, some might not have any settings
imageio_host = project_settings.get(host_name, {}).get("imageio", {})
return imageio_global, imageio_host

View file

@ -11,6 +11,7 @@ import pyblish.api
from pyblish.lib import MessageHandler
import openpype
from openpype.host import HostBase
from openpype.client import (
get_project,
get_asset_by_id,
@ -306,6 +307,58 @@ def debug_host():
return host
def get_current_host_name():
    """Current host name.

    Based on the currently registered host integration, with the
    'AVALON_APP' environment variable as fallback.

    Returns:
        Union[str, None]: Name of host integration in current process or None.
    """
    host = registered_host()
    if not isinstance(host, HostBase):
        return os.environ.get("AVALON_APP")
    return host.name
def get_global_context():
    """Global context based purely on environment variables.

    Returns:
        Dict[str, Union[str, None]]: Keys 'project_name', 'asset_name' and
            'task_name'; any value may be None when the variable is unset.
    """
    env = os.environ.get
    return {
        "project_name": env("AVALON_PROJECT"),
        "asset_name": env("AVALON_ASSET"),
        "task_name": env("AVALON_TASK"),
    }
def get_current_context():
    """Current context from registered host or environment variables."""
    host = registered_host()
    if not isinstance(host, HostBase):
        return get_global_context()
    return host.get_current_context()
def get_current_project_name():
    """Current project name from registered host or environment."""
    host = registered_host()
    if not isinstance(host, HostBase):
        return get_global_context()["project_name"]
    return host.get_current_project_name()
def get_current_asset_name():
    """Current asset name from registered host or environment."""
    host = registered_host()
    if not isinstance(host, HostBase):
        return get_global_context()["asset_name"]
    return host.get_current_asset_name()
def get_current_task_name():
    """Current task name from registered host or environment."""
    host = registered_host()
    if not isinstance(host, HostBase):
        return get_global_context()["task_name"]
    return host.get_current_task_name()
def get_current_project(fields=None):
"""Helper function to get project document based on global Session.
@ -316,7 +369,7 @@ def get_current_project(fields=None):
None: Project is not set.
"""
project_name = legacy_io.active_project()
project_name = get_current_project_name()
return get_project(project_name, fields=fields)
@ -341,12 +394,12 @@ def get_current_project_asset(asset_name=None, asset_id=None, fields=None):
None: Asset is not set or not exist.
"""
project_name = legacy_io.active_project()
project_name = get_current_project_name()
if asset_id:
return get_asset_by_id(project_name, asset_id, fields=fields)
if not asset_name:
asset_name = legacy_io.Session.get("AVALON_ASSET")
asset_name = get_current_asset_name()
# Skip if is not set even on context
if not asset_name:
return None
@ -363,7 +416,7 @@ def is_representation_from_latest(representation):
bool: Whether the representation is of latest version.
"""
project_name = legacy_io.active_project()
project_name = get_current_project_name()
return version_is_latest(project_name, representation["parent"])

View file

@ -13,6 +13,11 @@ from openpype.settings import (
get_system_settings,
get_project_settings
)
from openpype.lib.attribute_definitions import (
UnknownDef,
serialize_attr_defs,
deserialize_attr_defs,
)
from openpype.host import IPublishHost
from openpype.pipeline import legacy_io
from openpype.pipeline.mongodb import (
@ -28,6 +33,7 @@ from .creator_plugins import (
CreatorError,
)
# Changes of instances and context are send as tuple of 2 information
UpdateData = collections.namedtuple("UpdateData", ["instance", "changes"])
@ -208,14 +214,12 @@ class AttributeValues(object):
Has dictionary like methods. Not all of them are allowed all the time.
Args:
attr_defs(AbtractAttrDef): Defintions of value type and properties.
attr_defs(AbstractAttrDef): Definitions of value type and properties.
values(dict): Values after possible conversion.
origin_data(dict): Values loaded from host before conversion.
"""
def __init__(self, attr_defs, values, origin_data=None):
from openpype.lib.attribute_definitions import UnknownDef
if origin_data is None:
origin_data = copy.deepcopy(values)
self._origin_data = origin_data
@ -288,11 +292,21 @@ class AttributeValues(object):
@property
def attr_defs(self):
"""Pointer to attribute definitions."""
return self._attr_defs
"""Pointer to attribute definitions.
Returns:
List[AbstractAttrDef]: Attribute definitions.
"""
return list(self._attr_defs)
def data_to_store(self):
"""Create new dictionary with data to store."""
"""Create new dictionary with data to store.
Returns:
Dict[str, Any]: Attribute values that should be stored.
"""
output = {}
for key in self._data:
output[key] = self[key]
@ -305,6 +319,7 @@ class AttributeValues(object):
@staticmethod
def calculate_changes(new_data, old_data):
"""Calculate changes of 2 dictionary objects."""
changes = {}
for key, new_value in new_data.items():
old_value = old_data.get(key)
@ -325,6 +340,15 @@ class AttributeValues(object):
elif self.get(key) != new_value:
self[key] = new_value
def get_serialized_attr_defs(self):
"""Serialize attribute definitions to json serializable types.
Returns:
List[Dict[str, Any]]: Serialized attribute definitions.
"""
return serialize_attr_defs(self._attr_defs)
class CreatorAttributeValues(AttributeValues):
"""Creator specific attribute values of an instance.
@ -362,13 +386,14 @@ class PublishAttributes:
"""Wrapper for publish plugin attribute definitions.
Cares about handling attribute definitions of multiple publish plugins.
Keep information about attribute definitions and their values.
Args:
parent(CreatedInstance, CreateContext): Parent for which will be
data stored and from which are data loaded.
origin_data(dict): Loaded data by plugin class name.
attr_plugins(list): List of publish plugins that may have defined
attribute definitions.
attr_plugins(Union[List[pyblish.api.Plugin], None]): List of publish
plugins that may have defined attribute definitions.
"""
def __init__(self, parent, origin_data, attr_plugins=None):
@ -509,6 +534,42 @@ class PublishAttributes:
self, [], value, value
)
def serialize_attributes(self):
    """Serialize current publish attributes into json compatible data.

    Returns:
        Dict[str, Any]: Serialized state consumable by
            'deserialize_attributes'.
    """

    attr_defs = {}
    for plugin_name, attrs_value in self._data.items():
        attr_defs[plugin_name] = attrs_value.get_serialized_attr_defs()

    return {
        "attr_defs": attr_defs,
        "plugin_names_order": self._plugin_names_order,
        "missing_plugins": self._missing_plugins,
    }
def deserialize_attributes(self, data):
    """Recreate attribute values from serialized data.

    Counterpart of 'serialize_attributes'. Restores plugin names order,
    missing plugin names and per-plugin attribute values.

    Args:
        data (Dict[str, Any]): Data created by 'serialize_attributes'
            with 'attr_defs', 'plugin_names_order' and 'missing_plugins'
            keys.
    """

    self._plugin_names_order = data["plugin_names_order"]
    self._missing_plugins = data["missing_plugins"]

    # Mapping of plugin name -> serialized attribute definitions.
    # NOTE: Each value must be deserialized separately inside the loop;
    #   the mapping itself must not be passed to 'deserialize_attr_defs'.
    attr_defs_by_plugin = data["attr_defs"]

    origin_data = self._origin_data
    old_values = self._data
    self._data = {}
    added_keys = set()
    for plugin_name, attr_defs_data in attr_defs_by_plugin.items():
        # Track handled plugin names so leftover values are not
        #   re-added as missing in the loop below.
        added_keys.add(plugin_name)
        attr_defs = deserialize_attr_defs(attr_defs_data)
        value = old_values.get(plugin_name) or {}
        orig_value = copy.deepcopy(origin_data.get(plugin_name) or {})
        self._data[plugin_name] = PublishAttributeValues(
            self, attr_defs, value, orig_value
        )

    # Keep values of plugins that were not part of serialized data so
    #   the data are not lost.
    for key, value in old_values.items():
        if key not in added_keys:
            self._missing_plugins.append(key)
            self._data[key] = PublishAttributeValues(
                self, [], value, value
            )
class CreatedInstance:
"""Instance entity with data that will be stored to workfile.
@ -517,15 +578,22 @@ class CreatedInstance:
about instance like "asset" and "task" and all data used for filling subset
name as creators may have custom data for subset name filling.
Notes:
Object have 2 possible initialization. One using 'creator' object which
is recommended for api usage. Second by passing information about
creator.
Args:
family(str): Name of family that will be created.
subset_name(str): Name of subset that will be created.
data(dict): Data used for filling subset name or override data from
already existing instance.
creator(BaseCreator): Creator responsible for instance.
host(ModuleType): Host implementation loaded with
`openpype.pipeline.registered_host`.
new(bool): Is instance new.
family (str): Name of family that will be created.
subset_name (str): Name of subset that will be created.
data (Dict[str, Any]): Data used for filling subset name or override
data from already existing instance.
creator (Union[BaseCreator, None]): Creator responsible for instance.
creator_identifier (str): Identifier of creator plugin.
creator_label (str): Creator plugin label.
group_label (str): Default group label from creator plugin.
creator_attr_defs (List[AbstractAttrDef]): Attribute definitions from
creator.
"""
# Keys that can't be changed or removed from data after loading using
@ -542,9 +610,24 @@ class CreatedInstance:
)
def __init__(
self, family, subset_name, data, creator, new=True
self,
family,
subset_name,
data,
creator=None,
creator_identifier=None,
creator_label=None,
group_label=None,
creator_attr_defs=None,
):
self.creator = creator
if creator is not None:
creator_identifier = creator.identifier
group_label = creator.get_group_label()
creator_label = creator.label
creator_attr_defs = creator.get_instance_attr_defs()
self._creator_label = creator_label
self._group_label = group_label or creator_identifier
# Instance members may have actions on them
# TODO implement members logic
@ -574,7 +657,7 @@ class CreatedInstance:
self._data["family"] = family
self._data["subset"] = subset_name
self._data["active"] = data.get("active", True)
self._data["creator_identifier"] = creator.identifier
self._data["creator_identifier"] = creator_identifier
# Pop from source data all keys that are defined in `_data` before
# this moment and through their values away
@ -588,10 +671,12 @@ class CreatedInstance:
# Stored creator specific attribute values
# {key: value}
creator_values = copy.deepcopy(orig_creator_attributes)
creator_attr_defs = creator.get_instance_attr_defs()
self._data["creator_attributes"] = CreatorAttributeValues(
self, creator_attr_defs, creator_values, orig_creator_attributes
self,
list(creator_attr_defs),
creator_values,
orig_creator_attributes
)
# Stored publish specific attribute values
@ -676,64 +761,23 @@ class CreatedInstance:
label = self._data.get("group")
if label:
return label
return self.creator.get_group_label()
return self._group_label
@property
def creator_identifier(self):
return self.creator.identifier
return self._data["creator_identifier"]
@property
def creator_label(self):
return self.creator.label or self.creator_identifier
@property
def create_context(self):
return self.creator.create_context
@property
def host(self):
return self.create_context.host
@property
def has_set_asset(self):
"""Asset name is set in data."""
return "asset" in self._data
@property
def has_set_task(self):
"""Task name is set in data."""
return "task" in self._data
@property
def has_valid_context(self):
"""Context data are valid for publishing."""
return self.has_valid_asset and self.has_valid_task
@property
def has_valid_asset(self):
"""Asset set in context exists in project."""
if not self.has_set_asset:
return False
return self._asset_is_valid
@property
def has_valid_task(self):
"""Task set in context exists in project."""
if not self.has_set_task:
return False
return self._task_is_valid
def set_asset_invalid(self, invalid):
# TODO replace with `set_asset_name`
self._asset_is_valid = not invalid
def set_task_invalid(self, invalid):
# TODO replace with `set_task_name`
self._task_is_valid = not invalid
return self._creator_label or self.creator_identifier
@property
def id(self):
"""Instance identifier."""
"""Instance identifier.
Returns:
str: UUID of instance.
"""
return self._data["instance_id"]
@ -742,6 +786,10 @@ class CreatedInstance:
"""Legacy access to data.
Access to data is needed to modify values.
Returns:
CreatedInstance: Object can be used as dictionary but with
validations of immutable keys.
"""
return self
@ -818,6 +866,12 @@ class CreatedInstance:
@property
def creator_attribute_defs(self):
    """Attribute definitions defined by creator plugin.

    Returns:
        List[AbstractAttrDef]: Attribute definitions.
    """
    return self.creator_attributes.attr_defs
@property
@ -829,7 +883,7 @@ class CreatedInstance:
It is possible to recreate the instance using these data.
Todo:
Todos:
We probably don't need OrderedDict. When data are loaded they
are not ordered anymore.
@ -850,7 +904,15 @@ class CreatedInstance:
@classmethod
def from_existing(cls, instance_data, creator):
"""Convert instance data from workfile to CreatedInstance."""
"""Convert instance data from workfile to CreatedInstance.
Args:
instance_data (Dict[str, Any]): Data in a structure ready for
'CreatedInstance' object.
creator (Creator): Creator plugin which is creating the instance
or for which the instance belongs.
"""
instance_data = copy.deepcopy(instance_data)
family = instance_data.get("family", None)
@ -859,26 +921,49 @@ class CreatedInstance:
subset_name = instance_data.get("subset", None)
return cls(
family, subset_name, instance_data, creator, new=False
family, subset_name, instance_data, creator
)
def set_publish_plugins(self, attr_plugins):
    """Set publish plugins with attribute definitions.

    This method should be called only from 'CreateContext'.

    Args:
        attr_plugins (List[pyblish.api.Plugin]): Pyblish plugins which
            inherit from 'OpenPypePyblishPluginMixin' and may contain
            attribute definitions.
    """
    # Delegates to the wrapped 'PublishAttributes' object which keeps
    #   per-plugin attribute values.
    self.publish_attributes.set_publish_plugins(attr_plugins)
def add_members(self, members):
    """Add new members to the instance (currently unused method).

    Args:
        members (Iterable[Any]): Members to add. Already-present
            members are skipped.
    """

    existing = self._members
    for member in members:
        if member not in existing:
            existing.append(member)
def serialize_for_remote(self):
    """Serialize object into data to be possible recreated object.

    Returns:
        Dict[str, Any]: Serialized data with 'data', 'orig_data',
            'creator_attr_defs', 'publish_attributes', 'creator_label'
            and 'group_label' keys.
    """

    creator_attr_defs = self.creator_attributes.get_serialized_attr_defs()
    publish_attributes = self.publish_attributes.serialize_attributes()
    return {
        "data": self.data_to_store(),
        # Single 'orig_data' entry; a leftover duplicate line without
        #   trailing comma was removed (it was a syntax error).
        "orig_data": copy.deepcopy(self._orig_data),
        "creator_attr_defs": creator_attr_defs,
        "publish_attributes": publish_attributes,
        "creator_label": self._creator_label,
        "group_label": self._group_label,
    }
@classmethod
def deserialize_on_remote(cls, serialized_data):
    """Convert instance data to CreatedInstance.

    This is fake instance in remote process e.g. in UI process. The
    creator plugins are not available there, so the instance is rebuilt
    purely from serialized data.

    Args:
        serialized_data (Dict[str, Any]): Serialized data for remote
            recreating. Should contain 'data' and 'orig_data'.

    Returns:
        CreatedInstance: Recreated instance.
    """

    instance_data = copy.deepcopy(serialized_data["data"])
    creator_identifier = instance_data["creator_identifier"]

    family = instance_data["family"]
    subset_name = instance_data.get("subset", None)

    creator_label = serialized_data["creator_label"]
    group_label = serialized_data["group_label"]
    creator_attr_defs = deserialize_attr_defs(
        serialized_data["creator_attr_defs"]
    )
    publish_attributes = serialized_data["publish_attributes"]

    # NOTE: keyword must match '__init__' parameter 'creator_attr_defs'
    #   ('creator_attributes' is not a parameter and raised TypeError).
    obj = cls(
        family,
        subset_name,
        instance_data,
        creator_identifier=creator_identifier,
        creator_label=creator_label,
        group_label=group_label,
        creator_attr_defs=creator_attr_defs
    )
    obj._orig_data = serialized_data["orig_data"]
    obj.publish_attributes.deserialize_attributes(publish_attributes)

    return obj
@ -962,6 +1055,49 @@ class CreatedInstance:
if current_value != new_value:
self[key] = new_value
# Context validation related methods/properties
@property
def has_set_asset(self):
    """Asset name is set in data.

    Returns:
        bool: True when 'asset' key is present in instance data.
    """
    return "asset" in self._data

@property
def has_set_task(self):
    """Task name is set in data.

    Returns:
        bool: True when 'task' key is present in instance data.
    """
    return "task" in self._data

@property
def has_valid_context(self):
    """Context data are valid for publishing.

    Returns:
        bool: Both asset and task are set and valid.
    """
    return self.has_valid_asset and self.has_valid_task

@property
def has_valid_asset(self):
    """Asset set in context exists in project.

    Returns:
        bool: False when asset is not set at all, otherwise the last
            value stored via 'set_asset_invalid'.
    """
    if not self.has_set_asset:
        return False
    return self._asset_is_valid

@property
def has_valid_task(self):
    """Task set in context exists in project.

    Returns:
        bool: False when task is not set at all, otherwise the last
            value stored via 'set_task_invalid'.
    """
    if not self.has_set_task:
        return False
    return self._task_is_valid

def set_asset_invalid(self, invalid):
    # Stored negated: 'invalid=True' marks the asset as not valid.
    # TODO replace with `set_asset_name`
    self._asset_is_valid = not invalid

def set_task_invalid(self, invalid):
    # Stored negated: 'invalid=True' marks the task as not valid.
    # TODO replace with `set_task_name`
    self._task_is_valid = not invalid
class ConvertorItem(object):
"""Item representing convertor plugin.
@ -1000,6 +1136,10 @@ class CreateContext:
Context itself also can store data related to whole creation (workfile).
- those are mainly for Context publish plugins
Todos:
Don't use 'AvalonMongoDB'. It's used only to keep track about current
context which should be handled by host.
Args:
host(ModuleType): Host implementation which handles implementation and
global metadata.
@ -1402,7 +1542,7 @@ class CreateContext:
self._instances_by_id[instance.id] = instance
# Prepare publish plugin attributes and set it on instance
attr_plugins = self._get_publish_plugins_with_attr_for_family(
instance.creator.family
instance.family
)
instance.set_publish_plugins(attr_plugins)

View file

@ -425,8 +425,8 @@ class BaseCreator:
keys/values when plugin attributes change.
Returns:
List[AbtractAttrDef]: Attribute definitions that can be tweaked for
created instance.
List[AbstractAttrDef]: Attribute definitions that can be tweaked
for created instance.
"""
return self.instance_attr_defs
@ -563,8 +563,8 @@ class Creator(BaseCreator):
updating keys/values when plugin attributes change.
Returns:
List[AbtractAttrDef]: Attribute definitions that can be tweaked for
created instance.
List[AbstractAttrDef]: Attribute definitions that can be tweaked
for created instance.
"""
return self.pre_create_attr_defs

View file

@ -118,7 +118,7 @@ class OpenPypePyblishPluginMixin:
Attributes available for all families in plugin's `families` attribute.
Returns:
list<AbtractAttrDef>: Attribute definitions for plugin.
list<AbstractAttrDef>: Attribute definitions for plugin.
"""
return []

View file

@ -842,7 +842,8 @@ class PlaceholderPlugin(object):
"""Placeholder options for data showed.
Returns:
List[AbtractAttrDef]: Attribute definitions of placeholder options.
List[AbstractAttrDef]: Attribute definitions of
placeholder options.
"""
return []
@ -1143,7 +1144,7 @@ class PlaceholderLoadMixin(object):
as defaults for attributes.
Returns:
List[AbtractAttrDef]: Attribute definitions common for load
List[AbstractAttrDef]: Attribute definitions common for load
plugins.
"""
@ -1513,7 +1514,7 @@ class PlaceholderCreateMixin(object):
as defaults for attributes.
Returns:
List[AbtractAttrDef]: Attribute definitions common for create
List[AbstractAttrDef]: Attribute definitions common for create
plugins.
"""

View file

@ -4,7 +4,7 @@ import copy
from qtpy import QtWidgets, QtCore
from openpype.lib.attribute_definitions import (
AbtractAttrDef,
AbstractAttrDef,
UnknownDef,
HiddenDef,
NumberDef,
@ -33,9 +33,9 @@ def create_widget_for_attr_def(attr_def, parent=None):
def _create_widget_for_attr_def(attr_def, parent=None):
if not isinstance(attr_def, AbtractAttrDef):
if not isinstance(attr_def, AbstractAttrDef):
raise TypeError("Unexpected type \"{}\" expected \"{}\"".format(
str(type(attr_def)), AbtractAttrDef
str(type(attr_def)), AbstractAttrDef
))
if isinstance(attr_def, NumberDef):

View file

@ -2,7 +2,7 @@ import inspect
from qtpy import QtGui
import qtawesome
from openpype.lib.attribute_definitions import AbtractAttrDef
from openpype.lib.attribute_definitions import AbstractAttrDef
from openpype.tools.attribute_defs import AttributeDefinitionsDialog
from openpype.tools.utils.widgets import (
OptionalAction,
@ -43,7 +43,7 @@ def get_options(action, loader, parent, repre_contexts):
if not getattr(action, "optioned", False) or not loader_options:
return options
if isinstance(loader_options[0], AbtractAttrDef):
if isinstance(loader_options[0], AbstractAttrDef):
qargparse_options = False
dialog = AttributeDefinitionsDialog(loader_options, parent)
else:

View file

@ -826,7 +826,6 @@ class CreatorItem:
label,
group_label,
icon,
instance_attributes_defs,
description,
detailed_description,
default_variant,
@ -847,12 +846,8 @@ class CreatorItem:
self.default_variants = default_variants
self.create_allow_context_change = create_allow_context_change
self.create_allow_thumbnail = create_allow_thumbnail
self.instance_attributes_defs = instance_attributes_defs
self.pre_create_attributes_defs = pre_create_attributes_defs
def get_instance_attr_defs(self):
return self.instance_attributes_defs
def get_group_label(self):
return self.group_label
@ -891,7 +886,6 @@ class CreatorItem:
creator.label or identifier,
creator.get_group_label(),
creator.get_icon(),
creator.get_instance_attr_defs(),
description,
detail_description,
default_variant,
@ -902,15 +896,9 @@ class CreatorItem:
)
def to_data(self):
instance_attributes_defs = None
if self.instance_attributes_defs is not None:
instance_attributes_defs = serialize_attr_defs(
self.instance_attributes_defs
)
pre_create_attributes_defs = None
if self.pre_create_attributes_defs is not None:
instance_attributes_defs = serialize_attr_defs(
pre_create_attributes_defs = serialize_attr_defs(
self.pre_create_attributes_defs
)
@ -927,18 +915,11 @@ class CreatorItem:
"default_variants": self.default_variants,
"create_allow_context_change": self.create_allow_context_change,
"create_allow_thumbnail": self.create_allow_thumbnail,
"instance_attributes_defs": instance_attributes_defs,
"pre_create_attributes_defs": pre_create_attributes_defs,
}
@classmethod
def from_data(cls, data):
instance_attributes_defs = data["instance_attributes_defs"]
if instance_attributes_defs is not None:
data["instance_attributes_defs"] = deserialize_attr_defs(
instance_attributes_defs
)
pre_create_attributes_defs = data["pre_create_attributes_defs"]
if pre_create_attributes_defs is not None:
data["pre_create_attributes_defs"] = deserialize_attr_defs(
@ -1879,12 +1860,12 @@ class PublisherController(BasePublisherController):
which should be attribute definitions returned.
"""
# NOTE it would be great if attrdefs would have hash method implemented
# so they could be used as keys in dictionary
output = []
_attr_defs = {}
for instance in instances:
creator_identifier = instance.creator_identifier
creator_item = self.creator_items[creator_identifier]
for attr_def in creator_item.instance_attributes_defs:
for attr_def in instance.creator_attribute_defs:
found_idx = None
for idx, _attr_def in _attr_defs.items():
if attr_def == _attr_def:

View file

@ -136,10 +136,7 @@ class QtRemotePublishController(BasePublisherController):
created_instances = {}
for serialized_data in serialized_instances:
item = CreatedInstance.deserialize_on_remote(
serialized_data,
self._creator_items
)
item = CreatedInstance.deserialize_on_remote(serialized_data)
created_instances[item.id] = item
self._created_instances = created_instances

View file

@ -1220,7 +1220,8 @@ class GlobalAttrsWidget(QtWidgets.QWidget):
asset_task_combinations = []
for instance in instances:
if instance.creator is None:
# NOTE I'm not sure how this can even happen?
if instance.creator_identifier is None:
editable = False
variants.add(instance.get("variant") or self.unknown_value)

View file

@ -8,7 +8,7 @@ from openpype.style import (
get_objected_colors,
get_style_image_path
)
from openpype.lib.attribute_definitions import AbtractAttrDef
from openpype.lib.attribute_definitions import AbstractAttrDef
log = logging.getLogger(__name__)
@ -406,7 +406,7 @@ class OptionalAction(QtWidgets.QWidgetAction):
def set_option_tip(self, options):
sep = "\n\n"
if not options or not isinstance(options[0], AbtractAttrDef):
if not options or not isinstance(options[0], AbstractAttrDef):
mak = (lambda opt: opt["name"] + " :\n " + opt["help"])
self.option_tip = sep.join(mak(opt) for opt in options)
return

View file

@ -1,3 +1,3 @@
# -*- coding: utf-8 -*-
"""Package declaring Pype version."""
__version__ = "3.15.1-nightly.1"
__version__ = "3.15.1-nightly.2"

View file

@ -1,6 +1,6 @@
[tool.poetry]
name = "OpenPype"
version = "3.15.0" # OpenPype
version = "3.15.1" # OpenPype
description = "Open VFX and Animation pipeline with support."
authors = ["OpenPype Team <info@openpype.io>"]
license = "MIT License"

View file

@ -62,7 +62,7 @@ class TestDeadlinePublishInAfterEffects(AEDeadlinePublishTestClass):
failures.append(
DBAssert.count_of_types(dbcon, "representation", 4))
additional_args = {"context.subset": "renderTest_taskMain",
additional_args = {"context.subset": "workfileTest_task",
"context.ext": "aep"}
failures.append(
DBAssert.count_of_types(dbcon, "representation", 1,
@ -71,7 +71,7 @@ class TestDeadlinePublishInAfterEffects(AEDeadlinePublishTestClass):
additional_args = {"context.subset": "renderTest_taskMain",
"context.ext": "png"}
failures.append(
DBAssert.count_of_types(dbcon, "representation", 1,
DBAssert.count_of_types(dbcon, "representation", 2,
additional_args=additional_args))
additional_args = {"context.subset": "renderTest_taskMain",

View file

@ -47,7 +47,7 @@ class TestDeadlinePublishInAfterEffectsMultiComposition(AEDeadlinePublishTestCla
print("test_db_asserts")
failures = []
failures.append(DBAssert.count_of_types(dbcon, "version", 2))
failures.append(DBAssert.count_of_types(dbcon, "version", 3))
failures.append(
DBAssert.count_of_types(dbcon, "version", 0, name={"$ne": 1}))
@ -80,7 +80,7 @@ class TestDeadlinePublishInAfterEffectsMultiComposition(AEDeadlinePublishTestCla
additional_args = {"context.subset": "renderTest_taskMain",
"context.ext": "png"}
failures.append(
DBAssert.count_of_types(dbcon, "representation", 1,
DBAssert.count_of_types(dbcon, "representation", 2,
additional_args=additional_args))
additional_args = {"context.subset": "renderTest_taskMain",

View file

@ -60,7 +60,7 @@ class TestPublishInAfterEffects(AELocalPublishTestClass):
failures.append(
DBAssert.count_of_types(dbcon, "representation", 4))
additional_args = {"context.subset": "renderTest_taskMain",
additional_args = {"context.subset": "workfileTest_task",
"context.ext": "aep"}
failures.append(
DBAssert.count_of_types(dbcon, "representation", 1,
@ -69,7 +69,7 @@ class TestPublishInAfterEffects(AELocalPublishTestClass):
additional_args = {"context.subset": "renderTest_taskMain",
"context.ext": "png"}
failures.append(
DBAssert.count_of_types(dbcon, "representation", 1,
DBAssert.count_of_types(dbcon, "representation", 2,
additional_args=additional_args))
additional_args = {"context.subset": "renderTest_taskMain",

View file

@ -47,7 +47,7 @@ class TestPublishInAfterEffects(AELocalPublishTestClass):
failures.append(
DBAssert.count_of_types(dbcon, "representation", 4))
additional_args = {"context.subset": "renderTest_taskMain",
additional_args = {"context.subset": "workfileTest_task",
"context.ext": "aep"}
failures.append(
DBAssert.count_of_types(dbcon, "representation", 1,