Merged develop
Commit d2d486d24f: 139 changed files with 13127 additions and 505 deletions
31 CHANGELOG.md

@@ -1,11 +1,20 @@
# Changelog

-## [3.6.0-nightly.1](https://github.com/pypeclub/OpenPype/tree/HEAD)
+## [3.6.0-nightly.3](https://github.com/pypeclub/OpenPype/tree/HEAD)

[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.5.0...HEAD)

**🆕 New features**

- Flame: a host basic integration [\#2165](https://github.com/pypeclub/OpenPype/pull/2165)
- Houdini: simple HDA workflow [\#2072](https://github.com/pypeclub/OpenPype/pull/2072)

**🚀 Enhancements**

- Delivery: Check 'frame' key in template for sequence delivery [\#2196](https://github.com/pypeclub/OpenPype/pull/2196)
- Usage of tools code [\#2185](https://github.com/pypeclub/OpenPype/pull/2185)
- Settings: Dictionary based on project roots [\#2184](https://github.com/pypeclub/OpenPype/pull/2184)
- Subset name: Be able to pass asset document to get subset name [\#2179](https://github.com/pypeclub/OpenPype/pull/2179)
- Tools: Experimental tools [\#2167](https://github.com/pypeclub/OpenPype/pull/2167)
- Loader: Refactor and use OpenPype stylesheets [\#2166](https://github.com/pypeclub/OpenPype/pull/2166)
- Add loader for linked smart objects in photoshop [\#2149](https://github.com/pypeclub/OpenPype/pull/2149)

@@ -14,6 +23,8 @@
**🐛 Bug fixes**

- Project Manager: Fix copying of tasks [\#2191](https://github.com/pypeclub/OpenPype/pull/2191)
- StandalonePublisher: Source validator doesn't expect representations [\#2190](https://github.com/pypeclub/OpenPype/pull/2190)
- MacOS: Launching of applications may cause Permissions error [\#2175](https://github.com/pypeclub/OpenPype/pull/2175)
- Blender: Fix 'Deselect All' with object not in 'Object Mode' [\#2163](https://github.com/pypeclub/OpenPype/pull/2163)
- Tools: Stylesheets are applied after tool show [\#2161](https://github.com/pypeclub/OpenPype/pull/2161)

@@ -101,11 +112,6 @@
- Ftrack: Removed ftrack interface [\#2049](https://github.com/pypeclub/OpenPype/pull/2049)
- Settings UI: Deferred set value on entity [\#2044](https://github.com/pypeclub/OpenPype/pull/2044)
- Loader: Families filtering [\#2043](https://github.com/pypeclub/OpenPype/pull/2043)
- Settings UI: Project view enhancements [\#2042](https://github.com/pypeclub/OpenPype/pull/2042)
- Settings for Nuke IncrementScriptVersion [\#2039](https://github.com/pypeclub/OpenPype/pull/2039)
- Loader & Library loader: Use tools from OpenPype [\#2038](https://github.com/pypeclub/OpenPype/pull/2038)
- Adding predefined project folders creation in PM [\#2030](https://github.com/pypeclub/OpenPype/pull/2030)
- WebserverModule: Removed interface of webserver module [\#2028](https://github.com/pypeclub/OpenPype/pull/2028)

**🐛 Bug fixes**

@@ -123,19 +129,6 @@
[Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.4.0-nightly.6...3.4.0)

**🚀 Enhancements**

- Added possibility to configure of synchronization of workfile version… [\#2041](https://github.com/pypeclub/OpenPype/pull/2041)
- General: Task types in profiles [\#2036](https://github.com/pypeclub/OpenPype/pull/2036)

**🐛 Bug fixes**

- Workfiles tool: Task selection [\#2040](https://github.com/pypeclub/OpenPype/pull/2040)
- Ftrack: Delete old versions missing settings key [\#2037](https://github.com/pypeclub/OpenPype/pull/2037)
- Nuke: typo on a button [\#2034](https://github.com/pypeclub/OpenPype/pull/2034)
- Hiero: Fix "none" named tags [\#2033](https://github.com/pypeclub/OpenPype/pull/2033)
- FFmpeg: Subprocess arguments as list [\#2032](https://github.com/pypeclub/OpenPype/pull/2032)

## [3.3.1](https://github.com/pypeclub/OpenPype/tree/3.3.1) (2021-08-20)

[Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.3.1-nightly.1...3.3.1)
@@ -37,7 +37,8 @@ class ExtractBlend(openpype.api.Extractor):
            if tree.type == 'SHADER':
                for node in tree.nodes:
                    if node.bl_idname == 'ShaderNodeTexImage':
-                       node.image.pack()
+                       if node.image:
+                           node.image.pack()

        bpy.data.libraries.write(filepath, data_blocks)
@@ -1,6 +1,7 @@
# -*- coding: utf-8 -*-
"""Houdini specific Avalon/Pyblish plugin definitions."""
import sys
+from avalon.api import CreatorError
from avalon import houdini
import six

@@ -8,7 +9,7 @@ import hou
from openpype.api import PypeCreatorMixin


-class OpenPypeCreatorError(Exception):
+class OpenPypeCreatorError(CreatorError):
    pass
@@ -4,8 +4,8 @@ import contextlib

import logging
from Qt import QtCore, QtGui
-from avalon.tools.widgets import AssetWidget
-from avalon import style
+from openpype.tools.utils.widgets import AssetWidget
+from avalon import style, io

from pxr import Sdf

@@ -31,7 +31,7 @@ def pick_asset(node):
    # Construct the AssetWidget as a frameless popup so it automatically
    # closes when clicked outside of it.
    global tool
-    tool = AssetWidget(silo_creatable=False)
+    tool = AssetWidget(io)
    tool.setContentsMargins(5, 5, 5, 5)
    tool.setWindowTitle("Pick Asset")
    tool.setStyleSheet(style.load_stylesheet())

@@ -41,8 +41,6 @@ def pick_asset(node):
    # Select the current asset if there is any
    name = parm.eval()
    if name:
-        from avalon import io
-
        db_asset = io.find_one({"name": name, "type": "asset"})
        if db_asset:
            silo = db_asset.get("silo")
96 openpype/hosts/houdini/plugins/create/create_hda.py (new file)

@@ -0,0 +1,96 @@
# -*- coding: utf-8 -*-
from openpype.hosts.houdini.api import plugin
from avalon.houdini import lib
from avalon import io
import hou


class CreateHDA(plugin.Creator):
    """Publish Houdini Digital Asset file."""

    name = "hda"
    label = "Houdini Digital Asset (Hda)"
    family = "hda"
    icon = "gears"
    maintain_selection = False

    def __init__(self, *args, **kwargs):
        super(CreateHDA, self).__init__(*args, **kwargs)
        self.data.pop("active", None)

    def _check_existing(self, subset_name):
        # type: (str) -> bool
        """Check if a subset with the same name is already published."""
        # Get all subsets of the current asset
        asset_id = io.find_one({"name": self.data["asset"], "type": "asset"},
                               projection={"_id": True})['_id']
        subset_docs = io.find(
            {
                "type": "subset",
                "parent": asset_id
            }, {"name": 1}
        )
        existing_subset_names = set(subset_docs.distinct("name"))
        existing_subset_names_low = {
            _name.lower() for _name in existing_subset_names
        }
        return subset_name.lower() in existing_subset_names_low

    def _process(self, instance):
        subset_name = self.data["subset"]
        # get selected nodes
        out = hou.node("/obj")
        self.nodes = hou.selectedNodes()

        if (self.options or {}).get("useSelection") and self.nodes:
            # if we have `use selection` enabled and we have some
            # selected nodes ...
            to_hda = self.nodes[0]
            if len(self.nodes) > 1:
                # if there is more than one node, create a subnet first
                subnet = out.createNode(
                    "subnet", node_name="{}_subnet".format(self.name))
                to_hda = subnet
        else:
            # in case of no selection, just create a subnet node
            subnet = out.createNode(
                "subnet", node_name="{}_subnet".format(self.name))
            subnet.moveToGoodPosition()
            to_hda = subnet

        if not to_hda.type().definition():
            # if the node type has no definition, it is not a user-created
            # hda. We test if an hda can be created from the node.
            if not to_hda.canCreateDigitalAsset():
                raise Exception(
                    "cannot create hda from node {}".format(to_hda))

            hda_node = to_hda.createDigitalAsset(
                name=subset_name,
                hda_file_name="$HIP/{}.hda".format(subset_name)
            )
            hou.moveNodesTo(self.nodes, hda_node)
            hda_node.layoutChildren()
        else:
            if self._check_existing(subset_name):
                raise plugin.OpenPypeCreatorError(
                    ("subset {} is already published with different HDA "
                     "definition.").format(subset_name))
            hda_node = to_hda

        hda_node.setName(subset_name)

        # delete node created by Avalon in /out;
        # this needs to be addressed in a future Houdini workflow refactor.
        hou.node("/out/{}".format(subset_name)).destroy()

        try:
            lib.imprint(hda_node, self.data)
        except hou.OperationFailed:
            raise plugin.OpenPypeCreatorError(
                ("Cannot set metadata on asset. Might be that it already is "
                 "an OpenPype asset.")
            )

        return hda_node
62 openpype/hosts/houdini/plugins/load/load_hda.py (new file)

@@ -0,0 +1,62 @@
# -*- coding: utf-8 -*-
from avalon import api

from avalon.houdini import pipeline


class HdaLoader(api.Loader):
    """Load Houdini Digital Asset file."""

    families = ["hda"]
    label = "Load Hda"
    representations = ["hda"]
    order = -10
    icon = "code-fork"
    color = "orange"

    def load(self, context, name=None, namespace=None, data=None):
        import os
        import hou

        # Format file name, Houdini only wants forward slashes
        file_path = os.path.normpath(self.fname)
        file_path = file_path.replace("\\", "/")

        # Get the root node
        obj = hou.node("/obj")

        # Create a unique name
        counter = 1
        namespace = namespace or context["asset"]["name"]
        formatted = "{}_{}".format(namespace, name) if namespace else name
        node_name = "{0}_{1:03d}".format(formatted, counter)

        hou.hda.installFile(file_path)
        hda_node = obj.createNode(name, node_name)

        self[:] = [hda_node]

        return pipeline.containerise(
            node_name,
            namespace,
            [hda_node],
            context,
            self.__class__.__name__,
            suffix="",
        )

    def update(self, container, representation):
        import hou

        hda_node = container["node"]
        file_path = api.get_representation_path(representation)
        file_path = file_path.replace("\\", "/")
        hou.hda.installFile(file_path)
        defs = hda_node.type().allInstalledDefinitions()
        def_paths = [d.libraryFilePath() for d in defs]
        new = def_paths.index(file_path)
        defs[new].setIsPreferred(True)

    def remove(self, container):
        node = container["node"]
        node.destroy()
@@ -23,8 +23,10 @@ class CollectInstanceActiveState(pyblish.api.InstancePlugin):
            return

        # Check bypass state and reverse
+        active = True
        node = instance[0]
-        active = not node.isBypassed()
+        if hasattr(node, "isBypassed"):
+            active = not node.isBypassed()

        # Set instance active state
        instance.data.update(
@@ -31,6 +31,7 @@ class CollectInstances(pyblish.api.ContextPlugin):
    def process(self, context):

        nodes = hou.node("/out").children()
+        nodes += hou.node("/obj").children()

        # Include instances in USD stage only when it exists so it
        # remains backwards compatible with versions before houdini 18

@@ -49,9 +50,12 @@ class CollectInstances(pyblish.api.ContextPlugin):
            has_family = node.evalParm("family")
            assert has_family, "'%s' is missing 'family'" % node.name()

+            self.log.info("processing {}".format(node))
+
            data = lib.read(node)
            # Check bypass state and reverse
-            data.update({"active": not node.isBypassed()})
+            if hasattr(node, "isBypassed"):
+                data.update({"active": not node.isBypassed()})

            # temporary translation of `active` to `publish` till issue has
            # been resolved, https://github.com/pyblish/pyblish-base/issues/307
43 openpype/hosts/houdini/plugins/publish/extract_hda.py (new file)

@@ -0,0 +1,43 @@
# -*- coding: utf-8 -*-
import os

from pprint import pformat

import pyblish.api
import openpype.api


class ExtractHDA(openpype.api.Extractor):

    order = pyblish.api.ExtractorOrder
    label = "Extract HDA"
    hosts = ["houdini"]
    families = ["hda"]

    def process(self, instance):
        self.log.info(pformat(instance.data))
        hda_node = instance[0]
        hda_def = hda_node.type().definition()
        hda_options = hda_def.options()
        hda_options.setSaveInitialParmsAndContents(True)

        next_version = instance.data["anatomyData"]["version"]
        self.log.info("setting version: {}".format(next_version))
        hda_def.setVersion(str(next_version))
        hda_def.setOptions(hda_options)
        hda_def.save(hda_def.libraryFilePath(), hda_node, hda_options)

        if "representations" not in instance.data:
            instance.data["representations"] = []

        file = os.path.basename(hda_def.libraryFilePath())
        staging_dir = os.path.dirname(hda_def.libraryFilePath())
        self.log.info("Using HDA from {}".format(hda_def.libraryFilePath()))

        representation = {
            'name': 'hda',
            'ext': 'hda',
            'files': file,
            "stagingDir": staging_dir,
        }
        instance.data["representations"].append(representation)
@@ -35,5 +35,5 @@ class ValidateBypassed(pyblish.api.InstancePlugin):
    def get_invalid(cls, instance):

        rop = instance[0]
-        if rop.isBypassed():
+        if hasattr(rop, "isBypassed") and rop.isBypassed():
            return [rop]
@@ -275,8 +275,7 @@ def on_open(_):

    # Show outdated pop-up
    def _on_show_inventory():
-        import avalon.tools.sceneinventory as tool
-        tool.show(parent=parent)
+        host_tools.show_scene_inventory(parent=parent)

    dialog = popup.Popup(parent=parent)
    dialog.setWindowTitle("Maya scene has outdated content")
@@ -2,6 +2,7 @@

import re
import os
+import platform
import uuid
import math

@@ -22,6 +23,7 @@ import avalon.maya.lib
import avalon.maya.interactive

from openpype import lib
+from openpype.api import get_anatomy_settings


log = logging.getLogger(__name__)
@@ -1822,7 +1824,7 @@ def set_scene_fps(fps, update=True):
    cmds.file(modified=True)


-def set_scene_resolution(width, height):
+def set_scene_resolution(width, height, pixelAspect):
    """Set the render resolution

    Args:

@@ -1850,6 +1852,36 @@ def set_scene_resolution(width, height):
    cmds.setAttr("%s.width" % control_node, width)
    cmds.setAttr("%s.height" % control_node, height)

    deviceAspectRatio = ((float(width) / float(height)) * float(pixelAspect))
    cmds.setAttr("%s.deviceAspectRatio" % control_node, deviceAspectRatio)
    cmds.setAttr("%s.pixelAspect" % control_node, pixelAspect)


def reset_scene_resolution():
    """Apply the scene resolution from the project definition.

    Scene resolution can be overwritten by an asset if the asset.data
    contains any information regarding scene resolution.

    Returns:
        None
    """

    project_doc = io.find_one({"type": "project"})
    project_data = project_doc["data"]
    asset_data = lib.get_asset()["data"]

    # Set project resolution
    width_key = "resolutionWidth"
    height_key = "resolutionHeight"
    pixelAspect_key = "pixelAspect"

    width = asset_data.get(width_key, project_data.get(width_key, 1920))
    height = asset_data.get(height_key, project_data.get(height_key, 1080))
    pixelAspect = asset_data.get(pixelAspect_key,
                                 project_data.get(pixelAspect_key, 1))

    set_scene_resolution(width, height, pixelAspect)


def set_context_settings():
    """Apply the project settings from the project definition

@@ -1876,18 +1908,14 @@ def set_context_settings():
    api.Session["AVALON_FPS"] = str(fps)
    set_scene_fps(fps)

    # Set project resolution
-    width_key = "resolutionWidth"
-    height_key = "resolutionHeight"
-
-    width = asset_data.get(width_key, project_data.get(width_key, 1920))
-    height = asset_data.get(height_key, project_data.get(height_key, 1080))
-
-    set_scene_resolution(width, height)
+    reset_scene_resolution()

    # Set frame range.
    avalon.maya.interactive.reset_frame_range()

    # Set colorspace
    set_colorspace()


# Valid FPS
def validate_fps():
@@ -2743,3 +2771,49 @@ def iter_shader_edits(relationships, shader_nodes, nodes_by_id, label=None):
               "uuid": data["uuid"],
               "nodes": nodes,
               "attributes": attr_value}


def set_colorspace():
    """Set colorspace from the project configuration."""
    project_name = os.getenv("AVALON_PROJECT")
    imageio = get_anatomy_settings(project_name)["imageio"]["maya"]
    root_dict = imageio["colorManagementPreference"]

    if not isinstance(root_dict, dict):
        msg = "set_colorspace(): argument should be dictionary"
        log.error(msg)

    log.debug(">> root_dict: {}".format(root_dict))

    # first enable color management
    cmds.colorManagementPrefs(e=True, cmEnabled=True)
    cmds.colorManagementPrefs(e=True, ocioRulesEnabled=True)

    # second set config path
    if root_dict.get("configFilePath"):
        unresolved_path = root_dict["configFilePath"]
        ocio_paths = unresolved_path[platform.system().lower()]

        resolved_path = None
        for ocio_p in ocio_paths:
            resolved_path = str(ocio_p).format(**os.environ)
            if not os.path.exists(resolved_path):
                continue

        if resolved_path:
            filepath = str(resolved_path).replace("\\", "/")
            cmds.colorManagementPrefs(e=True, configFilePath=filepath)
            cmds.colorManagementPrefs(e=True, cmConfigFileEnabled=True)
            log.debug("maya '{}' changed to: {}".format(
                "configFilePath", resolved_path))
            root_dict.pop("configFilePath")
    else:
        cmds.colorManagementPrefs(e=True, cmConfigFileEnabled=False)
        cmds.colorManagementPrefs(e=True, configFilePath="")

    # third set rendering space and view transform
    renderSpace = root_dict["renderSpace"]
    cmds.colorManagementPrefs(e=True, renderingSpaceName=renderSpace)
    viewTransform = root_dict["viewTransform"]
    cmds.colorManagementPrefs(e=True, viewTransformName=viewTransform)
@@ -11,6 +11,7 @@ from avalon.maya import pipeline
from openpype.api import BuildWorkfile
from openpype.settings import get_project_settings
from openpype.tools.utils import host_tools
+from openpype.hosts.maya.api import lib


log = logging.getLogger(__name__)
@@ -110,6 +111,35 @@ def deferred():
        if workfile_action:
            top_menu.removeAction(workfile_action)

    def modify_resolution():
        # Find the pipeline menu
        top_menu = _get_menu()

        # Try to find resolution tool action in the menu
        resolution_action = None
        for action in top_menu.actions():
            if action.text() == "Reset Resolution":
                resolution_action = action
                break

        # Add at the top of the menu if the action was not found
        after_action = ""
        if resolution_action:
            # Use action's object name for `insertAfter` argument
            after_action = resolution_action.objectName()

        # Insert action to menu
        cmds.menuItem(
            "Reset Resolution",
            parent=pipeline._menu,
            command=lambda *args: lib.reset_scene_resolution(),
            insertAfter=after_action
        )

        # Remove replaced action
        if resolution_action:
            top_menu.removeAction(resolution_action)

    def remove_project_manager():
        top_menu = _get_menu()
@@ -134,6 +164,31 @@ def deferred():
        if project_manager_action is not None:
            system_menu.menu().removeAction(project_manager_action)

    def add_colorspace():
        # Find the pipeline menu
        top_menu = _get_menu()

        # Try to find the "Reset Resolution" action in the menu
        workfile_action = None
        for action in top_menu.actions():
            if action.text() == "Reset Resolution":
                workfile_action = action
                break

        # Add at the top of the menu if the action was not found
        after_action = ""
        if workfile_action:
            # Use action's object name for `insertAfter` argument
            after_action = workfile_action.objectName()

        # Insert action to menu
        cmds.menuItem(
            "Set Colorspace",
            parent=pipeline._menu,
            command=lambda *args: lib.set_colorspace(),
            insertAfter=after_action
        )

    log.info("Attempting to install scripts menu ...")

    # add_scripts_menu()

@@ -141,7 +196,9 @@ def deferred():
    add_look_assigner_item()
    add_experimental_item()
    modify_workfiles()
+    modify_resolution()
    remove_project_manager()
+    add_colorspace()
    add_scripts_menu()
53 openpype/hosts/maya/plugins/publish/validate_mesh_ngons.py (new file)

@@ -0,0 +1,53 @@
from maya import cmds

import pyblish.api
import openpype.api
import openpype.hosts.maya.api.action
from avalon import maya
from openpype.hosts.maya.api import lib


def polyConstraint(objects, *args, **kwargs):
    kwargs.pop('mode', None)

    with lib.no_undo(flush=False):
        with maya.maintained_selection():
            with lib.reset_polySelectConstraint():
                cmds.select(objects, r=1, noExpand=True)
                # Acting as 'polyCleanupArgList' for n-sided polygon selection
                cmds.polySelectConstraint(*args, mode=3, **kwargs)
                result = cmds.ls(selection=True)
                cmds.select(clear=True)

    return result


class ValidateMeshNgons(pyblish.api.Validator):
    """Ensure that meshes don't have ngons.

    Ngons are faces with more than 4 sides.

    To debug the problem on the meshes you can use Maya's modeling
    tool: "Mesh > Cleanup..."

    """

    order = openpype.api.ValidateContentsOrder
    hosts = ["maya"]
    families = ["model"]
    label = "Mesh ngons"
    actions = [openpype.hosts.maya.api.action.SelectInvalidAction]

    @staticmethod
    def get_invalid(instance):

        meshes = cmds.ls(instance, type='mesh')
        return polyConstraint(meshes, type=8, size=3)

    def process(self, instance):
        """Process all the nodes in the instance "objectSet"."""

        invalid = self.get_invalid(instance)
        if invalid:
            raise ValueError("Meshes found with n-gon "
                             "values: {0}".format(invalid))
@@ -106,12 +106,12 @@ class CollectRemoteInstances(pyblish.api.ContextPlugin):
            for mapping in self.color_code_mapping:
                if mapping["color_code"] and \
                        layer.color_code not in mapping["color_code"]:
-                    break
+                    continue

                if mapping["layer_name_regex"] and \
                        not any(re.search(pattern, layer.name)
                                for pattern in mapping["layer_name_regex"]):
-                    break
+                    continue

                family_list.append(mapping["family"])
                subset_name_list.append(mapping["subset_template_name"])

@@ -127,7 +127,6 @@ class CollectRemoteInstances(pyblish.api.ContextPlugin):
                    format(layer.name))
                self.log.warning("Only first family used!")
                family_list[:] = family_list[0]

            if subset_name_list:
                resolved_subset_template = subset_name_list.pop()
            if family_list:
@@ -3,7 +3,7 @@ import json
import pyblish.api

from avalon import io
-from openpype.lib import get_subset_name
+from openpype.lib import get_subset_name_with_asset_doc


class CollectBulkMovInstances(pyblish.api.InstancePlugin):

@@ -26,16 +26,10 @@ class CollectBulkMovInstances(pyblish.api.InstancePlugin):
        context = instance.context
        asset_name = instance.data["asset"]

-        asset_doc = io.find_one(
-            {
-                "type": "asset",
-                "name": asset_name
-            },
-            {
-                "_id": 1,
-                "data.tasks": 1
-            }
-        )
+        asset_doc = io.find_one({
+            "type": "asset",
+            "name": asset_name
+        })
        if not asset_doc:
            raise AssertionError((
                "Couldn't find Asset document with name \"{}\""

@@ -53,11 +47,11 @@ class CollectBulkMovInstances(pyblish.api.InstancePlugin):
                task_name = available_task_names[_task_name_low]
                break

-        subset_name = get_subset_name(
+        subset_name = get_subset_name_with_asset_doc(
            self.new_instance_family,
            self.subset_name_variant,
            task_name,
-            asset_doc["_id"],
+            asset_doc,
            io.Session["AVALON_PROJECT"]
        )
        instance_name = f"{asset_name}_{subset_name}"
@@ -22,15 +22,15 @@ class ValidateSources(pyblish.api.InstancePlugin):
    def process(self, instance):
        self.log.info("instance {}".format(instance.data))

-        for repr in instance.data["representations"]:
+        for repre in instance.data.get("representations") or []:
            files = []
-            if isinstance(repr["files"], str):
-                files.append(repr["files"])
+            if isinstance(repre["files"], str):
+                files.append(repre["files"])
            else:
-                files = list(repr["files"])
+                files = list(repre["files"])

            for file_name in files:
-                source_file = os.path.join(repr["stagingDir"],
+                source_file = os.path.join(repre["stagingDir"],
                                           file_name)

                if not os.path.exists(source_file):
16 openpype/hosts/testhost/README.md (new file)

@@ -0,0 +1,16 @@
# What is `testhost`
Host `testhost` was created to fake a running host for testing the publisher.

It does not have any proper launch mechanism at the moment. There is a python script `./run_publish.py` which will show the publisher window. The script requires a few variables to be set before it can run. Execution will register the host `testhost`, register global publish plugins, and register creator and publish plugins from `./plugins` (a minimal usage sketch follows after this README).

## Data
Created instances and context data are stored into json files inside the `./api` folder. This can easily be modified to save them to a different place.

## Plugins
The test host has a few plugins to make it possible to test publishing.

### Creators
They are just example plugins using functions from `api` to create/remove/update data. One of them is an auto creator, which means that it is triggered on each reset of the create context. The others are manual creators, both creating the same family.

### Publishers
Collectors are example plugins that use `get_attribute_defs` to define attributes for specific families or for the context. Validators are there to test `PublishValidationError`.
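A minimal sketch (not part of the commit) of how the pieces below fit together: `install()` and `list_instances()` are the functions exported from `openpype/hosts/testhost/api/__init__.py`, and the instance data comes from `./api/instances.json`. It assumes the AVALON_* environment variables described above are already set.

    from openpype.hosts.testhost import api as testhost

    # Register the "testhost" pyblish host and its create/publish plugin paths.
    testhost.install()

    # Instances are read back from ./api/instances.json.
    for instance_data in testhost.list_instances():
        print(instance_data["subset"], instance_data.get("creator_identifier"))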
0 openpype/hosts/testhost/__init__.py (new, empty file)
43 openpype/hosts/testhost/api/__init__.py (new file)

@@ -0,0 +1,43 @@
import os
import logging
import pyblish.api
import avalon.api
from openpype.pipeline import BaseCreator

from .pipeline import (
    ls,
    list_instances,
    update_instances,
    remove_instances,
    get_context_data,
    update_context_data,
    get_context_title
)


HOST_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
PLUGINS_DIR = os.path.join(HOST_DIR, "plugins")
PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish")
CREATE_PATH = os.path.join(PLUGINS_DIR, "create")

log = logging.getLogger(__name__)


def install():
    log.info("OpenPype - Installing TestHost integration")
    pyblish.api.register_host("testhost")
    pyblish.api.register_plugin_path(PUBLISH_PATH)
    avalon.api.register_plugin_path(BaseCreator, CREATE_PATH)


__all__ = (
    "ls",
    "list_instances",
    "update_instances",
    "remove_instances",
    "get_context_data",
    "update_context_data",
    "get_context_title",

    "install"
)
1 openpype/hosts/testhost/api/context.json (new file)

@@ -0,0 +1 @@
{}
108 openpype/hosts/testhost/api/instances.json (new file)

@@ -0,0 +1,108 @@
[
    {
        "id": "pyblish.avalon.instance",
        "active": true,
        "family": "test",
        "subset": "testMyVariant",
        "version": 1,
        "asset": "sq01_sh0010",
        "task": "Compositing",
        "variant": "myVariant",
        "uuid": "a485f148-9121-46a5-8157-aa64df0fb449",
        "creator_attributes": {
            "number_key": 10,
            "ha": 10
        },
        "publish_attributes": {
            "CollectFtrackApi": {
                "add_ftrack_family": false
            }
        },
        "creator_identifier": "test_one"
    },
    {
        "id": "pyblish.avalon.instance",
        "active": true,
        "family": "test",
        "subset": "testMyVariant2",
        "version": 1,
        "asset": "sq01_sh0010",
        "task": "Compositing",
        "variant": "myVariant2",
        "uuid": "a485f148-9121-46a5-8157-aa64df0fb444",
        "creator_attributes": {},
        "publish_attributes": {
            "CollectFtrackApi": {
                "add_ftrack_family": true
            }
        },
        "creator_identifier": "test_one"
    },
    {
        "id": "pyblish.avalon.instance",
        "active": true,
        "family": "test",
        "subset": "testMain",
        "version": 1,
        "asset": "sq01_sh0010",
        "task": "Compositing",
        "variant": "Main",
        "uuid": "3607bc95-75f6-4648-a58d-e699f413d09f",
        "creator_attributes": {},
        "publish_attributes": {
            "CollectFtrackApi": {
                "add_ftrack_family": true
            }
        },
        "creator_identifier": "test_two"
    },
    {
        "id": "pyblish.avalon.instance",
        "active": true,
        "family": "test",
        "subset": "testMain2",
        "version": 1,
        "asset": "sq01_sh0020",
        "task": "Compositing",
        "variant": "Main2",
        "uuid": "4ccf56f6-9982-4837-967c-a49695dbe8eb",
        "creator_attributes": {},
        "publish_attributes": {
            "CollectFtrackApi": {
                "add_ftrack_family": true
            }
        },
        "creator_identifier": "test_two"
    },
    {
        "id": "pyblish.avalon.instance",
        "family": "test_three",
        "subset": "test_threeMain2",
        "active": true,
        "version": 1,
        "asset": "sq01_sh0020",
        "task": "Compositing",
        "variant": "Main2",
        "uuid": "4ccf56f6-9982-4837-967c-a49695dbe8ec",
        "creator_attributes": {},
        "publish_attributes": {
            "CollectFtrackApi": {
                "add_ftrack_family": true
            }
        }
    },
    {
        "id": "pyblish.avalon.instance",
        "family": "workfile",
        "subset": "workfileMain",
        "active": true,
        "creator_identifier": "workfile",
        "version": 1,
        "asset": "Alpaca_01",
        "task": "modeling",
        "variant": "Main",
        "uuid": "7c9ddfc7-9f9c-4c1c-b233-38c966735fb6",
        "creator_attributes": {},
        "publish_attributes": {}
    }
]
156 openpype/hosts/testhost/api/pipeline.py (new file)

@@ -0,0 +1,156 @@
import os
import json


class HostContext:
    instances_json_path = None
    context_json_path = None

    @classmethod
    def get_context_title(cls):
        project_name = os.environ.get("AVALON_PROJECT")
        if not project_name:
            return "TestHost"

        asset_name = os.environ.get("AVALON_ASSET")
        if not asset_name:
            return project_name

        from avalon import io

        asset_doc = io.find_one(
            {"type": "asset", "name": asset_name},
            {"data.parents": 1}
        )
        parents = asset_doc.get("data", {}).get("parents") or []

        hierarchy = [project_name]
        hierarchy.extend(parents)
        hierarchy.append("<b>{}</b>".format(asset_name))
        task_name = os.environ.get("AVALON_TASK")
        if task_name:
            hierarchy.append(task_name)

        return "/".join(hierarchy)

    @classmethod
    def get_current_dir_filepath(cls, filename):
        return os.path.join(
            os.path.dirname(os.path.abspath(__file__)),
            filename
        )

    @classmethod
    def get_instances_json_path(cls):
        if cls.instances_json_path is None:
            cls.instances_json_path = cls.get_current_dir_filepath(
                "instances.json"
            )
        return cls.instances_json_path

    @classmethod
    def get_context_json_path(cls):
        if cls.context_json_path is None:
            cls.context_json_path = cls.get_current_dir_filepath(
                "context.json"
            )
        return cls.context_json_path

    @classmethod
    def add_instance(cls, instance):
        instances = cls.get_instances()
        instances.append(instance)
        cls.save_instances(instances)

    @classmethod
    def save_instances(cls, instances):
        json_path = cls.get_instances_json_path()
        with open(json_path, "w") as json_stream:
            json.dump(instances, json_stream, indent=4)

    @classmethod
    def get_instances(cls):
        json_path = cls.get_instances_json_path()
        if not os.path.exists(json_path):
            instances = []
            with open(json_path, "w") as json_stream:
                # NOTE: the object to serialize must come before the
                # stream; the original had the two arguments swapped.
                json.dump(instances, json_stream)
        else:
            with open(json_path, "r") as json_stream:
                instances = json.load(json_stream)
        return instances

    @classmethod
    def get_context_data(cls):
        json_path = cls.get_context_json_path()
        if not os.path.exists(json_path):
            data = {}
            with open(json_path, "w") as json_stream:
                json.dump(data, json_stream)
        else:
            with open(json_path, "r") as json_stream:
                data = json.load(json_stream)
        return data

    @classmethod
    def save_context_data(cls, data):
        json_path = cls.get_context_json_path()
        with open(json_path, "w") as json_stream:
            json.dump(data, json_stream, indent=4)


def ls():
    return []


def list_instances():
    return HostContext.get_instances()


def update_instances(update_list):
    updated_instances = {}
    for instance, _changes in update_list:
        updated_instances[instance.id] = instance.data_to_store()

    instances = HostContext.get_instances()
    for instance_data in instances:
        instance_id = instance_data["uuid"]
        if instance_id in updated_instances:
            new_instance_data = updated_instances[instance_id]
            old_keys = set(instance_data.keys())
            new_keys = set(new_instance_data.keys())
            instance_data.update(new_instance_data)
            for key in (old_keys - new_keys):
                instance_data.pop(key)

    HostContext.save_instances(instances)


def remove_instances(instances):
    if not isinstance(instances, (tuple, list)):
        instances = [instances]

    current_instances = HostContext.get_instances()
    for instance in instances:
        instance_id = instance.data["uuid"]
        found_idx = None
        for idx, _instance in enumerate(current_instances):
            if instance_id == _instance["uuid"]:
                found_idx = idx
                break

        if found_idx is not None:
            current_instances.pop(found_idx)
    HostContext.save_instances(current_instances)


def get_context_data():
    return HostContext.get_context_data()


def update_context_data(data, changes):
    HostContext.save_context_data(data)


def get_context_title():
    return HostContext.get_context_title()
74 openpype/hosts/testhost/plugins/create/auto_creator.py (new file)

@@ -0,0 +1,74 @@
from openpype.hosts.testhost.api import pipeline
from openpype.pipeline import (
    AutoCreator,
    CreatedInstance,
    lib
)
from avalon import io


class MyAutoCreator(AutoCreator):
    identifier = "workfile"
    family = "workfile"

    def get_attribute_defs(self):
        output = [
            lib.NumberDef("number_key", label="Number")
        ]
        return output

    def collect_instances(self):
        for instance_data in pipeline.list_instances():
            creator_id = instance_data.get("creator_identifier")
            if creator_id == self.identifier:
                subset_name = instance_data["subset"]
                instance = CreatedInstance(
                    self.family, subset_name, instance_data, self
                )
                self._add_instance_to_context(instance)

    def update_instances(self, update_list):
        pipeline.update_instances(update_list)

    def create(self, options=None):
        existing_instance = None
        for instance in self.create_context.instances:
            if instance.family == self.family:
                existing_instance = instance
                break

        variant = "Main"
        project_name = io.Session["AVALON_PROJECT"]
        asset_name = io.Session["AVALON_ASSET"]
        task_name = io.Session["AVALON_TASK"]
        host_name = io.Session["AVALON_APP"]

        if existing_instance is None:
            asset_doc = io.find_one({"type": "asset", "name": asset_name})
            subset_name = self.get_subset_name(
                variant, task_name, asset_doc, project_name, host_name
            )
            data = {
                "asset": asset_name,
                "task": task_name,
                "variant": variant
            }
            data.update(self.get_dynamic_data(
                variant, task_name, asset_doc, project_name, host_name
            ))

            new_instance = CreatedInstance(
                self.family, subset_name, data, self
            )
            self._add_instance_to_context(new_instance)

        elif (
            existing_instance["asset"] != asset_name
            or existing_instance["task"] != task_name
        ):
            asset_doc = io.find_one({"type": "asset", "name": asset_name})
            subset_name = self.get_subset_name(
                variant, task_name, asset_doc, project_name, host_name
            )
            existing_instance["asset"] = asset_name
            existing_instance["task"] = task_name
70 openpype/hosts/testhost/plugins/create/test_creator_1.py (new file)

@@ -0,0 +1,70 @@
from openpype import resources
from openpype.hosts.testhost.api import pipeline
from openpype.pipeline import (
    Creator,
    CreatedInstance,
    lib
)


class TestCreatorOne(Creator):
    identifier = "test_one"
    label = "test"
    family = "test"
    description = "Testing creator of testhost"

    def get_icon(self):
        return resources.get_openpype_splash_filepath()

    def collect_instances(self):
        for instance_data in pipeline.list_instances():
            creator_id = instance_data.get("creator_identifier")
            if creator_id == self.identifier:
                instance = CreatedInstance.from_existing(
                    instance_data, self
                )
                self._add_instance_to_context(instance)

    def update_instances(self, update_list):
        pipeline.update_instances(update_list)

    def remove_instances(self, instances):
        pipeline.remove_instances(instances)
        for instance in instances:
            self._remove_instance_from_context(instance)

    def create(self, subset_name, data, options=None):
        new_instance = CreatedInstance(self.family, subset_name, data, self)
        pipeline.HostContext.add_instance(new_instance.data_to_store())
        self.log.info(new_instance.data)
        self._add_instance_to_context(new_instance)

    def get_default_variants(self):
        return [
            "myVariant",
            "variantTwo",
            "different_variant"
        ]

    def get_attribute_defs(self):
        output = [
            lib.NumberDef("number_key", label="Number")
        ]
        return output

    def get_detail_description(self):
        return """# Relictus funes est Nyseides currusque nunc oblita

## Causa sed

Lorem markdownum posito consumptis, *plebe Amorque*, abstitimus rogatus fictaque
gladium Circe, nos? Bos aeternum quae. Utque me, si aliquem cladis, et vestigia
arbor, sic mea ferre lacrimae agantur prospiciens hactenus. Amanti dentes pete,
vos quid laudemque rastrorumque terras in gratantibus **radix** erat cedemus?

Pudor tu ponderibus verbaque illa; ire ergo iam Venus patris certe longae
cruentum lecta, et quaeque. Sit doce nox. Anteit ad tempora magni plenaque et
videres mersit sibique auctor in tendunt mittit cunctos ventisque gravitate
volucris quemquam Aeneaden. Pectore Mensis somnus; pectora
[ferunt](http://www.mox.org/oculosbracchia)? Fertilitatis bella dulce et suum?
"""
74 openpype/hosts/testhost/plugins/create/test_creator_2.py (new file)

@@ -0,0 +1,74 @@
from openpype.hosts.testhost.api import pipeline
from openpype.pipeline import (
    Creator,
    CreatedInstance,
    lib
)


class TestCreatorTwo(Creator):
    identifier = "test_two"
    label = "test"
    family = "test"
    description = "A second testing creator"

    def get_icon(self):
        return "cube"

    def create(self, subset_name, data, options=None):
        new_instance = CreatedInstance(self.family, subset_name, data, self)
        pipeline.HostContext.add_instance(new_instance.data_to_store())
        self.log.info(new_instance.data)
        self._add_instance_to_context(new_instance)

    def collect_instances(self):
        for instance_data in pipeline.list_instances():
            creator_id = instance_data.get("creator_identifier")
            if creator_id == self.identifier:
                instance = CreatedInstance.from_existing(
                    instance_data, self
                )
                self._add_instance_to_context(instance)

    def update_instances(self, update_list):
        pipeline.update_instances(update_list)

    def remove_instances(self, instances):
        pipeline.remove_instances(instances)
        for instance in instances:
            self._remove_instance_from_context(instance)

    def get_attribute_defs(self):
        output = [
            lib.NumberDef("number_key"),
            lib.TextDef("text_key")
        ]
        return output

    def get_detail_description(self):
        return """# Lorem ipsum, dolor sit amet. [](https://github.com/sindresorhus/awesome)

> A curated list of awesome lorem ipsum generators.

Inspired by the [awesome](https://github.com/sindresorhus/awesome) list thing.


## Table of Contents

- [Legend](#legend)
- [Practical](#briefcase-practical)
- [Whimsical](#roller_coaster-whimsical)
- [Animals](#rabbit-animals)
- [Eras](#tophat-eras)
- [Famous Individuals](#sunglasses-famous-individuals)
- [Music](#microphone-music)
- [Food and Drink](#pizza-food-and-drink)
- [Geographic and Dialects](#earth_africa-geographic-and-dialects)
- [Literature](#books-literature)
- [Miscellaneous](#cyclone-miscellaneous)
- [Sports and Fitness](#bicyclist-sports-and-fitness)
- [TV and Film](#movie_camera-tv-and-film)
- [Tools, Apps, and Extensions](#wrench-tools-apps-and-extensions)
- [Contribute](#contribute)
- [TODO](#todo)
"""
34 openpype/hosts/testhost/plugins/publish/collect_context.py (new file)

@@ -0,0 +1,34 @@
import pyblish.api

from openpype.pipeline import (
    OpenPypePyblishPluginMixin,
    attribute_definitions
)


class CollectContextDataTestHost(
    pyblish.api.ContextPlugin, OpenPypePyblishPluginMixin
):
    """Collect temp json data sent from a host context,
    and the path for returning json data back to the host itself.
    """

    label = "Collect Source - Test Host"
    order = pyblish.api.CollectorOrder - 0.4
    hosts = ["testhost"]

    @classmethod
    def get_attribute_defs(cls):
        return [
            attribute_definitions.BoolDef(
                "test_bool",
                True,
                label="Bool input"
            )
        ]

    def process(self, context):
        # get json paths from os and load them
        for instance in context:
            instance.data["source"] = "testhost"
@@ -0,0 +1,54 @@
import json
import pyblish.api

from openpype.pipeline import (
    OpenPypePyblishPluginMixin,
    attribute_definitions
)


class CollectInstanceOneTestHost(
    pyblish.api.InstancePlugin, OpenPypePyblishPluginMixin
):
    """Collect temp json data sent from a host context,
    and the path for returning json data back to the host itself.
    """

    label = "Collect Instance 1 - Test Host"
    order = pyblish.api.CollectorOrder - 0.3
    hosts = ["testhost"]

    @classmethod
    def get_attribute_defs(cls):
        return [
            attribute_definitions.NumberDef(
                "version",
                default=1,
                minimum=1,
                maximum=999,
                decimals=0,
                label="Version"
            )
        ]

    def process(self, instance):
        self._debug_log(instance)

        publish_attributes = instance.data.get("publish_attributes")
        if not publish_attributes:
            return

        values = publish_attributes.get(self.__class__.__name__)
        if not values:
            return

        instance.data["version"] = values["version"]

    def _debug_log(self, instance):
        def _default_json(value):
            return str(value)

        self.log.info(
            json.dumps(instance.data, indent=4, default=_default_json)
        )
@@ -0,0 +1,57 @@
import pyblish.api
from openpype.pipeline import PublishValidationError


class ValidateInstanceAssetRepair(pyblish.api.Action):
    """Repair the instance asset."""

    label = "Repair"
    icon = "wrench"
    on = "failed"

    def process(self, context, plugin):
        pass


description = """
## Publish plugins

### Validate Scene Settings

#### Skip Resolution Check for Tasks

Set regex pattern(s) to look for in a Task name to skip the resolution check against values from the DB.

#### Skip Timeline Check for Tasks

Set regex pattern(s) to look for in a Task name to skip the `frameStart`, `frameEnd` check against values from the DB.

### AfterEffects Submit to Deadline

* `Use Published scene` - Set to True (green) when Deadline should take the published scene as a source instead of the uploaded local one.
* `Priority` - priority of the job on the farm
* `Primary Pool` - a list of pools fetched from the server that you can select from.
* `Secondary Pool`
* `Frames Per Task` - number of sequence divisions between individual tasks (chunks)
  making one job on the farm.
"""


class ValidateContextWithError(pyblish.api.ContextPlugin):
    """Validate the instance asset is the currently selected context asset.

    As it might happen that multiple workfiles are opened, switching
    between them would mess with the selected context.
    In that case outputs might be written under the wrong asset!

    The Repair action will use the Context asset value (from Workfiles or
    Launcher). Closing and reopening with Workfiles will refresh the
    Context value.
    """

    label = "Validate Context With Error"
    hosts = ["testhost"]
    actions = [ValidateInstanceAssetRepair]
    order = pyblish.api.ValidatorOrder

    def process(self, context):
        raise PublishValidationError("Crashing", "Context error", description)
@@ -0,0 +1,57 @@
import pyblish.api
from openpype.pipeline import PublishValidationError


class ValidateInstanceAssetRepair(pyblish.api.Action):
    """Repair the instance asset."""

    label = "Repair"
    icon = "wrench"
    on = "failed"

    def process(self, context, plugin):
        pass


description = """
## Publish plugins

### Validate Scene Settings

#### Skip Resolution Check for Tasks

Set regex pattern(s) to look for in a Task name to skip the resolution check against values from the DB.

#### Skip Timeline Check for Tasks

Set regex pattern(s) to look for in a Task name to skip the `frameStart`, `frameEnd` check against values from the DB.

### AfterEffects Submit to Deadline

* `Use Published scene` - Set to True (green) when Deadline should take the published scene as a source instead of the uploaded local one.
* `Priority` - priority of the job on the farm
* `Primary Pool` - a list of pools fetched from the server that you can select from.
* `Secondary Pool`
* `Frames Per Task` - number of sequence divisions between individual tasks (chunks)
  making one job on the farm.
"""


class ValidateWithError(pyblish.api.InstancePlugin):
    """Validate the instance asset is the currently selected context asset.

    As it might happen that multiple workfiles are opened, switching
    between them would mess with the selected context.
    In that case outputs might be written under the wrong asset!

    The Repair action will use the Context asset value (from Workfiles or
    Launcher). Closing and reopening with Workfiles will refresh the
    Context value.
    """

    label = "Validate With Error"
    hosts = ["testhost"]
    actions = [ValidateInstanceAssetRepair]
    order = pyblish.api.ValidatorOrder

    def process(self, instance):
        raise PublishValidationError("Crashing", "Instance error", description)
70 openpype/hosts/testhost/run_publish.py (new file)

@@ -0,0 +1,70 @@
import os
import sys

mongo_url = ""
project_name = ""
asset_name = ""
task_name = ""
ftrack_url = ""
ftrack_username = ""
ftrack_api_key = ""


def multi_dirname(path, times=1):
    for _ in range(times):
        path = os.path.dirname(path)
    return path


host_name = "testhost"
current_file = os.path.abspath(__file__)
openpype_dir = multi_dirname(current_file, 4)

os.environ["OPENPYPE_MONGO"] = mongo_url
os.environ["OPENPYPE_ROOT"] = openpype_dir
os.environ["AVALON_MONGO"] = mongo_url
os.environ["AVALON_PROJECT"] = project_name
os.environ["AVALON_ASSET"] = asset_name
os.environ["AVALON_TASK"] = task_name
os.environ["AVALON_APP"] = host_name
os.environ["OPENPYPE_DATABASE_NAME"] = "openpype"
os.environ["AVALON_CONFIG"] = "openpype"
os.environ["AVALON_TIMEOUT"] = "1000"
os.environ["AVALON_DB"] = "avalon"
os.environ["FTRACK_SERVER"] = ftrack_url
os.environ["FTRACK_API_USER"] = ftrack_username
os.environ["FTRACK_API_KEY"] = ftrack_api_key
for path in [
    openpype_dir,
    r"{}\repos\avalon-core".format(openpype_dir),
    r"{}\.venv\Lib\site-packages".format(openpype_dir)
]:
    sys.path.append(path)

from Qt import QtWidgets, QtCore

from openpype.tools.publisher.window import PublisherWindow


def main():
    """Main function for testing purposes."""
    import avalon.api
    import pyblish.api
    from openpype.modules import ModulesManager
    from openpype.hosts.testhost import api as testhost

    manager = ModulesManager()
    for plugin_path in manager.collect_plugin_paths()["publish"]:
        pyblish.api.register_plugin_path(plugin_path)

    avalon.api.install(testhost)

    QtWidgets.QApplication.setAttribute(QtCore.Qt.AA_EnableHighDpiScaling)
    app = QtWidgets.QApplication([])
    window = PublisherWindow()
    window.show()
    app.exec_()


if __name__ == "__main__":
    main()
|
@ -4,7 +4,7 @@ import copy
|
|||
import pyblish.api
|
||||
from avalon import io
|
||||
|
||||
from openpype.lib import get_subset_name
|
||||
from openpype.lib import get_subset_name_with_asset_doc
|
||||
|
||||
|
||||
class CollectInstances(pyblish.api.ContextPlugin):
|
||||
|
|
@ -70,16 +70,10 @@ class CollectInstances(pyblish.api.ContextPlugin):
|
|||
# - not sure if it's good idea to require asset id in
|
||||
# get_subset_name?
|
||||
asset_name = context.data["workfile_context"]["asset"]
|
||||
asset_doc = io.find_one(
|
||||
{
|
||||
"type": "asset",
|
||||
"name": asset_name
|
||||
},
|
||||
{"_id": 1}
|
||||
)
|
||||
asset_id = None
|
||||
if asset_doc:
|
||||
asset_id = asset_doc["_id"]
|
||||
asset_doc = io.find_one({
|
||||
"type": "asset",
|
||||
"name": asset_name
|
||||
})
|
||||
|
||||
# Project name from workfile context
|
||||
project_name = context.data["workfile_context"]["project"]
|
||||
|
|
@ -88,11 +82,11 @@ class CollectInstances(pyblish.api.ContextPlugin):
|
|||
# Use empty variant value
|
||||
variant = ""
|
||||
task_name = io.Session["AVALON_TASK"]
|
||||
new_subset_name = get_subset_name(
|
||||
new_subset_name = get_subset_name_with_asset_doc(
|
||||
family,
|
||||
variant,
|
||||
task_name,
|
||||
asset_id,
|
||||
asset_doc,
|
||||
project_name,
|
||||
host_name
|
||||
)
|
||||
|
|
|
|||
|
|
@ -3,7 +3,7 @@ import json
|
|||
import pyblish.api
|
||||
from avalon import io
|
||||
|
||||
from openpype.lib import get_subset_name
|
||||
from openpype.lib import get_subset_name_with_asset_doc
|
||||
|
||||
|
||||
class CollectWorkfile(pyblish.api.ContextPlugin):
|
||||
|
|
@ -28,16 +28,10 @@ class CollectWorkfile(pyblish.api.ContextPlugin):
|
|||
# get_subset_name?
|
||||
family = "workfile"
|
||||
asset_name = context.data["workfile_context"]["asset"]
|
||||
asset_doc = io.find_one(
|
||||
{
|
||||
"type": "asset",
|
||||
"name": asset_name
|
||||
},
|
||||
{"_id": 1}
|
||||
)
|
||||
asset_id = None
|
||||
if asset_doc:
|
||||
asset_id = asset_doc["_id"]
|
||||
asset_doc = io.find_one({
|
||||
"type": "asset",
|
||||
"name": asset_name
|
||||
})
|
||||
|
||||
# Project name from workfile context
|
||||
project_name = context.data["workfile_context"]["project"]
|
||||
|
|
@ -46,11 +40,11 @@ class CollectWorkfile(pyblish.api.ContextPlugin):
|
|||
# Use empty variant value
|
||||
variant = ""
|
||||
task_name = io.Session["AVALON_TASK"]
|
||||
subset_name = get_subset_name(
|
||||
subset_name = get_subset_name_with_asset_doc(
|
||||
family,
|
||||
variant,
|
||||
task_name,
|
||||
asset_id,
|
||||
asset_doc,
|
||||
project_name,
|
||||
host_name
|
||||
)
@@ -11,6 +11,7 @@ from avalon.api import AvalonMongoDB

 from openpype.lib import OpenPypeMongoConnection
 from openpype_modules.avalon_apps.rest_api import _RestApiEndpoint
+from openpype.lib.plugin_tools import parse_json

 from openpype.lib import PypeLogger

@@ -175,6 +176,9 @@ class TaskNode(Node):
 class WebpublisherBatchPublishEndpoint(_RestApiEndpoint):
     """Triggers headless publishing of batch."""
     async def post(self, request) -> Response:
+        # for postprocessing in host, currently only PS
+        host_map = {"photoshop": [".psd", ".psb"]}
+
         output = {}
         log.info("WebpublisherBatchPublishEndpoint called")
         content = await request.json()

@@ -182,10 +186,44 @@ class WebpublisherBatchPublishEndpoint(_RestApiEndpoint):
         batch_path = os.path.join(self.resource.upload_dir,
                                   content["batch"])

+        add_args = {
+            "host": "webpublisher",
+            "project": content["project_name"],
+            "user": content["user"]
+        }
+
+        command = "remotepublish"
+
+        if content.get("studio_processing"):
+            log.info("Post processing called")
+
+            batch_data = parse_json(os.path.join(batch_path, "manifest.json"))
+            if not batch_data:
+                raise ValueError(
+                    "Cannot parse batch meta in {} folder".format(batch_path))
+            task_dir_name = batch_data["tasks"][0]
+            task_data = parse_json(os.path.join(batch_path, task_dir_name,
+                                                "manifest.json"))
+            if not task_data:
+                raise ValueError(
+                    "Cannot parse batch meta in {} folder".format(task_data))
+
+            command = "remotepublishfromapp"
+            for host, extensions in host_map.items():
+                for ext in extensions:
+                    for file_name in task_data["files"]:
+                        if ext in file_name:
+                            add_args["host"] = host
+                            break
+
+            if not add_args.get("host"):
+                raise ValueError(
+                    "Couldn't discern host from {}".format(task_data["files"]))
+
         openpype_app = self.resource.executable
         args = [
             openpype_app,
-            'remotepublish',
+            command,
             batch_path
         ]

@@ -193,12 +231,6 @@ class WebpublisherBatchPublishEndpoint(_RestApiEndpoint):
             msg = "Non existent OpenPype executable {}".format(openpype_app)
             raise RuntimeError(msg)

-        add_args = {
-            "host": "webpublisher",
-            "project": content["project_name"],
-            "user": content["user"]
-        }
-
         for key, value in add_args.items():
             args.append("--{}".format(key))
             args.append(value)
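For illustration (not part of the diff), a minimal sketch of how the `add_args` mapping ends up on the command line; the executable path and values below are hypothetical.

```python
# Illustrative only: hypothetical executable path and batch values.
add_args = {
    "host": "webpublisher",
    "project": "demo_project",
    "user": "artist@studio.com",
}

args = ["/opt/openpype/openpype_console", "remotepublish", "/uploads/batch_01"]
for key, value in add_args.items():
    args.append("--{}".format(key))
    args.append(value)

print(" ".join(args))
# -> /opt/openpype/openpype_console remotepublish /uploads/batch_01
#    --host webpublisher --project demo_project --user artist@studio.com
```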
@@ -130,6 +130,7 @@ from .applications import (
 from .plugin_tools import (
     TaskNotSetError,
     get_subset_name,
+    get_subset_name_with_asset_doc,
     prepare_template_data,
     filter_pyblish_plugins,
     set_plugin_attributes_from_settings,

@@ -249,6 +250,7 @@ __all__ = [

     "TaskNotSetError",
     "get_subset_name",
+    "get_subset_name_with_asset_doc",
     "filter_pyblish_plugins",
     "set_plugin_attributes_from_settings",
     "source_hash",
@@ -245,6 +245,27 @@ def process_sequence(
         report_items["Source file was not found"].append(msg)
         return report_items, 0

+    delivery_templates = anatomy.templates.get("delivery") or {}
+    delivery_template = delivery_templates.get(template_name)
+    if delivery_template is None:
+        msg = (
+            "Delivery template \"{}\" in anatomy of project \"{}\""
+            " was not found"
+        ).format(template_name, anatomy.project_name)
+        report_items[""].append(msg)
+        return report_items, 0
+
+    # Check if 'frame' key is available in template which is required
+    # for sequence delivery
+    if "{frame" not in delivery_template:
+        msg = (
+            "Delivery template \"{}\" in anatomy of project \"{}\""
+            " does not contain '{{frame}}' key to fill. Delivery of sequence"
+            " can't be processed."
+        ).format(template_name, anatomy.project_name)
+        report_items[""].append(msg)
+        return report_items, 0
+
     dir_path, file_name = os.path.split(str(src_path))

     context = repre["context"]
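To make the `{frame}` requirement concrete, a minimal sketch of per-frame path formatting; the template string and fill data are illustrative, not a real project anatomy.

```python
# Illustrative only: a made-up delivery template and fill data.
delivery_template = "{root}/delivery/{asset}/{subset}.{frame}.exr"
fill_data = {"root": "/mnt/projects", "asset": "sh010", "subset": "renderMain"}

for frame in range(1001, 1004):
    print(delivery_template.format(frame="{:04d}".format(frame), **fill_data))

# Without a '{frame}' key every frame would format to the same path and
# overwrite the previous one, hence the early abort above.
```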
@@ -28,17 +28,44 @@ class TaskNotSetError(KeyError):
         super(TaskNotSetError, self).__init__(msg)


-def get_subset_name(
+def get_subset_name_with_asset_doc(
     family,
     variant,
     task_name,
-    asset_id,
+    asset_doc,
     project_name=None,
     host_name=None,
     default_template=None,
-    dynamic_data=None,
-    dbcon=None
+    dynamic_data=None
 ):
+    """Calculate subset name based on passed context and OpenPype settings.
+
+    Subset name templates are defined in `project_settings/global/tools/creator
+    /subset_name_profiles` which holds profiles with host name, family,
+    task name and task type filters. If context does not match any profile
+    then `DEFAULT_SUBSET_TEMPLATE` is used as default template.
+
+    That's the main reason why so many arguments are required to calculate
+    subset name.
+
+    Args:
+        family (str): Instance family.
+        variant (str): In most cases it is user input during creation.
+        task_name (str): Task name in which context the instance is created.
+        asset_doc (dict): Queried asset document with its tasks in data.
+            Used to get task type.
+        project_name (str): Name of project in which the instance is created.
+            Important for project settings that are loaded.
+        host_name (str): One of filtering criteria for template profile
+            filters.
+        default_template (str): Default template if no profile matches
+            passed context. Constant 'DEFAULT_SUBSET_TEMPLATE' is used if
+            not passed.
+        dynamic_data (dict): Dynamic data specific for a creator which creates
+            the instance.
+    """
     if not family:
         return ""
@@ -53,25 +80,6 @@ def get_subset_name(

         project_name = avalon.api.Session["AVALON_PROJECT"]

-    # Function should expect asset document instead of asset id
-    # - that way `dbcon` is not needed
-    if dbcon is None:
-        from avalon.api import AvalonMongoDB
-
-        dbcon = AvalonMongoDB()
-        dbcon.Session["AVALON_PROJECT"] = project_name
-
-    dbcon.install()
-
-    asset_doc = dbcon.find_one(
-        {
-            "type": "asset",
-            "_id": asset_id
-        },
-        {
-            "data.tasks": True
-        }
-    )
     asset_tasks = asset_doc.get("data", {}).get("tasks") or {}
     task_info = asset_tasks.get(task_name) or {}
     task_type = task_info.get("type")
@@ -113,6 +121,49 @@ def get_subset_name(
     return template.format(**prepare_template_data(fill_pairs))


+def get_subset_name(
+    family,
+    variant,
+    task_name,
+    asset_id,
+    project_name=None,
+    host_name=None,
+    default_template=None,
+    dynamic_data=None,
+    dbcon=None
+):
+    """Calculate subset name using OpenPype settings.
+
+    This variant of the function expects an asset id as argument.
+
+    This is a legacy function which should be replaced with
+    `get_subset_name_with_asset_doc` where an asset document is expected.
+    """
+    if dbcon is None:
+        from avalon.api import AvalonMongoDB
+
+        dbcon = AvalonMongoDB()
+        dbcon.Session["AVALON_PROJECT"] = project_name
+
+    dbcon.install()
+
+    asset_doc = dbcon.find_one(
+        {"_id": asset_id},
+        {"data.tasks": True}
+    ) or {}
+
+    return get_subset_name_with_asset_doc(
+        family,
+        variant,
+        task_name,
+        asset_doc,
+        project_name,
+        host_name,
+        default_template,
+        dynamic_data
+    )
+
+
 def prepare_template_data(fill_pairs):
     """
     Prepares formatted data for filling template.
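A hedged usage sketch of the refactored pair of functions; the asset document shape mirrors the queries above and all values are made up.

```python
from openpype.lib import get_subset_name_with_asset_doc

# Made-up asset document with the "data.tasks" structure queried above.
asset_doc = {
    "_id": "<ObjectId>",
    "data": {"tasks": {"compositing": {"type": "Compositing"}}},
}

# New variant: the caller already has the asset document, so no database
# connection is needed inside the function.
subset_name = get_subset_name_with_asset_doc(
    "render",           # family
    "Main",             # variant
    "compositing",      # task_name
    asset_doc,
    project_name="demo_project",
    host_name="nuke",
)

# Legacy variant: only an asset id is available; the function queries the
# asset document itself and may therefore need `dbcon`.
# subset_name = get_subset_name("render", "Main", "compositing", asset_id)
```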
openpype/lib/python_2_comp.py (new file, 41 lines)
@@ -0,0 +1,41 @@
import weakref


class _weak_callable:
    def __init__(self, obj, func):
        self.im_self = obj
        self.im_func = func

    def __call__(self, *args, **kws):
        if self.im_self is None:
            return self.im_func(*args, **kws)
        else:
            return self.im_func(self.im_self, *args, **kws)


class WeakMethod:
    """ Wraps a function or, more importantly, a bound method in
    a way that allows a bound method's object to be GCed, while
    providing the same interface as a normal weak reference. """

    def __init__(self, fn):
        try:
            self._obj = weakref.ref(fn.im_self)
            self._meth = fn.im_func
        except AttributeError:
            # It's not a bound method
            self._obj = None
            self._meth = fn

    def __call__(self):
        if self._dead():
            return None
        return _weak_callable(self._getobj(), self._meth)

    def _dead(self):
        return self._obj is not None and self._obj() is None

    def _getobj(self):
        if self._obj is None:
            return None
        return self._obj()
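The helper above relies on the Python 2 bound-method attributes `im_self`/`im_func` (hence the module name). On Python 3 the standard library's `weakref.WeakMethod` covers the same need; a sketch of the intended behavior:

```python
import gc
import weakref


class Listener(object):
    def on_event(self):
        print("event received")


listener = Listener()
ref = weakref.WeakMethod(listener.on_event)  # Python 3 stdlib equivalent

method = ref()
if method is not None:
    method()  # -> "event received"

del listener
gc.collect()
print(ref() is None)  # True: the reference did not keep the listener alive
```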
@@ -100,6 +100,55 @@ def publish_and_log(dbcon, _id, log, close_plugin_name=None):
     )


+def fail_batch(_id, batches_in_progress, dbcon):
+    """Set current batch as failed as there are some stuck batches."""
+    running_batches = [str(batch["_id"])
+                       for batch in batches_in_progress
+                       if batch["_id"] != _id]
+    msg = "There are still running batches {}\n". \
+        format("\n".join(running_batches))
+    msg += "Ask admin to check them and reprocess current batch"
+    dbcon.update_one(
+        {"_id": _id},
+        {"$set":
+            {
+                "finish_date": datetime.now(),
+                "status": "error",
+                "log": msg
+            }}
+    )
+    raise ValueError(msg)
+
+
+def find_variant_key(application_manager, host):
+    """Searches for latest installed variant for 'host'.
+
+    Args:
+        application_manager (ApplicationManager)
+        host (str)
+    Returns:
+        (string) (optional)
+    Raises:
+        (ValueError) if no variant found
+    """
+    app_group = application_manager.app_groups.get(host)
+    if not app_group or not app_group.enabled:
+        raise ValueError("No application {} configured".format(host))
+
+    found_variant_key = None
+    # finds most up-to-date variant if any installed
+    for variant_key, variant in app_group.variants.items():
+        for executable in variant.executables:
+            if executable.exists():
+                found_variant_key = variant_key
+
+    if not found_variant_key:
+        raise ValueError("No executable for {} found".format(host))
+
+    return found_variant_key
+
+
 def _get_close_plugin(close_plugin_name, log):
     if close_plugin_name:
         plugins = pyblish.api.discover()
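A hedged usage sketch for `find_variant_key`; `ApplicationManager` is assumed to be importable from `openpype.lib` (it is re-exported from `.applications` above) and the host name is illustrative.

```python
from openpype.lib import ApplicationManager

application_manager = ApplicationManager()
try:
    variant = find_variant_key(application_manager, "photoshop")
    # e.g. "photoshop/2021" - a full name usable for application launch
    app_name = "photoshop/{}".format(variant)
    print("Would launch:", app_name)
except ValueError:
    print("No installed Photoshop variant was found")
```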
@@ -1,8 +1,6 @@
 import os
 import collections
 import copy
-import json
-import queue
 import time
 import datetime
 import atexit

@@ -193,7 +191,9 @@ class SyncToAvalonEvent(BaseEvent):
             self._avalon_ents_by_ftrack_id = {}
             proj, ents = self.avalon_entities
             if proj:
-                ftrack_id = proj["data"]["ftrackId"]
+                ftrack_id = proj["data"].get("ftrackId")
+                if ftrack_id is None:
+                    ftrack_id = self._update_project_ftrack_id()
                 self._avalon_ents_by_ftrack_id[ftrack_id] = proj
             for ent in ents:
                 ftrack_id = ent["data"].get("ftrackId")

@@ -202,6 +202,16 @@ class SyncToAvalonEvent(BaseEvent):
                     self._avalon_ents_by_ftrack_id[ftrack_id] = ent
         return self._avalon_ents_by_ftrack_id

+    def _update_project_ftrack_id(self):
+        ftrack_id = self.cur_project["id"]
+
+        self.dbcon.update_one(
+            {"type": "project"},
+            {"$set": {"data.ftrackId": ftrack_id}}
+        )
+
+        return ftrack_id
+
     @property
     def avalon_subsets_by_parents(self):
         if self._avalon_subsets_by_parents is None:

@@ -340,13 +350,13 @@ class SyncToAvalonEvent(BaseEvent):
             self._avalon_archived_by_id[mongo_id] = entity

     def _bubble_changeability(self, unchangeable_ids):
-        unchangeable_queue = queue.Queue()
+        unchangeable_queue = collections.deque()
         for entity_id in unchangeable_ids:
-            unchangeable_queue.put((entity_id, False))
+            unchangeable_queue.append((entity_id, False))

         processed_parents_ids = []
-        while not unchangeable_queue.empty():
-            entity_id, child_is_archived = unchangeable_queue.get()
+        while unchangeable_queue:
+            entity_id, child_is_archived = unchangeable_queue.popleft()
             # skip if already processed
             if entity_id in processed_parents_ids:
                 continue

@@ -388,7 +398,7 @@ class SyncToAvalonEvent(BaseEvent):
             parent_id = entity["data"]["visualParent"]
             if parent_id is None:
                 continue
-            unchangeable_queue.put((parent_id, child_is_archived))
+            unchangeable_queue.append((parent_id, child_is_archived))

     def reset_variables(self):
         """Reset variables so each event callback has clear env."""
@@ -1050,7 +1060,7 @@ class SyncToAvalonEvent(BaseEvent):
             key=(lambda entity: len(entity["link"]))
         )

-        children_queue = queue.Queue()
+        children_queue = collections.deque()
         for entity in synchronizable_ents:
             parent_avalon_ent = self.avalon_ents_by_ftrack_id[
                 entity["parent_id"]

@@ -1060,10 +1070,10 @@ class SyncToAvalonEvent(BaseEvent):
             for child in entity["children"]:
                 if child.entity_type.lower() == "task":
                     continue
-                children_queue.put(child)
+                children_queue.append(child)

-        while not children_queue.empty():
-            entity = children_queue.get()
+        while children_queue:
+            entity = children_queue.popleft()
             ftrack_id = entity["id"]
             name = entity["name"]
             ent_by_ftrack_id = self.avalon_ents_by_ftrack_id.get(ftrack_id)

@@ -1093,7 +1103,7 @@ class SyncToAvalonEvent(BaseEvent):
             for child in entity["children"]:
                 if child.entity_type.lower() == "task":
                     continue
-                children_queue.put(child)
+                children_queue.append(child)

     def create_entity_in_avalon(self, ftrack_ent, parent_avalon):
         proj, ents = self.avalon_entities

@@ -1278,7 +1288,7 @@ class SyncToAvalonEvent(BaseEvent):
             "Processing renamed entities: {}".format(str(ent_infos))
         )

-        changeable_queue = queue.Queue()
+        changeable_queue = collections.deque()
         for ftrack_id, ent_info in ent_infos.items():
             entity_type = ent_info["entity_type"]
             if entity_type == "Task":

@@ -1306,7 +1316,7 @@ class SyncToAvalonEvent(BaseEvent):

             mongo_id = avalon_ent["_id"]
             if self.changeability_by_mongo_id[mongo_id]:
-                changeable_queue.put((ftrack_id, avalon_ent, new_name))
+                changeable_queue.append((ftrack_id, avalon_ent, new_name))
             else:
                 ftrack_ent = self.ftrack_ents_by_id[ftrack_id]
                 ftrack_ent["name"] = avalon_ent["name"]

@@ -1348,8 +1358,8 @@ class SyncToAvalonEvent(BaseEvent):

         old_names = []
         # Process renaming in Avalon DB
-        while not changeable_queue.empty():
-            ftrack_id, avalon_ent, new_name = changeable_queue.get()
+        while changeable_queue:
+            ftrack_id, avalon_ent, new_name = changeable_queue.popleft()
             mongo_id = avalon_ent["_id"]
             old_name = avalon_ent["name"]

@@ -1390,13 +1400,13 @@ class SyncToAvalonEvent(BaseEvent):
             # - it's name may be changed in next iteration
             same_name_ftrack_id = same_name_avalon_ent["data"]["ftrackId"]
             same_is_unprocessed = False
-            for item in list(changeable_queue.queue):
+            for item in changeable_queue:
                 if same_name_ftrack_id == item[0]:
                     same_is_unprocessed = True
                     break

             if same_is_unprocessed:
-                changeable_queue.put((ftrack_id, avalon_ent, new_name))
+                changeable_queue.append((ftrack_id, avalon_ent, new_name))
                 continue

             self.duplicated.append(ftrack_id)

@@ -2008,12 +2018,12 @@ class SyncToAvalonEvent(BaseEvent):
         # ftrack_parenting = collections.defaultdict(list)
         entities_dict = collections.defaultdict(dict)

-        children_queue = queue.Queue()
-        parent_queue = queue.Queue()
+        children_queue = collections.deque()
+        parent_queue = collections.deque()

         for mongo_id in hier_cust_attrs_ids:
             avalon_ent = self.avalon_ents_by_id[mongo_id]
-            parent_queue.put(avalon_ent)
+            parent_queue.append(avalon_ent)
             ftrack_id = avalon_ent["data"]["ftrackId"]
             if ftrack_id not in entities_dict:
                 entities_dict[ftrack_id] = {

@@ -2040,10 +2050,10 @@ class SyncToAvalonEvent(BaseEvent):
                 entities_dict[_ftrack_id]["parent_id"] = ftrack_id
                 if _ftrack_id not in entities_dict[ftrack_id]["children"]:
                     entities_dict[ftrack_id]["children"].append(_ftrack_id)
-                children_queue.put(children_ent)
+                children_queue.append(children_ent)

-        while not children_queue.empty():
-            avalon_ent = children_queue.get()
+        while children_queue:
+            avalon_ent = children_queue.popleft()
             mongo_id = avalon_ent["_id"]
             ftrack_id = avalon_ent["data"]["ftrackId"]
             if ftrack_id in cust_attrs_ftrack_ids:

@@ -2066,10 +2076,10 @@ class SyncToAvalonEvent(BaseEvent):
                 entities_dict[_ftrack_id]["parent_id"] = ftrack_id
                 if _ftrack_id not in entities_dict[ftrack_id]["children"]:
                     entities_dict[ftrack_id]["children"].append(_ftrack_id)
-                children_queue.put(children_ent)
+                children_queue.append(children_ent)

-        while not parent_queue.empty():
-            avalon_ent = parent_queue.get()
+        while parent_queue:
+            avalon_ent = parent_queue.popleft()
             if avalon_ent["type"].lower() == "project":
                 continue

@@ -2100,7 +2110,7 @@ class SyncToAvalonEvent(BaseEvent):
             # if ftrack_id not in ftrack_parenting[parent_ftrack_id]:
             #     ftrack_parenting[parent_ftrack_id].append(ftrack_id)

-            parent_queue.put(parent_ent)
+            parent_queue.append(parent_ent)

         # Prepare values to query
         configuration_ids = set()

@@ -2174,11 +2184,13 @@ class SyncToAvalonEvent(BaseEvent):
             if value is not None:
                 project_values[key] = value

-        hier_down_queue = queue.Queue()
-        hier_down_queue.put((project_values, ftrack_project_id))
+        hier_down_queue = collections.deque()
+        hier_down_queue.append(
+            (project_values, ftrack_project_id)
+        )

-        while not hier_down_queue.empty():
-            hier_values, parent_id = hier_down_queue.get()
+        while hier_down_queue:
+            hier_values, parent_id = hier_down_queue.popleft()
             for child_id in entities_dict[parent_id]["children"]:
                 _hier_values = hier_values.copy()
                 for name in hier_cust_attrs_keys:

@@ -2187,7 +2199,7 @@ class SyncToAvalonEvent(BaseEvent):
                     _hier_values[name] = value

                 entities_dict[child_id]["hier_attrs"].update(_hier_values)
-                hier_down_queue.put((_hier_values, child_id))
+                hier_down_queue.append((_hier_values, child_id))

         ftrack_mongo_mapping = {}
         for mongo_id, ftrack_id in mongo_ftrack_mapping.items():

@@ -2302,11 +2314,12 @@ class SyncToAvalonEvent(BaseEvent):
         """
         mongo_changes_bulk = []
         for mongo_id, changes in self.updates.items():
-            filter = {"_id": mongo_id}
             avalon_ent = self.avalon_ents_by_id[mongo_id]
             is_project = avalon_ent["type"] == "project"
             change_data = avalon_sync.from_dict_to_set(changes, is_project)
-            mongo_changes_bulk.append(UpdateOne(filter, change_data))
+            mongo_changes_bulk.append(
+                UpdateOne({"_id": mongo_id}, change_data)
+            )

         if not mongo_changes_bulk:
             return
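For background, a minimal pymongo sketch of the bulk-update pattern used above; the connection string and update data are placeholders.

```python
from pymongo import MongoClient, UpdateOne

dbcon = MongoClient("mongodb://localhost:27017")["avalon"]["demo_project"]

updates = {
    "60f5b1...": {"$set": {"name": "sh010_renamed"}},  # made-up id/change
}
mongo_changes_bulk = [
    UpdateOne({"_id": mongo_id}, change_data)
    for mongo_id, change_data in updates.items()
]
if mongo_changes_bulk:
    # One round trip instead of one update_one() call per document.
    dbcon.bulk_write(mongo_changes_bulk)
```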
@@ -1,7 +1,6 @@
 import collections
 import uuid
 from datetime import datetime
-from queue import Queue

 from bson.objectid import ObjectId
 from openpype_modules.ftrack.lib import BaseAction, statics_icon

@@ -473,12 +472,12 @@ class DeleteAssetSubset(BaseAction):
                 continue
             ftrack_ids_to_delete.append(ftrack_id)

-        children_queue = Queue()
+        children_queue = collections.deque()
         for mongo_id in assets_to_delete:
-            children_queue.put(mongo_id)
+            children_queue.append(mongo_id)

-        while not children_queue.empty():
-            mongo_id = children_queue.get()
+        while children_queue:
+            mongo_id = children_queue.popleft()
             if mongo_id in asset_ids_to_archive:
                 continue

@@ -494,7 +493,7 @@ class DeleteAssetSubset(BaseAction):
             for child in children:
                 child_id = child["_id"]
                 if child_id not in asset_ids_to_archive:
-                    children_queue.put(child_id)
+                    children_queue.append(child_id)

         # Prepare names of assets in ftrack and ids of subsets in mongo
         asset_names_to_delete = []
@@ -6,11 +6,6 @@ import copy

 import six

-if six.PY3:
-    from queue import Queue
-else:
-    from Queue import Queue
-
 from avalon.api import AvalonMongoDB

 import avalon

@@ -146,11 +141,11 @@ def from_dict_to_set(data, is_project):
         data.pop("data")

     result = {"$set": {}}
-    dict_queue = Queue()
-    dict_queue.put((None, data))
+    dict_queue = collections.deque()
+    dict_queue.append((None, data))

-    while not dict_queue.empty():
-        _key, _data = dict_queue.get()
+    while dict_queue:
+        _key, _data = dict_queue.popleft()
         for key, value in _data.items():
             new_key = key
             if _key is not None:

@@ -160,7 +155,7 @@ def from_dict_to_set(data, is_project):
                     (isinstance(value, dict) and not bool(value)):  # empty dict
                 result["$set"][new_key] = value
                 continue
-            dict_queue.put((new_key, value))
+            dict_queue.append((new_key, value))

     if task_changes is not not_set and task_changes_key:
         result["$set"][task_changes_key] = task_changes
@@ -714,7 +709,7 @@ class SyncEntitiesFactory:
         self.filter_by_duplicate_regex()

     def filter_by_duplicate_regex(self):
-        filter_queue = Queue()
+        filter_queue = collections.deque()
         failed_regex_msg = "{} - Entity has invalid symbols in the name"
         duplicate_msg = "There are multiple entities with the name: \"{}\":"

@@ -722,18 +717,18 @@ class SyncEntitiesFactory:
             for id in ids:
                 ent_path = self.get_ent_path(id)
                 self.log.warning(failed_regex_msg.format(ent_path))
-                filter_queue.put(id)
+                filter_queue.append(id)

         for name, ids in self.duplicates.items():
             self.log.warning(duplicate_msg.format(name))
             for id in ids:
                 ent_path = self.get_ent_path(id)
                 self.log.warning(ent_path)
-                filter_queue.put(id)
+                filter_queue.append(id)

         filtered_ids = []
-        while not filter_queue.empty():
-            ftrack_id = filter_queue.get()
+        while filter_queue:
+            ftrack_id = filter_queue.popleft()
             if ftrack_id in filtered_ids:
                 continue

@@ -749,7 +744,7 @@ class SyncEntitiesFactory:

             filtered_ids.append(ftrack_id)
             for child_id in entity_dict.get("children", []):
-                filter_queue.put(child_id)
+                filter_queue.append(child_id)

         for name, ids in self.tasks_failed_regex.items():
             for id in ids:

@@ -768,10 +763,10 @@ class SyncEntitiesFactory:
         ) == "_notset_":
             return

-        self.filter_queue = Queue()
-        self.filter_queue.put((self.ft_project_id, False))
-        while not self.filter_queue.empty():
-            parent_id, remove = self.filter_queue.get()
+        filter_queue = collections.deque()
+        filter_queue.append((self.ft_project_id, False))
+        while filter_queue:
+            parent_id, remove = filter_queue.popleft()
             if remove:
                 parent_dict = self.entities_dict.pop(parent_id, {})
                 self.all_filtered_entities[parent_id] = parent_dict

@@ -790,7 +785,7 @@ class SyncEntitiesFactory:
                         child_id
                     )
                     _remove = True
-                self.filter_queue.put((child_id, _remove))
+                filter_queue.append((child_id, _remove))

     def filter_by_selection(self, event):
         # BUGGY!!!! cause that entities are in deleted list

@@ -805,47 +800,51 @@ class SyncEntitiesFactory:
                 selected_ids.append(entity["entityId"])

         sync_ids = [self.ft_project_id]
-        parents_queue = Queue()
-        children_queue = Queue()
-        for id in selected_ids:
+        parents_queue = collections.deque()
+        children_queue = collections.deque()
+        for selected_id in selected_ids:
             # skip if already filtered with ignore sync custom attribute
-            if id in self.filtered_ids:
+            if selected_id in self.filtered_ids:
                 continue

-            parents_queue.put(id)
-            children_queue.put(id)
+            parents_queue.append(selected_id)
+            children_queue.append(selected_id)

-        while not parents_queue.empty():
-            id = parents_queue.get()
+        while parents_queue:
+            ftrack_id = parents_queue.popleft()
             while True:
                 # Stops when parent is in sync_ids
-                if id in self.filtered_ids or id in sync_ids or id is None:
+                if (
+                    ftrack_id in self.filtered_ids
+                    or ftrack_id in sync_ids
+                    or ftrack_id is None
+                ):
                     break
-                sync_ids.append(id)
-                id = self.entities_dict[id]["parent_id"]
+                sync_ids.append(ftrack_id)
+                ftrack_id = self.entities_dict[ftrack_id]["parent_id"]

-        while not children_queue.empty():
-            parent_id = children_queue.get()
+        while children_queue:
+            parent_id = children_queue.popleft()
             for child_id in self.entities_dict[parent_id]["children"]:
                 if child_id in sync_ids or child_id in self.filtered_ids:
                     continue
                 sync_ids.append(child_id)
-                children_queue.put(child_id)
+                children_queue.append(child_id)

         # separate not selected and to process entities
         for key, value in self.entities_dict.items():
             if key not in sync_ids:
                 self.not_selected_ids.append(key)

-        for id in self.not_selected_ids:
+        for ftrack_id in self.not_selected_ids:
             # pop from entities
-            value = self.entities_dict.pop(id)
+            value = self.entities_dict.pop(ftrack_id)
             # remove entity from parent's children
             parent_id = value["parent_id"]
             if parent_id not in sync_ids:
                 continue

-            self.entities_dict[parent_id]["children"].remove(id)
+            self.entities_dict[parent_id]["children"].remove(ftrack_id)

     def _query_custom_attributes(self, session, conf_ids, entity_ids):
         output = []
@@ -1117,11 +1116,11 @@ class SyncEntitiesFactory:
             if value is not None:
                 project_values[key] = value

-        hier_down_queue = Queue()
-        hier_down_queue.put((project_values, top_id))
+        hier_down_queue = collections.deque()
+        hier_down_queue.append((project_values, top_id))

-        while not hier_down_queue.empty():
-            hier_values, parent_id = hier_down_queue.get()
+        while hier_down_queue:
+            hier_values, parent_id = hier_down_queue.popleft()
             for child_id in self.entities_dict[parent_id]["children"]:
                 _hier_values = copy.deepcopy(hier_values)
                 for key in attributes_by_key.keys():

@@ -1134,7 +1133,7 @@ class SyncEntitiesFactory:
                     _hier_values[key] = value

                 self.entities_dict[child_id]["hier_attrs"].update(_hier_values)
-                hier_down_queue.put((_hier_values, child_id))
+                hier_down_queue.append((_hier_values, child_id))

     def remove_from_archived(self, mongo_id):
         entity = self.avalon_archived_by_id.pop(mongo_id, None)

@@ -1303,15 +1302,15 @@ class SyncEntitiesFactory:
             create_ftrack_ids.append(self.ft_project_id)

         # make it go hierarchically
-        prepare_queue = Queue()
+        prepare_queue = collections.deque()

         for child_id in self.entities_dict[self.ft_project_id]["children"]:
-            prepare_queue.put(child_id)
+            prepare_queue.append(child_id)

-        while not prepare_queue.empty():
-            ftrack_id = prepare_queue.get()
+        while prepare_queue:
+            ftrack_id = prepare_queue.popleft()
             for child_id in self.entities_dict[ftrack_id]["children"]:
-                prepare_queue.put(child_id)
+                prepare_queue.append(child_id)

             entity_dict = self.entities_dict[ftrack_id]
             ent_path = self.get_ent_path(ftrack_id)

@@ -1426,25 +1425,25 @@ class SyncEntitiesFactory:
             parent_id = ent_dict["parent_id"]
             self.entities_dict[parent_id]["children"].remove(ftrack_id)

-        children_queue = Queue()
-        children_queue.put(ftrack_id)
-        while not children_queue.empty():
-            _ftrack_id = children_queue.get()
+        children_queue = collections.deque()
+        children_queue.append(ftrack_id)
+        while children_queue:
+            _ftrack_id = children_queue.popleft()
             entity_dict = self.entities_dict.pop(_ftrack_id, {"children": []})
             for child_id in entity_dict["children"]:
-                children_queue.put(child_id)
+                children_queue.append(child_id)

     def prepare_changes(self):
         self.log.debug("* Preparing changes for avalon/ftrack")
         hierarchy_changing_ids = []
         ignore_keys = collections.defaultdict(list)

-        update_queue = Queue()
+        update_queue = collections.deque()
         for ftrack_id in self.update_ftrack_ids:
-            update_queue.put(ftrack_id)
+            update_queue.append(ftrack_id)

-        while not update_queue.empty():
-            ftrack_id = update_queue.get()
+        while update_queue:
+            ftrack_id = update_queue.popleft()
             if ftrack_id == self.ft_project_id:
                 changes = self.prepare_project_changes()
                 if changes:

@@ -1720,7 +1719,7 @@ class SyncEntitiesFactory:
             new_entity_id = self.create_ftrack_ent_from_avalon_ent(
                 av_entity, parent_id
             )
-            update_queue.put(new_entity_id)
+            update_queue.append(new_entity_id)

             if new_entity_id:
                 ftrack_ent_dict["entity"]["parent_id"] = new_entity_id

@@ -2024,14 +2023,14 @@ class SyncEntitiesFactory:
         entity["custom_attributes"][CUST_ATTR_ID_KEY] = str(new_id)

     def _bubble_changeability(self, unchangeable_ids):
-        unchangeable_queue = Queue()
+        unchangeable_queue = collections.deque()
         for entity_id in unchangeable_ids:
-            unchangeable_queue.put((entity_id, False))
+            unchangeable_queue.append((entity_id, False))

         processed_parents_ids = []
         subsets_to_remove = []
-        while not unchangeable_queue.empty():
-            entity_id, child_is_archived = unchangeable_queue.get()
+        while unchangeable_queue:
+            entity_id, child_is_archived = unchangeable_queue.popleft()
             # skip if already processed
             if entity_id in processed_parents_ids:
                 continue

@@ -2067,7 +2066,9 @@ class SyncEntitiesFactory:
             parent_id = entity["data"]["visualParent"]
             if parent_id is None:
                 continue
-            unchangeable_queue.put((str(parent_id), child_is_archived))
+            unchangeable_queue.append(
+                (str(parent_id), child_is_archived)
+            )

         self._delete_subsets_without_asset(subsets_to_remove)

@@ -2150,16 +2151,18 @@ class SyncEntitiesFactory:
         self.dbcon.bulk_write(mongo_changes_bulk)

     def reload_parents(self, hierarchy_changing_ids):
-        parents_queue = Queue()
-        parents_queue.put((self.ft_project_id, [], False))
-        while not parents_queue.empty():
-            ftrack_id, parent_parents, changed = parents_queue.get()
+        parents_queue = collections.deque()
+        parents_queue.append((self.ft_project_id, [], False))
+        while parents_queue:
+            ftrack_id, parent_parents, changed = parents_queue.popleft()
             _parents = copy.deepcopy(parent_parents)
             if ftrack_id not in hierarchy_changing_ids and not changed:
                 if ftrack_id != self.ft_project_id:
                     _parents.append(self.entities_dict[ftrack_id]["name"])
                 for child_id in self.entities_dict[ftrack_id]["children"]:
-                    parents_queue.put((child_id, _parents, changed))
+                    parents_queue.append(
+                        (child_id, _parents, changed)
+                    )
                 continue

             changed = True

@@ -2170,7 +2173,9 @@ class SyncEntitiesFactory:

             _parents.append(self.entities_dict[ftrack_id]["name"])
             for child_id in self.entities_dict[ftrack_id]["children"]:
-                parents_queue.put((child_id, _parents, changed))
+                parents_queue.append(
+                    (child_id, _parents, changed)
+                )

             if ftrack_id in self.create_ftrack_ids:
                 mongo_id = self.ftrack_avalon_mapper[ftrack_id]
@@ -201,5 +201,9 @@ class AbstractProvider:
             msg = "Error in resolving local root from anatomy"
             log.error(msg)
             raise ValueError(msg)
+        except IndexError:
+            msg = "Path {} contains unfillable placeholder".format(path)
+            log.error(msg)
+            raise ValueError(msg)

         return path
[binary file not shown: new image, 3.2 KiB]
@@ -422,6 +422,12 @@ class SyncServerThread(threading.Thread):
         periodically.
         """
         while self.is_running:
+            if self.module.long_running_tasks:
+                task = self.module.long_running_tasks.pop()
+                log.info("starting long running")
+                await self.loop.run_in_executor(None, task["func"])
+                log.info("finished long running")
+                self.module.projects_processed.remove(task["project_name"])
             await asyncio.sleep(0.5)
         tasks = [task for task in asyncio.all_tasks() if
                  task is not asyncio.current_task()]
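A reduced, self-contained sketch of the pattern above: blocking callables pulled from a deque and run on the default thread pool so the asyncio loop stays responsive (names are illustrative).

```python
import asyncio
import collections
import time

long_running_tasks = collections.deque()
long_running_tasks.append(
    {"func": lambda: time.sleep(1), "project_name": "demo"}
)


async def sidecar_loop():
    loop = asyncio.get_running_loop()
    while long_running_tasks:
        task = long_running_tasks.popleft()
        # Off-loads the blocking call to a worker thread.
        await loop.run_in_executor(None, task["func"])
        await asyncio.sleep(0.5)


asyncio.run(sidecar_loop())
```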
@@ -4,6 +4,7 @@ from datetime import datetime
 import threading
 import platform
 import copy
+from collections import deque

 from avalon.api import AvalonMongoDB

@@ -120,6 +121,11 @@ class SyncServerModule(OpenPypeModule, ITrayModule):

         self._connection = None

+        # list of long blocking tasks
+        self.long_running_tasks = deque()
+        # projects that long tasks are running on
+        self.projects_processed = set()
+
     """ Start of Public API """
     def add_site(self, collection, representation_id, site_name=None,
                  force=False):
@@ -197,6 +203,105 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
             for repre in representations:
                 self.remove_site(collection, repre.get("_id"), site_name, True)

+    def create_validate_project_task(self, collection, site_name):
+        """Adds metadata about project files validation on a queue.
+
+        This process will loop through all representations and check if
+        their files actually exist on an active site.
+
+        This might be useful for edge cases when an artist is switching
+        between sites, a remote site is actually physically mounted and
+        the active site has same file urls etc.
+
+        Task will run on an asyncio loop, shouldn't be blocking.
+        """
+        task = {
+            "type": "validate",
+            "project_name": collection,
+            "func": lambda: self.validate_project(collection, site_name)
+        }
+        self.projects_processed.add(collection)
+        self.long_running_tasks.append(task)
+
+    def validate_project(self, collection, site_name, remove_missing=False):
+        """
+        Validate 'collection' of 'site_name' and its local files
+
+        If a file is present and not marked with 'site_name' in DB, DB is
+        updated with site name and file modified date.
+
+        Args:
+            collection (string): project name
+            site_name (string): active site name
+            remove_missing (bool): if True remove sites in DB if missing
+                physically
+        """
+        self.log.debug("Validation of {} for {} started".format(collection,
+                                                                site_name))
+        query = {
+            "type": "representation"
+        }
+
+        representations = list(
+            self.connection.database[collection].find(query))
+        if not representations:
+            self.log.debug("No repre found")
+            return
+
+        sites_added = 0
+        sites_removed = 0
+        for repre in representations:
+            repre_id = repre["_id"]
+            for repre_file in repre.get("files", []):
+                try:
+                    has_site = site_name in [site["name"]
+                                             for site in repre_file["sites"]]
+                except TypeError:
+                    self.log.debug("Structure error in {}".format(repre_id))
+                    continue
+
+                if has_site and not remove_missing:
+                    continue
+
+                file_path = repre_file.get("path", "")
+                local_file_path = self.get_local_file_path(collection,
+                                                           site_name,
+                                                           file_path)
+
+                if local_file_path and os.path.exists(local_file_path):
+                    self.log.debug("Adding site {} for {}".format(site_name,
+                                                                  repre_id))
+                    if not has_site:
+                        query = {
+                            "_id": repre_id
+                        }
+                        created_dt = datetime.fromtimestamp(
+                            os.path.getmtime(local_file_path))
+                        elem = {"name": site_name,
+                                "created_dt": created_dt}
+                        self._add_site(collection, query, [repre], elem,
+                                       site_name=site_name,
+                                       file_id=repre_file["_id"])
+                        sites_added += 1
+                else:
+                    if has_site and remove_missing:
+                        self.log.debug("Removing site {} for {}".
+                                       format(site_name, repre_id))
+                        self.reset_provider_for_file(collection,
+                                                     repre_id,
+                                                     file_id=repre_file["_id"],
+                                                     remove=True)
+                        sites_removed += 1
+
+            if sites_added % 100 == 0:
+                self.log.debug("Sites added {}".format(sites_added))
+
+        self.log.debug("Validation of {} for {} ended".format(collection,
+                                                              site_name))
+        self.log.info("Sites added {}, sites removed {}".format(sites_added,
+                                                                sites_removed))
+
     def pause_representation(self, collection, representation_id, site_name):
         """
         Sets 'representation_id' as paused, eg. no syncing should be
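A short usage sketch for the validation added above; `sync_server` stands for an initialized `SyncServerModule` instance and the names are illustrative.

```python
# Queue the validation so the sync server thread picks it up:
sync_server.create_validate_project_task("demo_project", "studio")

# Or run it directly (blocking), also removing DB site records whose
# files are physically missing:
# sync_server.validate_project("demo_project", "studio", remove_missing=True)
```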
@@ -719,19 +824,8 @@ class SyncServerModule(OpenPypeModule, ITrayModule):

         self.lock = threading.Lock()

-        try:
-            self.sync_server_thread = SyncServerThread(self)
-        except ValueError:
-            log.info("No system setting for sync. Not syncing.", exc_info=True)
-            self.enabled = False
-        except KeyError:
-            log.info((
-                "There are not set presets for SyncServer OR "
-                "Credentials provided are invalid, "
-                "no syncing possible").
-                format(str(self.sync_project_settings)), exc_info=True)
-            self.enabled = False
+        self.sync_server_thread = SyncServerThread(self)

     def tray_start(self):
         """
@@ -1359,7 +1453,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
         found = False
         for repre_file in representation.pop().get("files"):
             for site in repre_file.get("sites"):
-                if site["name"] == site_name:
+                if site.get("name") == site_name:
                     found = True
                     break
         if not found:
@@ -1410,13 +1504,20 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
         self._update_site(collection, query, update, arr_filter)

     def _add_site(self, collection, query, representation, elem, site_name,
-                  force=False):
+                  force=False, file_id=None):
         """
         Adds 'site_name' to 'representation' on 'collection'

         Args:
             representation (list of 1 dict)
+            file_id (ObjectId)

         Use 'force' to remove existing or raises ValueError
         """
         for repre_file in representation.pop().get("files"):
+            if file_id and file_id != repre_file["_id"]:
+                continue
+
             for site in repre_file.get("sites"):
                 if site["name"] == site_name:
                     if force:

@@ -1429,11 +1530,19 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
                         log.info(msg)
                         raise ValueError(msg)

-        update = {
-            "$push": {"files.$[].sites": elem}
-        }
+        if not file_id:
+            update = {
+                "$push": {"files.$[].sites": elem}
+            }

-        arr_filter = []
+            arr_filter = []
+        else:
+            update = {
+                "$push": {"files.$[f].sites": elem}
+            }
+            arr_filter = [
+                {'f._id': file_id}
+            ]

         self._update_site(collection, query, update, arr_filter)
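Background for the `arr_filter` branch: pymongo's filtered positional operator. A minimal sketch with placeholder values:

```python
from bson.objectid import ObjectId
from pymongo import MongoClient

coll = MongoClient("mongodb://localhost:27017")["avalon"]["demo_project"]
file_id = ObjectId("614c1e5b2d3f4a5b6c7d8e9f")  # placeholder
elem = {"name": "studio"}

# "$[f]" touches only array elements matched by the filter below, while
# "$[]" would push the site into every file of the representation.
coll.update_one(
    {"type": "representation"},
    {"$push": {"files.$[f].sites": elem}},
    array_filters=[{"f._id": file_id}],
)
```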
@@ -1508,7 +1617,24 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
         return int(ld)

     def show_widget(self):
-        """Show dialog to enter credentials"""
+        """Show dialog for Sync Queue"""
+        no_errors = False
+        try:
+            from .tray.app import SyncServerWindow
+            self.widget = SyncServerWindow(self)
+            no_errors = True
+        except ValueError:
+            log.info("No system setting for sync. Not syncing.", exc_info=True)
+        except KeyError:
+            log.info((
+                "There are not set presets for SyncServer OR "
+                "Credentials provided are invalid, "
+                "no syncing possible").
+                format(str(self.sync_project_settings)), exc_info=True)
+        except Exception:
+            log.error("Uncaught exception during start of SyncServer",
+                      exc_info=True)
+        self.enabled = no_errors
         self.widget.show()

     def _get_success_dict(self, new_file_id):
@@ -124,7 +124,8 @@ class _SyncRepresentationModel(QtCore.QAbstractTableModel):

         if not representations:
             self.query = self.get_query(load_records)
-            representations = self.dbcon.aggregate(self.query)
+            representations = self.dbcon.aggregate(pipeline=self.query,
+                                                   allowDiskUse=True)

         self.add_page_records(self.active_site, self.remote_site,
                               representations)

@@ -159,7 +160,8 @@ class _SyncRepresentationModel(QtCore.QAbstractTableModel):
         items_to_fetch = min(self._total_records - self._rec_loaded,
                              self.PAGE_SIZE)
         self.query = self.get_query(self._rec_loaded)
-        representations = self.dbcon.aggregate(self.query)
+        representations = self.dbcon.aggregate(pipeline=self.query,
+                                               allowDiskUse=True)
         self.beginInsertRows(index,
                              self._rec_loaded,
                              self._rec_loaded + items_to_fetch - 1)
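The `allowDiskUse=True` flag matters because large `$sort` stages can exceed MongoDB's in-memory limit and would otherwise fail. A minimal pymongo sketch with a placeholder pipeline:

```python
from pymongo import MongoClient

coll = MongoClient("mongodb://localhost:27017")["avalon"]["demo_project"]
pipeline = [
    {"$match": {"type": "representation"}},
    {"$sort": {"updated_dt": -1}},  # may spill to disk on large projects
]
for repre in coll.aggregate(pipeline=pipeline, allowDiskUse=True):
    print(repre["_id"])
```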
@@ -192,16 +194,16 @@ class _SyncRepresentationModel(QtCore.QAbstractTableModel):
         else:
             order = -1

-        backup_sort = dict(self.sort)
+        backup_sort = dict(self.sort_criteria)

-        self.sort = {self.SORT_BY_COLUMN[index]: order}  # reset
+        self.sort_criteria = {self.SORT_BY_COLUMN[index]: order}  # reset
         # add last one
         for key, val in backup_sort.items():
             if key != '_id' and key != self.SORT_BY_COLUMN[index]:
-                self.sort[key] = val
+                self.sort_criteria[key] = val
                 break
         # add default one
-        self.sort['_id'] = 1
+        self.sort_criteria['_id'] = 1

         self.query = self.get_query()
         # import json

@@ -209,7 +211,8 @@ class _SyncRepresentationModel(QtCore.QAbstractTableModel):
         #     replace('False', 'false').\
         #     replace('True', 'true').replace('None', 'null'))

-        representations = self.dbcon.aggregate(self.query)
+        representations = self.dbcon.aggregate(pipeline=self.query,
+                                               allowDiskUse=True)
         self.refresh(representations)

     def set_word_filter(self, word_filter):

@@ -440,12 +443,13 @@ class SyncRepresentationSummaryModel(_SyncRepresentationModel):
         self.active_site = self.sync_server.get_active_site(self.project)
         self.remote_site = self.sync_server.get_remote_site(self.project)

-        self.sort = self.DEFAULT_SORT
+        self.sort_criteria = self.DEFAULT_SORT

         self.query = self.get_query()
         self.default_query = list(self.get_query())

-        representations = self.dbcon.aggregate(self.query)
+        representations = self.dbcon.aggregate(pipeline=self.query,
+                                               allowDiskUse=True)
         self.refresh(representations)

         self.timer = QtCore.QTimer()

@@ -732,7 +736,7 @@ class SyncRepresentationSummaryModel(_SyncRepresentationModel):
         )

         aggr.extend(
-            [{"$sort": self.sort},
+            [{"$sort": self.sort_criteria},
             {
                 '$facet': {
                     'paginatedResults': [{'$skip': self._rec_loaded},

@@ -970,10 +974,11 @@ class SyncRepresentationDetailModel(_SyncRepresentationModel):
         self.active_site = self.sync_server.get_active_site(self.project)
         self.remote_site = self.sync_server.get_remote_site(self.project)

-        self.sort = self.DEFAULT_SORT
+        self.sort_criteria = self.DEFAULT_SORT

         self.query = self.get_query()
-        representations = self.dbcon.aggregate(self.query)
+        representations = self.dbcon.aggregate(pipeline=self.query,
+                                               allowDiskUse=True)
         self.refresh(representations)

         self.timer = QtCore.QTimer()

@@ -1235,7 +1240,7 @@ class SyncRepresentationDetailModel(_SyncRepresentationModel):
         print(self.column_filtering)

         aggr.extend([
-            {"$sort": self.sort},
+            {"$sort": self.sort_criteria},
             {
                 '$facet': {
                     'paginatedResults': [{'$skip': self._rec_loaded},
@@ -32,6 +32,8 @@ class SyncProjectListWidget(QtWidgets.QWidget):
     project_changed = QtCore.Signal()
     message_generated = QtCore.Signal(str)

+    refresh_msec = 10000
+
     def __init__(self, sync_server, parent):
         super(SyncProjectListWidget, self).__init__(parent)
         self.setObjectName("ProjectListWidget")

@@ -56,8 +58,8 @@ class SyncProjectListWidget(QtWidgets.QWidget):
         layout.addWidget(project_list, 1)

         project_list.customContextMenuRequested.connect(self._on_context_menu)
-        project_list.selectionModel().currentChanged.connect(
-            self._on_index_change
+        project_list.selectionModel().selectionChanged.connect(
+            self._on_selection_changed
         )

         self.project_model = project_model

@@ -69,17 +71,43 @@ class SyncProjectListWidget(QtWidgets.QWidget):
         self.remote_site = None
         self.icons = {}

-    def _on_index_change(self, new_idx, _old_idx):
-        project_name = new_idx.data(QtCore.Qt.DisplayRole)
+        self._selection_changed = False
+        self._model_reset = False
+
+        timer = QtCore.QTimer()
+        timer.setInterval(self.refresh_msec)
+        timer.timeout.connect(self.refresh)
+        timer.start()
+
+        self.timer = timer
+
+    def _on_selection_changed(self, new_selection, _old_selection):
+        # block involuntary selection changes
+        if self._selection_changed or self._model_reset:
+            return
+
+        indexes = new_selection.indexes()
+        if not indexes:
+            return
+
+        project_name = indexes[0].data(QtCore.Qt.DisplayRole)

         if self.current_project == project_name:
             return
+        self._selection_changed = True
         self.current_project = project_name
         self.project_changed.emit()
         self.refresh()
+        self._selection_changed = False

     def refresh(self):
         selected_index = None
         model = self.project_model
+        self._model_reset = True
         model.clear()
+        self._model_reset = False

         project_name = None
+        selected_item = None
         for project_name in self.sync_server.sync_project_settings.\
                 keys():
             if self.sync_server.is_paused() or \

@@ -88,20 +116,38 @@ class SyncProjectListWidget(QtWidgets.QWidget):
             else:
                 icon = self._get_icon("synced")

-            model.appendRow(QtGui.QStandardItem(icon, project_name))
+            if project_name in self.sync_server.projects_processed:
+                icon = self._get_icon("refresh")
+
+            item = QtGui.QStandardItem(icon, project_name)
+            model.appendRow(item)
+
+            if self.current_project == project_name:
+                selected_item = item
+
+        if selected_item:
+            selected_index = model.indexFromItem(selected_item)

         if len(self.sync_server.sync_project_settings.keys()) == 0:
             model.appendRow(QtGui.QStandardItem(lib.DUMMY_PROJECT))

-        self.current_project = self.project_list.currentIndex().data(
-            QtCore.Qt.DisplayRole
-        )
+        if not self.current_project:
+            self.current_project = model.item(0).data(QtCore.Qt.DisplayRole)

-        if project_name:
-            self.local_site = self.sync_server.get_active_site(project_name)
-            self.remote_site = self.sync_server.get_remote_site(project_name)
+        self.project_model = model
+
+        if selected_index and \
+                selected_index.isValid() and \
+                not self._selection_changed:
+            mode = QtCore.QItemSelectionModel.Select | \
+                QtCore.QItemSelectionModel.Rows
+            self.project_list.selectionModel().select(selected_index, mode)
+
+        if self.current_project:
+            self.local_site = self.sync_server.get_active_site(
+                self.current_project)
+            self.remote_site = self.sync_server.get_remote_site(
+                self.current_project)

     def _can_edit(self):
         """Returns true if some site is user local site, eg. could edit"""

@@ -143,6 +189,11 @@ class SyncProjectListWidget(QtWidgets.QWidget):
             actions_mapping[action] = self._clear_project
             menu.addAction(action)

+        if self.project_name not in self.sync_server.projects_processed:
+            action = QtWidgets.QAction("Validate files on active site")
+            actions_mapping[action] = self._validate_site
+            menu.addAction(action)
+
         result = menu.exec_(QtGui.QCursor.pos())
         if result:
             to_run = actions_mapping[result]

@@ -167,6 +218,13 @@ class SyncProjectListWidget(QtWidgets.QWidget):
             self.project_name = None
         self.refresh()

+    def _validate_site(self):
+        if self.project_name:
+            self.sync_server.create_validate_project_task(self.project_name,
+                                                          self.local_site)
+            self.project_name = None
+            self.refresh()
+

 class _SyncRepresentationWidget(QtWidgets.QWidget):
     """
openpype/pipeline/__init__.py (new file, 28 lines)
@@ -0,0 +1,28 @@
from .lib import attribute_definitions

from .create import (
    BaseCreator,
    Creator,
    AutoCreator,
    CreatedInstance
)

from .publish import (
    PublishValidationError,
    KnownPublishError,
    OpenPypePyblishPluginMixin
)


__all__ = (
    "attribute_definitions",

    "BaseCreator",
    "Creator",
    "AutoCreator",
    "CreatedInstance",

    "PublishValidationError",
    "KnownPublishError",
    "OpenPypePyblishPluginMixin"
)
openpype/pipeline/create/README.md (new file, 78 lines)
@@ -0,0 +1,78 @@
# Create
Creation is the process defining what and how will be published. It may work in a different way based on host implementation.

## CreateContext
Entry point of creation. All data and metadata are handled through the create context. The context holds all global data and instances. It is responsible for loading of plugins (create, publish), triggering creator methods, validation of the host implementation and emitting changes to creators and the host.

It discovers Creator plugins to be able to create new instances and convert existing instances. Creators may have defined attributes that are specific for their instances. Attribute definitions can enhance the behavior of an instance during publishing.

Publish plugins are loaded because they can also define attribute definitions; these are less family specific. To be able to define attributes a publish plugin must inherit from `OpenPypePyblishPluginMixin` and must override the `get_attribute_defs` class method, which must return a list of attribute definitions. Values of publish plugin definitions are stored per plugin name under `publish_attributes`. A plugin can also override the `convert_attribute_values` class method, which gives the ability to modify values on an instance before they are used in `CreatedInstance`. Method `convert_attribute_values` can also be used without `get_attribute_defs` to modify values when changing compatibility (remove metadata from an instance because it became irrelevant). A sketch of such a plugin follows this section.

Possible attribute definitions can be found in `openpype/pipeline/lib/attribute_definitions.py`.

Except for creating and removing instances, changes are not automatically propagated to the host context (scene/workfile/...). To propagate changes call `save_changes`, which triggers an update of all instances in the context using the Creators implementation.
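A minimal sketch of such a publish plugin, assuming a `BoolDef` attribute definition exists in `attribute_definitions` (check the module above for the real class names and signatures):

```python
import pyblish.api

from openpype.pipeline import OpenPypePyblishPluginMixin
from openpype.pipeline.lib import attribute_definitions


class ValidateExample(pyblish.api.InstancePlugin, OpenPypePyblishPluginMixin):
    label = "Validate Example"
    order = pyblish.api.ValidatorOrder

    @classmethod
    def get_attribute_defs(cls):
        # Values entered by the artist end up in instance data under
        # `publish_attributes["ValidateExample"]`.
        return [
            attribute_definitions.BoolDef(
                "strict", default=True, label="Strict validation"
            )
        ]

    def process(self, instance):
        values = instance.data["publish_attributes"]["ValidateExample"]
        self.log.info("strict: {}".format(values["strict"]))
```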
## CreatedInstance
The product of creation is an "instance" which holds the basic data defining it. Core data are `creator_identifier`, `family` and `subset`. Other data can be keys used to fill the subset name or metadata modifying the publishing process of the instance (described later). All instances have `id`, which holds the constant `pyblish.avalon.instance`, and `uuid`, which is the identifier of the instance.
Family tells how the instance should be processed and subset what name the published item will have.
- There are cases when subset is not fully filled during creation and may change during publishing. That is in most cases caused because the instance is related to another instance or the instance data do not represent the final product.

`CreatedInstance` is the entity holding the data which are stored and used.

```python
{
    # Immutable data after creation
    ## Identifier that this data represents an instance for publishing (automatically assigned)
    "id": "pyblish.avalon.instance",
    ## Identifier of this specific instance (automatically assigned)
    "uuid": <uuid4>,
    ## Instance family (used from Creator)
    "family": <family>,

    # Mutable data
    ## Subset name based on subset name template - may change over time (on context change)
    "subset": <subset>,
    ## Instance is active and will be published
    "active": True,
    ## Version of instance
    "version": 1,
    # Identifier of creator (is unique)
    "creator_identifier": "",
    ## Creator specific attributes (defined by Creator)
    "creator_attributes": {...},
    ## Publish plugin specific attributes (defined by Publish plugin)
    "publish_attributes": {
        # Attribute values are stored by publish plugin name
        # - Duplicated plugin names can cause clashes!
        <Plugin name>: {...},
        ...
    },
    ## Additional data related to instance (`asset`, `task`, etc.)
    ...
}
```
|
||||
|
||||
## Creator

To be able to create, update, remove or collect existing instances there must be a defined creator. A creator must have a unique identifier and represents a family. There can be multiple Creators for a single family, so it is advised that the identifier of a creator contains the family.

A Creator has abstract methods to handle instances. For new instance creation `create` is used, which should create metadata in the host context and add the new instance object to the `CreateContext`. To collect existing instances `collect_instances` is used, which should find all existing instances related to the creator and add them to the `CreateContext`. To update instance data `update_instances` is used, which is called from `CreateContext` on `save_changes`. To remove instances use `remove_instances`, which should remove metadata from the host context and remove the instance from the `CreateContext`.

A Creator has access to the `CreateContext` which created the creator object. All new or removed instances must be reported to the context. To do so, use the methods `_add_instance_to_context` and `_remove_instance_from_context`, passing the `CreatedInstance`. They should be called from `create` if a new instance was created, and from `remove_instances` if an instance was removed.

Creators don't have a strictly defined way of handling instances, but it is good practice to define a host specific approach. It is not strict because there are cases when a host implementation simply can't handle all requirements of all creators. A minimal creator sketch is shown below.
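A minimal sketch of a custom creator; `host_metadata` is a hypothetical helper module for reading/writing instance metadata in the workfile, and the shape of `update_list` items is an assumption:

```python
from openpype.pipeline.create import Creator, CreatedInstance

import host_metadata  # assumption: host specific metadata storage helpers


class CreateRender(Creator):
    identifier = "io.example.create.render"  # contains the family, as advised
    family = "render"
    label = "Render"

    def create(self, subset_name, instance_data, options=None):
        # Store metadata to the workfile and register the instance.
        instance = CreatedInstance(self.family, subset_name, instance_data)
        host_metadata.write(instance.data_to_store())
        self._add_instance_to_context(instance)

    def collect_instances(self, attr_plugins=None):
        for data in host_metadata.read_all(self.identifier):
            instance = CreatedInstance(self.family, data["subset"], data)
            self._add_instance_to_context(instance)

    def update_instances(self, update_list):
        # Assumption: each item carries the instance whose data changed.
        for instance in update_list:
            host_metadata.write(instance.data_to_store())

    def remove_instances(self, instances):
        for instance in instances:
            host_metadata.remove(instance.data["uuid"])
            self._remove_instance_from_context(instance)
```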
### AutoCreator

Auto-creators are automatically executed when `CreateContext` is reset. They can be used to create instances that should always be available and may not require the artist's manual creation (e.g. `workfile`). An auto-creator should not create duplicated instances and should validate existence before creating a new one. The `remove_instances` method is implemented to do nothing.
## Host

A host implementation must provide global context metadata handler functions: one to get current context data and a second to update them. Currently only context publish plugin attribute values are stored to context data.

### Get global context data (`get_context_data`)

There are data that are not specific to any instance but are specific to the whole context (e.g. context plugin values).

### Update global context data (`update_context_data`)

Update global context data.

### Optional title of context

It is recommended to implement a `get_context_title` function. The string returned from this function will be shown in the UI as the context the artist is working in. A sketch of these host-level functions follows.
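A sketch of the three host functions, assuming the host stores context data as JSON in a scene attribute; `host_api` and its calls are hypothetical, and the `update_context_data(data, changes)` signature is an assumption:

```python
import json

import host_api  # assumption: the host's own scripting API

_CONTEXT_KEY = "openpype_context"  # illustrative storage key


def get_context_data():
    # Return previously stored context data (context publish plugin values).
    raw = host_api.read_scene_attribute(_CONTEXT_KEY)  # hypothetical call
    return json.loads(raw) if raw else {}


def update_context_data(data, changes):
    # Persist the whole data structure; 'changes' describes what changed.
    host_api.write_scene_attribute(_CONTEXT_KEY, json.dumps(data))


def get_context_title():
    # Shown in the publisher UI, e.g. "MyProject / ep01 / compositing".
    return "{} / {} / {}".format(
        host_api.project_name(), host_api.asset_name(), host_api.task_name()
    )
```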
openpype/pipeline/create/__init__.py (new file, 24 lines)
```python
from .creator_plugins import (
    CreatorError,

    BaseCreator,
    Creator,
    AutoCreator
)

from .context import (
    CreatedInstance,
    CreateContext
)


__all__ = (
    "CreatorError",

    "BaseCreator",
    "Creator",
    "AutoCreator",

    "CreatedInstance",
    "CreateContext"
)
```
openpype/pipeline/create/context.py (new file, 1142 lines)
File diff suppressed because it is too large.

openpype/pipeline/create/creator_plugins.py (new file, 269 lines)
```python
import copy
import logging

from abc import (
    ABCMeta,
    abstractmethod,
    abstractproperty
)
import six

from openpype.lib import get_subset_name_with_asset_doc


class CreatorError(Exception):
    """Should be raised when a creator failed because of a known issue.

    Message of the error should be user readable.
    """

    def __init__(self, message):
        super(CreatorError, self).__init__(message)


@six.add_metaclass(ABCMeta)
class BaseCreator:
    """Plugin that creates and modifies instance data before publishing.

    We should maybe find a better name, as creation is only one part of its
    logic, and to avoid expectations that it is the same as
    `avalon.api.Creator`.

    A single object should be used for multiple instances instead of a
    single instance per creator object. Do not store temp or mid-process
    data on `self` if it is not plugin specific.
    """

    # Label shown in UI
    label = None

    # Variable to store logger
    _log = None

    # Creator is enabled (probably does not have a reason to exist?)
    enabled = True

    # Creator (and family) icon
    # - may not be used if `get_icon` is reimplemented
    icon = None

    def __init__(
        self, create_context, system_settings, project_settings, headless=False
    ):
        # Reference to CreateContext
        self.create_context = create_context

        # Creator is running in headless mode (without UI elements)
        # - if UI is used inside processing, this attribute should be checked
        self.headless = headless

    @abstractproperty
    def identifier(self):
        """Identifier of creator (must be unique)."""
        pass

    @abstractproperty
    def family(self):
        """Family that the plugin represents."""
        pass

    @property
    def log(self):
        if self._log is None:
            self._log = logging.getLogger(self.__class__.__name__)
        return self._log

    def _add_instance_to_context(self, instance):
        """Helper method to add an instance to the create context."""
        self.create_context.creator_adds_instance(instance)

    def _remove_instance_from_context(self, instance):
        """Helper method to remove an instance from the create context."""
        self.create_context.creator_removed_instance(instance)

    @abstractmethod
    def create(self, options=None):
        """Create a new instance.

        Replacement of the `process` method from the avalon implementation.
        - must expect all data that were passed to init in the previous
          implementation
        """
        pass

    @abstractmethod
    def collect_instances(self, attr_plugins=None):
        pass

    @abstractmethod
    def update_instances(self, update_list):
        pass

    @abstractmethod
    def remove_instances(self, instances):
        """Method called on instance removal.

        Can also remove instance metadata from context but should return
        'True' if it did so.

        Args:
            instances(list<CreatedInstance>): Instance objects which should
                be removed.
        """
        pass

    def get_icon(self):
        """Icon of creator (family).

        Can return a path to an image file or an awesome-font icon name.
        """
        return self.icon

    def get_dynamic_data(
        self, variant, task_name, asset_doc, project_name, host_name
    ):
        """Dynamic data for subset name filling.

        These may be created dynamically based on the current context of
        the workfile.
        """
        return {}

    def get_subset_name(
        self, variant, task_name, asset_doc, project_name, host_name=None
    ):
        """Return subset name for the passed context.

        CHANGES:
        Argument `asset_id` was replaced with `asset_doc`. It is easier to
        query the asset beforehand. In some cases this method would be
        called multiple times and it would be too slow to query the asset
        document on each callback.

        NOTE:
        The asset document is not used yet but is required if we would like
        to use the task type in subset templates.

        Args:
            variant(str): Subset name variant. In most cases user input.
            task_name(str): For which task the subset is created.
            asset_doc(dict): Asset document for which the subset is created.
            project_name(str): Project name.
            host_name(str): Which host creates the subset.
        """
        dynamic_data = self.get_dynamic_data(
            variant, task_name, asset_doc, project_name, host_name
        )

        return get_subset_name_with_asset_doc(
            self.family,
            variant,
            task_name,
            asset_doc,
            project_name,
            host_name,
            dynamic_data=dynamic_data
        )

    def get_attribute_defs(self):
        """Plugin attribute definitions.

        Attribute definitions of the plugin that hold data about the
        created instance; values are stored to metadata for future usage
        and for publishing purposes.

        NOTE:
        A convert method should be implemented which takes care of updating
        keys/values when plugin attributes change.

        Returns:
            list<AbtractAttrDef>: Attribute definitions that can be tweaked
                for the created instance.
        """
        return []


class Creator(BaseCreator):
    """Creator that has more information for the artist to show in UI.

    Creation requires a prepared subset name and instance data.
    """

    # GUI purposes
    # - default_variants may not be used if `get_default_variants`
    #   is overridden
    default_variants = []

    # Short description of family
    # - may not be used if `get_description` is overridden
    description = None

    # Detailed description of family for artists
    # - may not be used if `get_detail_description` is overridden
    detailed_description = None

    @abstractmethod
    def create(self, subset_name, instance_data, options=None):
        """Create a new instance and store it.

        Ideally should be stored to the workfile using the host
        implementation.

        Args:
            subset_name(str): Subset name of the created instance.
            instance_data(dict): Base data of the instance.
        """

        # instance = CreatedInstance(
        #     self.family, subset_name, instance_data
        # )
        pass

    def get_description(self):
        """Short description of family and plugin.

        Returns:
            str: Short description of family.
        """
        return self.description

    def get_detail_description(self):
        """Description of family and plugin.

        Can be detailed with markdown or html tags.

        Returns:
            str: Detailed description of family for artist.
        """
        return self.detailed_description

    def get_default_variants(self):
        """Default variant values for UI tooltips.

        Replacement of the `defaults` attribute. Using a method gives the
        ability to have some "logic" other than attribute values.

        By default returns the `default_variants` value.

        Returns:
            list<str>: Suggested variants for user input.
        """
        return copy.deepcopy(self.default_variants)

    def get_default_variant(self):
        """Default variant value that will be used to prefill variant input.

        This is for user input and the value may not be contained in the
        result of `get_default_variants`.

        Can return `None`. In that case the first element from
        `get_default_variants` should be used.
        """

        return None


class AutoCreator(BaseCreator):
    """Creator which is automatically triggered without user interaction.

    Can be used e.g. for `workfile`.
    """
    def remove_instances(self, instances):
        """Skip removal."""
        pass
```
openpype/pipeline/lib/__init__.py (new file, 18 lines)
```python
from .attribute_definitions import (
    AbtractAttrDef,
    UnknownDef,
    NumberDef,
    TextDef,
    EnumDef,
    BoolDef
)


__all__ = (
    "AbtractAttrDef",
    "UnknownDef",
    "NumberDef",
    "TextDef",
    "EnumDef",
    "BoolDef"
)
```
openpype/pipeline/lib/attribute_definitions.py (new file, 263 lines)
```python
import re
import collections
import uuid
from abc import ABCMeta, abstractmethod
import six


class AbstractAttrDefMeta(ABCMeta):
    """Meta class to validate existence of 'key' attribute.

    Each object of `AbtractAttrDef` must have a defined 'key' attribute.
    """
    def __call__(self, *args, **kwargs):
        obj = super(AbstractAttrDefMeta, self).__call__(*args, **kwargs)
        init_class = getattr(obj, "__init__class__", None)
        if init_class is not AbtractAttrDef:
            raise TypeError("{} super was not called in __init__.".format(
                type(obj)
            ))
        return obj


@six.add_metaclass(AbstractAttrDefMeta)
class AbtractAttrDef:
    """Abstraction of attribute definition.

    Each attribute definition must have implemented validation and
    conversion method.

    An attribute definition should have the ability to return a "default"
    value. That can be based on data passed into `__init__`, so it is not
    abstracted to an attribute.

    QUESTION:
    How to force to set `key` attribute?

    Args:
        key(str): Under which key the attribute value will be stored.
        label(str): Attribute label.
        tooltip(str): Attribute tooltip.
    """

    def __init__(self, key, default, label=None, tooltip=None):
        self.key = key
        self.label = label
        self.tooltip = tooltip
        self.default = default
        self._id = uuid.uuid4()

        self.__init__class__ = AbtractAttrDef

    @property
    def id(self):
        return self._id

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.key == other.key

    @abstractmethod
    def convert_value(self, value):
        """Convert value to a valid one.

        Convert the passed value to a valid type. Use default if the value
        can't be converted.
        """
        pass


class UnknownDef(AbtractAttrDef):
    """Definition is not known because the definition is not available."""
    def __init__(self, key, default=None, **kwargs):
        kwargs["default"] = default
        super(UnknownDef, self).__init__(key, **kwargs)

    def convert_value(self, value):
        return value


class NumberDef(AbtractAttrDef):
    """Number definition.

    A number can have a defined minimum/maximum value and decimal points.
    Value is an integer if decimals are 0.

    Args:
        minimum(int, float): Minimum possible value.
        maximum(int, float): Maximum possible value.
        decimals(int): Maximum decimal points of value.
        default(int, float): Default value for conversion.
    """

    def __init__(
        self, key, minimum=None, maximum=None, decimals=None, default=None,
        **kwargs
    ):
        minimum = 0 if minimum is None else minimum
        maximum = 999999 if maximum is None else maximum
        # Swap min/max when passed in opposite order
        if minimum > maximum:
            maximum, minimum = minimum, maximum

        if default is None:
            default = 0

        elif not isinstance(default, (int, float)):
            raise TypeError((
                "'default' argument must be 'int' or 'float', not '{}'"
            ).format(type(default)))

        # Clamp default value by min/max values
        if default < minimum:
            default = minimum

        elif default > maximum:
            default = maximum

        super(NumberDef, self).__init__(key, default=default, **kwargs)

        self.minimum = minimum
        self.maximum = maximum
        self.decimals = 0 if decimals is None else decimals

    def __eq__(self, other):
        if not super(NumberDef, self).__eq__(other):
            return False

        return (
            self.decimals == other.decimals
            and self.minimum == other.minimum
            and self.maximum == other.maximum
        )

    def convert_value(self, value):
        if isinstance(value, six.string_types):
            try:
                value = float(value)
            except Exception:
                pass

        if not isinstance(value, (int, float)):
            return self.default

        if self.decimals == 0:
            return int(value)
        return round(float(value), self.decimals)


class TextDef(AbtractAttrDef):
    """Text definition.

    Text can have a multiline option so endline characters are allowed,
    regex validation can be applied, and a placeholder for UI purposes and
    a default value can be set.

    Regex validation is not part of the attribute implementation.

    Args:
        multiline(bool): Text has single or multiline support.
        regex(str, re.Pattern): Regex validation.
        placeholder(str): UI placeholder for attribute.
        default(str, None): Default value. Empty string used when not
            defined.
    """
    def __init__(
        self, key, multiline=None, regex=None, placeholder=None, default=None,
        **kwargs
    ):
        if default is None:
            default = ""

        elif not isinstance(default, six.string_types):
            raise TypeError((
                "'default' argument must be a {}, not '{}'"
            ).format(six.string_types, type(default)))

        super(TextDef, self).__init__(key, default=default, **kwargs)

        if multiline is None:
            multiline = False

        if isinstance(regex, six.string_types):
            regex = re.compile(regex)

        self.multiline = multiline
        self.placeholder = placeholder
        self.regex = regex

    def __eq__(self, other):
        if not super(TextDef, self).__eq__(other):
            return False

        return (
            self.multiline == other.multiline
            and self.regex == other.regex
        )

    def convert_value(self, value):
        if isinstance(value, six.string_types):
            return value
        return self.default


class EnumDef(AbtractAttrDef):
    """Enumeration of a single item from items.

    Args:
        items: Items definition that can be converted to
            `collections.OrderedDict`. Dictionary represents a
            {value: label} relation.
        default: Default value. Must be one key (value) from passed items.
    """

    def __init__(self, key, items, default=None, **kwargs):
        if not items:
            raise ValueError((
                "Empty 'items' value. {} must have"
                " defined values on initialization."
            ).format(self.__class__.__name__))

        items = collections.OrderedDict(items)
        if default not in items:
            for _key in items.keys():
                default = _key
                break

        super(EnumDef, self).__init__(key, default=default, **kwargs)

        self.items = items

    def __eq__(self, other):
        if not super(EnumDef, self).__eq__(other):
            return False

        if set(self.items.keys()) != set(other.items.keys()):
            return False

        for key, label in self.items.items():
            if other.items[key] != label:
                return False
        return True

    def convert_value(self, value):
        if value in self.items:
            return value
        return self.default


class BoolDef(AbtractAttrDef):
    """Boolean representation.

    Args:
        default(bool): Default value. Set to `False` if not defined.
    """

    def __init__(self, key, default=None, **kwargs):
        if default is None:
            default = False
        super(BoolDef, self).__init__(key, default=default, **kwargs)

    def convert_value(self, value):
        if isinstance(value, bool):
            return value
        return self.default
```
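A short sketch of how these definitions might be constructed and how `convert_value` behaves; the keys, ranges and labels are illustrative only:

```python
from openpype.pipeline.lib import NumberDef, TextDef, EnumDef, BoolDef

# Hypothetical attribute definitions a plugin could return.
defs = [
    BoolDef("review", default=True, label="Review"),
    NumberDef("fps", minimum=1, maximum=240, default=25, label="FPS"),
    TextDef("note", multiline=True, placeholder="Artist note", label="Note"),
    EnumDef("quality", {"low": "Low", "high": "High"}, default="high"),
]

# `convert_value` coerces stored values back to a valid type.
fps_def = defs[1]
print(fps_def.convert_value("25"))    # -> 25 (int, because decimals == 0)
print(fps_def.convert_value("text"))  # -> 25 (falls back to the default)
```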
openpype/pipeline/publish/README.md (new file, 38 lines)
# Publish

OpenPype uses `pyblish` for the publishing process, which is a little bit extended and modified, mainly for UI purposes. OpenPype's (new) publish UI does not allow enabling/disabling instances or plugins; that can be done during the creation part. It also supports actions only for validators, after a validation exception.

## Exceptions

OpenPype defines a few specific exceptions that should be used in publish plugins.

### Validation exception

Validation plugins should raise `PublishValidationError` to show the artist what is wrong and give them actions to fix it. The exception says that the error happened in a plugin and can be fixed by the artist (with or without an action on the plugin). Any other error stops publishing immediately. A `PublishValidationError` raised after the validation order has the same effect as any other exception.

`PublishValidationError` takes 3 arguments:
- **message** Which is not used in UI but for headless publishing.
- **title** Short description of the error (2-5 words). The title is used for grouping of exceptions per plugin.
- **description** Detailed description of the issue, where markdown and html can be used.

### Known errors

When there is a known error that can't be fixed by the user (e.g. can't connect to the deadline service, etc.), `KnownPublishError` should be raised. The only difference is that its message is shown in the UI to the artist; otherwise a neutral message without context is shown. A minimal validator sketch follows.
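A minimal sketch of a validator raising the validation error; the family, check and message texts are illustrative only:

```python
import pyblish.api

from openpype.pipeline.publish import PublishValidationError


class ValidateExampleSubsetName(pyblish.api.InstancePlugin):
    label = "Validate Example Subset Name"
    order = pyblish.api.ValidatorOrder
    families = ["model"]  # illustrative family

    def process(self, instance):
        if " " in instance.data["subset"]:
            raise PublishValidationError(
                "Subset name contains spaces",
                title="Invalid subset name",
                description=(
                    "## Invalid subset name\n"
                    "Rename the subset so it does not contain spaces."
                )
            )
```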
## Plugin extension

Publish plugins can be extended with additional logic by inheriting from `OpenPypePyblishPluginMixin`, which can be used as a mixin (additional inheritance of a class).

```python
import pyblish.api
from openpype.pipeline import OpenPypePyblishPluginMixin


# Example context plugin
class MyExtendedPlugin(
    pyblish.api.ContextPlugin, OpenPypePyblishPluginMixin
):
    pass
```

### Extensions

Currently the only extension is the ability to define attributes for instances during creation. The method `get_attribute_defs` returns attribute definitions for the families defined in the plugin's `families` attribute if it is an instance plugin, or for the whole context if it is a context plugin. To convert existing values (or to remove legacy values), `convert_attribute_values` can be implemented. Values of publish attributes from a created instance are never removed automatically, so implementing this method is the best way to remove legacy data or convert it to a new data structure; a sketch is shown after this section.

Possible attribute definitions can be found in `openpype/pipeline/lib/attribute_definitions.py`.
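A hedged sketch of an instance plugin exposing one attribute and migrating a legacy key; the family, the `add_burnin` key and the legacy `burnin` key are illustrative assumptions:

```python
import pyblish.api

from openpype.pipeline import OpenPypePyblishPluginMixin
from openpype.pipeline.lib import BoolDef


class ExtractExampleReview(
    pyblish.api.InstancePlugin, OpenPypePyblishPluginMixin
):
    label = "Extract Example Review"
    order = pyblish.api.ExtractorOrder
    families = ["review"]  # attribute defs apply to these families

    @classmethod
    def get_attribute_defs(cls):
        return [
            BoolDef("add_burnin", default=True, label="Add burnin")
        ]

    @classmethod
    def convert_attribute_values(cls, attribute_values):
        # Migrate a hypothetical legacy key before values are used.
        plugin_values = attribute_values.get(cls.__name__)
        if plugin_values and "burnin" in plugin_values:
            plugin_values["add_burnin"] = plugin_values.pop("burnin")
        return attribute_values

    def process(self, instance):
        # Values are stored per plugin name under 'publish_attributes'.
        values = instance.data["publish_attributes"].get(
            self.__class__.__name__, {}
        )
        if values.get("add_burnin"):
            self.log.info("Burnin requested for %s", instance.data["subset"])
```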
openpype/pipeline/publish/__init__.py (new file, 20 lines)
```python
from .publish_plugins import (
    PublishValidationError,
    KnownPublishError,
    OpenPypePyblishPluginMixin
)

from .lib import (
    DiscoverResult,
    publish_plugins_discover
)


__all__ = (
    "PublishValidationError",
    "KnownPublishError",
    "OpenPypePyblishPluginMixin",

    "DiscoverResult",
    "publish_plugins_discover"
)
```
openpype/pipeline/publish/lib.py (new file, 126 lines)
```python
import os
import sys
import types

import six
import pyblish.plugin


class DiscoverResult:
    """Hold result of publish plugins discovery.

    Stores discovered plugins, duplicated plugins and file paths which
    crashed on execution of the file.
    """
    def __init__(self):
        self.plugins = []
        self.crashed_file_paths = {}
        self.duplicated_plugins = []

    def __iter__(self):
        for plugin in self.plugins:
            yield plugin

    def __getitem__(self, item):
        return self.plugins[item]

    def __setitem__(self, item, value):
        self.plugins[item] = value


def publish_plugins_discover(paths=None):
    """Find and return available pyblish plug-ins.

    Overridden function from the `pyblish` module to be able to collect
    crashed files and the reason of their crash.

    Arguments:
        paths (list, optional): Paths to discover plug-ins from.
            If no paths are provided, all paths are searched.
    """

    # The only difference from `pyblish.api.discover`
    result = DiscoverResult()

    plugins = dict()
    plugin_names = []

    allow_duplicates = pyblish.plugin.ALLOW_DUPLICATES
    log = pyblish.plugin.log

    # Include plug-ins from registered paths
    if not paths:
        paths = pyblish.plugin.plugin_paths()

    for path in paths:
        path = os.path.normpath(path)
        if not os.path.isdir(path):
            continue

        for fname in os.listdir(path):
            if fname.startswith("_"):
                continue

            abspath = os.path.join(path, fname)

            if not os.path.isfile(abspath):
                continue

            mod_name, mod_ext = os.path.splitext(fname)

            if not mod_ext == ".py":
                continue

            module = types.ModuleType(mod_name)
            module.__file__ = abspath

            try:
                with open(abspath, "rb") as f:
                    six.exec_(f.read(), module.__dict__)

                # Store reference to original module, to avoid
                # garbage collection from collecting its global
                # imports, such as `import os`.
                sys.modules[abspath] = module

            except Exception as err:
                result.crashed_file_paths[abspath] = sys.exc_info()

                log.debug("Skipped: \"%s\" (%s)", mod_name, err)
                continue

            for plugin in pyblish.plugin.plugins_from_module(module):
                if not allow_duplicates and plugin.__name__ in plugin_names:
                    result.duplicated_plugins.append(plugin)
                    log.debug("Duplicate plug-in found: %s", plugin)
                    continue

                plugin_names.append(plugin.__name__)

                plugin.__module__ = module.__file__
                key = "{0}.{1}".format(plugin.__module__, plugin.__name__)
                plugins[key] = plugin

    # Include plug-ins from registration.
    # Directly registered plug-ins take precedence.
    for plugin in pyblish.plugin.registered_plugins():
        if not allow_duplicates and plugin.__name__ in plugin_names:
            result.duplicated_plugins.append(plugin)
            log.debug("Duplicate plug-in found: %s", plugin)
            continue

        plugin_names.append(plugin.__name__)

        plugins[plugin.__name__] = plugin

    plugins = list(plugins.values())
    pyblish.plugin.sort(plugins)  # In-place

    # In-place user-defined filter
    for filter_ in pyblish.plugin._registered_plugin_filters:
        filter_(plugins)

    result.plugins = plugins

    return result
```
openpype/pipeline/publish/publish_plugins.py (new file, 86 lines)
```python
class PublishValidationError(Exception):
    """Validation error that happened during publishing.

    This exception should be used when publish validation failed.

    Has additional UI specific attributes that may be handy for the artist.

    Args:
        message(str): Message of error. Short explanation of an issue.
        title(str): Title shown in UI. All instances are grouped under a
            single title.
        description(str): Detailed description of an error. It is possible
            to use Markdown syntax.
    """
    def __init__(self, message, title=None, description=None):
        self.message = message
        self.title = title or "< Missing title >"
        self.description = description or message
        super(PublishValidationError, self).__init__(message)


class KnownPublishError(Exception):
    """Publishing crashed because of a known error.

    Message will be shown in UI for the artist.
    """
    pass


class OpenPypePyblishPluginMixin:
    # TODO
    # executable_in_thread = False
    #
    # state_message = None
    # state_percent = None
    # _state_change_callbacks = []
    #
    # def set_state(self, percent=None, message=None):
    #     """Inner callback of plugin that would help to show state in UI.
    #
    #     Plugin has registered callbacks on state change which could
    #     trigger update of message and percent in UI and repaint the
    #     change.
    #
    #     This part must be optional and should not be used to display
    #     errors or for logging.
    #
    #     Message should be short without details.
    #
    #     Args:
    #         percent(int): Percent of processing in range <1-100>.
    #         message(str): Message which will be shown to user (if in UI).
    #     """
    #     if percent is not None:
    #         self.state_percent = percent
    #
    #     if message:
    #         self.state_message = message
    #
    #     for callback in self._state_change_callbacks:
    #         callback(self)

    @classmethod
    def get_attribute_defs(cls):
        """Publish attribute definitions.

        Attributes available for all families in plugin's `families`
        attribute.

        Returns:
            list<AbtractAttrDef>: Attribute definitions for plugin.
        """
        return []

    @classmethod
    def convert_attribute_values(cls, attribute_values):
        if cls.__name__ not in attribute_values:
            return attribute_values

        plugin_values = attribute_values[cls.__name__]

        attr_defs = cls.get_attribute_defs()
        for attr_def in attr_defs:
            key = attr_def.key
            if key in plugin_values:
                plugin_values[key] = attr_def.convert_value(
                    plugin_values[key]
                )
        return attribute_values
```
openpype/plugins/publish/collect_from_create_context.py (new file, 57 lines)
"""Create instances based on CreateContext.
|
||||
|
||||
"""
|
||||
import os
|
||||
import pyblish.api
|
||||
import avalon.api
|
||||
|
||||
|
||||
class CollectFromCreateContext(pyblish.api.ContextPlugin):
|
||||
"""Collect instances and data from CreateContext from new publishing."""
|
||||
|
||||
label = "Collect From Create Context"
|
||||
order = pyblish.api.CollectorOrder - 0.5
|
||||
|
||||
def process(self, context):
|
||||
create_context = context.data.pop("create_context", None)
|
||||
# Skip if create context is not available
|
||||
if not create_context:
|
||||
return
|
||||
|
||||
for created_instance in create_context.instances:
|
||||
instance_data = created_instance.data_to_store()
|
||||
if instance_data["active"]:
|
||||
self.create_instance(context, instance_data)
|
||||
|
||||
# Update global data to context
|
||||
context.data.update(create_context.context_data_to_store())
|
||||
|
||||
# Update context data
|
||||
for key in ("AVALON_PROJECT", "AVALON_ASSET", "AVALON_TASK"):
|
||||
value = create_context.dbcon.Session.get(key)
|
||||
if value is not None:
|
||||
avalon.api.Session[key] = value
|
||||
os.environ[key] = value
|
||||
|
||||
def create_instance(self, context, in_data):
|
||||
subset = in_data["subset"]
|
||||
# If instance data already contain families then use it
|
||||
instance_families = in_data.get("families") or []
|
||||
|
||||
instance = context.create_instance(subset)
|
||||
instance.data.update({
|
||||
"subset": subset,
|
||||
"asset": in_data["asset"],
|
||||
"task": in_data["task"],
|
||||
"label": subset,
|
||||
"name": subset,
|
||||
"family": in_data["family"],
|
||||
"families": instance_families
|
||||
})
|
||||
for key, value in in_data.items():
|
||||
if key not in instance.data:
|
||||
instance.data[key] = value
|
||||
self.log.info("collected instance: {}".format(instance.data))
|
||||
self.log.info("parsing data: {}".format(in_data))
|
||||
|
||||
instance.data["representations"] = list()
|
||||
|
|
```diff
@@ -99,7 +99,8 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
         "camerarig",
         "redshiftproxy",
         "effect",
-        "xgen"
+        "xgen",
+        "hda"
     ]
     exclude_families = ["clip"]
     db_representation_context_keys = [
```
```diff
@@ -9,9 +9,9 @@ class ShowInventory(pyblish.api.Action):
     on = "failed"
 
     def process(self, context, plugin):
-        from avalon.tools import sceneinventory
+        from openpype.tools.utils import host_tools
 
-        sceneinventory.show()
+        host_tools.show_scene_inventory()
 
 
 class ValidateContainers(pyblish.api.ContextPlugin):
```
```diff
@@ -11,7 +11,9 @@ from openpype.lib.plugin_tools import parse_json, get_batch_asset_task_info
 from openpype.lib.remote_publish import (
     get_webpublish_conn,
     start_webpublish_log,
-    publish_and_log
+    publish_and_log,
+    fail_batch,
+    find_variant_key
 )
@@ -124,10 +126,17 @@ class PypeCommands:
         wants to process uploaded .psd file and publish collected layers
         from there.
 
+        Checks if no other batches are running (status == 'in_progress').
+        If so, it sleeps for SLEEP seconds (this is a separate process) and
+        waits for WAIT_FOR seconds altogether.
+
         Requires installed host application on the machine.
 
         Runs publish process as user would, in automatic fashion.
         """
+        SLEEP = 5  # seconds for another loop check for concurrent runs
+        WAIT_FOR = 300  # seconds to wait for concurrent runs
+
         from openpype import install, uninstall
         from openpype.api import Logger
@@ -140,25 +149,12 @@ class PypeCommands:
         from openpype.lib import ApplicationManager
         application_manager = ApplicationManager()
 
-        app_group = application_manager.app_groups.get(host)
-        if not app_group or not app_group.enabled:
-            raise ValueError("No application {} configured".format(host))
-
-        found_variant_key = None
-        # finds most up-to-date variant if any installed
-        for variant_key, variant in app_group.variants.items():
-            for executable in variant.executables:
-                if executable.exists():
-                    found_variant_key = variant_key
-
-        if not found_variant_key:
-            raise ValueError("No executable for {} found".format(host))
+        found_variant_key = find_variant_key(application_manager, host)
 
         app_name = "{}/{}".format(host, found_variant_key)
 
         batch_data = None
         if batch_dir and os.path.exists(batch_dir):
             # TODO check if batch manifest is same as tasks manifests
             batch_data = parse_json(os.path.join(batch_dir, "manifest.json"))
 
         if not batch_data:
@@ -168,11 +164,38 @@ class PypeCommands:
         asset, task_name, _task_type = get_batch_asset_task_info(
             batch_data["context"])
 
+        # processing from app expects JUST ONE task in batch and 1 workfile
+        task_dir_name = batch_data["tasks"][0]
+        task_data = parse_json(os.path.join(batch_dir, task_dir_name,
+                                            "manifest.json"))
+
         workfile_path = os.path.join(batch_dir,
-                                     batch_data["task"],
-                                     batch_data["files"][0])
+                                     task_dir_name,
+                                     task_data["files"][0])
 
         print("workfile_path {}".format(workfile_path))
 
+        _, batch_id = os.path.split(batch_dir)
+        dbcon = get_webpublish_conn()
+        # safer to start logging here, launch might be broken altogether
+        _id = start_webpublish_log(dbcon, batch_id, user)
+
+        in_progress = True
+        slept_times = 0
+        while in_progress:
+            batches_in_progress = list(dbcon.find({
+                "status": "in_progress"
+            }))
+            if len(batches_in_progress) > 1:
+                if slept_times * SLEEP >= WAIT_FOR:
+                    fail_batch(_id, batches_in_progress, dbcon)
+
+                print("Another batch running, sleeping for a bit")
+                time.sleep(SLEEP)
+                slept_times += 1
+            else:
+                in_progress = False
+
         # must have for proper launch of app
         env = get_app_environments_for_context(
             project,
@@ -182,11 +205,6 @@ class PypeCommands:
         )
         os.environ.update(env)
 
-        _, batch_id = os.path.split(batch_dir)
-        dbcon = get_webpublish_conn()
-        # safer to start logging here, launch might be broken altogether
-        _id = start_webpublish_log(dbcon, batch_id, user)
-
         os.environ["OPENPYPE_PUBLISH_DATA"] = batch_dir
         os.environ["IS_HEADLESS"] = "true"
         # must pass identifier to update log lines for a batch
```
```diff
@@ -50,3 +50,11 @@ def get_openpype_splash_filepath(staging=None):
     else:
         splash_file_name = "openpype_splash.png"
     return get_resource("icons", splash_file_name)
+
+
+def pype_icon_filepath(staging=None):
+    return get_openpype_icon_filepath(staging)
+
+
+def pype_splash_filepath(staging=None):
+    return get_openpype_splash_filepath(staging)
```
```diff
@@ -172,5 +172,16 @@
             }
         ]
-    }
+    },
+    "maya": {
+        "colorManagementPreference": {
+            "configFilePath": {
+                "windows": [],
+                "darwin": [],
+                "linux": []
+            },
+            "renderSpace": "scene-linear Rec 709/sRGB",
+            "viewTransform": "sRGB gamma"
+        }
+    }
 }
```
```diff
@@ -255,6 +255,11 @@
         "optional": true,
         "active": true
     },
+    "ValidateMeshNgons": {
+        "enabled": false,
+        "optional": true,
+        "active": true
+    },
     "ValidateMeshNonManifold": {
         "enabled": false,
         "optional": true,
```
```diff
@@ -1009,8 +1009,6 @@
         },
         "variants": {
             "2020": {
-                "enabled": true,
-                "variant_label": "2020",
                 "executables": {
                     "windows": [
                         "C:\\Program Files\\Adobe\\Adobe Photoshop 2020\\Photoshop.exe"
@@ -1026,8 +1024,6 @@
                 "environment": {}
             },
             "2021": {
-                "enabled": true,
-                "variant_label": "2021",
                 "executables": {
                     "windows": [
                         "C:\\Program Files\\Adobe\\Adobe Photoshop 2021\\Photoshop.exe"
@@ -1041,6 +1037,21 @@
                     "linux": []
                 },
                 "environment": {}
+            },
+            "2022": {
+                "executables": {
+                    "windows": [
+                        "C:\\Program Files\\Adobe\\Adobe Photoshop 2022\\Photoshop.exe"
+                    ],
+                    "darwin": [],
+                    "linux": []
+                },
+                "arguments": {
+                    "windows": [],
+                    "darwin": [],
+                    "linux": []
+                },
+                "environment": {}
             }
         }
     },
```
```diff
@@ -110,7 +110,10 @@ from .enum_entity import (
 )
 
 from .list_entity import ListEntity
-from .dict_immutable_keys_entity import DictImmutableKeysEntity
+from .dict_immutable_keys_entity import (
+    DictImmutableKeysEntity,
+    RootsDictEntity
+)
 from .dict_mutable_keys_entity import DictMutableKeysEntity
 from .dict_conditional import (
     DictConditionalEntity,
@@ -169,6 +172,7 @@ __all__ = (
     "ListEntity",
 
     "DictImmutableKeysEntity",
+    "RootsDictEntity",
 
     "DictMutableKeysEntity",
```
|
|
@ -510,7 +510,7 @@ class BaseItemEntity(BaseEntity):
|
|||
pass
|
||||
|
||||
@abstractmethod
|
||||
def _item_initalization(self):
|
||||
def _item_initialization(self):
|
||||
"""Entity specific initialization process."""
|
||||
pass
|
||||
|
||||
|
|
@ -920,7 +920,7 @@ class ItemEntity(BaseItemEntity):
|
|||
_default_label_wrap["collapsed"]
|
||||
)
|
||||
|
||||
self._item_initalization()
|
||||
self._item_initialization()
|
||||
|
||||
def save(self):
|
||||
"""Call save on root item."""
|
||||
|
|
|
|||
|
|
```diff
@@ -9,7 +9,7 @@ from .exceptions import (
 class ColorEntity(InputEntity):
     schema_types = ["color"]
 
-    def _item_initalization(self):
+    def _item_initialization(self):
         self.valid_value_types = (list, )
         self.value_on_not_set = [0, 0, 0, 255]
         self.use_alpha = self.schema_data.get("use_alpha", True)
```
```diff
@@ -107,7 +107,7 @@ class DictConditionalEntity(ItemEntity):
         for _key, _value in new_value.items():
             self.non_gui_children[self.current_enum][_key].set(_value)
 
-    def _item_initalization(self):
+    def _item_initialization(self):
         self._default_metadata = NOT_SET
         self._studio_override_metadata = NOT_SET
         self._project_override_metadata = NOT_SET
```
```diff
@@ -4,7 +4,8 @@ import collections
 from .lib import (
     WRAPPER_TYPES,
     OverrideState,
-    NOT_SET
+    NOT_SET,
+    STRING_TYPE
 )
 from openpype.settings.constants import (
     METADATA_KEYS,
@@ -18,6 +19,7 @@ from . import (
     GUIEntity
 )
 from .exceptions import (
+    DefaultsNotDefined,
     SchemaDuplicatedKeys,
     EntitySchemaError,
     InvalidKeySymbols
@@ -172,7 +174,7 @@ class DictImmutableKeysEntity(ItemEntity):
         for child_obj in added_children:
             self.gui_layout.append(child_obj)
 
-    def _item_initalization(self):
+    def _item_initialization(self):
         self._default_metadata = NOT_SET
         self._studio_override_metadata = NOT_SET
         self._project_override_metadata = NOT_SET
```
```diff
@@ -547,3 +549,178 @@ class DictImmutableKeysEntity(ItemEntity):
         super(DictImmutableKeysEntity, self).reset_callbacks()
         for child_entity in self.children:
             child_entity.reset_callbacks()
+
+
+class RootsDictEntity(DictImmutableKeysEntity):
+    """Entity that adds ability to fill value for roots of current project.
+
+    Value schema is defined by `object_type`.
+
+    It is not possible to change override state (Studio values will always
+    contain studio overrides and the same goes for project). That is
+    because roots can be totally different for each project.
+    """
+    _origin_schema_data = None
+    schema_types = ["dict-roots"]
+
+    def _item_initialization(self):
+        origin_schema_data = self.schema_data
+
+        self.separate_items = origin_schema_data.get("separate_items", True)
+        object_type = origin_schema_data.get("object_type")
+        if isinstance(object_type, STRING_TYPE):
+            object_type = {"type": object_type}
+        self.object_type = object_type
+
+        if not self.is_group:
+            self.is_group = True
+
+        schema_data = copy.deepcopy(self.schema_data)
+        schema_data["children"] = []
+
+        self.schema_data = schema_data
+        self._origin_schema_data = origin_schema_data
+
+        self._default_value = NOT_SET
+        self._studio_value = NOT_SET
+        self._project_value = NOT_SET
+
+        super(RootsDictEntity, self)._item_initialization()
+
+    def schema_validations(self):
+        if self.object_type is None:
+            reason = (
+                "Missing children definitions for root values"
+                " ('object_type' not filled)."
+            )
+            raise EntitySchemaError(self, reason)
+
+        if not isinstance(self.object_type, dict):
+            reason = (
+                "Children definitions for root values must be dictionary"
+                " ('object_type' is \"{}\")."
+            ).format(str(type(self.object_type)))
+            raise EntitySchemaError(self, reason)
+
+        super(RootsDictEntity, self).schema_validations()
+
+    def set_override_state(self, state, ignore_missing_defaults):
+        self.children = []
+        self.non_gui_children = {}
+        self.gui_layout = []
+
+        roots_entity = self.get_entity_from_path(
+            "project_anatomy/roots"
+        )
+        children = []
+        first = True
+        for key in roots_entity.keys():
+            if first:
+                first = False
+            elif self.separate_items:
+                children.append({"type": "separator"})
+            child = copy.deepcopy(self.object_type)
+            child["key"] = key
+            child["label"] = key
+            children.append(child)
+
+        schema_data = copy.deepcopy(self.schema_data)
+        schema_data["children"] = children
+
+        self._add_children(schema_data)
+
+        self._set_children_values(state)
+
+        super(RootsDictEntity, self).set_override_state(
+            state, True
+        )
+
+        if state == OverrideState.STUDIO:
+            self.add_to_studio_default()
+
+        elif state == OverrideState.PROJECT:
+            self.add_to_project_override()
+
+    def on_child_change(self, child_obj):
+        if self._override_state is OverrideState.STUDIO:
+            if not child_obj.has_studio_override:
+                self.add_to_studio_default()
+
+        elif self._override_state is OverrideState.PROJECT:
+            if not child_obj.has_project_override:
+                self.add_to_project_override()
+
+        return super(RootsDictEntity, self).on_child_change(child_obj)
+
+    def _set_children_values(self, state):
+        if state >= OverrideState.DEFAULTS:
+            default_value = self._default_value
+            if default_value is NOT_SET:
+                if state > OverrideState.DEFAULTS:
+                    raise DefaultsNotDefined(self)
+                else:
+                    default_value = {}
+
+            for key, child_obj in self.non_gui_children.items():
+                child_value = default_value.get(key, NOT_SET)
+                child_obj.update_default_value(child_value)
+
+        if state >= OverrideState.STUDIO:
+            value = self._studio_value
+            if value is NOT_SET:
+                value = {}
+
+            for key, child_obj in self.non_gui_children.items():
+                child_value = value.get(key, NOT_SET)
+                child_obj.update_studio_value(child_value)
+
+        if state >= OverrideState.PROJECT:
+            value = self._project_value
+            if value is NOT_SET:
+                value = {}
+
+            for key, child_obj in self.non_gui_children.items():
+                child_value = value.get(key, NOT_SET)
+                child_obj.update_project_value(child_value)
+
+    def _update_current_metadata(self):
+        """Override this method as this entity should not have metadata."""
+        self._metadata_are_modified = False
+        self._current_metadata = {}
+
+    def update_default_value(self, value):
+        """Update default values.
+
+        Not an api method, should be called by parent.
+        """
+        value = self._check_update_value(value, "default")
+        value, _ = self._prepare_value(value)
+
+        self._default_value = value
+        self._default_metadata = {}
+        self.has_default_value = value is not NOT_SET
+
+    def update_studio_value(self, value):
+        """Update studio override values.
+
+        Not an api method, should be called by parent.
+        """
+        value = self._check_update_value(value, "studio override")
+        value, _ = self._prepare_value(value)
+
+        self._studio_value = value
+        self._studio_override_metadata = {}
+        self.had_studio_override = value is not NOT_SET
+
+    def update_project_value(self, value):
+        """Update project override values.
+
+        Not an api method, should be called by parent.
+        """
+
+        value = self._check_update_value(value, "project override")
+        value, _metadata = self._prepare_value(value)
+
+        self._project_value = value
+        self._project_override_metadata = {}
+        self.had_project_override = value is not NOT_SET
```
```diff
@@ -191,7 +191,7 @@ class DictMutableKeysEntity(EndpointEntity):
         child_entity = self.children_by_key[key]
         self.set_child_label(child_entity, label)
 
-    def _item_initalization(self):
+    def _item_initialization(self):
         self._default_metadata = {}
         self._studio_override_metadata = {}
         self._project_override_metadata = {}
```
```diff
@@ -8,7 +8,7 @@ from .lib import (
 
 
 class BaseEnumEntity(InputEntity):
-    def _item_initalization(self):
+    def _item_initialization(self):
         self.multiselection = True
         self.value_on_not_set = None
         self.enum_items = None
@@ -70,7 +70,7 @@ class BaseEnumEntity(InputEntity):
 class EnumEntity(BaseEnumEntity):
     schema_types = ["enum"]
 
-    def _item_initalization(self):
+    def _item_initialization(self):
         self.multiselection = self.schema_data.get("multiselection", False)
         self.enum_items = self.schema_data.get("enum_items")
         # Default is optional and non breaking attribute
@@ -157,7 +157,7 @@ class HostsEnumEntity(BaseEnumEntity):
         "standalonepublisher"
     ]
 
-    def _item_initalization(self):
+    def _item_initialization(self):
         self.multiselection = self.schema_data.get("multiselection", True)
         use_empty_value = False
         if not self.multiselection:
@@ -250,7 +250,7 @@ class HostsEnumEntity(BaseEnumEntity):
 class AppsEnumEntity(BaseEnumEntity):
     schema_types = ["apps-enum"]
 
-    def _item_initalization(self):
+    def _item_initialization(self):
         self.multiselection = True
         self.value_on_not_set = []
         self.enum_items = []
@@ -317,7 +317,7 @@ class AppsEnumEntity(BaseEnumEntity):
 class ToolsEnumEntity(BaseEnumEntity):
     schema_types = ["tools-enum"]
 
-    def _item_initalization(self):
+    def _item_initialization(self):
         self.multiselection = True
         self.value_on_not_set = []
         self.enum_items = []
@@ -376,7 +376,7 @@ class ToolsEnumEntity(BaseEnumEntity):
 class TaskTypeEnumEntity(BaseEnumEntity):
     schema_types = ["task-types-enum"]
 
-    def _item_initalization(self):
+    def _item_initialization(self):
         self.multiselection = self.schema_data.get("multiselection", True)
         if self.multiselection:
             self.valid_value_types = (list, )
@@ -452,7 +452,7 @@ class TaskTypeEnumEntity(BaseEnumEntity):
 class DeadlineUrlEnumEntity(BaseEnumEntity):
     schema_types = ["deadline_url-enum"]
 
-    def _item_initalization(self):
+    def _item_initialization(self):
         self.multiselection = self.schema_data.get("multiselection", True)
 
         self.enum_items = []
@@ -503,7 +503,7 @@ class DeadlineUrlEnumEntity(BaseEnumEntity):
 class AnatomyTemplatesEnumEntity(BaseEnumEntity):
     schema_types = ["anatomy-templates-enum"]
 
-    def _item_initalization(self):
+    def _item_initialization(self):
         self.multiselection = False
 
         self.enum_items = []
```
```diff
@@ -362,7 +362,7 @@ class NumberEntity(InputEntity):
     float_number_regex = re.compile(r"^\d+\.\d+$")
     int_number_regex = re.compile(r"^\d+$")
 
-    def _item_initalization(self):
+    def _item_initialization(self):
         self.minimum = self.schema_data.get("minimum", -99999)
         self.maximum = self.schema_data.get("maximum", 99999)
         self.decimal = self.schema_data.get("decimal", 0)
@@ -420,7 +420,7 @@ class NumberEntity(InputEntity):
 class BoolEntity(InputEntity):
     schema_types = ["boolean"]
 
-    def _item_initalization(self):
+    def _item_initialization(self):
         self.valid_value_types = (bool, )
         value_on_not_set = self.convert_to_valid_type(
             self.schema_data.get("default", True)
@@ -431,7 +431,7 @@ class BoolEntity(InputEntity):
 class TextEntity(InputEntity):
     schema_types = ["text"]
 
-    def _item_initalization(self):
+    def _item_initialization(self):
         self.valid_value_types = (STRING_TYPE, )
         self.value_on_not_set = ""
 
@@ -449,7 +449,7 @@ class TextEntity(InputEntity):
 class PathInput(InputEntity):
     schema_types = ["path-input"]
 
-    def _item_initalization(self):
+    def _item_initialization(self):
         self.valid_value_types = (STRING_TYPE, )
         self.value_on_not_set = ""
 
@@ -460,7 +460,7 @@ class PathInput(InputEntity):
 class RawJsonEntity(InputEntity):
     schema_types = ["raw-json"]
 
-    def _item_initalization(self):
+    def _item_initialization(self):
         # Schema must define if valid value is dict or list
         store_as_string = self.schema_data.get("store_as_string", False)
         is_list = self.schema_data.get("is_list", False)
```
```diff
@@ -48,7 +48,7 @@ class PathEntity(ItemEntity):
             raise AttributeError(self.attribute_error_msg.format("items"))
         return self.child_obj.items()
 
-    def _item_initalization(self):
+    def _item_initialization(self):
         if self.group_item is None and not self.is_group:
             self.is_group = True
 
@@ -216,7 +216,7 @@ class ListStrictEntity(ItemEntity):
             return self.children[idx]
         return default
 
-    def _item_initalization(self):
+    def _item_initialization(self):
         self.valid_value_types = (list, )
         self.require_key = True
```
```diff
@@ -149,7 +149,7 @@ class ListEntity(EndpointEntity):
             return list(value)
         return NOT_SET
 
-    def _item_initalization(self):
+    def _item_initialization(self):
         self.valid_value_types = (list, )
         self.children = []
         self.value_on_not_set = []
```
```diff
@@ -65,7 +65,7 @@ class RootEntity(BaseItemEntity):
         super(RootEntity, self).__init__(schema_data)
         self._require_restart_callbacks = []
         self._item_ids_require_restart = set()
-        self._item_initalization()
+        self._item_initialization()
         if reset:
             self.reset()
 
@@ -176,7 +176,7 @@ class RootEntity(BaseItemEntity):
         for child_obj in added_children:
             self.gui_layout.append(child_obj)
 
-    def _item_initalization(self):
+    def _item_initialization(self):
         # Store `self` to `root_item` for children entities
         self.root_item = self
```
@@ -208,6 +208,25 @@
    }
```

## dict-roots
- entity can be used only in Project settings
- keys of the dictionary are based on current project roots
- they are not updated "live"; it is required to save root changes and then modify values on this entity (TODO: do live updates)
```
{
    "type": "dict-roots",
    "key": "roots",
    "label": "Roots",
    "object_type": {
        "type": "path",
        "multiplatform": true,
        "multipath": false
    }
}
```

## dict-conditional
- is similar to `dict` but always has one enum entity available
- the enum entity has single selection and its value defines the other children entities
```diff
@@ -81,7 +81,12 @@
     {
         "key": "family",
         "label": "Resulting family",
-        "type": "text"
+        "type": "enum",
+        "enum_items": [
+            {
+                "image": "image"
+            }
+        ]
     },
     {
         "type": "text",
```
```diff
@@ -358,6 +358,38 @@
                 ]
             }
         ]
+    },
+    {
+        "key": "maya",
+        "type": "dict",
+        "label": "Maya",
+        "children": [
+            {
+                "key": "colorManagementPreference",
+                "type": "dict",
+                "label": "Color Management Preference",
+                "collapsible": false,
+                "children": [
+                    {
+                        "type": "path",
+                        "key": "configFilePath",
+                        "label": "OCIO Config File Path",
+                        "multiplatform": true,
+                        "multipath": true
+                    },
+                    {
+                        "type": "text",
+                        "key": "renderSpace",
+                        "label": "Rendering Space"
+                    },
+                    {
+                        "type": "text",
+                        "key": "viewTransform",
+                        "label": "Viewer Transform"
+                    }
+                ]
+            }
+        ]
+    }
 ]
```
```diff
@@ -274,6 +274,10 @@
         "key": "ValidateMeshLaminaFaces",
         "label": "ValidateMeshLaminaFaces"
     },
+    {
+        "key": "ValidateMeshNgons",
+        "label": "ValidateMeshNgons"
+    },
     {
         "key": "ValidateMeshNonManifold",
         "label": "ValidateMeshNonManifold"
```
```diff
@@ -20,26 +20,21 @@
         "type": "raw-json"
     },
     {
-        "type": "dict",
+        "type": "dict-modifiable",
         "key": "variants",
-        "children": [
-            {
-                "type": "schema_template",
-                "name": "template_host_variant",
-                "template_data": [
-                    {
-                        "app_variant_label": "2020",
-                        "app_variant": "2020",
-                        "variant_skip_paths": ["use_python_2"]
-                    },
-                    {
-                        "app_variant_label": "2021",
-                        "app_variant": "2021",
-                        "variant_skip_paths": ["use_python_2"]
-                    }
-                ]
-            }
-        ]
+        "collapsible_key": true,
+        "use_label_wrap": false,
+        "object_type": {
+            "type": "dict",
+            "collapsible": true,
+            "children": [
+                {
+                    "type": "schema_template",
+                    "name": "template_host_variant_items",
+                    "skip_paths": ["use_python_2"]
+                }
+            ]
+        }
     }
 ]
```
@@ -58,6 +58,19 @@
            "hover": "rgba(168, 175, 189, 0.3)",
            "selected-hover": "rgba(168, 175, 189, 0.7)"
        }
    },
+    "publisher": {
+        "error": "#AA5050",
+        "success": "#458056",
+        "warning": "#ffc671",
+        "list-view-group": {
+            "bg": "#434a56",
+            "bg-hover": "rgba(168, 175, 189, 0.3)",
+            "bg-selected-hover": "rgba(92, 173, 214, 0.4)",
+            "bg-expander": "#2C313A",
+            "bg-expander-hover": "#2d6c9f",
+            "bg-expander-selected-hover": "#3784c5"
+        }
+    }
}
}
@@ -57,10 +57,15 @@ QAbstractSpinBox:focus, QLineEdit:focus, QPlainTextEdit:focus, QTextEdit:focus{
    border-color: {color:border-focus};
}

+/* Checkbox */
+QCheckBox {
+    background: transparent;
+}
+
/* Buttons */
QPushButton {
    text-align:center center;
-    border: 1px solid transparent;
+    border: 0px solid transparent;
    border-radius: 0.2em;
    padding: 3px 5px 3px 5px;
    background: {color:bg-buttons};

@@ -86,15 +91,15 @@ QPushButton::menu-indicator {
}

QToolButton {
-    border: none;
-    background: transparent;
+    border: 0px solid transparent;
+    background: {color:bg-buttons};
+    border-radius: 0.2em;
    padding: 2px;
}

QToolButton:hover {
-    background: #333840;
-    border-color: {color:border-hover};
+    background: {color:bg-button-hover};
+    color: {color:font-hover};
}

QToolButton:disabled {
@@ -104,14 +109,15 @@ QToolButton:disabled {
QToolButton[popupMode="1"], QToolButton[popupMode="MenuButtonPopup"] {
    /* make way for the popup button */
    padding-right: 20px;
+    border: 1px solid {color:bg-buttons};
}

QToolButton::menu-button {
    width: 16px;
+    /* Set border only on the left side. */
    background: transparent;
    border: 1px solid transparent;
-    border-left: 1px solid {color:bg-buttons};
+    border-left: 1px solid qlineargradient(x1:0, y1:0, x2:0, y2:1, stop: 0 transparent, stop:0.2 {color:font}, stop:0.8 {color:font}, stop: 1 transparent);
    padding: 3px 0px 3px 0px;
    border-radius: 0;
}

QToolButton::menu-arrow {
@@ -571,7 +577,9 @@ QScrollBar::add-page:vertical, QScrollBar::sub-page:vertical {
    background: {color:bg-menu-separator};
}

-#IconBtn {}
+#IconButton {
+    padding: 4px 4px 4px 4px;
+}

/* Password dialog*/
#PasswordBtn {
@@ -595,6 +603,13 @@
    padding-right: 3px;
}

+#InfoText {
+    padding-left: 30px;
+    padding-top: 20px;
+    background: transparent;
+    border: 1px solid {color:border};
+}
+
#TypeEditor, #ToolEditor, #NameEditor, #NumberEditor {
    background: transparent;
    border-radius: 0.3em;
@@ -671,3 +686,169 @@
#OptionalActionBody[state="hover"], #OptionalActionOption[state="hover"] {
    background: {color:bg-view-hover};
}

/* New Create/Publish UI */
#PublishLogConsole {
    font-family: "Roboto Mono";
}

#VariantInput[state="new"], #VariantInput[state="new"]:focus, #VariantInput[state="new"]:hover {
    border-color: {color:publisher:success};
}
#VariantInput[state="invalid"], #VariantInput[state="invalid"]:focus, #VariantInput[state="invalid"]:hover {
    border-color: {color:publisher:error};
}

#VariantInput[state="empty"], #VariantInput[state="empty"]:focus, #VariantInput[state="empty"]:hover {
    border-color: {color:bg-inputs};
}

#VariantInput[state="exists"], #VariantInput[state="exists"]:focus, #VariantInput[state="exists"]:hover {
    border-color: #4E76BB;
}

#MultipleItemView {
    background: transparent;
    border: none;
}

#MultipleItemView:item {
    background: {color:bg-view-selection};
    border-radius: 0.4em;
}

#InstanceListView::item {
    border-radius: 0.3em;
    margin: 1px;
}
#InstanceListGroupWidget {
    border: none;
    background: transparent;
}

#CardViewWidget {
    background: {color:bg-buttons};
    border-radius: 0.2em;
}
#CardViewWidget:hover {
    background: {color:bg-button-hover};
}
#CardViewWidget[state="selected"] {
    background: {color:bg-view-selection};
}

#ListViewSubsetName[state="invalid"] {
    color: {color:publisher:error};
}

#PublishFrame {
    background: rgba(0, 0, 0, 127);
}
#PublishFrame[state="1"] {
    background: rgb(22, 25, 29);
}
#PublishFrame[state="2"] {
    background: {color:bg};
}

#PublishInfoFrame {
    background: {color:bg};
    border: 2px solid black;
    border-radius: 0.3em;
}

#PublishInfoFrame[state="-1"] {
    background: rgb(194, 226, 236);
}

#PublishInfoFrame[state="0"] {
    background: {color:publisher:error};
}

#PublishInfoFrame[state="1"] {
    background: {color:publisher:success};
}

#PublishInfoFrame[state="2"] {
    background: {color:publisher:warning};
}

#PublishInfoFrame QLabel {
    color: black;
    font-style: bold;
}

#PublishInfoMainLabel {
    font-size: 12pt;
}

#PublishContextLabel {
    font-size: 13pt;
}

#ValidationActionButton {
    border-radius: 0.2em;
    padding: 4px 6px 4px 6px;
    background: {color:bg-buttons};
}

#ValidationActionButton:hover {
    background: {color:bg-button-hover};
    color: {color:font-hover};
}

#ValidationActionButton:disabled {
    background: {color:bg-buttons-disabled};
}

#ValidationErrorTitleFrame {
    background: {color:bg-inputs};
    border-left: 4px solid transparent;
}

#ValidationErrorTitleFrame:hover {
    border-left-color: {color:border};
}

#ValidationErrorTitleFrame[selected="1"] {
    background: {color:bg};
    border-left-color: {palette:blue-light};
}

#ValidationErrorInstanceList {
    border-radius: 0;
}

#ValidationErrorInstanceList::item {
    border-bottom: 1px solid {color:border};
    border-left: 1px solid {color:border};
}

#TasksCombobox[state="invalid"], #AssetNameInput[state="invalid"] {
    border-color: {color:publisher:error};
}

#PublishProgressBar[state="0"]::chunk {
    background: {color:bg-buttons};
}

#PublishDetailViews {
    background: transparent;
}
#PublishDetailViews::item {
    margin: 1px 0px 1px 0px;
}
#PublishCommentInput {
    padding: 0.2em;
}
#FamilyIconLabel {
    font-size: 14pt;
}
#ArrowBtn, #ArrowBtn:disabled, #ArrowBtn:hover {
    background: transparent;
}

#NiceCheckbox {
    /* Default size hint of NiceCheckbox is defined by font size. */
    font-size: 7pt;
}
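
The `{color:...}` and `{palette:...}` tokens above are not valid QSS on their own; they are placeholders the style loader fills in from the color data shown earlier (e.g. the new `publisher` block). A minimal sketch of that substitution, assuming the loader simply flattens the nested color dictionary into `section:key` tokens (the helper names here are made up, not this commit's API):

```python
import json
import re


def _flatten(prefix, value, out):
    # Build tokens such as "color:publisher:error" from nested dictionaries.
    if isinstance(value, dict):
        for key, subvalue in value.items():
            _flatten("{}:{}".format(prefix, key), subvalue, out)
    else:
        out[prefix] = value


def format_stylesheet(qss_template, color_data):
    """Replace `{color:...}` tokens in a QSS template with concrete values."""
    tokens = {}
    _flatten("color", color_data, tokens)
    return re.sub(
        r"\{(color:[A-Za-z0-9_:-]+)\}",
        lambda match: str(tokens.get(match.group(1), match.group(0))),
        qss_template
    )


colors = json.loads('{"publisher": {"error": "#AA5050"}}')
qss = '#VariantInput[state="invalid"] { border-color: {color:publisher:error}; }'
print(format_stylesheet(qss, colors))
# -> #VariantInput[state="invalid"] { border-color: #AA5050; }
```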
@@ -80,7 +80,7 @@ class TestPerformance():
            file_id3 = bson.objectid.ObjectId()

            self.inserted_ids.extend([file_id, file_id2, file_id3])
-            version_str = "v{0:03}".format(i + 1)
+            version_str = "v{:03d}".format(i + 1)
            file_name = "test_Cylinder_workfileLookdev_{}.mb".\
                format(version_str)
@@ -95,7 +95,7 @@ class TestPerformance():
                "family": "workfile",
                "hierarchy": "Assets",
                "project": {"code": "test", "name": "Test"},
-                "version": 1,
+                "version": i + 1,
                "asset": "Cylinder",
                "representation": "mb",
                "root": self.ROOT_DIR
@@ -104,8 +104,8 @@ class TestPerformance():
                "name": "mb",
                "parent": {"oid": '{}'.format(id)},
                "data": {
-                    "path": "C:\\projects\\Test\\Assets\\Cylinder\\publish\\workfile\\workfileLookdev\\{}\\{}".format(version_str, file_name),
-                    "template": "{root}\\{project[name]}\\{hierarchy}\\{asset}\\publish\\{family}\\{subset}\\v{version:0>3}\\{project[code]}_{asset}_{subset}_v{version:0>3}<_{output}><.{frame:0>4}>.{representation}"
+                    "path": "C:\\projects\\test_performance\\Assets\\Cylinder\\publish\\workfile\\workfileLookdev\\{}\\{}".format(version_str, file_name),  # noqa
+                    "template": "{root[work]}\\{project[name]}\\{hierarchy}\\{asset}\\publish\\{family}\\{subset}\\v{version:0>3}\\{project[code]}_{asset}_{subset}_v{version:0>3}<_{output}><.{frame:0>4}>.{representation}"  # noqa
                },
                "type": "representation",
                "schema": "openpype:representation-2.0"
@@ -188,30 +188,21 @@ class TestPerformance():
                      create_files=False):
        ret = [
            {
-                "path": "{root}" + "/Test/Assets/Cylinder/publish/workfile/" +
-                        "workfileLookdev/v{0:03}/" +
-                        "test_Cylinder_A_workfileLookdev_v{0:03}.dat"
-                        .format(i, i),
+                "path": "{root[work]}" + "/test_performance/Assets/Cylinder/publish/workfile/workfileLookdev/v{:03d}/test_Cylinder_A_workfileLookdev_v{:03d}.dat".format(i, i),  # noqa
                "_id": '{}'.format(file_id),
                "hash": "temphash",
                "sites": self.get_sites(self.MAX_NUMBER_OF_SITES),
                "size": random.randint(0, self.MAX_FILE_SIZE_B)
            },
            {
-                "path": "{root}" + "/Test/Assets/Cylinder/publish/workfile/" +
-                        "workfileLookdev/v{0:03}/" +
-                        "test_Cylinder_B_workfileLookdev_v{0:03}.dat"
-                        .format(i, i),
+                "path": "{root[work]}" + "/test_performance/Assets/Cylinder/publish/workfile/workfileLookdev/v{:03d}/test_Cylinder_B_workfileLookdev_v{:03d}.dat".format(i, i),  # noqa
                "_id": '{}'.format(file_id2),
                "hash": "temphash",
                "sites": self.get_sites(self.MAX_NUMBER_OF_SITES),
                "size": random.randint(0, self.MAX_FILE_SIZE_B)
            },
            {
-                "path": "{root}" + "/Test/Assets/Cylinder/publish/workfile/" +
-                        "workfileLookdev/v{0:03}/" +
-                        "test_Cylinder_C_workfileLookdev_v{0:03}.dat"
-                        .format(i, i),
+                "path": "{root[work]}" + "/test_performance/Assets/Cylinder/publish/workfile/workfileLookdev/v{:03d}/test_Cylinder_C_workfileLookdev_v{:03d}.dat".format(i, i),  # noqa
                "_id": '{}'.format(file_id3),
                "hash": "temphash",
                "sites": self.get_sites(self.MAX_NUMBER_OF_SITES),
@@ -221,7 +212,7 @@ class TestPerformance():
        ]
        if create_files:
            for f in ret:
-                path = f.get("path").replace("{root}", self.ROOT_DIR)
+                path = f.get("path").replace("{root[work]}", self.ROOT_DIR)
                os.makedirs(os.path.dirname(path), exist_ok=True)
                with open(path, 'wb') as fp:
                    fp.write(os.urandom(f.get("size")))
@@ -231,26 +222,26 @@ class TestPerformance():
    def get_files_doc(self, i, file_id, file_id2, file_id3):
        ret = {}
        ret['{}'.format(file_id)] = {
-            "path": "{root}" +
-                "/Test/Assets/Cylinder/publish/workfile/workfileLookdev/"
-                "v001/test_CylinderA_workfileLookdev_v{0:03}.mb".format(i),
+            "path": "{root[work]}" +
+                "/test_performance/Assets/Cylinder/publish/workfile/workfileLookdev/"  # noqa
+                "v{:03d}/test_CylinderA_workfileLookdev_v{:03d}.mb".format(i, i),  # noqa
            "hash": "temphash",
            "sites": ["studio"],
            "size": 87236
        }

        ret['{}'.format(file_id2)] = {
-            "path": "{root}" +
-                "/Test/Assets/Cylinder/publish/workfile/workfileLookdev/"
-                "v001/test_CylinderB_workfileLookdev_v{0:03}.mb".format(i),
+            "path": "{root[work]}" +
+                "/test_performance/Assets/Cylinder/publish/workfile/workfileLookdev/"  # noqa
+                "v{:03d}/test_CylinderB_workfileLookdev_v{:03d}.mb".format(i, i),  # noqa
            "hash": "temphash",
            "sites": ["studio"],
            "size": 87236
        }
        ret['{}'.format(file_id3)] = {
-            "path": "{root}" +
-                "/Test/Assets/Cylinder/publish/workfile/workfileLookdev/"
-                "v001/test_CylinderC_workfileLookdev_v{0:03}.mb".format(i),
+            "path": "{root[work]}" +
+                "/test_performance/Assets/Cylinder/publish/workfile/workfileLookdev/"  # noqa
+                "v{:03d}/test_CylinderC_workfileLookdev_v{:03d}.mb".format(i, i),  # noqa
            "hash": "temphash",
            "sites": ["studio"],
            "size": 87236
@@ -287,7 +278,7 @@ class TestPerformance():

if __name__ == '__main__':
    tp = TestPerformance('array')
-    tp.prepare(no_of_records=10, create_files=True)  # enable to prepare data
+    tp.prepare(no_of_records=10000, create_files=True)
    # tp.run(10, 3)

    # print('-'*50)
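
One detail behind the `{root}` to `{root[work]}` switch in these hunks: `str.format` treats `[work]` as a key lookup on the passed mapping, so a template can address one named root out of several. A small self-contained illustration:

```python
# "{root[work]}" resolves to root["work"] during str.format, which is how
# multi-root templates pick a specific root by name.
template = "{root[work]}/test_performance/Assets/Cylinder"
print(template.format(root={"work": "C:/projects"}))
# -> C:/projects/test_performance/Assets/Cylinder
```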
@@ -29,6 +29,7 @@ class ExperimentalToolsDialog(QtWidgets.QDialog):
        self.setWindowTitle("OpenPype Experimental tools")
        icon = QtGui.QIcon(app_icon_path())
        self.setWindowIcon(icon)
+        self.setStyleSheet(load_stylesheet())

        # Widgets for cases when no experimental tools are available
        empty_widget = QtWidgets.QWidget(self)
@@ -80,7 +81,9 @@ class ExperimentalToolsDialog(QtWidgets.QDialog):
        tool_btns_layout.addWidget(separator_widget, 0)
        tool_btns_layout.addWidget(tool_btns_label, 0)

-        experimental_tools = ExperimentalTools()
+        experimental_tools = ExperimentalTools(
+            parent=parent, filter_hosts=True
+        )

        # Main layout
        layout = QtWidgets.QVBoxLayout(self)
@@ -63,7 +63,14 @@ class ExperimentalTools:
    """
    def __init__(self, parent=None, host_name=None, filter_hosts=None):
        # Definition of experimental tools
-        experimental_tools = []
+        experimental_tools = [
+            ExperimentalTool(
+                "publisher",
+                "New publisher",
+                self._show_publisher,
+                "Combined creation and publishing into one tool."
+            )
+        ]

        # --- Example tool (callback will just print on click) ---
        # def example_callback(*args):
@@ -110,6 +117,8 @@ class ExperimentalTools:
        self._tools = experimental_tools
        self._parent_widget = parent

+        self._publisher_tool = None
+
    @property
    def tools(self):
        """Tools in list.
@@ -140,3 +149,13 @@ class ExperimentalTools:
        for identifier, experimental_tool in self.tools_by_identifier.items():
            enabled = experimental_settings.get(identifier, False)
            experimental_tool.set_enabled(enabled)
+
+    def _show_publisher(self):
+        if self._publisher_tool is None:
+            from openpype.tools import publisher
+
+            self._publisher_tool = publisher.PublisherWindow(
+                parent=self._parent_widget
+            )
+
+        self._publisher_tool.show()
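
Registering another tool would follow the same pattern as the `publisher` entry above; the commented "example tool" in `__init__` hints at this as well. A hypothetical sketch (the identifier, label and callback are made up, and the fourth positional argument appears to be the tooltip text shown in the dialog):

```python
# Hypothetical extension of the experimental_tools list in __init__ above,
# following the ExperimentalTool(identifier, label, callback, tooltip)
# call shown in this diff. "my_tool" is not a real tool.
def _show_my_tool(*args):
    print("My tool was clicked")

experimental_tools.append(
    ExperimentalTool(
        "my_tool",
        "My tool",
        _show_my_tool,
        "One line description shown as a tooltip in the dialog."
    )
)
```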
@@ -6,8 +6,8 @@ from avalon.vendor import qtawesome

from .delegates import ActionDelegate
from . import lib
-from .models import TaskModel, ActionModel, ProjectModel
-from .flickcharm import FlickCharm
+from .models import TaskModel, ActionModel
+from openpype.tools.flickcharm import FlickCharm
from .constants import (
    ACTION_ROLE,
    GROUP_ROLE,
@@ -8,8 +8,7 @@ from avalon.api import AvalonMongoDB
from openpype import style
from openpype.api import resources

from avalon.tools import lib as tools_lib
-from avalon.tools.widgets import AssetWidget
+from openpype.tools.utils.widgets import AssetWidget
from avalon.vendor import qtawesome
from .models import ProjectModel
from .lib import get_action_label, ProjectHandler

@@ -21,7 +20,7 @@ from .widgets import (
    SlidePageWidget
)

-from .flickcharm import FlickCharm
+from openpype.tools.flickcharm import FlickCharm


class ProjectIconView(QtWidgets.QListView):
@@ -7,7 +7,7 @@ from Qt import QtWidgets, QtCore
from openpype.hosts.maya.api.lib import assign_look_by_version

from avalon import style, io
-from avalon.tools import lib
+from openpype.tools.utils.lib import qt_app_context

from maya import cmds
# old api for MFileIO

@@ -258,7 +258,7 @@ def show():
    mainwindow = next(widget for widget in top_level_widgets
                      if widget.objectName() == "MayaWindow")

-    with lib.application():
+    with qt_app_context():
        window = App(parent=mainwindow)
        window.setStyleSheet(style.load_stylesheet())
        window.show()
@@ -1456,7 +1456,11 @@ class HierarchyModel(QtCore.QAbstractItemModel):
            return

        raw_data = mime_data.data("application/copy_task")
-        encoded_data = QtCore.QByteArray.fromRawData(raw_data)
+        if isinstance(raw_data, QtCore.QByteArray):
+            # Raw data is already a QByteArray and does not need loading
+            encoded_data = raw_data
+        else:
+            encoded_data = QtCore.QByteArray.fromRawData(raw_data)
        stream = QtCore.QDataStream(encoded_data, QtCore.QIODevice.ReadOnly)
        text = stream.readQString()
        try:
7 openpype/tools/publisher/__init__.py (new file)

@@ -0,0 +1,7 @@
from .app import show
from .window import PublisherWindow


__all__ = (
    "show",
    "PublisherWindow"
)
17 openpype/tools/publisher/app.py (new file)

@@ -0,0 +1,17 @@
from .window import PublisherWindow


class _WindowCache:
    window = None


def show(parent=None):
    window = _WindowCache.window
    if window is None:
        window = PublisherWindow(parent)
        _WindowCache.window = window

    window.show()
    window.activateWindow()

    return window
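
`_WindowCache` is a simple module-level singleton: the first call builds the window, later calls only re-show and re-raise it. A short usage sketch (hypothetical host code):

```python
# Repeated calls reuse the same cached window instead of constructing a
# new PublisherWindow each time.
from openpype.tools.publisher import show

first = show()
second = show()
assert first is second  # _WindowCache hands back the same instance
```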
34 openpype/tools/publisher/constants.py (new file)

@@ -0,0 +1,34 @@
from Qt import QtCore

# ID of context item in instance view
CONTEXT_ID = "context"
CONTEXT_LABEL = "Options"

# Allowed symbols for subset name (and variant)
# - characters, numbers, underscore and dot
SUBSET_NAME_ALLOWED_SYMBOLS = "a-zA-Z0-9_."
VARIANT_TOOLTIP = (
    "Variant may contain alphabetical characters (a-Z)"
    "\nnumerical characters (0-9) dot (\".\") or underscore (\"_\")."
)

# Roles for instance views
INSTANCE_ID_ROLE = QtCore.Qt.UserRole + 1
SORT_VALUE_ROLE = QtCore.Qt.UserRole + 2
IS_GROUP_ROLE = QtCore.Qt.UserRole + 3
CREATOR_IDENTIFIER_ROLE = QtCore.Qt.UserRole + 4
FAMILY_ROLE = QtCore.Qt.UserRole + 5


__all__ = (
    "CONTEXT_ID",

    "SUBSET_NAME_ALLOWED_SYMBOLS",
    "VARIANT_TOOLTIP",

    "INSTANCE_ID_ROLE",
    "SORT_VALUE_ROLE",
    "IS_GROUP_ROLE",
    "CREATOR_IDENTIFIER_ROLE",
    "FAMILY_ROLE"
)
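
These roles are plain Qt `UserRole` offsets, so any item model can carry publisher metadata alongside the display text. A minimal sketch of how such a role is written and read back (the model and the id value here are made up for illustration):

```python
from Qt import QtGui

from openpype.tools.publisher.constants import INSTANCE_ID_ROLE

model = QtGui.QStandardItemModel()
item = QtGui.QStandardItem("imageMain")
item.setData("f3a1c2", INSTANCE_ID_ROLE)  # hypothetical instance id
model.appendRow(item)

index = model.index(0, 0)
print(index.data(INSTANCE_ID_ROLE))  # -> "f3a1c2"
```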
991 openpype/tools/publisher/control.py (new file)

@@ -0,0 +1,991 @@
import os
import copy
import inspect
import logging
import traceback
import collections

import weakref
try:
    from weakref import WeakMethod
except Exception:
    from openpype.lib.python_2_comp import WeakMethod

import avalon.api
import pyblish.api

from openpype.pipeline import PublishValidationError
from openpype.pipeline.create import CreateContext

from Qt import QtCore

# Define constant for plugin orders offset
PLUGIN_ORDER_OFFSET = 0.5


class MainThreadItem:
    """Callback with args and kwargs."""
    def __init__(self, callback, *args, **kwargs):
        self.callback = callback
        self.args = args
        self.kwargs = kwargs

    def process(self):
        self.callback(*self.args, **self.kwargs)


class MainThreadProcess(QtCore.QObject):
    """Qt based main thread process executor.

    Has a timer which checks every 50ms whether there is a new item to
    process.

    This approach gives the ability to update the UI while a plugin is in
    progress.
    """
    def __init__(self):
        super(MainThreadProcess, self).__init__()
        self._items_to_process = collections.deque()

        timer = QtCore.QTimer()
        timer.setInterval(50)

        timer.timeout.connect(self._execute)

        self._timer = timer

    def add_item(self, item):
        self._items_to_process.append(item)

    def _execute(self):
        if not self._items_to_process:
            return

        item = self._items_to_process.popleft()
        item.process()

    def start(self):
        if not self._timer.isActive():
            self._timer.start()

    def stop(self):
        if self._timer.isActive():
            self._timer.stop()

    def clear(self):
        if self._timer.isActive():
            self._timer.stop()
        self._items_to_process = collections.deque()


class AssetDocsCache:
    """Cache asset documents for creation part."""
    projection = {
        "_id": True,
        "name": True,
        "data.visualParent": True,
        "data.tasks": True
    }

    def __init__(self, controller):
        self._controller = controller
        self._asset_docs = None
        self._task_names_by_asset_name = {}

    @property
    def dbcon(self):
        return self._controller.dbcon

    def reset(self):
        self._asset_docs = None
        self._task_names_by_asset_name = {}

    def _query(self):
        if self._asset_docs is None:
            asset_docs = list(self.dbcon.find(
                {"type": "asset"},
                self.projection
            ))
            task_names_by_asset_name = {}
            for asset_doc in asset_docs:
                asset_name = asset_doc["name"]
                asset_tasks = asset_doc.get("data", {}).get("tasks") or {}
                task_names_by_asset_name[asset_name] = list(asset_tasks.keys())
            self._asset_docs = asset_docs
            self._task_names_by_asset_name = task_names_by_asset_name

    def get_asset_docs(self):
        self._query()
        return copy.deepcopy(self._asset_docs)

    def get_task_names_by_asset_name(self):
        self._query()
        return copy.deepcopy(self._task_names_by_asset_name)


class PublishReport:
    """Report for single publishing process.

    Report keeps current state of publishing and currently processed plugin.
    """
    def __init__(self, controller):
        self.controller = controller
        self._publish_discover_result = None
        self._plugin_data = []
        self._plugin_data_with_plugin = []

        self._stored_plugins = []
        self._current_plugin_data = []
        self._all_instances_by_id = {}
        self._current_context = None

    def reset(self, context, publish_discover_result=None):
        """Reset report and clear all data."""
        self._publish_discover_result = publish_discover_result
        self._plugin_data = []
        self._plugin_data_with_plugin = []
        self._current_plugin_data = {}
        self._all_instances_by_id = {}
        self._current_context = context

    def add_plugin_iter(self, plugin, context):
        """Add report about single iteration of plugin."""
        for instance in context:
            self._all_instances_by_id[instance.id] = instance

        if self._current_plugin_data:
            self._current_plugin_data["passed"] = True

        self._current_plugin_data = self._add_plugin_data_item(plugin)

    def _get_plugin_data_item(self, plugin):
        store_item = None
        for item in self._plugin_data_with_plugin:
            if item["plugin"] is plugin:
                store_item = item["data"]
                break
        return store_item

    def _add_plugin_data_item(self, plugin):
        if plugin in self._stored_plugins:
            raise ValueError("Plugin is already stored")

        self._stored_plugins.append(plugin)

        label = None
        if hasattr(plugin, "label"):
            label = plugin.label

        plugin_data_item = {
            "name": plugin.__name__,
            "label": label,
            "order": plugin.order,
            "instances_data": [],
            "actions_data": [],
            "skipped": False,
            "passed": False
        }
        self._plugin_data_with_plugin.append({
            "plugin": plugin,
            "data": plugin_data_item
        })
        self._plugin_data.append(plugin_data_item)
        return plugin_data_item

    def set_plugin_skipped(self):
        """Set that current plugin has been skipped."""
        self._current_plugin_data["skipped"] = True

    def add_result(self, result):
        """Handle result of one plugin and its instance."""
        instance = result["instance"]
        instance_id = None
        if instance is not None:
            instance_id = instance.id
        self._current_plugin_data["instances_data"].append({
            "id": instance_id,
            "logs": self._extract_instance_log_items(result)
        })

    def add_action_result(self, action, result):
        """Add result of single action."""
        plugin = result["plugin"]

        store_item = self._get_plugin_data_item(plugin)
        if store_item is None:
            store_item = self._add_plugin_data_item(plugin)

        action_name = action.__name__
        action_label = action.label or action_name
        log_items = self._extract_log_items(result)
        store_item["actions_data"].append({
            "success": result["success"],
            "name": action_name,
            "label": action_label,
            "logs": log_items
        })

    def get_report(self, publish_plugins=None):
        """Report data with all details of current state."""
        instances_details = {}
        for instance in self._all_instances_by_id.values():
            instances_details[instance.id] = self._extract_instance_data(
                instance, instance in self._current_context
            )

        plugins_data = copy.deepcopy(self._plugin_data)
        if plugins_data and not plugins_data[-1]["passed"]:
            plugins_data[-1]["passed"] = True

        if publish_plugins:
            for plugin in publish_plugins:
                if plugin not in self._stored_plugins:
                    plugins_data.append(self._add_plugin_data_item(plugin))

        crashed_file_paths = {}
        if self._publish_discover_result is not None:
            items = self._publish_discover_result.crashed_file_paths.items()
            for filepath, exc_info in items:
                crashed_file_paths[filepath] = "".join(
                    traceback.format_exception(*exc_info)
                )

        return {
            "plugins_data": plugins_data,
            "instances": instances_details,
            "context": self._extract_context_data(self._current_context),
            "crashed_file_paths": crashed_file_paths
        }

    def _extract_context_data(self, context):
        return {
            "label": context.data.get("label")
        }

    def _extract_instance_data(self, instance, exists):
        return {
            "name": instance.data.get("name"),
            "label": instance.data.get("label"),
            "family": instance.data["family"],
            "families": instance.data.get("families") or [],
            "exists": exists
        }

    def _extract_instance_log_items(self, result):
        instance = result["instance"]
        instance_id = None
        if instance:
            instance_id = instance.id

        log_items = self._extract_log_items(result)
        for item in log_items:
            item["instance_id"] = instance_id
        return log_items

    def _extract_log_items(self, result):
        output = []
        records = result.get("records") or []
        for record in records:
            record_exc_info = record.exc_info
            if record_exc_info is not None:
                record_exc_info = "".join(
                    traceback.format_exception(*record_exc_info)
                )

            try:
                msg = record.getMessage()
            except Exception:
                msg = str(record.msg)

            output.append({
                "type": "record",
                "msg": msg,
                "name": record.name,
                "lineno": record.lineno,
                "levelno": record.levelno,
                "levelname": record.levelname,
                "threadName": record.threadName,
                "filename": record.filename,
                "pathname": record.pathname,
                "msecs": record.msecs,
                "exc_info": record_exc_info
            })

        exception = result.get("error")
        if exception:
            fname, line_no, func, exc = exception.traceback
            output.append({
                "type": "error",
                "msg": str(exception),
                "filename": str(fname),
                "lineno": str(line_no),
                "func": str(func),
                "traceback": exception.formatted_traceback
            })

        return output


class PublisherController:
    """Middleware between UI, CreateContext and publish Context.

    Handles both creation and publishing parts.

    Args:
        dbcon (AvalonMongoDB): Connection to mongo with context.
        headless (bool): Headless publishing. ATM not implemented or used.
    """
    def __init__(self, dbcon=None, headless=False):
        self.log = logging.getLogger("PublisherController")
        self.host = avalon.api.registered_host()
        self.headless = headless

        self.create_context = CreateContext(
            self.host, dbcon, headless=headless, reset=False
        )

        # pyblish.api.Context
        self._publish_context = None
        # Pyblish report
        self._publish_report = PublishReport(self)
        # Store exceptions of validation error
        self._publish_validation_errors = []
        # Currently processing plugin errors
        self._publish_current_plugin_validation_errors = None
        # Any other exception that happened during publishing
        self._publish_error = None
        # Publishing is in progress
        self._publish_is_running = False
        # Publishing is over validation order
        self._publish_validated = False
        # Publishing should stop at validation stage
        self._publish_up_validation = False
        # All publish plugins are processed
        self._publish_finished = False
        self._publish_max_progress = 0
        self._publish_progress = 0
        # This information is not that important for the controller, but for
        #   the widget which can change (and set) the comment.
        self._publish_comment_is_set = False

        # Validation order
        # - plugin with order same or higher than this value is extractor or
        #   higher
        self._validation_order = (
            pyblish.api.ValidatorOrder + PLUGIN_ORDER_OFFSET
        )

        # Qt based main thread processor
        self._main_thread_processor = MainThreadProcess()
        # Plugin iterator
        self._main_thread_iter = None

        # Variables where callbacks are stored
        self._instances_refresh_callback_refs = set()
        self._plugins_refresh_callback_refs = set()

        self._publish_reset_callback_refs = set()
        self._publish_started_callback_refs = set()
        self._publish_validated_callback_refs = set()
        self._publish_stopped_callback_refs = set()

        self._publish_instance_changed_callback_refs = set()
        self._publish_plugin_changed_callback_refs = set()

        # State flags to prevent executing method which is already in progress
        self._resetting_plugins = False
        self._resetting_instances = False

        # Cache of avalon documents
        self._asset_docs_cache = AssetDocsCache(self)

    @property
    def project_name(self):
        """Current project context."""
        return self.dbcon.Session["AVALON_PROJECT"]

    @property
    def dbcon(self):
        """Pointer to AvalonMongoDB in creator context."""
        return self.create_context.dbcon

    @property
    def instances(self):
        """Current instances in create context."""
        return self.create_context.instances

    @property
    def creators(self):
        """All creators loaded in create context."""
        return self.create_context.creators

    @property
    def manual_creators(self):
        """Creators that can be shown in create dialog."""
        return self.create_context.manual_creators

    @property
    def host_is_valid(self):
        """Host is valid for creation."""
        return self.create_context.host_is_valid

    @property
    def publish_plugins(self):
        """Publish plugins."""
        return self.create_context.publish_plugins

    @property
    def plugins_with_defs(self):
        """Publish plugins with possible attribute definitions."""
        return self.create_context.plugins_with_defs

    def _create_reference(self, callback):
        if inspect.ismethod(callback):
            ref = WeakMethod(callback)
        elif callable(callback):
            ref = weakref.ref(callback)
        else:
            raise TypeError("Expected function or method got {}".format(
                str(type(callback))
            ))
        return ref

    def add_instances_refresh_callback(self, callback):
        """Callbacks triggered on instances refresh."""
        ref = self._create_reference(callback)
        self._instances_refresh_callback_refs.add(ref)

    def add_plugins_refresh_callback(self, callback):
        """Callbacks triggered on plugins refresh."""
        ref = self._create_reference(callback)
        self._plugins_refresh_callback_refs.add(ref)

    # --- Publish specific callbacks ---
    def add_publish_reset_callback(self, callback):
        """Callbacks triggered on publishing reset."""
        ref = self._create_reference(callback)
        self._publish_reset_callback_refs.add(ref)

    def add_publish_started_callback(self, callback):
        """Callbacks triggered on publishing start."""
        ref = self._create_reference(callback)
        self._publish_started_callback_refs.add(ref)

    def add_publish_validated_callback(self, callback):
        """Callbacks triggered on passing last possible validation order."""
        ref = self._create_reference(callback)
        self._publish_validated_callback_refs.add(ref)

    def add_instance_change_callback(self, callback):
        """Callbacks triggered before next publish instance process."""
        ref = self._create_reference(callback)
        self._publish_instance_changed_callback_refs.add(ref)

    def add_plugin_change_callback(self, callback):
        """Callbacks triggered before next plugin processing."""
        ref = self._create_reference(callback)
        self._publish_plugin_changed_callback_refs.add(ref)

    def add_publish_stopped_callback(self, callback):
        """Callbacks triggered on publishing stop (any reason)."""
        ref = self._create_reference(callback)
        self._publish_stopped_callback_refs.add(ref)

    def get_asset_docs(self):
        """Get asset documents from cache for whole project."""
        return self._asset_docs_cache.get_asset_docs()

    def get_context_title(self):
        """Get context title for artist shown at the top of main window."""
        context_title = None
        if hasattr(self.host, "get_context_title"):
            context_title = self.host.get_context_title()

        if context_title is None:
            context_title = os.environ.get("AVALON_APP_NAME")
        if context_title is None:
            context_title = os.environ.get("AVALON_APP")

        return context_title

    def get_asset_hierarchy(self):
        """Prepare asset documents into hierarchy."""
        _queue = collections.deque(self.get_asset_docs())

        output = collections.defaultdict(list)
        while _queue:
            asset_doc = _queue.popleft()
            parent_id = asset_doc["data"]["visualParent"]
            output[parent_id].append(asset_doc)
        return output

    def get_task_names_by_asset_names(self, asset_names):
        """Prepare task names by asset name."""
        task_names_by_asset_name = (
            self._asset_docs_cache.get_task_names_by_asset_name()
        )
        result = {}
        for asset_name in asset_names:
            result[asset_name] = set(
                task_names_by_asset_name.get(asset_name) or []
            )
        return result

    def _trigger_callbacks(self, callbacks, *args, **kwargs):
        """Helper method to trigger callbacks stored by their reference."""
        # Trigger reset callbacks
        to_remove = set()
        for ref in callbacks:
            callback = ref()
            if callback:
                callback(*args, **kwargs)
            else:
                to_remove.add(ref)

        for ref in to_remove:
            callbacks.remove(ref)

    def reset(self):
        """Reset everything related to creation and publishing."""
        # Stop publishing
        self.stop_publish()

        # Reset avalon context
        self.create_context.reset_avalon_context()

        self._reset_plugins()
        # Publish part must be reset after plugins
        self._reset_publish()
        self._reset_instances()

    def _reset_plugins(self):
        """Reset to initial state."""
        if self._resetting_plugins:
            return

        self._resetting_plugins = True

        self.create_context.reset_plugins()

        self._resetting_plugins = False

        self._trigger_callbacks(self._plugins_refresh_callback_refs)

    def _reset_instances(self):
        """Reset create instances."""
        if self._resetting_instances:
            return

        self._resetting_instances = True

        self.create_context.reset_context_data()
        with self.create_context.bulk_instances_collection():
            self.create_context.reset_instances()
            self.create_context.execute_autocreators()

        self._resetting_instances = False

        self._trigger_callbacks(self._instances_refresh_callback_refs)

    def get_creator_attribute_definitions(self, instances):
        """Collect creator attribute definitions for multiple instances.

        Args:
            instances(list<CreatedInstance>): List of created instances for
                which should be attribute definitions returned.
        """
        output = []
        _attr_defs = {}
        for instance in instances:
            for attr_def in instance.creator_attribute_defs:
                found_idx = None
                for idx, _attr_def in _attr_defs.items():
                    if attr_def == _attr_def:
                        found_idx = idx
                        break

                value = instance.creator_attributes[attr_def.key]
                if found_idx is None:
                    idx = len(output)
                    output.append((attr_def, [instance], [value]))
                    _attr_defs[idx] = attr_def
                else:
                    item = output[found_idx]
                    item[1].append(instance)
                    item[2].append(value)
        return output

    def get_publish_attribute_definitions(self, instances, include_context):
        """Collect publish attribute definitions for passed instances.

        Args:
            instances(list<CreatedInstance>): List of created instances for
                which should be attribute definitions returned.
            include_context(bool): Add context specific attribute definitions.
        """
        _tmp_items = []
        if include_context:
            _tmp_items.append(self.create_context)

        for instance in instances:
            _tmp_items.append(instance)

        all_defs_by_plugin_name = {}
        all_plugin_values = {}
        for item in _tmp_items:
            for plugin_name, attr_val in item.publish_attributes.items():
                attr_defs = attr_val.attr_defs
                if not attr_defs:
                    continue

                if plugin_name not in all_defs_by_plugin_name:
                    all_defs_by_plugin_name[plugin_name] = attr_val.attr_defs

                if plugin_name not in all_plugin_values:
                    all_plugin_values[plugin_name] = {}

                plugin_values = all_plugin_values[plugin_name]

                for attr_def in attr_defs:
                    if attr_def.key not in plugin_values:
                        plugin_values[attr_def.key] = []
                    attr_values = plugin_values[attr_def.key]

                    value = attr_val[attr_def.key]
                    attr_values.append((item, value))

        output = []
        for plugin in self.plugins_with_defs:
            plugin_name = plugin.__name__
            if plugin_name not in all_defs_by_plugin_name:
                continue
            output.append((
                plugin_name,
                all_defs_by_plugin_name[plugin_name],
                all_plugin_values
            ))
        return output

    def get_icon_for_family(self, family):
        """TODO rename to get creator icon."""
        creator = self.creators.get(family)
        if creator is not None:
            return creator.get_icon()
        return None

    def create(
        self, creator_identifier, subset_name, instance_data, options
    ):
        """Trigger creation and refresh of instances in UI."""
        creator = self.creators[creator_identifier]
        creator.create(subset_name, instance_data, options)

        self._trigger_callbacks(self._instances_refresh_callback_refs)

    def save_changes(self):
        """Save changes that happened during creation."""
        if self.create_context.host_is_valid:
            self.create_context.save_changes()

    def remove_instances(self, instances):
        """Remove instances from create context."""
        # QUESTION Expect that instances are really removed? In that case
        #   neither reset nor saving changes is required.
        self.save_changes()

        self.create_context.remove_instances(instances)

        self._trigger_callbacks(self._instances_refresh_callback_refs)

    # --- Publish specific implementations ---
    @property
    def publish_has_finished(self):
        return self._publish_finished

    @property
    def publish_is_running(self):
        return self._publish_is_running

    @property
    def publish_has_validated(self):
        return self._publish_validated

    @property
    def publish_has_crashed(self):
        return bool(self._publish_error)

    @property
    def publish_has_validation_errors(self):
        return bool(self._publish_validation_errors)

    @property
    def publish_max_progress(self):
        return self._publish_max_progress

    @property
    def publish_progress(self):
        return self._publish_progress

    @property
    def publish_comment_is_set(self):
        return self._publish_comment_is_set

    def get_publish_crash_error(self):
        return self._publish_error

    def get_publish_report(self):
        return self._publish_report.get_report(self.publish_plugins)

    def get_validation_errors(self):
        return self._publish_validation_errors

    def _reset_publish(self):
        self._publish_is_running = False
        self._publish_validated = False
        self._publish_up_validation = False
        self._publish_finished = False
        self._publish_comment_is_set = False
        self._main_thread_processor.clear()
        self._main_thread_iter = self._publish_iterator()
        self._publish_context = pyblish.api.Context()
        # Make sure "comment" is set on publish context
        self._publish_context.data["comment"] = ""
        # Add access to create context during publishing
        # - must not be used for changing CreatedInstances during publishing!
        # QUESTION
        # - pop the key after first collector using it would be safest option?
        self._publish_context.data["create_context"] = self.create_context

        self._publish_report.reset(
            self._publish_context,
            self.create_context.publish_discover_result
        )
        self._publish_validation_errors = []
        self._publish_current_plugin_validation_errors = None
        self._publish_error = None

        self._publish_max_progress = len(self.publish_plugins)
        self._publish_progress = 0

        self._trigger_callbacks(self._publish_reset_callback_refs)

    def set_comment(self, comment):
        self._publish_context.data["comment"] = comment
        self._publish_comment_is_set = True

    def publish(self):
        """Run publishing."""
        self._publish_up_validation = False
        self._start_publish()

    def validate(self):
        """Run publishing and stop after Validation."""
        if self._publish_validated:
            return
        self._publish_up_validation = True
        self._start_publish()

    def _start_publish(self):
        """Start or continue in publishing."""
        if self._publish_is_running:
            return

        # Make sure changes are saved
        self.save_changes()

        self._publish_is_running = True
        self._trigger_callbacks(self._publish_started_callback_refs)
        self._main_thread_processor.start()
        self._publish_next_process()

    def _stop_publish(self):
        """Stop or pause publishing."""
        self._publish_is_running = False
        self._main_thread_processor.stop()
        self._trigger_callbacks(self._publish_stopped_callback_refs)

    def stop_publish(self):
        """Stop publishing process (any reason)."""
        if self._publish_is_running:
            self._stop_publish()

    def run_action(self, plugin, action):
        # TODO handle result in UI
        result = pyblish.plugin.process(
            plugin, self._publish_context, None, action.id
        )
        self._publish_report.add_action_result(action, result)

    def _publish_next_process(self):
        # Validations of progress before using iterator
        # - same conditions may be inside iterator but they may be used
        #   only in specific cases (e.g. when it happens for a first time)

        # There are validation errors and validation has passed
        # - can't make any progress
        if (
            self._publish_validated
            and self._publish_validation_errors
        ):
            item = MainThreadItem(self.stop_publish)

        # Any unexpected error happened
        # - everything should stop
        elif self._publish_error:
            item = MainThreadItem(self.stop_publish)

        # Everything is ok so try to get new processing item
        else:
            item = next(self._main_thread_iter)

        self._main_thread_processor.add_item(item)

    def _publish_iterator(self):
        """Main logic center of publishing.

        Iterator returns `MainThreadItem` objects with callbacks that should
        be processed in the main thread (threaded in future?). Takes care of
        changing the state of the currently processed publish plugin and
        instance, and changes the state of processed orders, e.g. when the
        validation order has passed.

        Also stops publishing if it should stop on validation.

        QUESTION:
        Does validate button still make sense?
        """
        for idx, plugin in enumerate(self.publish_plugins):
            self._publish_progress = idx
            # Add plugin to publish report
            self._publish_report.add_plugin_iter(plugin, self._publish_context)

            # Reset current plugin validations error
            self._publish_current_plugin_validation_errors = None

            # Check if plugin is over validation order
            if not self._publish_validated:
                self._publish_validated = (
                    plugin.order >= self._validation_order
                )
                # Trigger callbacks when validation stage is passed
                if self._publish_validated:
                    self._trigger_callbacks(
                        self._publish_validated_callback_refs
                    )

            # Stop if plugin is over validation order and process
            #   should process up to validation.
            if self._publish_up_validation and self._publish_validated:
                yield MainThreadItem(self.stop_publish)

            # Stop if validation is over and validation errors happened
            if (
                self._publish_validated
                and self._publish_validation_errors
            ):
                yield MainThreadItem(self.stop_publish)

            # Trigger callback that new plugin is going to be processed
            self._trigger_callbacks(
                self._publish_plugin_changed_callback_refs, plugin
            )
            # Plugin is instance plugin
            if plugin.__instanceEnabled__:
                instances = pyblish.logic.instances_by_plugin(
                    self._publish_context, plugin
                )
                if not instances:
                    self._publish_report.set_plugin_skipped()
                    continue

                for instance in instances:
                    if instance.data.get("publish") is False:
                        continue

                    self._trigger_callbacks(
                        self._publish_instance_changed_callback_refs,
                        self._publish_context,
                        instance
                    )
                    yield MainThreadItem(
                        self._process_and_continue, plugin, instance
                    )
            else:
                families = collect_families_from_instances(
                    self._publish_context, only_active=True
                )
                plugins = pyblish.logic.plugins_by_families(
                    [plugin], families
                )
                if plugins:
                    self._trigger_callbacks(
                        self._publish_instance_changed_callback_refs,
                        self._publish_context,
                        None
                    )
                    yield MainThreadItem(
                        self._process_and_continue, plugin, None
                    )
                else:
                    self._publish_report.set_plugin_skipped()

        # Cleanup of publishing process
        self._publish_finished = True
        self._publish_progress = self._publish_max_progress
        yield MainThreadItem(self.stop_publish)

    def _add_validation_error(self, result):
        if self._publish_current_plugin_validation_errors is None:
            self._publish_current_plugin_validation_errors = {
                "plugin": result["plugin"],
                "errors": []
            }
            self._publish_validation_errors.append(
                self._publish_current_plugin_validation_errors
            )

        self._publish_current_plugin_validation_errors["errors"].append({
            "exception": result["error"],
            "instance": result["instance"]
        })

    def _process_and_continue(self, plugin, instance):
        result = pyblish.plugin.process(
            plugin, self._publish_context, instance
        )

        self._publish_report.add_result(result)

        exception = result.get("error")
        if exception:
            if (
                isinstance(exception, PublishValidationError)
                and not self._publish_validated
            ):
                self._add_validation_error(result)

            else:
                self._publish_error = exception

        self._publish_next_process()


def collect_families_from_instances(instances, only_active=False):
    """Collect all families for passed publish instances.

    Args:
        instances(list<pyblish.api.Instance>): List of publish instances from
            which are families collected.
        only_active(bool): Return families only for active instances.
    """
    all_families = set()
    for instance in instances:
        if only_active:
            if instance.data.get("publish") is False:
                continue
        family = instance.data.get("family")
        if family:
            all_families.add(family)

        families = instance.data.get("families") or tuple()
        for family in families:
            all_families.add(family)

    return list(all_families)
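
The controller's callback registry stores weak references (`WeakMethod` for bound methods), so UI widgets can register callbacks without being kept alive by the controller; dead references are silently pruned in `_trigger_callbacks`. A minimal sketch of that behaviour (the listener class is hypothetical, and constructing `PublisherController` assumes a registered host):

```python
import gc


class RefreshListener:
    def on_refresh(self):
        print("instances refreshed")


controller = PublisherController()
listener = RefreshListener()
controller.add_instances_refresh_callback(listener.on_refresh)

del listener
gc.collect()
# The dead WeakMethod is pruned the next time callbacks are triggered;
# no call is made and nothing keeps the listener alive.
```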
14 openpype/tools/publisher/publish_report_viewer/__init__.py (new file)

@@ -0,0 +1,14 @@
from .widgets import (
    PublishReportViewerWidget
)

from .window import (
    PublishReportViewerWindow
)


__all__ = (
    "PublishReportViewerWidget",

    "PublishReportViewerWindow",
)
20 openpype/tools/publisher/publish_report_viewer/constants.py (new file)

@@ -0,0 +1,20 @@
from Qt import QtCore


ITEM_ID_ROLE = QtCore.Qt.UserRole + 1
ITEM_IS_GROUP_ROLE = QtCore.Qt.UserRole + 2
ITEM_LABEL_ROLE = QtCore.Qt.UserRole + 3
ITEM_ERRORED_ROLE = QtCore.Qt.UserRole + 4
PLUGIN_SKIPPED_ROLE = QtCore.Qt.UserRole + 5
PLUGIN_PASSED_ROLE = QtCore.Qt.UserRole + 6
INSTANCE_REMOVED_ROLE = QtCore.Qt.UserRole + 7


__all__ = (
    "ITEM_ID_ROLE",
    "ITEM_IS_GROUP_ROLE",
    "ITEM_LABEL_ROLE",
    "ITEM_ERRORED_ROLE",
    "PLUGIN_SKIPPED_ROLE",
    "INSTANCE_REMOVED_ROLE"
)
331
openpype/tools/publisher/publish_report_viewer/delegates.py
Normal file
331
openpype/tools/publisher/publish_report_viewer/delegates.py
Normal file
|
|
@ -0,0 +1,331 @@
|
|||
import collections
|
||||
from Qt import QtWidgets, QtCore, QtGui
|
||||
from .constants import (
|
||||
ITEM_IS_GROUP_ROLE,
|
||||
ITEM_ERRORED_ROLE,
|
||||
PLUGIN_SKIPPED_ROLE,
|
||||
PLUGIN_PASSED_ROLE,
|
||||
INSTANCE_REMOVED_ROLE
|
||||
)
|
||||
|
||||
colors = {
|
||||
"error": QtGui.QColor("#ff4a4a"),
|
||||
"warning": QtGui.QColor("#ff9900"),
|
||||
"ok": QtGui.QColor("#77AE24"),
|
||||
"active": QtGui.QColor("#99CEEE"),
|
||||
"idle": QtCore.Qt.white,
|
||||
"inactive": QtGui.QColor("#888"),
|
||||
"hover": QtGui.QColor(255, 255, 255, 5),
|
||||
"selected": QtGui.QColor(255, 255, 255, 10),
|
||||
"outline": QtGui.QColor("#333"),
|
||||
"group": QtGui.QColor("#21252B"),
|
||||
"group-hover": QtGui.QColor("#3c3c3c"),
|
||||
"group-selected-hover": QtGui.QColor("#555555")
|
||||
}
|
||||
|
||||
|
||||
class GroupItemDelegate(QtWidgets.QStyledItemDelegate):
|
||||
"""Generic delegate for instance header"""
|
||||
|
||||
_item_icons_by_name_and_size = collections.defaultdict(dict)
|
||||
|
||||
_minus_pixmaps = {}
|
||||
_plus_pixmaps = {}
|
||||
_path_stroker = None
|
||||
|
||||
_item_pix_offset_ratio = 1.0 / 5.0
|
||||
_item_border_size = 1.0 / 7.0
|
||||
_group_pix_offset_ratio = 1.0 / 3.0
|
||||
_group_pix_stroke_size_ratio = 1.0 / 7.0
|
||||
|
||||
@classmethod
|
||||
def _get_path_stroker(cls):
|
||||
if cls._path_stroker is None:
|
||||
path_stroker = QtGui.QPainterPathStroker()
|
||||
path_stroker.setCapStyle(QtCore.Qt.RoundCap)
|
||||
path_stroker.setJoinStyle(QtCore.Qt.RoundJoin)
|
||||
|
||||
cls._path_stroker = path_stroker
|
||||
return cls._path_stroker
|
||||
|
||||
    @classmethod
    def _get_plus_pixmap(cls, size):
        # Return the cached pixmap if one was already rendered at this size
        pix = cls._plus_pixmaps.get(size)
        if pix is not None:
            return pix

        offset = int(size * cls._group_pix_offset_ratio)
        pnt_1 = QtCore.QPoint(offset, int(size / 2))
        pnt_2 = QtCore.QPoint(size - offset, int(size / 2))
        pnt_3 = QtCore.QPoint(int(size / 2), offset)
        pnt_4 = QtCore.QPoint(int(size / 2), size - offset)
        # Horizontal and vertical strokes of the "+" sign
        path_1 = QtGui.QPainterPath(pnt_1)
        path_1.lineTo(pnt_2)
        path_2 = QtGui.QPainterPath(pnt_3)
        path_2.lineTo(pnt_4)

        path_stroker = cls._get_path_stroker()
        path_stroker.setWidth(size * cls._group_pix_stroke_size_ratio)
        stroked_path_1 = path_stroker.createStroke(path_1)
        stroked_path_2 = path_stroker.createStroke(path_2)

        pix = QtGui.QPixmap(size, size)
        pix.fill(QtCore.Qt.transparent)

        painter = QtGui.QPainter(pix)
        painter.setRenderHint(QtGui.QPainter.Antialiasing)
        painter.setPen(QtCore.Qt.transparent)
        painter.setBrush(QtCore.Qt.white)
        painter.drawPath(stroked_path_1)
        painter.drawPath(stroked_path_2)
        painter.end()

        cls._plus_pixmaps[size] = pix

        return pix
    @classmethod
    def _get_minus_pixmap(cls, size):
        pix = cls._minus_pixmaps.get(size)
        if pix is not None:
            return pix

        offset = int(size * cls._group_pix_offset_ratio)
        pnt_1 = QtCore.QPoint(offset, int(size / 2))
        pnt_2 = QtCore.QPoint(size - offset, int(size / 2))
        # Single horizontal stroke of the "-" sign
        path = QtGui.QPainterPath(pnt_1)
        path.lineTo(pnt_2)
        path_stroker = cls._get_path_stroker()
        path_stroker.setWidth(size * cls._group_pix_stroke_size_ratio)
        stroked_path = path_stroker.createStroke(path)

        pix = QtGui.QPixmap(size, size)
        pix.fill(QtCore.Qt.transparent)

        painter = QtGui.QPainter(pix)
        painter.setRenderHint(QtGui.QPainter.Antialiasing)
        painter.setPen(QtCore.Qt.transparent)
        painter.setBrush(QtCore.Qt.white)
        painter.drawPath(stroked_path)
        painter.end()

        cls._minus_pixmaps[size] = pix

        return pix
    @classmethod
    def _get_icon_color(cls, name):
        if name == "error":
            return QtGui.QColor(colors["error"])
        return QtGui.QColor(QtCore.Qt.white)

    @classmethod
    def _get_icon(cls, name, size):
        icons_by_size = cls._item_icons_by_name_and_size[name]
        if icons_by_size and size in icons_by_size:
            return icons_by_size[size]

        offset = int(size * cls._item_pix_offset_ratio)
        offset_size = size - (2 * offset)
        pix = QtGui.QPixmap(size, size)
        pix.fill(QtCore.Qt.transparent)

        painter = QtGui.QPainter(pix)
        painter.setRenderHint(QtGui.QPainter.Antialiasing)

        draw_ellipse = True
        if name == "error":
            # Filled red dot
            color = QtGui.QColor(colors["error"])
            painter.setPen(QtCore.Qt.NoPen)
            painter.setBrush(color)

        elif name == "skipped":
            # Hollow white circle
            color = QtGui.QColor(QtCore.Qt.white)
            pen = QtGui.QPen(color)
            pen.setWidth(int(size * cls._item_border_size))
            painter.setPen(pen)
            painter.setBrush(QtCore.Qt.transparent)

        elif name == "passed":
            # Filled green dot
            color = QtGui.QColor(colors["ok"])
            painter.setPen(QtCore.Qt.NoPen)
            painter.setBrush(color)

        elif name == "removed":
            # White cross instead of a dot
            draw_ellipse = False

            # Keep coordinates integer for QPoint
            offset = int(offset * 1.5)
            p1 = QtCore.QPoint(offset, offset)
            p2 = QtCore.QPoint(size - offset, size - offset)
            p3 = QtCore.QPoint(offset, size - offset)
            p4 = QtCore.QPoint(size - offset, offset)

            pen = QtGui.QPen(QtCore.Qt.white)
            pen.setWidth(int(offset_size / 4))
            pen.setCapStyle(QtCore.Qt.RoundCap)
            painter.setPen(pen)
            painter.setBrush(QtCore.Qt.transparent)
            painter.drawLine(p1, p2)
            painter.drawLine(p3, p4)

        else:
            color = QtGui.QColor(QtCore.Qt.white)
            painter.setPen(QtCore.Qt.NoPen)
            painter.setBrush(color)

        if draw_ellipse:
            painter.drawEllipse(offset, offset, offset_size, offset_size)

        painter.end()

        cls._item_icons_by_name_and_size[name][size] = pix

        return pix
    def paint(self, painter, option, index):
        if index.data(ITEM_IS_GROUP_ROLE):
            self.group_item_paint(painter, option, index)
        else:
            self.item_paint(painter, option, index)

    def item_paint(self, painter, option, index):
        self.initStyleOption(option, index)

        widget = option.widget
        if widget:
            style = widget.style()
        else:
            style = QtWidgets.QApplication.style()

        style.proxy().drawPrimitive(
            style.PE_PanelItemViewItem, option, painter, widget
        )
        _rect = style.proxy().subElementRect(
            style.SE_ItemViewItemText, option, widget
        )
        bg_rect = QtCore.QRectF(option.rect)
        bg_rect.setY(_rect.y())
        bg_rect.setHeight(_rect.height())

        expander_rect = QtCore.QRectF(bg_rect)
        expander_rect.setWidth(expander_rect.height() + 5)

        label_rect = QtCore.QRectF(
            expander_rect.x() + expander_rect.width(),
            expander_rect.y(),
            bg_rect.width() - expander_rect.width(),
            expander_rect.height()
        )

        # Pixmap sizes must be integers
        icon_size = int(expander_rect.height())
        if index.data(ITEM_ERRORED_ROLE):
            expander_icon = self._get_icon("error", icon_size)
        elif index.data(PLUGIN_SKIPPED_ROLE):
            expander_icon = self._get_icon("skipped", icon_size)
        elif index.data(PLUGIN_PASSED_ROLE):
            expander_icon = self._get_icon("passed", icon_size)
        elif index.data(INSTANCE_REMOVED_ROLE):
            expander_icon = self._get_icon("removed", icon_size)
        else:
            expander_icon = self._get_icon("", icon_size)

        label = index.data(QtCore.Qt.DisplayRole)
        label = option.fontMetrics.elidedText(
            label, QtCore.Qt.ElideRight, int(label_rect.width())
        )

        painter.save()
        # Draw icon
        pix_point = QtCore.QPoint(
            int(expander_rect.center().x()) - int(expander_icon.width() / 2),
            int(expander_rect.top())
        )
        painter.drawPixmap(pix_point, expander_icon)

        # Draw label
        painter.setFont(option.font)
        painter.drawText(label_rect, QtCore.Qt.AlignVCenter, label)

        # Ok, we're done, tidy up.
        painter.restore()
    def group_item_paint(self, painter, option, index):
        """Paint group header: expand/collapse indicator and label."""
        self.initStyleOption(option, index)

        widget = option.widget
        if widget:
            style = widget.style()
        else:
            style = QtWidgets.QApplication.style()
        _rect = style.proxy().subElementRect(
            style.SE_ItemViewItemText, option, widget
        )

        bg_rect = QtCore.QRectF(option.rect)
        bg_rect.setY(_rect.y())
        bg_rect.setHeight(_rect.height())

        # Pixmap sizes must be integers
        expander_height = int(bg_rect.height())
        expander_width = expander_height + 5
        expander_y_offset = expander_height % 2
        expander_height -= expander_y_offset
        expander_rect = QtCore.QRectF(
            bg_rect.x(),
            bg_rect.y() + expander_y_offset,
            expander_width,
            expander_height
        )

        label_rect = QtCore.QRectF(
            bg_rect.x() + expander_width,
            bg_rect.y(),
            bg_rect.width() - expander_width,
            bg_rect.height()
        )

        # Rounded "pill" background behind the whole group row
        bg_path = QtGui.QPainterPath()
        radius = (bg_rect.height() / 2) - 0.01
        bg_path.addRoundedRect(bg_rect, radius, radius)

        painter.fillPath(bg_path, colors["group"])

        selected = option.state & QtWidgets.QStyle.State_Selected
        hovered = option.state & QtWidgets.QStyle.State_MouseOver

        if selected and hovered:
            painter.fillPath(bg_path, colors["selected"])
        elif hovered:
            painter.fillPath(bg_path, colors["hover"])

        # The delegate is expected to be parented to the view
        expanded = self.parent().isExpanded(index)
        if expanded:
            expander_icon = self._get_minus_pixmap(expander_height)
        else:
            expander_icon = self._get_plus_pixmap(expander_height)

        label = index.data(QtCore.Qt.DisplayRole)
        label = option.fontMetrics.elidedText(
            label, QtCore.Qt.ElideRight, int(label_rect.width())
        )

        # Maintain reference to state, so we can restore it once we're done
        painter.save()
        pix_point = QtCore.QPoint(
            int(expander_rect.center().x()) - int(expander_icon.width() / 2),
            int(expander_rect.top())
        )
        painter.drawPixmap(pix_point, expander_icon)

        # Draw label
        painter.setFont(option.font)
        painter.drawText(label_rect, QtCore.Qt.AlignVCenter, label)

        # Ok, we're done, tidy up.
        painter.restore()
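Since the delegate reads expansion state via `self.parent().isExpanded(index)` and paints hover feedback from `QStyle.State_MouseOver`, it has to be parented to the tree view it decorates, and the view needs mouse tracking for hover states to register. A minimal wiring sketch under those assumptions; the model content is illustrative only:

```python
from Qt import QtWidgets, QtGui

from openpype.tools.publisher.publish_report_viewer.constants import (
    ITEM_IS_GROUP_ROLE
)
from openpype.tools.publisher.publish_report_viewer.delegates import (
    GroupItemDelegate
)

# Requires a running QtWidgets.QApplication, as in the earlier sketch
view = QtWidgets.QTreeView()
model = QtGui.QStandardItemModel()
view.setModel(model)

group_item = QtGui.QStandardItem("Validators")
# Mark the row as a group so paint() routes it to group_item_paint()
group_item.setData(True, ITEM_IS_GROUP_ROLE)
model.appendRow(group_item)

# Parent the delegate to the view: group_item_paint() asks the view
# whether the index is expanded to pick the plus/minus pixmap.
view.setItemDelegate(GroupItemDelegate(view))
# Without mouse tracking, State_MouseOver is never set during paint
view.setMouseTracking(True)
view.show()
```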