Merge branch 'develop' into enhancement/OP-6154_Publishing-Luts

Commit 627363d476: 95 changed files with 2,100 additions and 380 deletions.
.github/ISSUE_TEMPLATE/bug_report.yml (vendored, 6 changes)

@@ -35,6 +35,9 @@ body:
      label: Version
      description: What version are you running? Look to OpenPype Tray
      options:
+        - 3.17.2-nightly.4
+        - 3.17.2-nightly.3
+        - 3.17.2-nightly.2
        - 3.17.2-nightly.1
        - 3.17.1
        - 3.17.1-nightly.3

@@ -132,9 +135,6 @@ body:
        - 3.14.11-nightly.1
        - 3.14.10
        - 3.14.10-nightly.9
-        - 3.14.10-nightly.8
-        - 3.14.10-nightly.7
-        - 3.14.10-nightly.6
      validations:
        required: true
  - type: dropdown
@@ -290,11 +290,15 @@ def run(script):
              "--setup_only",
              help="Only create dbs, do not run tests",
              default=None)
+@click.option("--mongo_url",
+              help="MongoDB for testing.",
+              default=None)
def runtests(folder, mark, pyargs, test_data_folder, persist, app_variant,
-             timeout, setup_only):
+             timeout, setup_only, mongo_url):
    """Run all automatic tests after proper initialization via start.py"""
    PypeCommands().run_tests(folder, mark, pyargs, test_data_folder,
-                             persist, app_variant, timeout, setup_only)
+                             persist, app_variant, timeout, setup_only,
+                             mongo_url)


@main.command(help="DEPRECATED - run sync server")
@@ -422,7 +422,7 @@ def failed_json_default(value):


class ServerCreateOperation(CreateOperation):
-    """Opeartion to create an entity.
+    """Operation to create an entity.

    Args:
        project_name (str): On which project operation will happen.

@@ -634,7 +634,7 @@ class ServerUpdateOperation(UpdateOperation):


class ServerDeleteOperation(DeleteOperation):
-    """Opeartion to delete an entity.
+    """Operation to delete an entity.

    Args:
        project_name (str): On which project operation will happen.

@@ -647,7 +647,7 @@ class ServerDeleteOperation(DeleteOperation):
        self._session = session

        if entity_type == "asset":
-            entity_type == "folder"
+            entity_type = "folder"

        elif entity_type == "hero_version":
            entity_type = "version"
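The `entity_type` hunk above fixes a subtle bug worth spelling out: `==` is a comparison, so the original line evaluated and discarded a boolean instead of remapping the entity type. A minimal standalone sketch of the difference:

    # Sketch of the bug fixed above: `==` compares and discards the result,
    # so the "asset" -> "folder" remap silently never happened.
    entity_type = "asset"
    entity_type == "folder"   # no-op comparison, value stays "asset"
    assert entity_type == "asset"

    entity_type = "folder"    # the corrected assignment
    assert entity_type == "folder"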
@@ -2,7 +2,7 @@ import subprocess
from openpype.lib.applications import PreLaunchHook, LaunchTypes


-class LaunchFoundryAppsWindows(PreLaunchHook):
+class LaunchNewConsoleApps(PreLaunchHook):
    """Foundry applications have specific way how to launch them.

    Nuke is executed "like" python process so it is required to pass

@@ -13,13 +13,15 @@ class LaunchFoundryAppsWindows(PreLaunchHook):

    # Should be as last hook because must change launch arguments to string
    order = 1000
-    app_groups = {"nuke", "nukeassist", "nukex", "hiero", "nukestudio"}
+    app_groups = {
+        "nuke", "nukeassist", "nukex", "hiero", "nukestudio", "mayapy"
+    }
    platforms = {"windows"}
    launch_types = {LaunchTypes.local}

    def execute(self):
        # Change `creationflags` to CREATE_NEW_CONSOLE
-        # - on Windows nuke will create new window using its console
+        # - on Windows some apps will create new window using its console
        # Set `stdout` and `stderr` to None so new created console does not
        # have redirected output to DEVNULL in build
        self.launch_context.kwargs.update({
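The hunk display ends mid-statement above, so for context here is a hedged sketch of the kwargs the comments describe; the exact dictionary in the source is not shown, and the keys below follow only what the comments state:

    # Hedged sketch: give the spawned process its own console window and do
    # not redirect its output. CREATE_NEW_CONSOLE exists only on Windows.
    import subprocess

    kwargs = {"stdout": None, "stderr": None}
    if hasattr(subprocess, "CREATE_NEW_CONSOLE"):
        kwargs["creationflags"] = subprocess.CREATE_NEW_CONSOLE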
@@ -31,7 +31,7 @@ class InstallPySideToBlender(PreLaunchHook):

    def inner_execute(self):
        # Get blender's python directory
-        version_regex = re.compile(r"^[2-3]\.[0-9]+$")
+        version_regex = re.compile(r"^[2-4]\.[0-9]+$")

        platform = system().lower()
        executable = self.launch_context.executable.executable_path
@@ -26,8 +26,7 @@ class CacheModelLoader(plugin.AssetLoader):
    Note:
        At least for now it only supports Alembic files.
    """

-    families = ["model", "pointcache"]
+    families = ["model", "pointcache", "animation"]
    representations = ["abc"]

    label = "Load Alembic"

@@ -53,16 +52,12 @@ class CacheModelLoader(plugin.AssetLoader):
    def _process(self, libpath, asset_group, group_name):
        plugin.deselect_all()

-        collection = bpy.context.view_layer.active_layer_collection.collection
-
        relative = bpy.context.preferences.filepaths.use_relative_paths
        bpy.ops.wm.alembic_import(
            filepath=libpath,
            relative_path=relative
        )

-        parent = bpy.context.scene.collection
-
        imported = lib.get_selection()

        # Children must be linked before parents,

@@ -79,6 +74,10 @@ class CacheModelLoader(plugin.AssetLoader):
        objects.reverse()

        for obj in objects:
+            # Unlink the object from all collections
+            collections = obj.users_collection
+            for collection in collections:
+                collection.objects.unlink(obj)
            name = obj.name
            obj.name = f"{group_name}:{name}"
            if obj.type != 'EMPTY':

@@ -90,7 +89,7 @@ class CacheModelLoader(plugin.AssetLoader):
                    material_slot.material.name = f"{group_name}:{name_mat}"

            if not obj.get(AVALON_PROPERTY):
-                obj[AVALON_PROPERTY] = dict()
+                obj[AVALON_PROPERTY] = {}

            avalon_info = obj[AVALON_PROPERTY]
            avalon_info.update({"container_name": group_name})

@@ -99,6 +98,18 @@ class CacheModelLoader(plugin.AssetLoader):

        return objects

+    def _link_objects(self, objects, collection, containers, asset_group):
+        # Link the imported objects to any collection where the asset group is
+        # linked to, except the AVALON_CONTAINERS collection
+        group_collections = [
+            collection
+            for collection in asset_group.users_collection
+            if collection != containers]
+
+        for obj in objects:
+            for collection in group_collections:
+                collection.objects.link(obj)
+
    def process_asset(
        self, context: dict, name: str, namespace: Optional[str] = None,
        options: Optional[Dict] = None

@@ -120,18 +131,21 @@ class CacheModelLoader(plugin.AssetLoader):
        group_name = plugin.asset_name(asset, subset, unique_number)
        namespace = namespace or f"{asset}_{unique_number}"

-        avalon_containers = bpy.data.collections.get(AVALON_CONTAINERS)
-        if not avalon_containers:
-            avalon_containers = bpy.data.collections.new(
-                name=AVALON_CONTAINERS)
-            bpy.context.scene.collection.children.link(avalon_containers)
+        containers = bpy.data.collections.get(AVALON_CONTAINERS)
+        if not containers:
+            containers = bpy.data.collections.new(name=AVALON_CONTAINERS)
+            bpy.context.scene.collection.children.link(containers)

        asset_group = bpy.data.objects.new(group_name, object_data=None)
-        avalon_containers.objects.link(asset_group)
+        containers.objects.link(asset_group)

        objects = self._process(libpath, asset_group, group_name)

-        bpy.context.scene.collection.objects.link(asset_group)
+        # Link the asset group to the active collection
+        collection = bpy.context.view_layer.active_layer_collection.collection
+        collection.objects.link(asset_group)
+
+        self._link_objects(objects, asset_group, containers, asset_group)

        asset_group[AVALON_PROPERTY] = {
            "schema": "openpype:container-2.0",

@@ -207,7 +221,11 @@ class CacheModelLoader(plugin.AssetLoader):
        mat = asset_group.matrix_basis.copy()
        self._remove(asset_group)

-        self._process(str(libpath), asset_group, object_name)
+        objects = self._process(str(libpath), asset_group, object_name)
+
+        containers = bpy.data.collections.get(AVALON_CONTAINERS)
+        self._link_objects(objects, asset_group, containers, asset_group)

        asset_group.matrix_basis = mat

        metadata["libpath"] = str(libpath)
@@ -244,7 +244,7 @@ class BlendLoader(plugin.AssetLoader):
        for parent in parent_containers:
            parent.get(AVALON_PROPERTY)["members"] = list(filter(
                lambda i: i not in members,
-                parent.get(AVALON_PROPERTY)["members"]))
+                parent.get(AVALON_PROPERTY).get("members", [])))

        for attr in attrs:
            for data in getattr(bpy.data, attr):
@@ -123,6 +123,9 @@ class CreateSaver(NewCreator):
    def _imprint(self, tool, data):
        # Save all data in a "openpype.{key}" = value data

+        # Instance id is the tool's name so we don't need to imprint as data
+        data.pop("instance_id", None)
+
        active = data.pop("active", None)
        if active is not None:
            # Use active value to set the passthrough state

@@ -188,6 +191,10 @@ class CreateSaver(NewCreator):
        passthrough = attrs["TOOLB_PassThrough"]
        data["active"] = not passthrough

+        # Override publisher's UUID generation because tool names are
+        # already unique in Fusion in a comp
+        data["instance_id"] = tool.Name
+
        return data

    def get_pre_create_attr_defs(self):
@@ -1,6 +1,7 @@
# -*- coding: utf-8 -*-
import sys
import os
+import errno
import re
import uuid
import logging

@@ -9,10 +10,15 @@ import json

import six

+from openpype.lib import StringTemplate
from openpype.client import get_asset_by_name
+from openpype.settings import get_current_project_settings
from openpype.pipeline import get_current_project_name, get_current_asset_name
-from openpype.pipeline.context_tools import get_current_project_asset
+from openpype.pipeline.context_tools import (
+    get_current_context_template_data,
+    get_current_project_asset
+)
+from openpype.widgets import popup
import hou

@@ -160,8 +166,6 @@ def validate_fps():

    if current_fps != fps:

-        from openpype.widgets import popup
-
        # Find main window
        parent = hou.ui.mainQtWindow()
        if parent is None:

@@ -747,3 +751,99 @@ def get_camera_from_container(container):

    assert len(cameras) == 1, "Camera instance must have only one camera"
    return cameras[0]
+
+
+def get_context_var_changes():
+    """get context var changes."""
+
+    houdini_vars_to_update = {}
+
+    project_settings = get_current_project_settings()
+    houdini_vars_settings = \
+        project_settings["houdini"]["general"]["update_houdini_var_context"]
+
+    if not houdini_vars_settings["enabled"]:
+        return houdini_vars_to_update
+
+    houdini_vars = houdini_vars_settings["houdini_vars"]
+
+    # No vars specified - nothing to do
+    if not houdini_vars:
+        return houdini_vars_to_update
+
+    # Get Template data
+    template_data = get_current_context_template_data()
+
+    # Set Houdini Vars
+    for item in houdini_vars:
+        # For consistency reasons we always force all vars to be uppercase
+        # Also remove any leading, and trailing whitespaces.
+        var = item["var"].strip().upper()
+
+        # get and resolve template in value
+        item_value = StringTemplate.format_template(
+            item["value"],
+            template_data
+        )
+
+        if var == "JOB" and item_value == "":
+            # sync $JOB to $HIP if $JOB is empty
+            item_value = os.environ["HIP"]
+
+        if item["is_directory"]:
+            item_value = item_value.replace("\\", "/")
+
+        current_value = hou.hscript("echo -n `${}`".format(var))[0]
+
+        if current_value != item_value:
+            houdini_vars_to_update[var] = (
+                current_value, item_value, item["is_directory"]
+            )
+
+    return houdini_vars_to_update
+
+
+def update_houdini_vars_context():
+    """Update asset context variables"""
+
+    for var, (_old, new, is_directory) in get_context_var_changes().items():
+        if is_directory:
+            try:
+                os.makedirs(new)
+            except OSError as e:
+                if e.errno != errno.EEXIST:
+                    print(
+                        "Failed to create ${} dir. Maybe due to "
+                        "insufficient permissions.".format(var)
+                    )
+
+        hou.hscript("set {}={}".format(var, new))
+        os.environ[var] = new
+        print("Updated ${} to {}".format(var, new))
+
+
+def update_houdini_vars_context_dialog():
+    """Show pop-up to update asset context variables"""
+    update_vars = get_context_var_changes()
+    if not update_vars:
+        # Nothing to change
+        print("Nothing to change, Houdini vars are already up to date.")
+        return
+
+    message = "\n".join(
+        "${}: {} -> {}".format(var, old or "None", new or "None")
+        for var, (old, new, _is_directory) in update_vars.items()
+    )
+
+    # TODO: Use better UI!
+    parent = hou.ui.mainQtWindow()
+    dialog = popup.Popup(parent=parent)
+    dialog.setModal(True)
+    dialog.setWindowTitle("Houdini scene has outdated asset variables")
+    dialog.setMessage(message)
+    dialog.setButtonText("Fix")
+
+    # on_show is the Fix button clicked callback
+    dialog.on_clicked.connect(update_houdini_vars_context)
+
+    dialog.show()
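To make the settings contract of `get_context_var_changes` concrete, here is a pure-Python sketch of the resolution step. The settings entry is illustrative, and plain `str.format` stands in for OpenPype's `StringTemplate.format_template`; both stand-ins are assumptions, not the shipped code:

    # Hypothetical entry mirroring project_settings["houdini"]["general"]
    # ["update_houdini_var_context"]["houdini_vars"]:
    houdini_vars = [
        {"var": " job ", "value": "{root}/{project}/{asset}",
         "is_directory": True},
    ]

    # Stand-in for get_current_context_template_data(); keys are illustrative.
    template_data = {"root": "P:/projects", "project": "demo", "asset": "hero"}

    for item in houdini_vars:
        var = item["var"].strip().upper()              # normalized to "JOB"
        value = item["value"].format(**template_data)  # template resolution
        if item["is_directory"]:
            value = value.replace("\\", "/")
        print(var, "->", value)                        # JOB -> P:/projects/demo/hero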
@@ -300,6 +300,9 @@ def on_save():

    log.info("Running callback on save..")

+    # update houdini vars
+    lib.update_houdini_vars_context_dialog()
+
    nodes = lib.get_id_required_nodes()
    for node, new_id in lib.generate_ids(nodes):
        lib.set_id(node, new_id, overwrite=False)

@@ -335,6 +338,9 @@ def on_open():

    log.info("Running callback on open..")

+    # update houdini vars
+    lib.update_houdini_vars_context_dialog()
+
    # Validate FPS after update_task_from_path to
    # ensure it is using correct FPS for the asset
    lib.validate_fps()

@@ -399,6 +405,7 @@ def _set_context_settings():
    """

    lib.reset_framerange()
+    lib.update_houdini_vars_context()


def on_pyblish_instance_toggled(instance, new_value, old_value):
@@ -187,13 +187,14 @@ class HoudiniCreator(NewCreator, HoudiniCreatorBase):
            self.customize_node_look(instance_node)

            instance_data["instance_node"] = instance_node.path()
+            instance_data["instance_id"] = instance_node.path()
            instance = CreatedInstance(
                self.family,
                subset_name,
                instance_data,
                self)
            self._add_instance_to_context(instance)
-            imprint(instance_node, instance.data_to_store())
+            self.imprint(instance_node, instance.data_to_store())
            return instance

        except hou.Error as er:

@@ -222,25 +223,41 @@ class HoudiniCreator(NewCreator, HoudiniCreatorBase):
        self.cache_subsets(self.collection_shared_data)
        for instance in self.collection_shared_data[
                "houdini_cached_subsets"].get(self.identifier, []):
+            node_data = read(instance)
+
+            # Node paths are always the full node path since that is unique
+            # Because it's the node's path it's not written into attributes
+            # but explicitly collected
+            node_path = instance.path()
+            node_data["instance_id"] = node_path
+            node_data["instance_node"] = node_path
+
            created_instance = CreatedInstance.from_existing(
-                read(instance), self
+                node_data, self
            )
            self._add_instance_to_context(created_instance)

    def update_instances(self, update_list):
        for created_inst, changes in update_list:
            instance_node = hou.node(created_inst.get("instance_node"))

            new_values = {
                key: changes[key].new_value
                for key in changes.changed_keys
            }
-            imprint(
+            self.imprint(
                instance_node,
                new_values,
                update=True
            )

+    def imprint(self, node, values, update=False):
+        # Never store instance node and instance id since that data comes
+        # from the node's path
+        values.pop("instance_node", None)
+        values.pop("instance_id", None)
+        imprint(node, values, update=update)
+
    def remove_instances(self, instances):
        """Remove specified instance from the scene.
@@ -86,6 +86,14 @@ openpype.hosts.houdini.api.lib.reset_framerange()
]]></scriptCode>
  </scriptItem>

+  <scriptItem id="update_context_vars">
+    <label>Update Houdini Vars</label>
+    <scriptCode><![CDATA[
+import openpype.hosts.houdini.api.lib
+openpype.hosts.houdini.api.lib.update_houdini_vars_context_dialog()
+]]></scriptCode>
+  </scriptItem>
+
  <separatorItem/>
  <scriptItem id="experimental_tools">
    <label>Experimental tools...</label>
@@ -6,6 +6,7 @@ from pyblish.api import Instance

from maya import cmds  # noqa
import maya.mel as mel  # noqa
+from openpype.hosts.maya.api.lib import maintained_selection


class FBXExtractor:

@@ -53,7 +54,6 @@ class FBXExtractor:
            "bakeComplexEnd": int,
            "bakeComplexStep": int,
            "bakeResampleAnimation": bool,
-            "animationOnly": bool,
            "useSceneName": bool,
            "quaternion": str,  # "euler"
            "shapes": bool,

@@ -63,7 +63,10 @@ class FBXExtractor:
            "embeddedTextures": bool,
            "inputConnections": bool,
            "upAxis": str,  # x, y or z,
-            "triangulate": bool
+            "triangulate": bool,
+            "fileVersion": str,
+            "skeletonDefinitions": bool,
+            "referencedAssetsContent": bool
        }

    @property

@@ -94,7 +97,6 @@ class FBXExtractor:
            "bakeComplexEnd": end_frame,
            "bakeComplexStep": 1,
            "bakeResampleAnimation": True,
-            "animationOnly": False,
            "useSceneName": False,
            "quaternion": "euler",
            "shapes": True,

@@ -104,7 +106,10 @@ class FBXExtractor:
            "embeddedTextures": False,
            "inputConnections": True,
            "upAxis": "y",
-            "triangulate": False
+            "triangulate": False,
+            "fileVersion": "FBX202000",
+            "skeletonDefinitions": False,
+            "referencedAssetsContent": False
        }

    def __init__(self, log=None):

@@ -198,5 +203,9 @@ class FBXExtractor:
            path (str): Path to use for export.

        """
-        cmds.select(members, r=True, noExpand=True)
-        mel.eval('FBXExport -f "{}" -s'.format(path))
+        # The export requires forward slashes because we need
+        # to format it into a string in a mel expression
+        path = path.replace("\\", "/")
+        with maintained_selection():
+            cmds.select(members, r=True, noExpand=True)
+            mel.eval('FBXExport -f "{}" -s'.format(path))
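The forward-slash conversion matters because the path is interpolated into a MEL string literal, where backslash sequences such as `\t` or `\n` in a Windows path would be parsed as escapes. A quick sketch:

    # Why backslashes must go: embedded in a MEL string, "\t" in the path
    # would become a tab character and corrupt the file name.
    path = "C:\\temp\\shots\\test.fbx"
    mel_cmd = 'FBXExport -f "{}" -s'.format(path.replace("\\", "/"))
    print(mel_cmd)  # FBXExport -f "C:/temp/shots/test.fbx" -s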
@@ -183,6 +183,51 @@ def maintained_selection():
        cmds.select(clear=True)


+def get_namespace(node):
+    """Return namespace of given node"""
+    node_name = node.rsplit("|", 1)[-1]
+    if ":" in node_name:
+        return node_name.rsplit(":", 1)[0]
+    else:
+        return ""
+
+
+def strip_namespace(node, namespace):
+    """Strip given namespace from node path.
+
+    The namespace will only be stripped from names
+    if it starts with that namespace. If the namespace
+    occurs within another namespace it's not removed.
+
+    Examples:
+        >>> strip_namespace("namespace:node", namespace="namespace:")
+        "node"
+        >>> strip_namespace("hello:world:node", namespace="hello:world")
+        "node"
+        >>> strip_namespace("hello:world:node", namespace="hello")
+        "world:node"
+        >>> strip_namespace("hello:world:node", namespace="world")
+        "hello:world:node"
+        >>> strip_namespace("ns:group|ns:node", namespace="ns")
+        "group|node"
+
+    Returns:
+        str: Node name without given starting namespace.
+
+    """
+
+    # Ensure namespace ends with `:`
+    if not namespace.endswith(":"):
+        namespace = "{}:".format(namespace)
+
+    # The long path for a node can also have the namespace
+    # in its parents so we need to remove it from each
+    return "|".join(
+        name[len(namespace):] if name.startswith(namespace) else name
+        for name in node.split("|")
+    )
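Since `strip_namespace` is pure string handling, its docstring examples can be sanity-checked outside a Maya session; a standalone copy for quick verification:

    # Standalone copy of strip_namespace for checking the docstring examples
    # without Maya.
    def strip_namespace(node, namespace):
        if not namespace.endswith(":"):
            namespace = "{}:".format(namespace)
        return "|".join(
            name[len(namespace):] if name.startswith(namespace) else name
            for name in node.split("|")
        )

    assert strip_namespace("ns:group|ns:node", "ns") == "group|node"
    assert strip_namespace("hello:world:node", "hello") == "world:node"
    assert strip_namespace("hello:world:node", "world") == "hello:world:node"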

def get_custom_namespace(custom_namespace):
    """Return unique namespace.

@@ -922,7 +967,7 @@ def no_display_layers(nodes):


@contextlib.contextmanager
-def namespaced(namespace, new=True):
+def namespaced(namespace, new=True, relative_names=None):
    """Work inside namespace during context

    Args:

@@ -934,15 +979,19 @@ def namespaced(namespace, new=True):

    """
    original = cmds.namespaceInfo(cur=True, absoluteName=True)
+    original_relative_names = cmds.namespace(query=True, relativeNames=True)
    if new:
        namespace = unique_namespace(namespace)
        cmds.namespace(add=namespace)
+    if relative_names is not None:
+        cmds.namespace(relativeNames=relative_names)
    try:
        cmds.namespace(set=namespace)
        yield namespace
    finally:
        cmds.namespace(set=original)
+        if relative_names is not None:
+            cmds.namespace(relativeNames=original_relative_names)


@contextlib.contextmanager

@@ -4100,14 +4149,19 @@ def create_rig_animation_instance(
    """
    if options is None:
        options = {}

    name = context["representation"]["name"]
    output = next((node for node in nodes if
                   node.endswith("out_SET")), None)
    controls = next((node for node in nodes if
                     node.endswith("controls_SET")), None)
-    assert output, "No out_SET in rig, this is a bug."
-    assert controls, "No controls_SET in rig, this is a bug."
+    if name != "fbx":
+        assert output, "No out_SET in rig, this is a bug."
+        assert controls, "No controls_SET in rig, this is a bug."
+    anim_skeleton = next((node for node in nodes if
+                          node.endswith("skeletonAnim_SET")), None)
+    skeleton_mesh = next((node for node in nodes if
+                          node.endswith("skeletonMesh_SET")), None)

    # Find the roots amongst the loaded nodes
    roots = (

@@ -4119,9 +4173,7 @@ def create_rig_animation_instance(
    custom_subset = options.get("animationSubsetName")
    if custom_subset:
        formatting_data = {
            # TODO remove 'asset_type' and replace 'asset_name' with 'asset'
            "asset_name": context['asset']['name'],
            "asset_type": context['asset']['type'],
            "asset": context["asset"],
            "subset": context['subset']['name'],
            "family": (
                context['subset']['data'].get('family') or

@@ -4142,10 +4194,12 @@ def create_rig_animation_instance(

    host = registered_host()
    create_context = CreateContext(host)

    # Create the animation instance
+    rig_sets = [output, controls, anim_skeleton, skeleton_mesh]
+    # Remove sets that this particular rig does not have
+    rig_sets = [s for s in rig_sets if s is not None]
    with maintained_selection():
-        cmds.select([output, controls] + roots, noExpand=True)
+        cmds.select(rig_sets + roots, noExpand=True)
        create_context.create(
            creator_identifier=creator_identifier,
            variant=namespace,
@@ -1,14 +1,13 @@
import os
import logging
+from functools import partial

from qtpy import QtWidgets, QtGui

import maya.utils
import maya.cmds as cmds

-from openpype.settings import get_project_settings
from openpype.pipeline import (
    get_current_project_name,
    get_current_asset_name,
    get_current_task_name
)

@@ -46,12 +45,12 @@ def get_context_label():
    )


-def install():
+def install(project_settings):
    if cmds.about(batch=True):
        log.info("Skipping openpype.menu initialization in batch mode..")
        return

-    def deferred():
+    def add_menu():
        pyblish_icon = host_tools.get_pyblish_icon()
        parent_widget = get_main_window()
        cmds.menu(

@@ -191,7 +190,7 @@ def install():

        cmds.setParent(MENU_NAME, menu=True)

-    def add_scripts_menu():
+    def add_scripts_menu(project_settings):
        try:
            import scriptsmenu.launchformaya as launchformaya
        except ImportError:

@@ -201,9 +200,6 @@ def install():
        )
        return

    # load configuration of custom menu
-    project_name = get_current_project_name()
-    project_settings = get_project_settings(project_name)
    config = project_settings["maya"]["scriptsmenu"]["definition"]
    _menu = project_settings["maya"]["scriptsmenu"]["name"]


@@ -225,8 +221,9 @@ def install():
    # so that it only gets called after Maya UI has initialized too.
    # This is crucial with Maya 2020+ which initializes without UI
    # first as a QCoreApplication
-    maya.utils.executeDeferred(deferred)
-    cmds.evalDeferred(add_scripts_menu, lowestPriority=True)
+    maya.utils.executeDeferred(add_menu)
+    cmds.evalDeferred(partial(add_scripts_menu, project_settings),
+                      lowestPriority=True)


def uninstall():
@@ -28,8 +28,6 @@ from openpype.lib import (
from openpype.pipeline import (
    legacy_io,
    get_current_project_name,
    get_current_asset_name,
    get_current_task_name,
    register_loader_plugin_path,
    register_inventory_action_path,
    register_creator_plugin_path,

@@ -97,6 +95,8 @@ class MayaHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost):
        self.log.info("Installing callbacks ... ")
        register_event_callback("init", on_init)

+        _set_project()
+
        if lib.IS_HEADLESS:
            self.log.info((
                "Running in headless mode, skipping Maya save/open/new"

@@ -105,10 +105,9 @@ class MayaHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost):

            return

-        _set_project()
        self._register_callbacks()

-        menu.install()
+        menu.install(project_settings)

        register_event_callback("save", on_save)
        register_event_callback("open", on_open)
@@ -151,6 +151,7 @@ class MayaCreatorBase(object):
        # We never store the instance_node as value on the node since
        # it's the node name itself
        data.pop("instance_node", None)
+        data.pop("instance_id", None)

        # Don't store `families` since it's up to the creator itself
        # to define the initial publish families - not a stored attribute of

@@ -227,6 +228,7 @@ class MayaCreatorBase(object):

        # Explicitly re-parse the node name
        node_data["instance_node"] = node
+        node_data["instance_id"] = node

        # If the creator plug-in specifies
        families = self.get_publish_families()

@@ -601,6 +603,13 @@ class RenderlayerCreator(NewCreator, MayaCreatorBase):
class Loader(LoaderPlugin):
    hosts = ["maya"]

+    load_settings = {}  # defined in settings
+
+    @classmethod
+    def apply_settings(cls, project_settings, system_settings):
+        super(Loader, cls).apply_settings(project_settings, system_settings)
+        cls.load_settings = project_settings['maya']['load']
+
    def get_custom_namespace_and_group(self, context, options, loader_key):
        """Queries Settings to get custom template for namespace and group.


@@ -613,12 +622,9 @@ class Loader(LoaderPlugin):
            loader_key (str): key to get separate configuration from Settings
                ('reference_loader'|'import_loader')
        """
-        options["attach_to_root"] = True
-        asset = context['asset']
-        subset = context['subset']
-        settings = get_project_settings(context['project']['name'])
-        custom_naming = settings['maya']['load'][loader_key]
+        options["attach_to_root"] = True
+        custom_naming = self.load_settings[loader_key]

        if not custom_naming['namespace']:
            raise LoadError("No namespace specified in "

@@ -627,6 +633,8 @@ class Loader(LoaderPlugin):
            self.log.debug("No custom group_name, no group will be created.")
            options["attach_to_root"] = False

+        asset = context['asset']
+        subset = context['subset']
        formatting_data = {
            "asset_name": asset['name'],
            "asset_type": asset['type'],
@@ -7,7 +7,7 @@ class PreCopyMel(PreLaunchHook):

    Hook `GlobalHostDataHook` must be executed before this hook.
    """
-    app_groups = {"maya"}
+    app_groups = {"maya", "mayapy"}
    launch_types = {LaunchTypes.local}

    def execute(self):
@@ -20,6 +20,13 @@ class CreateRig(plugin.MayaCreator):
        instance_node = instance.get("instance_node")

        self.log.info("Creating Rig instance set up ...")
+        # TODO:change name (_controls_SET -> _rigs_SET)
        controls = cmds.sets(name=subset_name + "_controls_SET", empty=True)
+        # TODO:change name (_out_SET -> _geo_SET)
        pointcache = cmds.sets(name=subset_name + "_out_SET", empty=True)
-        cmds.sets([controls, pointcache], forceElement=instance_node)
+        skeleton = cmds.sets(
+            name=subset_name + "_skeletonAnim_SET", empty=True)
+        skeleton_mesh = cmds.sets(
+            name=subset_name + "_skeletonMesh_SET", empty=True)
+        cmds.sets([controls, pointcache,
+                   skeleton, skeleton_mesh], forceElement=instance_node)
@@ -1,4 +1,46 @@
import openpype.hosts.maya.api.plugin
+import maya.cmds as cmds
+
+
+def _process_reference(file_url, name, namespace, options):
+    """Load files by referencing scene in Maya.
+
+    Args:
+        file_url (str): fileapth of the objects to be loaded
+        name (str): subset name
+        namespace (str): namespace
+        options (dict): dict of storing the param
+
+    Returns:
+        list: list of object nodes
+    """
+    from openpype.hosts.maya.api.lib import unique_namespace
+    # Get name from asset being loaded
+    # Assuming name is subset name from the animation, we split the number
+    # suffix from the name to ensure the namespace is unique
+    name = name.split("_")[0]
+    ext = file_url.split(".")[-1]
+    namespace = unique_namespace(
+        "{}_".format(name),
+        format="%03d",
+        suffix="_{}".format(ext)
+    )
+
+    attach_to_root = options.get("attach_to_root", True)
+    group_name = options["group_name"]
+
+    # no group shall be created
+    if not attach_to_root:
+        group_name = namespace
+
+    nodes = cmds.file(file_url,
+                      namespace=namespace,
+                      sharedReferenceFile=False,
+                      groupReference=attach_to_root,
+                      groupName=group_name,
+                      reference=True,
+                      returnNewNodes=True)
+    return nodes


class AbcLoader(openpype.hosts.maya.api.plugin.ReferenceLoader):

@@ -16,44 +58,42 @@ class AbcLoader(openpype.hosts.maya.api.plugin.ReferenceLoader):

    def process_reference(self, context, name, namespace, options):

-        import maya.cmds as cmds
-        from openpype.hosts.maya.api.lib import unique_namespace
-
        cmds.loadPlugin("AbcImport.mll", quiet=True)
-        # Prevent identical alembic nodes from being shared
-        # Create unique namespace for the cameras
-
-        # Get name from asset being loaded
-        # Assuming name is subset name from the animation, we split the number
-        # suffix from the name to ensure the namespace is unique
-        name = name.split("_")[0]
-        namespace = unique_namespace(
-            "{}_".format(name),
-            format="%03d",
-            suffix="_abc"
-        )
-
-        attach_to_root = options.get("attach_to_root", True)
-        group_name = options["group_name"]
-
-        # no group shall be created
-        if not attach_to_root:
-            group_name = namespace
-
        # hero_001 (abc)
        # asset_counter{optional}
        path = self.filepath_from_context(context)
        file_url = self.prepare_root_value(path,
                                           context["project"]["name"])
-        nodes = cmds.file(file_url,
-                          namespace=namespace,
-                          sharedReferenceFile=False,
-                          groupReference=attach_to_root,
-                          groupName=group_name,
-                          reference=True,
-                          returnNewNodes=True)
-
+        nodes = _process_reference(file_url, name, namespace, options)
        # load colorbleed ID attribute
        self[:] = nodes

        return nodes


+class FbxLoader(openpype.hosts.maya.api.plugin.ReferenceLoader):
+    """Loader to reference an Fbx files"""
+
+    families = ["animation",
+                "camera"]
+    representations = ["fbx"]
+
+    label = "Reference animation"
+    order = -10
+    icon = "code-fork"
+    color = "orange"
+
+    def process_reference(self, context, name, namespace, options):
+
+        cmds.loadPlugin("fbx4maya.mll", quiet=True)
+
+        path = self.filepath_from_context(context)
+        file_url = self.prepare_root_value(path,
+                                           context["project"]["name"])
+
+        nodes = _process_reference(file_url, name, namespace, options)
+
+        self[:] = nodes
+
+        return nodes
openpype/hosts/maya/plugins/publish/collect_fbx_animation.py (new file, +36)

@@ -0,0 +1,36 @@
# -*- coding: utf-8 -*-
from maya import cmds  # noqa
import pyblish.api
from openpype.pipeline import OptionalPyblishPluginMixin


class CollectFbxAnimation(pyblish.api.InstancePlugin,
                          OptionalPyblishPluginMixin):
    """Collect Animated Rig Data for FBX Extractor."""

    order = pyblish.api.CollectorOrder + 0.2
    label = "Collect Fbx Animation"
    hosts = ["maya"]
    families = ["animation"]
    optional = True

    def process(self, instance):
        if not self.is_active(instance.data):
            return
        skeleton_sets = [
            i for i in instance
            if i.endswith("skeletonAnim_SET")
        ]
        if not skeleton_sets:
            return

        instance.data["families"].append("animation.fbx")
        instance.data["animated_skeleton"] = []
        for skeleton_set in skeleton_sets:
            skeleton_content = cmds.sets(skeleton_set, query=True)
            self.log.debug(
                "Collected animated skeleton data: {}".format(
                    skeleton_content
                ))
            if skeleton_content:
                instance.data["animated_skeleton"] = skeleton_content
@@ -22,7 +22,8 @@ class CollectRigSets(pyblish.api.InstancePlugin):
    def process(self, instance):

        # Find required sets by suffix
-        searching = {"controls_SET", "out_SET"}
+        searching = {"controls_SET", "out_SET",
+                     "skeletonAnim_SET", "skeletonMesh_SET"}
        found = {}
        for node in cmds.ls(instance, exactType="objectSet"):
            for suffix in searching:
openpype/hosts/maya/plugins/publish/collect_skeleton_mesh.py (new file, +44)

@@ -0,0 +1,44 @@
# -*- coding: utf-8 -*-
from maya import cmds  # noqa
import pyblish.api


class CollectSkeletonMesh(pyblish.api.InstancePlugin):
    """Collect Static Rig Data for FBX Extractor."""

    order = pyblish.api.CollectorOrder + 0.2
    label = "Collect Skeleton Mesh"
    hosts = ["maya"]
    families = ["rig"]

    def process(self, instance):
        skeleton_mesh_set = instance.data["rig_sets"].get(
            "skeletonMesh_SET")
        if not skeleton_mesh_set:
            self.log.debug(
                "No skeletonMesh_SET found. "
                "Skipping collecting of skeleton mesh..."
            )
            return

        # Store current frame to ensure single frame export
        frame = cmds.currentTime(query=True)
        instance.data["frameStart"] = frame
        instance.data["frameEnd"] = frame

        instance.data["skeleton_mesh"] = []

        skeleton_mesh_content = cmds.sets(
            skeleton_mesh_set, query=True) or []
        if not skeleton_mesh_content:
            self.log.debug(
                "No object nodes in skeletonMesh_SET. "
                "Skipping collecting of skeleton mesh..."
            )
            return
        instance.data["families"] += ["rig.fbx"]
        instance.data["skeleton_mesh"] = skeleton_mesh_content
        self.log.debug(
            "Collected skeletonMesh_SET members: {}".format(
                skeleton_mesh_content
            ))
openpype/hosts/maya/plugins/publish/extract_fbx_animation.py (new file, +65)

@@ -0,0 +1,65 @@
# -*- coding: utf-8 -*-
import os

from maya import cmds  # noqa
import pyblish.api

from openpype.pipeline import publish
from openpype.hosts.maya.api import fbx
from openpype.hosts.maya.api.lib import (
    namespaced, get_namespace, strip_namespace
)


class ExtractFBXAnimation(publish.Extractor):
    """Extract Rig in FBX format from Maya.

    This extracts the rig in fbx with the constraints
    and referenced asset content included.
    This also optionally extract animated rig in fbx with
    geometries included.

    """
    order = pyblish.api.ExtractorOrder
    label = "Extract Animation (FBX)"
    hosts = ["maya"]
    families = ["animation.fbx"]

    def process(self, instance):
        # Define output path
        staging_dir = self.staging_dir(instance)
        filename = "{0}.fbx".format(instance.name)
        path = os.path.join(staging_dir, filename)
        path = path.replace("\\", "/")

        fbx_exporter = fbx.FBXExtractor(log=self.log)
        out_members = instance.data.get("animated_skeleton", [])
        # Export
        instance.data["constraints"] = True
        instance.data["skeletonDefinitions"] = True
        instance.data["referencedAssetsContent"] = True
        fbx_exporter.set_options_from_instance(instance)
        # Export from the rig's namespace so that the exported
        # FBX does not include the namespace but preserves the node
        # names as existing in the rig workfile
        namespace = get_namespace(out_members[0])
        relative_out_members = [
            strip_namespace(node, namespace) for node in out_members
        ]
        with namespaced(
            ":" + namespace,
            new=False,
            relative_names=True
        ) as namespace:
            fbx_exporter.export(relative_out_members, path)

        representations = instance.data.setdefault("representations", [])
        representations.append({
            'name': 'fbx',
            'ext': 'fbx',
            'files': filename,
            "stagingDir": staging_dir
        })

        self.log.debug(
            "Extracted FBX animation to: {0}".format(path))
openpype/hosts/maya/plugins/publish/extract_skeleton_mesh.py (new file, +54)

@@ -0,0 +1,54 @@
# -*- coding: utf-8 -*-
import os

from maya import cmds  # noqa
import pyblish.api

from openpype.pipeline import publish
from openpype.pipeline.publish import OptionalPyblishPluginMixin
from openpype.hosts.maya.api import fbx


class ExtractSkeletonMesh(publish.Extractor,
                          OptionalPyblishPluginMixin):
    """Extract Rig in FBX format from Maya.

    This extracts the rig in fbx with the constraints
    and referenced asset content included.
    This also optionally extract animated rig in fbx with
    geometries included.

    """
    order = pyblish.api.ExtractorOrder
    label = "Extract Skeleton Mesh"
    hosts = ["maya"]
    families = ["rig.fbx"]

    def process(self, instance):
        if not self.is_active(instance.data):
            return
        # Define output path
        staging_dir = self.staging_dir(instance)
        filename = "{0}.fbx".format(instance.name)
        path = os.path.join(staging_dir, filename)

        fbx_exporter = fbx.FBXExtractor(log=self.log)
        out_set = instance.data.get("skeleton_mesh", [])

        instance.data["constraints"] = True
        instance.data["skeletonDefinitions"] = True

        fbx_exporter.set_options_from_instance(instance)

        # Export
        fbx_exporter.export(out_set, path)

        representations = instance.data.setdefault("representations", [])
        representations.append({
            'name': 'fbx',
            'ext': 'fbx',
            'files': filename,
            "stagingDir": staging_dir
        })

        self.log.debug("Extract FBX to: {0}".format(path))
@@ -0,0 +1,66 @@
import pyblish.api
import openpype.hosts.maya.api.action
from openpype.pipeline.publish import (
    PublishValidationError,
    ValidateContentsOrder
)
from maya import cmds


class ValidateAnimatedReferenceRig(pyblish.api.InstancePlugin):
    """Validate all nodes in skeletonAnim_SET are referenced"""

    order = ValidateContentsOrder
    hosts = ["maya"]
    families = ["animation.fbx"]
    label = "Animated Reference Rig"
    accepted_controllers = ["transform", "locator"]
    actions = [openpype.hosts.maya.api.action.SelectInvalidAction]

    def process(self, instance):
        animated_sets = instance.data.get("animated_skeleton", [])
        if not animated_sets:
            self.log.debug(
                "No nodes found in skeletonAnim_SET. "
                "Skipping validation of animated reference rig..."
            )
            return

        for animated_reference in animated_sets:
            is_referenced = cmds.referenceQuery(
                animated_reference, isNodeReferenced=True)
            if not bool(is_referenced):
                raise PublishValidationError(
                    "All the content in skeletonAnim_SET"
                    " should be referenced nodes"
                )
        invalid_controls = self.validate_controls(animated_sets)
        if invalid_controls:
            raise PublishValidationError(
                "All the content in skeletonAnim_SET"
                " should be transforms"
            )

    @classmethod
    def validate_controls(self, set_members):
        """Check if the controller set contains only accepted node types.

        Checks if all its set members are within the hierarchy of the root
        Checks if the node types of the set members valid

        Args:
            set_members: list of nodes of the skeleton_anim_set
            hierarchy: list of nodes which reside under the root node

        Returns:
            errors (list)
        """

        # Validate control types
        invalid = []
        set_members = cmds.ls(set_members, long=True)
        for node in set_members:
            if cmds.nodeType(node) not in self.accepted_controllers:
                invalid.append(node)

        return invalid
@@ -30,18 +30,21 @@ class ValidatePluginPathAttributes(pyblish.api.InstancePlugin):
    def get_invalid(cls, instance):
        invalid = list()

-        file_attr = cls.attribute
-        if not file_attr:
+        file_attrs = cls.attribute
+        if not file_attrs:
            return invalid

        # Consider only valid node types to avoid "Unknown object type" warning
        all_node_types = set(cmds.allNodeTypes())
-        node_types = [key for key in file_attr.keys() if key in all_node_types]
+        node_types = [
+            key for key in file_attrs.keys()
+            if key in all_node_types
+        ]

        for node, node_type in pairwise(cmds.ls(type=node_types,
                                                showType=True)):
            # get the filepath
-            file_attr = "{}.{}".format(node, file_attr[node_type])
+            file_attr = "{}.{}".format(node, file_attrs[node_type])
            filepath = cmds.getAttr(file_attr)

            if filepath and not os.path.exists(filepath):
openpype/hosts/maya/plugins/publish/validate_resolution.py (new file, +117)

@@ -0,0 +1,117 @@
import pyblish.api
from openpype.pipeline import (
    PublishValidationError,
    OptionalPyblishPluginMixin
)
from maya import cmds
from openpype.pipeline.publish import RepairAction
from openpype.hosts.maya.api import lib
from openpype.hosts.maya.api.lib import reset_scene_resolution


class ValidateResolution(pyblish.api.InstancePlugin,
                         OptionalPyblishPluginMixin):
    """Validate the render resolution setting aligned with DB"""

    order = pyblish.api.ValidatorOrder
    families = ["renderlayer"]
    hosts = ["maya"]
    label = "Validate Resolution"
    actions = [RepairAction]
    optional = True

    def process(self, instance):
        if not self.is_active(instance.data):
            return
        invalid = self.get_invalid_resolution(instance)
        if invalid:
            raise PublishValidationError(
                "Render resolution is invalid. See log for details.",
                description=(
                    "Wrong render resolution setting. "
                    "Please use repair button to fix it.\n\n"
                    "If current renderer is V-Ray, "
                    "make sure vraySettings node has been created."
                )
            )

    @classmethod
    def get_invalid_resolution(cls, instance):
        width, height, pixelAspect = cls.get_db_resolution(instance)
        current_renderer = instance.data["renderer"]
        layer = instance.data["renderlayer"]
        invalid = False
        if current_renderer == "vray":
            vray_node = "vraySettings"
            if cmds.objExists(vray_node):
                current_width = lib.get_attr_in_layer(
                    "{}.width".format(vray_node), layer=layer)
                current_height = lib.get_attr_in_layer(
                    "{}.height".format(vray_node), layer=layer)
                current_pixelAspect = lib.get_attr_in_layer(
                    "{}.pixelAspect".format(vray_node), layer=layer
                )
            else:
                cls.log.error(
                    "Can't detect VRay resolution because there is no node "
                    "named: `{}`".format(vray_node)
                )
                return True
        else:
            current_width = lib.get_attr_in_layer(
                "defaultResolution.width", layer=layer)
            current_height = lib.get_attr_in_layer(
                "defaultResolution.height", layer=layer)
            current_pixelAspect = lib.get_attr_in_layer(
                "defaultResolution.pixelAspect", layer=layer
            )
        if current_width != width or current_height != height:
            cls.log.error(
                "Render resolution {}x{} does not match "
                "asset resolution {}x{}".format(
                    current_width, current_height,
                    width, height
                ))
            invalid = True
        if current_pixelAspect != pixelAspect:
            cls.log.error(
                "Render pixel aspect {} does not match "
                "asset pixel aspect {}".format(
                    current_pixelAspect, pixelAspect
                ))
            invalid = True
        return invalid

    @classmethod
    def get_db_resolution(cls, instance):
        asset_doc = instance.data["assetEntity"]
        project_doc = instance.context.data["projectEntity"]
        for data in [asset_doc["data"], project_doc["data"]]:
            if (
                "resolutionWidth" in data and
                "resolutionHeight" in data and
                "pixelAspect" in data
            ):
                width = data["resolutionWidth"]
                height = data["resolutionHeight"]
                pixelAspect = data["pixelAspect"]
                return int(width), int(height), float(pixelAspect)

        # Defaults if not found in asset document or project document
        return 1920, 1080, 1.0

    @classmethod
    def repair(cls, instance):
        # Usually without renderlayer overrides the renderlayers
        # all share the same resolution value - so fixing the first
        # will have fixed all the others too. It's much faster to
        # check whether it's invalid first instead of switching
        # into all layers individually
        if not cls.get_invalid_resolution(instance):
            cls.log.debug(
                "Nothing to repair on instance: {}".format(instance)
            )
            return
        layer_node = instance.data['setMembers']
        with lib.renderlayer(layer_node):
            reset_scene_resolution()
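The lookup order in `get_db_resolution` (asset document first, then project document, then hard-coded defaults) can be sketched without Maya:

    # Pure-Python sketch of the fallback chain used by the validator.
    def get_db_resolution(asset_data, project_data):
        for data in (asset_data, project_data):
            if {"resolutionWidth", "resolutionHeight", "pixelAspect"} <= set(data):
                return (int(data["resolutionWidth"]),
                        int(data["resolutionHeight"]),
                        float(data["pixelAspect"]))
        return 1920, 1080, 1.0  # defaults when neither document has values

    # Asset data wins over project data; empty asset data falls through.
    print(get_db_resolution({}, {"resolutionWidth": 2048,
                                 "resolutionHeight": 858,
                                 "pixelAspect": 1.0}))  # (2048, 858, 1.0)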
@@ -1,6 +1,6 @@
import pyblish.api
from maya import cmds

import openpype.hosts.maya.api.action
from openpype.pipeline.publish import (
    PublishValidationError,
    ValidateContentsOrder

@@ -20,33 +20,27 @@ class ValidateRigContents(pyblish.api.InstancePlugin):
    label = "Rig Contents"
    hosts = ["maya"]
    families = ["rig"]
    action = [openpype.hosts.maya.api.action.SelectInvalidAction]

    accepted_output = ["mesh", "transform"]
    accepted_controllers = ["transform"]

    def process(self, instance):
        invalid = self.get_invalid(instance)
        if invalid:
            raise PublishValidationError(
                "Invalid rig content. See log for details.")

    @classmethod
    def get_invalid(cls, instance):

        # Find required sets by suffix
-        required = ["controls_SET", "out_SET"]
-        missing = [
-            key for key in required if key not in instance.data["rig_sets"]
-        ]
-        if missing:
-            raise PublishValidationError(
-                "%s is missing sets: %s" % (instance, ", ".join(missing))
-            )
+        required, rig_sets = cls.get_nodes(instance)

-        controls_set = instance.data["rig_sets"]["controls_SET"]
-        out_set = instance.data["rig_sets"]["out_SET"]
+        cls.validate_missing_objectsets(instance, required, rig_sets)

-        # Ensure there are at least some transforms or dag nodes
-        # in the rig instance
-        set_members = instance.data['setMembers']
-        if not cmds.ls(set_members, type="dagNode", long=True):
-            raise PublishValidationError(
-                "No dag nodes in the pointcache instance. "
-                "(Empty instance?)"
-            )
+        controls_set = rig_sets["controls_SET"]
+        out_set = rig_sets["out_SET"]

        # Ensure contents in sets and retrieve long path for all objects
        output_content = cmds.sets(out_set, query=True) or []

@@ -61,49 +55,92 @@ class ValidateRigContents(pyblish.api.InstancePlugin):
        )
        controls_content = cmds.ls(controls_content, long=True)

-        # Validate members are inside the hierarchy from root node
-        root_nodes = cmds.ls(set_members, assemblies=True, long=True)
-        hierarchy = cmds.listRelatives(root_nodes, allDescendents=True,
-                                       fullPath=True) + root_nodes
-        hierarchy = set(hierarchy)
-
-        invalid_hierarchy = []
-        for node in output_content:
-            if node not in hierarchy:
-                invalid_hierarchy.append(node)
-        for node in controls_content:
-            if node not in hierarchy:
-                invalid_hierarchy.append(node)
+        rig_content = output_content + controls_content
+        invalid_hierarchy = cls.invalid_hierarchy(instance, rig_content)

        # Additional validations
-        invalid_geometry = self.validate_geometry(output_content)
-        invalid_controls = self.validate_controls(controls_content)
+        invalid_geometry = cls.validate_geometry(output_content)
+        invalid_controls = cls.validate_controls(controls_content)

        error = False
        if invalid_hierarchy:
-            self.log.error("Found nodes which reside outside of root group "
+            cls.log.error("Found nodes which reside outside of root group "
                          "while they are set up for publishing."
                          "\n%s" % invalid_hierarchy)
            error = True

        if invalid_controls:
-            self.log.error("Only transforms can be part of the controls_SET."
+            cls.log.error("Only transforms can be part of the controls_SET."
                          "\n%s" % invalid_controls)
            error = True

        if invalid_geometry:
-            self.log.error("Only meshes can be part of the out_SET\n%s"
+            cls.log.error("Only meshes can be part of the out_SET\n%s"
                          % invalid_geometry)
            error = True

        if error:
            return invalid_hierarchy + invalid_controls + invalid_geometry

+    @classmethod
+    def validate_missing_objectsets(cls, instance,
+                                    required_objsets, rig_sets):
+        """Validate missing objectsets in rig sets
+
+        Args:
+            instance (str): instance
+            required_objsets (list): list of objectset names
+            rig_sets (list): list of rig sets
+
+        Raises:
+            PublishValidationError: When the error is raised, it will show
+                which instance has the missing object sets
+        """
+        missing = [
+            key for key in required_objsets if key not in rig_sets
+        ]
+        if missing:
+            raise PublishValidationError(
+                "%s is missing sets: %s" % (instance, ", ".join(missing))
+            )

-    def validate_geometry(self, set_members):
-        """Check if the out set passes the validations
-
-        Checks if all its set members are within the hierarchy of the root
+    @classmethod
+    def invalid_hierarchy(cls, instance, content):
+        """
+        Check if all rig set members are within the hierarchy of the rig root
+
+        Args:
+            instance (str): instance
+            content (list): list of content from rig sets
+
+        Raises:
+            PublishValidationError: It means no dag nodes in
+                the rig instance
+
+        Returns:
+            list: invalid hierarchy
+        """
+        # Ensure there are at least some transforms or dag nodes
+        # in the rig instance
+        set_members = instance.data['setMembers']
+        if not cmds.ls(set_members, type="dagNode", long=True):
+            raise PublishValidationError(
+                "No dag nodes in the rig instance. "
+                "(Empty instance?)"
+            )
+        # Validate members are inside the hierarchy from root node
+        root_nodes = cmds.ls(set_members, assemblies=True, long=True)
+        hierarchy = cmds.listRelatives(root_nodes, allDescendents=True,
+                                       fullPath=True) + root_nodes
+        hierarchy = set(hierarchy)
+        invalid_hierarchy = []
+        for node in content:
+            if node not in hierarchy:
+                invalid_hierarchy.append(node)
+        return invalid_hierarchy
+
+    @classmethod
+    def validate_geometry(cls, set_members):
+        """
+        Checks if the node types of the set members valid

        Args:

@@ -122,15 +159,13 @@ class ValidateRigContents(pyblish.api.InstancePlugin):
                                 fullPath=True) or []
        all_shapes = cmds.ls(set_members + shapes, long=True, shapes=True)
        for shape in all_shapes:
-            if cmds.nodeType(shape) not in self.accepted_output:
+            if cmds.nodeType(shape) not in cls.accepted_output:
                invalid.append(shape)

        return invalid

-    def validate_controls(self, set_members):
-        """Check if the controller set passes the validations
-
-        Checks if all its set members are within the hierarchy of the root
+    @classmethod
+    def validate_controls(cls, set_members):
+        """
+        Checks if the control set members are allowed node types.
+        Checks if the node types of the set members valid

        Args:

@@ -144,7 +179,80 @@ class ValidateRigContents(pyblish.api.InstancePlugin):
        # Validate control types
        invalid = []
        for node in set_members:
-            if cmds.nodeType(node) not in self.accepted_controllers:
+            if cmds.nodeType(node) not in cls.accepted_controllers:
                invalid.append(node)

        return invalid

+    @classmethod
+    def get_nodes(cls, instance):
+        """Get the target objectsets and rig sets nodes
+
+        Args:
+            instance (str): instance
+
+        Returns:
+            tuple: 2-tuple of list of objectsets,
+                list of rig sets nodes
+        """
+        objectsets = ["controls_SET", "out_SET"]
+        rig_sets_nodes = instance.data.get("rig_sets", [])
+        return objectsets, rig_sets_nodes
+
+
+class ValidateSkeletonRigContents(ValidateRigContents):
+    """Ensure skeleton rigs contains pipeline-critical content
+
+    The rigs optionally contain at least two object sets:
+        "skeletonMesh_SET" - Set of the skinned meshes
+                             with bone hierarchies
+
+    """
+
+    order = ValidateContentsOrder
+    label = "Skeleton Rig Contents"
+    hosts = ["maya"]
+    families = ["rig.fbx"]
+
+    @classmethod
+    def get_invalid(cls, instance):
+        objectsets, skeleton_mesh_nodes = cls.get_nodes(instance)
+        cls.validate_missing_objectsets(
+            instance, objectsets, instance.data["rig_sets"])
+
+        # Ensure contents in sets and retrieve long path for all objects
+        output_content = instance.data.get("skeleton_mesh", [])
+        output_content = cmds.ls(skeleton_mesh_nodes, long=True)
+
+        invalid_hierarchy = cls.invalid_hierarchy(
+            instance, output_content)
+        invalid_geometry = cls.validate_geometry(output_content)
+
+        error = False
+        if invalid_hierarchy:
+            cls.log.error("Found nodes which reside outside of root group "
+                          "while they are set up for publishing."
+                          "\n%s" % invalid_hierarchy)
+            error = True
+        if invalid_geometry:
+            cls.log.error("Found nodes which reside outside of root group "
+                          "while they are set up for publishing."
+                          "\n%s" % invalid_hierarchy)
+            error = True
+        if error:
+            return invalid_hierarchy + invalid_geometry
+
+    @classmethod
+    def get_nodes(cls, instance):
+        """Get the target objectsets and rig sets nodes
+
+        Args:
+            instance (str): instance
+
+        Returns:
+            tuple: 2-tuple of list of objectsets,
+                list of rig sets nodes
+        """
+        objectsets = ["skeletonMesh_SET"]
+        skeleton_mesh_nodes = instance.data.get("skeleton_mesh", [])
+        return objectsets, skeleton_mesh_nodes
|
||||
|
|
|
|||
|
|
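For context, the hierarchy check above boils down to collecting every
descendant of the set's assembly roots and testing membership. A minimal
standalone sketch of the same technique (illustration only, not part of the
commit; assumes a running Maya session):

    from maya import cmds

    def nodes_outside_root(nodes):
        """Return nodes that are not parented under the assembly roots."""
        nodes = cmds.ls(nodes, long=True)  # normalize to full DAG paths
        roots = cmds.ls(nodes, assemblies=True, long=True)
        hierarchy = set(cmds.listRelatives(
            roots, allDescendents=True, fullPath=True) or [])
        hierarchy.update(roots)
        return [node for node in nodes if node not in hierarchy]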
@ -59,7 +59,7 @@ class ValidateRigControllers(pyblish.api.InstancePlugin):
    @classmethod
    def get_invalid(cls, instance):

        controls_set = instance.data["rig_sets"].get("controls_SET")
        controls_set = cls.get_node(instance)
        if not controls_set:
            cls.log.error(
                "Must have 'controls_SET' in rig instance"

@ -189,7 +189,7 @@ class ValidateRigControllers(pyblish.api.InstancePlugin):
    @classmethod
    def repair(cls, instance):

        controls_set = instance.data["rig_sets"].get("controls_SET")
        controls_set = cls.get_node(instance)
        if not controls_set:
            cls.log.error(
                "Unable to repair because no 'controls_SET' found in rig "

@ -228,3 +228,64 @@ class ValidateRigControllers(pyblish.api.InstancePlugin):
            default = cls.CONTROLLER_DEFAULTS[attr]
            cls.log.info("Setting %s to %s" % (plug, default))
            cmds.setAttr(plug, default)

    @classmethod
    def get_node(cls, instance):
        """Get target object nodes from controls_SET

        Args:
            instance (str): instance

        Returns:
            list: list of object nodes from controls_SET
        """
        return instance.data["rig_sets"].get("controls_SET")


class ValidateSkeletonRigControllers(ValidateRigControllers):
    """Validate rig controller for skeletonAnim_SET

    Controls must have the transformation attributes on their default
    values of translate zero, rotate zero and scale one when they are
    unlocked attributes.

    Unlocked keyable attributes may not have any incoming connections. If
    these connections are required for the rig then lock the attributes.

    The visibility attribute must be locked.

    Note that `repair` will:
        - Lock all visibility attributes
        - Reset all default values for translate, rotate, scale
        - Break all incoming connections to keyable attributes

    """
    order = ValidateContentsOrder + 0.05
    label = "Skeleton Rig Controllers"
    hosts = ["maya"]
    families = ["rig.fbx"]

    # Default controller values
    CONTROLLER_DEFAULTS = {
        "translateX": 0,
        "translateY": 0,
        "translateZ": 0,
        "rotateX": 0,
        "rotateY": 0,
        "rotateZ": 0,
        "scaleX": 1,
        "scaleY": 1,
        "scaleZ": 1
    }

    @classmethod
    def get_node(cls, instance):
        """Get target object nodes from skeletonMesh_SET

        Args:
            instance (str): instance

        Returns:
            list: list of object nodes from skeletonMesh_SET
        """
        return instance.data["rig_sets"].get("skeletonMesh_SET")
@ -46,7 +46,7 @@ class ValidateRigOutSetNodeIds(pyblish.api.InstancePlugin):
    def get_invalid(cls, instance):
        """Get all nodes which do not match the criteria"""

        out_set = instance.data["rig_sets"].get("out_SET")
        out_set = cls.get_node(instance)
        if not out_set:
            return []


@ -85,3 +85,45 @@ class ValidateRigOutSetNodeIds(pyblish.api.InstancePlugin):
                continue

            lib.set_id(node, sibling_id, overwrite=True)

    @classmethod
    def get_node(cls, instance):
        """Get target object nodes from out_SET

        Args:
            instance (str): instance

        Returns:
            list: list of object nodes from out_SET
        """
        return instance.data["rig_sets"].get("out_SET")


class ValidateSkeletonRigOutSetNodeIds(ValidateRigOutSetNodeIds):
    """Validate if deformed shapes have related IDs to the original shapes
    from skeleton set.

    When a deformer is applied in the scene on a referenced mesh that already
    had deformers then Maya will create a new shape node for the mesh that
    does not have the original id. This validator checks whether the ids are
    valid on all the shape nodes in the instance.

    """

    order = ValidateContentsOrder
    families = ["rig.fbx"]
    hosts = ['maya']
    label = 'Skeleton Rig Out Set Node Ids'

    @classmethod
    def get_node(cls, instance):
        """Get target object nodes from skeletonMesh_SET

        Args:
            instance (str): instance

        Returns:
            list: list of object nodes from skeletonMesh_SET
        """
        return instance.data["rig_sets"].get(
            "skeletonMesh_SET")
@ -47,7 +47,7 @@ class ValidateRigOutputIds(pyblish.api.InstancePlugin):
        invalid = {}

        if compute:
            out_set = instance.data["rig_sets"].get("out_SET")
            out_set = cls.get_node(instance)
            if not out_set:
                instance.data["mismatched_output_ids"] = invalid
                return invalid

@ -115,3 +115,40 @@ class ValidateRigOutputIds(pyblish.api.InstancePlugin):
                    "Multiple matched ids found. Please repair manually: "
                    "{}".format(multiple_ids_match)
                )

    @classmethod
    def get_node(cls, instance):
        """Get target object nodes from out_SET

        Args:
            instance (str): instance

        Returns:
            list: list of object nodes from out_SET
        """
        return instance.data["rig_sets"].get("out_SET")


class ValidateSkeletonRigOutputIds(ValidateRigOutputIds):
    """Validate rig output ids from the skeleton sets.

    Nodes must share the same id as similarly named nodes in the scene. This
    is to ensure the id from the model is preserved through animation.

    """
    order = ValidateContentsOrder + 0.05
    label = "Skeleton Rig Output Ids"
    hosts = ["maya"]
    families = ["rig.fbx"]

    @classmethod
    def get_node(cls, instance):
        """Get target object nodes from skeletonMesh_SET

        Args:
            instance (str): instance

        Returns:
            list: list of object nodes from skeletonMesh_SET
        """
        return instance.data["rig_sets"].get("skeletonMesh_SET")
@ -0,0 +1,40 @@
# -*- coding: utf-8 -*-
"""Plugin for validating naming conventions."""
from maya import cmds

import pyblish.api

from openpype.pipeline.publish import (
    ValidateContentsOrder,
    OptionalPyblishPluginMixin,
    PublishValidationError
)


class ValidateSkeletonTopGroupHierarchy(pyblish.api.InstancePlugin,
                                        OptionalPyblishPluginMixin):
    """Validates top group hierarchy in the SETs
    Make sure the objects inside the SETs are always the top
    group of the hierarchy

    """
    order = ValidateContentsOrder + 0.05
    label = "Skeleton Rig Top Group Hierarchy"
    families = ["rig.fbx"]

    def process(self, instance):
        invalid = []
        skeleton_mesh_data = instance.data.get("skeleton_mesh", [])
        if skeleton_mesh_data:
            invalid = self.get_top_hierarchy(skeleton_mesh_data)
            if invalid:
                raise PublishValidationError(
                    "The skeletonMesh_SET includes objects which "
                    "are not at the top of the hierarchy: {}".format(invalid))

    def get_top_hierarchy(self, targets):
        targets = cmds.ls(targets, long=True)  # ensure long names
        non_top_hierarchy_list = [
            target for target in targets if target.count("|") > 2
        ]
        return non_top_hierarchy_list
@ -69,11 +69,8 @@ class ValidateUnrealStaticMeshName(pyblish.api.InstancePlugin,

        invalid = []

        project_settings = get_project_settings(
            legacy_io.Session["AVALON_PROJECT"]
        )
        collision_prefixes = (
            project_settings
            instance.context.data["project_settings"]
            ["maya"]
            ["create"]
            ["CreateUnrealStaticMesh"]
@ -3425,34 +3425,6 @@ def create_viewer_profile_string(viewer, display=None, path_like=False):
    return "{} ({})".format(viewer, display)


def get_head_filename_without_hashes(original_path, name):
    """Return the filename head renamed so the tag sits before frame hashes.

    This avoids confusing downstream filename parsing when the head of the
    filename itself contains hash symbols or extension-like text.

    Examples:
        >>> get_head_filename_without_hashes("render.####.exr", "baking")
        render.baking.####.exr
        >>> get_head_filename_without_hashes("render.%04d.exr", "tag")
        render.tag.%04d.exr
        >>> get_head_filename_without_hashes("exr.####.exr", "foo")
        exr.foo.####.exr

    Args:
        original_path (str): the filename with frame hashes
        name (str): the name of the tags

    Returns:
        str: the renamed filename with the tag
    """
    filename = os.path.basename(original_path)

    def insert_name(matchobj):
        return "{}.{}".format(name, matchobj.group(0))

    return re.sub(r"(%\d*d)|#+", insert_name, filename)


def get_filenames_without_hash(filename, frame_start, frame_end):
    """Get filenames without frame hash
    i.e. "renderCompositingMain.baking.0001.exr"
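The removed helper above relies on a single regex substitution: every
frame-number token (printf style like %04d, or a run of hashes like ####)
gets the tag prefixed to it. A standalone sketch of the same technique
(illustration only, not part of the commit):

    import os
    import re

    def insert_tag_before_frame_token(path, tag):
        """Insert '<tag>.' in front of every frame token in a file name."""
        filename = os.path.basename(path)
        return re.sub(r"(%\d*d)|#+",
                      lambda match: "{}.{}".format(tag, match.group(0)),
                      filename)

    print(insert_tag_before_frame_token("render.####.exr", "baking"))
    # render.baking.####.exr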
@ -39,7 +39,6 @@ from .lib import (
    get_view_process_node,
    get_viewer_config_from_string,
    deprecated,
    get_head_filename_without_hashes,
    get_filenames_without_hash
)
from .pipeline import (
@ -816,19 +815,20 @@ class ExporterReviewMov(ExporterReview):

        self.log.info("File info was set...")

        self.file = self.fhead + self.name + ".{}".format(self.ext)
        if ".{}".format(self.ext) not in VIDEO_EXTENSIONS:
            # filename would be with frame hashes if
            # the file extension is not in video format
            filename = get_head_filename_without_hashes(
                self.path_in, self.name)
            self.file = filename
            # make sure the filename are in
            # correct image output format
            if ".{}".format(self.ext) not in self.file:
                filename_no_ext, _ = os.path.splitext(filename)
                self.file = "{}.{}".format(filename_no_ext, self.ext)

        if ".{}".format(self.ext) in VIDEO_EXTENSIONS:
            self.file = "{}{}.{}".format(
                self.fhead, self.name, self.ext)
        else:
            # Output is image (or image sequence)
            # When the file is an image it's possible it
            # has extra information after the `fhead` that
            # we want to preserve, e.g. like frame numbers
            # or frames hashes like `####`
            filename_no_ext = os.path.splitext(
                os.path.basename(self.path_in))[0]
            after_head = filename_no_ext[len(self.fhead):]
            self.file = "{}{}.{}.{}".format(
                self.fhead, self.name, after_head, self.ext)
        self.path = os.path.join(
            self.staging_dir, self.file).replace("\\", "/")
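The new branch keeps whatever follows the file name head (frame numbers or
hash runs) and re-inserts it after the tag name. The slicing in isolation,
with made-up stand-in values (illustration only, not part of the commit):

    import os

    fhead = "renderCompositingMain."  # head, including trailing separator
    name = "baking"                   # tag to insert
    path_in = "/tmp/renderCompositingMain.####.exr"
    ext = "exr"

    filename_no_ext = os.path.splitext(os.path.basename(path_in))[0]
    after_head = filename_no_ext[len(fhead):]  # -> "####"
    print("{}{}.{}.{}".format(fhead, name, after_head, ext))
    # renderCompositingMain.baking.####.exr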
@ -17,7 +17,7 @@ class SetFrameRangeLoader(load.LoaderPlugin):
                "yeticache",
                "pointcache"]
    representations = ["*"]
    extension = {"*"}
    extensions = {"*"}

    label = "Set frame range"
    order = 11

@ -27,7 +27,7 @@ class LoadBackdropNodes(load.LoaderPlugin):

    families = ["workfile", "nukenodes"]
    representations = ["*"]
    extension = {"nk"}
    extensions = {"nk"}

    label = "Import Nuke Nodes"
    order = 0

@ -26,7 +26,7 @@ class AlembicCameraLoader(load.LoaderPlugin):

    families = ["camera"]
    representations = ["*"]
    extension = {"abc"}
    extensions = {"abc"}

    label = "Load Alembic Camera"
    icon = "camera"

@ -24,7 +24,7 @@ class LoadEffects(load.LoaderPlugin):

    families = ["effect"]
    representations = ["*"]
    extension = {"json"}
    extensions = {"json"}

    label = "Load Effects - nodes"
    order = 0

@ -25,7 +25,7 @@ class LoadEffectsInputProcess(load.LoaderPlugin):

    families = ["effect"]
    representations = ["*"]
    extension = {"json"}
    extensions = {"json"}

    label = "Load Effects - Input Process"
    order = 0

@ -26,7 +26,7 @@ class LoadGizmo(load.LoaderPlugin):

    families = ["gizmo"]
    representations = ["*"]
    extension = {"gizmo"}
    extensions = {"gizmo"}

    label = "Load Gizmo"
    order = 0

@ -28,7 +28,7 @@ class LoadGizmoInputProcess(load.LoaderPlugin):

    families = ["gizmo"]
    representations = ["*"]
    extension = {"gizmo"}
    extensions = {"gizmo"}

    label = "Load Gizmo - Input Process"
    order = 0

@ -9,7 +9,7 @@ class MatchmoveLoader(load.LoaderPlugin):

    families = ["matchmove"]
    representations = ["*"]
    extension = {"py"}
    extensions = {"py"}

    defaults = ["Camera", "Object"]


@ -24,7 +24,7 @@ class AlembicModelLoader(load.LoaderPlugin):

    families = ["model", "pointcache", "animation"]
    representations = ["*"]
    extension = {"abc"}
    extensions = {"abc"}

    label = "Load Alembic"
    icon = "cube"

@ -22,7 +22,7 @@ class LinkAsGroup(load.LoaderPlugin):

    families = ["workfile", "nukenodes"]
    representations = ["*"]
    extension = {"nk"}
    extensions = {"nk"}

    label = "Load Precomp"
    order = 0
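All ten hunks above correct the same attribute name: the loader machinery
filters on `extensions` (plural), so the misspelled `extension` sets were
silently ignored. A simplified sketch of the kind of compatibility check that
consumes these attributes (illustration only; not the exact pipeline code):

    def is_compatible(loader_cls, repre_name, repre_ext):
        """Match a representation against a loader's declared filters."""
        names = set(loader_cls.representations)
        exts = set(getattr(loader_cls, "extensions", {"*"}))
        name_ok = "*" in names or repre_name in names
        ext_ok = "*" in exts or repre_ext in exts
        return name_ok and ext_ok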
@ -33,11 +33,13 @@ class ExtractReviewIntermediates(publish.Extractor):
        """
        nuke_publish = project_settings["nuke"]["publish"]
        deprecated_setting = nuke_publish["ExtractReviewDataMov"]
        current_setting = nuke_publish["ExtractReviewIntermediates"]
        current_setting = nuke_publish.get("ExtractReviewIntermediates")
        if deprecated_setting["enabled"]:
            # Use deprecated settings if they are still enabled
            cls.viewer_lut_raw = deprecated_setting["viewer_lut_raw"]
            cls.outputs = deprecated_setting["outputs"]
        elif current_setting is None:
            pass
        elif current_setting["enabled"]:
            cls.viewer_lut_raw = current_setting["viewer_lut_raw"]
            cls.outputs = current_setting["outputs"]
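The precedence rule here: the deprecated block wins while it is explicitly
enabled, the new block is used otherwise, and a missing new key (older
settings files) is tolerated. Condensed into a standalone function with a
made-up settings dict (illustration only, not part of the commit):

    def resolve_outputs(publish_settings):
        deprecated = publish_settings["ExtractReviewDataMov"]
        current = publish_settings.get("ExtractReviewIntermediates")
        if deprecated["enabled"]:
            return deprecated["outputs"]  # legacy settings win while enabled
        if current is not None and current["enabled"]:
            return current["outputs"]
        return None  # nothing configured

    settings = {
        "ExtractReviewDataMov": {"enabled": False, "outputs": {}},
        "ExtractReviewIntermediates": {"enabled": True,
                                       "outputs": {"baking": {}}},
    }
    assert resolve_outputs(settings) == {"baking": {}}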
@ -6,13 +6,10 @@ from .utils import (
)

from .pipeline import (
    install,
    uninstall,
    ResolveHost,
    ls,
    containerise,
    update_container,
    publish,
    launch_workfiles_app,
    maintained_selection,
    remove_instance,
    list_instances

@ -76,14 +73,10 @@ __all__ = [
    "bmdvf",

    # pipeline
    "install",
    "uninstall",
    "ResolveHost",
    "ls",
    "containerise",
    "update_container",
    "reload_pipeline",
    "publish",
    "launch_workfiles_app",
    "maintained_selection",
    "remove_instance",
    "list_instances",
@ -5,11 +5,6 @@ from qtpy import QtWidgets, QtCore

from openpype.tools.utils import host_tools

from .pipeline import (
    publish,
    launch_workfiles_app
)


def load_stylesheet():
    path = os.path.join(os.path.dirname(__file__), "menu_style.qss")

@ -113,7 +108,7 @@ class OpenPypeMenu(QtWidgets.QWidget):

    def on_workfile_clicked(self):
        print("Clicked Workfile")
        launch_workfiles_app()
        host_tools.show_workfiles()

    def on_create_clicked(self):
        print("Clicked Create")

@ -121,7 +116,7 @@ class OpenPypeMenu(QtWidgets.QWidget):

    def on_publish_clicked(self):
        print("Clicked Publish")
        publish(None)
        host_tools.show_publish(parent=None)

    def on_load_clicked(self):
        print("Clicked Load")
@ -12,14 +12,24 @@ from openpype.pipeline import (
    schema,
    register_loader_plugin_path,
    register_creator_plugin_path,
    deregister_loader_plugin_path,
    deregister_creator_plugin_path,
    AVALON_CONTAINER_ID,
)
from openpype.tools.utils import host_tools
from openpype.host import (
    HostBase,
    IWorkfileHost,
    ILoadHost
)

from . import lib
from .utils import get_resolve_module
from .workio import (
    open_file,
    save_file,
    file_extensions,
    has_unsaved_changes,
    work_root,
    current_file
)

log = Logger.get_logger(__name__)


@ -32,53 +42,56 @@ CREATE_PATH = os.path.join(PLUGINS_DIR, "create")
AVALON_CONTAINERS = ":AVALON_CONTAINERS"


def install():
    """Install resolve-specific functionality of avalon-core.
class ResolveHost(HostBase, IWorkfileHost, ILoadHost):
    name = "resolve"

    This is where you install menus and register families, data
    and loaders into resolve.
    def install(self):
        """Install resolve-specific functionality of avalon-core.

    It is called automatically when installing via `api.install(resolve)`.
        This is where you install menus and register families, data
        and loaders into resolve.

    See the Maya equivalent for inspiration on how to implement this.
        It is called automatically when installing via `api.install(resolve)`.

    """
        See the Maya equivalent for inspiration on how to implement this.

    log.info("openpype.hosts.resolve installed")
        """

    pyblish.register_host("resolve")
    pyblish.register_plugin_path(PUBLISH_PATH)
    log.info("Registering DaVinci Resovle plug-ins..")
        log.info("openpype.hosts.resolve installed")

    register_loader_plugin_path(LOAD_PATH)
    register_creator_plugin_path(CREATE_PATH)
        pyblish.register_host(self.name)
        pyblish.register_plugin_path(PUBLISH_PATH)
        print("Registering DaVinci Resolve plug-ins..")

    # register callback for switching publishable
    pyblish.register_callback("instanceToggled", on_pyblish_instance_toggled)
        register_loader_plugin_path(LOAD_PATH)
        register_creator_plugin_path(CREATE_PATH)

    get_resolve_module()
        # register callback for switching publishable
        pyblish.register_callback("instanceToggled",
                                  on_pyblish_instance_toggled)

        get_resolve_module()


def uninstall():
    """Uninstall all that was installed
    def open_workfile(self, filepath):
        return open_file(filepath)

    This is where you undo everything that was done in `install()`.
    That means, removing menus, deregistering families and data
    and everything. It should be as though `install()` was never run,
    because odds are calling this function means the user is interested
    in re-installing shortly afterwards. If, for example, he has been
    modifying the menu or registered families.
    def save_workfile(self, filepath=None):
        return save_file(filepath)

    """
    pyblish.deregister_host("resolve")
    pyblish.deregister_plugin_path(PUBLISH_PATH)
    log.info("Deregistering DaVinci Resovle plug-ins..")
    def work_root(self, session):
        return work_root(session)

    deregister_loader_plugin_path(LOAD_PATH)
    deregister_creator_plugin_path(CREATE_PATH)
    def get_current_workfile(self):
        return current_file()

    # register callback for switching publishable
    pyblish.deregister_callback("instanceToggled", on_pyblish_instance_toggled)
    def workfile_has_unsaved_changes(self):
        return has_unsaved_changes()

    def get_workfile_extensions(self):
        return file_extensions()

    def get_containers(self):
        return ls()


def containerise(timeline_item,

@ -206,15 +219,6 @@ def update_container(timeline_item, data=None):
    return bool(lib.set_timeline_item_pype_tag(timeline_item, container))


def launch_workfiles_app(*args):
    host_tools.show_workfiles()


def publish(parent):
    """Shorthand to publish from within host"""
    return host_tools.show_publish()


@contextlib.contextmanager
def maintained_selection():
    """Maintain selection during context
@ -17,7 +17,7 @@ def get_resolve_module():
    # dont run if already loaded
    if api.bmdvr:
        log.info(("resolve module is assigned to "
                  f"`pype.hosts.resolve.api.bmdvr`: {api.bmdvr}"))
                  f"`openpype.hosts.resolve.api.bmdvr`: {api.bmdvr}"))
        return api.bmdvr
    try:
        """

@ -41,6 +41,10 @@ def get_resolve_module():
        )
    elif sys.platform.startswith("linux"):
        expected_path = "/opt/resolve/libs/Fusion/Modules"
    else:
        raise NotImplementedError(
            "Unsupported platform: {}".format(sys.platform)
        )

    # check if the default path has it...
    print(("Unable to find module DaVinciResolveScript from "

@ -74,6 +78,6 @@ def get_resolve_module():
    api.bmdvr = bmdvr
    api.bmdvf = bmdvf
    log.info(("Assigning resolve module to "
              f"`pype.hosts.resolve.api.bmdvr`: {api.bmdvr}"))
              f"`openpype.hosts.resolve.api.bmdvr`: {api.bmdvr}"))
    log.info(("Assigning resolve module to "
              f"`pype.hosts.resolve.api.bmdvf`: {api.bmdvf}"))
              f"`openpype.hosts.resolve.api.bmdvf`: {api.bmdvf}"))
@ -27,7 +27,8 @@ def ensure_installed_host():
    if host:
        return host

    install_host(openpype.hosts.resolve.api)
    host = openpype.hosts.resolve.api.ResolveHost()
    install_host(host)
    return registered_host()


@ -37,10 +38,10 @@ def launch_menu():
    openpype.hosts.resolve.api.launch_pype_menu()


def open_file(path):
def open_workfile(path):
    # Avoid the need to "install" the host
    host = ensure_installed_host()
    host.open_file(path)
    host.open_workfile(path)


def main():

@ -49,7 +50,7 @@ def main():

    if workfile_path and os.path.exists(workfile_path):
        log.info(f"Opening last workfile: {workfile_path}")
        open_file(workfile_path)
        open_workfile(workfile_path)
    else:
        log.info("No last workfile set to open. Skipping..")
@ -8,12 +8,13 @@ log = Logger.get_logger(__name__)


def main(env):
    import openpype.hosts.resolve.api as bmdvr
    from openpype.hosts.resolve.api import ResolveHost, launch_pype_menu

    # activate resolve from openpype
    install_host(bmdvr)
    host = ResolveHost()
    install_host(host)

    bmdvr.launch_pype_menu()
    launch_pype_menu()


if __name__ == "__main__":
@ -6,6 +6,7 @@ import requests

import pyblish.api

from openpype import AYON_SERVER_ENABLED
from openpype.pipeline import legacy_io
from openpype.pipeline.publish import (
    OpenPypePyblishPluginMixin

@ -34,6 +35,8 @@ class FusionSubmitDeadline(
    targets = ["local"]

    # presets
    plugin = None

    priority = 50
    chunk_size = 1
    concurrent_tasks = 1

@ -173,7 +176,7 @@ class FusionSubmitDeadline(
            "SecondaryPool": instance.data.get("secondaryPool"),
            "Group": self.group,

            "Plugin": "Fusion",
            "Plugin": self.plugin,
            "Frames": "{start}-{end}".format(
                start=int(instance.data["frameStartHandle"]),
                end=int(instance.data["frameEndHandle"])

@ -216,16 +219,29 @@ class FusionSubmitDeadline(

        # Include critical variables with submission
        keys = [
            # TODO: This won't work if the slaves don't have access to
            #       these paths, such as if slaves are running Linux and the
            #       submitter is on Windows.
            "PYTHONPATH",
            "OFX_PLUGIN_PATH",
            "FUSION9_MasterPrefs"
            "FTRACK_API_KEY",
            "FTRACK_API_USER",
            "FTRACK_SERVER",
            "AVALON_PROJECT",
            "AVALON_ASSET",
            "AVALON_TASK",
            "AVALON_APP_NAME",
            "OPENPYPE_DEV",
            "OPENPYPE_LOG_NO_COLORS",
            "IS_TEST"
        ]
        environment = dict({key: os.environ[key] for key in keys
                            if key in os.environ}, **legacy_io.Session)

        # to recognize render jobs
        if AYON_SERVER_ENABLED:
            environment["AYON_BUNDLE_NAME"] = os.environ["AYON_BUNDLE_NAME"]
            render_job_label = "AYON_RENDER_JOB"
        else:
            render_job_label = "OPENPYPE_RENDER_JOB"

        environment[render_job_label] = "1"

        payload["JobInfo"].update({
            "EnvironmentKeyValue%d" % index: "{key}={value}".format(
                key=key,
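Deadline job submissions carry the environment as numbered
`EnvironmentKeyValue<N>` entries in the job info payload. A compact sketch of
how such a fragment is assembled from a whitelist (illustration only; the key
names are examples):

    import os

    keys = ["AVALON_PROJECT", "AVALON_TASK", "OPENPYPE_LOG_NO_COLORS"]
    environment = {key: os.environ[key] for key in keys if key in os.environ}
    environment["OPENPYPE_RENDER_JOB"] = "1"  # marker for render job listeners

    job_info = {}
    for index, (key, value) in enumerate(environment.items()):
        job_info["EnvironmentKeyValue%d" % index] = "{}={}".format(key, value)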
@ -96,7 +96,7 @@ class AyonDeadlinePlugin(DeadlinePlugin):
        for path in exe_list.split(";"):
            if path.startswith("~"):
                path = os.path.expanduser(path)
                expanded_paths.append(path)
            expanded_paths.append(path)
        exe = FileUtils.SearchFileList(";".join(expanded_paths))

        if exe == "":
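The hunk above moves the append out of the `if`, fixing a bug where only
`~`-prefixed executables ever reached the search list. The before/after
behaviour in isolation (illustration only, not part of the commit):

    import os

    exe_list = "~/bin/app;/usr/local/bin/app"

    before = []
    for path in exe_list.split(";"):
        if path.startswith("~"):
            path = os.path.expanduser(path)
            before.append(path)  # old: only tilde paths were collected

    after = []
    for path in exe_list.split(";"):
        if path.startswith("~"):
            path = os.path.expanduser(path)
        after.append(path)  # new: every path is collected

    print(len(before), len(after))  # 1 2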
@ -25,6 +25,7 @@ def _get_template_id(renderer):
    :rtype: int
    """

    # TODO: Use settings from context?
    templates = get_system_settings()["modules"]["muster"]["templates_mapping"]
    if not templates:
        raise RuntimeError(("Muster template mapping missing in "
@ -1,5 +1,6 @@
import os
import shutil
import filecmp

from openpype.client.entities import get_representations
from openpype.lib.applications import PreLaunchHook, LaunchTypes

@ -194,3 +195,69 @@ class CopyLastPublishedWorkfile(PreLaunchHook):
        self.data["last_workfile_path"] = local_workfile_path
        # Keep source filepath for further path conformation
        self.data["source_filepath"] = last_published_workfile_path

        # Get resources directory
        resources_dir = os.path.join(
            os.path.dirname(local_workfile_path), 'resources'
        )
        # Make resource directory if it doesn't exist
        if not os.path.exists(resources_dir):
            os.mkdir(resources_dir)

        # Copy resources to the local resources directory
        for file in workfile_representation['files']:
            # Get resource main path
            resource_main_path = anatomy.fill_root(file["path"])

            # Get resource file basename
            resource_basename = os.path.basename(resource_main_path)

            # Only copy if the resource file exists, and it's not the workfile
            if (
                not os.path.exists(resource_main_path)
                or resource_basename == os.path.basename(
                    last_published_workfile_path
                )
            ):
                continue

            # Get resource path in workfile folder
            resource_work_path = os.path.join(
                resources_dir, resource_basename
            )

            # Check if the resource file already exists in the resources folder
            if os.path.exists(resource_work_path):
                # Check if both files are the same
                if filecmp.cmp(resource_main_path, resource_work_path):
                    self.log.warning(
                        'Resource "{}" already exists.'
                        .format(resource_basename)
                    )
                    continue
                else:
                    # Add `.old` to existing resource path
                    resource_path_old = resource_work_path + '.old'
                    if os.path.exists(resource_work_path + '.old'):
                        for i in range(1, 100):
                            p = resource_path_old + '%02d' % i
                            if not os.path.exists(p):
                                # Rename existing resource file to
                                # `resource_name.old` + 2 digits
                                shutil.move(resource_work_path, p)
                                break
                        else:
                            self.log.warning(
                                'There are a hundred old files for '
                                'resource "{}". '
                                'Perhaps it is time to clean up your '
                                'resources folder'
                                .format(resource_basename)
                            )
                            continue
                    else:
                        # Rename existing resource file to `resource_name.old`
                        shutil.move(resource_work_path, resource_path_old)

            # Copy resource file to workfile resources folder
            shutil.copy(resource_main_path, resources_dir)
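The rotation logic keeps superseded copies by renaming to `<name>.old`, then
`<name>.old01` through `<name>.old99`. The same backoff as a self-contained
helper (illustration only, not part of the commit):

    import os
    import shutil

    def rotate_to_old(path):
        """Move `path` aside as `.old`, or `.old01`..`.old99` if taken."""
        old_path = path + ".old"
        if not os.path.exists(old_path):
            shutil.move(path, old_path)
            return old_path
        for i in range(1, 100):
            candidate = old_path + "%02d" % i
            if not os.path.exists(candidate):
                shutil.move(path, candidate)
                return candidate
        return None  # all hundred slots taken; the caller should warn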
@ -25,7 +25,10 @@ from openpype.tests.lib import is_in_tests

from .publish.lib import filter_pyblish_plugins
from .anatomy import Anatomy
from .template_data import get_template_data_with_names
from .template_data import (
    get_template_data_with_names,
    get_template_data
)
from .workfile import (
    get_workfile_template_key,
    get_custom_workfile_template_by_string_context,

@ -658,3 +661,70 @@ def get_process_id():
    if _process_id is None:
        _process_id = str(uuid.uuid4())
    return _process_id


def get_current_context_template_data():
    """Template data for template fill from current context

    Returns:
        Dict[str, Any] of the following tokens and their values
        Supported Tokens:
            - Regular Tokens
                - app
                - user
                - asset
                - parent
                - hierarchy
                - folder[name]
                - root[work, ...]
                - studio[code, name]
                - project[code, name]
                - task[type, name, short]

            - Context Specific Tokens
                - assetData[frameStart]
                - assetData[frameEnd]
                - assetData[handleStart]
                - assetData[handleEnd]
                - assetData[frameStartHandle]
                - assetData[frameEndHandle]
                - assetData[resolutionHeight]
                - assetData[resolutionWidth]

    """

    # pre-prepare get_template_data args
    current_context = get_current_context()
    project_name = current_context["project_name"]
    asset_name = current_context["asset_name"]
    anatomy = Anatomy(project_name)

    # prepare get_template_data args
    project_doc = get_project(project_name)
    asset_doc = get_asset_by_name(project_name, asset_name)
    task_name = current_context["task_name"]
    host_name = get_current_host_name()

    # get regular template data
    template_data = get_template_data(
        project_doc, asset_doc, task_name, host_name
    )

    template_data["root"] = anatomy.roots

    # get context specific vars
    asset_data = asset_doc["data"].copy()

    # compute `frameStartHandle` and `frameEndHandle`
    if "frameStart" in asset_data and "handleStart" in asset_data:
        asset_data["frameStartHandle"] = \
            asset_data["frameStart"] - asset_data["handleStart"]

    if "frameEnd" in asset_data and "handleEnd" in asset_data:
        asset_data["frameEndHandle"] = \
            asset_data["frameEnd"] + asset_data["handleEnd"]

    # add assetData
    template_data["assetData"] = asset_data

    return template_data
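The returned mapping is meant to be fed into the pipeline's template
formatting; nested tokens become dictionary lookups. A small fill with
hypothetical values using plain `str.format` (illustration only):

    template_data = {
        "project": {"name": "demo", "code": "dm"},
        "asset": "sh010",
        "task": {"name": "animation", "type": "Animation", "short": "anim"},
        "assetData": {"frameStart": 1001, "frameEnd": 1100},
    }

    template = "{project[code]}_{asset}_{task[name]}.v001.ma"
    print(template.format(**template_data))  # dm_sh010_animation.v001.ma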
@ -107,17 +107,18 @@ def get_time_data_from_instance_or_context(instance):
        TimeData: dataclass holding time information.

    """
    context = instance.context
    return TimeData(
        start=(instance.data.get("frameStart") or
               instance.context.data.get("frameStart")),
        end=(instance.data.get("frameEnd") or
             instance.context.data.get("frameEnd")),
        fps=(instance.data.get("fps") or
             instance.context.data.get("fps")),
        handle_start=(instance.data.get("handleStart") or
                      instance.context.data.get("handleStart")),  # noqa: E501
        handle_end=(instance.data.get("handleEnd") or
                    instance.context.data.get("handleEnd"))
        start=instance.data.get("frameStart", context.data.get("frameStart")),
        end=instance.data.get("frameEnd", context.data.get("frameEnd")),
        fps=instance.data.get("fps", context.data.get("fps")),
        step=instance.data.get("byFrameStep", instance.data.get("step", 1)),
        handle_start=instance.data.get(
            "handleStart", context.data.get("handleStart")
        ),
        handle_end=instance.data.get(
            "handleEnd", context.data.get("handleEnd")
        )
    )
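Worth noting why the fallback style likely changed: `a or b` falls through to
the context whenever the instance value is falsy, so a legitimate
`frameStart` of 0 would silently be replaced, while `dict.get(key, default)`
only falls back when the key is absent. A tiny demonstration (illustration
only, not part of the commit):

    instance_data = {"frameStart": 0}  # 0 is a valid first frame
    context_data = {"frameStart": 1001}

    # Old pattern: a falsy value loses to the context
    old = instance_data.get("frameStart") or context_data.get("frameStart")
    # New pattern: only a *missing* key falls back
    new = instance_data.get("frameStart", context_data.get("frameStart"))

    print(old, new)  # 1001 0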
@ -83,10 +83,6 @@ class OpenTaskPath(LauncherAction):
        if os.path.exists(valid_workdir):
            return valid_workdir

        # If task was selected, try to find asset path only to asset
        if not task_name:
            raise AssertionError("Folder does not exist.")

        data.pop("task", None)
        workdir = anatomy.templates_obj["work"]["folder"].format(data)
        valid_workdir = self._find_first_filled_path(workdir)

@ -95,7 +91,7 @@ class OpenTaskPath(LauncherAction):
            valid_workdir = os.path.normpath(valid_workdir)
            if os.path.exists(valid_workdir):
                return valid_workdir
        raise AssertionError("Folder does not exist.")
        raise AssertionError("Folder does not exist yet.")

    @staticmethod
    def open_in_explorer(path):
@ -213,7 +213,8 @@ class PypeCommands:
        pass

    def run_tests(self, folder, mark, pyargs,
                  test_data_folder, persist, app_variant, timeout, setup_only):
                  test_data_folder, persist, app_variant, timeout, setup_only,
                  mongo_url):
        """
        Runs tests from 'folder'


@ -226,6 +227,10 @@ class PypeCommands:
            end
            app_variant (str): variant (eg 2020 for AE), empty if use
                latest installed version
            timeout (int): explicit timeout for single test
            setup_only (bool): if only preparation steps should be
                triggered, no tests (useful for debugging/development)
            mongo_url (str): url to Openpype Mongo database
        """
        print("run_tests")
        if folder:

@ -264,6 +269,9 @@ class PypeCommands:
        if setup_only:
            args.extend(["--setup_only", setup_only])

        if mongo_url:
            args.extend(["--mongo_url", mongo_url])

        print("run_tests args: {}".format(args))
        import pytest
        pytest.main(args)
@ -748,15 +748,17 @@ def _convert_nuke_project_settings(ayon_settings, output):
    )

    new_review_data_outputs = {}
    outputs_settings = None
    outputs_settings = []
    # Check deprecated ExtractReviewDataMov
    #   settings for backwards compatibility
    deprecrated_review_settings = ayon_publish["ExtractReviewDataMov"]
    current_review_settings = (
        ayon_publish["ExtractReviewIntermediates"]
        ayon_publish.get("ExtractReviewIntermediates")
    )
    if deprecrated_review_settings["enabled"]:
        outputs_settings = deprecrated_review_settings["outputs"]
    elif current_review_settings is None:
        pass
    elif current_review_settings["enabled"]:
        outputs_settings = current_review_settings["outputs"]
@ -52,7 +52,8 @@
        "priority": 50,
        "chunk_size": 10,
        "concurrent_tasks": 1,
        "group": ""
        "group": "",
        "plugin": "Fusion"
    },
    "NukeSubmitDeadline": {
        "enabled": true,
@ -1,4 +1,16 @@
{
    "general": {
        "update_houdini_var_context": {
            "enabled": true,
            "houdini_vars": [
                {
                    "var": "JOB",
                    "value": "{root[work]}/{project[name]}/{hierarchy}/{asset}/work/{task[name]}",
                    "is_directory": true
                }
            ]
        }
    },
    "imageio": {
        "activate_host_color_management": true,
        "ocio_config": {
@ -707,6 +707,9 @@
        "CollectMayaRender": {
            "sync_workfile_version": false
        },
        "CollectFbxAnimation": {
            "enabled": true
        },
        "CollectFbxCamera": {
            "enabled": false
        },

@ -826,6 +829,11 @@
            "redshift_render_attributes": [],
            "renderman_render_attributes": []
        },
        "ValidateResolution": {
            "enabled": true,
            "optional": true,
            "active": true
        },
        "ValidateCurrentRenderLayerIsRenderable": {
            "enabled": true,
            "optional": false,

@ -1120,6 +1128,11 @@
            "optional": true,
            "active": true
        },
        "ValidateAnimatedReferenceRig": {
            "enabled": true,
            "optional": false,
            "active": true
        },
        "ValidateAnimationContent": {
            "enabled": true,
            "optional": false,

@ -1140,6 +1153,16 @@
            "optional": false,
            "active": true
        },
        "ValidateSkeletonRigContents": {
            "enabled": true,
            "optional": true,
            "active": true
        },
        "ValidateSkeletonRigControllers": {
            "enabled": false,
            "optional": true,
            "active": true
        },
        "ValidateSkinclusterDeformerSet": {
            "enabled": true,
            "optional": false,

@ -1150,6 +1173,21 @@
            "optional": false,
            "allow_history_only": false
        },
        "ValidateSkeletonRigOutSetNodeIds": {
            "enabled": false,
            "optional": false,
            "allow_history_only": false
        },
        "ValidateSkeletonRigOutputIds": {
            "enabled": false,
            "optional": true,
            "active": true
        },
        "ValidateSkeletonTopGroupHierarchy": {
            "enabled": true,
            "optional": true,
            "active": true
        },
        "ValidateCameraAttributes": {
            "enabled": false,
            "optional": true,
@ -114,6 +114,65 @@
            }
        }
    },
    "mayapy": {
        "enabled": true,
        "label": "MayaPy",
        "icon": "{}/app_icons/maya.png",
        "host_name": "maya",
        "environment": {
            "MAYA_DISABLE_CLIC_IPM": "Yes",
            "MAYA_DISABLE_CIP": "Yes",
            "MAYA_DISABLE_CER": "Yes",
            "PYMEL_SKIP_MEL_INIT": "Yes",
            "LC_ALL": "C"
        },
        "variants": {
            "2024": {
                "use_python_2": false,
                "executables": {
                    "windows": [
                        "C:\\Program Files\\Autodesk\\Maya2024\\bin\\mayapy.exe"
                    ],
                    "darwin": [],
                    "linux": [
                        "/usr/autodesk/maya2024/bin/mayapy"
                    ]
                },
                "arguments": {
                    "windows": [
                        "-I"
                    ],
                    "darwin": [],
                    "linux": [
                        "-I"
                    ]
                },
                "environment": {}
            },
            "2023": {
                "use_python_2": false,
                "executables": {
                    "windows": [
                        "C:\\Program Files\\Autodesk\\Maya2023\\bin\\mayapy.exe"
                    ],
                    "darwin": [],
                    "linux": [
                        "/usr/autodesk/maya2023/bin/mayapy"
                    ]
                },
                "arguments": {
                    "windows": [
                        "-I"
                    ],
                    "darwin": [],
                    "linux": [
                        "-I"
                    ]
                },
                "environment": {}
            }
        }
    },
    "3dsmax": {
        "enabled": true,
        "label": "3ds max",
@ -289,6 +289,15 @@
                    "type": "text",
                    "key": "group",
                    "label": "Group Name"
                },
                {
                    "type": "enum",
                    "key": "plugin",
                    "label": "Deadline Plugin",
                    "enum_items": [
                        {"Fusion": "Fusion"},
                        {"FusionCmd": "FusionCmd"}
                    ]
                }
            ]
        },
@ -5,6 +5,10 @@
    "label": "Houdini",
    "is_file": true,
    "children": [
        {
            "type": "schema",
            "name": "schema_houdini_general"
        },
        {
            "key": "imageio",
            "type": "dict",
@ -0,0 +1,53 @@
{
    "type": "dict",
    "key": "general",
    "label": "General",
    "collapsible": true,
    "is_group": true,
    "children": [
        {
            "type": "dict",
            "collapsible": true,
            "checkbox_key": "enabled",
            "key": "update_houdini_var_context",
            "label": "Update Houdini Vars on context change",
            "children": [
                {
                    "type": "boolean",
                    "key": "enabled",
                    "label": "Enabled"
                },
                {
                    "type": "label",
                    "label": "Sync vars with context changes.<br>If a value is treated as a directory on update it will be ensured the folder exists"
                },
                {
                    "type": "list",
                    "key": "houdini_vars",
                    "label": "Houdini Vars",
                    "collapsible": false,
                    "object_type": {
                        "type": "dict",
                        "children": [
                            {
                                "type": "text",
                                "key": "var",
                                "label": "Var"
                            },
                            {
                                "type": "text",
                                "key": "value",
                                "label": "Value"
                            },
                            {
                                "type": "boolean",
                                "key": "is_directory",
                                "label": "Treat as directory"
                            }
                        ]
                    }
                }
            ]
        }
    ]
}
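For orientation, a handler consuming these settings would resolve each
templated value for the new context, optionally ensure the directory exists,
and push the variable into the session. A hedged sketch of that shape
(`hou.putenv` is the standard Houdini call; the settings layout follows the
schema above, the rest is illustrative and not the actual host code):

    import os
    import hou  # only available inside a Houdini session

    def apply_houdini_vars(settings, fill_template):
        """Apply configured vars; `fill_template` resolves {tokens}."""
        config = settings["general"]["update_houdini_var_context"]
        if not config["enabled"]:
            return
        for item in config["houdini_vars"]:
            value = fill_template(item["value"])
            if item["is_directory"] and not os.path.exists(value):
                os.makedirs(value)  # ensure the folder exists
            hou.putenv(item["var"], value)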
@ -21,6 +21,20 @@
            }
        ]
    },
    {
        "type": "dict",
        "collapsible": true,
        "key": "CollectFbxAnimation",
        "label": "Collect Fbx Animation",
        "checkbox_key": "enabled",
        "children": [
            {
                "type": "boolean",
                "key": "enabled",
                "label": "Enabled"
            }
        ]
    },
    {
        "type": "dict",
        "collapsible": true,

@ -417,6 +431,10 @@
        "type": "schema_template",
        "name": "template_publish_plugin",
        "template_data": [
            {
                "key": "ValidateResolution",
                "label": "Validate Resolution Settings"
            },
            {
                "key": "ValidateCurrentRenderLayerIsRenderable",
                "label": "Validate Current Render Layer Has Renderable Camera"

@ -793,6 +811,10 @@
            "key": "ValidateRigControllers",
            "label": "Validate Rig Controllers"
        },
        {
            "key": "ValidateAnimatedReferenceRig",
            "label": "Validate Animated Reference Rig"
        },
        {
            "key": "ValidateAnimationContent",
            "label": "Validate Animation Content"

@ -809,9 +831,51 @@
            "key": "ValidateSkeletalMeshHierarchy",
            "label": "Validate Skeletal Mesh Top Node"
        },
        {
        {
            "key": "ValidateSkeletonRigContents",
            "label": "Validate Skeleton Rig Contents"
        },
        {
            "key": "ValidateSkeletonRigControllers",
            "label": "Validate Skeleton Rig Controllers"
        },
        {
            "key": "ValidateSkinclusterDeformerSet",
            "label": "Validate Skincluster Deformer Relationships"
        },
        {
            "key": "ValidateSkeletonRigOutputIds",
            "label": "Validate Skeleton Rig Output Ids"
        },
        {
            "key": "ValidateSkeletonTopGroupHierarchy",
            "label": "Validate Skeleton Top Group Hierarchy"
        }
    ]
},

{
    "type": "dict",
    "collapsible": true,
    "checkbox_key": "enabled",
    "key": "ValidateRigOutSetNodeIds",
    "label": "Validate Rig Out Set Node Ids",
    "is_group": true,
    "children": [
        {
            "type": "boolean",
            "key": "enabled",
            "label": "Enabled"
        },
        {
            "type": "boolean",
            "key": "optional",
            "label": "Optional"
        },
        {
            "type": "boolean",
            "key": "allow_history_only",
            "label": "Allow history only"
        }
    ]
},

@ -819,8 +883,8 @@
    "type": "dict",
    "collapsible": true,
    "checkbox_key": "enabled",
    "key": "ValidateRigOutSetNodeIds",
    "label": "Validate Rig Out Set Node Ids",
    "key": "ValidateSkeletonRigOutSetNodeIds",
    "label": "Validate Skeleton Rig Out Set Node Ids",
    "is_group": true,
    "children": [
        {
@ -0,0 +1,39 @@
{
    "type": "dict",
    "key": "mayapy",
    "label": "Autodesk MayaPy",
    "collapsible": true,
    "checkbox_key": "enabled",
    "children": [
        {
            "type": "boolean",
            "key": "enabled",
            "label": "Enabled"
        },
        {
            "type": "schema_template",
            "name": "template_host_unchangables"
        },
        {
            "key": "environment",
            "label": "Environment",
            "type": "raw-json"
        },
        {
            "type": "dict-modifiable",
            "key": "variants",
            "collapsible_key": true,
            "use_label_wrap": false,
            "object_type": {
                "type": "dict",
                "collapsible": true,
                "children": [
                    {
                        "type": "schema_template",
                        "name": "template_host_variant_items"
                    }
                ]
            }
        }
    ]
}
@ -9,6 +9,10 @@
        "type": "schema",
        "name": "schema_maya"
    },
    {
        "type": "schema",
        "name": "schema_mayapy"
    },
    {
        "type": "schema",
        "name": "schema_3dsmax"
@ -272,7 +272,7 @@ class AbstractLauncherFrontEnd(AbstractLauncherCommon):

    @abstractmethod
    def set_application_force_not_open_workfile(
        self, project_name, folder_id, task_id, action_id, enabled
        self, project_name, folder_id, task_id, action_ids, enabled
    ):
        """This is application action related to force not open last workfile.


@ -280,7 +280,7 @@ class AbstractLauncherFrontEnd(AbstractLauncherCommon):
            project_name (Union[str, None]): Project name.
            folder_id (Union[str, None]): Folder id.
            task_id (Union[str, None]): Task id.
            action_id (str): Action identifier.
            action_ids (Iterable[str]): Action identifiers.
            enabled (bool): New value of force not open workfile.
        """


@ -295,3 +295,13 @@ class AbstractLauncherFrontEnd(AbstractLauncherCommon):
        """

        pass

    @abstractmethod
    def refresh_actions(self):
        """Refresh actions and all related data.

        Triggers 'controller.refresh.actions.started' event at the beginning
        and 'controller.refresh.actions.finished' at the end.
        """

        pass
@ -121,10 +121,10 @@ class BaseLauncherController(
            project_name, folder_id, task_id)

    def set_application_force_not_open_workfile(
        self, project_name, folder_id, task_id, action_id, enabled
        self, project_name, folder_id, task_id, action_ids, enabled
    ):
        self._actions_model.set_application_force_not_open_workfile(
            project_name, folder_id, task_id, action_id, enabled
            project_name, folder_id, task_id, action_ids, enabled
        )

    def trigger_action(self, project_name, folder_id, task_id, identifier):

@ -145,5 +145,17 @@ class BaseLauncherController(

        self._emit_event("controller.refresh.finished")

    def refresh_actions(self):
        self._emit_event("controller.refresh.actions.started")

        # Refresh project settings (used for actions discovery)
        self._project_settings = {}
        # Refresh projects - they define applications
        self._projects_model.reset()
        # Refresh actions
        self._actions_model.refresh()

        self._emit_event("controller.refresh.actions.finished")

    def _emit_event(self, topic, data=None):
        self.emit_event(topic, data, "controller")
@ -326,13 +326,14 @@ class ActionsModel:
        return output

    def set_application_force_not_open_workfile(
        self, project_name, folder_id, task_id, action_id, enabled
        self, project_name, folder_id, task_id, action_ids, enabled
    ):
        no_workfile_reg_data = self._get_no_last_workfile_reg_data()
        project_data = no_workfile_reg_data.setdefault(project_name, {})
        folder_data = project_data.setdefault(folder_id, {})
        task_data = folder_data.setdefault(task_id, {})
        task_data[action_id] = enabled
        for action_id in action_ids:
            task_data[action_id] = enabled
        self._launcher_tool_reg.set_item(
            self._not_open_workfile_reg_key, no_workfile_reg_data
        )

@ -359,7 +360,10 @@ class ActionsModel:
                project_name, folder_id, task_id
            )
            force_not_open_workfile = per_action.get(identifier, False)
            action.data["start_last_workfile"] = force_not_open_workfile
            if force_not_open_workfile:
                action.data["start_last_workfile"] = False
            else:
                action.data.pop("start_last_workfile", None)
            action.process(session)
        except Exception as exc:
            self.log.warning("Action trigger failed.", exc_info=True)
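The registry updated above is a three-level nesting (project, folder, task)
with per-action flags at the leaves; `setdefault` builds the intermediate
dicts on demand. The same shape in isolation (illustration only, not part of
the commit):

    registry = {}

    def set_flags(registry, project, folder, task, action_ids, enabled):
        task_data = (registry.setdefault(project, {})
                             .setdefault(folder, {})
                             .setdefault(task, {}))
        for action_id in action_ids:
            task_data[action_id] = enabled

    set_flags(registry, "demo", "f1", "t1", ["maya/2024", "maya/2023"], True)
    print(registry)
    # {'demo': {'f1': {'t1': {'maya/2024': True, 'maya/2023': True}}}}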
@ -19,6 +19,21 @@ ANIMATION_STATE_ROLE = QtCore.Qt.UserRole + 6
FORCE_NOT_OPEN_WORKFILE_ROLE = QtCore.Qt.UserRole + 7


def _variant_label_sort_getter(action_item):
    """Get variant label value for sorting.

    Make sure the output value is a string.

    Args:
        action_item (ActionItem): Action item.

    Returns:
        str: Variant label or empty string.
    """

    return action_item.variant_label or ""


class ActionsQtModel(QtGui.QStandardItemModel):
    """Qt model for actions.


@ -31,10 +46,6 @@ class ActionsQtModel(QtGui.QStandardItemModel):
    def __init__(self, controller):
        super(ActionsQtModel, self).__init__()

        controller.register_event_callback(
            "controller.refresh.finished",
            self._on_controller_refresh_finished,
        )
        controller.register_event_callback(
            "selection.project.changed",
            self._on_selection_project_changed,

@ -51,6 +62,7 @@ class ActionsQtModel(QtGui.QStandardItemModel):
        self._controller = controller

        self._items_by_id = {}
        self._action_items_by_id = {}
        self._groups_by_id = {}

        self._selected_project_name = None

@ -72,8 +84,12 @@ class ActionsQtModel(QtGui.QStandardItemModel):
    def get_item_by_id(self, action_id):
        return self._items_by_id.get(action_id)

    def get_action_item_by_id(self, action_id):
        return self._action_items_by_id.get(action_id)

    def _clear_items(self):
        self._items_by_id = {}
        self._action_items_by_id = {}
        self._groups_by_id = {}
        root = self.invisibleRootItem()
        root.removeRows(0, root.rowCount())

@ -101,12 +117,14 @@ class ActionsQtModel(QtGui.QStandardItemModel):

        groups_by_id = {}
        for action_items in items_by_label.values():
            action_items.sort(key=_variant_label_sort_getter, reverse=True)
            first_item = next(iter(action_items))
            all_action_items_info.append((first_item, len(action_items) > 1))
            groups_by_id[first_item.identifier] = action_items

        new_items = []
        items_by_id = {}
        action_items_by_id = {}
        for action_item_info in all_action_items_info:
            action_item, is_group = action_item_info
            icon = get_qt_icon(action_item.icon)

@ -132,6 +150,7 @@ class ActionsQtModel(QtGui.QStandardItemModel):
                action_item.force_not_open_workfile,
                FORCE_NOT_OPEN_WORKFILE_ROLE)
            items_by_id[action_item.identifier] = item
            action_items_by_id[action_item.identifier] = action_item

        if new_items:
            root_item.appendRows(new_items)

@ -139,19 +158,14 @@ class ActionsQtModel(QtGui.QStandardItemModel):
        to_remove = set(self._items_by_id.keys()) - set(items_by_id.keys())
        for identifier in to_remove:
            item = self._items_by_id.pop(identifier)
            self._action_items_by_id.pop(identifier)
            root_item.removeRow(item.row())

        self._groups_by_id = groups_by_id
        self._items_by_id = items_by_id
        self._action_items_by_id = action_items_by_id
        self.refreshed.emit()

    def _on_controller_refresh_finished(self):
        context = self._controller.get_selected_context()
        self._selected_project_name = context["project_name"]
        self._selected_folder_id = context["folder_id"]
        self._selected_task_id = context["task_id"]
        self.refresh()

    def _on_selection_project_changed(self, event):
        self._selected_project_name = event["project_name"]
        self._selected_folder_id = None

@ -336,6 +350,9 @@ class ActionsWidget(QtWidgets.QWidget):

        self._set_row_height(1)

    def refresh(self):
        self._model.refresh()

    def _set_row_height(self, rows):
        self.setMinimumHeight(rows * 75)


@ -387,9 +404,15 @@ class ActionsWidget(QtWidgets.QWidget):
            checkbox.setChecked(True)

        action_id = index.data(ACTION_ID_ROLE)
        is_group = index.data(ACTION_IS_GROUP_ROLE)
        if is_group:
            action_items = self._model.get_group_items(action_id)
        else:
            action_items = [self._model.get_action_item_by_id(action_id)]
        action_ids = {action_item.identifier for action_item in action_items}
        checkbox.stateChanged.connect(
            lambda: self._on_checkbox_changed(
                action_id, checkbox.isChecked()
                action_ids, checkbox.isChecked()
            )
        )
        action = QtWidgets.QWidgetAction(menu)

@ -402,7 +425,7 @@ class ActionsWidget(QtWidgets.QWidget):
        menu.exec_(global_point)
        self._context_menu = None

    def _on_checkbox_changed(self, action_id, is_checked):
    def _on_checkbox_changed(self, action_ids, is_checked):
        if self._context_menu is not None:
            self._context_menu.close()


@ -410,7 +433,7 @@ class ActionsWidget(QtWidgets.QWidget):
        folder_id = self._model.get_selected_folder_id()
        task_id = self._model.get_selected_task_id()
        self._controller.set_application_force_not_open_workfile(
            project_name, folder_id, task_id, action_id, is_checked)
            project_name, folder_id, task_id, action_ids, is_checked)
        self._model.refresh()

    def _on_clicked(self, index):
@ -92,6 +92,10 @@ class HierarchyPage(QtWidgets.QWidget):
        if visible and project_name:
            self._projects_combobox.set_selection(project_name)

    def refresh(self):
        self._folders_widget.refresh()
        self._tasks_widget.refresh()

    def _on_back_clicked(self):
        self._controller.set_selected_project(None)
@ -73,6 +73,9 @@ class ProjectIconView(QtWidgets.QListView):

class ProjectsWidget(QtWidgets.QWidget):
"""Projects Page"""

refreshed = QtCore.Signal()

def __init__(self, controller, parent=None):
super(ProjectsWidget, self).__init__(parent=parent)

@ -104,6 +107,7 @@ class ProjectsWidget(QtWidgets.QWidget):
main_layout.addWidget(projects_view, 1)

projects_view.clicked.connect(self._on_view_clicked)
projects_model.refreshed.connect(self.refreshed)
projects_filter_text.textChanged.connect(
self._on_project_filter_change)
refresh_btn.clicked.connect(self._on_refresh_clicked)

@ -119,6 +123,15 @@ class ProjectsWidget(QtWidgets.QWidget):
self._projects_model = projects_model
self._projects_proxy_model = projects_proxy_model

def has_content(self):
"""Model has at least one project.

Returns:
bool: True if there is any content in the model.
"""

return self._projects_model.has_content()

def _on_view_clicked(self, index):
if index.isValid():
project_name = index.data(QtCore.Qt.DisplayRole)
@ -99,8 +99,8 @@ class LauncherWindow(QtWidgets.QWidget):
message_timer.setInterval(self.message_interval)
message_timer.setSingleShot(True)

refresh_timer = QtCore.QTimer()
refresh_timer.setInterval(self.refresh_interval)
actions_refresh_timer = QtCore.QTimer()
actions_refresh_timer.setInterval(self.refresh_interval)

page_slide_anim = QtCore.QVariantAnimation(self)
page_slide_anim.setDuration(self.page_side_anim_interval)

@ -108,8 +110,10 @@ class LauncherWindow(QtWidgets.QWidget):
page_slide_anim.setEndValue(1.0)
page_slide_anim.setEasingCurve(QtCore.QEasingCurve.OutQuad)

projects_page.refreshed.connect(self._on_projects_refresh)
message_timer.timeout.connect(self._on_message_timeout)
refresh_timer.timeout.connect(self._on_refresh_timeout)
actions_refresh_timer.timeout.connect(
self._on_actions_refresh_timeout)
page_slide_anim.valueChanged.connect(
self._on_page_slide_value_changed)
page_slide_anim.finished.connect(self._on_page_slide_finished)

@ -132,6 +134,7 @@ class LauncherWindow(QtWidgets.QWidget):
self._is_on_projects_page = True
self._window_is_active = False
self._refresh_on_activate = False
self._selected_project_name = None

self._pages_widget = pages_widget
self._pages_layout = pages_layout

@ -143,7 +146,7 @@ class LauncherWindow(QtWidgets.QWidget):
# self._action_history = action_history

self._message_timer = message_timer
self._refresh_timer = refresh_timer
self._actions_refresh_timer = actions_refresh_timer
self._page_slide_anim = page_slide_anim

hierarchy_page.setVisible(not self._is_on_projects_page)

@ -152,14 +155,14 @@ class LauncherWindow(QtWidgets.QWidget):
def showEvent(self, event):
super(LauncherWindow, self).showEvent(event)
self._window_is_active = True
if not self._refresh_timer.isActive():
self._refresh_timer.start()
if not self._actions_refresh_timer.isActive():
self._actions_refresh_timer.start()
self._controller.refresh()

def closeEvent(self, event):
super(LauncherWindow, self).closeEvent(event)
self._window_is_active = False
self._refresh_timer.stop()
self._actions_refresh_timer.stop()

def changeEvent(self, event):
if event.type() in (

@ -170,15 +173,15 @@ class LauncherWindow(QtWidgets.QWidget):
self._window_is_active = is_active
if is_active and self._refresh_on_activate:
self._refresh_on_activate = False
self._on_refresh_timeout()
self._refresh_timer.start()
self._on_actions_refresh_timeout()
self._actions_refresh_timer.start()

super(LauncherWindow, self).changeEvent(event)

def _on_refresh_timeout(self):
def _on_actions_refresh_timeout(self):
# Stop timer if widget is not visible
if self._window_is_active:
self._controller.refresh()
self._controller.refresh_actions()
else:
self._refresh_on_activate = True

@ -191,12 +194,26 @@ class LauncherWindow(QtWidgets.QWidget):

def _on_project_selection_change(self, event):
project_name = event["project_name"]
self._selected_project_name = project_name
if not project_name:
self._go_to_projects_page()

elif self._is_on_projects_page:
self._go_to_hierarchy_page(project_name)

def _on_projects_refresh(self):
# There is nothing to do, we're on projects page
if self._is_on_projects_page:
return

# No projects were found -> go back to projects page
if not self._projects_page.has_content():
self._go_to_projects_page()
return

self._hierarchy_page.refresh()
self._actions_widget.refresh()

def _on_action_trigger_started(self, event):
self._echo("Running action: {}".format(event["full_label"]))
@ -199,13 +199,18 @@ class HierarchyModel(object):
Hierarchy items are folders and tasks. Folders can have another folder
or the project as parent. Tasks can only have a folder as parent.
"""
lifetime = 60 # A minute

def __init__(self, controller):
self._folders_items = NestedCacheItem(levels=1, default_factory=dict)
self._folders_by_id = NestedCacheItem(levels=2, default_factory=dict)
self._folders_items = NestedCacheItem(
levels=1, default_factory=dict, lifetime=self.lifetime)
self._folders_by_id = NestedCacheItem(
levels=2, default_factory=dict, lifetime=self.lifetime)

self._task_items = NestedCacheItem(levels=2, default_factory=dict)
self._tasks_by_id = NestedCacheItem(levels=2, default_factory=dict)
self._task_items = NestedCacheItem(
levels=2, default_factory=dict, lifetime=self.lifetime)
self._tasks_by_id = NestedCacheItem(
levels=2, default_factory=dict, lifetime=self.lifetime)

self._folders_refreshing = set()
self._tasks_refreshing = set()
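The `lifetime` argument bounds how long cached hierarchy data stays valid before a refresh falls through to the server again. A minimal sketch of the idea, with a hypothetical single-level `TimedCache` standing in for the real `NestedCacheItem`:

    import time

    class TimedCache:
        """Hypothetical stand-in for NestedCacheItem's lifetime handling."""

        def __init__(self, lifetime=60):
            self._lifetime = lifetime
            self._data = None
            self._cached_at = None

        def update_data(self, data):
            # Remember the data and when it was cached
            self._data = data
            self._cached_at = time.time()

        @property
        def is_valid(self):
            # Valid only within 'lifetime' seconds of the last update;
            # an invalid cache is what triggers a new server query.
            if self._cached_at is None:
                return False
            return (time.time() - self._cached_at) < self._lifetime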
@ -56,11 +56,21 @@ class FoldersModel(QtGui.QStandardItemModel):

return self._has_content

def clear(self):
def refresh(self):
"""Refresh folders for last selected project.

Forces the folders model to update from the controller. This may or
may not trigger a server query, depending on the controller's cache.
"""

self.set_project_name(self._last_project_name)

def _clear_items(self):
self._items_by_id = {}
self._parent_id_by_id = {}
self._has_content = False
super(FoldersModel, self).clear()
root_item = self.invisibleRootItem()
root_item.removeRows(0, root_item.rowCount())

def get_index_by_id(self, item_id):
"""Get index by folder id.

@ -90,7 +100,7 @@ class FoldersModel(QtGui.QStandardItemModel):
self._is_refreshing = True

if self._last_project_name != project_name:
self.clear()
self._clear_items()
self._last_project_name = project_name

thread = self._refresh_threads.get(project_name)

@ -135,7 +145,7 @@ class FoldersModel(QtGui.QStandardItemModel):
def _fill_items(self, folder_items_by_id):
if not folder_items_by_id:
if folder_items_by_id is not None:
self.clear()
self._clear_items()
self._is_refreshing = False
self.refreshed.emit()
return

@ -247,6 +257,7 @@ class FoldersWidget(QtWidgets.QWidget):
folders_model = FoldersModel(controller)
folders_proxy_model = RecursiveSortFilterProxyModel()
folders_proxy_model.setSourceModel(folders_model)
folders_proxy_model.setSortCaseSensitivity(QtCore.Qt.CaseInsensitive)

folders_view.setModel(folders_proxy_model)

@ -293,6 +304,14 @@ class FoldersWidget(QtWidgets.QWidget):

self._folders_proxy_model.setFilterFixedString(name)

def refresh(self):
"""Refresh folders model.

Forces the folders model to update from the controller.
"""

self._folders_model.refresh()

def _on_project_selection_change(self, event):
project_name = event["project_name"]
self._set_project_name(project_name)

@ -300,9 +319,6 @@ class FoldersWidget(QtWidgets.QWidget):
def _set_project_name(self, project_name):
self._folders_model.set_project_name(project_name)

def _clear(self):
self._folders_model.clear()

def _on_folders_refresh_finished(self, event):
if event["sender"] != SENDER_NAME:
self._set_project_name(event["project_name"])
@ -44,14 +44,20 @@ class TasksModel(QtGui.QStandardItemModel):
# Initial state
self._add_invalid_selection_item()

def clear(self):
def _clear_items(self):
self._items_by_name = {}
self._has_content = False
self._remove_invalid_items()
super(TasksModel, self).clear()
root_item = self.invisibleRootItem()
root_item.removeRows(0, root_item.rowCount())

def refresh(self, project_name, folder_id):
"""Refresh tasks for folder.
def refresh(self):
"""Refresh tasks for last selected project and folder."""

self._refresh(self._last_project_name, self._last_folder_id)

def set_context(self, project_name, folder_id):
"""Set context for which tasks should be shown.

Args:
project_name (Union[str]): Name of project.

@ -121,7 +127,7 @@ class TasksModel(QtGui.QStandardItemModel):
return self._empty_tasks_item

def _add_invalid_item(self, item):
self.clear()
self._clear_items()
root_item = self.invisibleRootItem()
root_item.appendRow(item)

@ -299,6 +305,7 @@ class TasksWidget(QtWidgets.QWidget):
tasks_model = TasksModel(controller)
tasks_proxy_model = QtCore.QSortFilterProxyModel()
tasks_proxy_model.setSourceModel(tasks_model)
tasks_proxy_model.setSortCaseSensitivity(QtCore.Qt.CaseInsensitive)

tasks_view.setModel(tasks_proxy_model)

@ -334,8 +341,14 @@ class TasksWidget(QtWidgets.QWidget):
self._handle_expected_selection = handle_expected_selection
self._expected_selection_data = None

def _clear(self):
self._tasks_model.clear()
def refresh(self):
"""Refresh tasks for last selected project and folder.

Forces the tasks model to update from the controller. This may or
may not trigger a server query, depending on the controller's cache.
"""

self._tasks_model.refresh()

def _on_tasks_refresh_finished(self, event):
"""Tasks were refreshed in controller.

@ -353,13 +366,13 @@ class TasksWidget(QtWidgets.QWidget):
or event["folder_id"] != self._selected_folder_id
):
return
self._tasks_model.refresh(
self._tasks_model.set_context(
event["project_name"], self._selected_folder_id
)

def _folder_selection_changed(self, event):
self._selected_folder_id = event["folder_id"]
self._tasks_model.refresh(
self._tasks_model.set_context(
event["project_name"], self._selected_folder_id
)
@ -1,3 +1,3 @@
# -*- coding: utf-8 -*-
"""Package declaring Pype version."""
__version__ = "3.17.2-nightly.1"
__version__ = "3.17.2-nightly.4"
@ -109,6 +109,55 @@
}
]
},
"maya": {
"enabled": true,
"label": "Maya",
"icon": "{}/app_icons/maya.png",
"host_name": "maya",
"environment": "{\n \"MAYA_DISABLE_CLIC_IPM\": \"Yes\",\n \"MAYA_DISABLE_CIP\": \"Yes\",\n \"MAYA_DISABLE_CER\": \"Yes\",\n \"PYMEL_SKIP_MEL_INIT\": \"Yes\",\n \"LC_ALL\": \"C\"\n}\n",
"variants": [
{
"name": "2024",
"label": "2024",
"executables": {
"windows": [
"C:\\Program Files\\Autodesk\\Maya2024\\bin\\mayapy.exe"
],
"darwin": [],
"linux": [
"/usr/autodesk/maya2024/bin/mayapy"
]
},
"arguments": {
"windows": [],
"darwin": [],
"linux": []
},
"environment": "{\n \"MAYA_VERSION\": \"2024\"\n}",
"use_python_2": false
},
{
"name": "2023",
"label": "2023",
"executables": {
"windows": [
"C:\\Program Files\\Autodesk\\Maya2023\\bin\\mayapy.exe"
],
"darwin": [],
"linux": [
"/usr/autodesk/maya2023/bin/mayapy"
]
},
"arguments": {
"windows": [],
"darwin": [],
"linux": []
},
"environment": "{\n \"MAYA_VERSION\": \"2023\"\n}",
"use_python_2": false
}
]
},
"adsk_3dsmax": {
"enabled": true,
"label": "3ds Max",

@ -237,6 +286,7 @@
},
{
"name": "13-0",
"label": "13.0",
"use_python_2": false,
"executables": {
"windows": [

@ -319,6 +369,7 @@
},
{
"name": "13-0",
"label": "13.0",
"use_python_2": false,
"executables": {
"windows": [

@ -405,6 +456,7 @@
},
{
"name": "13-0",
"label": "13.0",
"use_python_2": false,
"executables": {
"windows": [

@ -491,6 +543,7 @@
},
{
"name": "13-0",
"label": "13.0",
"use_python_2": false,
"executables": {
"windows": [

@ -577,6 +630,7 @@
},
{
"name": "13-0",
"label": "13.0",
"use_python_2": false,
"executables": {
"windows": [
@ -124,6 +124,24 @@ class LimitGroupsSubmodel(BaseSettingsModel):
)


def fusion_deadline_plugin_enum():
"""Return a list of value/label dicts for the enumerator.

Returning a list of dicts allows a custom label to be displayed in
the UI.
"""
return [
{
"value": "Fusion",
"label": "Fusion"
},
{
"value": "FusionCmd",
"label": "FusionCmd"
}
]


class FusionSubmitDeadlineModel(BaseSettingsModel):
enabled: bool = Field(True, title="Enabled")
optional: bool = Field(False, title="Optional")

@ -132,6 +150,9 @@ class FusionSubmitDeadlineModel(BaseSettingsModel):
chunk_size: int = Field(10, title="Frame per Task")
concurrent_tasks: int = Field(1, title="Number of concurrent tasks")
group: str = Field("", title="Group Name")
plugin: str = Field("Fusion",
enum_resolver=fusion_deadline_plugin_enum,
title="Deadline Plugin")


class NukeSubmitDeadlineModel(BaseSettingsModel):
@ -1 +1 @@
__version__ = "0.1.1"
__version__ = "0.1.2"
45
server_addon/houdini/server/settings/general.py
Normal file

@ -0,0 +1,45 @@
from pydantic import Field
from ayon_server.settings import BaseSettingsModel


class HoudiniVarModel(BaseSettingsModel):
_layout = "expanded"
var: str = Field("", title="Var")
value: str = Field("", title="Value")
is_directory: bool = Field(False, title="Treat as directory")


class UpdateHoudiniVarcontextModel(BaseSettingsModel):
"""Sync vars with context changes.

If a value is treated as a directory, the folder is created on
update when it does not exist.
"""

enabled: bool = Field(title="Enabled")
# TODO this was dynamic dictionary '{var: path}'
houdini_vars: list[HoudiniVarModel] = Field(
default_factory=list,
title="Houdini Vars"
)


class GeneralSettingsModel(BaseSettingsModel):
update_houdini_var_context: UpdateHoudiniVarcontextModel = Field(
default_factory=UpdateHoudiniVarcontextModel,
title="Update Houdini Vars on context change"
)


DEFAULT_GENERAL_SETTINGS = {
"update_houdini_var_context": {
"enabled": True,
"houdini_vars": [
{
"var": "JOB",
"value": "{root[work]}/{project[name]}/{hierarchy}/{asset}/work/{task[name]}", # noqa
"is_directory": True
}
]
}
}
@ -4,7 +4,10 @@ from ayon_server.settings import (
MultiplatformPathModel,
MultiplatformPathListModel,
)

from .general import (
GeneralSettingsModel,
DEFAULT_GENERAL_SETTINGS
)
from .imageio import HoudiniImageIOModel
from .publish_plugins import (
PublishPluginsModel,

@ -52,6 +55,10 @@ class ShelvesModel(BaseSettingsModel):


class HoudiniSettings(BaseSettingsModel):
general: GeneralSettingsModel = Field(
default_factory=GeneralSettingsModel,
title="General"
)
imageio: HoudiniImageIOModel = Field(
default_factory=HoudiniImageIOModel,
title="Color Management (ImageIO)"

@ -73,6 +80,7 @@ class HoudiniSettings(BaseSettingsModel):


DEFAULT_VALUES = {
"general": DEFAULT_GENERAL_SETTINGS,
"shelves": [],
"create": DEFAULT_HOUDINI_CREATE_SETTINGS,
"publish": DEFAULT_HOUDINI_PUBLISH_SETTINGS
@ -1 +1 @@
__version__ = "0.1.3"
__version__ = "0.1.4"
@ -129,6 +129,10 @@ class CollectMayaRenderModel(BaseSettingsModel):
)


class CollectFbxAnimationModel(BaseSettingsModel):
enabled: bool = Field(title="Collect Fbx Animation")


class CollectFbxCameraModel(BaseSettingsModel):
enabled: bool = Field(title="CollectFbxCamera")


@ -364,6 +368,10 @@ class PublishersModel(BaseSettingsModel):
title="Collect Render Layers",
section="Collectors"
)
CollectFbxAnimation: CollectFbxAnimationModel = Field(
default_factory=CollectFbxAnimationModel,
title="Collect FBX Animation",
)
CollectFbxCamera: CollectFbxCameraModel = Field(
default_factory=CollectFbxCameraModel,
title="Collect Camera for FBX export",

@ -425,6 +433,10 @@ class PublishersModel(BaseSettingsModel):
default_factory=ValidateRenderSettingsModel,
title="Validate Render Settings"
)
ValidateResolution: BasicValidateModel = Field(
default_factory=BasicValidateModel,
title="Validate Resolution Setting"
)
ValidateCurrentRenderLayerIsRenderable: BasicValidateModel = Field(
default_factory=BasicValidateModel,
title="Validate Current Render Layer Has Renderable Camera"

@ -644,6 +656,10 @@ class PublishersModel(BaseSettingsModel):
default_factory=BasicValidateModel,
title="Validate Rig Controllers",
)
ValidateAnimatedReferenceRig: BasicValidateModel = Field(
default_factory=BasicValidateModel,
title="Validate Animated Reference Rig",
)
ValidateAnimationContent: BasicValidateModel = Field(
default_factory=BasicValidateModel,
title="Validate Animation Content",

@ -660,14 +676,34 @@ class PublishersModel(BaseSettingsModel):
default_factory=BasicValidateModel,
title="Validate Skeletal Mesh Top Node",
)
ValidateSkeletonRigContents: BasicValidateModel = Field(
default_factory=BasicValidateModel,
title="Validate Skeleton Rig Contents"
)
ValidateSkeletonRigControllers: BasicValidateModel = Field(
default_factory=BasicValidateModel,
title="Validate Skeleton Rig Controllers"
)
ValidateSkinclusterDeformerSet: BasicValidateModel = Field(
default_factory=BasicValidateModel,
title="Validate Skincluster Deformer Relationships",
)
ValidateSkeletonRigOutputIds: BasicValidateModel = Field(
default_factory=BasicValidateModel,
title="Validate Skeleton Rig Output Ids"
)
ValidateSkeletonTopGroupHierarchy: BasicValidateModel = Field(
default_factory=BasicValidateModel,
title="Validate Skeleton Top Group Hierarchy",
)
ValidateRigOutSetNodeIds: ValidateRigOutSetNodeIdsModel = Field(
default_factory=ValidateRigOutSetNodeIdsModel,
title="Validate Rig Out Set Node Ids",
)
ValidateSkeletonRigOutSetNodeIds: ValidateRigOutSetNodeIdsModel = Field(
default_factory=ValidateRigOutSetNodeIdsModel,
title="Validate Skeleton Rig Out Set Node Ids",
)
# Rig - END
ValidateCameraAttributes: BasicValidateModel = Field(
default_factory=BasicValidateModel,

@ -748,6 +784,9 @@ DEFAULT_PUBLISH_SETTINGS = {
"CollectMayaRender": {
"sync_workfile_version": False
},
"CollectFbxAnimation": {
"enabled": True
},
"CollectFbxCamera": {
"enabled": False
},

@ -867,6 +906,11 @@ DEFAULT_PUBLISH_SETTINGS = {
"redshift_render_attributes": [],
"renderman_render_attributes": []
},
"ValidateResolution": {
"enabled": True,
"optional": True,
"active": True
},
"ValidateCurrentRenderLayerIsRenderable": {
"enabled": True,
"optional": False,

@ -1143,6 +1187,11 @@ DEFAULT_PUBLISH_SETTINGS = {
"optional": True,
"active": True
},
"ValidateAnimatedReferenceRig": {
"enabled": True,
"optional": False,
"active": True
},
"ValidateAnimationContent": {
"enabled": True,
"optional": False,

@ -1163,6 +1212,16 @@ DEFAULT_PUBLISH_SETTINGS = {
"optional": False,
"active": True
},
"ValidateSkeletonRigContents": {
"enabled": True,
"optional": True,
"active": True
},
"ValidateSkeletonRigControllers": {
"enabled": False,
"optional": True,
"active": True
},
"ValidateSkinclusterDeformerSet": {
"enabled": True,
"optional": False,

@ -1173,6 +1232,21 @@ DEFAULT_PUBLISH_SETTINGS = {
"optional": False,
"allow_history_only": False
},
"ValidateSkeletonRigOutSetNodeIds": {
"enabled": False,
"optional": False,
"allow_history_only": False
},
"ValidateSkeletonRigOutputIds": {
"enabled": False,
"optional": True,
"active": True
},
"ValidateSkeletonTopGroupHierarchy": {
"enabled": True,
"optional": True,
"active": True
},
"ValidateCameraAttributes": {
"enabled": False,
"optional": True,
@ -1,3 +1,3 @@
# -*- coding: utf-8 -*-
"""Package declaring addon version."""
__version__ = "0.1.3"
__version__ = "0.1.5"
@ -29,6 +29,11 @@ def pytest_addoption(parser):
help="True - only setup test, do not run any tests"
)

parser.addoption(
"--mongo_url", action="store", default=None,
help="Provide url of the Mongo database."
)


@pytest.fixture(scope="module")
def test_data_folder(request):

@ -55,6 +60,11 @@ def setup_only(request):
return request.config.getoption("--setup_only")


@pytest.fixture(scope="module")
def mongo_url(request):
return request.config.getoption("--mongo_url")


@pytest.hookimpl(tryfirst=True, hookwrapper=True)
def pytest_runtest_makereport(item, call):
# execute all other hooks to obtain the report object

@ -147,11 +147,11 @@ class ModuleUnitTest(BaseTest):

@pytest.fixture(scope="module")
def db_setup(self, download_test_data, env_var, monkeypatch_session,
request):
request, mongo_url):
"""Restore prepared MongoDB dumps into selected DB."""
backup_dir = os.path.join(download_test_data, "input", "dumps")

uri = os.environ.get("OPENPYPE_MONGO")
uri = mongo_url or os.environ.get("OPENPYPE_MONGO")
db_handler = DBHandler(uri)
db_handler.setup_from_dump(self.TEST_DB_NAME, backup_dir,
overwrite=True,
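With the new CLI option and fixture in place, a test can resolve the target database the same way `db_setup` does above. A minimal sketch (the test name and assertion are illustrative only):

    import os

    def test_db_uri_resolution(mongo_url):
        # Prefer the --mongo_url option, fall back to the environment,
        # mirroring db_setup above.
        uri = mongo_url or os.environ.get("OPENPYPE_MONGO")
        assert uri, "Pass --mongo_url or set OPENPYPE_MONGO"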
@ -3,9 +3,36 @@ id: admin_hosts_houdini
title: Houdini
sidebar_label: Houdini
---
## General Settings
### Houdini Vars

Lets admins define a list of vars (e.g. JOB) with (dynamic) values that are updated on context changes, e.g. when switching to another asset or task.

Template keys are supported, but capitalization variants of formatting keys are not, e.g. `{Asset}` and `{ASSET}` won't work.


:::note
If the `Treat as directory` toggle is activated, OpenPype treats the given value as a folder path.

If the folder does not exist on a context change, it is created, so the path always points to an existing folder.
:::

Disabling the `Update Houdini vars on context change` feature leaves all Houdini vars unmanaged, so no updates occur on context changes.

> If `$JOB` is present in the Houdini var list and has an empty value, OpenPype will set its value to `$HIP`.


:::note
For consistency reasons all vars are forced to uppercase,
e.g. `myvar` will be `MYVAR`.
:::




## Shelves Manager
You can add your custom shelf set into Houdini by setting your shelf sets, shelves and tools in **Houdini -> Shelves Manager**.


The Shelf Set Path is used to load a .shelf file to generate your shelf set. If the path is specified, you don't have to set the shelves and tools.
|
|||
Binary file not shown.
|
After Width: | Height: | Size: 23 KiB |
Loading…
Add table
Add a link
Reference in a new issue