Merge branch 'develop' of https://github.com/ynput/ayon-core into enhancement/maya_extract_pointcache_bake_attributes

# Conflicts:
#	server_addon/maya/server/version.py

Commit dd80e4d45a: 205 changed files with 5224 additions and 3124 deletions.
.github/workflows/pr_linting.yml (vendored, new file, +24)

@@ -0,0 +1,24 @@
name: 📇 Code Linting

on:
  push:
    branches: [ develop ]
  pull_request:
    branches: [ develop ]

  workflow_dispatch:

concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number}}
  cancel-in-progress: true

permissions:
  contents: read
  pull-requests: write

jobs:
  linting:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: chartboost/ruff-action@v1
.gitignore (vendored, +1)

@@ -77,6 +77,7 @@ dump.sql

# Poetry
########
.poetry/
.python-version
.editorconfig
.pre-commit-config.yaml

@@ -1,3 +1,3 @@
-flake8:
-  enabled: true
-  config_file: setup.cfg
+flake8:
+  enabled: true
+  config_file: setup.cfg
@@ -1,12 +1,27 @@
# See https://pre-commit.com for more information
# See https://pre-commit.com/hooks.html for more hooks
repos:
-- repo: https://github.com/pre-commit/pre-commit-hooks
-  rev: v4.4.0
-  hooks:
-  - id: trailing-whitespace
-  - id: end-of-file-fixer
-  - id: check-yaml
-  - id: check-added-large-files
-  - id: no-commit-to-branch
-    args: [ '--pattern', '^(?!((release|enhancement|feature|bugfix|documentation|tests|local|chore)\/[a-zA-Z0-9\-_]+)$).*' ]
+- repo: https://github.com/pre-commit/pre-commit-hooks
+  rev: v4.4.0
+  hooks:
+  - id: trailing-whitespace
+  - id: end-of-file-fixer
+  - id: check-yaml
+  - id: check-added-large-files
+  - id: no-commit-to-branch
+    args: [ '--pattern', '^(?!((release|enhancement|feature|bugfix|documentation|tests|local|chore)\/[a-zA-Z0-9\-_]+)$).*' ]
+- repo: https://github.com/codespell-project/codespell
+  rev: v2.2.6
+  hooks:
+  - id: codespell
+    additional_dependencies:
+      - tomli
+
+- repo: https://github.com/astral-sh/ruff-pre-commit
+  # Ruff version.
+  rev: v0.3.3
+  hooks:
+    # Run the linter.
+    - id: ruff
+    # Run the formatter.
+    # - id: ruff-format
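The `--pattern` value for `no-commit-to-branch` above is a negative lookahead: any branch name that does not follow the allowed `type/name` form matches the pattern, so direct commits to it are blocked. A quick stand-alone check of that behaviour (an illustration only, not part of the repository configuration) could look like this:

```python
import re

PATTERN = (
    r"^(?!((release|enhancement|feature|bugfix|documentation|tests|local|chore)"
    r"\/[a-zA-Z0-9\-_]+)$).*"
)

for branch in ["develop", "main", "feature/new-loader", "bugfix/fix_ui"]:
    blocked = bool(re.match(PATTERN, branch))
    print(branch, "-> blocked" if blocked else "-> allowed")
# develop and main match the pattern (commits blocked);
# feature/new-loader and bugfix/fix_ui do not, so commits are allowed.
```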
README.md (24 lines changed)

@@ -1,8 +1,8 @@
-AYON Core addon
-========
+AYON Core Addon
+===============

-AYON core provides the base building blocks for all other AYON addons and integrations and is responsible for discovery and initialization of other addons.
+AYON core provides the base building blocks for all other AYON addons and integrations and is responsible for discovery and initialization of other addons.

- Some of its key functions include:
- It is used as the main command line handler in [ayon-launcher](https://github.com/ynput/ayon-launcher) application.
@@ -13,8 +13,20 @@ AYON core provides the base building blocks for all other AYON addons and integr
- Defines pipeline API used by other integrations
- Provides all graphical tools for artists
- Defines AYON QT styling
-- A bunch more things
+- A bunch more things

-Together with [ayon-launcher](https://github.com/ynput/ayon-launcher) , they form the base of AYON pipeline and is one of few compulsory addons for AYON pipeline to be useful in a meaningful way.
+Together with [ayon-launcher](https://github.com/ynput/ayon-launcher) , they form the base of AYON pipeline and is one of few compulsory addons for AYON pipeline to be useful in a meaningful way.

-AYON-core is a successor to OpenPype repository (minus all the addons) and still in the process of cleaning up of all references. Please bear with us during this transitional phase.
+AYON-core is a successor to [OpenPype repository](https://github.com/ynput/OpenPype) (minus all the addons) and still in the process of cleaning up of all references. Please bear with us during this transitional phase.
+
+Development and testing notes
+-----------------------------
+There is `pyproject.toml` file in the root of the repository. This file is used to define the development environment and is used by `poetry` to create a virtual environment.
+This virtual environment is used to run tests and to develop the code, to help with
+linting and formatting. Dependencies defined here are not used in actual addon
+deployment - for that you need to edit `./client/pyproject.toml` file. That file
+will be then processed [ayon-dependencies-tool](https://github.com/ynput/ayon-dependencies-tool)
+to create dependency package.
+
+Right now, this file needs to by synced with dependencies manually, but in the future
+we plan to automate process of development environment creation.
@@ -27,7 +27,7 @@ AYON addons should contain separated logic of specific kind of implementation, s
- default interfaces are defined in `interfaces.py`

## IPluginPaths
-- addon wants to add directory path/s to avalon or publish plugins
+- addon wants to add directory path/s to publish, load, create or inventory plugins
- addon must implement `get_plugin_paths` which must return dictionary with possible keys `"publish"`, `"load"`, `"create"` or `"actions"`
- each key may contain list or string with a path to directory with plugins

@@ -89,4 +89,4 @@ AYON addons should contain separated logic of specific kind of implementation, s

### TrayAddonsManager
- inherits from `AddonsManager`
-- has specific implementation for Pype Tray tool and handle `ITrayAddon` methods
+- has specific implementation for AYON Tray and handle `ITrayAddon` methods
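The `get_plugin_paths` contract described in the IPluginPaths hunk above can be illustrated with a minimal sketch. This is assumption-level only: the addon name and directory layout are hypothetical, and it assumes `IPluginPaths` is importable from `ayon_core.addon` alongside `AYONAddon`, which the rest of this diff uses.

```python
import os

from ayon_core.addon import AYONAddon, IPluginPaths

MY_ADDON_ROOT = os.path.dirname(os.path.abspath(__file__))


class MyAddon(AYONAddon, IPluginPaths):
    """Hypothetical addon exposing plugin directories to AYON core."""

    name = "my_addon"

    def get_plugin_paths(self):
        # Each key may hold a single path or a list of paths.
        return {
            "publish": [os.path.join(MY_ADDON_ROOT, "plugins", "publish")],
            "load": os.path.join(MY_ADDON_ROOT, "plugins", "load"),
            "create": [os.path.join(MY_ADDON_ROOT, "plugins", "create")],
            "actions": [],
        }
```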
@@ -741,7 +741,7 @@ class AddonsManager:

        addon_classes = []
        for module in openpype_modules:
-            # Go through globals in `pype.modules`
+            # Go through globals in `ayon_core.modules`
            for name in dir(module):
                modules_item = getattr(module, name, None)
                # Filter globals that are not classes which inherit from

@@ -67,8 +67,6 @@ class Commands:
            install_ayon_plugins,
            get_global_context,
        )
-        from ayon_core.tools.utils.host_tools import show_publish
-        from ayon_core.tools.utils.lib import qt_app_context

        # Register target and host
        import pyblish.api
@@ -134,6 +132,8 @@ class Commands:
            print(plugin)

        if gui:
+            from ayon_core.tools.utils.host_tools import show_publish
+            from ayon_core.tools.utils.lib import qt_app_context
            with qt_app_context():
                show_publish()
        else:
@@ -1,58 +0,0 @@ (file deleted)
import os

from ayon_core.lib import get_ayon_launcher_args
from ayon_core.lib.applications import (
    get_non_python_host_kwargs,
    PreLaunchHook,
    LaunchTypes,
)

from ayon_core import AYON_CORE_ROOT


class NonPythonHostHook(PreLaunchHook):
    """Launch arguments preparation.

    Non python host implementation do not launch host directly but use
    python script which launch the host. For these cases it is necessary to
    prepend python (or ayon) executable and script path before application's.
    """
    app_groups = {"harmony", "photoshop", "aftereffects"}

    order = 20
    launch_types = {LaunchTypes.local}

    def execute(self):
        # Pop executable
        executable_path = self.launch_context.launch_args.pop(0)

        # Pop rest of launch arguments - There should not be other arguments!
        remainders = []
        while self.launch_context.launch_args:
            remainders.append(self.launch_context.launch_args.pop(0))

        script_path = os.path.join(
            AYON_CORE_ROOT,
            "scripts",
            "non_python_host_launch.py"
        )

        new_launch_args = get_ayon_launcher_args(
            "run", script_path, executable_path
        )
        # Add workfile path if exists
        workfile_path = self.data["last_workfile_path"]
        if (
                self.data.get("start_last_workfile")
                and workfile_path
                and os.path.exists(workfile_path)):
            new_launch_args.append(workfile_path)

        # Append as whole list as these areguments should not be separated
        self.launch_context.launch_args.append(new_launch_args)

        if remainders:
            self.launch_context.launch_args.extend(remainders)

        self.launch_context.kwargs = \
            get_non_python_host_kwargs(self.launch_context.kwargs)
@@ -1,6 +1,12 @@
-from .addon import AfterEffectsAddon
+from .addon import (
+    AFTEREFFECTS_ADDON_ROOT,
+    AfterEffectsAddon,
+    get_launch_script_path,
+)


__all__ = (
+    "AFTEREFFECTS_ADDON_ROOT",
    "AfterEffectsAddon",
+    "get_launch_script_path",
)

@@ -1,5 +1,9 @@
import os

from ayon_core.addon import AYONAddon, IHostAddon

+AFTEREFFECTS_ADDON_ROOT = os.path.dirname(os.path.abspath(__file__))
+

class AfterEffectsAddon(AYONAddon, IHostAddon):
    name = "aftereffects"
@@ -17,3 +21,16 @@ class AfterEffectsAddon(AYONAddon, IHostAddon):

    def get_workfile_extensions(self):
        return [".aep"]
+
+    def get_launch_hook_paths(self, app):
+        if app.host_name != self.host_name:
+            return []
+        return [
+            os.path.join(AFTEREFFECTS_ADDON_ROOT, "hooks")
+        ]
+
+
+def get_launch_script_path():
+    return os.path.join(
+        AFTEREFFECTS_ADDON_ROOT, "api", "launch_script.py"
+    )
@@ -7,7 +7,6 @@ import asyncio
import functools
import traceback

-
from wsrpc_aiohttp import (
    WebSocketRoute,
    WebSocketAsync

@@ -1,4 +1,4 @@
-"""Script wraps launch mechanism of non python host implementations.
+"""Script wraps launch mechanism of AfterEffects implementations.

Arguments passed to the script are passed to launch function in host
implementation. In all cases requires host app executable and may contain
@@ -8,6 +8,8 @@ workfile or others.
import os
import sys

+from ayon_core.hosts.aftereffects.api.launch_logic import main as host_main
+
# Get current file to locate start point of sys.argv
CURRENT_FILE = os.path.abspath(__file__)

@@ -79,26 +81,9 @@ def main(argv):
    if after_script_idx is not None:
        launch_args = sys_args[after_script_idx:]

-    host_name = os.environ["AYON_HOST_NAME"].lower()
-    if host_name == "photoshop":
-        # TODO refactor launch logic according to AE
-        from ayon_core.hosts.photoshop.api.lib import main
-    elif host_name == "aftereffects":
-        from ayon_core.hosts.aftereffects.api.launch_logic import main
-    elif host_name == "harmony":
-        from ayon_core.hosts.harmony.api.lib import main
-    else:
-        title = "Unknown host name"
-        message = (
-            "BUG: Environment variable AYON_HOST_NAME contains unknown"
-            " host name \"{}\""
-        ).format(host_name)
-        show_error_messagebox(title, message)
-        return
-
    if launch_args:
        # Launch host implementation
-        main(*launch_args)
+        host_main(*launch_args)
    else:
        # Show message box
        on_invalid_args(after_script_idx is None)
client/ayon_core/hosts/aftereffects/hooks/pre_launch_args.py (new file, +91)

@@ -0,0 +1,91 @@
import os
import platform
import subprocess

from ayon_core.lib import (
    get_ayon_launcher_args,
    is_using_ayon_console,
)
from ayon_core.lib.applications import (
    PreLaunchHook,
    LaunchTypes,
)
from ayon_core.hosts.aftereffects import get_launch_script_path


def get_launch_kwargs(kwargs):
    """Explicit setting of kwargs for Popen for AfterEffects.

    Expected behavior
    - ayon_console opens window with logs
    - ayon has stdout/stderr available for capturing

    Args:
        kwargs (Union[dict, None]): Current kwargs or None.

    """
    if kwargs is None:
        kwargs = {}

    if platform.system().lower() != "windows":
        return kwargs

    if is_using_ayon_console():
        kwargs.update({
            "creationflags": subprocess.CREATE_NEW_CONSOLE
        })
    else:
        kwargs.update({
            "creationflags": subprocess.CREATE_NO_WINDOW,
            "stdout": subprocess.DEVNULL,
            "stderr": subprocess.DEVNULL
        })
    return kwargs


class AEPrelaunchHook(PreLaunchHook):
    """Launch arguments preparation.

    Hook add python executable and script path to AE implementation before
    AE executable and add last workfile path to launch arguments.

    Existence of last workfile is checked. If workfile does not exists tries
    to copy templated workfile from predefined path.
    """
    app_groups = {"aftereffects"}

    order = 20
    launch_types = {LaunchTypes.local}

    def execute(self):
        # Pop executable
        executable_path = self.launch_context.launch_args.pop(0)

        # Pop rest of launch arguments - There should not be other arguments!
        remainders = []
        while self.launch_context.launch_args:
            remainders.append(self.launch_context.launch_args.pop(0))

        script_path = get_launch_script_path()

        new_launch_args = get_ayon_launcher_args(
            "run", script_path, executable_path
        )
        # Add workfile path if exists
        workfile_path = self.data["last_workfile_path"]
        if (
            self.data.get("start_last_workfile")
            and workfile_path
            and os.path.exists(workfile_path)
        ):
            new_launch_args.append(workfile_path)

        # Append as whole list as these arguments should not be separated
        self.launch_context.launch_args.append(new_launch_args)

        if remainders:
            self.launch_context.launch_args.extend(remainders)

        self.launch_context.kwargs = get_launch_kwargs(
            self.launch_context.kwargs
        )
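For orientation only, a hedged sketch of what the kwargs produced by `get_launch_kwargs` above feed into when the launcher finally spawns the process on Windows; it assumes the function from the hook module above is importable, and the command-line arguments are placeholders rather than the real launch command built by `get_ayon_launcher_args`.

```python
import subprocess

# Assumes get_launch_kwargs() from the hook module above.
launch_kwargs = get_launch_kwargs(None)
# ayon_console build -> {"creationflags": subprocess.CREATE_NEW_CONSOLE}
# ayon (GUI) build   -> {"creationflags": subprocess.CREATE_NO_WINDOW,
#                        "stdout": subprocess.DEVNULL,
#                        "stderr": subprocess.DEVNULL}

# Placeholder command; the real hook prepends the AYON launcher and launch script.
args = ["ayon", "run", "launch_script.py", "AfterFX.exe"]
process = subprocess.Popen(args, **launch_kwargs)
```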
@@ -0,0 +1,94 @@ (new file)
import inspect
from typing import List

import bpy

import pyblish.api

from ayon_core.pipeline.publish import (
    ValidateContentsOrder,
    OptionalPyblishPluginMixin,
    PublishValidationError,
    RepairAction
)
import ayon_core.hosts.blender.api.action


class ValidateModelMeshUvMap1(
    pyblish.api.InstancePlugin,
    OptionalPyblishPluginMixin,
):
    """Validate model mesh uvs are named `map1`.

    This is solely to get them to work nicely for the Maya pipeline.
    """

    order = ValidateContentsOrder
    hosts = ["blender"]
    families = ["model"]
    label = "Mesh UVs named map1"
    actions = [ayon_core.hosts.blender.api.action.SelectInvalidAction,
               RepairAction]
    optional = True
    enabled = False

    @classmethod
    def get_invalid(cls, instance) -> List:

        invalid = []
        for obj in instance:
            if obj.mode != "OBJECT":
                cls.log.warning(
                    f"Mesh object {obj.name} should be in 'OBJECT' mode"
                    " to be properly checked."
                )

            obj_data = obj.data
            if isinstance(obj_data, bpy.types.Mesh):
                mesh = obj_data

                # Ignore mesh without UVs
                if not mesh.uv_layers:
                    continue

                # If mesh has map1 all is ok
                if mesh.uv_layers.get("map1"):
                    continue

                cls.log.warning(
                    f"Mesh object {obj.name} should be in 'OBJECT' mode"
                    " to be properly checked."
                )
                invalid.append(obj)

        return invalid

    @classmethod
    def repair(cls, instance):
        for obj in cls.get_invalid(instance):
            mesh = obj.data

            # Rename the first UV set to map1
            mesh.uv_layers[0].name = "map1"

    def process(self, instance):
        if not self.is_active(instance.data):
            return

        invalid = self.get_invalid(instance)
        if invalid:
            raise PublishValidationError(
                f"Meshes found in instance without valid UV's: {invalid}",
                description=self.get_description()
            )

    def get_description(self):
        return inspect.cleandoc(
            """## Meshes must have map1 uv set

            To accompany a better Maya-focused pipeline with Alembics it is
            expected that a Mesh has a `map1` UV set. Blender defaults to
            a UV set named `UVMap` and thus needs to be renamed.

            """
        )
@@ -1,3 +1,4 @@
+import inspect
from typing import List

import mathutils
@@ -5,29 +6,26 @@ import bpy

import pyblish.api

from ayon_core.hosts.blender.api import plugin, lib
import ayon_core.hosts.blender.api.action
from ayon_core.pipeline.publish import (
    ValidateContentsOrder,
    OptionalPyblishPluginMixin,
-    PublishValidationError
+    PublishValidationError,
+    RepairAction
)


class ValidateTransformZero(pyblish.api.InstancePlugin,
                            OptionalPyblishPluginMixin):
-    """Transforms can't have any values
-
-    To solve this issue, try freezing the transforms. So long
-    as the transforms, rotation and scale values are zero,
-    you're all good.
-
-    """
+    """Transforms can't have any values"""

    order = ValidateContentsOrder
    hosts = ["blender"]
    families = ["model"]
    label = "Transform Zero"
-    actions = [ayon_core.hosts.blender.api.action.SelectInvalidAction]
+    actions = [ayon_core.hosts.blender.api.action.SelectInvalidAction,
+               RepairAction]

    _identity = mathutils.Matrix()

@@ -51,5 +49,46 @@ class ValidateTransformZero(pyblish.api.InstancePlugin,
        names = ", ".join(obj.name for obj in invalid)
        raise PublishValidationError(
            "Objects found in instance which do not"
-            f" have transform set to zero: {names}"
+            f" have transform set to zero: {names}",
+            description=self.get_description()
        )
+
+    @classmethod
+    def repair(cls, instance):
+
+        invalid = cls.get_invalid(instance)
+        if not invalid:
+            return
+
+        context = plugin.create_blender_context(
+            active=invalid[0], selected=invalid
+        )
+        with lib.maintained_selection():
+            with bpy.context.temp_override(**context):
+                plugin.deselect_all()
+                for obj in invalid:
+                    obj.select_set(True)
+
+                # TODO: Preferably this does allow custom pivot point locations
+                # and if so, this should likely apply to the delta instead
+                # using `bpy.ops.object.transforms_to_deltas(mode="ALL")`
+                bpy.ops.object.transform_apply(location=True,
+                                               rotation=True,
+                                               scale=True)
+
+    def get_description(self):
+        return inspect.cleandoc(
+            """## Transforms can't have any values.
+
+            The location, rotation and scale on the transform must be at
+            the default values. This also goes for the delta transforms.
+
+            To solve this issue, try freezing the transforms:
+            - `Object` > `Apply` > `All Transforms`
+
+            Using the Repair action directly will do the same.
+
+            So long as the transforms, rotation and scale values are zero,
+            you're all good.
+            """
+        )
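The hunk above only shows the raising and repair side of this validator; the `get_invalid` check itself is not part of the diff. As a rough, assumption-level sketch of how such a check is typically done against the class-level `_identity` matrix (the object source and attribute choice are illustrative, not the plugin's actual code):

```python
import bpy
import mathutils

identity = mathutils.Matrix()  # 4x4 identity, same as the class `_identity`

# Hypothetical stand-in for the pyblish instance members.
objects = bpy.context.selected_objects
invalid = [obj for obj in objects if obj.matrix_basis != identity]
print([obj.name for obj in invalid])
```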
@@ -18,7 +18,7 @@ class CollectRenderPath(pyblish.api.InstancePlugin):
    def process(self, instance):
        anatomy = instance.context.data["anatomy"]
        anatomy_data = copy.deepcopy(instance.data["anatomyData"])
-        padding = anatomy.templates.get("frame_padding", 4)
+        padding = anatomy.templates_obj.frame_padding
        product_type = "render"
        anatomy_data.update({
            "frame": f"%0{padding}d",
@@ -28,15 +28,14 @@ class CollectRenderPath(pyblish.api.InstancePlugin):
        })
        anatomy_data["product"]["type"] = product_type

-        anatomy_filled = anatomy.format(anatomy_data)
-
        # get anatomy rendering keys
        r_anatomy_key = self.anatomy_template_key_render_files
        m_anatomy_key = self.anatomy_template_key_metadata

        # get folder and path for rendering images from celaction
-        render_dir = anatomy_filled[r_anatomy_key]["folder"]
-        render_path = anatomy_filled[r_anatomy_key]["path"]
+        r_template_item = anatomy.get_template_item("publish", r_anatomy_key)
+        render_dir = r_template_item["directory"].format_strict(anatomy_data)
+        render_path = r_template_item["path"].format_strict(anatomy_data)
        self.log.debug("__ render_path: `{}`".format(render_path))

        # create dir if it doesnt exists
@@ -51,11 +50,14 @@ class CollectRenderPath(pyblish.api.InstancePlugin):
        instance.data["path"] = render_path

        # get anatomy for published renders folder path
-        if anatomy_filled.get(m_anatomy_key):
-            instance.data["publishRenderMetadataFolder"] = anatomy_filled[
-                m_anatomy_key]["folder"]
-            self.log.info("Metadata render path: `{}`".format(
-                instance.data["publishRenderMetadataFolder"]
-            ))
+        m_template_item = anatomy.get_template_item(
+            "publish", m_anatomy_key, default=None
+        )
+        if m_template_item is not None:
+            metadata_path = m_template_item["directory"].format_strict(
+                anatomy_data
+            )
+            instance.data["publishRenderMetadataFolder"] = metadata_path
+            self.log.info("Metadata render path: `{}`".format(metadata_path))

        self.log.info(f"Render output path set to: `{render_path}`")
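The same template-API migration (from `anatomy.templates[...]` and `anatomy.format(...)` to `templates_obj` and `get_template_item(...).format_strict(...)`) recurs in later hunks (Fusion's `GenericCreateSaver`, Hiero's `sync_avalon_data_to_workfile`). A minimal sketch of the new access pattern follows; it assumes a configured AYON server and project, `Anatomy` importable from `ayon_core.pipeline` as elsewhere in this codebase, and placeholder values for the project name, template key and template data.

```python
from ayon_core.pipeline import Anatomy

# Sketch only: needs a configured AYON connection; names are placeholders.
anatomy = Anatomy("my_project")
anatomy_data = {}  # in the plugin above this is the fully prepared "anatomyData"

# Replaces anatomy.templates.get("frame_padding", 4)
padding = anatomy.templates_obj.frame_padding

# Replaces anatomy.format(anatomy_data)[key]["folder"] / ["path"]
template_item = anatomy.get_template_item("publish", "render")
render_dir = template_item["directory"].format_strict(anatomy_data)
render_path = template_item["path"].format_strict(anatomy_data)
```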
@@ -1,5 +1,5 @@
"""
-OpenPype Autodesk Flame api
+AYON Autodesk Flame api
"""
from .constants import (
    COLOR_MAP,

@@ -1,14 +1,14 @@

"""
-OpenPype Flame api constances
+AYON Flame api constances
"""
-# OpenPype marker workflow variables
+# AYON marker workflow variables
MARKER_NAME = "OpenPypeData"
MARKER_DURATION = 0
MARKER_COLOR = "cyan"
MARKER_PUBLISH_DEFAULT = False

-# OpenPype color definitions
+# AYON color definitions
COLOR_MAP = {
    "red": (1.0, 0.0, 0.0),
    "orange": (1.0, 0.5, 0.0),

@@ -38,12 +38,12 @@ def install():
    pyblish.register_plugin_path(PUBLISH_PATH)
    register_loader_plugin_path(LOAD_PATH)
    register_creator_plugin_path(CREATE_PATH)
-    log.info("OpenPype Flame plug-ins registered ...")
+    log.info("AYON Flame plug-ins registered ...")

    # register callback for switching publishable
    pyblish.register_callback("instanceToggled", on_pyblish_instance_toggled)

-    log.info("OpenPype Flame host installed ...")
+    log.info("AYON Flame host installed ...")


def uninstall():
@@ -57,7 +57,7 @@ def uninstall():
    # register callback for switching publishable
    pyblish.deregister_callback("instanceToggled", on_pyblish_instance_toggled)

-    log.info("OpenPype Flame host uninstalled ...")
+    log.info("AYON Flame host uninstalled ...")


def containerise(flame_clip_segment,
@@ -38,7 +38,7 @@ class CreatorWidget(QtWidgets.QDialog):
            | QtCore.Qt.WindowCloseButtonHint
            | QtCore.Qt.WindowStaysOnTopHint
        )
-        self.setWindowTitle(name or "Pype Creator Input")
+        self.setWindowTitle(name or "AYON Creator Input")
        self.resize(500, 700)

        # Where inputs and labels are set

@@ -61,7 +61,7 @@ class WireTapCom(object):

    def get_launch_args(
            self, project_name, project_data, user_name, *args, **kwargs):
-        """Forming launch arguments for OpenPype launcher.
+        """Forming launch arguments for AYON launcher.

        Args:
            project_name (str): name of project

@@ -11,7 +11,7 @@ log = Logger.get_logger(__name__)
def _sync_utility_scripts(env=None):
    """ Synchronizing basic utlility scripts for flame.

-    To be able to run start OpenPype within Flame we have to copy
+    To be able to run start AYON within Flame we have to copy
    all utility_scripts and additional FLAME_SCRIPT_DIR into
    `/opt/Autodesk/shared/python`. This will be always synchronizing those
    folders.
@@ -124,7 +124,7 @@ def setup(env=None):
    # synchronize resolve utility scripts
    _sync_utility_scripts(env)

-    log.info("Flame OpenPype wrapper has been installed")
+    log.info("Flame AYON wrapper has been installed")


def get_flame_version():
@@ -72,7 +72,7 @@ class FlamePrelaunch(PreLaunchHook):
        project_data = {
            "Name": project_entity["name"],
            "Nickname": project_entity["code"],
-            "Description": "Created by OpenPype",
+            "Description": "Created by AYON",
            "SetupDir": project_entity["name"],
            "FrameWidth": int(width),
            "FrameHeight": int(height),

@@ -79,7 +79,7 @@ class FlameBabyPublisherPanel(object):

        # creating ui
        self.window.setMinimumSize(1500, 600)
-        self.window.setWindowTitle('OpenPype: Baby-publisher')
+        self.window.setWindowTitle('AYON: Baby-publisher')
        self.window.setWindowFlags(QtCore.Qt.WindowStaysOnTopHint)
        self.window.setAttribute(QtCore.Qt.WA_DeleteOnClose)
        self.window.setFocusPolicy(QtCore.Qt.StrongFocus)

@@ -31,7 +31,7 @@ def scope_sequence(selection):
def get_media_panel_custom_ui_actions():
    return [
        {
-            "name": "OpenPype: Baby-publisher",
+            "name": "AYON: Baby-publisher",
            "actions": [
                {
                    "name": "Create Shots",

@@ -12,7 +12,7 @@ from ayon_core.pipeline import (


def openpype_install():
-    """Registering OpenPype in context
+    """Registering AYON in context
    """
    install_host(opfapi)
    print("Registered host: {}".format(registered_host()))
@@ -28,7 +28,7 @@ def exeption_handler(exctype, value, _traceback):
        tb (str): traceback to show
    """
    import traceback
-    msg = "OpenPype: Python exception {} in {}".format(value, exctype)
+    msg = "AYON: Python exception {} in {}".format(value, exctype)
    mbox = QtWidgets.QMessageBox()
    mbox.setText(msg)
    mbox.setDetailedText(
@@ -15,7 +15,7 @@ from .lib import (
    comp_lock_and_undo_chunk
)

-from .menu import launch_openpype_menu
+from .menu import launch_ayon_menu


__all__ = [
@@ -35,5 +35,5 @@ __all__ = [
    "comp_lock_and_undo_chunk",

    # menu
-    "launch_openpype_menu",
+    "launch_ayon_menu",
]

@@ -28,9 +28,9 @@ self = sys.modules[__name__]
self.menu = None


-class OpenPypeMenu(QtWidgets.QWidget):
+class AYONMenu(QtWidgets.QWidget):
    def __init__(self, *args, **kwargs):
-        super(OpenPypeMenu, self).__init__(*args, **kwargs)
+        super(AYONMenu, self).__init__(*args, **kwargs)

        self.setObjectName(f"{MENU_LABEL}Menu")

@@ -125,7 +125,7 @@ class OpenPypeMenu(QtWidgets.QWidget):
        self._pulse = FusionPulse(parent=self)
        self._pulse.start()

-        # Detect Fusion events as OpenPype events
+        # Detect Fusion events as AYON events
        self._event_handler = FusionEventHandler(parent=self)
        self._event_handler.start()

@@ -174,16 +174,16 @@ class OpenPypeMenu(QtWidgets.QWidget):
        set_current_context_framerange()


-def launch_openpype_menu():
+def launch_ayon_menu():
    app = get_qt_app()

-    pype_menu = OpenPypeMenu()
+    ayon_menu = AYONMenu()

    stylesheet = load_stylesheet()
-    pype_menu.setStyleSheet(stylesheet)
+    ayon_menu.setStyleSheet(stylesheet)

-    pype_menu.show()
-    self.menu = pype_menu
+    ayon_menu.show()
+    self.menu = ayon_menu

    result = app.exec_()
    print("Shutting down..")
@@ -70,7 +70,7 @@ class FusionHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost):
    name = "fusion"

    def install(self):
-        """Install fusion-specific functionality of OpenPype.
+        """Install fusion-specific functionality of AYON.

        This is where you install menus and register families, data
        and loaders into fusion.
@@ -177,7 +177,7 @@ def on_after_open(event):
    if any_outdated_containers():
        log.warning("Scene has outdated content.")

-        # Find OpenPype menu to attach to
+        # Find AYON menu to attach to
        from . import menu

        def _on_show_scene_inventory():
@@ -326,9 +326,9 @@ class FusionEventThread(QtCore.QThread):


class FusionEventHandler(QtCore.QObject):
-    """Emits OpenPype events based on Fusion events captured in a QThread.
+    """Emits AYON events based on Fusion events captured in a QThread.

-    This will emit the following OpenPype events based on Fusion actions:
+    This will emit the following AYON events based on Fusion actions:
        save: Comp_Save, Comp_SaveAs
        open: Comp_Opened
        new: Comp_New
@@ -374,7 +374,7 @@ class FusionEventHandler(QtCore.QObject):
        self._event_thread.stop()

    def _on_event(self, event):
-        """Handle Fusion events to emit OpenPype events"""
+        """Handle Fusion events to emit AYON events"""
        if not event:
            return

@@ -133,7 +133,7 @@ class GenericCreateSaver(Creator):
        formatting_data = deepcopy(data)

        # get frame padding from anatomy templates
-        frame_padding = self.project_anatomy.templates["frame_padding"]
+        frame_padding = self.project_anatomy.templates_obj.frame_padding

        # get output format
        ext = data["creator_attributes"]["image_format"]
@@ -1,6 +1,6 @@
-### OpenPype deploy MenuScripts
+### AYON deploy MenuScripts

Note that this `MenuScripts` is not an official Fusion folder.
-OpenPype only uses this folder in `{fusion}/deploy/` to trigger the OpenPype menu actions.
+AYON only uses this folder in `{fusion}/deploy/` to trigger the AYON menu actions.

They are used in the actions defined in `.fu` files in `{fusion}/deploy/Config`.

@@ -35,7 +35,7 @@ def main(env):
    log = Logger.get_logger(__name__)
    log.info(f"Registered host: {registered_host()}")

-    menu.launch_openpype_menu()
+    menu.launch_ayon_menu()

    # Initiate a QTimer to check if Fusion is still alive every X interval
    # If Fusion is not found - kill itself

@@ -19,7 +19,7 @@ class FusionCopyPrefsPrelaunch(PreLaunchHook):
    Prepares local Fusion profile directory, copies existing Fusion profile.
    This also sets FUSION MasterPrefs variable, which is used
    to apply Master.prefs file to override some Fusion profile settings to:
-        - enable the OpenPype menu
+        - enable the AYON menu
        - force Python 3 over Python 2
        - force English interface
    Master.prefs is defined in openpype/hosts/fusion/deploy/fusion_shared.prefs

@@ -13,7 +13,7 @@ from ayon_core.hosts.fusion import (

class FusionPrelaunch(PreLaunchHook):
    """
-    Prepares OpenPype Fusion environment.
+    Prepares AYON Fusion environment.
    Requires correct Python home variable to be defined in the environment
    settings for Fusion to point at a valid Python 3 build for Fusion.
    Python3 versions that are supported by Fusion:

@@ -1,7 +1,6 @@
from ayon_core.lib import NumberDef

from ayon_core.hosts.fusion.api.plugin import GenericCreateSaver
from ayon_core.hosts.fusion.api import get_current_comp


class CreateImageSaver(GenericCreateSaver):
@@ -1,10 +1,12 @@
from .addon import (
-    HARMONY_HOST_DIR,
+    HARMONY_ADDON_ROOT,
    HarmonyAddon,
+    get_launch_script_path,
)


__all__ = (
-    "HARMONY_HOST_DIR",
+    "HARMONY_ADDON_ROOT",
    "HarmonyAddon",
+    "get_launch_script_path",
)

@@ -1,7 +1,7 @@
import os
from ayon_core.addon import AYONAddon, IHostAddon

-HARMONY_HOST_DIR = os.path.dirname(os.path.abspath(__file__))
+HARMONY_ADDON_ROOT = os.path.dirname(os.path.abspath(__file__))


class HarmonyAddon(AYONAddon, IHostAddon):
@@ -11,10 +11,23 @@ class HarmonyAddon(AYONAddon, IHostAddon):
    def add_implementation_envs(self, env, _app):
        """Modify environments to contain all required for implementation."""
        openharmony_path = os.path.join(
-            HARMONY_HOST_DIR, "vendor", "OpenHarmony"
+            HARMONY_ADDON_ROOT, "vendor", "OpenHarmony"
        )
        # TODO check if is already set? What to do if is already set?
        env["LIB_OPENHARMONY_PATH"] = openharmony_path

    def get_workfile_extensions(self):
        return [".zip"]
+
+    def get_launch_hook_paths(self, app):
+        if app.host_name != self.host_name:
+            return []
+        return [
+            os.path.join(HARMONY_ADDON_ROOT, "hooks")
+        ]
+
+
+def get_launch_script_path():
+    return os.path.join(
+        HARMONY_ADDON_ROOT, "api", "launch_script.py"
+    )
@@ -204,7 +204,7 @@ class CreateComposite(harmony.Creator):

    name = "compositeDefault"
    label = "Composite"
-    product_type = "mindbender.template"
+    product_type = "template"

    def __init__(self, *args, **kwargs):
        super(CreateComposite, self).__init__(*args, **kwargs)

@@ -221,7 +221,7 @@ class CreateRender(harmony.Creator):

    name = "writeDefault"
    label = "Write"
-    product_type = "mindbender.imagesequence"
+    product_type = "render"
    node_type = "WRITE"

    def __init__(self, *args, **kwargs):

@@ -304,7 +304,7 @@ class ExtractImage(pyblish.api.InstancePlugin):
    label = "Extract Image Sequence"
    order = pyblish.api.ExtractorOrder
    hosts = ["harmony"]
-    families = ["mindbender.imagesequence"]
+    families = ["render"]

    def process(self, instance):
        project_path = harmony.send(

@@ -582,8 +582,16 @@ class ImageSequenceLoader(load.LoaderPlugin):
    """Load images
    Stores the imported asset in a container named after the asset.
    """
-    product_types = {"mindbender.imagesequence"}
+    product_types = {
+        "shot",
+        "render",
+        "image",
+        "plate",
+        "reference",
+        "review",
+    }
    representations = ["*"]
    extensions = {"jpeg", "png", "jpg"}

    def load(self, context, name=None, namespace=None, data=None):
        files = []
client/ayon_core/hosts/harmony/api/launch_script.py (new file, +93)

@@ -0,0 +1,93 @@
"""Script wraps launch mechanism of Harmony implementations.

Arguments passed to the script are passed to launch function in host
implementation. In all cases requires host app executable and may contain
workfile or others.
"""

import os
import sys

from ayon_core.hosts.harmony.api.lib import main as host_main

# Get current file to locate start point of sys.argv
CURRENT_FILE = os.path.abspath(__file__)


def show_error_messagebox(title, message, detail_message=None):
    """Function will show message and process ends after closing it."""
    from qtpy import QtWidgets, QtCore
    from ayon_core import style

    app = QtWidgets.QApplication([])
    app.setStyleSheet(style.load_stylesheet())

    msgbox = QtWidgets.QMessageBox()
    msgbox.setWindowTitle(title)
    msgbox.setText(message)

    if detail_message:
        msgbox.setDetailedText(detail_message)

    msgbox.setWindowModality(QtCore.Qt.ApplicationModal)
    msgbox.show()

    sys.exit(app.exec_())


def on_invalid_args(script_not_found):
    """Show to user message box saying that something went wrong.

    Tell user that arguments to launch implementation are invalid with
    arguments details.

    Args:
        script_not_found (bool): Use different message based on this value.
    """

    title = "Invalid arguments"
    joined_args = ", ".join("\"{}\"".format(arg) for arg in sys.argv)
    if script_not_found:
        submsg = "Where couldn't find script path:\n\"{}\""
    else:
        submsg = "Expected Host executable after script path:\n\"{}\""

    message = "BUG: Got invalid arguments so can't launch Host application."
    detail_message = "Process was launched with arguments:\n{}\n\n{}".format(
        joined_args,
        submsg.format(CURRENT_FILE)
    )

    show_error_messagebox(title, message, detail_message)


def main(argv):
    # Modify current file path to find match in sys.argv which may be different
    # on windows (different letter cases and slashes).
    modified_current_file = CURRENT_FILE.replace("\\", "/").lower()

    # Create a copy of sys argv
    sys_args = list(argv)
    after_script_idx = None
    # Find script path in sys.argv to know index of argv where host
    # executable should be.
    for idx, item in enumerate(sys_args):
        if item.replace("\\", "/").lower() == modified_current_file:
            after_script_idx = idx + 1
            break

    # Validate that there is at least one argument after script path
    launch_args = None
    if after_script_idx is not None:
        launch_args = sys_args[after_script_idx:]

    if launch_args:
        # Launch host implementation
        host_main(*launch_args)
    else:
        # Show message box
        on_invalid_args(after_script_idx is None)


if __name__ == "__main__":
    main(sys.argv)
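To make the argv handling in `main()` above concrete, here is a self-contained illustration with a hypothetical command line (all paths are placeholders); the real script compares full normalized paths against `CURRENT_FILE` rather than a filename suffix.

```python
argv = [
    "ayon_console.exe", "run",
    "C:/ayon-core/client/ayon_core/hosts/harmony/api/launch_script.py",
    "C:/Program Files/Toon Boom Animation/Harmony 22/win64/bin/HarmonyPremium.exe",
    "C:/projects/shots/sh010_animation_v001.zip",
]

# Locate the launch script inside argv; everything after it is forwarded
# to host_main() (host executable first, optional workfile second).
script_idx = next(
    idx for idx, arg in enumerate(argv)
    if arg.replace("\\", "/").lower().endswith("launch_script.py")
)
launch_args = argv[script_idx + 1:]
print(launch_args)
```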
@@ -1,5 +1,6 @@
# -*- coding: utf-8 -*-
"""Utility functions used for Avalon - Harmony integration."""
+import platform
import subprocess
import threading
import os
@@ -14,15 +15,16 @@ import json
import signal
import time
from uuid import uuid4
-from qtpy import QtWidgets, QtCore, QtGui
import collections

-from .server import Server
+from qtpy import QtWidgets, QtCore, QtGui

+from ayon_core.lib import is_using_ayon_console
from ayon_core.tools.stdout_broker.app import StdOutBroker
from ayon_core.tools.utils import host_tools
from ayon_core import style
-from ayon_core.lib.applications import get_non_python_host_kwargs
+
+from .server import Server

# Setup logging.
log = logging.getLogger(__name__)
@@ -324,7 +326,18 @@ def launch_zip_file(filepath):
        return

    print("Launching {}".format(scene_path))
-    kwargs = get_non_python_host_kwargs({}, False)
+    # QUESTION Could we use 'run_detached_process' from 'ayon_core.lib'?
+    kwargs = {}
+    if (
+        platform.system().lower() == "windows"
+        and not is_using_ayon_console()
+    ):
+        kwargs.update({
+            "creationflags": subprocess.CREATE_NO_WINDOW,
+            "stdout": subprocess.DEVNULL,
+            "stderr": subprocess.DEVNULL
+        })

    process = subprocess.Popen(
        [ProcessContext.application_path, scene_path],
        **kwargs
@@ -15,13 +15,13 @@ from ayon_core.pipeline import (
from ayon_core.pipeline.load import get_outdated_containers
from ayon_core.pipeline.context_tools import get_current_project_folder

-from ayon_core.hosts.harmony import HARMONY_HOST_DIR
+from ayon_core.hosts.harmony import HARMONY_ADDON_ROOT
import ayon_core.hosts.harmony.api as harmony


log = logging.getLogger("ayon_core.hosts.harmony")

-PLUGINS_DIR = os.path.join(HARMONY_HOST_DIR, "plugins")
+PLUGINS_DIR = os.path.join(HARMONY_ADDON_ROOT, "plugins")
PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish")
LOAD_PATH = os.path.join(PLUGINS_DIR, "load")
CREATE_PATH = os.path.join(PLUGINS_DIR, "create")
client/ayon_core/hosts/harmony/hooks/pre_launch_args.py (new file, +91)

@@ -0,0 +1,91 @@
import os
import platform
import subprocess

from ayon_core.lib import (
    get_ayon_launcher_args,
    is_using_ayon_console,
)
from ayon_core.lib.applications import (
    PreLaunchHook,
    LaunchTypes,
)
from ayon_core.hosts.harmony import get_launch_script_path


def get_launch_kwargs(kwargs):
    """Explicit setting of kwargs for Popen for Harmony.

    Expected behavior
    - ayon_console opens window with logs
    - ayon has stdout/stderr available for capturing

    Args:
        kwargs (Union[dict, None]): Current kwargs or None.

    """
    if kwargs is None:
        kwargs = {}

    if platform.system().lower() != "windows":
        return kwargs

    if is_using_ayon_console():
        kwargs.update({
            "creationflags": subprocess.CREATE_NEW_CONSOLE
        })
    else:
        kwargs.update({
            "creationflags": subprocess.CREATE_NO_WINDOW,
            "stdout": subprocess.DEVNULL,
            "stderr": subprocess.DEVNULL
        })
    return kwargs


class HarmonyPrelaunchHook(PreLaunchHook):
    """Launch arguments preparation.

    Hook add python executable and script path to Harmony implementation
    before Harmony executable and add last workfile path to launch arguments.

    Existence of last workfile is checked. If workfile does not exists tries
    to copy templated workfile from predefined path.
    """
    app_groups = {"harmony"}

    order = 20
    launch_types = {LaunchTypes.local}

    def execute(self):
        # Pop executable
        executable_path = self.launch_context.launch_args.pop(0)

        # Pop rest of launch arguments - There should not be other arguments!
        remainders = []
        while self.launch_context.launch_args:
            remainders.append(self.launch_context.launch_args.pop(0))

        script_path = get_launch_script_path()

        new_launch_args = get_ayon_launcher_args(
            "run", script_path, executable_path
        )
        # Add workfile path if exists
        workfile_path = self.data["last_workfile_path"]
        if (
            self.data.get("start_last_workfile")
            and workfile_path
            and os.path.exists(workfile_path)
        ):
            new_launch_args.append(workfile_path)

        # Append as whole list as these arguments should not be separated
        self.launch_context.launch_args.append(new_launch_args)

        if remainders:
            self.launch_context.launch_args.extend(remainders)

        self.launch_context.kwargs = get_launch_kwargs(
            self.launch_context.kwargs
        )
@@ -50,11 +50,11 @@ class ImportTemplateLoader(load.LoaderPlugin):
            self.__class__.__name__
        )

-    def update(self, container, context):
-        pass
+    def update(self, container, context):
+        pass

-    def remove(self, container):
-        pass
+    def remove(self, container):
+        pass


class ImportWorkfileLoader(ImportTemplateLoader):
@@ -632,7 +632,9 @@ def sync_avalon_data_to_workfile():
    project_name = get_current_project_name()

    anatomy = Anatomy(project_name)
-    work_template = anatomy.templates["work"]["path"]
+    work_template = anatomy.get_template_item(
+        "work", "default", "path"
+    )
    work_root = anatomy.root_value_for_template(work_template)
    active_project_root = (
        os.path.join(work_root, project_name)
@@ -825,7 +827,7 @@ class PublishAction(QtWidgets.QAction):
        # root_node = hiero.core.nuke.RootNode()
        #
        # anatomy = Anatomy(get_current_project_name())
-        # work_template = anatomy.templates["work"]["path"]
+        # work_template = anatomy.get_template_item("work", "default", "path")
        # root_path = anatomy.root_value_for_template(work_template)
        #
        # nuke_script.addNode(root_node)

@@ -45,7 +45,7 @@ class CreatorWidget(QtWidgets.QDialog):
            | QtCore.Qt.WindowCloseButtonHint
            | QtCore.Qt.WindowStaysOnTopHint
        )
-        self.setWindowTitle(name or "Pype Creator Input")
+        self.setWindowTitle(name or "AYON Creator Input")
        self.resize(500, 700)

        # Where inputs and labels are set
@@ -16,7 +16,7 @@ class CreateShotClip(phiero.Creator):

    gui_tracks = [track.name()
                  for track in phiero.get_current_sequence().videoTracks()]
-    gui_name = "Pype publish attributes creator"
+    gui_name = "AYON publish attributes creator"
    gui_info = "Define sequential rename and fill hierarchy data."
    gui_inputs = {
        "renameHierarchy": {
@@ -19,10 +19,6 @@ from ayon_core.lib import BoolDef
from .lib import imprint, read, lsattr, add_self_publish_button


-class OpenPypeCreatorError(CreatorError):
-    pass
-
-
class Creator(LegacyCreator):
    """Creator plugin to create instances in Houdini

@@ -92,8 +88,8 @@ class Creator(LegacyCreator):

        except hou.Error as er:
            six.reraise(
-                OpenPypeCreatorError,
-                OpenPypeCreatorError("Creator error: {}".format(er)),
+                CreatorError,
+                CreatorError("Creator error: {}".format(er)),
                sys.exc_info()[2])


@@ -147,7 +143,6 @@ class HoudiniCreatorBase(object):
    def create_instance_node(
        folder_path, node_name, parent, node_type="geometry"
    ):
-        # type: (str, str, str) -> hou.Node
        """Create node representing instance.

        Arguments:
@@ -210,8 +205,8 @@ class HoudiniCreator(NewCreator, HoudiniCreatorBase):

        except hou.Error as er:
            six.reraise(
-                OpenPypeCreatorError,
-                OpenPypeCreatorError("Creator error: {}".format(er)),
+                CreatorError,
+                CreatorError("Creator error: {}".format(er)),
                sys.exc_info()[2])

    def lock_parameters(self, node, parameters):
@@ -2,6 +2,7 @@
"""Creator plugin for creating publishable Houdini Digital Assets."""
import ayon_api

+from ayon_core.pipeline import CreatorError
from ayon_core.hosts.houdini.api import plugin
import hou

@@ -16,7 +17,7 @@ class CreateHDA(plugin.HoudiniCreator):
    maintain_selection = False

    def _check_existing(self, folder_path, product_name):
-        # type: (str) -> bool
+        # type: (str, str) -> bool
        """Check if existing product name versions already exists."""
        # Get all products of the current folder
        project_name = self.project_name
@@ -52,7 +53,7 @@ class CreateHDA(plugin.HoudiniCreator):
            # if node type has not its definition, it is not user
            # created hda. We test if hda can be created from the node.
            if not to_hda.canCreateDigitalAsset():
-                raise plugin.OpenPypeCreatorError(
+                raise CreatorError(
                    "cannot create hda from node {}".format(to_hda))

            hda_node = to_hda.createDigitalAsset(
@@ -61,7 +62,7 @@ class CreateHDA(plugin.HoudiniCreator):
            )
            hda_node.layoutChildren()
        elif self._check_existing(folder_path, node_name):
-            raise plugin.OpenPypeCreatorError(
+            raise CreatorError(
                ("product {} is already published with different HDA"
                 "definition.").format(node_name))
        else:
@@ -2,6 +2,7 @@
"""Creator plugin to create Redshift ROP."""
import hou  # noqa

+from ayon_core.pipeline import CreatorError
from ayon_core.hosts.houdini.api import plugin
from ayon_core.lib import EnumDef, BoolDef


@@ -42,7 +43,7 @@ class CreateRedshiftROP(plugin.HoudiniCreator):
                "Redshift_IPR", node_name=f"{basename}_IPR"
            )
        except hou.OperationFailed as e:
-            raise plugin.OpenPypeCreatorError(
+            raise CreatorError(
                (
                    "Cannot create Redshift node. Is Redshift "
                    "installed and enabled?"

@@ -3,7 +3,7 @@
import hou

from ayon_core.hosts.houdini.api import plugin
-from ayon_core.pipeline import CreatedInstance
+from ayon_core.pipeline import CreatedInstance, CreatorError
from ayon_core.lib import EnumDef, BoolDef


@@ -42,7 +42,7 @@ class CreateVrayROP(plugin.HoudiniCreator):
                "vray", node_name=basename + "_IPR"
            )
        except hou.OperationFailed:
-            raise plugin.OpenPypeCreatorError(
+            raise CreatorError(
                "Cannot create Vray render node. "
                "Make sure Vray installed and enabled!"
            )
@@ -11,7 +11,8 @@ class AbcLoader(load.LoaderPlugin):

    product_types = {"model", "animation", "pointcache", "gpuCache"}
    label = "Load Alembic"
-    representations = ["abc"]
+    representations = ["*"]
+    extensions = {"abc"}
    order = -10
    icon = "code-fork"
    color = "orange"

@@ -11,7 +11,8 @@ class AbcArchiveLoader(load.LoaderPlugin):

    product_types = {"model", "animation", "pointcache", "gpuCache"}
    label = "Load Alembic as Archive"
-    representations = ["abc"]
+    representations = ["*"]
+    extensions = {"abc"}
    order = -5
    icon = "code-fork"
    color = "orange"

@@ -167,6 +167,9 @@ class CameraLoader(load.LoaderPlugin):

        temp_camera.destroy()

+    def switch(self, container, context):
+        self.update(container, context)
+
    def remove(self, container):

        node = container["node"]
@@ -195,7 +198,6 @@ class CameraLoader(load.LoaderPlugin):
    def _match_maya_render_mask(self, camera):
        """Workaround to match Maya render mask in Houdini"""

-        # print("Setting match maya render mask ")
        parm = camera.parm("aperture")
        expression = parm.expression()
        expression = expression.replace("return ", "aperture = ")
129
client/ayon_core/hosts/houdini/plugins/load/load_filepath.py
Normal file
129
client/ayon_core/hosts/houdini/plugins/load/load_filepath.py
Normal file
|
|
@ -0,0 +1,129 @@
|
|||
import os
|
||||
import re
|
||||
|
||||
from ayon_core.pipeline import load
|
||||
from openpype.hosts.houdini.api import pipeline
|
||||
|
||||
import hou
|
||||
|
||||
|
||||
class FilePathLoader(load.LoaderPlugin):
|
||||
"""Load a managed filepath to a null node.
|
||||
|
||||
This is useful if for a particular workflow there is no existing loader
|
||||
yet. A Houdini artists can load as the generic filepath loader and then
|
||||
reference the relevant Houdini parm to use the exact value. The benefit
|
||||
is that this filepath will be managed and can be updated as usual.
|
||||
|
||||
"""
|
||||
|
||||
label = "Load filepath to node"
|
||||
    order = 9
    icon = "link"
    color = "white"
    product_types = {"*"}
    representations = ["*"]

    def load(self, context, name=None, namespace=None, data=None):

        # Get the root node
        obj = hou.node("/obj")

        # Define node name
        namespace = namespace if namespace else context["folder"]["name"]
        node_name = "{}_{}".format(namespace, name) if namespace else name

        # Create a null node
        container = obj.createNode("null", node_name=node_name)

        # Destroy any children
        for node in container.children():
            node.destroy()

        # Add filepath attribute, set value as default value
        filepath = self.format_path(
            path=self.filepath_from_context(context),
            representation=context["representation"]
        )
        parm_template_group = container.parmTemplateGroup()
        attr_folder = hou.FolderParmTemplate("attributes_folder", "Attributes")
        parm = hou.StringParmTemplate(name="filepath",
                                      label="Filepath",
                                      num_components=1,
                                      default_value=(filepath,))
        attr_folder.addParmTemplate(parm)
        parm_template_group.append(attr_folder)

        # Hide some default labels
        for folder_label in ["Transform", "Render", "Misc", "Redshift OBJ"]:
            folder = parm_template_group.findFolder(folder_label)
            if not folder:
                continue
            parm_template_group.hideFolder(folder_label, True)

        container.setParmTemplateGroup(parm_template_group)

        container.setDisplayFlag(False)
        container.setSelectableInViewport(False)
        container.useXray(False)

        nodes = [container]

        self[:] = nodes

        return pipeline.containerise(
            node_name,
            namespace,
            nodes,
            context,
            self.__class__.__name__,
            suffix="",
        )

    def update(self, container, context):

        # Update the file path
        representation_entity = context["representation"]
        file_path = self.format_path(
            path=self.filepath_from_context(context),
            representation=representation_entity
        )

        node = container["node"]
        node.setParms({
            "filepath": file_path,
            "representation": str(representation_entity["id"])
        })

        # Update the parameter default value (cosmetics)
        parm_template_group = node.parmTemplateGroup()
        parm = parm_template_group.find("filepath")
        parm.setDefaultValue((file_path,))
        parm_template_group.replace(parm_template_group.find("filepath"),
                                    parm)
        node.setParmTemplateGroup(parm_template_group)

    def switch(self, container, representation):
        self.update(container, representation)

    def remove(self, container):

        node = container["node"]
        node.destroy()

    @staticmethod
    def format_path(path: str, representation: dict) -> str:
        """Format file path for sequence with $F."""
        if not os.path.exists(path):
            raise RuntimeError("Path does not exist: %s" % path)

        # The path is either a single file or sequence in a folder.
        frame = representation["context"].get("frame")
        if frame is not None:
            # Substitute frame number in sequence with $F with padding
            ext = representation.get("ext", representation["name"])
            token = "$F{}".format(len(frame))  # e.g. $F4
            pattern = r"\.(\d+)\.{ext}$".format(ext=re.escape(ext))
            path = re.sub(pattern, ".{}.{}".format(token, ext), path)

        return os.path.normpath(path).replace("\\", "/")
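For readers unfamiliar with Houdini's frame tokens, here is a minimal, self-contained sketch of what the `format_path` substitution above produces; the helper name, example path and frame string are purely illustrative and not part of the change itself:

```python
import os
import re


def to_houdini_sequence(path, frame, ext):
    # Mirror the format_path() logic above: replace the trailing
    # ".<digits>.<ext>" with Houdini's "$F<padding>" token.
    token = "$F{}".format(len(frame))  # "0010" -> "$F4"
    pattern = r"\.(\d+)\.{}$".format(re.escape(ext))
    path = re.sub(pattern, ".{}.{}".format(token, ext), path)
    return os.path.normpath(path).replace("\\", "/")


print(to_houdini_sequence("/renders/sh010/beauty.0010.exr", "0010", "exr"))
# -> /renders/sh010/beauty.$F4.exr
```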
@@ -1,4 +1,5 @@
import os
import re

from ayon_core.pipeline import (
    load,

@@ -44,7 +45,14 @@ def get_image_avalon_container():
class ImageLoader(load.LoaderPlugin):
    """Load images into COP2"""

    product_types = {"imagesequence"}
    product_types = {
        "imagesequence",
        "review",
        "render",
        "plate",
        "image",
        "online",
    }
    label = "Load Image (COP2)"
    representations = ["*"]
    order = -10

@@ -55,10 +63,8 @@ class ImageLoader(load.LoaderPlugin):
    def load(self, context, name=None, namespace=None, data=None):

        # Format file name, Houdini only wants forward slashes
        file_path = self.filepath_from_context(context)
        file_path = os.path.normpath(file_path)
        file_path = file_path.replace("\\", "/")
        file_path = self._get_file_sequence(file_path)
        path = self.filepath_from_context(context)
        path = self.format_path(path, representation=context["representation"])

        # Get the root node
        parent = get_image_avalon_container()

@@ -70,7 +76,10 @@ class ImageLoader(load.LoaderPlugin):
        node = parent.createNode("file", node_name=node_name)
        node.moveToGoodPosition()

        node.setParms({"filename1": file_path})
        parms = {"filename1": path}
        parms.update(self.get_colorspace_parms(context["representation"]))

        node.setParms(parms)

        # Imprint it manually
        data = {

@@ -93,16 +102,17 @@ class ImageLoader(load.LoaderPlugin):

        # Update the file path
        file_path = get_representation_path(repre_entity)
        file_path = file_path.replace("\\", "/")
        file_path = self._get_file_sequence(file_path)
        file_path = self.format_path(file_path, repre_entity)

        parms = {
            "filename1": file_path,
            "representation": repre_entity["id"],
        }

        parms.update(self.get_colorspace_parms(repre_entity))

        # Update attributes
        node.setParms(
            {
                "filename1": file_path,
                "representation": repre_entity["id"],
            }
        )
        node.setParms(parms)

    def remove(self, container):

@@ -119,14 +129,58 @@ class ImageLoader(load.LoaderPlugin):
        if not parent.children():
            parent.destroy()

    def _get_file_sequence(self, file_path):
        root = os.path.dirname(file_path)
        files = sorted(os.listdir(root))
    @staticmethod
    def format_path(path, representation):
        """Format file path correctly for single image or sequence."""
        if not os.path.exists(path):
            raise RuntimeError("Path does not exist: %s" % path)

        first_fname = files[0]
        prefix, padding, suffix = first_fname.rsplit(".", 2)
        fname = ".".join([prefix, "$F{}".format(len(padding)), suffix])
        return os.path.join(root, fname).replace("\\", "/")
        ext = os.path.splitext(path)[-1]

    def switch(self, container, context):
        self.update(container, context)
        is_sequence = bool(representation["context"].get("frame"))
        # The path is either a single file or sequence in a folder.
        if not is_sequence:
            filename = path
        else:
            filename = re.sub(r"(.*)\.(\d+){}$".format(re.escape(ext)),
                              "\\1.$F4{}".format(ext),
                              path)

            filename = os.path.join(path, filename)

        filename = os.path.normpath(filename)
        filename = filename.replace("\\", "/")

        return filename

    def get_colorspace_parms(self, representation: dict) -> dict:
        """Return the color space parameters.

        Returns the values for the colorspace parameters on the node if there
        is colorspace data on the representation.

        Arguments:
            representation (dict): The representation entity.

        Returns:
            dict: Parm to value mapping if colorspace data is defined.

        """
        # Using OCIO colorspace on COP2 File node is only supported in Hou 20+
        major, _, _ = hou.applicationVersion()
        if major < 20:
            return {}

        data = representation.get("data", {}).get("colorspaceData", {})
        if not data:
            return {}

        colorspace = data["colorspace"]
        if colorspace:
            return {
                "colorspace": 3,  # Use OpenColorIO
                "ocio_space": colorspace
            }

    def switch(self, container, representation):
        self.update(container, representation)
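As a hedged illustration of the colorspace handling above, this is roughly what a representation entity carrying colorspace data could look like and what the loader would derive from it on Houdini 20+; the entity values below are made up for the example:

```python
# Hypothetical representation entity with colorspace data attached:
representation = {
    "id": "1234",
    "context": {"frame": "1001"},
    "data": {
        "colorspaceData": {"colorspace": "ACES - ACEScg"},
    },
}

# get_colorspace_parms() above would then return parms equivalent to:
expected_parms = {
    "colorspace": 3,               # switch the COP2 File node to OpenColorIO
    "ocio_space": "ACES - ACEScg"  # the representation's colorspace name
}
```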
77
client/ayon_core/hosts/houdini/plugins/load/load_usd_sop.py
Normal file
@@ -0,0 +1,77 @@
import os

from ayon_core.pipeline import load
from ayon_core.hosts.houdini.api import pipeline


class SopUsdImportLoader(load.LoaderPlugin):
    """Load USD to SOPs via `usdimport`"""

    label = "Load USD to SOPs"
    product_types = {"*"}
    representations = ["usd"]
    order = -6
    icon = "code-fork"
    color = "orange"

    def load(self, context, name=None, namespace=None, data=None):
        import hou

        # Format file name, Houdini only wants forward slashes
        file_path = self.filepath_from_context(context)
        file_path = os.path.normpath(file_path)
        file_path = file_path.replace("\\", "/")

        # Get the root node
        obj = hou.node("/obj")

        # Define node name
        namespace = namespace if namespace else context["folder"]["name"]
        node_name = "{}_{}".format(namespace, name) if namespace else name

        # Create a new geo node
        container = obj.createNode("geo", node_name=node_name)

        # Create a usdimport node
        usdimport = container.createNode("usdimport", node_name=node_name)
        usdimport.setParms({"filepath1": file_path})

        # Set new position for unpack node else it gets cluttered
        nodes = [container, usdimport]

        return pipeline.containerise(
            node_name,
            namespace,
            nodes,
            context,
            self.__class__.__name__,
            suffix="",
        )

    def update(self, container, context):

        node = container["node"]
        try:
            usdimport_node = next(
                n for n in node.children() if n.type().name() == "usdimport"
            )
        except StopIteration:
            self.log.error("Could not find node of type `usdimport`")
            return

        # Update the file path
        file_path = self.filepath_from_context(context)
        file_path = file_path.replace("\\", "/")

        usdimport_node.setParms({"filepath1": file_path})

        # Update attribute
        node.setParms({"representation": context["representation"]["id"]})

    def remove(self, container):

        node = container["node"]
        node.destroy()

    def switch(self, container, representation):
        self.update(container, representation)
@@ -1,100 +0,0 @@
import hou

import pyblish.api

from ayon_core.pipeline import AYON_INSTANCE_ID, AVALON_INSTANCE_ID
from ayon_core.hosts.houdini.api import lib


class CollectInstances(pyblish.api.ContextPlugin):
    """Gather instances by all node in out graph and pre-defined attributes

    This collector takes into account folders that are associated with
    an specific node and marked with a unique identifier;

    Identifier:
        id (str): "ayon.create.instance"

    Specific node:
        The specific node is important because it dictates in which way the
        product is being exported.

        alembic: will export Alembic file which supports cascading attributes
                 like 'cbId' and 'path'
        geometry: Can export a wide range of file types, default out

    """

    order = pyblish.api.CollectorOrder - 0.01
    label = "Collect Instances"
    hosts = ["houdini"]

    def process(self, context):

        nodes = hou.node("/out").children()
        nodes += hou.node("/obj").children()

        # Include instances in USD stage only when it exists so it
        # remains backwards compatible with version before houdini 18
        stage = hou.node("/stage")
        if stage:
            nodes += stage.recursiveGlob("*", filter=hou.nodeTypeFilter.Rop)

        for node in nodes:

            if not node.parm("id"):
                continue

            if node.evalParm("id") not in {
                AYON_INSTANCE_ID, AVALON_INSTANCE_ID
            }:
                continue

            # instance was created by new creator code, skip it as
            # it is already collected.
            if node.parm("creator_identifier"):
                continue

            has_family = node.evalParm("family")
            assert has_family, "'%s' is missing 'family'" % node.name()

            self.log.info(
                "Processing legacy instance node {}".format(node.path())
            )

            data = lib.read(node)
            # Check bypass state and reverse
            if hasattr(node, "isBypassed"):
                data.update({"active": not node.isBypassed()})

            # temporarily translation of `active` to `publish` till issue has
            # been resolved.
            # https://github.com/pyblish/pyblish-base/issues/307
            if "active" in data:
                data["publish"] = data["active"]

            # Create nice name if the instance has a frame range.
            label = data.get("name", node.name())
            label += " (%s)" % data["folderPath"]  # include folder in name

            instance = context.create_instance(label)

            # Include `families` using `family` data
            product_type = data["family"]
            data["productType"] = product_type
            instance.data["families"] = [product_type]

            instance[:] = [node]
            instance.data["instance_node"] = node.path()
            instance.data.update(data)

        def sort_by_family(instance):
            """Sort by family"""
            return instance.data.get(
                "families", instance.data.get("productType")
            )

        # Sort/grouped by family (preserving local index)
        context[:] = sorted(context, key=sort_by_family)

        return context
@@ -41,23 +41,23 @@ class CollectKarmaROPRenderProducts(pyblish.api.InstancePlugin):
instance.data["chunkSize"] = chunk_size
self.log.debug("Chunk Size: %s" % chunk_size)

default_prefix = evalParmNoFrame(rop, "picture")
render_products = []
default_prefix = evalParmNoFrame(rop, "picture")
render_products = []

# Default beauty AOV
beauty_product = self.get_render_product_name(
prefix=default_prefix, suffix=None
)
render_products.append(beauty_product)
# Default beauty AOV
beauty_product = self.get_render_product_name(
prefix=default_prefix, suffix=None
)
render_products.append(beauty_product)

files_by_aov = {
"beauty": self.generate_expected_files(instance,
beauty_product)
}
files_by_aov = {
"beauty": self.generate_expected_files(instance,
beauty_product)
}

filenames = list(render_products)
instance.data["files"] = filenames
instance.data["renderProducts"] = colorspace.ARenderProduct()
filenames = list(render_products)
instance.data["files"] = filenames
instance.data["renderProducts"] = colorspace.ARenderProduct()

for product in render_products:
self.log.debug("Found render product: %s" % product)

@@ -41,57 +41,57 @@ class CollectMantraROPRenderProducts(pyblish.api.InstancePlugin):
instance.data["chunkSize"] = chunk_size
self.log.debug("Chunk Size: %s" % chunk_size)

default_prefix = evalParmNoFrame(rop, "vm_picture")
render_products = []
default_prefix = evalParmNoFrame(rop, "vm_picture")
render_products = []

# Store whether we are splitting the render job (export + render)
split_render = bool(rop.parm("soho_outputmode").eval())
instance.data["splitRender"] = split_render
export_prefix = None
export_products = []
if split_render:
export_prefix = evalParmNoFrame(
rop, "soho_diskfile", pad_character="0"
)
beauty_export_product = self.get_render_product_name(
prefix=export_prefix,
suffix=None)
export_products.append(beauty_export_product)
self.log.debug(
"Found export product: {}".format(beauty_export_product)
)
instance.data["ifdFile"] = beauty_export_product
instance.data["exportFiles"] = list(export_products)

# Default beauty AOV
beauty_product = self.get_render_product_name(
prefix=default_prefix, suffix=None
# Store whether we are splitting the render job (export + render)
split_render = bool(rop.parm("soho_outputmode").eval())
instance.data["splitRender"] = split_render
export_prefix = None
export_products = []
if split_render:
export_prefix = evalParmNoFrame(
rop, "soho_diskfile", pad_character="0"
)
render_products.append(beauty_product)
beauty_export_product = self.get_render_product_name(
prefix=export_prefix,
suffix=None)
export_products.append(beauty_export_product)
self.log.debug(
"Found export product: {}".format(beauty_export_product)
)
instance.data["ifdFile"] = beauty_export_product
instance.data["exportFiles"] = list(export_products)

files_by_aov = {
"beauty": self.generate_expected_files(instance,
beauty_product)
}
# Default beauty AOV
beauty_product = self.get_render_product_name(
prefix=default_prefix, suffix=None
)
render_products.append(beauty_product)

aov_numbers = rop.evalParm("vm_numaux")
if aov_numbers > 0:
# get the filenames of the AOVs
for i in range(1, aov_numbers + 1):
var = rop.evalParm("vm_variable_plane%d" % i)
if var:
aov_name = "vm_filename_plane%d" % i
aov_boolean = "vm_usefile_plane%d" % i
aov_enabled = rop.evalParm(aov_boolean)
has_aov_path = rop.evalParm(aov_name)
if has_aov_path and aov_enabled == 1:
aov_prefix = evalParmNoFrame(rop, aov_name)
aov_product = self.get_render_product_name(
prefix=aov_prefix, suffix=None
)
render_products.append(aov_product)
files_by_aov = {
"beauty": self.generate_expected_files(instance,
beauty_product)
}

files_by_aov[var] = self.generate_expected_files(instance, aov_product)  # noqa
aov_numbers = rop.evalParm("vm_numaux")
if aov_numbers > 0:
# get the filenames of the AOVs
for i in range(1, aov_numbers + 1):
var = rop.evalParm("vm_variable_plane%d" % i)
if var:
aov_name = "vm_filename_plane%d" % i
aov_boolean = "vm_usefile_plane%d" % i
aov_enabled = rop.evalParm(aov_boolean)
has_aov_path = rop.evalParm(aov_name)
if has_aov_path and aov_enabled == 1:
aov_prefix = evalParmNoFrame(rop, aov_name)
aov_product = self.get_render_product_name(
prefix=aov_prefix, suffix=None
)
render_products.append(aov_product)

files_by_aov[var] = self.generate_expected_files(instance, aov_product)  # noqa

for product in render_products:
self.log.debug("Found render product: %s" % product)

@@ -68,12 +68,15 @@ class CollectRedshiftROPRenderProducts(pyblish.api.InstancePlugin):
files_by_aov = {
"_": self.generate_expected_files(instance,
beauty_product)}


aovs_rop = rop.parm("RS_aovGetFromNode").evalAsNode()
if aovs_rop:
rop = aovs_rop

num_aovs = rop.evalParm("RS_aov")
num_aovs = 0
if not rop.evalParm('RS_aovAllAOVsDisabled'):
num_aovs = rop.evalParm("RS_aov")

for index in range(num_aovs):
i = index + 1
@@ -1,7 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?>
<mainMenu>
  <menuBar>
    <subMenu id="openpype_menu">
    <subMenu id="ayon_menu">
      <labelExpression><![CDATA[
import os
return os.environ.get("AYON_MENU_LABEL") or "AYON"

@@ -8,8 +8,8 @@ from ayon_core.tools.utils import host_tools
from ayon_core.hosts.max.api import lib


class OpenPypeMenu(object):
    """Object representing OpenPype/AYON menu.
class AYONMenu(object):
    """Object representing AYON menu.

    This is using "hack" to inject itself before "Help" menu of 3dsmax.
    For some reason `postLoadingMenus` event doesn't fire, and main menu

@@ -39,7 +39,7 @@ class OpenPypeMenu(object):

        self._counter = 0
        self._timer.stop()
        self.build_openpype_menu()
        self._build_ayon_menu()

    @staticmethod
    def get_main_widget():

@@ -50,8 +50,8 @@ class OpenPypeMenu(object):
        """Get main Menubar by 3dsmax main window."""
        return list(self.main_widget.findChildren(QtWidgets.QMenuBar))[0]

    def get_or_create_openpype_menu(
            self, name: str = "&Openpype",
    def _get_or_create_ayon_menu(
            self, name: str = "&AYON",
            before: str = "&Help") -> QtWidgets.QAction:
        """Create AYON menu.

@@ -73,7 +73,7 @@ class OpenPypeMenu(object):
        help_action = None
        for item in menu_items:
            if name in item.title():
                # we already have OpenPype menu
                # we already have AYON menu
                return item

            if before in item.title():

@@ -85,50 +85,50 @@ class OpenPypeMenu(object):
        self.menu = op_menu
        return op_menu

    def build_openpype_menu(self) -> QtWidgets.QAction:
    def _build_ayon_menu(self) -> QtWidgets.QAction:
        """Build items in AYON menu."""
        openpype_menu = self.get_or_create_openpype_menu()
        load_action = QtWidgets.QAction("Load...", openpype_menu)
        ayon_menu = self._get_or_create_ayon_menu()
        load_action = QtWidgets.QAction("Load...", ayon_menu)
        load_action.triggered.connect(self.load_callback)
        openpype_menu.addAction(load_action)
        ayon_menu.addAction(load_action)

        publish_action = QtWidgets.QAction("Publish...", openpype_menu)
        publish_action = QtWidgets.QAction("Publish...", ayon_menu)
        publish_action.triggered.connect(self.publish_callback)
        openpype_menu.addAction(publish_action)
        ayon_menu.addAction(publish_action)

        manage_action = QtWidgets.QAction("Manage...", openpype_menu)
        manage_action = QtWidgets.QAction("Manage...", ayon_menu)
        manage_action.triggered.connect(self.manage_callback)
        openpype_menu.addAction(manage_action)
        ayon_menu.addAction(manage_action)

        library_action = QtWidgets.QAction("Library...", openpype_menu)
        library_action = QtWidgets.QAction("Library...", ayon_menu)
        library_action.triggered.connect(self.library_callback)
        openpype_menu.addAction(library_action)
        ayon_menu.addAction(library_action)

        openpype_menu.addSeparator()
        ayon_menu.addSeparator()

        workfiles_action = QtWidgets.QAction("Work Files...", openpype_menu)
        workfiles_action = QtWidgets.QAction("Work Files...", ayon_menu)
        workfiles_action.triggered.connect(self.workfiles_callback)
        openpype_menu.addAction(workfiles_action)
        ayon_menu.addAction(workfiles_action)

        openpype_menu.addSeparator()
        ayon_menu.addSeparator()

        res_action = QtWidgets.QAction("Set Resolution", openpype_menu)
        res_action = QtWidgets.QAction("Set Resolution", ayon_menu)
        res_action.triggered.connect(self.resolution_callback)
        openpype_menu.addAction(res_action)
        ayon_menu.addAction(res_action)

        frame_action = QtWidgets.QAction("Set Frame Range", openpype_menu)
        frame_action = QtWidgets.QAction("Set Frame Range", ayon_menu)
        frame_action.triggered.connect(self.frame_range_callback)
        openpype_menu.addAction(frame_action)
        ayon_menu.addAction(frame_action)

        colorspace_action = QtWidgets.QAction("Set Colorspace", openpype_menu)
        colorspace_action = QtWidgets.QAction("Set Colorspace", ayon_menu)
        colorspace_action.triggered.connect(self.colorspace_callback)
        openpype_menu.addAction(colorspace_action)
        ayon_menu.addAction(colorspace_action)

        unit_scale_action = QtWidgets.QAction("Set Unit Scale", openpype_menu)
        unit_scale_action = QtWidgets.QAction("Set Unit Scale", ayon_menu)
        unit_scale_action.triggered.connect(self.unit_scale_callback)
        openpype_menu.addAction(unit_scale_action)
        ayon_menu.addAction(unit_scale_action)

        return openpype_menu
        return ayon_menu

    def load_callback(self):
        """Callback to show Loader tool."""
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
"""Pipeline tools for OpenPype Houdini integration."""
"""Pipeline tools for AYON 3ds max integration."""
import os
import logging
from operator import attrgetter

@@ -14,7 +14,7 @@ from ayon_core.pipeline import (
    AVALON_CONTAINER_ID,
    AYON_CONTAINER_ID,
)
from ayon_core.hosts.max.api.menu import OpenPypeMenu
from ayon_core.hosts.max.api.menu import AYONMenu
from ayon_core.hosts.max.api import lib
from ayon_core.hosts.max.api.plugin import MS_CUSTOM_ATTRIB
from ayon_core.hosts.max import MAX_HOST_DIR

@@ -48,7 +48,7 @@ class MaxHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost):
        register_creator_plugin_path(CREATE_PATH)

        # self._register_callbacks()
        self.menu = OpenPypeMenu()
        self.menu = AYONMenu()

        self._has_been_setup = True

@@ -94,7 +94,7 @@ class MaxHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost):

    def _deferred_menu_creation(self):
        self.log.info("Building menu ...")
        self.menu = OpenPypeMenu()
        self.menu = AYONMenu()

    @staticmethod
    def create_context_node():

@@ -148,7 +148,7 @@ attributes "OpenPypeContext"


def ls() -> list:
    """Get all OpenPype instances."""
    """Get all AYON containers."""
    objs = rt.objects
    containers = [
        obj for obj in objs

@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
"""3dsmax specific Avalon/Pyblish plugin definitions."""
"""3dsmax specific AYON/Pyblish plugin definitions."""
from abc import ABCMeta

import six

@@ -156,10 +156,6 @@ MS_CUSTOM_ATTRIB = """attributes "openPypeData"
)"""


class OpenPypeCreatorError(CreatorError):
    pass


class MaxCreatorBase(object):

    @staticmethod

@@ -6,7 +6,7 @@ from ayon_core.lib.applications import PreLaunchHook, LaunchTypes


class ForceStartupScript(PreLaunchHook):
    """Inject OpenPype environment to 3ds max.
    """Inject AYON environment to 3ds max.

    Note that this works in combination whit 3dsmax startup script that
    is translating it back to PYTHONPATH for cases when 3dsmax drops PYTHONPATH

@@ -5,7 +5,7 @@ from ayon_core.lib.applications import PreLaunchHook, LaunchTypes


class InjectPythonPath(PreLaunchHook):
    """Inject OpenPype environment to 3dsmax.
    """Inject AYON environment to 3dsmax.

    Note that this works in combination whit 3dsmax startup script that
    is translating it back to PYTHONPATH for cases when 3dsmax drops PYTHONPATH

@@ -1,4 +1,4 @@
-- OpenPype Init Script
-- AYON Init Script
(
    local sysPath = dotNetClass "System.IO.Path"
    local sysDir = dotNetClass "System.IO.Directory"
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
"""OpenPype script commands to be used directly in Maya."""
"""AYON script commands to be used directly in Maya."""
from maya import cmds

from ayon_api import get_project, get_folder_by_path

@@ -109,7 +109,7 @@ def override_toolbox_ui():

    controls.append(
        cmds.iconTextButton(
            "pype_toolbox_lookmanager",
            "ayon_toolbox_lookmanager",
            annotation="Look Manager",
            label="Look Manager",
            image=os.path.join(icons, "lookmanager.png"),

@@ -122,7 +122,7 @@ def override_toolbox_ui():

    controls.append(
        cmds.iconTextButton(
            "pype_toolbox_workfiles",
            "ayon_toolbox_workfiles",
            annotation="Work Files",
            label="Work Files",
            image=os.path.join(icons, "workfiles.png"),

@@ -137,7 +137,7 @@ def override_toolbox_ui():

    controls.append(
        cmds.iconTextButton(
            "pype_toolbox_loader",
            "ayon_toolbox_loader",
            annotation="Loader",
            label="Loader",
            image=os.path.join(icons, "loader.png"),

@@ -152,7 +152,7 @@ def override_toolbox_ui():

    controls.append(
        cmds.iconTextButton(
            "pype_toolbox_manager",
            "ayon_toolbox_manager",
            annotation="Inventory",
            label="Inventory",
            image=os.path.join(icons, "inventory.png"),
@@ -2931,13 +2931,13 @@ def bake_to_world_space(nodes,


def load_capture_preset(data):
    """Convert OpenPype Extract Playblast settings to `capture` arguments
    """Convert AYON Extract Playblast settings to `capture` arguments

    Input data is the settings from:
        `project_settings/maya/publish/ExtractPlayblast/capture_preset`

    Args:
        data (dict): Capture preset settings from OpenPype settings
        data (dict): Capture preset settings from AYON settings

    Returns:
        dict: `capture.capture` compatible keyword arguments

@@ -3288,7 +3288,7 @@ def set_colorspace():
    else:
        # TODO: deprecated code from 3.15.5 - remove
        # Maya 2022+ introduces new OCIO v2 color management settings that
        # can override the old color management preferences. OpenPype has
        # can override the old color management preferences. AYON has
        # separate settings for both so we fall back when necessary.
        use_ocio_v2 = imageio["colorManagementPreference_v2"]["enabled"]
        if use_ocio_v2 and not ocio_v2_support:

@@ -3,7 +3,7 @@

https://github.com/Colorbleed/colorbleed-config/blob/acre/colorbleed/maya/lib_rendersetup.py
Credits: Roy Nieterau (BigRoy) / Colorbleed
Modified for use in OpenPype
Modified for use in AYON

"""

@@ -50,7 +50,7 @@ def get_context_label():

def install(project_settings):
    if cmds.about(batch=True):
        log.info("Skipping openpype.menu initialization in batch mode..")
        log.info("Skipping AYON menu initialization in batch mode..")
        return

    def add_menu():

@@ -261,7 +261,7 @@ def popup():


def update_menu_task_label():
    """Update the task label in Avalon menu to current session"""
    """Update the task label in AYON menu to current session"""

    if IS_HEADLESS:
        return

@@ -361,13 +361,13 @@ def parse_container(container):


def _ls():
    """Yields Avalon container node names.
    """Yields AYON container node names.

    Used by `ls()` to retrieve the nodes and then query the full container's
    data.

    Yields:
        str: Avalon container node name (objectSet)
        str: AYON container node name (objectSet)

    """

@@ -384,7 +384,7 @@ def _ls():
    }

    # Iterate over all 'set' nodes in the scene to detect whether
    # they have the avalon container ".id" attribute.
    # they have the ayon container ".id" attribute.
    fn_dep = om.MFnDependencyNode()
    iterator = om.MItDependencyNodes(om.MFn.kSet)
    for mobject in _maya_iterate(iterator):

@@ -673,7 +673,7 @@ def workfile_save_before_xgen(event):
    switching context.

    Args:
        event (Event) - openpype/lib/events.py
        event (Event) - ayon_core/lib/events.py
    """
    if not cmds.pluginInfo("xgenToolkit", query=True, loaded=True):
        return

@@ -899,7 +899,7 @@ class ReferenceLoader(Loader):
            cmds.disconnectAttr(input, node_attr)
            cmds.setAttr(node_attr, data["value"])

        # Fix PLN-40 for older containers created with Avalon that had the
        # Fix PLN-40 for older containers created with AYON that had the
        # `.verticesOnlySet` set to True.
        if cmds.getAttr("{}.verticesOnlySet".format(node)):
            self.log.info("Setting %s.verticesOnlySet to False", node)

@@ -5,7 +5,7 @@ Export Maya nodes from Render Setup layer as if flattened in that layer instead
of exporting the defaultRenderLayer as Maya forces by default

Credits: Roy Nieterau (BigRoy) / Colorbleed
Modified for use in OpenPype
Modified for use in AYON

"""
@@ -150,7 +150,7 @@ def load_package(filepath, name, namespace=None):
        containers.append(container)

    # TODO: Do we want to cripple? Or do we want to add a 'parent' parameter?
    # Cripple the original avalon containers so they don't show up in the
    # Cripple the original AYON containers so they don't show up in the
    # manager
    # for container in containers:
    #     cmds.setAttr("%s.id" % container,

@@ -175,7 +175,7 @@ def _add(instance, representation_id, loaders, namespace, root="|"):
        namespace (str):

    Returns:
        str: The created Avalon container.
        str: The created AYON container.

    """

@@ -244,7 +244,7 @@ def _instances_by_namespace(data):


def get_contained_containers(container):
    """Get the Avalon containers in this container
    """Get the AYON containers in this container

    Args:
        container (dict): The container dict.

@@ -256,7 +256,7 @@ def get_contained_containers(container):

    from .pipeline import parse_container

    # Get avalon containers in this package setdress container
    # Get AYON containers in this package setdress container
    containers = []
    members = cmds.sets(container['objectName'], query=True)
    for node in cmds.ls(members, type="objectSet"):

@@ -19,7 +19,7 @@ class MayaLegacyConvertor(ProductConvertorPlugin,

    Its limitation is that you can have multiple creators creating product
    of the same type and there is no way to handle it. This code should
    nevertheless cover all creators that came with OpenPype.
    nevertheless cover all creators that came with AYON.

    """
    identifier = "io.openpype.creators.maya.legacy"

@@ -32,10 +32,7 @@ class RedshiftProxyLoader(load.LoaderPlugin):

    def load(self, context, name=None, namespace=None, options=None):
        """Plugin entry point."""
        try:
            product_type = context["representation"]["context"]["family"]
        except ValueError:
            product_type = "redshiftproxy"
        product_type = context["product"]["productType"]

        folder_name = context["folder"]["name"]
        namespace = namespace or unique_namespace(

@@ -117,11 +117,7 @@ class ReferenceLoader(plugin.ReferenceLoader):
    def process_reference(self, context, name, namespace, options):
        import maya.cmds as cmds

        try:
            product_type = context["representation"]["context"]["family"]
        except ValueError:
            product_type = "model"

        product_type = context["product"]["productType"]
        project_name = context["project"]["name"]
        # True by default to keep legacy behaviours
        attach_to_root = options.get("attach_to_root", True)
@@ -25,10 +25,7 @@ class LoadVDBtoArnold(load.LoaderPlugin):
        from ayon_core.hosts.maya.api.pipeline import containerise
        from ayon_core.hosts.maya.api.lib import unique_namespace

        try:
            product_type = context["representation"]["context"]["family"]
        except ValueError:
            product_type = "vdbcache"
        product_type = context["product"]["productType"]

        # Check if the plugin for arnold is available on the pc
        try:

@@ -64,7 +61,7 @@ class LoadVDBtoArnold(load.LoaderPlugin):
        path = self.filepath_from_context(context)
        self._set_path(grid_node,
                       path=path,
                       representation=context["representation"])
                       repre_entity=context["representation"])

        # Lock the shape node so the user can't delete the transform/shape
        # as if it was referenced

@@ -94,7 +91,7 @@ class LoadVDBtoArnold(load.LoaderPlugin):
        assert len(grid_nodes) == 1, "This is a bug"

        # Update the VRayVolumeGrid
        self._set_path(grid_nodes[0], path=path, representation=repre_entity)
        self._set_path(grid_nodes[0], path=path, repre_entity=repre_entity)

        # Update container representation
        cmds.setAttr(container["objectName"] + ".representation",

@@ -108,7 +105,7 @@ class LoadVDBtoArnold(load.LoaderPlugin):

        from maya import cmds

        # Get all members of the avalon container, ensure they are unlocked
        # Get all members of the AYON container, ensure they are unlocked
        # and delete everything
        members = cmds.sets(container['objectName'], query=True)
        cmds.lockNode(members, lock=False)

@@ -31,10 +31,7 @@ class LoadVDBtoRedShift(load.LoaderPlugin):
        from ayon_core.hosts.maya.api.pipeline import containerise
        from ayon_core.hosts.maya.api.lib import unique_namespace

        try:
            product_type = context["representation"]["context"]["family"]
        except ValueError:
            product_type = "vdbcache"
        product_type = context["product"]["productType"]

        # Check if the plugin for redshift is available on the pc
        try:

@@ -115,7 +112,7 @@ class LoadVDBtoRedShift(load.LoaderPlugin):
    def remove(self, container):
        from maya import cmds

        # Get all members of the avalon container, ensure they are unlocked
        # Get all members of the AYON container, ensure they are unlocked
        # and delete everything
        members = cmds.sets(container['objectName'], query=True)
        cmds.lockNode(members, lock=False)

@@ -94,10 +94,7 @@ class LoadVDBtoVRay(load.LoaderPlugin):
                "Path does not exist: %s" % path
            )

        try:
            product_type = context["representation"]["context"]["family"]
        except ValueError:
            product_type = "vdbcache"
        product_type = context["product"]["productType"]

        # Ensure V-ray is loaded with the vrayvolumegrid
        if not cmds.pluginInfo("vrayformaya", query=True, loaded=True):

@@ -277,7 +274,7 @@ class LoadVDBtoVRay(load.LoaderPlugin):

    def remove(self, container):

        # Get all members of the avalon container, ensure they are unlocked
        # Get all members of the AYON container, ensure they are unlocked
        # and delete everything
        members = cmds.sets(container['objectName'], query=True)
        cmds.lockNode(members, lock=False)

@@ -47,10 +47,7 @@ class VRayProxyLoader(load.LoaderPlugin):

        """

        try:
            product_type = context["representation"]["context"]["family"]
        except ValueError:
            product_type = "vrayproxy"
        product_type = context["product"]["productType"]

        # get all representations for this version
        filename = self._get_abc(

@@ -26,10 +26,7 @@ class VRaySceneLoader(load.LoaderPlugin):
    color = "orange"

    def load(self, context, name, namespace, data):
        try:
            product_type = context["representation"]["context"]["family"]
        except ValueError:
            product_type = "vrayscene_layer"
        product_type = context["product"]["productType"]

        folder_name = context["folder"]["name"]
        namespace = namespace or unique_namespace(

@@ -56,10 +56,7 @@ class YetiCacheLoader(load.LoaderPlugin):

        """

        try:
            product_type = context["representation"]["context"]["family"]
        except ValueError:
            product_type = "yeticache"
        product_type = context["product"]["productType"]

        # Build namespace
        folder_name = context["folder"]["name"]
@@ -79,12 +79,12 @@ def iter_history(nodes,
def collect_input_containers(containers, nodes):
    """Collect containers that contain any of the node in `nodes`.

    This will return any loaded Avalon container that contains at least one of
    the nodes. As such, the Avalon container is an input for it. Or in short,
    This will return any loaded AYON container that contains at least one of
    the nodes. As such, the AYON container is an input for it. Or in short,
    there are member nodes of that container.

    Returns:
        list: Input avalon containers
        list: Input loaded containers

    """
    # Assume the containers have collected their cached '_members' data

@@ -40,9 +40,11 @@ class _NodeTypeAttrib(object):
        return "{}.{}".format(node, self.colour_space)

    def __str__(self):
        return "_NodeTypeAttrib(name={}, fname={}, "
        "computed_fname={}, colour_space={})".format(
            self.name, self.fname, self.computed_fname, self.colour_space)
        return (
            "_NodeTypeAttrib(name={}, fname={}, "
            "computed_fname={}, colour_space={})".format(
                self.name, self.fname, self.computed_fname, self.colour_space)
        )


NODETYPES = {

@@ -106,10 +106,10 @@ class TextureProcessor:
        self.log = log

    def apply_settings(self, project_settings):
        """Apply OpenPype system/project settings to the TextureProcessor
        """Apply AYON system/project settings to the TextureProcessor

        Args:
            project_settings (dict): OpenPype project settings
            project_settings (dict): AYON project settings

        Returns:
            None

@@ -278,7 +278,7 @@ class MakeTX(TextureProcessor):
        """Process the texture.

        This function requires the `maketx` executable to be available in an
        OpenImageIO toolset detectable by OpenPype.
        OpenImageIO toolset detectable by AYON.

        Args:
            source (str): Path to source file.
@@ -5,13 +5,13 @@ from maya import cmds
from ayon_core.pipeline import publish


class ExtractYetiCache(publish.Extractor):
class ExtractUnrealYetiCache(publish.Extractor):
    """Producing Yeti cache files using scene time range.

    This will extract Yeti cache file sequence and fur settings.
    """

    label = "Extract Yeti Cache"
    label = "Extract Yeti Cache (Unreal)"
    hosts = ["maya"]
    families = ["yeticacheUE"]

@@ -128,9 +128,11 @@ class ExtractWorkfileXgen(publish.Extractor):
            alembic_files.append(alembic_file)

        template_data = copy.deepcopy(instance.data["anatomyData"])
        published_maya_path = StringTemplate(
            instance.context.data["anatomy"].templates["publish"]["file"]
        ).format(template_data)
        anatomy = instance.context.data["anatomy"]
        publish_template = anatomy.get_template_item(
            "publish", "default", "file"
        )
        published_maya_path = publish_template.format(template_data)
        published_basename, _ = os.path.splitext(published_maya_path)

        for source in alembic_files:

@@ -39,8 +39,9 @@ class ExtractXgen(publish.Extractor):
        # Get published xgen file name.
        template_data = copy.deepcopy(instance.data["anatomyData"])
        template_data.update({"ext": "xgen"})
        templates = instance.context.data["anatomy"].templates["publish"]
        xgen_filename = StringTemplate(templates["file"]).format(template_data)
        anatomy = instance.context.data["anatomy"]
        file_template = anatomy.get_template_item("publish", "default", "file")
        xgen_filename = file_template.format(template_data)

        xgen_path = os.path.join(
            self.staging_dir(instance), xgen_filename
@@ -0,0 +1,32 @@
<?xml version="1.0" encoding="UTF-8"?>
<root>
    <error id="main">
        <title>Shape IDs mismatch original shape</title>
        <description>## Shapes mismatch IDs with original shape

Meshes are detected in the **rig** where the (deformed) mesh has a different
`cbId` than the same mesh in its deformation history.
Theses should normally be the same.

### How to repair?

By using the repair action the IDs from the shape in history will be
copied to the deformed shape. For rig instances, in many cases the
correct fix is to use the repair action **unless** you explicitly tried
to update the `cbId` values on the meshes - in that case you actually want
to do to the reverse and copy the IDs from the deformed mesh to the history
mesh instead.

        </description>
        <detail>
### How does this happen?

When a deformer is applied in the scene on a referenced mesh that had no
deformers then Maya will create a new shape node for the mesh that
does not have the original id. Then on scene save new ids get created for the
meshes lacking a `cbId` and thus the mesh then has a different `cbId` than
the mesh in the deformation history.

        </detail>
    </error>
</root>
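For orientation only, a hedged sketch of what the repair described above amounts to in Maya terms; the function name and the assumption that `cbId` is a string attribute are illustrative, not the validator's actual implementation:

```python
from maya import cmds


def copy_cbid_from_history(deformed_shape, history_shape):
    # Read the `cbId` from the shape found in the deformation history
    # and write it onto the deformed shape so both match again.
    cb_id = cmds.getAttr(history_shape + ".cbId")
    if not cmds.attributeQuery("cbId", node=deformed_shape, exists=True):
        cmds.addAttr(deformed_shape, longName="cbId", dataType="string")
    cmds.setAttr(deformed_shape + ".cbId", cb_id, type="string")
```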
@@ -7,7 +7,9 @@ from ayon_core.pipeline.publish import (
    RepairAction,
    ValidateContentsOrder,
    PublishValidationError,
    OptionalPyblishPluginMixin
    OptionalPyblishPluginMixin,
    get_plugin_settings,
    apply_plugin_settings_automatically
)


@@ -32,6 +34,20 @@ class ValidateOutRelatedNodeIds(pyblish.api.InstancePlugin,
    ]
    optional = False

    @classmethod
    def apply_settings(cls, project_settings):
        # Preserve automatic settings applying logic
        settings = get_plugin_settings(plugin=cls,
                                       project_settings=project_settings,
                                       log=cls.log,
                                       category="maya")
        apply_plugin_settings_automatically(cls, settings, logger=cls.log)

        # Disable plug-in if cbId workflow is disabled
        if not project_settings["maya"].get("use_cbid_workflow", True):
            cls.enabled = False
            return

    def process(self, instance):
        """Process all meshes"""
        if not self.is_active(instance.data):

@@ -22,6 +22,13 @@ class ValidateArnoldSceneSourceCbid(pyblish.api.InstancePlugin,
    actions = [RepairAction]
    optional = False

    @classmethod
    def apply_settings(cls, project_settings):
        # Disable plug-in if cbId workflow is disabled
        if not project_settings["maya"].get("use_cbid_workflow", True):
            cls.enabled = False
            return

    @staticmethod
    def _get_nodes_by_name(nodes):
        nodes_by_name = {}
@@ -1,81 +0,0 @@
import maya.cmds as cmds
import pyblish.api

from ayon_core.hosts.maya.api import lib
from ayon_core.pipeline.publish import (
    PublishValidationError,
    OptionalPyblishPluginMixin
)


class ValidateInstancerContent(pyblish.api.InstancePlugin,
                               OptionalPyblishPluginMixin):
    """Validates that all meshes in the instance have object IDs.

    This skips a check on intermediate objects because we consider them
    not important.
    """
    order = pyblish.api.ValidatorOrder
    label = 'Instancer Content'
    families = ['instancer']
    optional = False

    def process(self, instance):
        if not self.is_active(instance.data):
            return
        error = False
        members = instance.data['setMembers']
        export_members = instance.data['exactExportMembers']

        self.log.debug("Contents {0}".format(members))

        if not len(members) == len(cmds.ls(members, type="instancer")):
            self.log.error("Instancer can only contain instancers")
            error = True

        # TODO: Implement better check for particles are cached
        if not cmds.ls(export_members, type="nucleus"):
            self.log.error("Instancer must have a connected nucleus")
            error = True

        if not cmds.ls(export_members, type="cacheFile"):
            self.log.error("Instancer must be cached")
            error = True

        hidden = self.check_geometry_hidden(export_members)
        if not hidden:
            error = True
            self.log.error("Instancer input geometry must be hidden "
                           "the scene. Invalid: {0}".format(hidden))

        # Ensure all in one group
        parents = cmds.listRelatives(members,
                                     allParents=True,
                                     fullPath=True) or []
        roots = list(set(cmds.ls(parents, assemblies=True, long=True)))
        if len(roots) > 1:
            self.log.error("Instancer should all be contained in a single "
                           "group. Current roots: {0}".format(roots))
            error = True

        if error:
            raise PublishValidationError(
                "Instancer Content is invalid. See log.")

    def check_geometry_hidden(self, export_members):

        # Ensure all instanced geometry is hidden
        shapes = cmds.ls(export_members,
                         dag=True,
                         shapes=True,
                         noIntermediate=True)
        meshes = cmds.ls(shapes, type="mesh")

        visible = [node for node in meshes
                   if lib.is_visible(node,
                                     displayLayer=False,
                                     intermediateObject=False)]
        if visible:
            return False

        return True
@@ -1,172 +0,0 @@
import os
import re

import pyblish.api

from ayon_core.pipeline.publish import (
    PublishValidationError,
    OptionalPyblishPluginMixin
)

def is_cache_resource(resource):
    """Return whether resource is a cacheFile resource"""
    required = set(["maya", "node", "cacheFile"])
    tags = resource.get("tags", [])
    return required.issubset(tags)


def valdidate_files(files):
    for f in files:
        assert os.path.exists(f)
        assert f.endswith(".mcx") or f.endswith(".mcc")

    return True


def filter_ticks(files):
    tick_files = set()
    ticks = set()
    for path in files:
        match = re.match(".+Tick([0-9]+).mcx$", os.path.basename(path))
        if match:
            tick_files.add(path)
            num = match.group(1)
            ticks.add(int(num))

    return tick_files, ticks


class ValidateInstancerFrameRanges(pyblish.api.InstancePlugin,
                                   OptionalPyblishPluginMixin):
    """Validates all instancer particle systems are cached correctly.

    This means they should have the files/frames as required by the start-end
    frame (including handles).

    This also checks the files exist and checks the "ticks" (substeps) files.

    """
    order = pyblish.api.ValidatorOrder
    label = 'Instancer Cache Frame Ranges'
    families = ['instancer']
    optional = False

    @classmethod
    def get_invalid(cls, instance):
        if not self.is_active(instance.data):
            return
        import pyseq

        start_frame = instance.data.get("frameStart", 0)
        end_frame = instance.data.get("frameEnd", 0)
        required = range(int(start_frame), int(end_frame) + 1)

        invalid = list()
        resources = instance.data.get("resources", [])

        for resource in resources:
            if not is_cache_resource(resource):
                continue

            node = resource['node']
            all_files = resource['files'][:]
            all_lookup = set(all_files)

            # The first file is usually the .xml description file.
            xml = all_files.pop(0)
            assert xml.endswith(".xml")

            # Ensure all files exist (including ticks)
            # The remainder file paths should be the .mcx or .mcc files
            valdidate_files(all_files)

            # Maya particle caches support substeps by saving out additional
            # files that end with a Tick60.mcx, Tick120.mcx, etc. suffix.
            # To avoid `pyseq` getting confused we filter those out and then
            # for each file (except the last frame) check that at least all
            # ticks exist.

            tick_files, ticks = filter_ticks(all_files)
            if tick_files:
                files = [f for f in all_files if f not in tick_files]
            else:
                files = all_files

            sequences = pyseq.get_sequences(files)
            if len(sequences) != 1:
                invalid.append(node)
                cls.log.warning("More than one sequence found? "
                                "{0} {1}".format(node, files))
                cls.log.warning("Found caches: {0}".format(sequences))
                continue

            sequence = sequences[0]
            cls.log.debug("Found sequence: {0}".format(sequence))

            start = sequence.start()
            end = sequence.end()

            if start > start_frame or end < end_frame:
                invalid.append(node)
                cls.log.warning("Sequence does not have enough "
                                "frames: {0}-{1} (requires: {2}-{3})"
                                "".format(start, end,
                                          start_frame,
                                          end_frame))
                continue

            # Ensure all frames are present
            missing = set(sequence.missing())
            if missing:
                required_missing = [x for x in required if x in missing]
                if required_missing:
                    invalid.append(node)
                    cls.log.warning("Sequence is missing required frames: "
                                    "{0}".format(required_missing))
                    continue

            # Ensure all tick files (substep) exist for the files in the folder
            # for the frames required by the time range.
            if ticks:
                ticks = list(sorted(ticks))
                cls.log.debug("Found ticks: {0} "
                              "(substeps: {1})".format(ticks, len(ticks)))

                # Check all frames except the last since we don't
                # require subframes after our time range.
                tick_check_frames = set(required[:-1])

                # Check all frames
                for item in sequence:
                    frame = item.frame
                    if not frame:
                        invalid.append(node)
                        cls.log.error("Path is not a frame in sequence: "
                                      "{0}".format(item))
                        continue

                    # Not required for our time range
                    if frame not in tick_check_frames:
                        continue

                    path = item.path
                    for num in ticks:
                        base, ext = os.path.splitext(path)
                        tick_file = base + "Tick{0}".format(num) + ext
                        if tick_file not in all_lookup:
                            invalid.append(node)
                            cls.log.warning("Tick file found that is not "
                                            "in cache query filenames: "
                                            "{0}".format(tick_file))

        return invalid

    def process(self, instance):

        invalid = self.get_invalid(instance)

        if invalid:
            self.log.error("Invalid nodes: {0}".format(invalid))
            raise PublishValidationError(
                ("Invalid particle caches in instance. "
                 "See logs for details."))
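As a small, hedged illustration of the tick/substep filtering the removed validator relied on, here is a standalone run of the same helper with invented file names:

```python
import os
import re


def filter_ticks(files):
    # Same logic as the removed helper above: split substep ("Tick") caches
    # from the regular per-frame cache files.
    tick_files = set()
    ticks = set()
    for path in files:
        match = re.match(".+Tick([0-9]+).mcx$", os.path.basename(path))
        if match:
            tick_files.add(path)
            ticks.add(int(match.group(1)))
    return tick_files, ticks


files = ["cacheFrame1.mcx", "cacheFrame1Tick60.mcx", "cacheFrame2.mcx"]
print(filter_ticks(files))
# -> ({'cacheFrame1Tick60.mcx'}, {60})
```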
@@ -24,7 +24,7 @@ class ValidateLoadedPlugin(pyblish.api.ContextPlugin,

        invalid = []
        loaded_plugin = cmds.pluginInfo(query=True, listPlugins=True)
        # get variable from OpenPype settings
        # get variable from AYON settings
        whitelist_native_plugins = cls.whitelist_native_plugins
        authorized_plugins = cls.authorized_plugins or []

@@ -27,6 +27,13 @@ class ValidateLookIdReferenceEdits(pyblish.api.InstancePlugin):
    actions = [ayon_core.hosts.maya.api.action.SelectInvalidAction,
               RepairAction]

    @classmethod
    def apply_settings(cls, project_settings):
        # Disable plug-in if cbId workflow is disabled
        if not project_settings["maya"].get("use_cbid_workflow", True):
            cls.enabled = False
            return

    def process(self, instance):
        invalid = self.get_invalid(instance)

@@ -16,7 +16,7 @@ class ValidateShadingEngine(pyblish.api.InstancePlugin,

    Shading engines should be named "{surface_shader}SG"
    """
    ``

    order = ValidateContentsOrder
    families = ["look"]
    hosts = ["maya"]
Some files were not shown because too many files have changed in this diff.