Mirror of https://github.com/ynput/ayon-core.git (synced 2025-12-24 21:04:40 +01:00)

Merge branch 'develop' into enhancement/add_dng_support
Commit 8d8daa31f8
367 changed files with 7107 additions and 4478 deletions
.github/workflows/pr_linting.yml (vendored, new file, 24 lines)
@@ -0,0 +1,24 @@
name: 📇 Code Linting

on:
  push:
    branches: [ develop ]
  pull_request:
    branches: [ develop ]

  workflow_dispatch:

concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number}}
  cancel-in-progress: true

permissions:
  contents: read
  pull-requests: write

jobs:
  linting:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: chartboost/ruff-action@v1
.gitignore (vendored, 1 line changed)
@@ -77,6 +77,7 @@ dump.sql

# Poetry
########
.poetry/
.python-version
.editorconfig
.pre-commit-config.yaml
@@ -1,3 +1,3 @@
flake8:
  enabled: true
  config_file: setup.cfg
flake8:
  enabled: true
  config_file: setup.cfg
@@ -1,12 +1,27 @@
# See https://pre-commit.com for more information
# See https://pre-commit.com/hooks.html for more hooks
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
  rev: v4.4.0
  hooks:
  - id: trailing-whitespace
  - id: end-of-file-fixer
  - id: check-yaml
  - id: check-added-large-files
  - id: no-commit-to-branch
    args: [ '--pattern', '^(?!((release|enhancement|feature|bugfix|documentation|tests|local|chore)\/[a-zA-Z0-9\-_]+)$).*' ]
- repo: https://github.com/pre-commit/pre-commit-hooks
  rev: v4.4.0
  hooks:
  - id: trailing-whitespace
  - id: end-of-file-fixer
  - id: check-yaml
  - id: check-added-large-files
  - id: no-commit-to-branch
    args: [ '--pattern', '^(?!((release|enhancement|feature|bugfix|documentation|tests|local|chore)\/[a-zA-Z0-9\-_]+)$).*' ]
- repo: https://github.com/codespell-project/codespell
  rev: v2.2.6
  hooks:
  - id: codespell
    additional_dependencies:
      - tomli

- repo: https://github.com/astral-sh/ruff-pre-commit
  # Ruff version.
  rev: v0.3.3
  hooks:
    # Run the linter.
    - id: ruff
    # Run the formatter.
    # - id: ruff-format
README.md (24 lines changed)
@@ -1,8 +1,8 @@
AYON Core addon
========
AYON Core Addon
===============

AYON core provides the base building blocks for all other AYON addons and integrations and is responsible for discovery and initialization of other addons.
AYON core provides the base building blocks for all other AYON addons and integrations and is responsible for discovery and initialization of other addons.

- Some of its key functions include:
- It is used as the main command line handler in [ayon-launcher](https://github.com/ynput/ayon-launcher) application.
@@ -13,8 +13,20 @@ AYON core provides the base building blocks for all other AYON addons and integr
- Defines pipeline API used by other integrations
- Provides all graphical tools for artists
- Defines AYON QT styling
- A bunch more things
- A bunch more things

Together with [ayon-launcher](https://github.com/ynput/ayon-launcher) , they form the base of AYON pipeline and is one of few compulsory addons for AYON pipeline to be useful in a meaningful way.
Together with [ayon-launcher](https://github.com/ynput/ayon-launcher) , they form the base of AYON pipeline and is one of few compulsory addons for AYON pipeline to be useful in a meaningful way.

AYON-core is a successor to OpenPype repository (minus all the addons) and still in the process of cleaning up of all references. Please bear with us during this transitional phase.
AYON-core is a successor to [OpenPype repository](https://github.com/ynput/OpenPype) (minus all the addons) and still in the process of cleaning up of all references. Please bear with us during this transitional phase.

Development and testing notes
-----------------------------
There is `pyproject.toml` file in the root of the repository. This file is used to define the development environment and is used by `poetry` to create a virtual environment.
This virtual environment is used to run tests and to develop the code, to help with
linting and formatting. Dependencies defined here are not used in actual addon
deployment - for that you need to edit `./client/pyproject.toml` file. That file
will be then processed [ayon-dependencies-tool](https://github.com/ynput/ayon-dependencies-tool)
to create dependency package.

Right now, this file needs to by synced with dependencies manually, but in the future
we plan to automate process of development environment creation.
@@ -14,3 +14,15 @@ AYON_SERVER_ENABLED = True
# Indicate if AYON entities should be used instead of OpenPype entities
USE_AYON_ENTITIES = True
# -------------------------


__all__ = (
    "__version__",

    # Deprecated
    "AYON_CORE_ROOT",
    "PACKAGE_DIR",
    "PLUGINS_DIR",
    "AYON_SERVER_ENABLED",
    "USE_AYON_ENTITIES",
)
@@ -27,7 +27,7 @@ AYON addons should contain separated logic of specific kind of implementation, s
- default interfaces are defined in `interfaces.py`

## IPluginPaths
- addon wants to add directory path/s to avalon or publish plugins
- addon wants to add directory path/s to publish, load, create or inventory plugins
- addon must implement `get_plugin_paths` which must return dictionary with possible keys `"publish"`, `"load"`, `"create"` or `"actions"`
- each key may contain list or string with a path to directory with plugins
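For context, a minimal sketch of an addon exposing plugin directories through `get_plugin_paths`, as described above. It assumes `IPluginPaths` is importable from `ayon_core.addon` alongside `AYONAddon` (the way `IHostAddon` is imported elsewhere in this diff); the addon name and folder layout are illustrative only:

import os

from ayon_core.addon import AYONAddon, IPluginPaths

# Hypothetical root of this example addon
EXAMPLE_ADDON_ROOT = os.path.dirname(os.path.abspath(__file__))


class ExampleAddon(AYONAddon, IPluginPaths):
    """Example addon that only adds plugin directories."""

    name = "example"

    def get_plugin_paths(self):
        # Possible keys are "publish", "load", "create" or "actions";
        # each value may be a single path or a list of paths.
        plugins_dir = os.path.join(EXAMPLE_ADDON_ROOT, "plugins")
        return {
            "publish": [os.path.join(plugins_dir, "publish")],
            "load": [os.path.join(plugins_dir, "load")],
            "create": os.path.join(plugins_dir, "create"),
        }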
@@ -89,4 +89,4 @@ AYON addons should contain separated logic of specific kind of implementation, s

### TrayAddonsManager
- inherits from `AddonsManager`
- has specific implementation for Pype Tray tool and handle `ITrayAddon` methods
- has specific implementation for AYON Tray and handle `ITrayAddon` methods
@@ -741,7 +741,7 @@ class AddonsManager:

        addon_classes = []
        for module in openpype_modules:
            # Go through globals in `pype.modules`
            # Go through globals in `ayon_core.modules`
            for name in dir(module):
                modules_item = getattr(module, name, None)
                # Filter globals that are not classes which inherit from
@@ -1075,7 +1075,7 @@ class AddonsManager:
        """Print out report of time spent on addons initialization parts.

        Reporting is not automated must be implemented for each initialization
        part separatelly. Reports must be stored to `_report` attribute.
        part separately. Reports must be stored to `_report` attribute.
        Print is skipped if `_report` is empty.

        Attribute `_report` is dictionary where key is "label" describing
@@ -1267,7 +1267,7 @@ class TrayAddonsManager(AddonsManager):
    def add_doubleclick_callback(self, addon, callback):
        """Register doubleclick callbacks on tray icon.

        Currently there is no way how to determine which is launched. Name of
        Currently, there is no way how to determine which is launched. Name of
        callback can be defined with `doubleclick_callback` attribute.

        Missing feature how to define default callback.
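A hedged sketch of how a tray addon could use this registration, based only on the signature and docstring above. The `ITrayAddon` interface is named in the addon README; the `tray_init` hook and the `manager` attribute used for registration are assumptions for illustration:

from ayon_core.addon import AYONAddon, ITrayAddon


class ExampleTrayAddon(AYONAddon, ITrayAddon):
    name = "example_tray"
    # Name of the method the tray should treat as the doubleclick
    # handler (see the `doubleclick_callback` attribute in the docstring).
    doubleclick_callback = "show_example_window"

    def show_example_window(self):
        print("Tray icon was double-clicked")

    def tray_init(self):
        # Assumed registration path: the owning manager exposes
        # `add_doubleclick_callback(addon, callback)` as shown above.
        self.manager.add_doubleclick_callback(self, self.show_example_window)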
@@ -3,7 +3,6 @@
import os
import sys
import json
import warnings


class Commands:
@@ -67,8 +66,6 @@ class Commands:
            install_ayon_plugins,
            get_global_context,
        )
        from ayon_core.tools.utils.host_tools import show_publish
        from ayon_core.tools.utils.lib import qt_app_context

        # Register target and host
        import pyblish.api
@@ -134,6 +131,8 @@ class Commands:
            print(plugin)

        if gui:
            from ayon_core.tools.utils.host_tools import show_publish
            from ayon_core.tools.utils.lib import qt_app_context
            with qt_app_context():
                show_publish()
        else:
(deleted file, 58 lines)
@@ -1,58 +0,0 @@
import os

from ayon_core.lib import get_ayon_launcher_args
from ayon_core.lib.applications import (
    get_non_python_host_kwargs,
    PreLaunchHook,
    LaunchTypes,
)

from ayon_core import AYON_CORE_ROOT


class NonPythonHostHook(PreLaunchHook):
    """Launch arguments preparation.

    Non python host implementation do not launch host directly but use
    python script which launch the host. For these cases it is necessary to
    prepend python (or ayon) executable and script path before application's.
    """
    app_groups = {"harmony", "photoshop", "aftereffects"}

    order = 20
    launch_types = {LaunchTypes.local}

    def execute(self):
        # Pop executable
        executable_path = self.launch_context.launch_args.pop(0)

        # Pop rest of launch arguments - There should not be other arguments!
        remainders = []
        while self.launch_context.launch_args:
            remainders.append(self.launch_context.launch_args.pop(0))

        script_path = os.path.join(
            AYON_CORE_ROOT,
            "scripts",
            "non_python_host_launch.py"
        )

        new_launch_args = get_ayon_launcher_args(
            "run", script_path, executable_path
        )
        # Add workfile path if exists
        workfile_path = self.data["last_workfile_path"]
        if (
                self.data.get("start_last_workfile")
                and workfile_path
                and os.path.exists(workfile_path)):
            new_launch_args.append(workfile_path)

        # Append as whole list as these areguments should not be separated
        self.launch_context.launch_args.append(new_launch_args)

        if remainders:
            self.launch_context.launch_args.extend(remainders)

        self.launch_context.kwargs = \
            get_non_python_host_kwargs(self.launch_context.kwargs)
@@ -1,6 +1,12 @@
from .addon import AfterEffectsAddon
from .addon import (
    AFTEREFFECTS_ADDON_ROOT,
    AfterEffectsAddon,
    get_launch_script_path,
)


__all__ = (
    "AFTEREFFECTS_ADDON_ROOT",
    "AfterEffectsAddon",
    "get_launch_script_path",
)
@@ -1,5 +1,9 @@
import os

from ayon_core.addon import AYONAddon, IHostAddon

AFTEREFFECTS_ADDON_ROOT = os.path.dirname(os.path.abspath(__file__))


class AfterEffectsAddon(AYONAddon, IHostAddon):
    name = "aftereffects"
@@ -17,3 +21,16 @@ class AfterEffectsAddon(AYONAddon, IHostAddon):

    def get_workfile_extensions(self):
        return [".aep"]

    def get_launch_hook_paths(self, app):
        if app.host_name != self.host_name:
            return []
        return [
            os.path.join(AFTEREFFECTS_ADDON_ROOT, "hooks")
        ]


def get_launch_script_path():
    return os.path.join(
        AFTEREFFECTS_ADDON_ROOT, "api", "launch_script.py"
    )
@@ -31,6 +31,7 @@ __all__ = [
    "get_stub",

    # pipeline
    "AfterEffectsHost",
    "ls",
    "containerise",
@@ -7,7 +7,6 @@ import asyncio
import functools
import traceback


from wsrpc_aiohttp import (
    WebSocketRoute,
    WebSocketAsync
@@ -1,4 +1,4 @@
"""Script wraps launch mechanism of non python host implementations.
"""Script wraps launch mechanism of AfterEffects implementations.

Arguments passed to the script are passed to launch function in host
implementation. In all cases requires host app executable and may contain
@@ -8,6 +8,8 @@ workfile or others.
import os
import sys

from ayon_core.hosts.aftereffects.api.launch_logic import main as host_main

# Get current file to locate start point of sys.argv
CURRENT_FILE = os.path.abspath(__file__)
@@ -79,26 +81,9 @@ def main(argv):
    if after_script_idx is not None:
        launch_args = sys_args[after_script_idx:]

    host_name = os.environ["AYON_HOST_NAME"].lower()
    if host_name == "photoshop":
        # TODO refactor launch logic according to AE
        from ayon_core.hosts.photoshop.api.lib import main
    elif host_name == "aftereffects":
        from ayon_core.hosts.aftereffects.api.launch_logic import main
    elif host_name == "harmony":
        from ayon_core.hosts.harmony.api.lib import main
    else:
        title = "Unknown host name"
        message = (
            "BUG: Environment variable AYON_HOST_NAME contains unknown"
            " host name \"{}\""
        ).format(host_name)
        show_error_messagebox(title, message)
        return

    if launch_args:
        # Launch host implementation
        main(*launch_args)
        host_main(*launch_args)
    else:
        # Show message box
        on_invalid_args(after_script_idx is None)
client/ayon_core/hosts/aftereffects/hooks/pre_launch_args.py (new file, 91 lines)
@@ -0,0 +1,91 @@
import os
import platform
import subprocess

from ayon_core.lib import (
    get_ayon_launcher_args,
    is_using_ayon_console,
)
from ayon_core.lib.applications import (
    PreLaunchHook,
    LaunchTypes,
)
from ayon_core.hosts.aftereffects import get_launch_script_path


def get_launch_kwargs(kwargs):
    """Explicit setting of kwargs for Popen for AfterEffects.

    Expected behavior
    - ayon_console opens window with logs
    - ayon has stdout/stderr available for capturing

    Args:
        kwargs (Union[dict, None]): Current kwargs or None.

    """
    if kwargs is None:
        kwargs = {}

    if platform.system().lower() != "windows":
        return kwargs

    if is_using_ayon_console():
        kwargs.update({
            "creationflags": subprocess.CREATE_NEW_CONSOLE
        })
    else:
        kwargs.update({
            "creationflags": subprocess.CREATE_NO_WINDOW,
            "stdout": subprocess.DEVNULL,
            "stderr": subprocess.DEVNULL
        })
    return kwargs


class AEPrelaunchHook(PreLaunchHook):
    """Launch arguments preparation.

    Hook add python executable and script path to AE implementation before
    AE executable and add last workfile path to launch arguments.

    Existence of last workfile is checked. If workfile does not exists tries
    to copy templated workfile from predefined path.
    """
    app_groups = {"aftereffects"}

    order = 20
    launch_types = {LaunchTypes.local}

    def execute(self):
        # Pop executable
        executable_path = self.launch_context.launch_args.pop(0)

        # Pop rest of launch arguments - There should not be other arguments!
        remainders = []
        while self.launch_context.launch_args:
            remainders.append(self.launch_context.launch_args.pop(0))

        script_path = get_launch_script_path()

        new_launch_args = get_ayon_launcher_args(
            "run", script_path, executable_path
        )
        # Add workfile path if exists
        workfile_path = self.data["last_workfile_path"]
        if (
            self.data.get("start_last_workfile")
            and workfile_path
            and os.path.exists(workfile_path)
        ):
            new_launch_args.append(workfile_path)

        # Append as whole list as these arguments should not be separated
        self.launch_context.launch_args.append(new_launch_args)

        if remainders:
            self.launch_context.launch_args.extend(remainders)

        self.launch_context.kwargs = get_launch_kwargs(
            self.launch_context.kwargs
        )
@@ -1,14 +1,11 @@
import os
import re
import tempfile
import attr

import attr
import pyblish.api

from ayon_core.settings import get_project_settings
from ayon_core.pipeline import publish
from ayon_core.pipeline.publish import RenderInstance

from ayon_core.hosts.aftereffects.api import get_stub
@@ -191,7 +191,7 @@ def _process_app_events() -> Optional[float]:


class LaunchQtApp(bpy.types.Operator):
    """A Base class for opertors to launch a Qt app."""
    """A Base class for operators to launch a Qt app."""

    _app: QtWidgets.QApplication
    _window = Union[QtWidgets.QDialog, ModuleType]
@@ -227,7 +227,7 @@ class BlendLoader(plugin.AssetLoader):
            obj.animation_data_create()
            obj.animation_data.action = actions[obj.name]

        # Restore the old data, but reset memebers, as they don't exist anymore
        # Restore the old data, but reset members, as they don't exist anymore
        # This avoids a crash, because the memory addresses of those members
        # are not valid anymore
        old_data["members"] = []
@@ -4,7 +4,6 @@ import bpy

from ayon_core.pipeline import publish
from ayon_core.hosts.blender.api import plugin
from ayon_core.hosts.blender.api.pipeline import AVALON_PROPERTY


class ExtractCameraABC(publish.Extractor, publish.OptionalPyblishPluginMixin):
@@ -4,7 +4,6 @@ import bpy

from ayon_core.pipeline import publish
from ayon_core.hosts.blender.api import plugin
from ayon_core.hosts.blender.api.pipeline import AVALON_PROPERTY


class ExtractFBX(publish.Extractor, publish.OptionalPyblishPluginMixin):
@@ -32,7 +32,7 @@ class ValidateDeadlinePublish(pyblish.api.InstancePlugin,
        tree = bpy.context.scene.node_tree
        output_type = "CompositorNodeOutputFile"
        output_node = None
        # Remove all output nodes that inlcude "AYON" in the name.
        # Remove all output nodes that include "AYON" in the name.
        # There should be only one.
        for node in tree.nodes:
            if node.bl_idname == output_type and "AYON" in node.name:
@@ -12,7 +12,7 @@ from ayon_core.pipeline.publish import (
import ayon_core.hosts.blender.api.action


class ValidateMeshNoNegativeScale(pyblish.api.Validator,
class ValidateMeshNoNegativeScale(pyblish.api.InstancePlugin,
                                  OptionalPyblishPluginMixin):
    """Ensure that meshes don't have a negative scale."""
(new file, 94 lines)
@@ -0,0 +1,94 @@
import inspect
from typing import List

import bpy

import pyblish.api

from ayon_core.pipeline.publish import (
    ValidateContentsOrder,
    OptionalPyblishPluginMixin,
    PublishValidationError,
    RepairAction
)
import ayon_core.hosts.blender.api.action


class ValidateModelMeshUvMap1(
    pyblish.api.InstancePlugin,
    OptionalPyblishPluginMixin,
):
    """Validate model mesh uvs are named `map1`.

    This is solely to get them to work nicely for the Maya pipeline.
    """

    order = ValidateContentsOrder
    hosts = ["blender"]
    families = ["model"]
    label = "Mesh UVs named map1"
    actions = [ayon_core.hosts.blender.api.action.SelectInvalidAction,
               RepairAction]
    optional = True
    enabled = False

    @classmethod
    def get_invalid(cls, instance) -> List:

        invalid = []
        for obj in instance:
            if obj.mode != "OBJECT":
                cls.log.warning(
                    f"Mesh object {obj.name} should be in 'OBJECT' mode"
                    " to be properly checked."
                )

            obj_data = obj.data
            if isinstance(obj_data, bpy.types.Mesh):
                mesh = obj_data

                # Ignore mesh without UVs
                if not mesh.uv_layers:
                    continue

                # If mesh has map1 all is ok
                if mesh.uv_layers.get("map1"):
                    continue

                cls.log.warning(
                    f"Mesh object {obj.name} should be in 'OBJECT' mode"
                    " to be properly checked."
                )
                invalid.append(obj)

        return invalid

    @classmethod
    def repair(cls, instance):
        for obj in cls.get_invalid(instance):
            mesh = obj.data

            # Rename the first UV set to map1
            mesh.uv_layers[0].name = "map1"

    def process(self, instance):
        if not self.is_active(instance.data):
            return

        invalid = self.get_invalid(instance)
        if invalid:
            raise PublishValidationError(
                f"Meshes found in instance without valid UV's: {invalid}",
                description=self.get_description()
            )

    def get_description(self):
        return inspect.cleandoc(
            """## Meshes must have map1 uv set

            To accompany a better Maya-focused pipeline with Alembics it is
            expected that a Mesh has a `map1` UV set. Blender defaults to
            a UV set named `UVMap` and thus needs to be renamed.

            """
        )
@@ -1,3 +1,4 @@
import inspect
from typing import List

import mathutils
@@ -5,29 +6,26 @@ import bpy

import pyblish.api

from ayon_core.hosts.blender.api import plugin, lib
import ayon_core.hosts.blender.api.action
from ayon_core.pipeline.publish import (
    ValidateContentsOrder,
    OptionalPyblishPluginMixin,
    PublishValidationError
    PublishValidationError,
    RepairAction
)


class ValidateTransformZero(pyblish.api.InstancePlugin,
                            OptionalPyblishPluginMixin):
    """Transforms can't have any values

    To solve this issue, try freezing the transforms. So long
    as the transforms, rotation and scale values are zero,
    you're all good.

    """
    """Transforms can't have any values"""

    order = ValidateContentsOrder
    hosts = ["blender"]
    families = ["model"]
    label = "Transform Zero"
    actions = [ayon_core.hosts.blender.api.action.SelectInvalidAction]
    actions = [ayon_core.hosts.blender.api.action.SelectInvalidAction,
               RepairAction]

    _identity = mathutils.Matrix()
@@ -51,5 +49,46 @@ class ValidateTransformZero(pyblish.api.InstancePlugin,
        names = ", ".join(obj.name for obj in invalid)
        raise PublishValidationError(
            "Objects found in instance which do not"
            f" have transform set to zero: {names}"
            f" have transform set to zero: {names}",
            description=self.get_description()
        )

    @classmethod
    def repair(cls, instance):

        invalid = cls.get_invalid(instance)
        if not invalid:
            return

        context = plugin.create_blender_context(
            active=invalid[0], selected=invalid
        )
        with lib.maintained_selection():
            with bpy.context.temp_override(**context):
                plugin.deselect_all()
                for obj in invalid:
                    obj.select_set(True)

                # TODO: Preferably this does allow custom pivot point locations
                #  and if so, this should likely apply to the delta instead
                #  using `bpy.ops.object.transforms_to_deltas(mode="ALL")`
                bpy.ops.object.transform_apply(location=True,
                                               rotation=True,
                                               scale=True)

    def get_description(self):
        return inspect.cleandoc(
            """## Transforms can't have any values.

            The location, rotation and scale on the transform must be at
            the default values. This also goes for the delta transforms.

            To solve this issue, try freezing the transforms:
            - `Object` > `Apply` > `All Transforms`

            Using the Repair action directly will do the same.

            So long as the transforms, rotation and scale values are zero,
            you're all good.
            """
        )
@@ -118,7 +118,7 @@ class CelactionPrelaunchHook(PreLaunchHook):
    def workfile_path(self):
        workfile_path = self.data["last_workfile_path"]

        # copy workfile from template if doesnt exist any on path
        # copy workfile from template if doesn't exist any on path
        if not os.path.exists(workfile_path):
            # TODO add ability to set different template workfile path via
            # settings
@@ -3,11 +3,11 @@ import sys
from pprint import pformat


class CollectCelactionCliKwargs(pyblish.api.Collector):
class CollectCelactionCliKwargs(pyblish.api.ContextPlugin):
    """ Collects all keyword arguments passed from the terminal """

    label = "Collect Celaction Cli Kwargs"
    order = pyblish.api.Collector.order - 0.1
    order = pyblish.api.CollectorOrder - 0.1

    def process(self, context):
        args = list(sys.argv[1:])
@@ -18,7 +18,7 @@ class CollectRenderPath(pyblish.api.InstancePlugin):
    def process(self, instance):
        anatomy = instance.context.data["anatomy"]
        anatomy_data = copy.deepcopy(instance.data["anatomyData"])
        padding = anatomy.templates.get("frame_padding", 4)
        padding = anatomy.templates_obj.frame_padding
        product_type = "render"
        anatomy_data.update({
            "frame": f"%0{padding}d",
@@ -28,18 +28,17 @@ class CollectRenderPath(pyblish.api.InstancePlugin):
        })
        anatomy_data["product"]["type"] = product_type

        anatomy_filled = anatomy.format(anatomy_data)

        # get anatomy rendering keys
        r_anatomy_key = self.anatomy_template_key_render_files
        m_anatomy_key = self.anatomy_template_key_metadata

        # get folder and path for rendering images from celaction
        render_dir = anatomy_filled[r_anatomy_key]["folder"]
        render_path = anatomy_filled[r_anatomy_key]["path"]
        r_template_item = anatomy.get_template_item("publish", r_anatomy_key)
        render_dir = r_template_item["directory"].format_strict(anatomy_data)
        render_path = r_template_item["path"].format_strict(anatomy_data)
        self.log.debug("__ render_path: `{}`".format(render_path))

        # create dir if it doesnt exists
        # create dir if it doesn't exists
        try:
            if not os.path.isdir(render_dir):
                os.makedirs(render_dir, exist_ok=True)
@@ -51,11 +50,14 @@ class CollectRenderPath(pyblish.api.InstancePlugin):
        instance.data["path"] = render_path

        # get anatomy for published renders folder path
        if anatomy_filled.get(m_anatomy_key):
            instance.data["publishRenderMetadataFolder"] = anatomy_filled[
                m_anatomy_key]["folder"]
            self.log.info("Metadata render path: `{}`".format(
                instance.data["publishRenderMetadataFolder"]
            ))
        m_template_item = anatomy.get_template_item(
            "publish", m_anatomy_key, default=None
        )
        if m_template_item is not None:
            metadata_path = m_template_item["directory"].format_strict(
                anatomy_data
            )
            instance.data["publishRenderMetadataFolder"] = metadata_path
            self.log.info("Metadata render path: `{}`".format(metadata_path))

        self.log.info(f"Render output path set to: `{render_path}`")
@@ -1,5 +1,5 @@
"""
OpenPype Autodesk Flame api
AYON Autodesk Flame api
"""
from .constants import (
    COLOR_MAP,
@@ -23,7 +23,7 @@ from .lib import (
    reset_segment_selection,
    get_segment_attributes,
    get_clips_in_reels,
    get_reformated_filename,
    get_reformatted_filename,
    get_frame_from_filename,
    get_padding_from_filename,
    maintained_object_duplication,
@@ -101,7 +101,7 @@ __all__ = [
    "reset_segment_selection",
    "get_segment_attributes",
    "get_clips_in_reels",
    "get_reformated_filename",
    "get_reformatted_filename",
    "get_frame_from_filename",
    "get_padding_from_filename",
    "maintained_object_duplication",
@@ -1,14 +1,14 @@

"""
OpenPype Flame api constances
AYON Flame api constances
"""
# OpenPype marker workflow variables
# AYON marker workflow variables
MARKER_NAME = "OpenPypeData"
MARKER_DURATION = 0
MARKER_COLOR = "cyan"
MARKER_PUBLISH_DEFAULT = False

# OpenPype color definitions
# AYON color definitions
COLOR_MAP = {
    "red": (1.0, 0.0, 0.0),
    "orange": (1.0, 0.5, 0.0),
@@ -607,7 +607,7 @@ def get_clips_in_reels(project):
    return output_clips


def get_reformated_filename(filename, padded=True):
def get_reformatted_filename(filename, padded=True):
    """
    Return fixed python expression path

@@ -615,10 +615,10 @@ def get_reformated_filename(filename, padded=True):
        filename (str): file name

    Returns:
        type: string with reformated path
        type: string with reformatted path

    Example:
        get_reformated_filename("plate.1001.exr") > plate.%04d.exr
        get_reformatted_filename("plate.1001.exr") > plate.%04d.exr

    """
    found = FRAME_PATTERN.search(filename)
@@ -980,7 +980,7 @@ class MediaInfoFile(object):

    @property
    def file_pattern(self):
        """Clips file patter
        """Clips file pattern.

        Returns:
            str: file pattern. ex. file.[1-2].exr
@@ -38,12 +38,12 @@ def install():
    pyblish.register_plugin_path(PUBLISH_PATH)
    register_loader_plugin_path(LOAD_PATH)
    register_creator_plugin_path(CREATE_PATH)
    log.info("OpenPype Flame plug-ins registered ...")
    log.info("AYON Flame plug-ins registered ...")

    # register callback for switching publishable
    pyblish.register_callback("instanceToggled", on_pyblish_instance_toggled)

    log.info("OpenPype Flame host installed ...")
    log.info("AYON Flame host installed ...")


def uninstall():
@@ -57,7 +57,7 @@ def uninstall():
    # register callback for switching publishable
    pyblish.deregister_callback("instanceToggled", on_pyblish_instance_toggled)

    log.info("OpenPype Flame host uninstalled ...")
    log.info("AYON Flame host uninstalled ...")


def containerise(flame_clip_segment,
@@ -38,7 +38,7 @@ class CreatorWidget(QtWidgets.QDialog):
            | QtCore.Qt.WindowCloseButtonHint
            | QtCore.Qt.WindowStaysOnTopHint
        )
        self.setWindowTitle(name or "Pype Creator Input")
        self.setWindowTitle(name or "AYON Creator Input")
        self.resize(500, 700)

        # Where inputs and labels are set
@@ -644,13 +644,13 @@ class PublishableClip:
            "families": [self.base_product_type, self.product_type]
        }

    def _convert_to_entity(self, type, template):
    def _convert_to_entity(self, src_type, template):
        """ Converting input key to key with type. """
        # convert to entity type
        entity_type = self.types.get(type, None)
        folder_type = self.types.get(src_type, None)

        assert entity_type, "Missing entity type for `{}`".format(
            type
        assert folder_type, "Missing folder type for `{}`".format(
            src_type
        )

        # first collect formatting data to use for formatting template
@@ -661,7 +661,7 @@ class PublishableClip:
            formatting_data[_k] = value

        return {
            "entity_type": entity_type,
            "folder_type": folder_type,
            "entity_name": template.format(
                **formatting_data
            )
@@ -1018,7 +1018,7 @@ class OpenClipSolver(flib.MediaInfoFile):
                    self.feed_version_name))
        else:
            self.log.debug("adding new track element ..")
            # create new track as it doesnt exists yet
            # create new track as it doesn't exist yet
            # set current version to feeds on tmp
            tmp_xml_feeds = tmp_xml_track.find('feeds')
            tmp_xml_feeds.set('currentVersion', self.feed_version_name)
@@ -61,7 +61,7 @@ class WireTapCom(object):

    def get_launch_args(
            self, project_name, project_data, user_name, *args, **kwargs):
        """Forming launch arguments for OpenPype launcher.
        """Forming launch arguments for AYON launcher.

        Args:
            project_name (str): name of project
@@ -11,7 +11,7 @@ log = Logger.get_logger(__name__)
def _sync_utility_scripts(env=None):
    """ Synchronizing basic utlility scripts for flame.

    To be able to run start OpenPype within Flame we have to copy
    To be able to run start AYON within Flame we have to copy
    all utility_scripts and additional FLAME_SCRIPT_DIR into
    `/opt/Autodesk/shared/python`. This will be always synchronizing those
    folders.
@@ -124,7 +124,7 @@ def setup(env=None):
    # synchronize resolve utility scripts
    _sync_utility_scripts(env)

    log.info("Flame OpenPype wrapper has been installed")
    log.info("Flame AYON wrapper has been installed")


def get_flame_version():
@@ -72,7 +72,7 @@ class FlamePrelaunch(PreLaunchHook):
        project_data = {
            "Name": project_entity["name"],
            "Nickname": project_entity["code"],
            "Description": "Created by OpenPype",
            "Description": "Created by AYON",
            "SetupDir": project_entity["name"],
            "FrameWidth": int(width),
            "FrameHeight": int(height),
@@ -256,7 +256,7 @@ def create_otio_reference(clip_data, fps=None):

    if not otio_ex_ref_item:
        dirname, file_name = os.path.split(path)
        file_name = utils.get_reformated_filename(file_name, padded=False)
        file_name = utils.get_reformatted_filename(file_name, padded=False)
        reformated_path = os.path.join(dirname, file_name)
        # in case old OTIO or video file create `ExternalReference`
        otio_ex_ref_item = otio.schema.ExternalReference(
@@ -21,7 +21,7 @@ def frames_to_seconds(frames, framerate):
    return otio.opentime.to_seconds(rt)


def get_reformated_filename(filename, padded=True):
def get_reformatted_filename(filename, padded=True):
    """
    Return fixed python expression path

@@ -29,10 +29,10 @@ def get_reformated_filename(filename, padded=True):
        filename (str): file name

    Returns:
        type: string with reformated path
        type: string with reformatted path

    Example:
        get_reformated_filename("plate.1001.exr") > plate.%04d.exr
        get_reformatted_filename("plate.1001.exr") > plate.%04d.exr

    """
    found = FRAME_PATTERN.search(filename)
@@ -17,7 +17,7 @@ class CreateShotClip(opfapi.Creator):
        presets = deepcopy(self.presets)
        gui_inputs = self.get_gui_inputs()

        # get key pares from presets and match it on ui inputs
        # get key pairs from presets and match it on ui inputs
        for k, v in gui_inputs.items():
            if v["type"] in ("dict", "section"):
                # nested dictionary (only one level allowed
@@ -236,7 +236,7 @@ class CreateShotClip(opfapi.Creator):
                        "type": "QCheckBox",
                        "label": "Source resolution",
                        "target": "tag",
                        "toolTip": "Is resloution taken from timeline or source?", # noqa
                        "toolTip": "Is resolution taken from timeline or source?", # noqa
                        "order": 4},
                }
            },
@@ -37,7 +37,7 @@ class CollectTimelineInstances(pyblish.api.ContextPlugin):
        self.otio_timeline = context.data["otioTimeline"]
        self.fps = context.data["fps"]

        # process all sellected
        # process all selected
        for segment in selected_segments:
            # get openpype tag data
            marker_data = opfapi.get_segment_data_marker(segment)
@@ -100,6 +100,12 @@ class CollectTimelineInstances(pyblish.api.ContextPlugin):
            marker_data["handleEnd"] = min(
                marker_data["handleEnd"], tail)

            # Backward compatibility fix of 'entity_type' > 'folder_type'
            if "parents" in marker_data:
                for parent in marker_data["parents"]:
                    if "entity_type" in parent:
                        parent["folder_type"] = parent.pop("entity_type")

            workfile_start = self._set_workfile_start(marker_data)

            with_audio = bool(marker_data.pop("audio"))
@@ -396,7 +396,7 @@ class FtrackEntityOperator:

        entity = session.query(query).first()

        # if entity doesnt exist then create one
        # if entity doesn't exist then create one
        if not entity:
            entity = self.create_ftrack_entity(
                session,
@@ -79,7 +79,7 @@ class FlameBabyPublisherPanel(object):

        # creating ui
        self.window.setMinimumSize(1500, 600)
        self.window.setWindowTitle('OpenPype: Baby-publisher')
        self.window.setWindowTitle('AYON: Baby-publisher')
        self.window.setWindowFlags(QtCore.Qt.WindowStaysOnTopHint)
        self.window.setAttribute(QtCore.Qt.WA_DeleteOnClose)
        self.window.setFocusPolicy(QtCore.Qt.StrongFocus)
@@ -31,7 +31,7 @@ def scope_sequence(selection):
def get_media_panel_custom_ui_actions():
    return [
        {
            "name": "OpenPype: Baby-publisher",
            "name": "AYON: Baby-publisher",
            "actions": [
                {
                    "name": "Create Shots",
@@ -12,7 +12,7 @@ from ayon_core.pipeline import (


def openpype_install():
    """Registering OpenPype in context
    """Registering AYON in context
    """
    install_host(opfapi)
    print("Registered host: {}".format(registered_host()))
@@ -28,7 +28,7 @@ def exeption_handler(exctype, value, _traceback):
        tb (str): traceback to show
    """
    import traceback
    msg = "OpenPype: Python exception {} in {}".format(value, exctype)
    msg = "AYON: Python exception {} in {}".format(value, exctype)
    mbox = QtWidgets.QMessageBox()
    mbox.setText(msg)
    mbox.setDetailedText(
@@ -15,7 +15,7 @@ from .lib import (
    comp_lock_and_undo_chunk
)

from .menu import launch_openpype_menu
from .menu import launch_ayon_menu


__all__ = [
@@ -35,5 +35,5 @@ __all__ = [
    "comp_lock_and_undo_chunk",

    # menu
    "launch_openpype_menu",
    "launch_ayon_menu",
]
@@ -5,6 +5,8 @@ import contextlib

from ayon_core.lib import Logger

from ayon_core.pipeline import registered_host
from ayon_core.pipeline.create import CreateContext
from ayon_core.pipeline.context_tools import get_current_project_folder

self = sys.modules[__name__]
@@ -52,9 +54,15 @@ def update_frame_range(start, end, comp=None, set_render_range=True,
    comp.SetAttrs(attrs)


def set_current_context_framerange():
def set_current_context_framerange(folder_entity=None):
    """Set Comp's frame range based on current folder."""
    folder_entity = get_current_project_folder()
    if folder_entity is None:
        folder_entity = get_current_project_folder(
            fields={"attrib.frameStart",
                    "attrib.frameEnd",
                    "attrib.handleStart",
                    "attrib.handleEnd"})

    folder_attributes = folder_entity["attrib"]
    start = folder_attributes["frameStart"]
    end = folder_attributes["frameEnd"]
@@ -65,9 +73,24 @@ def set_current_context_framerange():
                       handle_end=handle_end)


def set_current_context_resolution():
def set_current_context_fps(folder_entity=None):
    """Set Comp's frame rate (FPS) to based on current asset"""
    if folder_entity is None:
        folder_entity = get_current_project_folder(fields={"attrib.fps"})

    fps = float(folder_entity["attrib"].get("fps", 24.0))
    comp = get_current_comp()
    comp.SetPrefs({
        "Comp.FrameFormat.Rate": fps,
    })


def set_current_context_resolution(folder_entity=None):
    """Set Comp's resolution width x height default based on current folder"""
    folder_entity = get_current_project_folder()
    if folder_entity is None:
        folder_entity = get_current_project_folder(
            fields={"attrib.resolutionWidth", "attrib.resolutionHeight"})

    folder_attributes = folder_entity["attrib"]
    width = folder_attributes["resolutionWidth"]
    height = folder_attributes["resolutionHeight"]
@@ -285,3 +308,98 @@ def comp_lock_and_undo_chunk(
    finally:
        comp.Unlock()
        comp.EndUndo(keep_undo)


def update_content_on_context_change():
    """Update all Creator instances to current asset"""
    host = registered_host()
    context = host.get_current_context()

    folder_path = context["folder_path"]
    task = context["task_name"]

    create_context = CreateContext(host, reset=True)

    for instance in create_context.instances:
        instance_folder_path = instance.get("folderPath")
        if instance_folder_path and instance_folder_path != folder_path:
            instance["folderPath"] = folder_path
        instance_task = instance.get("task")
        if instance_task and instance_task != task:
            instance["task"] = task

    create_context.save_changes()


def prompt_reset_context():
    """Prompt the user what context settings to reset.
    This prompt is used on saving to a different task to allow the scene to
    get matched to the new context.
    """
    # TODO: Cleanup this prototyped mess of imports and odd dialog
    from ayon_core.tools.attribute_defs.dialog import (
        AttributeDefinitionsDialog
    )
    from ayon_core.style import load_stylesheet
    from ayon_core.lib import BoolDef, UILabelDef
    from qtpy import QtWidgets, QtCore

    definitions = [
        UILabelDef(
            label=(
                "You are saving your workfile into a different folder or task."
                "\n\n"
                "Would you like to update some settings to the new context?\n"
            )
        ),
        BoolDef(
            "fps",
            label="FPS",
            tooltip="Reset Comp FPS",
            default=True
        ),
        BoolDef(
            "frame_range",
            label="Frame Range",
            tooltip="Reset Comp start and end frame ranges",
            default=True
        ),
        BoolDef(
            "resolution",
            label="Comp Resolution",
            tooltip="Reset Comp resolution",
            default=True
        ),
        BoolDef(
            "instances",
            label="Publish instances",
            tooltip="Update all publish instance's folder and task to match "
                    "the new folder and task",
            default=True
        ),
    ]

    dialog = AttributeDefinitionsDialog(definitions)
    dialog.setWindowFlags(
        dialog.windowFlags() | QtCore.Qt.WindowStaysOnTopHint
    )
    dialog.setWindowTitle("Saving to different context.")
    dialog.setStyleSheet(load_stylesheet())
    if not dialog.exec_():
        return None

    options = dialog.get_values()
    folder_entity = get_current_project_folder()
    if options["frame_range"]:
        set_current_context_framerange(folder_entity)

    if options["fps"]:
        set_current_context_fps(folder_entity)

    if options["resolution"]:
        set_current_context_resolution(folder_entity)

    if options["instances"]:
        update_content_on_context_change()

    dialog.deleteLater()
@@ -28,9 +28,9 @@ self = sys.modules[__name__]
self.menu = None


class OpenPypeMenu(QtWidgets.QWidget):
class AYONMenu(QtWidgets.QWidget):
    def __init__(self, *args, **kwargs):
        super(OpenPypeMenu, self).__init__(*args, **kwargs)
        super(AYONMenu, self).__init__(*args, **kwargs)

        self.setObjectName(f"{MENU_LABEL}Menu")

@@ -125,7 +125,7 @@ class OpenPypeMenu(QtWidgets.QWidget):
        self._pulse = FusionPulse(parent=self)
        self._pulse.start()

        # Detect Fusion events as OpenPype events
        # Detect Fusion events as AYON events
        self._event_handler = FusionEventHandler(parent=self)
        self._event_handler.start()

@@ -174,16 +174,16 @@ class OpenPypeMenu(QtWidgets.QWidget):
        set_current_context_framerange()


def launch_openpype_menu():
def launch_ayon_menu():
    app = get_qt_app()

    pype_menu = OpenPypeMenu()
    ayon_menu = AYONMenu()

    stylesheet = load_stylesheet()
    pype_menu.setStyleSheet(stylesheet)
    ayon_menu.setStyleSheet(stylesheet)

    pype_menu.show()
    self.menu = pype_menu
    ayon_menu.show()
    self.menu = ayon_menu

    result = app.exec_()
    print("Shutting down..")
@@ -5,6 +5,7 @@ import os
import sys
import logging
import contextlib
from pathlib import Path

import pyblish.api
from qtpy import QtCore
@@ -28,8 +29,8 @@ from ayon_core.tools.utils import host_tools

from .lib import (
    get_current_comp,
    comp_lock_and_undo_chunk,
    validate_comp_prefs
    validate_comp_prefs,
    prompt_reset_context
)

log = Logger.get_logger(__name__)
@@ -41,6 +42,9 @@ LOAD_PATH = os.path.join(PLUGINS_DIR, "load")
CREATE_PATH = os.path.join(PLUGINS_DIR, "create")
INVENTORY_PATH = os.path.join(PLUGINS_DIR, "inventory")

# Track whether the workfile tool is about to save
_about_to_save = False


class FusionLogHandler(logging.Handler):
    # Keep a reference to fusion's Print function (Remote Object)
@@ -70,7 +74,7 @@ class FusionHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost):
    name = "fusion"

    def install(self):
        """Install fusion-specific functionality of OpenPype.
        """Install fusion-specific functionality of AYON.

        This is where you install menus and register families, data
        and loaders into fusion.
@@ -104,8 +108,10 @@ class FusionHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost):

        # Register events
        register_event_callback("open", on_after_open)
        register_event_callback("workfile.save.before", before_workfile_save)
        register_event_callback("save", on_save)
        register_event_callback("new", on_new)
        register_event_callback("taskChanged", on_task_changed)

    # region workfile io api
    def has_unsaved_changes(self):
@@ -169,6 +175,19 @@ def on_save(event):
    comp = event["sender"]
    validate_comp_prefs(comp)

    # We are now starting the actual save directly
    global _about_to_save
    _about_to_save = False


def on_task_changed():
    global _about_to_save
    print(f"Task changed: {_about_to_save}")
    # TODO: Only do this if not headless
    if _about_to_save:
        # Let's prompt the user to update the context settings or not
        prompt_reset_context()


def on_after_open(event):
    comp = event["sender"]
@@ -177,7 +196,7 @@ def on_after_open(event):
    if any_outdated_containers():
        log.warning("Scene has outdated content.")

        # Find OpenPype menu to attach to
        # Find AYON menu to attach to
        from . import menu

        def _on_show_scene_inventory():
@@ -202,6 +221,28 @@ def on_after_open(event):
        dialog.setStyleSheet(load_stylesheet())


def before_workfile_save(event):
    # Due to Fusion's external python process design we can't really
    # detect whether the current Fusion environment matches the one the artists
    # expects it to be. For example, our pipeline python process might
    # have been shut down, and restarted - which will restart it to the
    # environment Fusion started with; not necessarily where the artist
    # is currently working.
    # The `_about_to_save` var is used to detect context changes when
    # saving into another asset. If we keep it False it will be ignored
    # as context change. As such, before we change tasks we will only
    # consider it the current filepath is within the currently known
    # AVALON_WORKDIR. This way we avoid false positives of thinking it's
    # saving to another context and instead sometimes just have false negatives
    # where we fail to show the "Update on task change" prompt.
    comp = get_current_comp()
    filepath = comp.GetAttrs()["COMPS_FileName"]
    workdir = os.environ.get("AYON_WORKDIR")
    if Path(workdir) in Path(filepath).parents:
        global _about_to_save
        _about_to_save = True


def ls():
    """List containers from active Fusion scene

@@ -326,9 +367,9 @@ class FusionEventThread(QtCore.QThread):


class FusionEventHandler(QtCore.QObject):
    """Emits OpenPype events based on Fusion events captured in a QThread.
    """Emits AYON events based on Fusion events captured in a QThread.

    This will emit the following OpenPype events based on Fusion actions:
    This will emit the following AYON events based on Fusion actions:
        save: Comp_Save, Comp_SaveAs
        open: Comp_Opened
        new: Comp_New
@@ -338,7 +379,6 @@ class FusionEventHandler(QtCore.QObject):
        >>> handler = FusionEventHandler(parent=window)
        >>> handler.start()

    """
    ACTION_IDS = [
        "Comp_Save",
@@ -374,7 +414,7 @@ class FusionEventHandler(QtCore.QObject):
        self._event_thread.stop()

    def _on_event(self, event):
        """Handle Fusion events to emit OpenPype events"""
        """Handle Fusion events to emit AYON events"""
        if not event:
            return

@@ -133,7 +133,7 @@ class GenericCreateSaver(Creator):
        formatting_data = deepcopy(data)

        # get frame padding from anatomy templates
        frame_padding = self.project_anatomy.templates["frame_padding"]
        frame_padding = self.project_anatomy.templates_obj.frame_padding

        # get output format
        ext = data["creator_attributes"]["image_format"]
@@ -1,6 +1,6 @@
### OpenPype deploy MenuScripts
### AYON deploy MenuScripts

Note that this `MenuScripts` is not an official Fusion folder.
OpenPype only uses this folder in `{fusion}/deploy/` to trigger the OpenPype menu actions.
AYON only uses this folder in `{fusion}/deploy/` to trigger the AYON menu actions.

They are used in the actions defined in `.fu` files in `{fusion}/deploy/Config`.
@@ -35,7 +35,7 @@ def main(env):
    log = Logger.get_logger(__name__)
    log.info(f"Registered host: {registered_host()}")

    menu.launch_openpype_menu()
    menu.launch_ayon_menu()

    # Initiate a QTimer to check if Fusion is still alive every X interval
    # If Fusion is not found - kill itself
@@ -19,7 +19,7 @@ class FusionCopyPrefsPrelaunch(PreLaunchHook):
    Prepares local Fusion profile directory, copies existing Fusion profile.
    This also sets FUSION MasterPrefs variable, which is used
    to apply Master.prefs file to override some Fusion profile settings to:
        - enable the OpenPype menu
        - enable the AYON menu
        - force Python 3 over Python 2
        - force English interface
    Master.prefs is defined in openpype/hosts/fusion/deploy/fusion_shared.prefs
@@ -13,7 +13,7 @@ from ayon_core.hosts.fusion import (

class FusionPrelaunch(PreLaunchHook):
    """
    Prepares OpenPype Fusion environment.
    Prepares AYON Fusion environment.
    Requires correct Python home variable to be defined in the environment
    settings for Fusion to point at a valid Python 3 build for Fusion.
    Python3 versions that are supported by Fusion:
@@ -1,7 +1,6 @@
from ayon_core.lib import NumberDef

from ayon_core.hosts.fusion.api.plugin import GenericCreateSaver
from ayon_core.hosts.fusion.api import get_current_comp


class CreateImageSaver(GenericCreateSaver):
@@ -1,6 +1,11 @@
from ayon_core.lib import EnumDef
from ayon_core.lib import (
    UILabelDef,
    NumberDef,
    EnumDef
)

from ayon_core.hosts.fusion.api.plugin import GenericCreateSaver
from ayon_core.hosts.fusion.api.lib import get_current_comp


class CreateSaver(GenericCreateSaver):
@@ -45,6 +50,7 @@ class CreateSaver(GenericCreateSaver):
            self._get_reviewable_bool(),
            self._get_frame_range_enum(),
            self._get_image_format_enum(),
            *self._get_custom_frame_range_attribute_defs()
        ]
        return attr_defs

@@ -53,6 +59,7 @@ class CreateSaver(GenericCreateSaver):
            "current_folder": "Current Folder context",
            "render_range": "From render in/out",
            "comp_range": "From composition timeline",
            "custom_range": "Custom frame range",
        }

        return EnumDef(
@@ -61,3 +68,82 @@ class CreateSaver(GenericCreateSaver):
            label="Frame range source",
            default=self.default_frame_range_option
        )

    @staticmethod
    def _get_custom_frame_range_attribute_defs() -> list:

        # Define custom frame range defaults based on current comp
        # timeline settings (if a comp is currently open)
        comp = get_current_comp()
        if comp is not None:
            attrs = comp.GetAttrs()
            frame_defaults = {
                "frameStart": int(attrs["COMPN_GlobalStart"]),
                "frameEnd": int(attrs["COMPN_GlobalEnd"]),
                "handleStart": int(
                    attrs["COMPN_RenderStart"] - attrs["COMPN_GlobalStart"]
                ),
                "handleEnd": int(
                    attrs["COMPN_GlobalEnd"] - attrs["COMPN_RenderEnd"]
                ),
            }
        else:
            frame_defaults = {
                "frameStart": 1001,
                "frameEnd": 1100,
                "handleStart": 0,
                "handleEnd": 0
            }

        return [
            UILabelDef(
                label="<br><b>Custom Frame Range</b><br>"
                      "<i>only used with 'Custom frame range' source</i>"
            ),
            NumberDef(
                "custom_frameStart",
                label="Frame Start",
                default=frame_defaults["frameStart"],
                minimum=0,
                decimals=0,
                tooltip=(
                    "Set the start frame for the export.\n"
                    "Only used if frame range source is 'Custom frame range'."
                )
            ),
            NumberDef(
                "custom_frameEnd",
                label="Frame End",
                default=frame_defaults["frameEnd"],
                minimum=0,
                decimals=0,
                tooltip=(
                    "Set the end frame for the export.\n"
                    "Only used if frame range source is 'Custom frame range'."
                )
            ),
            NumberDef(
                "custom_handleStart",
                label="Handle Start",
                default=frame_defaults["handleStart"],
                minimum=0,
                decimals=0,
                tooltip=(
                    "Set the start handles for the export, this will be "
                    "added before the start frame.\n"
                    "Only used if frame range source is 'Custom frame range'."
                )
            ),
            NumberDef(
                "custom_handleEnd",
                label="Handle End",
                default=frame_defaults["handleEnd"],
                minimum=0,
                decimals=0,
                tooltip=(
                    "Set the end handles for the export, this will be added "
                    "after the end frame.\n"
                    "Only used if frame range source is 'Custom frame range'."
                )
            )
        ]
@ -57,6 +57,14 @@ class CollectInstanceData(pyblish.api.InstancePlugin):
|
|||
start_with_handle = comp_start
|
||||
end_with_handle = comp_end
|
||||
|
||||
if frame_range_source == "custom_range":
|
||||
start = int(instance.data["custom_frameStart"])
|
||||
end = int(instance.data["custom_frameEnd"])
|
||||
handle_start = int(instance.data["custom_handleStart"])
|
||||
handle_end = int(instance.data["custom_handleEnd"])
|
||||
start_with_handle = start - handle_start
|
||||
end_with_handle = end + handle_end
|
||||
|
||||
frame = instance.data["creator_attributes"].get("frame")
|
||||
# explicitly publishing only single frame
|
||||
if frame is not None:
|
||||
|
|
|
|||
|
|
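A minimal sketch of how the custom frame range and handle values above combine into the publish range; the instance values below are made up for illustration and are not real publish data.

# Illustrative only: mirrors the "custom_range" handle math in CollectInstanceData above.
instance_data = {
    "custom_frameStart": 1001,
    "custom_frameEnd": 1100,
    "custom_handleStart": 5,
    "custom_handleEnd": 5,
}
start_with_handle = instance_data["custom_frameStart"] - instance_data["custom_handleStart"]  # 996
end_with_handle = instance_data["custom_frameEnd"] + instance_data["custom_handleEnd"]        # 1105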
@ -1,10 +1,12 @@
|
|||
from .addon import (
|
||||
HARMONY_HOST_DIR,
|
||||
HARMONY_ADDON_ROOT,
|
||||
HarmonyAddon,
|
||||
get_launch_script_path,
|
||||
)
|
||||
|
||||
|
||||
__all__ = (
|
||||
"HARMONY_HOST_DIR",
|
||||
"HARMONY_ADDON_ROOT",
|
||||
"HarmonyAddon",
|
||||
"get_launch_script_path",
|
||||
)
|
||||
|
|
|
|||
|
|
@ -1,7 +1,7 @@
|
|||
import os
|
||||
from ayon_core.addon import AYONAddon, IHostAddon
|
||||
|
||||
HARMONY_HOST_DIR = os.path.dirname(os.path.abspath(__file__))
|
||||
HARMONY_ADDON_ROOT = os.path.dirname(os.path.abspath(__file__))
|
||||
|
||||
|
||||
class HarmonyAddon(AYONAddon, IHostAddon):
|
||||
|
|
@ -11,10 +11,23 @@ class HarmonyAddon(AYONAddon, IHostAddon):
|
|||
def add_implementation_envs(self, env, _app):
|
||||
"""Modify environments to contain all required for implementation."""
|
||||
openharmony_path = os.path.join(
|
||||
HARMONY_HOST_DIR, "vendor", "OpenHarmony"
|
||||
HARMONY_ADDON_ROOT, "vendor", "OpenHarmony"
|
||||
)
|
||||
# TODO check if is already set? What to do if is already set?
|
||||
env["LIB_OPENHARMONY_PATH"] = openharmony_path
|
||||
|
||||
def get_workfile_extensions(self):
|
||||
return [".zip"]
|
||||
|
||||
def get_launch_hook_paths(self, app):
|
||||
if app.host_name != self.host_name:
|
||||
return []
|
||||
return [
|
||||
os.path.join(HARMONY_ADDON_ROOT, "hooks")
|
||||
]
|
||||
|
||||
|
||||
def get_launch_script_path():
|
||||
return os.path.join(
|
||||
HARMONY_ADDON_ROOT, "api", "launch_script.py"
|
||||
)
|
||||
|
|
|
|||
|
|
@ -204,7 +204,7 @@ class CreateComposite(harmony.Creator):
|
|||
|
||||
name = "compositeDefault"
|
||||
label = "Composite"
|
||||
product_type = "mindbender.template"
|
||||
product_type = "template"
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super(CreateComposite, self).__init__(*args, **kwargs)
|
||||
|
|
@ -221,7 +221,7 @@ class CreateRender(harmony.Creator):
|
|||
|
||||
name = "writeDefault"
|
||||
label = "Write"
|
||||
product_type = "mindbender.imagesequence"
|
||||
product_type = "render"
|
||||
node_type = "WRITE"
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
|
|
@ -304,7 +304,7 @@ class ExtractImage(pyblish.api.InstancePlugin):
|
|||
label = "Extract Image Sequence"
|
||||
order = pyblish.api.ExtractorOrder
|
||||
hosts = ["harmony"]
|
||||
families = ["mindbender.imagesequence"]
|
||||
families = ["render"]
|
||||
|
||||
def process(self, instance):
|
||||
project_path = harmony.send(
|
||||
|
|
@ -582,8 +582,16 @@ class ImageSequenceLoader(load.LoaderPlugin):
|
|||
"""Load images
|
||||
Stores the imported asset in a container named after the asset.
|
||||
"""
|
||||
product_types = {"mindbender.imagesequence"}
|
||||
product_types = {
|
||||
"shot",
|
||||
"render",
|
||||
"image",
|
||||
"plate",
|
||||
"reference",
|
||||
"review",
|
||||
}
|
||||
representations = ["*"]
|
||||
extensions = {"jpeg", "png", "jpg"}
|
||||
|
||||
def load(self, context, name=None, namespace=None, data=None):
|
||||
files = []
|
||||
|
|
|
|||
93
client/ayon_core/hosts/harmony/api/launch_script.py
Normal file
|
|
@ -0,0 +1,93 @@
|
|||
"""Script wraps launch mechanism of Harmony implementations.
|
||||
|
||||
Arguments passed to the script are passed to launch function in host
|
||||
implementation. In all cases it requires the host app executable and may contain a
|
||||
workfile path or other arguments.
|
||||
"""
|
||||
|
||||
import os
|
||||
import sys
|
||||
|
||||
from ayon_core.hosts.harmony.api.lib import main as host_main
|
||||
|
||||
# Get current file to locate start point of sys.argv
|
||||
CURRENT_FILE = os.path.abspath(__file__)
|
||||
|
||||
|
||||
def show_error_messagebox(title, message, detail_message=None):
|
||||
"""Function will show message and process ends after closing it."""
|
||||
from qtpy import QtWidgets, QtCore
|
||||
from ayon_core import style
|
||||
|
||||
app = QtWidgets.QApplication([])
|
||||
app.setStyleSheet(style.load_stylesheet())
|
||||
|
||||
msgbox = QtWidgets.QMessageBox()
|
||||
msgbox.setWindowTitle(title)
|
||||
msgbox.setText(message)
|
||||
|
||||
if detail_message:
|
||||
msgbox.setDetailedText(detail_message)
|
||||
|
||||
msgbox.setWindowModality(QtCore.Qt.ApplicationModal)
|
||||
msgbox.show()
|
||||
|
||||
sys.exit(app.exec_())
|
||||
|
||||
|
||||
def on_invalid_args(script_not_found):
|
||||
"""Show to user message box saying that something went wrong.
|
||||
|
||||
Tell user that arguments to launch implementation are invalid with
|
||||
arguments details.
|
||||
|
||||
Args:
|
||||
script_not_found (bool): Use different message based on this value.
|
||||
"""
|
||||
|
||||
title = "Invalid arguments"
|
||||
joined_args = ", ".join("\"{}\"".format(arg) for arg in sys.argv)
|
||||
if script_not_found:
|
||||
submsg = "Where couldn't find script path:\n\"{}\""
|
||||
else:
|
||||
submsg = "Expected Host executable after script path:\n\"{}\""
|
||||
|
||||
message = "BUG: Got invalid arguments so can't launch Host application."
|
||||
detail_message = "Process was launched with arguments:\n{}\n\n{}".format(
|
||||
joined_args,
|
||||
submsg.format(CURRENT_FILE)
|
||||
)
|
||||
|
||||
show_error_messagebox(title, message, detail_message)
|
||||
|
||||
|
||||
def main(argv):
|
||||
# Modify current file path to find match in sys.argv which may be different
|
||||
# on windows (different letter cases and slashes).
|
||||
modified_current_file = CURRENT_FILE.replace("\\", "/").lower()
|
||||
|
||||
# Create a copy of sys argv
|
||||
sys_args = list(argv)
|
||||
after_script_idx = None
|
||||
# Find script path in sys.argv to know index of argv where host
|
||||
# executable should be.
|
||||
for idx, item in enumerate(sys_args):
|
||||
if item.replace("\\", "/").lower() == modified_current_file:
|
||||
after_script_idx = idx + 1
|
||||
break
|
||||
|
||||
# Validate that there is at least one argument after script path
|
||||
launch_args = None
|
||||
if after_script_idx is not None:
|
||||
launch_args = sys_args[after_script_idx:]
|
||||
|
||||
if launch_args:
|
||||
# Launch host implementation
|
||||
host_main(*launch_args)
|
||||
else:
|
||||
# Show message box
|
||||
on_invalid_args(after_script_idx is None)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main(sys.argv)
|
||||
|
|
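A hedged sketch of the argv layout that main() above expects; the executable and workfile paths are invented for illustration and only the ordering matters.

# Hypothetical process arguments (paths are made up):
#   <ayon launcher> run .../harmony/api/launch_script.py
#       "C:/Toon Boom/HarmonyPremium.exe" "shot010_layout_v001.zip"
# main() locates launch_script.py inside sys.argv and forwards everything after it
# (the host executable plus an optional workfile) to
# ayon_core.hosts.harmony.api.lib.main().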
@ -1,5 +1,6 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""Utility functions used for Avalon - Harmony integration."""
|
||||
import platform
|
||||
import subprocess
|
||||
import threading
|
||||
import os
|
||||
|
|
@ -14,15 +15,16 @@ import json
|
|||
import signal
|
||||
import time
|
||||
from uuid import uuid4
|
||||
from qtpy import QtWidgets, QtCore, QtGui
|
||||
import collections
|
||||
|
||||
from .server import Server
|
||||
from qtpy import QtWidgets, QtCore, QtGui
|
||||
|
||||
from ayon_core.lib import is_using_ayon_console
|
||||
from ayon_core.tools.stdout_broker.app import StdOutBroker
|
||||
from ayon_core.tools.utils import host_tools
|
||||
from ayon_core import style
|
||||
from ayon_core.lib.applications import get_non_python_host_kwargs
|
||||
|
||||
from .server import Server
|
||||
|
||||
# Setup logging.
|
||||
log = logging.getLogger(__name__)
|
||||
|
|
@ -324,7 +326,18 @@ def launch_zip_file(filepath):
|
|||
return
|
||||
|
||||
print("Launching {}".format(scene_path))
|
||||
kwargs = get_non_python_host_kwargs({}, False)
|
||||
# QUESTION Could we use 'run_detached_process' from 'ayon_core.lib'?
|
||||
kwargs = {}
|
||||
if (
|
||||
platform.system().lower() == "windows"
|
||||
and not is_using_ayon_console()
|
||||
):
|
||||
kwargs.update({
|
||||
"creationflags": subprocess.CREATE_NO_WINDOW,
|
||||
"stdout": subprocess.DEVNULL,
|
||||
"stderr": subprocess.DEVNULL
|
||||
})
|
||||
|
||||
process = subprocess.Popen(
|
||||
[ProcessContext.application_path, scene_path],
|
||||
**kwargs
|
||||
|
|
@ -555,7 +568,7 @@ def save_scene():
|
|||
"""Save the Harmony scene safely.
|
||||
|
||||
The built-in (to Avalon) background zip and moving of the Harmony scene
|
||||
folder, interfers with server/client communication by sending two requests
|
||||
folder, interferes with server/client communication by sending two requests
|
||||
at the same time. This only happens when sending "scene.saveAll()". This
|
||||
method prevents this double request and safely saves the scene.
|
||||
|
||||
|
|
|
|||
|
|
@ -15,13 +15,13 @@ from ayon_core.pipeline import (
|
|||
from ayon_core.pipeline.load import get_outdated_containers
|
||||
from ayon_core.pipeline.context_tools import get_current_project_folder
|
||||
|
||||
from ayon_core.hosts.harmony import HARMONY_HOST_DIR
|
||||
from ayon_core.hosts.harmony import HARMONY_ADDON_ROOT
|
||||
import ayon_core.hosts.harmony.api as harmony
|
||||
|
||||
|
||||
log = logging.getLogger("ayon_core.hosts.harmony")
|
||||
|
||||
PLUGINS_DIR = os.path.join(HARMONY_HOST_DIR, "plugins")
|
||||
PLUGINS_DIR = os.path.join(HARMONY_ADDON_ROOT, "plugins")
|
||||
PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish")
|
||||
LOAD_PATH = os.path.join(PLUGINS_DIR, "load")
|
||||
CREATE_PATH = os.path.join(PLUGINS_DIR, "create")
|
||||
|
|
|
|||
91
client/ayon_core/hosts/harmony/hooks/pre_launch_args.py
Normal file
|
|
@ -0,0 +1,91 @@
|
|||
import os
|
||||
import platform
|
||||
import subprocess
|
||||
|
||||
from ayon_core.lib import (
|
||||
get_ayon_launcher_args,
|
||||
is_using_ayon_console,
|
||||
)
|
||||
from ayon_core.lib.applications import (
|
||||
PreLaunchHook,
|
||||
LaunchTypes,
|
||||
)
|
||||
from ayon_core.hosts.harmony import get_launch_script_path
|
||||
|
||||
|
||||
def get_launch_kwargs(kwargs):
|
||||
"""Explicit setting of kwargs for Popen for Harmony.
|
||||
|
||||
Expected behavior
|
||||
- ayon_console opens window with logs
|
||||
- ayon has stdout/stderr available for capturing
|
||||
|
||||
Args:
|
||||
kwargs (Union[dict, None]): Current kwargs or None.
|
||||
|
||||
"""
|
||||
if kwargs is None:
|
||||
kwargs = {}
|
||||
|
||||
if platform.system().lower() != "windows":
|
||||
return kwargs
|
||||
|
||||
if is_using_ayon_console():
|
||||
kwargs.update({
|
||||
"creationflags": subprocess.CREATE_NEW_CONSOLE
|
||||
})
|
||||
else:
|
||||
kwargs.update({
|
||||
"creationflags": subprocess.CREATE_NO_WINDOW,
|
||||
"stdout": subprocess.DEVNULL,
|
||||
"stderr": subprocess.DEVNULL
|
||||
})
|
||||
return kwargs
|
||||
|
||||
|
||||
class HarmonyPrelaunchHook(PreLaunchHook):
|
||||
"""Launch arguments preparation.
|
||||
|
||||
Hook adds python executable and script path to Harmony implementation
|
||||
before the Harmony executable and adds the last workfile path to the launch arguments.
|
||||
|
||||
Existence of the last workfile is checked. If the workfile does not exist, it tries
|
||||
to copy templated workfile from predefined path.
|
||||
"""
|
||||
app_groups = {"harmony"}
|
||||
|
||||
order = 20
|
||||
launch_types = {LaunchTypes.local}
|
||||
|
||||
def execute(self):
|
||||
# Pop executable
|
||||
executable_path = self.launch_context.launch_args.pop(0)
|
||||
|
||||
# Pop rest of launch arguments - There should not be other arguments!
|
||||
remainders = []
|
||||
while self.launch_context.launch_args:
|
||||
remainders.append(self.launch_context.launch_args.pop(0))
|
||||
|
||||
script_path = get_launch_script_path()
|
||||
|
||||
new_launch_args = get_ayon_launcher_args(
|
||||
"run", script_path, executable_path
|
||||
)
|
||||
# Add workfile path if exists
|
||||
workfile_path = self.data["last_workfile_path"]
|
||||
if (
|
||||
self.data.get("start_last_workfile")
|
||||
and workfile_path
|
||||
and os.path.exists(workfile_path)
|
||||
):
|
||||
new_launch_args.append(workfile_path)
|
||||
|
||||
# Append as whole list as these arguments should not be separated
|
||||
self.launch_context.launch_args.append(new_launch_args)
|
||||
|
||||
if remainders:
|
||||
self.launch_context.launch_args.extend(remainders)
|
||||
|
||||
self.launch_context.kwargs = get_launch_kwargs(
|
||||
self.launch_context.kwargs
|
||||
)
|
||||
|
|
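A small sketch of what get_launch_kwargs() above returns on Windows for the two launch modes; this only restates the branches of the function and adds nothing beyond them.

# With the console launcher (is_using_ayon_console() is True):
#   {"creationflags": subprocess.CREATE_NEW_CONSOLE}
# Without it, the window is hidden and output is discarded:
#   {"creationflags": subprocess.CREATE_NO_WINDOW,
#    "stdout": subprocess.DEVNULL, "stderr": subprocess.DEVNULL}
# On non-Windows platforms the incoming kwargs are returned unchanged.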
@ -21,12 +21,12 @@ class CreateFarmRender(plugin.Creator):
|
|||
path = "render/{0}/{0}.".format(node.split("/")[-1])
|
||||
harmony.send(
|
||||
{
|
||||
"function": f"PypeHarmony.Creators.CreateRender.create",
|
||||
"function": "PypeHarmony.Creators.CreateRender.create",
|
||||
"args": [node, path]
|
||||
})
|
||||
harmony.send(
|
||||
{
|
||||
"function": f"PypeHarmony.color",
|
||||
"function": "PypeHarmony.color",
|
||||
"args": [[0.9, 0.75, 0.3, 1.0]]
|
||||
}
|
||||
)
|
||||
|
|
|
|||
|
|
@ -50,11 +50,11 @@ class ImportTemplateLoader(load.LoaderPlugin):
|
|||
self.__class__.__name__
|
||||
)
|
||||
|
||||
def update(self, container, context):
|
||||
pass
|
||||
def update(self, container, context):
|
||||
pass
|
||||
|
||||
def remove(self, container):
|
||||
pass
|
||||
def remove(self, container):
|
||||
pass
|
||||
|
||||
|
||||
class ImportWorkfileLoader(ImportTemplateLoader):
|
||||
|
|
|
|||
|
|
@ -1,8 +1,8 @@
|
|||
import os
|
||||
import pyblish.api
|
||||
|
||||
import pyblish.api
|
||||
|
||||
|
||||
class CollectAudio(pyblish.api.InstancePlugin):
|
||||
"""
|
||||
Collect relative path for audio file to instance.
|
||||
|
|
|
|||
|
|
@ -17,7 +17,7 @@ class CollectScene(pyblish.api.ContextPlugin):
|
|||
"""Plugin entry point."""
|
||||
result = harmony.send(
|
||||
{
|
||||
f"function": "PypeHarmony.getSceneSettings",
|
||||
"function": "PypeHarmony.getSceneSettings",
|
||||
"args": []}
|
||||
)["result"]
|
||||
|
||||
|
|
@ -62,7 +62,7 @@ class CollectScene(pyblish.api.ContextPlugin):
|
|||
|
||||
result = harmony.send(
|
||||
{
|
||||
f"function": "PypeHarmony.getVersion",
|
||||
"function": "PypeHarmony.getVersion",
|
||||
"args": []}
|
||||
)["result"]
|
||||
context.data["harmonyVersion"] = "{}.{}".format(result[0], result[1])
|
||||
|
|
|
|||
|
|
@ -1,10 +1,12 @@
|
|||
import os
|
||||
|
||||
import hiero.core.events
|
||||
|
||||
from ayon_core.lib import Logger, register_event_callback
|
||||
|
||||
from .lib import (
|
||||
sync_avalon_data_to_workfile,
|
||||
launch_workfiles_app,
|
||||
selection_changed_timeline,
|
||||
before_project_save,
|
||||
)
|
||||
from .tags import add_tags_to_workfile
|
||||
|
|
|
|||
|
|
@ -166,7 +166,7 @@ def get_current_track(sequence, name, audio=False):
|
|||
Creates new if none is found.
|
||||
|
||||
Args:
|
||||
sequence (hiero.core.Sequence): hiero sequene object
|
||||
sequence (hiero.core.Sequence): hiero sequence object
|
||||
name (str): name of track we want to return
|
||||
audio (bool)[optional]: switch to AudioTrack
|
||||
|
||||
|
|
@ -632,7 +632,9 @@ def sync_avalon_data_to_workfile():
|
|||
project_name = get_current_project_name()
|
||||
|
||||
anatomy = Anatomy(project_name)
|
||||
work_template = anatomy.templates["work"]["path"]
|
||||
work_template = anatomy.get_template_item(
|
||||
"work", "default", "path"
|
||||
)
|
||||
work_root = anatomy.root_value_for_template(work_template)
|
||||
active_project_root = (
|
||||
os.path.join(work_root, project_name)
|
||||
|
|
@ -825,7 +827,7 @@ class PublishAction(QtWidgets.QAction):
|
|||
# root_node = hiero.core.nuke.RootNode()
|
||||
#
|
||||
# anatomy = Anatomy(get_current_project_name())
|
||||
# work_template = anatomy.templates["work"]["path"]
|
||||
# work_template = anatomy.get_template_item("work", "default", "path")
|
||||
# root_path = anatomy.root_value_for_template(work_template)
|
||||
#
|
||||
# nuke_script.addNode(root_node)
|
||||
|
|
@ -844,8 +846,8 @@ def create_nuke_workfile_clips(nuke_workfiles, seq=None):
|
|||
[{
|
||||
'path': 'P:/Jakub_testy_pipeline/test_v01.nk',
|
||||
'name': 'test',
|
||||
'handleStart': 15, # added asymetrically to handles
|
||||
'handleEnd': 10, # added asymetrically to handles
|
||||
'handleStart': 15, # added asymmetrically to handles
|
||||
'handleEnd': 10, # added asymmetrically to handles
|
||||
"clipIn": 16,
|
||||
"frameStart": 991,
|
||||
"frameEnd": 1023,
|
||||
|
|
@ -1190,7 +1192,7 @@ def get_sequence_pattern_and_padding(file):
|
|||
|
||||
Return:
|
||||
string: any matching sequence pattern
|
||||
int: padding of sequnce numbering
|
||||
int: padding of sequence numbering
|
||||
"""
|
||||
foundall = re.findall(
|
||||
r"(#+)|(%\d+d)|(?<=[^a-zA-Z0-9])(\d+)(?=\.\w+$)", file)
|
||||
|
|
|
|||
|
|
@ -90,7 +90,7 @@ def apply_transition(otio_track, otio_item, track):
|
|||
if isinstance(track, hiero.core.AudioTrack):
|
||||
kind = 'Audio'
|
||||
|
||||
# Gather TrackItems involved in trasition
|
||||
# Gather TrackItems involved in transition
|
||||
item_in, item_out = get_neighboring_trackitems(
|
||||
otio_item,
|
||||
otio_track,
|
||||
|
|
@ -101,7 +101,7 @@ def apply_transition(otio_track, otio_item, track):
|
|||
if transition_type == 'dissolve':
|
||||
transition_func = getattr(
|
||||
hiero.core.Transition,
|
||||
'create{kind}DissolveTransition'.format(kind=kind)
|
||||
"create{kind}DissolveTransition".format(kind=kind)
|
||||
)
|
||||
|
||||
try:
|
||||
|
|
@ -109,7 +109,7 @@ def apply_transition(otio_track, otio_item, track):
|
|||
item_in,
|
||||
item_out,
|
||||
otio_item.in_offset.value,
|
||||
otio_item.out_offset.value
|
||||
otio_item.out_offset.value,
|
||||
)
|
||||
|
||||
# Catch error raised if transition is bigger than TrackItem source
|
||||
|
|
@ -134,7 +134,7 @@ def apply_transition(otio_track, otio_item, track):
|
|||
|
||||
transition = transition_func(
|
||||
item_out,
|
||||
otio_item.out_offset.value
|
||||
otio_item.out_offset.value,
|
||||
)
|
||||
|
||||
elif transition_type == 'fade_out':
|
||||
|
|
@ -183,9 +183,7 @@ def prep_url(url_in):
|
|||
def create_offline_mediasource(otio_clip, path=None):
|
||||
global _otio_old
|
||||
|
||||
hiero_rate = hiero.core.TimeBase(
|
||||
otio_clip.source_range.start_time.rate
|
||||
)
|
||||
hiero_rate = hiero.core.TimeBase(otio_clip.source_range.start_time.rate)
|
||||
|
||||
try:
|
||||
legal_media_refs = (
|
||||
|
|
@ -212,7 +210,7 @@ def create_offline_mediasource(otio_clip, path=None):
|
|||
source_range.start_time.value,
|
||||
source_range.duration.value,
|
||||
hiero_rate,
|
||||
source_range.start_time.value
|
||||
source_range.start_time.value,
|
||||
)
|
||||
|
||||
return media
|
||||
|
|
@ -385,7 +383,8 @@ def create_trackitem(playhead, track, otio_clip, clip):
|
|||
# Only reverse effect can be applied here
|
||||
if abs(time_scalar) == 1.:
|
||||
trackitem.setPlaybackSpeed(
|
||||
trackitem.playbackSpeed() * time_scalar)
|
||||
trackitem.playbackSpeed() * time_scalar
|
||||
)
|
||||
|
||||
elif isinstance(effect, otio.schema.FreezeFrame):
|
||||
# For freeze frame, playback speed must be set after range
|
||||
|
|
@ -397,28 +396,21 @@ def create_trackitem(playhead, track, otio_clip, clip):
|
|||
source_in = source_range.end_time_inclusive().value
|
||||
|
||||
timeline_in = playhead + source_out
|
||||
timeline_out = (
|
||||
timeline_in +
|
||||
source_range.duration.value
|
||||
) - 1
|
||||
timeline_out = (timeline_in + source_range.duration.value) - 1
|
||||
else:
|
||||
# Normal playback speed
|
||||
source_in = source_range.start_time.value
|
||||
source_out = source_range.end_time_inclusive().value
|
||||
|
||||
timeline_in = playhead
|
||||
timeline_out = (
|
||||
timeline_in +
|
||||
source_range.duration.value
|
||||
) - 1
|
||||
timeline_out = (timeline_in + source_range.duration.value) - 1
|
||||
|
||||
# Set source and timeline in/out points
|
||||
trackitem.setTimes(
|
||||
timeline_in,
|
||||
timeline_out,
|
||||
source_in,
|
||||
source_out
|
||||
|
||||
source_out,
|
||||
)
|
||||
|
||||
# Apply playback speed for freeze frames
|
||||
|
|
@ -435,7 +427,8 @@ def create_trackitem(playhead, track, otio_clip, clip):
|
|||
|
||||
|
||||
def build_sequence(
|
||||
otio_timeline, project=None, sequence=None, track_kind=None):
|
||||
otio_timeline, project=None, sequence=None, track_kind=None
|
||||
):
|
||||
if project is None:
|
||||
if sequence:
|
||||
project = sequence.project()
|
||||
|
|
@ -509,10 +502,7 @@ def build_sequence(
|
|||
|
||||
# Create TrackItem
|
||||
trackitem = create_trackitem(
|
||||
playhead,
|
||||
track,
|
||||
otio_clip,
|
||||
clip
|
||||
playhead, track, otio_clip, clip
|
||||
)
|
||||
|
||||
# Add markers
|
||||
|
|
|
|||
|
|
@ -25,7 +25,7 @@ def get_reformated_path(path, padded=True):
|
|||
path (str): path url or simple file name
|
||||
|
||||
Returns:
|
||||
type: string with reformated path
|
||||
type: string with reformatted path
|
||||
|
||||
Example:
|
||||
get_reformated_path("plate.[0001-1008].exr") > plate.%04d.exr
|
||||
|
|
|
|||
|
|
@ -45,7 +45,7 @@ class CreatorWidget(QtWidgets.QDialog):
|
|||
| QtCore.Qt.WindowCloseButtonHint
|
||||
| QtCore.Qt.WindowStaysOnTopHint
|
||||
)
|
||||
self.setWindowTitle(name or "Pype Creator Input")
|
||||
self.setWindowTitle(name or "AYON Creator Input")
|
||||
self.resize(500, 700)
|
||||
|
||||
# Where inputs and labels are set
|
||||
|
|
@ -449,7 +449,6 @@ class ClipLoader:
|
|||
repr = self.context["representation"]
|
||||
repr_cntx = repr["context"]
|
||||
folder_path = self.context["folder"]["path"]
|
||||
folder_name = self.context["folder"]["name"]
|
||||
product_name = self.context["product"]["name"]
|
||||
representation = repr["name"]
|
||||
self.data["clip_name"] = self.clip_name_template.format(**repr_cntx)
|
||||
|
|
@ -906,16 +905,16 @@ class PublishClip:
|
|||
"hierarchyData": hierarchy_formatting_data,
|
||||
"productName": self.product_name,
|
||||
"productType": self.product_type,
|
||||
"families": [self.product_type, self.data["family"]]
|
||||
"families": [self.product_type, self.data["productType"]]
|
||||
}
|
||||
|
||||
def _convert_to_entity(self, type, template):
|
||||
def _convert_to_entity(self, src_type, template):
|
||||
""" Converting input key to key with type. """
|
||||
# convert to entity type
|
||||
entity_type = self.types.get(type, None)
|
||||
folder_type = self.types.get(src_type, None)
|
||||
|
||||
assert entity_type, "Missing entity type for `{}`".format(
|
||||
type
|
||||
assert folder_type, "Missing folder type for `{}`".format(
|
||||
src_type
|
||||
)
|
||||
|
||||
# first collect formatting data to use for formatting template
|
||||
|
|
@ -926,7 +925,7 @@ class PublishClip:
|
|||
formatting_data[_k] = value
|
||||
|
||||
return {
|
||||
"entity_type": entity_type,
|
||||
"folder_type": folder_type,
|
||||
"entity_name": template.format(
|
||||
**formatting_data
|
||||
)
|
||||
|
|
|
|||
|
|
@ -3,9 +3,11 @@
|
|||
# Note: This only prints the text data that is visible in the active Spreadsheet View.
|
||||
# If you've filtered text, only the visible text will be printed to the CSV file
|
||||
# Usage: Copy to ~/.hiero/Python/StartupUI
|
||||
import os
|
||||
import csv
|
||||
|
||||
import hiero.core.events
|
||||
import hiero.ui
|
||||
import os, csv
|
||||
try:
|
||||
from PySide.QtGui import *
|
||||
from PySide.QtCore import *
|
||||
|
|
|
|||
|
|
@ -641,7 +641,7 @@ def _setStatus(self, status):
|
|||
global gStatusTags
|
||||
|
||||
# Get a valid Tag object from the Global list of statuses
|
||||
if not status in gStatusTags.keys():
|
||||
if status not in gStatusTags.keys():
|
||||
print("Status requested was not a valid Status string.")
|
||||
return
|
||||
|
||||
|
|
|
|||
|
|
@ -90,7 +90,7 @@ def apply_transition(otio_track, otio_item, track):
|
|||
kind = "Audio"
|
||||
|
||||
try:
|
||||
# Gather TrackItems involved in trasition
|
||||
# Gather TrackItems involved in transition
|
||||
item_in, item_out = get_neighboring_trackitems(
|
||||
otio_item,
|
||||
otio_track,
|
||||
|
|
@ -101,14 +101,14 @@ def apply_transition(otio_track, otio_item, track):
|
|||
if transition_type == "dissolve":
|
||||
transition_func = getattr(
|
||||
hiero.core.Transition,
|
||||
'create{kind}DissolveTransition'.format(kind=kind)
|
||||
"create{kind}DissolveTransition".format(kind=kind)
|
||||
)
|
||||
|
||||
transition = transition_func(
|
||||
item_in,
|
||||
item_out,
|
||||
otio_item.in_offset.value,
|
||||
otio_item.out_offset.value
|
||||
otio_item.out_offset.value,
|
||||
)
|
||||
|
||||
elif transition_type == "fade_in":
|
||||
|
|
@ -116,20 +116,14 @@ def apply_transition(otio_track, otio_item, track):
|
|||
hiero.core.Transition,
|
||||
'create{kind}FadeInTransition'.format(kind=kind)
|
||||
)
|
||||
transition = transition_func(
|
||||
item_out,
|
||||
otio_item.out_offset.value
|
||||
)
|
||||
transition = transition_func(item_out, otio_item.out_offset.value)
|
||||
|
||||
elif transition_type == "fade_out":
|
||||
transition_func = getattr(
|
||||
hiero.core.Transition,
|
||||
'create{kind}FadeOutTransition'.format(kind=kind)
|
||||
)
|
||||
transition = transition_func(
|
||||
item_in,
|
||||
otio_item.in_offset.value
|
||||
"create{kind}FadeOutTransition".format(kind=kind)
|
||||
)
|
||||
transition = transition_func(item_in, otio_item.in_offset.value)
|
||||
|
||||
else:
|
||||
# Unknown transition
|
||||
|
|
@ -138,11 +132,10 @@ def apply_transition(otio_track, otio_item, track):
|
|||
# Apply transition to track
|
||||
track.addTransition(transition)
|
||||
|
||||
except Exception, e:
|
||||
except Exception as e:
|
||||
sys.stderr.write(
|
||||
'Unable to apply transition "{t}": "{e}"\n'.format(
|
||||
t=otio_item,
|
||||
e=e
|
||||
t=otio_item, e=e
|
||||
)
|
||||
)
|
||||
|
||||
|
|
@ -153,18 +146,14 @@ def prep_url(url_in):
|
|||
if url.startswith("file://localhost/"):
|
||||
return url.replace("file://localhost/", "")
|
||||
|
||||
url = '{url}'.format(
|
||||
sep=url.startswith(os.sep) and "" or os.sep,
|
||||
url=url.startswith(os.sep) and url[1:] or url
|
||||
)
|
||||
if url.startswith(os.sep):
|
||||
url = url[1:]
|
||||
|
||||
return url
|
||||
|
||||
|
||||
def create_offline_mediasource(otio_clip, path=None):
|
||||
hiero_rate = hiero.core.TimeBase(
|
||||
otio_clip.source_range.start_time.rate
|
||||
)
|
||||
hiero_rate = hiero.core.TimeBase(otio_clip.source_range.start_time.rate)
|
||||
|
||||
if isinstance(otio_clip.media_reference, otio.schema.ExternalReference):
|
||||
source_range = otio_clip.available_range()
|
||||
|
|
@ -180,7 +169,7 @@ def create_offline_mediasource(otio_clip, path=None):
|
|||
source_range.start_time.value,
|
||||
source_range.duration.value,
|
||||
hiero_rate,
|
||||
source_range.start_time.value
|
||||
source_range.start_time.value,
|
||||
)
|
||||
|
||||
return media
|
||||
|
|
@ -203,7 +192,7 @@ marker_color_map = {
|
|||
"MAGENTA": "Magenta",
|
||||
"BLACK": "Blue",
|
||||
"WHITE": "Green",
|
||||
"MINT": "Cyan"
|
||||
"MINT": "Cyan",
|
||||
}
|
||||
|
||||
|
||||
|
|
@ -254,12 +243,6 @@ def add_markers(otio_item, hiero_item, tagsbin):
|
|||
if _tag is None:
|
||||
_tag = hiero.core.Tag(marker_color_map[marker.color])
|
||||
|
||||
start = marker.marked_range.start_time.value
|
||||
end = (
|
||||
marker.marked_range.start_time.value +
|
||||
marker.marked_range.duration.value
|
||||
)
|
||||
|
||||
tag = hiero_item.addTag(_tag)
|
||||
tag.setName(marker.name or marker_color_map[marker_color])
|
||||
|
||||
|
|
@ -275,12 +258,12 @@ def create_track(otio_track, tracknum, track_kind):
|
|||
# Create a Track
|
||||
if otio_track.kind == otio.schema.TrackKind.Video:
|
||||
track = hiero.core.VideoTrack(
|
||||
otio_track.name or 'Video{n}'.format(n=tracknum)
|
||||
otio_track.name or "Video{n}".format(n=tracknum)
|
||||
)
|
||||
|
||||
else:
|
||||
track = hiero.core.AudioTrack(
|
||||
otio_track.name or 'Audio{n}'.format(n=tracknum)
|
||||
otio_track.name or "Audio{n}".format(n=tracknum)
|
||||
)
|
||||
|
||||
return track
|
||||
|
|
@ -315,34 +298,25 @@ def create_trackitem(playhead, track, otio_clip, clip, tagsbin):
|
|||
for effect in otio_clip.effects:
|
||||
if isinstance(effect, otio.schema.LinearTimeWarp):
|
||||
trackitem.setPlaybackSpeed(
|
||||
trackitem.playbackSpeed() *
|
||||
effect.time_scalar
|
||||
trackitem.playbackSpeed() * effect.time_scalar
|
||||
)
|
||||
|
||||
# If reverse playback speed swap source in and out
|
||||
if trackitem.playbackSpeed() < 0:
|
||||
source_out = source_range.start_time.value
|
||||
source_in = (
|
||||
source_range.start_time.value +
|
||||
source_range.duration.value
|
||||
source_range.start_time.value + source_range.duration.value
|
||||
) - 1
|
||||
timeline_in = playhead + source_out
|
||||
timeline_out = (
|
||||
timeline_in +
|
||||
source_range.duration.value
|
||||
) - 1
|
||||
timeline_out = (timeline_in + source_range.duration.value) - 1
|
||||
else:
|
||||
# Normal playback speed
|
||||
source_in = source_range.start_time.value
|
||||
source_out = (
|
||||
source_range.start_time.value +
|
||||
source_range.duration.value
|
||||
source_range.start_time.value + source_range.duration.value
|
||||
) - 1
|
||||
timeline_in = playhead
|
||||
timeline_out = (
|
||||
timeline_in +
|
||||
source_range.duration.value
|
||||
) - 1
|
||||
timeline_out = (timeline_in + source_range.duration.value) - 1
|
||||
|
||||
# Set source and timeline in/out points
|
||||
trackitem.setSourceIn(source_in)
|
||||
|
|
@ -357,7 +331,8 @@ def create_trackitem(playhead, track, otio_clip, clip, tagsbin):
|
|||
|
||||
|
||||
def build_sequence(
|
||||
otio_timeline, project=None, sequence=None, track_kind=None):
|
||||
otio_timeline, project=None, sequence=None, track_kind=None
|
||||
):
|
||||
|
||||
if project is None:
|
||||
if sequence:
|
||||
|
|
@ -414,8 +389,7 @@ def build_sequence(
|
|||
if isinstance(otio_clip, otio.schema.Stack):
|
||||
bar = hiero.ui.mainWindow().statusBar()
|
||||
bar.showMessage(
|
||||
"Nested sequences are created separately.",
|
||||
timeout=3000
|
||||
"Nested sequences are created separately.", timeout=3000
|
||||
)
|
||||
build_sequence(otio_clip, project, otio_track.kind)
|
||||
|
||||
|
|
@ -428,11 +402,7 @@ def build_sequence(
|
|||
|
||||
# Create TrackItem
|
||||
trackitem = create_trackitem(
|
||||
playhead,
|
||||
track,
|
||||
otio_clip,
|
||||
clip,
|
||||
tagsbin
|
||||
playhead, track, otio_clip, clip, tagsbin
|
||||
)
|
||||
|
||||
# Add trackitem to track
|
||||
|
|
|
|||
|
|
@ -89,7 +89,7 @@ def update_tag(tag, data):
|
|||
# set all data metadata to tag metadata
|
||||
for _k, _v in data_mtd.items():
|
||||
value = str(_v)
|
||||
if type(_v) == dict:
|
||||
if isinstance(_v, dict):
|
||||
value = json.dumps(_v)
|
||||
|
||||
# set the value
|
||||
|
|
|
|||
|
|
@ -16,7 +16,7 @@ class CreateShotClip(phiero.Creator):
|
|||
|
||||
gui_tracks = [track.name()
|
||||
for track in phiero.get_current_sequence().videoTracks()]
|
||||
gui_name = "Pype publish attributes creator"
|
||||
gui_name = "AYON publish attributes creator"
|
||||
gui_info = "Define sequential rename and fill hierarchy data."
|
||||
gui_inputs = {
|
||||
"renameHierarchy": {
|
||||
|
|
@ -166,7 +166,7 @@ class CreateShotClip(phiero.Creator):
|
|||
"type": "QCheckBox",
|
||||
"label": "Source resolution",
|
||||
"target": "tag",
|
||||
"toolTip": "Is resloution taken from timeline or source?", # noqa
|
||||
"toolTip": "Is resolution taken from timeline or source?", # noqa
|
||||
"order": 4},
|
||||
}
|
||||
},
|
||||
|
|
@ -211,7 +211,7 @@ class CreateShotClip(phiero.Creator):
|
|||
presets = deepcopy(self.presets)
|
||||
gui_inputs = deepcopy(self.gui_inputs)
|
||||
|
||||
# get key pares from presets and match it on ui inputs
|
||||
# get key pairs from presets and match it on ui inputs
|
||||
for k, v in gui_inputs.items():
|
||||
if v["type"] in ("dict", "section"):
|
||||
# nested dictionary (only one level allowed
|
||||
|
|
|
|||
|
|
@ -1,5 +1,5 @@
|
|||
from itertools import product
|
||||
import re
|
||||
|
||||
import pyblish.api
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -43,7 +43,7 @@ class PrecollectInstances(pyblish.api.ContextPlugin):
|
|||
tracks_effect_items = self.collect_sub_track_items(all_tracks)
|
||||
context.data["tracksEffectItems"] = tracks_effect_items
|
||||
|
||||
# process all sellected timeline track items
|
||||
# process all selected timeline track items
|
||||
for track_item in selected_timeline_items:
|
||||
data = {}
|
||||
clip_name = track_item.name()
|
||||
|
|
@ -62,7 +62,7 @@ class PrecollectInstances(pyblish.api.ContextPlugin):
|
|||
}:
|
||||
continue
|
||||
|
||||
# get clips subtracks and anotations
|
||||
# get clips subtracks and annotations
|
||||
annotations = self.clip_annotations(source_clip)
|
||||
subtracks = self.clip_subtrack(track_item)
|
||||
self.log.debug("Annotations: {}".format(annotations))
|
||||
|
|
@ -84,6 +84,11 @@ class PrecollectInstances(pyblish.api.ContextPlugin):
|
|||
k: v for k, v in tag_data.items()
|
||||
if k not in ("id", "applieswhole", "label")
|
||||
})
|
||||
# Backward compatibility fix of 'entity_type' > 'folder_type'
|
||||
if "parents" in data:
|
||||
for parent in data["parents"]:
|
||||
if "entity_type" in parent:
|
||||
parent["folder_type"] = parent.pop("entity_type")
|
||||
|
||||
asset, asset_name = self._get_folder_data(tag_data)
|
||||
|
||||
|
|
@ -378,12 +383,10 @@ class PrecollectInstances(pyblish.api.ContextPlugin):
|
|||
# collect all subtrack items
|
||||
sub_track_items = {}
|
||||
for track in tracks:
|
||||
items = track.items()
|
||||
|
||||
effet_items = track.subTrackItems()
|
||||
effect_items = track.subTrackItems()
|
||||
|
||||
# skip if no clips on track > need track with effect only
|
||||
if not effet_items:
|
||||
if not effect_items:
|
||||
continue
|
||||
|
||||
# skip all disabled tracks
|
||||
|
|
@ -391,7 +394,7 @@ class PrecollectInstances(pyblish.api.ContextPlugin):
|
|||
continue
|
||||
|
||||
track_index = track.trackIndex()
|
||||
_sub_track_items = phiero.flatten(effet_items)
|
||||
_sub_track_items = phiero.flatten(effect_items)
|
||||
|
||||
_sub_track_items = list(_sub_track_items)
|
||||
# continue only if any subtrack items are collected
|
||||
|
|
@ -439,10 +442,10 @@ class PrecollectInstances(pyblish.api.ContextPlugin):
|
|||
for item in subTrackItems:
|
||||
if "TimeWarp" in item.name():
|
||||
continue
|
||||
# avoid all anotation
|
||||
# avoid all annotation
|
||||
if isinstance(item, hiero.core.Annotation):
|
||||
continue
|
||||
# # avoid all not anaibled
|
||||
# avoid all disabled
|
||||
if not item.isEnabled():
|
||||
continue
|
||||
subtracks.append(item)
|
||||
|
|
|
|||
|
|
@ -35,10 +35,6 @@ class PrecollectRetime(api.InstancePlugin):
|
|||
source_out = int(track_item.sourceOut())
|
||||
speed = track_item.playbackSpeed()
|
||||
|
||||
# calculate available material before retime
|
||||
available_in = int(track_item.handleInLength() * speed)
|
||||
available_out = int(track_item.handleOutLength() * speed)
|
||||
|
||||
self.log.debug((
|
||||
"_BEFORE: \n timeline_in: `{0}`,\n timeline_out: `{1}`, \n "
|
||||
"source_in: `{2}`,\n source_out: `{3}`,\n speed: `{4}`,\n "
|
||||
|
|
|
|||
|
|
@ -91,7 +91,7 @@ def create_interactive(creator_identifier, **kwargs):
|
|||
pane = stateutils.activePane(kwargs)
|
||||
if isinstance(pane, hou.NetworkEditor):
|
||||
pwd = pane.pwd()
|
||||
project_name = context.get_current_project_name(),
|
||||
project_name = context.get_current_project_name()
|
||||
folder_path = context.get_current_folder_path()
|
||||
task_name = context.get_current_task_name()
|
||||
folder_entity = ayon_api.get_folder_by_path(
|
||||
|
|
|
|||
|
|
@ -3,7 +3,6 @@ import sys
|
|||
import os
|
||||
import errno
|
||||
import re
|
||||
import uuid
|
||||
import logging
|
||||
import json
|
||||
from contextlib import contextmanager
|
||||
|
|
@ -44,84 +43,6 @@ def get_folder_fps(folder_entity=None):
|
|||
return folder_entity["attrib"]["fps"]
|
||||
|
||||
|
||||
def set_id(node, unique_id, overwrite=False):
|
||||
exists = node.parm("id")
|
||||
if not exists:
|
||||
imprint(node, {"id": unique_id})
|
||||
|
||||
if not exists and overwrite:
|
||||
node.setParm("id", unique_id)
|
||||
|
||||
|
||||
def get_id(node):
|
||||
"""Get the `cbId` attribute of the given node.
|
||||
|
||||
Args:
|
||||
node (hou.Node): the name of the node to retrieve the attribute from
|
||||
|
||||
Returns:
|
||||
str: cbId attribute of the node.
|
||||
|
||||
"""
|
||||
|
||||
if node is not None:
|
||||
return node.parm("id")
|
||||
|
||||
|
||||
def generate_ids(nodes, folder_id=None):
|
||||
"""Returns new unique ids for the given nodes.
|
||||
|
||||
Note: This does not assign the new ids, it only generates the values.
|
||||
|
||||
To assign new ids using this method:
|
||||
>>> nodes = ["a", "b", "c"]
|
||||
>>> for node, id in generate_ids(nodes):
|
||||
>>> set_id(node, id)
|
||||
|
||||
To also override any existing values (and assign regenerated ids):
|
||||
>>> nodes = ["a", "b", "c"]
|
||||
>>> for node, id in generate_ids(nodes):
|
||||
>>> set_id(node, id, overwrite=True)
|
||||
|
||||
Args:
|
||||
nodes (list): List of nodes.
|
||||
folder_id (str): Folder id . Use current folder id if is ``None``.
|
||||
|
||||
Returns:
|
||||
list: A list of (node, id) tuples.
|
||||
|
||||
"""
|
||||
|
||||
if folder_id is None:
|
||||
project_name = get_current_project_name()
|
||||
folder_path = get_current_folder_path()
|
||||
# Get folder id of current context folder
|
||||
folder_entity = ayon_api.get_folder_by_path(
|
||||
project_name, folder_path, fields={"id"}
|
||||
)
|
||||
if not folder_entity:
|
||||
raise ValueError("No current folder is set.")
|
||||
|
||||
folder_id = folder_entity["id"]
|
||||
|
||||
node_ids = []
|
||||
for node in nodes:
|
||||
_, uid = str(uuid.uuid4()).rsplit("-", 1)
|
||||
unique_id = "{}:{}".format(folder_id, uid)
|
||||
node_ids.append((node, unique_id))
|
||||
|
||||
return node_ids
|
||||
|
||||
|
||||
def get_id_required_nodes():
|
||||
|
||||
valid_types = ["geometry"]
|
||||
nodes = {n for n in hou.node("/out").children() if
|
||||
n.type().name() in valid_types}
|
||||
|
||||
return list(nodes)
|
||||
|
||||
|
||||
def get_output_parameter(node):
|
||||
"""Return the render output parameter of the given node
|
||||
|
||||
|
|
@ -526,7 +447,7 @@ def maintained_selection():
|
|||
node.setSelected(on=True)
|
||||
|
||||
|
||||
def reset_framerange():
|
||||
def reset_framerange(fps=True, frame_range=True):
|
||||
"""Set frame range and FPS to current folder."""
|
||||
|
||||
project_name = get_current_project_name()
|
||||
|
|
@ -535,29 +456,32 @@ def reset_framerange():
|
|||
folder_entity = ayon_api.get_folder_by_path(project_name, folder_path)
|
||||
folder_attributes = folder_entity["attrib"]
|
||||
|
||||
# Get FPS
|
||||
fps = get_folder_fps(folder_entity)
|
||||
# Set FPS
|
||||
if fps:
|
||||
fps = get_folder_fps(folder_entity)
|
||||
print("Setting scene FPS to {}".format(int(fps)))
|
||||
set_scene_fps(fps)
|
||||
|
||||
# Get Start and End Frames
|
||||
frame_start = folder_attributes.get("frameStart")
|
||||
frame_end = folder_attributes.get("frameEnd")
|
||||
if frame_range:
|
||||
|
||||
if frame_start is None or frame_end is None:
|
||||
log.warning("No edit information found for '{}'".format(folder_path))
|
||||
return
|
||||
# Set Start and End Frames
|
||||
frame_start = folder_attributes.get("frameStart")
|
||||
frame_end = folder_attributes.get("frameEnd")
|
||||
|
||||
handle_start = folder_attributes.get("handleStart", 0)
|
||||
handle_end = folder_attributes.get("handleEnd", 0)
|
||||
if frame_start is None or frame_end is None:
|
||||
log.warning("No edit information found for '%s'", folder_path)
|
||||
return
|
||||
|
||||
frame_start -= int(handle_start)
|
||||
frame_end += int(handle_end)
|
||||
handle_start = folder_attributes.get("handleStart", 0)
|
||||
handle_end = folder_attributes.get("handleEnd", 0)
|
||||
|
||||
# Set frame range and FPS
|
||||
print("Setting scene FPS to {}".format(int(fps)))
|
||||
set_scene_fps(fps)
|
||||
hou.playbar.setFrameRange(frame_start, frame_end)
|
||||
hou.playbar.setPlaybackRange(frame_start, frame_end)
|
||||
hou.setFrame(frame_start)
|
||||
frame_start -= int(handle_start)
|
||||
frame_end += int(handle_end)
|
||||
|
||||
# Set frame range and FPS
|
||||
hou.playbar.setFrameRange(frame_start, frame_end)
|
||||
hou.playbar.setPlaybackRange(frame_start, frame_end)
|
||||
hou.setFrame(frame_start)
|
||||
|
||||
|
||||
def get_main_window():
|
||||
|
|
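A brief usage sketch for the refactored reset_framerange(); it assumes a running Houdini session with a current AYON context.

# Reset only the playbar frame range, keeping the current scene FPS untouched:
reset_framerange(fps=False, frame_range=True)

# Reset both FPS and frame range (the previous default behaviour):
reset_framerange()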
@ -1072,3 +996,84 @@ def add_self_publish_button(node):
|
|||
template = node.parmTemplateGroup()
|
||||
template.insertBefore((0,), button_parm)
|
||||
node.setParmTemplateGroup(template)
|
||||
|
||||
|
||||
def update_content_on_context_change():
|
||||
"""Update all Creator instances to current asset"""
|
||||
host = registered_host()
|
||||
context = host.get_current_context()
|
||||
|
||||
folder_path = context["folder_path"]
|
||||
task = context["task_name"]
|
||||
|
||||
create_context = CreateContext(host, reset=True)
|
||||
|
||||
for instance in create_context.instances:
|
||||
instance_folder_path = instance.get("folderPath")
|
||||
if instance_folder_path and instance_folder_path != folder_path:
|
||||
instance["folderPath"] = folder_path
|
||||
instance_task = instance.get("task")
|
||||
if instance_task and instance_task != task:
|
||||
instance["task"] = task
|
||||
|
||||
create_context.save_changes()
|
||||
|
||||
|
||||
def prompt_reset_context():
|
||||
"""Prompt the user what context settings to reset.
|
||||
This prompt is used on saving to a different task to allow the scene to
|
||||
get matched to the new context.
|
||||
"""
|
||||
# TODO: Cleanup this prototyped mess of imports and odd dialog
|
||||
from ayon_core.tools.attribute_defs.dialog import (
|
||||
AttributeDefinitionsDialog
|
||||
)
|
||||
from ayon_core.style import load_stylesheet
|
||||
from ayon_core.lib import BoolDef, UILabelDef
|
||||
|
||||
definitions = [
|
||||
UILabelDef(
|
||||
label=(
|
||||
"You are saving your workfile into a different folder or task."
|
||||
"\n\n"
|
||||
"Would you like to update some settings to the new context?\n"
|
||||
)
|
||||
),
|
||||
BoolDef(
|
||||
"fps",
|
||||
label="FPS",
|
||||
tooltip="Reset workfile FPS",
|
||||
default=True
|
||||
),
|
||||
BoolDef(
|
||||
"frame_range",
|
||||
label="Frame Range",
|
||||
tooltip="Reset workfile start and end frame ranges",
|
||||
default=True
|
||||
),
|
||||
BoolDef(
|
||||
"instances",
|
||||
label="Publish instances",
|
||||
tooltip="Update all publish instance's folder and task to match "
|
||||
"the new folder and task",
|
||||
default=True
|
||||
),
|
||||
]
|
||||
|
||||
dialog = AttributeDefinitionsDialog(definitions)
|
||||
dialog.setWindowTitle("Saving to different context.")
|
||||
dialog.setStyleSheet(load_stylesheet())
|
||||
if not dialog.exec_():
|
||||
return None
|
||||
|
||||
options = dialog.get_values()
|
||||
if options["fps"] or options["frame_range"]:
|
||||
reset_framerange(
|
||||
fps=options["fps"],
|
||||
frame_range=options["frame_range"]
|
||||
)
|
||||
|
||||
if options["instances"]:
|
||||
update_content_on_context_change()
|
||||
|
||||
dialog.deleteLater()
|
||||
|
|
@ -1,7 +1,6 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""Pipeline tools for OpenPype Houdini integration."""
|
||||
import os
|
||||
import sys
|
||||
import logging
|
||||
|
||||
import hou # noqa
|
||||
|
|
@ -39,6 +38,9 @@ LOAD_PATH = os.path.join(PLUGINS_DIR, "load")
|
|||
CREATE_PATH = os.path.join(PLUGINS_DIR, "create")
|
||||
INVENTORY_PATH = os.path.join(PLUGINS_DIR, "inventory")
|
||||
|
||||
# Track whether the workfile tool is about to save
|
||||
_about_to_save = False
|
||||
|
||||
|
||||
class HoudiniHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost):
|
||||
name = "houdini"
|
||||
|
|
@ -61,10 +63,12 @@ class HoudiniHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost):
|
|||
log.info("Installing callbacks ... ")
|
||||
# register_event_callback("init", on_init)
|
||||
self._register_callbacks()
|
||||
register_event_callback("workfile.save.before", before_workfile_save)
|
||||
register_event_callback("before.save", before_save)
|
||||
register_event_callback("save", on_save)
|
||||
register_event_callback("open", on_open)
|
||||
register_event_callback("new", on_new)
|
||||
register_event_callback("taskChanged", on_task_changed)
|
||||
|
||||
self._has_been_setup = True
|
||||
|
||||
|
|
@ -166,7 +170,7 @@ class HoudiniHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost):
|
|||
if not op_ctx:
|
||||
op_ctx = self.create_context_node()
|
||||
|
||||
lib.imprint(op_ctx, data)
|
||||
lib.imprint(op_ctx, data, update=True)
|
||||
|
||||
def get_context_data(self):
|
||||
op_ctx = hou.node(CONTEXT_CONTAINER)
|
||||
|
|
@ -287,6 +291,11 @@ def ls():
|
|||
yield parse_container(container)
|
||||
|
||||
|
||||
def before_workfile_save(event):
|
||||
global _about_to_save
|
||||
_about_to_save = True
|
||||
|
||||
|
||||
def before_save():
|
||||
return lib.validate_fps()
|
||||
|
||||
|
|
@ -298,9 +307,16 @@ def on_save():
|
|||
# update houdini vars
|
||||
lib.update_houdini_vars_context_dialog()
|
||||
|
||||
nodes = lib.get_id_required_nodes()
|
||||
for node, new_id in lib.generate_ids(nodes):
|
||||
lib.set_id(node, new_id, overwrite=False)
|
||||
# We are now starting the actual save directly
|
||||
global _about_to_save
|
||||
_about_to_save = False
|
||||
|
||||
|
||||
def on_task_changed():
|
||||
global _about_to_save
|
||||
if not IS_HEADLESS and _about_to_save:
|
||||
# Let's prompt the user to update the context settings or not
|
||||
lib.prompt_reset_context()
|
||||
|
||||
|
||||
def _show_outdated_content_popup():
|
||||
|
|
|
|||
|
|
@ -19,10 +19,6 @@ from ayon_core.lib import BoolDef
|
|||
from .lib import imprint, read, lsattr, add_self_publish_button
|
||||
|
||||
|
||||
class OpenPypeCreatorError(CreatorError):
|
||||
pass
|
||||
|
||||
|
||||
class Creator(LegacyCreator):
|
||||
"""Creator plugin to create instances in Houdini
|
||||
|
||||
|
|
@ -92,8 +88,8 @@ class Creator(LegacyCreator):
|
|||
|
||||
except hou.Error as er:
|
||||
six.reraise(
|
||||
OpenPypeCreatorError,
|
||||
OpenPypeCreatorError("Creator error: {}".format(er)),
|
||||
CreatorError,
|
||||
CreatorError("Creator error: {}".format(er)),
|
||||
sys.exc_info()[2])
|
||||
|
||||
|
||||
|
|
@ -147,7 +143,6 @@ class HoudiniCreatorBase(object):
|
|||
def create_instance_node(
|
||||
folder_path, node_name, parent, node_type="geometry"
|
||||
):
|
||||
# type: (str, str, str) -> hou.Node
|
||||
"""Create node representing instance.
|
||||
|
||||
Arguments:
|
||||
|
|
@ -210,8 +205,8 @@ class HoudiniCreator(NewCreator, HoudiniCreatorBase):
|
|||
|
||||
except hou.Error as er:
|
||||
six.reraise(
|
||||
OpenPypeCreatorError,
|
||||
OpenPypeCreatorError("Creator error: {}".format(er)),
|
||||
CreatorError,
|
||||
CreatorError("Creator error: {}".format(er)),
|
||||
sys.exc_info()[2])
|
||||
|
||||
def lock_parameters(self, node, parameters):
|
||||
|
|
|
|||
|
|
@ -2,6 +2,7 @@
|
|||
"""Creator plugin for creating publishable Houdini Digital Assets."""
|
||||
import ayon_api
|
||||
|
||||
from ayon_core.pipeline import CreatorError
|
||||
from ayon_core.hosts.houdini.api import plugin
|
||||
import hou
|
||||
|
||||
|
|
@ -16,7 +17,7 @@ class CreateHDA(plugin.HoudiniCreator):
|
|||
maintain_selection = False
|
||||
|
||||
def _check_existing(self, folder_path, product_name):
|
||||
# type: (str) -> bool
|
||||
# type: (str, str) -> bool
|
||||
"""Check if existing product name versions already exists."""
|
||||
# Get all products of the current folder
|
||||
project_name = self.project_name
|
||||
|
|
@ -52,7 +53,7 @@ class CreateHDA(plugin.HoudiniCreator):
|
|||
# if node type has not its definition, it is not user
|
||||
# created hda. We test if hda can be created from the node.
|
||||
if not to_hda.canCreateDigitalAsset():
|
||||
raise plugin.OpenPypeCreatorError(
|
||||
raise CreatorError(
|
||||
"cannot create hda from node {}".format(to_hda))
|
||||
|
||||
hda_node = to_hda.createDigitalAsset(
|
||||
|
|
@ -61,7 +62,7 @@ class CreateHDA(plugin.HoudiniCreator):
|
|||
)
|
||||
hda_node.layoutChildren()
|
||||
elif self._check_existing(folder_path, node_name):
|
||||
raise plugin.OpenPypeCreatorError(
|
||||
raise CreatorError(
|
||||
("product {} is already published with different HDA"
|
||||
"definition.").format(node_name))
|
||||
else:
|
||||
|
|
|
|||
|
|
@ -2,6 +2,7 @@
|
|||
"""Creator plugin to create Redshift ROP."""
|
||||
import hou # noqa
|
||||
|
||||
from ayon_core.pipeline import CreatorError
|
||||
from ayon_core.hosts.houdini.api import plugin
|
||||
from ayon_core.lib import EnumDef, BoolDef
|
||||
|
||||
|
|
@ -14,6 +15,7 @@ class CreateRedshiftROP(plugin.HoudiniCreator):
|
|||
product_type = "redshift_rop"
|
||||
icon = "magic"
|
||||
ext = "exr"
|
||||
multi_layered_mode = "No Multi-Layered EXR File"
|
||||
|
||||
# Default to split export and render jobs
|
||||
split_render = True
|
||||
|
|
@ -42,7 +44,7 @@ class CreateRedshiftROP(plugin.HoudiniCreator):
|
|||
"Redshift_IPR", node_name=f"{basename}_IPR"
|
||||
)
|
||||
except hou.OperationFailed as e:
|
||||
raise plugin.OpenPypeCreatorError(
|
||||
raise CreatorError(
|
||||
(
|
||||
"Cannot create Redshift node. Is Redshift "
|
||||
"installed and enabled?"
|
||||
|
|
@ -54,25 +56,36 @@ class CreateRedshiftROP(plugin.HoudiniCreator):
|
|||
|
||||
# Set the linked rop to the Redshift ROP
|
||||
ipr_rop.parm("linked_rop").set(instance_node.path())
|
||||
|
||||
ext = pre_create_data.get("image_format")
|
||||
filepath = "{renders_dir}{product_name}/{product_name}.{fmt}".format(
|
||||
renders_dir=hou.text.expandString("$HIP/pyblish/renders/"),
|
||||
product_name=product_name,
|
||||
fmt="${aov}.$F4.{ext}".format(aov="AOV", ext=ext)
|
||||
)
|
||||
multi_layered_mode = pre_create_data.get("multi_layered_mode")
|
||||
|
||||
ext_format_index = {"exr": 0, "tif": 1, "jpg": 2, "png": 3}
|
||||
multilayer_mode_index = {"No Multi-Layered EXR File": "1",
|
||||
"Full Multi-Layered EXR File": "2" }
|
||||
|
||||
filepath = "{renders_dir}{product_name}/{product_name}.{fmt}".format(
|
||||
renders_dir=hou.text.expandString("$HIP/pyblish/renders/"),
|
||||
product_name=product_name,
|
||||
fmt="$AOV.$F4.{ext}".format(ext=ext)
|
||||
)
|
||||
|
||||
if multilayer_mode_index[multi_layered_mode] == "1":
|
||||
multipart = False
|
||||
|
||||
elif multilayer_mode_index[multi_layered_mode] == "2":
|
||||
multipart = True
|
||||
|
||||
parms = {
|
||||
# Render frame range
|
||||
"trange": 1,
|
||||
# Redshift ROP settings
|
||||
"RS_outputFileNamePrefix": filepath,
|
||||
"RS_outputMultilayerMode": "1", # no multi-layered exr
|
||||
"RS_outputBeautyAOVSuffix": "beauty",
|
||||
"RS_outputFileFormat": ext_format_index[ext],
|
||||
}
|
||||
if ext == "exr":
|
||||
parms["RS_outputMultilayerMode"] = multilayer_mode_index[multi_layered_mode]
|
||||
parms["RS_aovMultipart"] = multipart
|
||||
|
||||
if self.selected_nodes:
|
||||
# set up the render camera from the selected node
|
||||
|
|
@ -110,6 +123,11 @@ class CreateRedshiftROP(plugin.HoudiniCreator):
|
|||
image_format_enum = [
|
||||
"exr", "tif", "jpg", "png",
|
||||
]
|
||||
multi_layered_mode = [
|
||||
"No Multi-Layered EXR File",
|
||||
"Full Multi-Layered EXR File"
|
||||
]
|
||||
|
||||
|
||||
return attrs + [
|
||||
BoolDef("farm",
|
||||
|
|
@ -121,5 +139,9 @@ class CreateRedshiftROP(plugin.HoudiniCreator):
|
|||
EnumDef("image_format",
|
||||
image_format_enum,
|
||||
default=self.ext,
|
||||
label="Image Format Options")
|
||||
label="Image Format Options"),
|
||||
EnumDef("multi_layered_mode",
|
||||
multi_layered_mode,
|
||||
default=self.multi_layered_mode,
|
||||
label="Multi-Layered EXR")
|
||||
]
|
||||
|
|
|
|||
|
|
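An illustrative expansion of the Redshift output settings above; the product name is made up and $HIP has already been expanded by hou.text.expandString() when the prefix is built.

# With product_name = "renderLightingMain" and ext = "exr" the output prefix becomes
# roughly: <expanded $HIP>/pyblish/renders/renderLightingMain/renderLightingMain.$AOV.$F4.exr
# "No Multi-Layered EXR File" maps to RS_outputMultilayerMode "1" with multipart off,
# "Full Multi-Layered EXR File" maps to "2" with multipart on; both are applied only
# when the image format is exr.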
@ -3,7 +3,7 @@
|
|||
import hou
|
||||
|
||||
from ayon_core.hosts.houdini.api import plugin
|
||||
from ayon_core.pipeline import CreatedInstance
|
||||
from ayon_core.pipeline import CreatedInstance, CreatorError
|
||||
from ayon_core.lib import EnumDef, BoolDef
|
||||
|
||||
|
||||
|
|
@ -42,7 +42,7 @@ class CreateVrayROP(plugin.HoudiniCreator):
|
|||
"vray", node_name=basename + "_IPR"
|
||||
)
|
||||
except hou.OperationFailed:
|
||||
raise plugin.OpenPypeCreatorError(
|
||||
raise CreatorError(
|
||||
"Cannot create Vray render node. "
|
||||
"Make sure Vray installed and enabled!"
|
||||
)
|
||||
|
|
|
|||
|
|
@ -76,8 +76,8 @@ class SetFrameRangeWithHandlesLoader(load.LoaderPlugin):
|
|||
return
|
||||
|
||||
# Include handles
|
||||
start -= version_data.get("handleStart", 0)
|
||||
end += version_data.get("handleEnd", 0)
|
||||
start -= version_attributes.get("handleStart", 0)
|
||||
end += version_attributes.get("handleEnd", 0)
|
||||
|
||||
hou.playbar.setFrameRange(start, end)
|
||||
hou.playbar.setPlaybackRange(start, end)
|
||||
|
|
|
|||
|
|
@ -11,7 +11,8 @@ class AbcLoader(load.LoaderPlugin):
|
|||
|
||||
product_types = {"model", "animation", "pointcache", "gpuCache"}
|
||||
label = "Load Alembic"
|
||||
representations = ["abc"]
|
||||
representations = ["*"]
|
||||
extensions = {"abc"}
|
||||
order = -10
|
||||
icon = "code-fork"
|
||||
color = "orange"
|
||||
|
|
@ -58,7 +59,7 @@ class AbcLoader(load.LoaderPlugin):
|
|||
|
||||
normal_node.setInput(0, unpack)
|
||||
|
||||
null = container.createNode("null", node_name="OUT".format(name))
|
||||
null = container.createNode("null", node_name="OUT")
|
||||
null.setInput(0, normal_node)
|
||||
|
||||
# Ensure display flag is on the Alembic input node and not on the OUT
|
||||
|
|
|
|||
|
|
@ -11,7 +11,8 @@ class AbcArchiveLoader(load.LoaderPlugin):
|
|||
|
||||
product_types = {"model", "animation", "pointcache", "gpuCache"}
|
||||
label = "Load Alembic as Archive"
|
||||
representations = ["abc"]
|
||||
representations = ["*"]
|
||||
extensions = {"abc"}
|
||||
order = -5
|
||||
icon = "code-fork"
|
||||
color = "orange"
|
||||
|
|
|
|||
|
|
@ -167,6 +167,9 @@ class CameraLoader(load.LoaderPlugin):
|
|||
|
||||
temp_camera.destroy()
|
||||
|
||||
def switch(self, container, context):
|
||||
self.update(container, context)
|
||||
|
||||
def remove(self, container):
|
||||
|
||||
node = container["node"]
|
||||
|
|
@ -195,7 +198,6 @@ class CameraLoader(load.LoaderPlugin):
|
|||
def _match_maya_render_mask(self, camera):
|
||||
"""Workaround to match Maya render mask in Houdini"""
|
||||
|
||||
# print("Setting match maya render mask ")
|
||||
parm = camera.parm("aperture")
|
||||
expression = parm.expression()
|
||||
expression = expression.replace("return ", "aperture = ")
|
||||
|
|
|
|||
129
client/ayon_core/hosts/houdini/plugins/load/load_filepath.py
Normal file
|
|
@ -0,0 +1,129 @@
|
|||
import os
|
||||
import re
|
||||
|
||||
from ayon_core.pipeline import load
|
||||
from ayon_core.hosts.houdini.api import pipeline
|
||||
|
||||
import hou
|
||||
|
||||
|
||||
class FilePathLoader(load.LoaderPlugin):
|
||||
"""Load a managed filepath to a null node.
|
||||
|
||||
This is useful if, for a particular workflow, there is no existing loader
|
||||
yet. A Houdini artist can load the product with this generic filepath loader and then
|
||||
reference the relevant Houdini parm to use the exact value. The benefit
|
||||
is that this filepath will be managed and can be updated as usual.
|
||||
|
||||
"""
|
||||
|
||||
label = "Load filepath to node"
|
||||
order = 9
|
||||
icon = "link"
|
||||
color = "white"
|
||||
product_types = {"*"}
|
||||
representations = ["*"]
|
||||
|
||||
def load(self, context, name=None, namespace=None, data=None):
|
||||
|
||||
# Get the root node
|
||||
obj = hou.node("/obj")
|
||||
|
||||
# Define node name
|
||||
namespace = namespace if namespace else context["folder"]["name"]
|
||||
node_name = "{}_{}".format(namespace, name) if namespace else name
|
||||
|
||||
# Create a null node
|
||||
container = obj.createNode("null", node_name=node_name)
|
||||
|
||||
# Destroy any children
|
||||
for node in container.children():
|
||||
node.destroy()
|
||||
|
||||
# Add filepath attribute, set value as default value
|
||||
filepath = self.format_path(
|
||||
path=self.filepath_from_context(context),
|
||||
representation=context["representation"]
|
||||
)
|
||||
parm_template_group = container.parmTemplateGroup()
|
||||
attr_folder = hou.FolderParmTemplate("attributes_folder", "Attributes")
|
||||
parm = hou.StringParmTemplate(name="filepath",
|
||||
label="Filepath",
|
||||
num_components=1,
|
||||
default_value=(filepath,))
|
||||
attr_folder.addParmTemplate(parm)
|
||||
parm_template_group.append(attr_folder)
|
||||
|
||||
# Hide some default labels
|
||||
for folder_label in ["Transform", "Render", "Misc", "Redshift OBJ"]:
|
||||
folder = parm_template_group.findFolder(folder_label)
|
||||
if not folder:
|
||||
continue
|
||||
parm_template_group.hideFolder(folder_label, True)
|
||||
|
||||
container.setParmTemplateGroup(parm_template_group)
|
||||
|
||||
container.setDisplayFlag(False)
|
||||
container.setSelectableInViewport(False)
|
||||
container.useXray(False)
|
||||
|
||||
nodes = [container]
|
||||
|
||||
self[:] = nodes
|
||||
|
||||
return pipeline.containerise(
|
||||
node_name,
|
||||
namespace,
|
||||
nodes,
|
||||
context,
|
||||
self.__class__.__name__,
|
||||
suffix="",
|
||||
)
|
||||
|
||||
def update(self, container, context):
|
||||
|
||||
# Update the file path
|
||||
representation_entity = context["representation"]
|
||||
file_path = self.format_path(
|
||||
path=self.filepath_from_context(context),
|
||||
representation=representation_entity
|
||||
)
|
||||
|
||||
node = container["node"]
|
||||
node.setParms({
|
||||
"filepath": file_path,
|
||||
"representation": str(representation_entity["id"])
|
||||
})
|
||||
|
||||
# Update the parameter default value (cosmetics)
|
||||
parm_template_group = node.parmTemplateGroup()
|
||||
parm = parm_template_group.find("filepath")
|
||||
parm.setDefaultValue((file_path,))
|
||||
parm_template_group.replace(parm_template_group.find("filepath"),
|
||||
parm)
|
||||
node.setParmTemplateGroup(parm_template_group)
|
||||
|
||||
def switch(self, container, representation):
|
||||
self.update(container, representation)
|
||||
|
||||
def remove(self, container):
|
||||
|
||||
node = container["node"]
|
||||
node.destroy()
|
||||
|
||||
@staticmethod
|
||||
def format_path(path: str, representation: dict) -> str:
|
||||
"""Format file path for sequence with $F."""
|
||||
if not os.path.exists(path):
|
||||
raise RuntimeError("Path does not exist: %s" % path)
|
||||
|
||||
# The path is either a single file or sequence in a folder.
|
||||
frame = representation["context"].get("frame")
|
||||
if frame is not None:
|
||||
# Substitute frame number in sequence with $F with padding
|
||||
ext = representation.get("ext", representation["name"])
|
||||
token = "$F{}".format(len(frame)) # e.g. $F4
|
||||
pattern = r"\.(\d+)\.{ext}$".format(ext=re.escape(ext))
|
||||
path = re.sub(pattern, ".{}.{}".format(token, ext), path)
|
||||
|
||||
return os.path.normpath(path).replace("\\", "/")
|
||||
|
|
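The `format_path` helper in the new `FilePathLoader` swaps an explicit frame number for a padded `$F` token so Houdini can read the published sequence. A minimal sketch of that substitution outside the loader (the function name and sample path are illustrative only):

import os
import re

def to_houdini_sequence(path, frame, ext):
    # "1001" -> "$F4": the token padding follows the frame string length
    token = "$F{}".format(len(frame))
    pattern = r"\.(\d+)\.{ext}$".format(ext=re.escape(ext))
    path = re.sub(pattern, ".{}.{}".format(token, ext), path)
    return os.path.normpath(path).replace("\\", "/")

# "renders/shot010.1001.exr" -> "renders/shot010.$F4.exr"
print(to_houdini_sequence("renders/shot010.1001.exr", frame="1001", ext="exr"))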
@@ -1,4 +1,5 @@
import os
import re

from ayon_core.pipeline import (
    load,
@@ -44,7 +45,14 @@ def get_image_avalon_container():
class ImageLoader(load.LoaderPlugin):
    """Load images into COP2"""

    product_types = {"imagesequence"}
    product_types = {
        "imagesequence",
        "review",
        "render",
        "plate",
        "image",
        "online",
    }
    label = "Load Image (COP2)"
    representations = ["*"]
    order = -10
@@ -55,10 +63,8 @@ class ImageLoader(load.LoaderPlugin):
    def load(self, context, name=None, namespace=None, data=None):

        # Format file name, Houdini only wants forward slashes
        file_path = self.filepath_from_context(context)
        file_path = os.path.normpath(file_path)
        file_path = file_path.replace("\\", "/")
        file_path = self._get_file_sequence(file_path)
        path = self.filepath_from_context(context)
        path = self.format_path(path, representation=context["representation"])

        # Get the root node
        parent = get_image_avalon_container()
@@ -70,7 +76,10 @@ class ImageLoader(load.LoaderPlugin):
        node = parent.createNode("file", node_name=node_name)
        node.moveToGoodPosition()

        node.setParms({"filename1": file_path})
        parms = {"filename1": path}
        parms.update(self.get_colorspace_parms(context["representation"]))

        node.setParms(parms)

        # Imprint it manually
        data = {
@@ -93,16 +102,17 @@ class ImageLoader(load.LoaderPlugin):

        # Update the file path
        file_path = get_representation_path(repre_entity)
        file_path = file_path.replace("\\", "/")
        file_path = self._get_file_sequence(file_path)
        file_path = self.format_path(file_path, repre_entity)

        parms = {
            "filename1": file_path,
            "representation": repre_entity["id"],
        }

        parms.update(self.get_colorspace_parms(repre_entity))

        # Update attributes
        node.setParms(
            {
                "filename1": file_path,
                "representation": repre_entity["id"],
            }
        )
        node.setParms(parms)

    def remove(self, container):
@@ -119,14 +129,58 @@ class ImageLoader(load.LoaderPlugin):
        if not parent.children():
            parent.destroy()

    def _get_file_sequence(self, file_path):
        root = os.path.dirname(file_path)
        files = sorted(os.listdir(root))
    @staticmethod
    def format_path(path, representation):
        """Format file path correctly for single image or sequence."""
        if not os.path.exists(path):
            raise RuntimeError("Path does not exist: %s" % path)

        first_fname = files[0]
        prefix, padding, suffix = first_fname.rsplit(".", 2)
        fname = ".".join([prefix, "$F{}".format(len(padding)), suffix])
        return os.path.join(root, fname).replace("\\", "/")
        ext = os.path.splitext(path)[-1]

    def switch(self, container, context):
        self.update(container, context)
        is_sequence = bool(representation["context"].get("frame"))
        # The path is either a single file or sequence in a folder.
        if not is_sequence:
            filename = path
        else:
            filename = re.sub(r"(.*)\.(\d+){}$".format(re.escape(ext)),
                              "\\1.$F4{}".format(ext),
                              path)

            filename = os.path.join(path, filename)

        filename = os.path.normpath(filename)
        filename = filename.replace("\\", "/")

        return filename

    def get_colorspace_parms(self, representation: dict) -> dict:
        """Return the color space parameters.

        Returns the values for the colorspace parameters on the node if there
        is colorspace data on the representation.

        Arguments:
            representation (dict): The representation entity.

        Returns:
            dict: Parm to value mapping if colorspace data is defined.

        """
        # Using OCIO colorspace on COP2 File node is only supported in Hou 20+
        major, _, _ = hou.applicationVersion()
        if major < 20:
            return {}

        data = representation.get("data", {}).get("colorspaceData", {})
        if not data:
            return {}

        colorspace = data["colorspace"]
        if colorspace:
            return {
                "colorspace": 3,  # Use OpenColorIO
                "ocio_space": colorspace
            }

    def switch(self, container, representation):
        self.update(container, representation)
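The `get_colorspace_parms` method added to `ImageLoader` only fills in OCIO parms on Houdini 20 or newer, and only when the representation carries colorspace data. A standalone sketch of that decision logic, assuming the same parm mapping (the function name and sample values are illustrative):

def colorspace_parms(colorspace_data, houdini_version):
    # OCIO colorspace on the COP2 File node is only available in Houdini 20+
    if houdini_version[0] < 20:
        return {}
    colorspace = colorspace_data.get("colorspace")
    if not colorspace:
        return {}
    return {"colorspace": 3, "ocio_space": colorspace}  # 3 == "Use OpenColorIO"

print(colorspace_parms({"colorspace": "ACEScg"}, (20, 0, 547)))  # {'colorspace': 3, 'ocio_space': 'ACEScg'}
print(colorspace_parms({"colorspace": "ACEScg"}, (19, 5, 640)))  # {}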
77 client/ayon_core/hosts/houdini/plugins/load/load_usd_sop.py Normal file

@@ -0,0 +1,77 @@
import os

from ayon_core.pipeline import load
from ayon_core.hosts.houdini.api import pipeline


class SopUsdImportLoader(load.LoaderPlugin):
    """Load USD to SOPs via `usdimport`"""

    label = "Load USD to SOPs"
    product_types = {"*"}
    representations = ["usd"]
    order = -6
    icon = "code-fork"
    color = "orange"

    def load(self, context, name=None, namespace=None, data=None):
        import hou

        # Format file name, Houdini only wants forward slashes
        file_path = self.filepath_from_context(context)
        file_path = os.path.normpath(file_path)
        file_path = file_path.replace("\\", "/")

        # Get the root node
        obj = hou.node("/obj")

        # Define node name
        namespace = namespace if namespace else context["folder"]["name"]
        node_name = "{}_{}".format(namespace, name) if namespace else name

        # Create a new geo node
        container = obj.createNode("geo", node_name=node_name)

        # Create a usdimport node
        usdimport = container.createNode("usdimport", node_name=node_name)
        usdimport.setParms({"filepath1": file_path})

        # Set new position for unpack node else it gets cluttered
        nodes = [container, usdimport]

        return pipeline.containerise(
            node_name,
            namespace,
            nodes,
            context,
            self.__class__.__name__,
            suffix="",
        )

    def update(self, container, context):

        node = container["node"]
        try:
            usdimport_node = next(
                n for n in node.children() if n.type().name() == "usdimport"
            )
        except StopIteration:
            self.log.error("Could not find node of type `usdimport`")
            return

        # Update the file path
        file_path = self.filepath_from_context(context)
        file_path = file_path.replace("\\", "/")

        usdimport_node.setParms({"filepath1": file_path})

        # Update attribute
        node.setParms({"representation": context["representation"]["id"]})

    def remove(self, container):

        node = container["node"]
        node.destroy()

    def switch(self, container, representation):
        self.update(container, representation)
@@ -1,100 +0,0 @@
import hou

import pyblish.api

from ayon_core.pipeline import AYON_INSTANCE_ID, AVALON_INSTANCE_ID
from ayon_core.hosts.houdini.api import lib


class CollectInstances(pyblish.api.ContextPlugin):
    """Gather instances by all node in out graph and pre-defined attributes

    This collector takes into account folders that are associated with
    an specific node and marked with a unique identifier;

    Identifier:
        id (str): "ayon.create.instance"

    Specific node:
        The specific node is important because it dictates in which way the
        product is being exported.

        alembic: will export Alembic file which supports cascading attributes
                 like 'cbId' and 'path'
        geometry: Can export a wide range of file types, default out

    """

    order = pyblish.api.CollectorOrder - 0.01
    label = "Collect Instances"
    hosts = ["houdini"]

    def process(self, context):

        nodes = hou.node("/out").children()
        nodes += hou.node("/obj").children()

        # Include instances in USD stage only when it exists so it
        # remains backwards compatible with version before houdini 18
        stage = hou.node("/stage")
        if stage:
            nodes += stage.recursiveGlob("*", filter=hou.nodeTypeFilter.Rop)

        for node in nodes:

            if not node.parm("id"):
                continue

            if node.evalParm("id") not in {
                AYON_INSTANCE_ID, AVALON_INSTANCE_ID
            }:
                continue

            # instance was created by new creator code, skip it as
            # it is already collected.
            if node.parm("creator_identifier"):
                continue

            has_family = node.evalParm("family")
            assert has_family, "'%s' is missing 'family'" % node.name()

            self.log.info(
                "Processing legacy instance node {}".format(node.path())
            )

            data = lib.read(node)
            # Check bypass state and reverse
            if hasattr(node, "isBypassed"):
                data.update({"active": not node.isBypassed()})

            # temporarily translation of `active` to `publish` till issue has
            # been resolved.
            # https://github.com/pyblish/pyblish-base/issues/307
            if "active" in data:
                data["publish"] = data["active"]

            # Create nice name if the instance has a frame range.
            label = data.get("name", node.name())
            label += " (%s)" % data["folderPath"]  # include folder in name

            instance = context.create_instance(label)

            # Include `families` using `family` data
            product_type = data["family"]
            data["productType"] = product_type
            instance.data["families"] = [product_type]

            instance[:] = [node]
            instance.data["instance_node"] = node.path()
            instance.data.update(data)

        def sort_by_family(instance):
            """Sort by family"""
            return instance.data.get(
                "families", instance.data.get("productType")
            )

        # Sort/grouped by family (preserving local index)
        context[:] = sorted(context, key=sort_by_family)

        return context
@@ -41,23 +41,23 @@ class CollectKarmaROPRenderProducts(pyblish.api.InstancePlugin):
        instance.data["chunkSize"] = chunk_size
        self.log.debug("Chunk Size: %s" % chunk_size)

        default_prefix = evalParmNoFrame(rop, "picture")
        render_products = []
        default_prefix = evalParmNoFrame(rop, "picture")
        render_products = []

        # Default beauty AOV
        beauty_product = self.get_render_product_name(
            prefix=default_prefix, suffix=None
        )
        render_products.append(beauty_product)
        # Default beauty AOV
        beauty_product = self.get_render_product_name(
            prefix=default_prefix, suffix=None
        )
        render_products.append(beauty_product)

        files_by_aov = {
            "beauty": self.generate_expected_files(instance,
                                                   beauty_product)
        }
        files_by_aov = {
            "beauty": self.generate_expected_files(instance,
                                                   beauty_product)
        }

        filenames = list(render_products)
        instance.data["files"] = filenames
        instance.data["renderProducts"] = colorspace.ARenderProduct()
        filenames = list(render_products)
        instance.data["files"] = filenames
        instance.data["renderProducts"] = colorspace.ARenderProduct()

        for product in render_products:
            self.log.debug("Found render product: %s" % product)
@@ -41,57 +41,57 @@ class CollectMantraROPRenderProducts(pyblish.api.InstancePlugin):
        instance.data["chunkSize"] = chunk_size
        self.log.debug("Chunk Size: %s" % chunk_size)

        default_prefix = evalParmNoFrame(rop, "vm_picture")
        render_products = []
        default_prefix = evalParmNoFrame(rop, "vm_picture")
        render_products = []

        # Store whether we are splitting the render job (export + render)
        split_render = bool(rop.parm("soho_outputmode").eval())
        instance.data["splitRender"] = split_render
        export_prefix = None
        export_products = []
        if split_render:
            export_prefix = evalParmNoFrame(
                rop, "soho_diskfile", pad_character="0"
            )
            beauty_export_product = self.get_render_product_name(
                prefix=export_prefix,
                suffix=None)
            export_products.append(beauty_export_product)
            self.log.debug(
                "Found export product: {}".format(beauty_export_product)
            )
            instance.data["ifdFile"] = beauty_export_product
            instance.data["exportFiles"] = list(export_products)

        # Default beauty AOV
        beauty_product = self.get_render_product_name(
            prefix=default_prefix, suffix=None
        # Store whether we are splitting the render job (export + render)
        split_render = bool(rop.parm("soho_outputmode").eval())
        instance.data["splitRender"] = split_render
        export_prefix = None
        export_products = []
        if split_render:
            export_prefix = evalParmNoFrame(
                rop, "soho_diskfile", pad_character="0"
            )
        render_products.append(beauty_product)
            beauty_export_product = self.get_render_product_name(
                prefix=export_prefix,
                suffix=None)
            export_products.append(beauty_export_product)
            self.log.debug(
                "Found export product: {}".format(beauty_export_product)
            )
            instance.data["ifdFile"] = beauty_export_product
            instance.data["exportFiles"] = list(export_products)

        files_by_aov = {
            "beauty": self.generate_expected_files(instance,
                                                   beauty_product)
        }
        # Default beauty AOV
        beauty_product = self.get_render_product_name(
            prefix=default_prefix, suffix=None
        )
        render_products.append(beauty_product)

        aov_numbers = rop.evalParm("vm_numaux")
        if aov_numbers > 0:
            # get the filenames of the AOVs
            for i in range(1, aov_numbers + 1):
                var = rop.evalParm("vm_variable_plane%d" % i)
                if var:
                    aov_name = "vm_filename_plane%d" % i
                    aov_boolean = "vm_usefile_plane%d" % i
                    aov_enabled = rop.evalParm(aov_boolean)
                    has_aov_path = rop.evalParm(aov_name)
                    if has_aov_path and aov_enabled == 1:
                        aov_prefix = evalParmNoFrame(rop, aov_name)
                        aov_product = self.get_render_product_name(
                            prefix=aov_prefix, suffix=None
                        )
                        render_products.append(aov_product)
        files_by_aov = {
            "beauty": self.generate_expected_files(instance,
                                                   beauty_product)
        }

                        files_by_aov[var] = self.generate_expected_files(instance, aov_product)  # noqa
        aov_numbers = rop.evalParm("vm_numaux")
        if aov_numbers > 0:
            # get the filenames of the AOVs
            for i in range(1, aov_numbers + 1):
                var = rop.evalParm("vm_variable_plane%d" % i)
                if var:
                    aov_name = "vm_filename_plane%d" % i
                    aov_boolean = "vm_usefile_plane%d" % i
                    aov_enabled = rop.evalParm(aov_boolean)
                    has_aov_path = rop.evalParm(aov_name)
                    if has_aov_path and aov_enabled == 1:
                        aov_prefix = evalParmNoFrame(rop, aov_name)
                        aov_product = self.get_render_product_name(
                            prefix=aov_prefix, suffix=None
                        )
                        render_products.append(aov_product)

                        files_by_aov[var] = self.generate_expected_files(instance, aov_product)  # noqa

        for product in render_products:
            self.log.debug("Found render product: %s" % product)
@@ -60,20 +60,30 @@ class CollectRedshiftROPRenderProducts(pyblish.api.InstancePlugin):
        instance.data["ifdFile"] = beauty_export_product
        instance.data["exportFiles"] = list(export_products)

        # Default beauty AOV
        full_exr_mode = (rop.evalParm("RS_outputMultilayerMode") == "2")
        if full_exr_mode:
            # Ignore beauty suffix if full mode is enabled
            # As this is what the rop does.
            beauty_suffix = ""

        # Default beauty/main layer AOV
        beauty_product = self.get_render_product_name(
            prefix=default_prefix, suffix=beauty_suffix
        )
        render_products = [beauty_product]
        files_by_aov = {
            "_": self.generate_expected_files(instance,
                                              beauty_product)}
            beauty_suffix: self.generate_expected_files(instance,
                                                        beauty_product)
        }

        aovs_rop = rop.parm("RS_aovGetFromNode").evalAsNode()
        if aovs_rop:
            rop = aovs_rop

        num_aovs = rop.evalParm("RS_aov")
        num_aovs = 0
        if not rop.evalParm('RS_aovAllAOVsDisabled'):
            num_aovs = rop.evalParm("RS_aov")

        for index in range(num_aovs):
            i = index + 1
@@ -86,11 +96,14 @@ class CollectRedshiftROPRenderProducts(pyblish.api.InstancePlugin):
            if not aov_prefix:
                aov_prefix = default_prefix

            aov_product = self.get_render_product_name(aov_prefix, aov_suffix)
            render_products.append(aov_product)
            if rop.parm(f"RS_aovID_{i}").evalAsString() == "CRYPTOMATTE" or \
                    not full_exr_mode:

                aov_product = self.get_render_product_name(aov_prefix, aov_suffix)
                render_products.append(aov_product)

            files_by_aov[aov_suffix] = self.generate_expected_files(instance,
                                                                    aov_product)  # noqa
                files_by_aov[aov_suffix] = self.generate_expected_files(instance,
                                                                        aov_product)  # noqa

        for product in render_products:
            self.log.debug("Found render product: %s" % product)
@@ -118,7 +131,7 @@ class CollectRedshiftROPRenderProducts(pyblish.api.InstancePlugin):

        # When AOV is explicitly defined in prefix we just swap it out
        # directly with the AOV suffix to embed it.
        # Note: ${AOV} seems to be evaluated in the parameter as %AOV%
        # Note: '$AOV' seems to be evaluated in the parameter as '%AOV%'
        has_aov_in_prefix = "%AOV%" in prefix
        if has_aov_in_prefix:
            # It seems that when some special separator characters are present
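The comment corrected above describes how an explicit AOV token in the Redshift output prefix is replaced by the AOV suffix. A small sketch of that swap, assuming the `%AOV%` token form shown in the diff (the helper name and sample prefix are illustrative):

def embed_aov(prefix, aov_suffix):
    # If the prefix already carries an explicit %AOV% token, swap it out
    # directly with the AOV suffix to embed it; otherwise leave the prefix alone.
    if "%AOV%" in prefix:
        return prefix.replace("%AOV%", aov_suffix)
    return prefix

print(embed_aov("renders/$HIPNAME.%AOV%.$F4.exr", "diffuse"))
# renders/$HIPNAME.diffuse.$F4.exr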
@@ -71,6 +71,8 @@ class ValidateCopOutputNode(pyblish.api.InstancePlugin):
            # the isinstance check above should be stricter than this category
            if output_node.type().category().name() != "Cop2":
                raise PublishValidationError(
                    ("Output node %s is not of category Cop2. "
                     "This is a bug...").format(output_node.path()),
                    (
                        "Output node {} is not of category Cop2."
                        " This is a bug..."
                    ).format(output_node.path()),
                    title=cls.label)
@@ -1,7 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?>
<mainMenu>
  <menuBar>
    <subMenu id="openpype_menu">
    <subMenu id="ayon_menu">
      <labelExpression><![CDATA[
import os
return os.environ.get("AYON_MENU_LABEL") or "AYON"
Some files were not shown because too many files have changed in this diff.